repo stringlengths 5 92 | file_url stringlengths 80 287 | file_path stringlengths 5 197 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:37:27 2026-01-04 17:58:21 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/helpers.rb | slack-gamebot/api/helpers.rb | require 'slack-gamebot/api/helpers/cursor_helpers'
require 'slack-gamebot/api/helpers/pagination_parameters'
require 'slack-gamebot/api/helpers/sort_helpers'
require 'slack-gamebot/api/helpers/error_helpers'
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/status_presenter.rb | slack-gamebot/api/presenters/status_presenter.rb | module Api
module Presenters
module StatusPresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
link :self do |opts|
"#{base_url(opts)}/status"
end
property :games_count
property :games
def games_count
Game.count
end
def games
Game.all.each_with_object({}) do |game, h|
h[game.name] = {}
h[game.name][:teams_count] = game.teams.count
h[game.name][:active_teams_count] = game.teams.active.count
h[game.name][:api_teams_count] = game.teams.api.count
h[game.name][:users_count] = game.users.count
h[game.name][:challenges_count] = game.challenges.count
h[game.name][:matches_count] = game.matches.count
h[game.name][:seasons_count] = game.seasons.count
team = game.teams.active.asc(:_id).first
h[game.name][:ping] = team.ping_if_active! if team
end
end
private
def base_url(opts)
request = Grape::Request.new(opts[:env])
request.base_url
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/challenge_presenter.rb | slack-gamebot/api/presenters/challenge_presenter.rb | module Api
module Presenters
module ChallengePresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
property :id, type: String, desc: 'Challenge ID.'
property :state, type: String, desc: 'Current state of the challenge.'
property :channel, type: String, desc: 'Channel where the challenge was created.'
property :created_at, type: DateTime, desc: 'Date/time when the challenge was created.'
property :updated_at, type: DateTime, desc: 'Date/time when the challenge was accepted, declined or canceled.'
link :team do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/teams/#{represented.team.id}" if represented.team
end
link :created_by do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/users/#{represented.created_by.id}" if represented.created_by
end
link :updated_by do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/users/#{represented.updated_by.id}" if represented.updated_by
end
link :challengers do |opts|
request = Grape::Request.new(opts[:env])
represented.challengers.map do |challenger|
"#{request.base_url}/api/users/#{challenger.id}"
end
end
link :challenged do |opts|
request = Grape::Request.new(opts[:env])
represented.challenged.map do |challenged|
"#{request.base_url}/api/users/#{challenged.id}"
end
end
link :match do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/matches/#{represented.match.id}" if represented.match
end
link :self do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/challenges/#{id}"
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/matches_presenter.rb | slack-gamebot/api/presenters/matches_presenter.rb | module Api
module Presenters
module MatchesPresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
include Api::Presenters::PaginatedPresenter
collection :results, extend: MatchPresenter, as: :matches, embedded: true
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/users_presenter.rb | slack-gamebot/api/presenters/users_presenter.rb | module Api
module Presenters
module UsersPresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
include Api::Presenters::PaginatedPresenter
collection :results, extend: UserPresenter, as: :users, embedded: true
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/team_presenter.rb | slack-gamebot/api/presenters/team_presenter.rb | module Api
module Presenters
module TeamPresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
property :id, type: String, desc: 'Team ID.'
property :team_id, type: String, desc: 'Slack team ID.'
property :name, type: String, desc: 'Team name.'
property :domain, type: String, desc: 'Team domain.'
property :active, type: ::Grape::API::Boolean, desc: 'Team is active.'
property :subscribed, type: ::Grape::API::Boolean, desc: 'Team is a subscriber.'
property :gifs, type: ::Grape::API::Boolean, desc: 'Team loves animated GIFs.'
property :aliases, type: Array, desc: 'Game aliases.'
property :elo, type: Integer, desc: 'Base elo.'
property :unbalanced, type: ::Grape::API::Boolean, desc: 'Permits unbalanced challenges.'
property :created_at, type: DateTime, desc: 'Date/time when the team was created.'
property :updated_at, type: DateTime, desc: 'Date/time when the team was accepted, declined or canceled.'
link :challenges do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/challenges?team_id=#{represented.id}"
end
link :matches do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/matches?team_id=#{represented.id}"
end
link :seasons do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/seasons?team_id=#{represented.id}"
end
link :users do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/users?team_id=#{represented.id}"
end
link :captains do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/users?team_id=#{represented.id}&captain=true"
end
link :game do |opts|
if represented.game_id
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/games/#{represented.game_id}"
end
end
link :self do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/teams/#{id}"
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/teams_presenter.rb | slack-gamebot/api/presenters/teams_presenter.rb | module Api
module Presenters
module TeamsPresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
include Api::Presenters::PaginatedPresenter
collection :results, extend: TeamPresenter, as: :teams, embedded: true
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/paginated_presenter.rb | slack-gamebot/api/presenters/paginated_presenter.rb | module Api
module Presenters
module PaginatedPresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
property :total_count
link :self do |opts|
"#{request_url(opts)}#{query_string_for_cursor(nil, opts)}"
end
link :next do |opts|
"#{request_url(opts)}#{query_string_for_cursor(represented.next, opts)}" if represented.next
end
private
def request_url(opts)
request = Grape::Request.new(opts[:env])
"#{request.base_url}#{opts[:env]['PATH_INFO']}"
end
# replace the page and offset parameters in the query string
def query_string_for_cursor(cursor, opts)
qs = Hashie::Mash.new(Rack::Utils.parse_nested_query(opts[:env]['QUERY_STRING']))
if cursor
qs[:cursor] = cursor
qs.delete(:offset)
end
"?#{qs.to_query}" unless qs.empty?
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/games_presenter.rb | slack-gamebot/api/presenters/games_presenter.rb | module Api
module Presenters
module GamesPresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
include Api::Presenters::PaginatedPresenter
collection :results, extend: GamePresenter, as: :games, embedded: true
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/seasons_presenter.rb | slack-gamebot/api/presenters/seasons_presenter.rb | module Api
module Presenters
module SeasonsPresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
include Api::Presenters::PaginatedPresenter
collection :results, extend: SeasonPresenter, as: :seasons, embedded: true
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/user_presenter.rb | slack-gamebot/api/presenters/user_presenter.rb | module Api
module Presenters
module UserPresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
property :id, type: String, desc: 'User ID.'
property :user_name, type: String, desc: 'User name.'
property :nickname, type: String, desc: 'Optional nickname.'
property :wins, type: Integer, desc: 'Number of wins.'
property :losses, type: Integer, desc: 'Number of losses.'
property :elo, type: Integer, desc: 'Elo.'
property :elo_history, type: [Integer], desc: 'Elo history.'
property :rank, type: Integer, desc: 'Rank.'
property :winning_streak, type: Integer, desc: 'Longest winning streak this season.'
property :losing_streak, type: Integer, desc: 'Longest losing streak this season.'
property :registered, type: ::Grape::API::Boolean, desc: 'User registered or unregistered.'
property :created_at, as: :registered_at, type: DateTime, desc: 'Date/time when the user has registered.'
property :captain, type: ::Grape::API::Boolean, desc: 'Team captain.'
link :team do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/teams/#{represented.team.id}" if represented.team
end
link :self do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/users/#{id}"
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/challenges_presenter.rb | slack-gamebot/api/presenters/challenges_presenter.rb | module Api
module Presenters
module ChallengesPresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
include Api::Presenters::PaginatedPresenter
collection :results, extend: ChallengePresenter, as: :challenges, embedded: true
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/season_presenter.rb | slack-gamebot/api/presenters/season_presenter.rb | module Api
module Presenters
module SeasonPresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
property :current_id, as: :id, type: String, desc: 'Season ID.'
property :created_at, type: DateTime, desc: 'Date/time when the season was created.'
collection :user_ranks, extend: UserRankPresenter, embedded: true
link :team do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/teams/#{represented.team.id}" if represented.team
end
link :created_by do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/users/#{represented.created_by.id}" if represented.created_by
end
link :self do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/seasons/#{current_id}"
end
def current_id
represented.persisted? ? represented.id : 'current'
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/root_presenter.rb | slack-gamebot/api/presenters/root_presenter.rb | module Api
module Presenters
module RootPresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
link :self do |opts|
"#{base_url(opts)}/api/"
end
link :status do |opts|
"#{base_url(opts)}/api/status"
end
link :users do |opts|
{
href: "#{base_url(opts)}/api/users/#{link_params(Api::Helpers::PaginationParameters::ALL, :team_id, :captain)}",
templated: true
}
end
link :challenges do |opts|
{
href: "#{base_url(opts)}/api/challenges/#{link_params(Api::Helpers::PaginationParameters::ALL, :team_id)}",
templated: true
}
end
link :matches do |opts|
{
href: "#{base_url(opts)}/api/matches/#{link_params(Api::Helpers::PaginationParameters::ALL, :team_id)}",
templated: true
}
end
link :current_season do |opts|
{
href: "#{base_url(opts)}/api/seasons/current/{?team_id}",
templated: true
}
end
link :seasons do |opts|
{
href: "#{base_url(opts)}/api/seasons/#{link_params(Api::Helpers::PaginationParameters::ALL, :team_id)}",
templated: true
}
end
link :teams do |opts|
{
href: "#{base_url(opts)}/api/teams/#{link_params(Api::Helpers::PaginationParameters::ALL, :active, :game_id)}",
templated: true
}
end
link :games do |opts|
{
href: "#{base_url(opts)}/api/games/#{link_params(Api::Helpers::PaginationParameters::ALL)}",
templated: true
}
end
link :subscriptions do |opts|
"#{base_url(opts)}/api/subscriptions"
end
link :credit_cards do |opts|
"#{base_url(opts)}/api/credit_cards"
end
%i[challenge match user season team game].each do |model|
link model do |opts|
{
href: "#{base_url(opts)}/api/#{model.to_s.pluralize}/{id}",
templated: true
}
end
end
private
def base_url(opts)
request = Grape::Request.new(opts[:env])
request.base_url
end
def link_params(*args)
"{?#{args.join(',')}}"
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/user_ranks_presenter.rb | slack-gamebot/api/presenters/user_ranks_presenter.rb | module Api
module Presenters
module UserRanksPresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
include Api::Presenters::PaginatedPresenter
collection :results, extend: UserRankPresenter, as: :user_ranks, embedded: true
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/game_presenter.rb | slack-gamebot/api/presenters/game_presenter.rb | module Api
module Presenters
module GamePresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
property :id, type: String, desc: 'Game ID.'
property :name, type: String, desc: 'Name of the game.'
property :bot_name, type: String, desc: 'Bot name.'
property :aliases, type: Array, desc: 'Game aliases.'
property :client_id, type: String, desc: 'Slack client ID.'
property :created_at, type: DateTime, desc: 'Date/time when the game was created.'
property :updated_at, type: DateTime, desc: 'Date/time when the game was accepted, declined or canceled.'
link :teams do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/teams?game_id=#{represented.id}"
end
link :self do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/games/#{id}"
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/match_presenter.rb | slack-gamebot/api/presenters/match_presenter.rb | module Api
module Presenters
module MatchPresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
property :id, type: String, desc: 'Match ID.'
property :tied, type: ::Grape::API::Boolean, desc: 'Match is a tie.'
property :resigned, type: ::Grape::API::Boolean, desc: 'The loser resigned.'
property :scores, type: Array, desc: 'Match scores.'
property :created_at, type: DateTime, desc: 'Date/time when the match was created.'
link :team do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/teams/#{represented.team.id}" if represented.team
end
link :challenge do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/challenges/#{represented.challenge.id}" if represented.challenge
end
link :winners do |opts|
request = Grape::Request.new(opts[:env])
represented.winners.map do |user|
"#{request.base_url}/api/users/#{user.id}"
end
end
link :losers do |opts|
request = Grape::Request.new(opts[:env])
represented.losers.map do |user|
"#{request.base_url}/api/users/#{user.id}"
end
end
link :self do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/matches/#{id}"
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/presenters/user_rank_presenter.rb | slack-gamebot/api/presenters/user_rank_presenter.rb | module Api
module Presenters
module UserRankPresenter
include Roar::JSON::HAL
include Roar::Hypermedia
include Grape::Roar::Representer
property :id, type: String, desc: 'UserRank ID.'
property :user_name, type: String, desc: 'UserRank name.'
property :wins, type: Integer, desc: 'Number of wins.'
property :losses, type: Integer, desc: 'Number of losses.'
property :elo, type: Integer, desc: 'Elo.'
property :rank, type: Integer, desc: 'Rank.'
link :user do |opts|
request = Grape::Request.new(opts[:env])
"#{request.base_url}/api/users/#{user_id}"
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/helpers/cursor_helpers.rb | slack-gamebot/api/helpers/cursor_helpers.rb | module Api
module Helpers
module CursorHelpers
extend ActiveSupport::Concern
# apply cursor-based pagination to a collection
# returns a hash:
# results: (paginated collection subset)
# next: (cursor to the next page)
def paginate_by_cursor(coll, &)
raise 'Both cursor and offset parameters are present, these are mutually exclusive.' if params.key?(:offset) && params.key?(:cursor)
results = { results: [], next: nil }
size = (params[:size] || 10).to_i
if params.key?(:offset)
skip = params[:offset].to_i
coll = coll.skip(skip)
end
# some items may be skipped with a block
query = block_given? ? coll : coll.limit(size)
query.scroll(params[:cursor]) do |record, iterator|
record = yield(record) if block_given?
results[:results] << record if record
results[:next] = iterator.next_cursor.to_s
break if results[:results].count >= size
end
results[:total_count] = coll.count if params[:total_count] && coll.respond_to?(:count)
results
end
def paginate_and_sort_by_cursor(coll, options = {}, &)
Hashie::Mash.new(paginate_by_cursor(sort(coll, options), &))
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/helpers/sort_helpers.rb | slack-gamebot/api/helpers/sort_helpers.rb | module Api
module Helpers
module SortHelpers
extend ActiveSupport::Concern
def sort_order(options = {})
params[:sort] = options[:default_sort_order] unless params[:sort]
return [] unless params[:sort]
sort_order = params[:sort].to_s
unless options[:default_sort_order] == sort_order
supported_sort_orders = route_sort
error!("This API doesn't support sorting", 400) if supported_sort_orders.blank?
unless supported_sort_orders.include?(sort_order)
error!("Invalid sort order: #{sort_order}, must be#{' one of' unless supported_sort_orders.count == 1} '#{supported_sort_orders.join('\', \'')}'", 400)
end
end
sort_order.split(',').map do |sort_entry|
sort_order = {}
if sort_entry[0] == '-'
sort_order[:direction] = :desc
sort_order[:column] = sort_entry[1..-1]
else
sort_order[:direction] = :asc
sort_order[:column] = sort_entry
end
error!("Invalid sort: #{sort_entry}", 400) if sort_order[:column].blank?
sort_order
end
end
def route_sort
(env['api.endpoint'].route_setting(:sort) || {})[:sort]
end
def sort(coll, options = {})
sort_order = sort_order(options)
unless sort_order.empty?
if coll.respond_to?(:asc) && coll.respond_to?(:desc)
sort_order.each do |s|
coll = coll.send(s[:direction], s[:column])
end
else
error!("Cannot sort #{coll.class.name}", 500)
end
end
coll.is_a?(Module) && coll.respond_to?(:all) ? coll.all : coll
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/helpers/error_helpers.rb | slack-gamebot/api/helpers/error_helpers.rb | module Api
module Helpers
module ErrorHelpers
extend ActiveSupport::Concern
included do
rescue_from :all, backtrace: true do |e|
backtrace = e.backtrace[0..5].join("\n ")
Api::Middleware.logger.error "#{e.class.name}: #{e.message}\n #{backtrace}"
error = { type: 'other_error', message: e.message }
error[:backtrace] = backtrace
error!(error, 400)
end
# rescue document validation errors into detail json
rescue_from Mongoid::Errors::Validations do |e|
backtrace = e.backtrace[0..5].join("\n ")
Api::Middleware.logger.warn "#{e.class.name}: #{e.message}\n #{backtrace}"
error!({
type: 'param_error',
message: e.document.errors.full_messages.uniq.join(', ') + '.',
detail: e.document.errors.messages.transform_values(&:uniq)
}, 400)
end
rescue_from Grape::Exceptions::Validation do |e|
backtrace = e.backtrace[0..5].join("\n ")
Api::Middleware.logger.warn "#{e.class.name}: #{e.message}\n #{backtrace}"
error!({
type: 'param_error',
message: 'Invalid parameters.',
detail: { e.params.join(', ') => [e.message] }
}, 400)
end
rescue_from Grape::Exceptions::ValidationErrors do |e|
backtrace = e.backtrace[0..5].join("\n ")
Api::Middleware.logger.warn "#{e.class.name}: #{e.message}\n #{backtrace}"
error!({
type: 'param_error',
message: 'Invalid parameters.',
detail: e.errors.transform_keys do |k|
# JSON does not permit having a key of type Array
k.count == 1 ? k.first : k.join(', ')
end
}, 400)
end
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/helpers/pagination_parameters.rb | slack-gamebot/api/helpers/pagination_parameters.rb | module Api
module Helpers
module PaginationParameters
extend Grape::API::Helpers
params :pagination do
optional :offset, type: Integer, desc: 'Offset from which to retrieve.'
optional :size, type: Integer, desc: 'Number of items to retrieve for this page or from the current offset.'
optional :cursor, type: String, desc: 'Cursor for pagination.'
optional :total_count, desc: 'Include total count in the response.'
mutually_exclusive :offset, :cursor
end
ALL = %w[cursor size sort offset total_count].freeze
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/endpoints/challenges_endpoint.rb | slack-gamebot/api/endpoints/challenges_endpoint.rb | module Api
module Endpoints
class ChallengesEndpoint < Grape::API
format :json
helpers Api::Helpers::CursorHelpers
helpers Api::Helpers::SortHelpers
helpers Api::Helpers::PaginationParameters
namespace :challenges do
desc 'Get a challenge.'
params do
requires :id, type: String, desc: 'Challenge ID.'
end
get ':id' do
challenge = Challenge.find(params[:id]) || error!('Not Found', 404)
error!('Not Found', 404) unless challenge.team.api?
present challenge, with: Api::Presenters::ChallengePresenter
end
desc 'Get all the challenges.'
params do
requires :team_id, type: String
use :pagination
end
sort Challenge::SORT_ORDERS
get do
team = Team.find(params[:team_id]) || error!('Not Found', 404)
error!('Not Found', 404) unless team.api?
challenges = paginate_and_sort_by_cursor(team.challenges, default_sort_order: '-_id')
present challenges, with: Api::Presenters::ChallengesPresenter
end
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/endpoints/subscriptions_endpoint.rb | slack-gamebot/api/endpoints/subscriptions_endpoint.rb | module Api
module Endpoints
class SubscriptionsEndpoint < Grape::API
format :json
namespace :subscriptions do
desc 'Subscribe to slack-playplay.'
params do
requires :stripe_token, type: String
requires :stripe_token_type, type: String
requires :stripe_email, type: String
optional :stripe_coupon, type: String
requires :team_id, type: String
end
post do
team = Team.find(params[:team_id]) || error!('Not Found', 404)
Api::Middleware.logger.info "Creating a subscription for team #{team}, email=#{params[:stripe_email]}, coupon=#{params[:stripe_coupon]}."
error!('Already a Subscriber', 400) if team.subscribed?
error!('Existing Subscription Already Active', 400) if team.stripe_customer_id && team.stripe_customer.subscriptions.any?
data = {
source: params[:stripe_token],
plan: 'slack-playplay-yearly',
email: params[:stripe_email],
coupon: params[:stripe_coupon],
metadata: {
id: team._id.to_s,
team_id: team.team_id,
name: team.name,
domain: team.domain,
game: team.game.name
}
}
customer = team.stripe_customer_id ? Stripe::Customer.update(team.stripe_customer_id, data) : Stripe::Customer.create(data)
Api::Middleware.logger.info "Subscription for team #{team} created, stripe_customer_id=#{customer['id']}."
team.update_attributes!(subscribed: true, stripe_customer_id: customer['id'])
present team, with: Api::Presenters::TeamPresenter
end
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/endpoints/seasons_endpoint.rb | slack-gamebot/api/endpoints/seasons_endpoint.rb | module Api
module Endpoints
class SeasonsEndpoint < Grape::API
format :json
helpers Api::Helpers::CursorHelpers
helpers Api::Helpers::SortHelpers
helpers Api::Helpers::PaginationParameters
namespace :seasons do
desc 'Get current season.'
params do
requires :team_id, type: String
end
get 'current' do
team = Team.find(params[:team_id]) || error!('Not Found', 404)
error!('Not Found', 404) unless team.api?
present Season.new(team:), with: Api::Presenters::SeasonPresenter
end
desc 'Get a season.'
params do
requires :id, type: String, desc: 'Season ID.'
end
get ':id' do
season = Season.find(params[:id]) || error!('Not Found', 404)
error!('Not Found', 404) unless season.team.api?
present season, with: Api::Presenters::SeasonPresenter
end
desc 'Get all past seasons.'
params do
requires :team_id, type: String
use :pagination
end
sort Season::SORT_ORDERS
get do
team = Team.find(params[:team_id]) || error!('Not Found', 404)
error!('Not Found', 404) unless team.api?
seasons = paginate_and_sort_by_cursor(team.seasons, default_sort_order: '-_id')
present seasons, with: Api::Presenters::SeasonsPresenter
end
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/endpoints/teams_endpoint.rb | slack-gamebot/api/endpoints/teams_endpoint.rb | module Api
module Endpoints
class TeamsEndpoint < Grape::API
format :json
helpers Api::Helpers::CursorHelpers
helpers Api::Helpers::SortHelpers
helpers Api::Helpers::PaginationParameters
namespace :teams do
desc 'Get a team.'
params do
requires :id, type: String, desc: 'Team ID.'
end
get ':id' do
team = Team.find(params[:id]) || error!('Not Found', 404)
error!('Not Found', 404) unless team.api?
present team, with: Api::Presenters::TeamPresenter
end
desc 'Get all the teams.'
params do
optional :active, type: Boolean, desc: 'Return active teams only.'
optional :game, type: String, desc: 'Return teams for a given game by name.'
optional :game_id, type: String, desc: 'Return teams for a given game by ID.'
mutually_exclusive :game, :game_id
use :pagination
end
sort Team::SORT_ORDERS
get do
game = Game.find(params[:game_id]) if params.key?(:game_id)
game ||= Game.where(name: params[:game]) if params.key?(:game)
teams = game ? game.teams : Team.all
teams = teams.api
teams = teams.active if params[:active]
teams = paginate_and_sort_by_cursor(teams, default_sort_order: '-_id')
present teams, with: Api::Presenters::TeamsPresenter
end
desc 'Create a team using an OAuth token.'
params do
requires :code, type: String
optional :game, type: String
optional :game_id, type: String
exactly_one_of :game, :game_id
end
post do
game = Game.find(params[:game_id]) if params.key?(:game_id)
game ||= Game.where(name: params[:game]).first if params.key?(:game)
error!('Game Not Found', 404) unless game
client = Slack::Web::Client.new
rc = client.oauth_access(
client_id: game.client_id,
client_secret: game.client_secret,
code: params[:code]
)
token = rc['bot']['bot_access_token']
bot_user_id = rc['bot']['bot_user_id']
user_id = rc['user_id']
access_token = rc['access_token']
team = Team.where(token:).first
team ||= Team.where(team_id: rc['team_id'], game:).first
if team
error!('Invalid Game', 400) unless team.game == game
team.ping_if_active!
team.update_attributes!(
token:,
activated_user_id: user_id,
activated_user_access_token: access_token,
bot_user_id:,
dead_at: nil
)
raise "Team #{team.name} is already registered." if team.active?
team.activate!(token)
else
team = Team.create!(
game:,
aliases: game.aliases,
token:,
team_id: rc['team_id'],
name: rc['team_name'],
activated_user_id: user_id,
activated_user_access_token: access_token,
bot_user_id:
)
end
SlackRubyBotServer::Service.instance.create!(team)
present team, with: Api::Presenters::TeamPresenter
end
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/endpoints/matches_endpoint.rb | slack-gamebot/api/endpoints/matches_endpoint.rb | module Api
module Endpoints
# Read-only JSON API for matches. Matches are only visible for teams that
# enabled API access (team.api?); everything else answers 404.
class MatchesEndpoint < Grape::API
format :json
helpers Api::Helpers::CursorHelpers
helpers Api::Helpers::SortHelpers
helpers Api::Helpers::PaginationParameters
namespace :matches do
desc 'Get a match.'
params do
requires :id, type: String, desc: 'Match ID.'
end
get ':id' do
match = Match.find(params[:id]) || error!('Not Found', 404)
# Hide matches of teams that have not enabled the public API.
error!('Not Found', 404) unless match.team.api?
present match, with: Api::Presenters::MatchPresenter
end
desc 'Get all the matches.'
params do
requires :team_id, type: String
use :pagination
end
sort Match::SORT_ORDERS
get do
team = Team.find(params[:team_id]) || error!('Not Found', 404)
error!('Not Found', 404) unless team.api?
# Cursor-based pagination, newest first by default.
matches = paginate_and_sort_by_cursor(team.matches, default_sort_order: '-_id')
present matches, with: Api::Presenters::MatchesPresenter
end
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/endpoints/credit_cards_endpoint.rb | slack-gamebot/api/endpoints/credit_cards_endpoint.rb | module Api
module Endpoints
# Updates the Stripe credit card (payment source) on file for a team.
class CreditCardsEndpoint < Grape::API
  format :json

  namespace :credit_cards do
    desc 'Updates a credit card.'
    params do
      requires :stripe_token, type: String
      optional :stripe_token_type, type: String
      optional :stripe_email, type: String
      requires :team_id, type: String
    end
    post do
      team = Team.find(params[:team_id]) || error!('Not Found', 404)
      # Only teams that already have a Stripe customer can update a card.
      error!('Not a Subscriber', 400) unless team.stripe_customer_id
      customer = team.stripe_customer
      # Symbol access for consistency with params[:team_id] above; Grape
      # params are indifferent-access, so behavior is unchanged.
      customer.source = params[:stripe_token]
      customer.save
      Api::Middleware.logger.info "Updated credit card for team #{team}, email=#{params[:stripe_email]}."
      present team, with: Api::Presenters::TeamPresenter
    end
  end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/endpoints/users_endpoint.rb | slack-gamebot/api/endpoints/users_endpoint.rb | module Api
module Endpoints
# Read-only JSON API for users, scoped to API-enabled teams.
class UsersEndpoint < Grape::API
format :json
helpers Api::Helpers::CursorHelpers
helpers Api::Helpers::SortHelpers
helpers Api::Helpers::PaginationParameters
namespace :users do
desc 'Get a user.'
params do
requires :id, type: String, desc: 'User ID.'
end
get ':id' do
user = User.find(params[:id]) || error!('Not Found', 404)
# Hide users of teams that have not enabled the public API.
error!('Not Found', 404) unless user.team.api?
present user, with: Api::Presenters::UserPresenter
end
desc 'Get all the users.'
params do
requires :team_id, type: String
optional :captain, type: Boolean
use :pagination
end
sort User::SORT_ORDERS
get do
team = Team.find(params[:team_id]) || error!('Not Found', 404)
error!('Not Found', 404) unless team.api?
query = team.users
# Optionally restrict to team captains.
query = query.captains if params[:captain]
# Default sort: highest Elo first.
users = paginate_and_sort_by_cursor(query, default_sort_order: '-elo')
present users, with: Api::Presenters::UsersPresenter
end
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/endpoints/root_endpoint.rb | slack-gamebot/api/endpoints/root_endpoint.rb | module Api
module Endpoints
# API root: serves HAL links at /api, mounts every sub-endpoint and the
# Swagger documentation.
class RootEndpoint < Grape::API
include Api::Helpers::ErrorHelpers
prefix :api
format :json
formatter :json, Grape::Formatter::Roar
get do
present self, with: Api::Presenters::RootPresenter
end
mount Api::Endpoints::StatusEndpoint
mount Api::Endpoints::UsersEndpoint
mount Api::Endpoints::ChallengesEndpoint
mount Api::Endpoints::MatchesEndpoint
mount Api::Endpoints::SeasonsEndpoint
mount Api::Endpoints::TeamsEndpoint
mount Api::Endpoints::GamesEndpoint
mount Api::Endpoints::SubscriptionsEndpoint
mount Api::Endpoints::CreditCardsEndpoint
add_swagger_documentation
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/endpoints/games_endpoint.rb | slack-gamebot/api/endpoints/games_endpoint.rb | module Api
module Endpoints
# Read-only JSON API for games.
class GamesEndpoint < Grape::API
format :json
helpers Api::Helpers::CursorHelpers
helpers Api::Helpers::SortHelpers
helpers Api::Helpers::PaginationParameters
namespace :games do
desc 'Get a game.'
params do
requires :id, type: String, desc: 'Game ID.'
end
get ':id' do
game = Game.find(params[:id]) || error!('Not Found', 404)
present game, with: Api::Presenters::GamePresenter
end
desc 'Get all the games.'
params do
# NOTE(review): :active is declared but never applied to the query below
# (compare the teams endpoint, which filters with `.active`) — confirm
# whether filtering was intended and whether Game has an `active` scope.
optional :active, type: Boolean, desc: 'Return active games only.'
use :pagination
end
sort Game::SORT_ORDERS
get do
games = paginate_and_sort_by_cursor(Game.all, default_sort_order: '-_id')
present games, with: Api::Presenters::GamesPresenter
end
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/slack-gamebot/api/endpoints/status_endpoint.rb | slack-gamebot/api/endpoints/status_endpoint.rb | module Api
module Endpoints
# System status: delegates to StatusPresenter, which reports per-game
# counts and pings one active team per game.
class StatusEndpoint < Grape::API
format :json
namespace :status do
desc 'Get system status.'
get do
present self, with: Api::Presenters::StatusPresenter
end
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/config/initializers/slack_ruby_bot_client.rb | config/initializers/slack_ruby_bot_client.rb | # frozen_string_literal: true
module SlackRubyBot
# Real-time Slack client extended with per-team GIF and alias settings.
class Client < Slack::RealTime::Client
  attr_accessor :send_gifs, :aliases

  def initialize(attrs = {})
    super
    @send_gifs = attrs[:send_gifs]
    @aliases = attrs[:aliases]
  end

  # GIFs are on unless explicitly disabled; nil means "not configured".
  def send_gifs?
    send_gifs.nil? || send_gifs
  end

  # Sends a message. When a :gif keyword is present and GIFs are enabled,
  # a random Giphy URL is appended to the text on a new line.
  def say(options = {})
    opts = options.dup
    gif_keywords = opts.delete(:gif)
    body = opts.delete(:text)
    animation = Giphy.random(gif_keywords) if gif_keywords && send_gifs?
    text = [body, animation].compact.join("\n")
    message({ text: }.merge(opts))
  end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/config/initializers/giphy.rb | config/initializers/giphy.rb | module Giphy
# Returns the URL of a random GIF matching +keywords+, or nil when the
# GIPHY_API_KEY environment variable is unset or the lookup fails.
#
# @param keywords [String] search terms passed to the Giphy API
# @return [String, nil] fixed-height GIF URL, or nil on any failure
def self.random(keywords)
  return unless ENV.key?('GIPHY_API_KEY')
  # Encode the query so multi-word or special-character keywords produce a
  # valid URI (previously they raised URI::InvalidURIError and returned nil).
  query = URI.encode_www_form_component(keywords)
  # NOTE(review): endpoint kept on plain http as in the original — confirm
  # the Giphy API still serves http without redirecting.
  url = "http://api.giphy.com/v1/gifs/random?q=#{query}&api_key=#{ENV.fetch('GIPHY_API_KEY', nil)}&rating=G"
  result = JSON.parse(Net::HTTP.get_response(URI.parse(url)).body)
  result['data']['images']['fixed_height']['url']
rescue StandardError => e
  # `logger` is not defined in this module, so the original rescue raised
  # NameError from the handler; Kernel#warn keeps the failure non-fatal.
  warn "Giphy.random: #{e.message}"
  nil
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/config/initializers/array.rb | config/initializers/array.rb | class Array
# Joins elements into an English list using "and" (e.g. "1, 2 and 3").
def and
  join_with 'and'
end

# Joins elements into an English list using "or" (e.g. "a, b or c").
def or
  join_with 'or'
end

# True when all elements are equal; false for an empty array, since
# uniq then yields zero elements.
def same?
  uniq.length == 1
end

private

# Core list formatter: "x, y SEPARATOR z" for two or more elements, the
# lone element itself for one, nil for none.
def join_with(separator)
  return first unless count > 1

  "#{self[0..-2].join(', ')} #{separator} #{self[-1]}"
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/config/initializers/slack_ruby_bot_commands_base.rb | config/initializers/slack_ruby_bot_commands_base.rb | module SlackRubyBot
module Commands
class Base
class << self
# Wraps command dispatch in error handling. Mongoid validation failures
# are echoed to the channel as the validation messages; any other
# StandardError is logged and its message echoed. Returns true in both
# rescue paths so the caller treats the command as handled. The rescue
# order matters: the Mongoid clause must precede the StandardError one.
def invoke(client, data)
_invoke client, data
rescue Mongoid::Errors::Validations => e
logger.info "#{name.demodulize.upcase}: #{client.owner}, error - #{e.document.class}, #{e.document.errors.to_hash}"
client.say(channel: data.channel, text: e.document.errors.map(&:type).join("\n"))
true
rescue StandardError => e
logger.info "#{name.demodulize.upcase}: #{client.owner}, #{e.class}: #{e}"
logger.debug e.backtrace.join("\n")
client.say(channel: data.channel, text: e.message)
true
end
end
end
end
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/config/initializers/active_support.rb | config/initializers/active_support.rb | ActiveSupport.to_time_preserves_timezone = :zone
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/config/initializers/slack_ruby_bot_server_mailchimp.rb | config/initializers/slack_ruby_bot_server_mailchimp.rb | SlackRubyBotServer::Mailchimp.configure do |config|
# Mailchimp credentials come from the environment; nil disables the sync.
config.mailchimp_api_key = ENV.fetch('MAILCHIMP_API_KEY', nil)
config.mailchimp_list_id = ENV.fetch('MAILCHIMP_LIST_ID', nil)
# Tag every subscribed member and record which bot/game they signed up from.
config.additional_member_tags = ['gamebot']
config.additional_merge_fields = ->(team, _options) { { 'BOT' => team.game.name.capitalize } }
end
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
dblock/slack-gamebot | https://github.com/dblock/slack-gamebot/blob/0af9dc9bf8c61523ed46c1007a96a3c9daa488c8/config/initializers/stripe.rb | config/initializers/stripe.rb | Stripe.api_key = ENV['STRIPE_API_KEY'] if ENV.key?('STRIPE_API_KEY')
| ruby | MIT | 0af9dc9bf8c61523ed46c1007a96a3c9daa488c8 | 2026-01-04T17:48:57.592423Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/test/ownership_test.rb | test/ownership_test.rb | require_relative "test_helper"
# Core behavior of the `owner` block helper and Ownership module, plus
# checks that patching Exception#owner does not break Method#owner.
class OwnershipTest < Minitest::Test
# around_change (set in test_helper) wraps the block with start/finish.
def test_around
owner :logistics do
$around_calls << "middle"
end
assert_equal ["start", "middle", "finish"], $around_calls
end
def test_exception
error = assert_raises do
owner :logistics do
raise "boom"
end
end
assert_equal :logistics, error.owner
end
# The innermost owner wins when blocks are nested.
def test_nested_exception
error = assert_raises do
owner :logistics do
owner :sales do
raise "boom"
end
end
end
assert_equal :sales, error.owner
end
def test_default_owner
assert_nil Ownership.owner
Ownership.default_owner = :logistics
assert_equal :logistics, Ownership.owner
ensure
Ownership.default_owner = nil
end
def test_respond_to?
refute nil.respond_to?(:owner)
end
# Method#owner must keep its stock meaning (defining module).
def test_method_owner
assert_equal Kernel, method(:puts).owner
end
def test_pry
assert_equal Kernel, Pry::Method.new(method(:puts)).wrapped_owner.wrapped
end
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/test/rollbar_test.rb | test/rollbar_test.rb | require_relative "test_helper"
require "rollbar"
# Silence Rollbar, stub a default token and stop it from transmitting; the
# per-payload access token feature is what Ownership::Rollbar relies on.
Rollbar.configure do |config|
config.logger = Logger.new(nil)
config.access_token = "footoken"
config.transmit = false
config.disable_monkey_patch = true
config.use_payload_access_token = true
end
# Map each owner to its own Rollbar project token.
Ownership::Rollbar.access_token = {
logistics: "logistics-token",
sales: "sales-token",
support: "support-token"
}
# Capture the final payloads so tests can inspect the chosen token.
Rollbar.configure do |config|
config.transform << proc do |options|
$errors << options
end
end
class RollbarTest < Minitest::Test
def setup
super
$errors = []
end
# An exception raised inside an owner block is reported with that owner's
# Rollbar access token.
def test_error
begin
owner :logistics do
raise "Error"
end
rescue => e
Rollbar.error(e)
end
assert_equal 1, $errors.size
assert_equal "logistics-token", $errors.last[:payload]["access_token"]
end
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/test/active_record_test.rb | test/active_record_test.rb | require_relative "test_helper"
# Capture SQL logs in memory so the owner query tag can be asserted on.
$io = StringIO.new
ActiveRecord::Base.logger = ActiveSupport::Logger.new($io)
class ActiveRecordTest < Minitest::Test
def setup
ActiveRecord::QueryLogs.tags = [:owner]
super
$io.truncate(0)
end
def teardown
ActiveRecord::QueryLogs.tags = []
end
# Queries inside an owner block carry an owner= query-log comment.
def test_owner
owner(:logistics) do
User.last
end
assert_match "/*owner='logistics'*/", logs
end
def test_no_owner
User.last
refute_match "owner", logs
end
# Everything logged since the last setup.
def logs
$io.rewind
$io.read
end
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/test/controller_test.rb | test/controller_test.rb | require_relative "test_helper"
# Exercises the controller `owner` macro, including :only/:except filters
# (see HomeController and UsersController in test/internal).
class ControllerTest < ActionDispatch::IntegrationTest
def test_controller
get root_url
assert_equal :logistics, $current_owner
end
def test_only
get users_url
assert_equal :logistics, $current_owner
end
def test_except
get user_url(1)
assert_equal :customers, $current_owner
end
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/test/test_helper.rb | test/test_helper.rb | require "bundler/setup"
require "combustion"
Bundler.require(:default)
require "minitest/autorun"
# Verbose Rails logging only when VERBOSE is set.
logger = ActiveSupport::Logger.new(ENV["VERBOSE"] ? STDOUT : nil)
Combustion.path = "test/internal"
Combustion.initialize! :active_record, :action_controller, :active_job do
config.load_defaults Rails::VERSION::STRING.to_f
config.action_controller.logger = logger
config.active_record.logger = logger
config.active_job.logger = logger
# Needed for the owner query-log tag assertions in ActiveRecordTest.
config.active_record.query_log_tags_enabled = true
end
# Reset the globals the tests observe before every test.
class Minitest::Test
def setup
$current_owner = nil
$around_calls = []
end
end
# Record the around_change hook invocations for OwnershipTest#test_around.
Ownership.around_change = proc do |owner, block|
$around_calls << "start"
block.call
$around_calls << "finish"
end
# https://github.com/rails/rails/issues/54595
if RUBY_ENGINE == "jruby" && Rails::VERSION::MAJOR >= 8
Rails.application.reload_routes_unless_loaded
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/test/honeybadger_test.rb | test/honeybadger_test.rb | require_relative "test_helper"
require "honeybadger/ruby"
# Test backend: notices are collected in memory instead of being sent.
Honeybadger.init!(framework: :ruby, env: "test", "logging.path": Tempfile.new.path)
Honeybadger.configure do |config|
config.api_key = "default-key"
config.backend = "test"
config.logger = Logger.new(IO::NULL)
end
# Map each owner to its own Honeybadger project API key.
Ownership::Honeybadger.api_keys = {
logistics: "logistics-key",
sales: "sales-key",
support: "support-key"
}
# Verifies owner-based tagging and API-key routing of Honeybadger notices.
class HoneybadgerTest < Minitest::Test
def setup
super
Honeybadger.config.backend.notifications.clear
Honeybadger.context.clear!
end
# The current owner is appended to any context tags.
def test_tagging
Honeybadger.context tags: 'critical, badgers'
owner :logistics do
Honeybadger.notify("boom for logistics", sync: true)
end
assert_equal %w[critical badgers logistics], notices.last.tags
Honeybadger.context.clear!
owner :sales do
Honeybadger.notify("boom for sales", sync: true)
end
assert_equal %w[sales], notices.last.tags
end
def test_uses_default_key_without_ownership_block
Honeybadger.notify("boom for default", sync: true)
assert_equal "default-key", notices.last.api_key
end
def test_uses_owner_key_within_ownership_block
owner :logistics do
Honeybadger.notify("boom for logistics", sync: true)
end
assert_equal "logistics-key", notices.last.api_key
end
# Unknown owners fall back to the default key and emit a warning.
def test_uses_default_key_and_warns_with_unknown_owner
assert_output(nil, /Missing Honeybadger API key for owner: unknown/) do
owner :unknown do
Honeybadger.notify("boom for default", sync: true)
end
end
assert_equal "default-key", notices.last.api_key
end
def test_async_works_properly
owner :logistics do
Honeybadger.notify("boom for logistics")
Honeybadger.flush
end
assert_equal "logistics-key", notices.last.api_key
end
# An explicit owner on the exception beats the thread-local owner.
def test_prefer_exception_owner_over_thread_local_ownership
owner :logistics do
ex = StandardError.new("boom for sales")
ex.owner = :sales
Honeybadger.notify(ex, sync: true)
end
assert_equal "sales-key", notices.last.api_key
end
private
# Notices captured by the in-memory test backend.
def notices
Honeybadger.config.backend.notifications[:notices]
end
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/test/job_test.rb | test/job_test.rb | require_relative "test_helper"
# The Active Job `owner` macro sets the owner for the duration of perform.
class JobTest < Minitest::Test
def test_job
TestJob.perform_now
assert_equal :logistics, $current_owner
end
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/test/internal/app/jobs/test_job.rb | test/internal/app/jobs/test_job.rb | class TestJob < ActiveJob::Base
# Declares the owner for every perform of this job (JobMethods macro).
owner :logistics
def perform
# Expose the owner seen inside perform so JobTest can assert on it.
$current_owner = Ownership.owner
end
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/test/internal/app/controllers/home_controller.rb | test/internal/app/controllers/home_controller.rb | class HomeController < ActionController::Base
# Owner applies to every action (ControllerMethods macro, no filter).
owner :logistics
def index
# Expose the owner seen inside the action so ControllerTest can assert.
$current_owner = Ownership.owner
head :ok
end
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/test/internal/app/controllers/users_controller.rb | test/internal/app/controllers/users_controller.rb | class UsersController < ActionController::Base
# Exercises the :only/:except filters of the controller owner macro:
# index runs as :logistics, every other action as :customers.
owner :logistics, only: [:index]
owner :customers, except: [:index]
def index
$current_owner = Ownership.owner
head :ok
end
def show
$current_owner = Ownership.owner
head :ok
end
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/test/internal/app/models/user.rb | test/internal/app/models/user.rb | class User < ActiveRecord::Base
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/test/internal/db/schema.rb | test/internal/db/schema.rb | ActiveRecord::Schema.define do
# Minimal table backing the User model used by the query-log tag tests.
create_table :users do |t|
t.string :name
end
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/test/internal/config/routes.rb | test/internal/config/routes.rb | Rails.application.routes.draw do
# Routes exercised by ControllerTest (home#index, users#index, users#show).
root "home#index"
resources :users, only: [:index, :show]
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/lib/ownership.rb | lib/ownership.rb | # modules
require_relative "ownership/global_methods"
require_relative "ownership/version"
# integrations
require_relative "ownership/honeybadger"
require_relative "ownership/rollbar"
module Ownership
  # Both accessors live on the singleton class: +around_change+ is an
  # optional callback invoked around every owner block, +default_owner+ is
  # the fallback when no block-scoped owner is active.
  class << self
    attr_accessor :around_change, :default_owner
  end

  # The owner for the current thread, falling back to the global default.
  def self.owner
    Thread.current[:ownership_owner] || default_owner
  end
end
# Make the private `owner` helper available everywhere.
Object.include Ownership::GlobalMethods
# Rails integrations, loaded lazily only when the frameworks boot.
if defined?(ActiveSupport.on_load)
ActiveSupport.on_load(:action_controller) do
require_relative "ownership/controller_methods"
include Ownership::ControllerMethods
end
ActiveSupport.on_load(:active_record) do
# taggings is frozen in Active Record 8
if !ActiveRecord::QueryLogs.taggings[:owner]
ActiveRecord::QueryLogs.taggings = ActiveRecord::QueryLogs.taggings.merge({owner: -> { Ownership.owner }})
end
end
ActiveSupport.on_load(:active_job) do
require_relative "ownership/job_methods"
include Ownership::JobMethods
end
end
# Every exception can carry the owner active when it was raised
# (set by GlobalMethods#owner, read by the error-reporter integrations).
class Exception
attr_accessor :owner
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/lib/ownership/version.rb | lib/ownership/version.rb | module Ownership
VERSION = "0.5.0"
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/lib/ownership/controller_methods.rb | lib/ownership/controller_methods.rb | require "active_support/concern"
module Ownership
# Adds a class-level `owner` macro to controllers that wraps actions in an
# ownership block; **options (e.g. :only/:except) pass through to
# around_action.
module ControllerMethods
extend ActiveSupport::Concern
class_methods do
def owner(owner, **options)
around_action(**options) do |_, block|
# The instance-level `owner` here is GlobalMethods#owner.
owner(owner) { block.call }
end
end
end
end
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/lib/ownership/rollbar.rb | lib/ownership/rollbar.rb | module Ownership
# Rollbar integration: routes each report to the access token of the owner
# active when the error occurred. Configure by assigning a Hash of
# owner => token, or a callable receiving the owner.
module Rollbar
class << self
attr_reader :access_token
def access_token=(access_token)
@access_token = access_token
@configure ||= configure # just once
access_token
end
private
# Resolve the token for an owner from a callable or Hash mapping.
def owner_access_token(owner)
access_token.respond_to?(:call) ? access_token.call(owner) : access_token[owner]
end
def configure
::Rollbar.configure do |config|
# Stash the current owner in the scope before async processing, so
# it survives until transform runs.
config.before_process << proc do |options|
options[:scope][:ownership_owner] = Ownership.owner if Ownership.owner
end
config.transform << proc do |options|
# clean up payload
options[:payload]["data"].delete(:ownership_owner)
# Prefer the owner recorded on the exception itself, then the
# scope captured above, then the global default.
owner = options[:exception].owner if options[:exception].respond_to?(:owner)
unless owner
owner = options[:scope][:ownership_owner] if options[:scope].is_a?(Hash)
owner ||= Ownership.default_owner
end
if owner
access_token = owner_access_token(owner)
if access_token
options[:payload]["access_token"] = access_token
else
warn "[ownership] Missing Rollbar access token for owner: #{owner}"
end
end
end
end
true
end
end
end
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/lib/ownership/job_methods.rb | lib/ownership/job_methods.rb | require "active_support/concern"
module Ownership
# Adds a class-level `owner` macro to Active Job classes that wraps
# perform in an ownership block.
module JobMethods
extend ActiveSupport::Concern
class_methods do
def owner(owner)
around_perform do |_, block|
# The instance-level `owner` here is GlobalMethods#owner.
owner(owner) { block.call }
end
end
end
end
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/lib/ownership/global_methods.rb | lib/ownership/global_methods.rb | module Ownership
# Mixed into Object: provides the global, private `owner(sym) { ... }`
# helper that scopes a thread-local owner to the block.
module GlobalMethods
private
def owner(owner, &block)
raise ArgumentError, "Missing block" unless block_given?
# Remember the enclosing owner so nested blocks restore correctly.
previous_value = Thread.current[:ownership_owner]
begin
Thread.current[:ownership_owner] = owner
# callbacks
if Ownership.around_change
Ownership.around_change.call(owner, block)
else
block.call
end
rescue Exception => e
# Intentionally rescues Exception (not just StandardError): tag the
# error with the innermost owner (||= keeps a nested block's owner)
# and always re-raise.
e.owner ||= owner
raise
ensure
Thread.current[:ownership_owner] = previous_value
end
end
end
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
ankane/ownership | https://github.com/ankane/ownership/blob/c72a57652e35d746a6fcdabaed26b1dab30d6d94/lib/ownership/honeybadger.rb | lib/ownership/honeybadger.rb | module Ownership
# Honeybadger integration: tags each notice with its owner and routes it to
# that owner's API key. Configure by assigning a Hash of owner => key, or a
# callable receiving the owner.
module Honeybadger
class << self
attr_reader :api_keys
def api_keys=(api_keys)
@api_keys = api_keys
# Install the before_notify hook just once.
@configuration ||= configure
api_keys
end
private
def add_owner_as_tag(notice, current_owner)
return unless current_owner
notice.tags << current_owner.to_s
end
def configure
::Honeybadger.configure do |config|
config.before_notify do |notice|
# Owner recorded on the exception wins over the thread-local one.
current_owner = notice.exception.owner if notice.exception.is_a?(Exception)
current_owner ||= Ownership.owner
add_owner_as_tag(notice, current_owner)
use_owner_api_key(notice, current_owner)
end
end
end
# Resolve the key for an owner from a callable or Hash mapping.
def owner_api_key(current_owner)
api_keys.respond_to?(:call) ? api_keys.call(current_owner) : api_keys[current_owner]
end
def use_owner_api_key(notice, current_owner)
return unless current_owner
if (api_key = owner_api_key(current_owner))
notice.api_key = api_key
else
# Fall through to the globally configured key, but make it visible.
warn "[ownership] Missing Honeybadger API key for owner: #{current_owner}"
end
end
end
end
end
| ruby | MIT | c72a57652e35d746a6fcdabaed26b1dab30d6d94 | 2026-01-04T17:51:02.493959Z | false |
kenn/memstat | https://github.com/kenn/memstat/blob/f6922c277b05e028cc05ad6aba60358d5cb27829/test/test_memstat.rb | test/test_memstat.rb | require 'memstat'
require 'minitest/autorun'
require 'benchmark'
# Parses fixture copies of /proc files and asserts the aggregated values
# (fixtures report kB; the parsers expose bytes, hence the * 1024).
class TestCli < Minitest::Test
SMAPS_PATH = File.expand_path('../../test/files/smaps.txt', __FILE__)
STATUS_PATH = File.expand_path('../../test/files/status.txt', __FILE__)
# Smoke test: print must not raise.
def test_print
smaps = Memstat::Proc::Smaps.new(:path => SMAPS_PATH)
puts smaps.print
STDOUT.flush
end
def test_smaps
smaps = Memstat::Proc::Smaps.new(:path => SMAPS_PATH)
assert_equal smaps.size, 277760 * 1024
assert_equal smaps.rss, 131032 * 1024
assert_equal smaps.pss, 75944 * 1024
assert_equal smaps.shared_clean, 8504 * 1024
assert_equal smaps.shared_dirty, 53716 * 1024
assert_equal smaps.private_clean, 4 * 1024
assert_equal smaps.private_dirty, 68808 * 1024
assert_equal smaps.swap, 0 * 1024
end
def test_status
status = Memstat::Proc::Status.new(:path => STATUS_PATH)
assert_equal status.peak, 277756 * 1024
assert_equal status.size, 277756 * 1024
assert_equal status.lck, 0 * 1024
assert_equal status.pin, 0 * 1024
assert_equal status.hwm, 131044 * 1024
assert_equal status.rss, 131044 * 1024
assert_equal status.data, 133064 * 1024
assert_equal status.stk, 136 * 1024
assert_equal status.exe, 4 * 1024
assert_equal status.lib, 21524 * 1024
assert_equal status.pte, 540 * 1024
assert_equal status.swap, 0 * 1024
end
# Referencing the constant exercises the autoload.
def test_oobgc
Memstat::OobGC::Unicorn
end
# Informational benchmark: reading /proc vs shelling out to ps.
def test_benchmark
n = 100
Benchmark.bm(10) do |x|
x.report("ps:") { n.times.each { `ps -o rss -p #{Process.pid}`.strip.to_i } }
x.report("memstat:") { n.times.each { Memstat::Proc::Status.new(:path => STATUS_PATH).rss } }
end
end
end
| ruby | MIT | f6922c277b05e028cc05ad6aba60358d5cb27829 | 2026-01-04T17:51:02.738031Z | false |
kenn/memstat | https://github.com/kenn/memstat/blob/f6922c277b05e028cc05ad6aba60358d5cb27829/lib/memstat.rb | lib/memstat.rb | require 'thor'
# Top-level namespace: lazily loads all components via autoload.
module Memstat
autoload :Cli, 'memstat/cli'
autoload :Version, 'memstat/version'
module OobGC
autoload :Unicorn, 'memstat/oob_gc/unicorn'
end
module Proc
autoload :Base, 'memstat/proc/base'
autoload :Smaps, 'memstat/proc/smaps'
autoload :Status, 'memstat/proc/status'
end
# Library-wide error class.
Error = Class.new(StandardError)
# Truthy on Linux (/proc is only available there). Returns the match
# index or nil, used for its truthiness only.
def linux?
RUBY_PLATFORM =~ /linux/
end
module_function :linux?
end
| ruby | MIT | f6922c277b05e028cc05ad6aba60358d5cb27829 | 2026-01-04T17:51:02.738031Z | false |
kenn/memstat | https://github.com/kenn/memstat/blob/f6922c277b05e028cc05ad6aba60358d5cb27829/lib/memstat/version.rb | lib/memstat/version.rb | module Memstat
VERSION = '0.1.3'
end
| ruby | MIT | f6922c277b05e028cc05ad6aba60358d5cb27829 | 2026-01-04T17:51:02.738031Z | false |
kenn/memstat | https://github.com/kenn/memstat/blob/f6922c277b05e028cc05ad6aba60358d5cb27829/lib/memstat/cli.rb | lib/memstat/cli.rb | module Memstat
# Thor command-line interface for the gem.
class Cli < Thor
# Aggregate information from /proc/[pid]/smaps
#
# pss - Roughly the amount of memory that is "really" being used by the pid
# swap - Amount of swap this process is currently using
#
desc 'smaps', 'Print useful information from /proc/[pid]/smaps'
def smaps(pid)
# /proc only exists on Linux.
abort 'Error: unsupported OS' unless Memstat.linux?
result = Memstat::Proc::Smaps.new(:pid => pid)
puts result.print
end
end
end
| ruby | MIT | f6922c277b05e028cc05ad6aba60358d5cb27829 | 2026-01-04T17:51:02.738031Z | false |
kenn/memstat | https://github.com/kenn/memstat/blob/f6922c277b05e028cc05ad6aba60358d5cb27829/lib/memstat/oob_gc/unicorn.rb | lib/memstat/oob_gc/unicorn.rb | module Memstat
module OobGC
# Out-of-band GC for Unicorn: installed via `use Memstat::OobGC::Unicorn`
# in the Rack config. `new` only records the threshold (bytes; default
# 1 GiB) and returns the app unchanged; the module itself is prepended to
# Unicorn::HttpServer so process_client can trigger GC between requests.
module Unicorn
def self.new(app, threshold = (1024**3))
# Stored as a constant so process_client can read it without state.
self.const_set :OOBGC_THRESHOLD, threshold
app # pretend to be Rack middleware since it was in the past
end
def process_client(client)
super(client) # Unicorn::HttpServer#process_client
if Memstat.linux?
# Run GC after the response, outside the request path, whenever the
# worker's RSS exceeds the threshold.
status = Memstat::Proc::Status.new(:pid => Process.pid)
if status.rss > OOBGC_THRESHOLD
GC.start
end
end
end
end
end
end
| ruby | MIT | f6922c277b05e028cc05ad6aba60358d5cb27829 | 2026-01-04T17:51:02.738031Z | false |
kenn/memstat | https://github.com/kenn/memstat/blob/f6922c277b05e028cc05ad6aba60358d5cb27829/lib/memstat/proc/smaps.rb | lib/memstat/proc/smaps.rb | module Memstat
module Proc
# Parses /proc/[pid]/smaps: every mapped region becomes an Item and the
# FIELDS totals are summed across regions (all values in bytes).
class Smaps < Base
FIELDS = %w[size rss pss shared_clean shared_dirty private_clean private_dirty swap]
attr_accessor *FIELDS
attr_accessor :lines, :items
def initialize(options = {})
super
# Default to the live proc file when constructed from a pid.
@path ||= "/proc/#{@pid}/smaps"
# Start every aggregate at zero before summing.
FIELDS.each do |field|
send("#{field}=", 0)
end
run
end
# Reads @path, splitting it into region header lines (parsed into new
# Items) and per-region field lines, then sums each FIELDS value across
# all items into this object's aggregates.
def run
@lines = File.readlines(@path).map(&:strip)
@items = []
item = nil
@lines.each.with_index do |line, index|
case line
# Region header, e.g. "start-end perms offset dev inode [region]".
when /[0-9a-f]+:[0-9a-f]+\s+/
item = Item.new
@items << item
item.parse_first_line(line)
# "Name:   value" field line, attributed to the current region.
when /\w+:\s+/
item.parse_field_line(line)
else
raise Error.new("invalid format at line #{index + 1}: #{line}")
end
end
@items.each do |item|
FIELDS.each do |field|
send "#{field}=", (send(field) + item.send(field))
end
end
end
# Memoized multi-line summary string; byte totals are shown in kB.
def print
@print ||= begin
lines = []
lines << "#{"Process:".ljust(20)} #{@pid || '[unspecified]'}"
lines << "#{"Command Line:".ljust(20)} #{command || '[unspecified]'}"
lines << "Memory Summary:"
FIELDS.each do |field|
lines << "  #{field.ljust(20)} #{number_with_delimiter(send(field)/1024).rjust(12)} kB"
end
lines.join("\n")
end
end
# Human-readable command line for @pid, read from /proc/[pid]/cmdline.
# Java processes started with -jar are reported as "[java] <jar>".
# Returns nil when this instance was built from a file path (no pid).
def command
  return unless pid?
  commandline = File.read("/proc/#{@pid}/cmdline").split("\0")
  if commandline.first =~ /java$/ then
    # Previously this shifted in an unbounded loop and hung forever when a
    # java command line had no "-jar" argument (Array#shift keeps returning
    # nil once empty). Look the flag up instead and fall through if absent.
    jar_flag = commandline.index('-jar')
    return "[java] #{commandline[jar_flag + 1]}" if jar_flag
  end
  return commandline.join(' ')
end
# Formats an integer with comma thousands separators, e.g. 131032 -> "131,032".
def number_with_delimiter(n)
  n.to_s.gsub(/(\d)(?=(?:\d{3})+$)/, '\1,')
end
#
# Memstat::Proc::Smaps::Item
#
# One mapped memory region from smaps: the header metadata plus the
# per-region values for every field in FIELDS (stored in bytes).
class Item
attr_accessor *FIELDS
attr_reader :address_start
attr_reader :address_end
attr_reader :perms
attr_reader :offset
attr_reader :device_major
attr_reader :device_minor
attr_reader :inode
attr_reader :region
def initialize
# Zero every field so regions missing a line still sum cleanly.
FIELDS.each do |field|
send("#{field}=", 0)
end
end
# Parses the region header: "start-end perms offset major:minor inode [path]".
def parse_first_line(line)
parts = line.strip.split
@address_start, @address_end = parts[0].split('-')
@perms = parts[1]
@offset = parts[2]
@device_major, @device_minor = parts[3].split(':')
@inode = parts[4]
# Regions without a backing path (heap-like mappings) get a placeholder.
@region = parts[5] || 'anonymous'
end
# Parses a "Name:  value kB" line; values are converted to bytes. Only
# fields with a matching writer (i.e. those in FIELDS) are stored.
def parse_field_line(line)
parts = line.strip.split
field = parts[0].downcase.sub(':','')
# VmFlags has a non-numeric value; skip it.
return if field == 'vmflags'
value = Integer(parts[1]) * 1024
send("#{field}=", value) if respond_to? "#{field}="
end
end
end
end
end
| ruby | MIT | f6922c277b05e028cc05ad6aba60358d5cb27829 | 2026-01-04T17:51:02.738031Z | false |
kenn/memstat | https://github.com/kenn/memstat/blob/f6922c277b05e028cc05ad6aba60358d5cb27829/lib/memstat/proc/base.rb | lib/memstat/proc/base.rb | module Memstat
module Proc
# Shared base for /proc parsers: holds either a live pid or an explicit
# file path (useful for fixtures).
class Base
  attr_accessor :pid, :path

  # @param options [Hash] must supply :path and/or :pid, otherwise an
  #   Error is raised.
  def initialize(options = {})
    unless options[:path] || options[:pid]
      raise Error.new('path or pid must be given')
    end

    @pid, @path = options.values_at(:pid, :path)
  end

  # True when this instance was constructed from a live pid.
  def pid?
    !!@pid
  end
end
end
| ruby | MIT | f6922c277b05e028cc05ad6aba60358d5cb27829 | 2026-01-04T17:51:02.738031Z | false |
kenn/memstat | https://github.com/kenn/memstat/blob/f6922c277b05e028cc05ad6aba60358d5cb27829/lib/memstat/proc/status.rb | lib/memstat/proc/status.rb | module Memstat
module Proc
# Parses /proc/[pid]/status, exposing the Vm* values (bytes) as attributes
# named after the Vm-stripped, downcased key (VmRSS -> rss, VmPeak -> peak).
class Status < Base
FIELDS = %w[peak size lck pin hwm rss data stk exe lib pte swap]
attr_accessor *FIELDS
def initialize(options = {})
super
# Default to the live proc file when constructed from a pid.
@path ||= "/proc/#{@pid}/status"
run
end
def run
@lines = File.readlines(@path).map(&:strip)
@hash = {}
@lines.each do |line|
match = line.match(/(\w+):(.*)/)
key = match[1]
value = match[2]
@hash[key] = value
# Deliberate assignment-in-condition: reuses `match` for Vm* keys.
if match = key.match(/Vm(\w+)/)
field = match[1].downcase
if respond_to? "#{field}="
# Values read "  277756 kB"; store them in bytes.
send("#{field}=", Integer(value.strip.split.first) * 1024)
end
end
end
end
end
end
end
| ruby | MIT | f6922c277b05e028cc05ad6aba60358d5cb27829 | 2026-01-04T17:51:02.738031Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb_spec.rb | spec/timescaledb_spec.rb | RSpec.describe Timescaledb do
it "has a version number" do
expect(Timescaledb::VERSION).not_to be nil
end
describe ".extension" do
describe ".installed?" do
it { expect(Timescaledb.extension.installed?).to be_truthy }
end
describe ".version" do
it { expect(Timescaledb.extension.version).not_to be_empty }
end
end
describe ".chunks" do
subject { Timescaledb.chunks }
context "when no data is inserted" do
it { is_expected.to be_empty }
end
context "when data is added" do
before do
Event.create identifier: "sign_up", payload: {"name" => "Eon"}
end
after do
destroy_all_chunks_for!(Event)
end
it { is_expected.not_to be_empty }
it { expect(Event.chunks).not_to be_empty }
it { expect(subject.first.hypertable_name).to eq('events') }
it { expect(subject.first.attributes).to eq(Event.chunks.first.attributes) }
end
end
describe ".hypertables" do
subject { Timescaledb.hypertables }
context "with default example from main setup" do
it { is_expected.not_to be_empty }
specify do
expect(subject.first.attributes)
.to eq(Event.hypertable.attributes)
end
end
end
describe ".default_hypertable_options" do
subject { Timescaledb.default_hypertable_options }
it { is_expected.to eq(Timescaledb::ActsAsHypertable::DEFAULT_OPTIONS) }
end
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/spec_helper.rb | spec/spec_helper.rb | require "bundler/setup"
require "pry"
require "rspec/its"
require "timescaledb"
require 'timescaledb/toolkit'
require "dotenv"
require "database_cleaner/active_record"
require "active_support/testing/time_helpers"
Dotenv.load! if File.exist?(".env")
ActiveSupport.on_load(:active_record_postgresqladapter) do
self.datetime_type = :timestamptz
end
ActiveRecord::Base.establish_connection(ENV['PG_URI_TEST'])
Timescaledb.establish_connection(ENV['PG_URI_TEST'])
require_relative "support/active_record/models"
require_relative "support/active_record/schema"
# Test helper: drops every chunk of the model's hypertable by asking
# TimescaleDB to drop chunks older than one week from now — i.e. all
# data the specs just inserted.
def destroy_all_chunks_for!(klass)
  cutoff = 1.week.from_now
  statement = "SELECT drop_chunks('#{klass.table_name}', '#{cutoff}'::date)"
  ActiveRecord::Base.connection.execute(statement)
end
RSpec.configure do |config|
  # Enable flags like --only-failures and --next-failure
  config.example_status_persistence_file_path = ".rspec_status"
  # Disable RSpec exposing methods globally on `Module` and `main`
  config.disable_monkey_patching!
  config.before(:suite) do
    Time.zone = 'UTC'
  end
  config.expect_with :rspec do |c|
    c.syntax = :expect
  end
  # Cleaning strategy is chosen per example: specs tag themselves with
  # :database_cleaner_strategy (e.g. :truncation for DDL-heavy specs);
  # everything else runs inside a transaction.
  config.before(:each) do |example|
    DatabaseCleaner.strategy = example.metadata.fetch(:database_cleaner_strategy, :transaction)
    DatabaseCleaner.start
  end
  config.after(:each) do
    # Cleanup can deadlock (presumably against TimescaleDB background
    # workers — see the rescue below); retry up to 3 times before failing.
    retries = 3
    begin
      DatabaseCleaner.clean
    rescue ActiveRecord::StatementInvalid => e
      if e.message =~ /deadlock detected/ && (retries -= 1) > 0
        sleep 0.1
        retry
      else
        raise
      end
    end
  end
  config.include ActiveSupport::Testing::TimeHelpers
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/support/active_record/models.rb | spec/support/active_record/models.rb | ActiveSupport.on_load(:active_record) { extend Timescaledb::ActsAsHypertable }
# Hypertable using the gem's default options (backed by the events table).
class Event < ActiveRecord::Base
  acts_as_hypertable
end

# Declared with no options at all — exercises the default option set.
class HypertableWithNoOptions < ActiveRecord::Base
  acts_as_hypertable
end

# Hypertable with a non-default time column.
class HypertableWithOptions < ActiveRecord::Base
  acts_as_hypertable time_column: :timestamp
end

# Same custom time column, but with an explicit table name.
class HypertableWithCustomTimeColumn < ActiveRecord::Base
  self.table_name = "hypertable_with_custom_time_column"
  acts_as_hypertable time_column: :timestamp
end

# Opts out of both generated scope groups: association scopes
# (chunks, jobs, ...) and default time scopes (today, last_hour, ...).
class HypertableSkipAllScopes < ActiveRecord::Base
  self.table_name = "hypertable_skipping_all_scopes"
  acts_as_hypertable time_column: :timestamp, skip_association_scopes: true, skip_default_scopes: true
end
# Hypertable wired up with continuous aggregates across several timeframes.
# Each scope listed under `scopes:` is materialized as a rollup view.
class HypertableWithContinuousAggregates < ActiveRecord::Base
  extend Timescaledb::ActsAsHypertable
  include Timescaledb::ContinuousAggregatesHelper

  # Note: value_column is a SQL expression here, not a plain column name.
  acts_as_hypertable time_column: 'ts',
    segment_by: :identifier,
    value_column: "cast(payload->>'price' as float)"

  # Aggregation scopes that the continuous aggregates materialize.
  scope :total, -> { select("count(*) as total") }
  scope :by_identifier, -> { select("identifier, count(*) as total").group(:identifier) }
  scope :by_version, -> { select("identifier, version, count(*) as total").group(:identifier, :version) }
  scope :purchase, -> { where("identifier = 'purchase'") }
  scope :purchase_stats, -> { select("stats_agg(#{value_column}) as stats_agg").purchase }

  continuous_aggregates(
    time_column: 'ts',
    timeframes: [:minute, :hour, :day, :month],
    scopes: [:total, :by_identifier, :by_version, :purchase_stats],
    # Refresh window and cadence per timeframe.
    refresh_policy: {
      minute: { start_offset: "10 minutes", end_offset: "1 minute", schedule_interval: "1 minute" },
      hour: { start_offset: "4 hour", end_offset: "1 hour", schedule_interval: "1 hour" },
      day: { start_offset: "3 day", end_offset: "1 day", schedule_interval: "1 hour" },
      month: { start_offset: "3 month", end_offset: "1 hour", schedule_interval: "1 hour" }
    }
  )

  # The generated aggregate classes (descendants) aggregate over `total`
  # and each gain a `stats` scope for stats_agg rollups.
  descendants.each do |cagg|
    cagg.hypertable_options = hypertable_options.merge(value_column: :total)
    cagg.scope :stats, -> { select("average(stats_agg), stddev(stats_agg)") }
  end
end
# Plain (non-hypertable) model, used to assert negative behavior such as
# acts_as_hypertable? returning false.
class NonHypertable < ActiveRecord::Base
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/support/active_record/schema.rb | spec/support/active_record/schema.rb |
# Builds the spec schema: several hypertables exercising different
# TimescaleDB options, plus one plain table for negative tests.
def setup_tables
  ActiveRecord::Schema.define(version: 1) do
    # Tiny chunk interval so specs create chunks quickly; compression is
    # segmented by the identifier column.
    hypertable_options = { chunk_time_interval: '1 min', compress_segmentby: 'identifier', compress_after: '7 days' }
    create_table(:events, id: false, hypertable: hypertable_options) do |t|
      t.string :identifier, null: false
      t.jsonb :payload
      t.datetime :created_at
    end
    # Exercises a wider option surface: custom time column, compress_orderby,
    # retention via drop_after, and if_not_exists.
    create_table(:hypertable_with_options, id: false, hypertable: {
      time_column: :ts,
      chunk_time_interval: '5 min',
      compress_segmentby: 'identifier',
      compress_orderby: 'ts',
      compress_after: '15 min',
      drop_after: '1 hour',
      if_not_exists: true
    }) do |t|
      t.serial :id, primary_key: false
      t.datetime :ts
      t.string :identifier
      t.index [:id, :ts], name: "index_hypertable_with_options_on_id_and_ts"
    end
    # Integer-based partitioning: chunks keyed on the id column.
    create_table(:hypertable_with_id_partitioning, hypertable: {
      time_column: 'id',
      chunk_time_interval: 1_000_000
    })
    # Control table that is not a hypertable.
    create_table(:non_hypertables) do |t|
      t.string :name
    end
  end
end
# Drops every table in the test database, cascading to dependent objects
# (chunks, continuous aggregates, etc.).
def teardown_tables
  connection = ActiveRecord::Base.connection
  connection.tables.each do |table_name|
    connection.drop_table(table_name, force: :cascade)
  end
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/schema_dumper_spec.rb | spec/timescaledb/schema_dumper_spec.rb | RSpec.describe Timescaledb::SchemaDumper, database_cleaner_strategy: :truncation do
let(:con) { ActiveRecord::Base.connection }
let(:query) do
Event.select("time_bucket('1m', created_at) as time,
identifier as label,
count(*) as value").group("1,2")
end
let(:query_daily) do
Event
.from("event_counts")
.select("time_bucket('1d', time) as time,
sum(value) as value").group("1")
end
context "schema" do
it "should include the timescaledb extension" do
dump = dump_output
expect(dump).to include 'enable_extension "timescaledb"'
expect(dump).to include 'enable_extension "timescaledb_toolkit"'
end
it "should skip internal schemas" do
dump = dump_output
expect(dump).not_to include 'create_schema "_timescaledb_cache"'
expect(dump).not_to include 'create_schema "_timescaledb_config"'
expect(dump).not_to include 'create_schema "_timescaledb_catalog"'
expect(dump).not_to include 'create_schema "_timescaledb_debug"'
expect(dump).not_to include 'create_schema "_timescaledb_functions"'
expect(dump).not_to include 'create_schema "_timescaledb_internal"'
expect(dump).not_to include 'create_schema "timescaledb_experimental"'
expect(dump).not_to include 'create_schema "timescaledb_information"'
expect(dump).not_to include 'create_schema "toolkit_experimental"'
end
end
context "hypertables" do
let(:sorted_hypertables) do
%w[events hypertable_with_options migration_tests]
end
it "dump the create_table sorted by hypertable_name" do
previous = 0
dump = dump_output
sorted_hypertables.each do |name|
index = dump.index(%|create_hypertable "#{name}"|)
if index.nil?
puts "couldn't find hypertable #{name} in the output", dump
end
expect(index).to be > previous
previous = index
end
end
context "with retention policies" do
before do
con.create_retention_policy("events", drop_after: "1 week")
end
after do
con.remove_retention_policy("events")
end
it "add retention policies after hypertables" do
dump = dump_output
last_hypertable = dump.index(%|create_hypertable "#{sorted_hypertables.last}"|)
index = dump.index(%|create_retention_policy "events", drop_after: "P7D"|)
expect(index).to be > last_hypertable
end
end
end
let(:dump_output) do
stream = StringIO.new
ActiveRecord::SchemaDumper.dump(con, stream)
stream.string
end
it "dumps a create_continuous_aggregate for a view in the database" do
con.execute("DROP MATERIALIZED VIEW IF EXISTS event_daily_counts")
con.execute("DROP MATERIALIZED VIEW IF EXISTS event_counts")
con.create_continuous_aggregate(:event_counts, query, materialized_only: true, finalized: true)
con.create_continuous_aggregate(:event_daily_counts, query_daily, materialized_only: true, finalized: true)
if defined?(Scenic)
Scenic.load # Normally this happens in a railtie, but we aren't loading a full rails env here
con.execute("DROP VIEW IF EXISTS searches")
con.create_view :searches, sql_definition: "SELECT 'needle'::text AS haystack"
end
dump = dump_output
expect(dump).to include 'create_continuous_aggregate("event_counts"'
expect(dump).to include 'materialized_only: true, finalized: true'
expect(dump).not_to include ', ,'
expect(dump).not_to include 'create_view "event_counts"' # Verify Scenic ignored this view
expect(dump).to include 'create_view "searches", sql_definition: <<-SQL' if defined?(Scenic)
hypertable_creation = dump.index('create_hypertable "events"')
caggs_creation = dump.index('create_continuous_aggregate("event_counts"')
expect(hypertable_creation).to be < caggs_creation
caggs_dependent_creation = dump.index('create_continuous_aggregate("event_daily_counts"')
expect(caggs_creation).to be < caggs_dependent_creation
end
describe "dumping hypertable options" do
before(:each) do
con.drop_table :partition_by_hash_tests, force: :cascade, if_exists: true
con.drop_table :partition_by_range_tests, force: :cascade, if_exists: true
con.drop_table :partition_by_integer_tests, force: :cascade, if_exists: true
end
it "extracts by_hash options" do
options = { partition_column: "category", number_partitions: 3, create_default_indexes: false }
con.create_table :partition_by_hash_tests, id: false, hypertable: options do |t|
t.string :category
t.datetime :created_at, default: -> { "now()" }
t.index [:category, :created_at], unique: true, name: "index_partition_by_hash_tests_on_category_and_created_at"
end
dump = dump_output
expect(dump).to include 'create_hypertable "partition_by_hash_tests", time_column: "created_at", chunk_time_interval: "7 days", partition_column: "category", number_partitions: 3, create_default_indexes: false'
end
it "extracts index options" do
options = { create_default_indexes: false }
con.create_table :partition_by_range_tests, id: false, hypertable: options do |t|
t.timestamps
end
dump = dump_output
expect(dump).to include 'create_hypertable "partition_by_range_tests", time_column: "created_at", chunk_time_interval: "7 days"'
end
it "extracts integer chunk_time_interval" do
options = { time_column: :id, chunk_time_interval: 10000 }
con.create_table :partition_by_integer_tests, hypertable: options do |t|
t.timestamps
end
dump = dump_output
expect(dump).to include 'create_hypertable "partition_by_integer_tests", time_column: "id", chunk_time_interval: 10000'
end
context "compress_segmentby" do
before(:each) do
con.drop_table :segmentby_tests, if_exists: true, force: :cascade
end
it "handles multiple compress_segmentby" do
options = { compress_segmentby: "identifier,second_identifier" }
con.create_table :segmentby_tests, hypertable: options, id: false do |t|
t.string :identifier
t.string :second_identifier
t.timestamps
end
dump = dump_output
expect(dump).to include 'create_hypertable "segmentby_tests", time_column: "created_at", chunk_time_interval: "7 days", compress_segmentby: "identifier, second_identifier", compress_orderby: "created_at ASC"'
end
end
context "compress_orderby" do
before(:each) do
con.drop_table :orderby_tests, if_exists: true, force: :cascade
end
context "ascending order" do
context "nulls first" do
it "extracts compress_orderby correctly" do
options = { compress_segmentby: "identifier", compress_orderby: "created_at ASC NULLS FIRST" }
con.create_table :orderby_tests, hypertable: options, id: false do |t|
t.string :identifier
t.timestamps
end
dump = dump_output
expect(dump).to include 'create_hypertable "orderby_tests", time_column: "created_at", chunk_time_interval: "7 days", compress_segmentby: "identifier", compress_orderby: "created_at ASC NULLS FIRST"'
end
end
context "nulls last" do
it "extracts compress_orderby correctly" do
options = { compress_segmentby: "identifier", compress_orderby: "created_at DESC NULLS LAST" }
con.create_table :orderby_tests, hypertable: options, id: false do |t|
t.string :identifier
t.timestamps
end
dump = dump_output
expect(dump).to include 'create_hypertable "orderby_tests", time_column: "created_at", chunk_time_interval: "7 days", compress_segmentby: "identifier", compress_orderby: "created_at DESC NULLS LAST"'
end
end
end
context "descending order" do
context "nulls first" do
it "extracts compress_orderby correctly" do
options = { compress_segmentby: "identifier", compress_orderby: "created_at DESC NULLS FIRST" }
con.create_table :orderby_tests, hypertable: options, id: false do |t|
t.string :identifier
t.timestamps
end
dump = dump_output
expect(dump).to include 'compress_orderby: "created_at DESC"'
end
end
context "nulls last" do
it "extracts compress_orderby correctly" do
options = { compress_segmentby: "identifier", compress_after: "1 month", compress_orderby: "created_at DESC NULLS LAST" }
con.create_table :orderby_tests, hypertable: options, id: false do |t|
t.string :identifier
t.timestamps
end
dump = dump_output
expect(dump).to include 'create_hypertable "orderby_tests", time_column: "created_at", chunk_time_interval: "7 days", compress_segmentby: "identifier", compress_orderby: "created_at DESC NULLS LAST", compress_after: "P1M"'
end
end
end
end
end
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/stats_spec.rb | spec/timescaledb/stats_spec.rb | RSpec.describe Timescaledb::Stats do
let(:hypertables) { Timescaledb::Hypertable.all }
subject(:stats) { described_class.new(hypertables) }
describe '.to_h' do
it 'returns expected structure' do
approximate_row_count = hypertables.each_with_object(Hash.new) do |hypertable, count|
name = [hypertable.hypertable_schema, hypertable.hypertable_name].join('.')
count[name] = a_kind_of(Integer)
end
expect(subject.to_h).to match(
a_hash_including(
continuous_aggregates: { total: a_kind_of(Integer) },
hypertables: {
approximate_row_count: approximate_row_count,
chunks: { compressed: a_kind_of(Integer), total: a_kind_of(Integer), uncompressed: a_kind_of(Integer) },
count: hypertables.count,
size: {
compressed: a_kind_of(String),
uncompressed: a_kind_of(String)
},
uncompressed_count: a_kind_of(Integer)
},
jobs_stats: {
failures: a_kind_of(Integer), runs: a_kind_of(Integer), success: a_kind_of(Integer)
}
)
)
end
end
end | ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/migration_helper_spec.rb | spec/timescaledb/migration_helper_spec.rb | RSpec.describe Timescaledb::MigrationHelpers, database_cleaner_strategy: :truncation do
describe ".create_table" do
let(:con) { ActiveRecord::Base.connection }
before(:each) do
con.drop_table :migration_tests, if_exists: true, force: :cascade
end
subject(:create_table) do
con.create_table :migration_tests, hypertable: hypertable_options, id: false do |t|
t.string :identifier
t.jsonb :payload
t.timestamps
end
end
let(:hypertable_options) do
{
time_column: 'created_at',
chunk_time_interval: '1 min',
compress_segmentby: 'identifier',
compress_orderby: 'created_at',
compress_after: '7 days'
}
end
it 'call create_hypertable with params' do
expect(ActiveRecord::Base.connection)
.to receive(:create_hypertable)
.with(:migration_tests, hypertable_options)
.once
create_table
end
context 'with hypertable options' do
let(:hypertable) do
Timescaledb::Hypertable.find_by(hypertable_name: :migration_tests)
end
it 'enables compression' do
create_table
expect(hypertable.attributes).to include({
"compression_enabled"=>true,
"hypertable_name"=>"migration_tests",
"hypertable_schema" => "public",
"num_chunks" => 0,
"num_dimensions" => 1,
"tablespaces" => nil})
end
end
end
describe ".create_caggs" do
let(:con) { ActiveRecord::Base.connection }
before(:each) do
con.drop_table :ticks, if_exists: true, force: :cascade
con.create_table :ticks, hypertable: hypertable_options, id: false do |t|
t.string :symbol
t.decimal :price
t.integer :volume
t.timestamps
end
end
after(:each) do
con.drop_continuous_aggregates(:ohlc_1m)
end
let(:hypertable_options) do
{
time_column: 'created_at',
chunk_time_interval: '1 min',
compress_segmentby: 'symbol',
compress_orderby: 'created_at',
compress_after: '7 days'
}
end
let(:model) do
Tick = Class.new(ActiveRecord::Base) do
self.table_name = 'ticks'
self.primary_key = 'symbol'
acts_as_hypertable
end
end
let(:query) do
model.select("time_bucket('1m', created_at) as time,
symbol,
FIRST(price, created_at) as open,
MAX(price) as high,
MIN(price) as low,
LAST(price, created_at) as close,
SUM(volume) as volume").group("1,2")
end
let(:options) do
{with_data: true}
end
subject(:create_caggs) { con.create_continuous_aggregates('ohlc_1m', query, **options) }
specify do
expect do
create_caggs
end.to change { model.caggs.count }.from(0).to(1)
expect(model.caggs.first.jobs).to be_empty
end
context 'when using refresh policies' do
let(:options) do
{
with_data: false,
refresh_policies: {
start_offset: "INTERVAL '1 month'",
end_offset: "INTERVAL '1 minute'",
schedule_interval: "INTERVAL '1 minute'"
}
}
end
specify do
expect do
create_caggs
end.to change { model.caggs.count }.from(0).to(1)
expect(model.caggs.first.jobs).not_to be_empty
end
end
context 'when overriding WITH clauses' do
let(:options) do
{
materialized_only: true,
create_group_indexes: true,
finalized: true
}
end
before do
allow(ActiveRecord::Base.connection).to(receive(:execute).and_call_original)
end
specify do
expect do
create_caggs
end.to change { model.caggs.count }.from(0).to(1)
end
context 'when overriding WITH clause timescaledb.materialized_only' do
let(:options) do
{
materialized_only: true
}
end
specify do
create_caggs
expect(ActiveRecord::Base.connection).to have_received(:execute).with(include('timescaledb.materialized_only=true'))
end
end
context 'when overriding WITH clause timescaledb.create_group_indexes' do
let(:options) do
{
create_group_indexes: true
}
end
specify do
create_caggs
expect(ActiveRecord::Base.connection).to have_received(:execute).with(include('timescaledb.create_group_indexes=true'))
end
end
context 'when overriding WITH clause timescaledb.finalized' do
let(:options) do
{
finalized: true
}
end
specify do
create_caggs
expect(ActiveRecord::Base.connection).to have_received(:execute).with(include('timescaledb.finalized=true'))
end
end
end
end
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/connection_spec.rb | spec/timescaledb/connection_spec.rb | require 'ostruct'
RSpec.describe Timescaledb do
  describe '.establish_connection' do
    # Only asserts the call succeeds; the URI comes from the test env.
    it 'returns a PG::Connection object' do
      expect do
        Timescaledb.establish_connection(ENV['PG_URI_TEST'])
      end.to_not raise_error
    end
  end

  describe ::Timescaledb::Connection do
    subject(:connection) { Timescaledb::Connection.instance }

    it 'returns a Connection object' do
      is_expected.to be_a(Timescaledb::Connection)
    end

    it 'has fast access to the connection' do
      # NOTE(review): reaches in via send, presumably because #connection
      # is private on the singleton — confirm against the class definition.
      expect(connection.send(:connection)).to be_a(PG::Connection)
    end

    describe '#connected?' do
      it { expect(connection.connected?).to be_truthy }
    end

    describe '#query_first' do
      let(:sql) { "select 1 as one" }
      subject(:result) { connection.query_first(sql) }

      it { expect(result).to be_a(OpenStruct) }
    end

    describe '#query' do
      # Rows come back as OpenStructs with string values straight from PG.
      let(:sql) { "select 1 as one" }
      subject(:result) { connection.query(sql) }

      it { expect(result).to eq([OpenStruct.new({"one" => "1"})]) }
    end
  end
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/acts_as_hypertable_spec.rb | spec/timescaledb/acts_as_hypertable_spec.rb | RSpec.describe Timescaledb::ActsAsHypertable do
describe ".acts_as_hypertable?" do
context "when the model has not been declared as a hypertable" do
it "returns false" do
expect(NonHypertable.acts_as_hypertable?).to eq(false)
end
end
context "when the model has been declared as a hypertable" do
it "returns true" do
expect(HypertableWithOptions.acts_as_hypertable?).to eq(true)
end
end
end
describe "#define_association_scopes" do
context "when the model is a hypertable" do
it "defines the association scopes" do
expect(Event).to respond_to(:chunks)
expect(Event).to respond_to(:hypertable)
expect(Event).to respond_to(:jobs)
expect(Event).to respond_to(:job_stats)
expect(Event).to respond_to(:compression_settings)
expect(Event).to respond_to(:caggs)
end
end
context "when model skips association scopes" do
it "does not define the association scopes" do
expect(HypertableSkipAllScopes).not_to respond_to(:chunks)
expect(HypertableSkipAllScopes).not_to respond_to(:hypertable)
expect(HypertableSkipAllScopes).not_to respond_to(:jobs)
expect(HypertableSkipAllScopes).not_to respond_to(:job_stats)
expect(HypertableSkipAllScopes).not_to respond_to(:compression_settings)
expect(HypertableSkipAllScopes).not_to respond_to(:continuous_aggregates)
end
end
end
describe 'when model skips default scopes' do
context "when the model is a hypertable" do
it "defines the association scopes" do
expect(Event).to respond_to(:previous_month)
expect(Event).to respond_to(:previous_week)
end
end
it 'does not define the default scopes' do
expect(HypertableSkipAllScopes).not_to respond_to(:previous_month)
expect(HypertableSkipAllScopes).not_to respond_to(:previous_week)
expect(HypertableSkipAllScopes).not_to respond_to(:this_month)
expect(HypertableSkipAllScopes).not_to respond_to(:this_week)
expect(HypertableSkipAllScopes).not_to respond_to(:yesterday)
expect(HypertableSkipAllScopes).not_to respond_to(:today)
expect(HypertableSkipAllScopes).not_to respond_to(:last_hour)
end
end
describe ".hypertable_options" do
context "when non-default options are set" do
let(:model) { HypertableWithCustomTimeColumn }
it "uses the non-default options" do
expect(model.hypertable_options).not_to eq(Timescaledb.default_hypertable_options)
expect(model.hypertable_options[:time_column]).to eq(:timestamp)
end
end
context "when no options are set" do
let(:model) { HypertableWithNoOptions }
it "uses the default options" do
expect(model.hypertable_options).to eq(Timescaledb.default_hypertable_options)
end
end
end
describe ".hypertable" do
subject { Event.hypertable }
it "has compression enabled by default" do
is_expected.to be_compression_enabled
end
its(:num_dimensions) { is_expected.to eq(1) }
its(:tablespaces) { is_expected.to be_nil }
its(:hypertable_name) { is_expected.to eq(Event.table_name) }
end
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/toolkit_helper_spec.rb | spec/timescaledb/toolkit_helper_spec.rb | RSpec.describe Timescaledb::Toolkit::Helpers, database_cleaner_strategy: :truncation do
let(:con) { ActiveRecord::Base.connection }
let(:hypertable_options) do
{
time_column: 'ts',
chunk_time_interval: '1 day',
compress_segmentby: 'device_id',
compress_orderby: 'ts',
compress_after: '7 days'
}
end
describe "add_toolkit_to_search_path!" do
it "adds toolkit_experimental to search path" do
expect do
con.add_toolkit_to_search_path!
end.to change(con, :schema_search_path)
.from('"$user", public')
.to('"$user", public, toolkit_experimental')
end
end
describe "pipeline functions" do
before(:each) do
con.add_toolkit_to_search_path!
if con.table_exists?(:measurements)
# We need to truncate to avoid foreign key constraint errors + deadlocks
con.execute("TRUNCATE measurements CASCADE")
con.execute("DROP TABLE measurements CASCADE")
end
con.create_table :measurements, hypertable: hypertable_options, id: false do |t|
t.integer :device_id
t.decimal :val
t.timestamp :ts
end
end
let(:model) do
Measurement = Class.new(ActiveRecord::Base) do
extend Timescaledb::ActsAsHypertable
self.table_name = 'measurements'
self.primary_key = nil
acts_as_hypertable time_column: "ts",
segment_by: "device_id",
value_column: "val"
end
end
let(:yesterday) { 1.day.ago }
before do
[1,2,3].each_with_index do |v,i|
model.create(device_id: 1, ts: yesterday + i.hour, val: v)
end
end
describe "#volatility" do
let(:plain_volatility_query) do
model.select(<<~SQL).group("device_id")
device_id, timevector(ts, val) -> sort() -> delta() -> abs() -> sum() as volatility
SQL
end
it "works with plain sql"do
expect(plain_volatility_query.first.volatility).to eq(2)
end
it { expect(model.value_column).to eq("val") }
it { expect(model.time_column).to eq(:ts) }
context "with columns specified in the volatility scope" do
let(:query) do
model.volatility(segment_by: "device_id")
end
it "segment by the param in the volatility"do
expect(query.to_sql).to eq(plain_volatility_query.to_sql.tr("\n", ""))
end
end
context "without columns" do
let(:query) do
model.volatility
end
it "uses the default segment_by_column"do
expect(query.to_sql).to eq(plain_volatility_query.to_sql.tr("\n", ""))
end
end
context "several devices" do
before :each do
[1,2,3].each_with_index do |v,i|
model.create(device_id: 2, ts: yesterday + i.hour, val: v+i)
model.create(device_id: 3, ts: yesterday + i.hour, val: i * i)
end
end
# Dataset example now
## model.all.order(:device_id, :ts).map(&:attributes)
#=> [
# {"device_id"=>1, "val"=>1.0, "ts"=>...},
# {"device_id"=>1, "val"=>2.0, "ts"=>...},
# {"device_id"=>1, "val"=>3.0, "ts"=>...},
# {"device_id"=>2, "val"=>1.0, "ts"=>...},
# {"device_id"=>2, "val"=>3.0, "ts"=>...},
# {"device_id"=>2, "val"=>5.0, "ts"=>...},
# {"device_id"=>3, "val"=>0.0, "ts"=>...},
# {"device_id"=>3, "val"=>1.0, "ts"=>...},
# {"device_id"=>3, "val"=>4.0, "ts"=>...}]
let(:volatility_query_for_all) do
model.volatility(segment_by: nil)
end
let(:volatility_query_for_every_device) do
model.order("device_id")
.volatility(segment_by: "device_id")
end
specify do
expect(volatility_query_for_all.map(&:attributes)).to eq([
{"volatility"=>11.0}])
expect(volatility_query_for_every_device.map(&:attributes)).to eq([
{"device_id"=>1, "volatility"=>2.0},
{"device_id"=>2, "volatility"=>4.0},
{"device_id"=>3, "volatility"=>4.0}])
end
end
end
describe "interpolate and backfill" do
before do
model.create(device_id: 1, ts: yesterday + 4.hour, val: 5)
model.create(device_id: 1, ts: yesterday + 6.hour, val: 7)
model.create(device_id: 1, ts: yesterday + 8.hour, val: 9)
end
specify do
res = model.select(<<SQL).group("hour, device_id").order("hour")
time_bucket_gapfill('1 hour', ts,
now() - INTERVAL '24 hours',
now() - INTERVAL '16 hours') AS hour,
device_id,
avg(val) AS value,
interpolate(avg(val))
SQL
expect(res.map{|e|[e["value"]&.to_f,e["interpolate"]]}).to eq([
[1.0, 1.0],
[2.0, 2.0],
[3.0, 3.0],
[nil, 4.0],
[5.0, 5.0],
[nil, 6.0],
[7.0, 7.0],
[nil, 8.0],
[9.0, 9.0]])
end
end
describe "stats_aggs" do
let(:query) do
model.select(<<~SQL).group(1)
time_bucket('1 h'::interval, ts) as bucket,
stats_agg(val) as stats
SQL
end
let(:options) { { with_data: true } }
before(:each) { con.create_continuous_aggregates('measurements_stats', query, **options) }
after(:each) { con.drop_continuous_aggregates('measurements_stats') rescue nil }
let(:view) do
con.execute(<<~SQL)
SELECT
bucket,
average(rolling(stats) OVER (ORDER BY bucket RANGE '#{preceeding_range}' PRECEDING)),
stddev(rolling(stats) OVER (ORDER BY bucket RANGE '#{preceeding_range}' PRECEDING))
FROM measurements_stats;
SQL
end
context 'when one hour preceeding' do
let(:preceeding_range) { '1 hour' }
specify do
expect(view.map{|e|e["average"]}).to eq([1,1.5,2.5])
end
end
context 'when two hour preceeding' do
let(:preceeding_range) { '2 hours' }
specify do
expect(view.map{|e|e["average"]}).to eq([1,1.5,2.0])
end
end
end
end
describe 'lttb' do
before(:each) do
con.add_toolkit_to_search_path!
if con.table_exists?(:measurements)
con.execute("TRUNCATE measurements CASCADE")
con.execute("DROP TABLE measurements CASCADE")
end
con.create_table :measurements, hypertable: hypertable_options, id: false do |t|
t.integer :device_id
t.decimal :val
t.datetime :ts
end
end
let(:model) do
Measurement = Class.new(ActiveRecord::Base) do
extend Timescaledb::ActsAsHypertable
self.table_name = 'measurements'
self.primary_key = nil
acts_as_hypertable time_column: "ts",
segment_by: "device_id",
value_column: "val"
end
end
before do
[['2020-1-1', 10],
['2020-1-2', 21],
['2020-1-3', 19],
['2020-1-4', 32],
['2020-1-5', 12],
['2020-1-6', 14],
['2020-1-7', 18],
['2020-1-8', 29],
['2020-1-9', 23],
['2020-1-10', 27],
['2020-1-11', 14]].each do |row|
time= Time.mktime(*row[0].split('-'))
model.create(device_id: 1, ts: time, val: row[1])
end
end
context 'when segment_by is nil' do
it 'downsample as an array' do
downsampled = model.lttb(threshold: 5, segment_by: nil)
data = downsampled.map do |result|
time, value = result
[time.to_date.to_s, value.to_i]
end
expect(data.size).to eq(5)
expect(data).to eq([
["2020-01-01", 10],
["2020-01-04", 32],
["2020-01-05", 12],
["2020-01-08", 29],
["2020-01-11", 14]])
end
end
context 'when segment_by is a column' do
it 'downsample as a hash' do
downsampled = model.lttb(threshold: 5, segment_by: "device_id")
key = downsampled.keys.first
data = downsampled[key].map do |result|
time, value = result
[time.to_date.to_s, value.to_i]
end
expect(data.size).to eq(5)
expect(data).to eq([
["2020-01-01", 10],
["2020-01-04", 32],
["2020-01-05", 12],
["2020-01-08", 29],
["2020-01-11", 14]])
end
end
end
# Candlestick (OHLC) aggregation specs for the ticks hypertable.
describe 'candlestick' do
  before(:each) do
    con.add_toolkit_to_search_path!
    # Start from a clean ticks table for every example.
    if con.table_exists?(:ticks)
      con.execute("TRUNCATE ticks CASCADE")
      con.execute("DROP TABLE ticks CASCADE")
    end
    con.create_table :ticks, hypertable: hypertable_options, id: false do |t|
      t.text :symbol
      t.decimal :price
      t.decimal :volume
      t.datetime :time
    end
  end

  let(:hypertable_options) do
    {
      time_column: 'time',
      chunk_time_interval: '1 month',
      compress_segmentby: 'symbol',
      compress_orderby: 'time'
    }
  end

  let(:model) do
    Tick = Class.new(ActiveRecord::Base) do
      extend Timescaledb::ActsAsHypertable
      self.table_name = 'ticks'
      self.primary_key = nil

      acts_as_hypertable time_column: "time",
        segment_by: "symbol",
        value_column: "price"
    end
  end

  before do
    # Seed one symbol with four daily prices at UTC midnight.
    [['2020-1-2', 10],
     ['2020-1-3', 13],
     ['2020-1-4', 9],
     ['2020-1-5', 12]].each do |row|
      time = Time.utc(*row[0].split('-'))
      model.create(time: time, price: row[1], symbol: "FIRST")
    end
  end

  context "when call ohlc without segment_by" do
    let(:ohlcs) do
      model.where(symbol: "FIRST").candlestick(timeframe: '1w', segment_by: nil)
    end

    it "process open, high, low, close" do
      expect(ohlcs.size).to eq(1)
      ohlc = ohlcs.first.attributes
      expect(ohlc.slice(*%w[open high low close]))
        .to eq({"open"=>10.0, "high"=>13.0, "low"=>9.0, "close"=>12.0})
      # The *_time columns carry the timestamp of each OHLC component;
      # only the day-of-month is asserted here.
      expect(ohlc.slice(*%w[open_time high_time low_time close_time]).transform_values(&:day))
        .to eq({"open_time"=>2, "high_time"=>3, "low_time"=>4, "close_time"=>5})
    end
  end

  # Typo fixed in the context description ("wth" -> "with").
  context "when call ohlc with segment_by symbol" do
    before do
      # A second symbol so the segmented result contains two groups.
      [['2020-1-2', 20],
       ['2020-1-3', 23],
       ['2020-1-4', 19],
       ['2020-1-5', 14]].each do |row|
        time = Time.utc(*row[0].split('-'))
        model.create(time: time, price: row[1], symbol: "SECOND")
      end
    end

    let!(:ohlcs) do
      model.candlestick(timeframe: '1w', segment_by: :symbol)
    end

    it "process open, high, low, close" do
      expect(ohlcs.size).to eq(2)
      data = ohlcs.group_by(&:symbol).transform_values{|v|v.first.attributes}
      first = data["FIRST"]
      second = data["SECOND"]
      expect(first.slice(*%w[open high low close]))
        .to eq({"open"=>10.0, "high"=>13.0, "low"=>9.0, "close"=>12.0})
      expect(second.slice(*%w[open high low close]))
        .to eq({"open"=>20.0, "high"=>23.0, "low"=>14.0, "close"=>14.0})
      expect(first.slice(*%w[open_time high_time low_time close_time]).transform_values(&:day))
        .to eq({"open_time"=>2, "high_time"=>3, "low_time"=>4, "close_time"=>5})
      expect(second.slice(*%w[open_time high_time low_time close_time]).transform_values(&:day))
        .to eq({"open_time"=>2, "high_time"=>3, "low_time"=>5, "close_time"=>5})
    end
  end
end
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/continuous_aggregates_helper_spec.rb | spec/timescaledb/continuous_aggregates_helper_spec.rb | require 'spec_helper'
RSpec.describe Timescaledb::ContinuousAggregatesHelper do
let(:test_class) do
HypertableWithContinuousAggregates
end
# One-time setup: create the hypertable that backs every example in this
# file; dropped (together with the dynamically defined class) afterwards.
before(:all) do
ActiveRecord::Base.connection.instance_exec do
hypertable_options = {
time_column: 'ts',
chunk_time_interval: '1 day',
compress_segmentby: 'identifier, version',
compress_orderby: 'ts DESC',
}
create_table(:hypertable_with_continuous_aggregates, id: false, hypertable: hypertable_options) do |t|
t.datetime :ts, null: false
t.text :identifier, :version, null: false
t.jsonb :payload
end
end
end
after(:all) do
ActiveRecord::Base.connection.drop_table :hypertable_with_continuous_aggregates, if_exists: true, force: :cascade
# Remove the model constant so later spec files don't see a stale class.
Object.send(:remove_const, :HypertableWithContinuousAggregates) if Object.const_defined?(:HypertableWithContinuousAggregates)
end
# Verifies the macro-generated aggregate classes: naming, table names,
# per-class config, WHERE clauses and hierarchical (rollup) base queries.
describe '.continuous_aggregates' do
it 'defines aggregate classes' do
expect(test_class.const_defined?(:TotalPerMinute)).to be true
expect(test_class.const_defined?(:TotalPerHour)).to be true
expect(test_class.const_defined?(:TotalPerDay)).to be true
expect(test_class.const_defined?(:TotalPerMonth)).to be true
expect(test_class.const_defined?(:ByVersionPerMinute)).to be true
expect(test_class.const_defined?(:ByVersionPerHour)).to be true
expect(test_class.const_defined?(:ByVersionPerDay)).to be true
expect(test_class.const_defined?(:ByVersionPerMonth)).to be true
expect(test_class.const_defined?(:ByIdentifierPerMinute)).to be true
expect(test_class.const_defined?(:ByIdentifierPerHour)).to be true
expect(test_class.const_defined?(:ByIdentifierPerDay)).to be true
expect(test_class.const_defined?(:ByIdentifierPerMonth)).to be true
end
it 'sets up correct table names for aggregates' do
expect(test_class::TotalPerMinute.table_name).to eq('total_per_minute')
expect(test_class::TotalPerHour.table_name).to eq('total_per_hour')
expect(test_class::TotalPerDay.table_name).to eq('total_per_day')
expect(test_class::TotalPerMonth.table_name).to eq('total_per_month')
expect(test_class::ByVersionPerMinute.table_name).to eq('by_version_per_minute')
expect(test_class::ByVersionPerHour.table_name).to eq('by_version_per_hour')
expect(test_class::ByVersionPerDay.table_name).to eq('by_version_per_day')
expect(test_class::ByVersionPerMonth.table_name).to eq('by_version_per_month')
expect(test_class::ByIdentifierPerMinute.table_name).to eq('by_identifier_per_minute')
expect(test_class::ByIdentifierPerHour.table_name).to eq('by_identifier_per_hour')
expect(test_class::ByIdentifierPerDay.table_name).to eq('by_identifier_per_day')
expect(test_class::ByIdentifierPerMonth.table_name).to eq('by_identifier_per_month')
end
# Description typo fixed ("setups up" -> "sets up").
it 'sets up configuration for each aggregate' do
expected_config = {
scope_name: :total,
select: "count(*) as total",
where: nil,
group_by: [],
refresh_policy: {
minute: { start_offset: "10 minutes", end_offset: "1 minute", schedule_interval: "1 minute" },
hour: { start_offset: "4 hour", end_offset: "1 hour", schedule_interval: "1 hour" },
day: { start_offset: "3 day", end_offset: "1 day", schedule_interval: "1 hour" },
month: { start_offset: "3 month", end_offset: "1 hour", schedule_interval: "1 hour" }
}
}
base_query = test_class::TotalPerMinute.base_query
expect(base_query).to eq("SELECT time_bucket('1 minute', ts) as ts, count(*) as total FROM \"hypertable_with_continuous_aggregates\" GROUP BY time_bucket('1 minute', ts)")
expect(test_class::TotalPerMinute.config).to eq(expected_config)
end
it "sets the where clause for each aggregate" do
base_query = test_class::PurchaseStatsPerMinute.base_query
expect(base_query).to include("WHERE (identifier = 'purchase')")
end
it 'defines rollup scope for aggregates' do
test_class.create_continuous_aggregates
# Minute views read the hypertable; coarser views roll up the next
# finer view. (Unused `aggregate_classes` local removed.)
expect(test_class::TotalPerMinute.base_query).to eq("SELECT time_bucket('1 minute', ts) as ts, count(*) as total FROM \"hypertable_with_continuous_aggregates\" GROUP BY time_bucket('1 minute', ts)")
expect(test_class::TotalPerMonth.base_query).to eq("SELECT time_bucket('1 month', ts) as ts, sum(total) as total FROM \"total_per_day\" GROUP BY time_bucket('1 month', ts)")
expect(test_class::TotalPerDay.base_query).to eq("SELECT time_bucket('1 day', ts) as ts, sum(total) as total FROM \"total_per_hour\" GROUP BY time_bucket('1 day', ts)")
expect(test_class::TotalPerHour.base_query).to eq("SELECT time_bucket('1 hour', ts) as ts, sum(total) as total FROM \"total_per_minute\" GROUP BY time_bucket('1 hour', ts)")
expect(test_class::ByVersionPerMinute.base_query).to eq("SELECT time_bucket('1 minute', ts) as ts, identifier, version, count(*) as total FROM \"hypertable_with_continuous_aggregates\" GROUP BY time_bucket('1 minute', ts), identifier, version")
expect(test_class::ByVersionPerMonth.base_query).to eq("SELECT time_bucket('1 month', ts) as ts, identifier, version, sum(total) as total FROM \"by_version_per_day\" GROUP BY time_bucket('1 month', ts), identifier, version")
expect(test_class::ByVersionPerDay.base_query).to eq("SELECT time_bucket('1 day', ts) as ts, identifier, version, sum(total) as total FROM \"by_version_per_hour\" GROUP BY time_bucket('1 day', ts), identifier, version")
expect(test_class::ByVersionPerHour.base_query).to eq("SELECT time_bucket('1 hour', ts) as ts, identifier, version, sum(total) as total FROM \"by_version_per_minute\" GROUP BY time_bucket('1 hour', ts), identifier, version")
expect(test_class::ByIdentifierPerMinute.base_query).to eq("SELECT time_bucket('1 minute', ts) as ts, identifier, count(*) as total FROM \"hypertable_with_continuous_aggregates\" GROUP BY time_bucket('1 minute', ts), identifier")
expect(test_class::ByIdentifierPerMonth.base_query).to eq("SELECT time_bucket('1 month', ts) as ts, identifier, sum(total) as total FROM \"by_identifier_per_day\" GROUP BY time_bucket('1 month', ts), identifier")
expect(test_class::ByIdentifierPerDay.base_query).to eq("SELECT time_bucket('1 day', ts) as ts, identifier, sum(total) as total FROM \"by_identifier_per_hour\" GROUP BY time_bucket('1 day', ts), identifier")
expect(test_class::ByIdentifierPerHour.base_query).to eq("SELECT time_bucket('1 hour', ts) as ts, identifier, sum(total) as total FROM \"by_identifier_per_minute\" GROUP BY time_bucket('1 hour', ts), identifier")
expect(test_class::PurchaseStatsPerMinute.base_query).to eq("SELECT time_bucket('1 minute', ts) as ts, stats_agg(cast(payload->>'price' as float)) as stats_agg FROM \"hypertable_with_continuous_aggregates\" WHERE (identifier = 'purchase') GROUP BY time_bucket('1 minute', ts)")
expect(test_class::PurchaseStatsPerHour.base_query).to eq("SELECT time_bucket('1 hour', ts) as ts, rollup(stats_agg) as stats_agg FROM \"purchase_stats_per_minute\" GROUP BY time_bucket('1 hour', ts)")
expect(test_class::PurchaseStatsPerDay.base_query).to eq("SELECT time_bucket('1 day', ts) as ts, rollup(stats_agg) as stats_agg FROM \"purchase_stats_per_hour\" GROUP BY time_bucket('1 day', ts)")
expect(test_class::PurchaseStatsPerMonth.base_query).to eq("SELECT time_bucket('1 month', ts) as ts, rollup(stats_agg) as stats_agg FROM \"purchase_stats_per_day\" GROUP BY time_bucket('1 month', ts)")
end
end
# Spies on the connection and asserts, per view name, that the expected
# CREATE MATERIALIZED VIEW and refresh-policy statements were issued.
describe '.create_continuous_aggregates' do
  before do
    allow(ActiveRecord::Base.connection).to receive(:execute).and_call_original
  end

  it 'creates materialized views for each aggregate' do
    test_class.create_continuous_aggregates

    %w[
      total_per_minute total_per_hour total_per_day total_per_month
      by_version_per_day by_version_per_hour by_version_per_minute
      by_identifier_per_month
      purchase_stats_per_minute purchase_stats_per_hour
      purchase_stats_per_day purchase_stats_per_month
    ].each do |view|
      expect(ActiveRecord::Base.connection)
        .to have_received(:execute).with(/CREATE MATERIALIZED VIEW IF NOT EXISTS #{view}/i)
    end
  end

  it 'sets up refresh policies for each aggregate' do
    test_class.create_continuous_aggregates

    %w[
      total_per_minute total_per_hour total_per_day
      by_version_per_hour by_identifier_per_day by_identifier_per_month
      by_version_per_day by_version_per_month
      purchase_stats_per_minute purchase_stats_per_hour
      purchase_stats_per_day purchase_stats_per_month
    ].each do |view|
      expect(ActiveRecord::Base.connection)
        .to have_received(:execute).with(/add_continuous_aggregate_policy.*#{view}/i)
    end
  end
end
# Each timeframe has a fixed refresh policy shared by every aggregate
# family (Total, ByVersion, ByIdentifier, PurchaseStats).
describe 'refresh policies' do
it 'defines appropriate refresh policies for each timeframe' do
policies = {
minute: { start_offset: "10 minutes", end_offset: "1 minute", schedule_interval: "1 minute" },
hour: { start_offset: "4 hour", end_offset: "1 hour", schedule_interval: "1 hour" },
day: { start_offset: "3 day", end_offset: "1 day", schedule_interval: "1 hour" },
month: { start_offset: "3 month", end_offset: "1 hour", schedule_interval: "1 hour" }
}
policies.each do |timeframe, expected_policy|
%w[Total ByVersion ByIdentifier PurchaseStats].each do |klass|
# e.g. TotalPerMinute, ByVersionPerHour, ...
actual_policy = test_class.const_get("#{klass}Per#{timeframe.to_s.capitalize}").refresh_policy
expect(actual_policy).to eq(expected_policy)
end
end
end
end
# All sixteen views (4 families x 4 timeframes) must be dropped.
describe '.drop_continuous_aggregates' do
  before do
    allow(ActiveRecord::Base.connection).to receive(:execute).and_call_original
  end

  it 'drops all continuous aggregates' do
    test_class.drop_continuous_aggregates

    %w[total by_version by_identifier purchase_stats].each do |family|
      %w[month day hour minute].each do |timeframe|
        expect(ActiveRecord::Base.connection)
          .to have_received(:execute)
          .with(/DROP MATERIALIZED VIEW IF EXISTS #{family}_per_#{timeframe} CASCADE/i)
      end
    end
  end
end
end | ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/stats/job_stats_spec.rb | spec/timescaledb/stats/job_stats_spec.rb | RSpec.describe Timescaledb::Stats::JobStats do
# Smoke test: JobStats reads live database state, so only the shape of
# the returned hash is asserted, not exact values.
subject(:stats) { described_class.new }
describe '.to_h' do
it 'returns expected structure' do
expect(stats.to_h).to match(
a_hash_including(failures: a_kind_of(Integer), runs: a_kind_of(Integer), success: a_kind_of(Integer))
)
end
end
end | ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/stats/hypertables_spec.rb | spec/timescaledb/stats/hypertables_spec.rb | RSpec.describe Timescaledb::Stats::Hypertables do
# Stats are computed from the live timescaledb_information.hypertables
# catalog, so shape matchers are used instead of exact values.
let(:hypertables) { Timescaledb.connection.query('SELECT * FROM timescaledb_information.hypertables') }
subject(:stats) { described_class.new(hypertables) }
describe '.to_h' do
it 'returns expected structure' do
# Hash literal instead of Hash.new (idiomatic, same behavior); keys are
# "schema.table" names, values are type matchers.
approximate_row_count = hypertables.each_with_object({}) do |hypertable, count|
name = [hypertable.hypertable_schema, hypertable.hypertable_name].join('.')
count[name] = a_kind_of(Integer)
end
expect(stats.to_h).to match(
a_hash_including(
approximate_row_count: approximate_row_count,
chunks: { compressed: a_kind_of(Integer), total: a_kind_of(Integer), uncompressed: a_kind_of(Integer) },
count: hypertables.count,
size: {
compressed: a_kind_of(String),
uncompressed: a_kind_of(String)
},
uncompressed_count: a_kind_of(Integer)
)
)
end
end
end | ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/stats/continuous_aggregates_spec.rb | spec/timescaledb/stats/continuous_aggregates_spec.rb | RSpec.describe Timescaledb::Stats::ContinuousAggregates do
# Smoke test: only asserts the hash shape, since totals depend on the
# aggregates present in the test database.
subject(:stats) { described_class.new }
describe '.to_h' do
it 'returns expected structure' do
expect(stats.to_h).to match(a_hash_including(total: a_kind_of(Integer)))
end
end
end | ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/stats/chunks_spec.rb | spec/timescaledb/stats/chunks_spec.rb | RSpec.describe Timescaledb::Stats::Chunks do
# Chunk stats are derived from the live hypertables catalog; only the
# shape of the result is asserted.
let(:hypertables) { Timescaledb.connection.query('SELECT * FROM timescaledb_information.hypertables') }
subject(:stats) { described_class.new(hypertables) }
describe '.to_h' do
it 'returns expected structure' do
expect(stats.to_h).to match(
a_hash_including(compressed: a_kind_of(Integer), total: a_kind_of(Integer), uncompressed: a_kind_of(Integer))
)
end
end
end | ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/database/quoting_spec.rb | spec/timescaledb/database/quoting_spec.rb | # frozen_string_literal: true
require 'spec_helper'
require 'timescaledb/database'
RSpec.describe Timescaledb::Database do
# .quote wraps a value in single quotes, doubling embedded single quotes
# and doubling backslashes — SQL string-literal escaping.
describe '.quote' do
it 'wraps given text between single quotes' do
expect(described_class.quote('events')).to eq("'events'")
end
context 'when including single quotes' do
it 'escapes those characters' do
expect(described_class.quote("event's")).to eq("'event''s'")
end
end
context 'when including backslashes' do
it 'escapes those characters' do
expect(described_class.quote("ev\\ents")).to eq("'ev\\\\ents'")
end
end
context 'when including a mix of single quote and backslash characters' do
it 'escapes all characters' do
expect(described_class.quote("ev\\ent's")).to eq("'ev\\\\ent''s'")
end
end
end
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/database/types_spec.rb | spec/timescaledb/database/types_spec.rb | # frozen_string_literal: true
require 'spec_helper'
require 'timescaledb/database'
RSpec.describe Timescaledb::Database do
# interval_to_sql: nil -> NULL, Integer passed through raw,
# String -> INTERVAL literal.
describe '.interval_to_sql' do
context 'when passing nil' do
it 'returns NULL' do
expect(described_class.interval_to_sql(nil)).to eq('NULL')
end
end
context 'when passing an integer' do
it 'returns raw integer value' do
expect(described_class.interval_to_sql(60*60*24)).to eq(86400)
end
end
context 'when passing a string' do
it 'returns the interval SQL statement' do
expect(described_class.interval_to_sql('1 day')).to eq("INTERVAL '1 day'")
end
end
end
# boolean_to_sql: truthy -> 'TRUE'; false and nil both map to 'FALSE'.
describe '.boolean_to_sql' do
context 'when passing true' do
it 'returns expected SQL value' do
expect(described_class.boolean_to_sql(true)).to eq("'TRUE'")
end
end
context 'when passing false' do
it 'returns expected SQL value' do
expect(described_class.boolean_to_sql(false)).to eq("'FALSE'")
end
end
context 'when passing nil' do
it 'returns expected SQL value' do
expect(described_class.boolean_to_sql(nil)).to eq("'FALSE'")
end
end
end
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/database/chunk_statements_spec.rb | spec/timescaledb/database/chunk_statements_spec.rb | # frozen_string_literal: true
require 'spec_helper'
require 'timescaledb/database'
RSpec.describe Timescaledb::Database do
# compress/decompress produce symmetrical SELECT statements, so both
# describes are generated from the operation name.
%w[compress decompress].each do |operation|
  describe ".#{operation}_chunk_sql" do
    it 'returns expected SQL' do
      expect(
        described_class.public_send("#{operation}_chunk_sql", '_timescaledb_internal._hyper_1_2_chunk')
      ).to eq("SELECT #{operation}_chunk('_timescaledb_internal._hyper_1_2_chunk');")
    end
  end
end
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/database/schema_statements_spec.rb | spec/timescaledb/database/schema_statements_spec.rb | # frozen_string_literal: true
require 'spec_helper'
require 'timescaledb/database'
RSpec.describe Timescaledb::Database do
# create_hypertable_sql renders positional args (partitioning column and
# partition count) before keyword args, typed per SQL value kind.
describe '.create_hypertable_sql' do
context 'when passing only required params' do
it 'returns expected SQL' do
expect(
described_class.create_hypertable_sql('events', 'created_at')
).to eq("SELECT create_hypertable('events', 'created_at');")
end
end
context 'when passing both partitioning_column and number_partitions' do
it 'returns expected SQL' do
expect(
described_class.create_hypertable_sql('events', 'created_at', partitioning_column: 'category', number_partitions: 3)
).to eq("SELECT create_hypertable('events', 'created_at', 'category', 3);")
end
end
context 'when passing interval params' do
it 'returns expected SQL' do
expect(
described_class.create_hypertable_sql('events', 'created_at', chunk_time_interval: '1 week')
).to eq("SELECT create_hypertable('events', 'created_at', chunk_time_interval => INTERVAL '1 week');")
end
end
context 'when passing boolean params' do
it 'returns expected SQL' do
optional_params = { if_not_exists: true, create_default_indexes: true, migrate_data: false, distributed: false }
expect(
described_class.create_hypertable_sql('events', 'created_at', **optional_params)
).to eq("SELECT create_hypertable('events', 'created_at', if_not_exists => 'TRUE', create_default_indexes => 'TRUE', migrate_data => 'FALSE', distributed => 'FALSE');")
end
end
context 'when passing string params' do
it 'returns expected SQL' do
optional_params = {
partitioning_func: 'category_func',
associated_schema_name: '_timescaledb',
associated_table_prefix: '_hypertable',
time_partitioning_func: 'created_at_func'
}
expect(
described_class.create_hypertable_sql('events', 'created_at', **optional_params)
).to eq("SELECT create_hypertable('events', 'created_at', partitioning_func => 'category_func', associated_schema_name => '_timescaledb', associated_table_prefix => '_hypertable', time_partitioning_func => 'created_at_func');")
end
end
# Positional args stay first even when mixed with keyword args.
context 'when passing a mix of param types' do
it 'returns expected SQL' do
optional_params = {
if_not_exists: true,
partitioning_column: 'category',
number_partitions: 3,
partitioning_func: 'category_func',
distributed: false
}
expect(
described_class.create_hypertable_sql('events', 'created_at', **optional_params)
).to eq("SELECT create_hypertable('events', 'created_at', 'category', 3, if_not_exists => 'TRUE', partitioning_func => 'category_func', distributed => 'FALSE');")
end
end
end
# ALTER TABLE ... SET (timescaledb.compress) with optional
# compress_orderby / compress_segmentby settings appended.
describe '.enable_hypertable_compression_sql' do
context 'when passing only hypertable params' do
it 'returns expected SQL' do
expect(
described_class.enable_hypertable_compression_sql('events')
).to eq("ALTER TABLE events SET (timescaledb.compress);")
end
end
context 'when passing compress_orderby' do
it 'returns expected SQL' do
expect(
described_class.enable_hypertable_compression_sql('events', compress_orderby: 'timestamp DESC')
).to eq("ALTER TABLE events SET (timescaledb.compress, timescaledb.compress_orderby = 'timestamp DESC');")
end
end
context 'when passing compress_segmentby' do
it 'returns expected SQL' do
expect(
described_class.enable_hypertable_compression_sql('events', compress_segmentby: 'identifier')
).to eq("ALTER TABLE events SET (timescaledb.compress, timescaledb.compress_segmentby = 'identifier');")
end
end
context 'when passing all params' do
it 'returns expected SQL' do
expect(
described_class.enable_hypertable_compression_sql('events', compress_orderby: 'timestamp DESC', compress_segmentby: 'identifier')
).to eq("ALTER TABLE events SET (timescaledb.compress, timescaledb.compress_orderby = 'timestamp DESC', timescaledb.compress_segmentby = 'identifier');")
end
end
end
# Disabling compression emits a single ALTER TABLE statement.
describe '.disable_hypertable_compression_sql' do
  it 'returns expected SQL' do
    sql = described_class.disable_hypertable_compression_sql('events')

    expect(sql).to eq("ALTER TABLE events SET (timescaledb.compress = FALSE);")
  end
end
# add_compression_policy takes a required compress-after interval plus
# optional initial_start / timezone / if_not_exists keyword args.
describe '.add_compression_policy_sql' do
context 'when passing only required params' do
it 'returns expected SQL' do
expect(
described_class.add_compression_policy_sql('events', '1 day')
).to eq("SELECT add_compression_policy('events', INTERVAL '1 day');")
end
end
context 'when passing initial_start param' do
it 'returns expected SQL' do
expect(
described_class.add_compression_policy_sql('events', '1 day', initial_start: '2023-01-01 10:00:00')
).to eq("SELECT add_compression_policy('events', INTERVAL '1 day', initial_start => '2023-01-01 10:00:00');")
end
end
context 'when passing timezone param' do
it 'returns expected SQL' do
expect(
described_class.add_compression_policy_sql('events', '1 day', timezone: 'America/Montevideo')
).to eq("SELECT add_compression_policy('events', INTERVAL '1 day', timezone => 'America/Montevideo');")
end
end
context 'when passing all params' do
it 'returns expected SQL' do
expect(
described_class.add_compression_policy_sql('events', '1 day', initial_start: '2023-01-01 10:00:00', timezone: 'America/Montevideo', if_not_exists: false)
).to eq("SELECT add_compression_policy('events', INTERVAL '1 day', initial_start => '2023-01-01 10:00:00', timezone => 'America/Montevideo', if_not_exists => 'FALSE');")
end
end
end
# remove_compression_policy only takes the hypertable name plus an
# optional if_exists flag; both call shapes are covered.
describe '.remove_compression_policy_sql' do
  context 'when passing only required params' do
    it 'returns expected SQL' do
      sql = described_class.remove_compression_policy_sql('events')

      expect(sql).to eq("SELECT remove_compression_policy('events');")
    end
  end

  context 'when passing if_exists param' do
    it 'returns expected SQL' do
      sql = described_class.remove_compression_policy_sql('events', if_exists: true)

      expect(sql).to eq("SELECT remove_compression_policy('events', if_exists => 'TRUE');")
    end
  end
end
# add_retention_policy mirrors add_compression_policy: required
# drop-after interval plus optional keyword args.
describe '.add_retention_policy_sql' do
context 'when passing only required params' do
it 'returns expected SQL' do
expect(
described_class.add_retention_policy_sql('events', '1 day')
).to eq("SELECT add_retention_policy('events', INTERVAL '1 day');")
end
end
context 'when passing initial_start param' do
it 'returns expected SQL' do
expect(
described_class.add_retention_policy_sql('events', '1 day', initial_start: '2023-01-01 10:00:00')
).to eq("SELECT add_retention_policy('events', INTERVAL '1 day', initial_start => '2023-01-01 10:00:00');")
end
end
context 'when passing timezone param' do
it 'returns expected SQL' do
expect(
described_class.add_retention_policy_sql('events', '1 day', timezone: 'America/Montevideo')
).to eq("SELECT add_retention_policy('events', INTERVAL '1 day', timezone => 'America/Montevideo');")
end
end
context 'when passing all params' do
it 'returns expected SQL' do
expect(
described_class.add_retention_policy_sql('events', '1 day', initial_start: '2023-01-01 10:00:00', timezone: 'America/Montevideo', if_not_exists: false)
).to eq("SELECT add_retention_policy('events', INTERVAL '1 day', initial_start => '2023-01-01 10:00:00', timezone => 'America/Montevideo', if_not_exists => 'FALSE');")
end
end
end
# remove_retention_policy only takes the hypertable name plus an
# optional if_exists flag; both call shapes are covered.
describe '.remove_retention_policy_sql' do
  context 'when passing only required params' do
    it 'returns expected SQL' do
      sql = described_class.remove_retention_policy_sql('events')

      expect(sql).to eq("SELECT remove_retention_policy('events');")
    end
  end

  context 'when passing if_exists param' do
    it 'returns expected SQL' do
      sql = described_class.remove_retention_policy_sql('events', if_exists: true)

      expect(sql).to eq("SELECT remove_retention_policy('events', if_exists => 'TRUE');")
    end
  end
end
# add_reorder_policy takes the index name positionally (no INTERVAL),
# plus the same optional keyword args as the other policy helpers.
describe '.add_reorder_policy_sql' do
context 'when passing only required params' do
it 'returns expected SQL' do
expect(
described_class.add_reorder_policy_sql('events', 'index_name')
).to eq("SELECT add_reorder_policy('events', 'index_name');")
end
end
context 'when passing initial_start param' do
it 'returns expected SQL' do
expect(
described_class.add_reorder_policy_sql('events', 'index_name', initial_start: '2023-01-01 10:00:00')
).to eq("SELECT add_reorder_policy('events', 'index_name', initial_start => '2023-01-01 10:00:00');")
end
end
context 'when passing timezone param' do
it 'returns expected SQL' do
expect(
described_class.add_reorder_policy_sql('events', 'index_name', timezone: 'America/Montevideo')
).to eq("SELECT add_reorder_policy('events', 'index_name', timezone => 'America/Montevideo');")
end
end
context 'when passing all params' do
it 'returns expected SQL' do
expect(
described_class.add_reorder_policy_sql('events', 'index_name', initial_start: '2023-01-01 10:00:00', timezone: 'America/Montevideo', if_not_exists: false)
).to eq("SELECT add_reorder_policy('events', 'index_name', initial_start => '2023-01-01 10:00:00', timezone => 'America/Montevideo', if_not_exists => 'FALSE');")
end
end
end
# remove_reorder_policy only takes the hypertable name plus an optional
# if_exists flag; both call shapes are covered.
describe '.remove_reorder_policy_sql' do
  context 'when passing only required params' do
    it 'returns expected SQL' do
      sql = described_class.remove_reorder_policy_sql('events')

      expect(sql).to eq("SELECT remove_reorder_policy('events');")
    end
  end

  context 'when passing if_exists param' do
    it 'returns expected SQL' do
      sql = described_class.remove_reorder_policy_sql('events', if_exists: true)

      expect(sql).to eq("SELECT remove_reorder_policy('events', if_exists => 'TRUE');")
    end
  end
end
# .create_continuous_aggregate_sql wraps a query in a
# CREATE MATERIALIZED VIEW ... WITH (timescaledb.continuous) statement.
describe '.create_continuous_aggregate_sql' do
  # The aggregate query being wrapped.
  let(:sql) {
    <<~SQL
      SELECT time_bucket('1 day', created_at) bucket, COUNT(*)
      FROM activity
      GROUP BY bucket
    SQL
  }

  context 'when passing only required params' do
    it 'returns expected SQL' do
      # Default is an eagerly-populated view (WITH DATA).
      expected_sql = <<~SQL
        CREATE MATERIALIZED VIEW activity_counts
        WITH (timescaledb.continuous) AS
        SELECT time_bucket('1 day', created_at) bucket, COUNT(*)
        FROM activity
        GROUP BY bucket
        WITH DATA;
      SQL
      expect(
        described_class.create_continuous_aggregate_sql('activity_counts', sql)
      ).to eq(expected_sql)
    end
  end

  context 'when passing with_no_data param' do
    it 'returns expected SQL' do
      # with_no_data: true defers materialization (WITH NO DATA).
      expected_sql = <<~SQL
        CREATE MATERIALIZED VIEW activity_counts
        WITH (timescaledb.continuous) AS
        SELECT time_bucket('1 day', created_at) bucket, COUNT(*)
        FROM activity
        GROUP BY bucket
        WITH NO DATA;
      SQL
      expect(
        described_class.create_continuous_aggregate_sql('activity_counts', sql, with_no_data: true)
      ).to eq(expected_sql)
    end
  end
end
# .drop_continuous_aggregate_sql builds the DROP MATERIALIZED VIEW statement.
describe '.drop_continuous_aggregate_sql' do
  it 'returns expected SQL' do
    sql = described_class.drop_continuous_aggregate_sql('activity_counts')
    expect(sql).to eq("DROP MATERIALIZED VIEW activity_counts;")
  end

  context 'when passing cascade true' do
    it 'returns expected SQL' do
      sql = described_class.drop_continuous_aggregate_sql('activity_counts', cascade: true)
      expect(sql).to eq("DROP MATERIALIZED VIEW activity_counts CASCADE;")
    end
  end
end
# .add_continuous_aggregate_policy_sql builds the SELECT that attaches a
# refresh policy to a continuous aggregate; offsets become INTERVAL
# literals (or NULL when nil).
describe '.add_continuous_aggregate_policy_sql' do
  context 'when missing required params' do
    it 'raises ArgumentError' do
      # schedule_interval is a required keyword argument.
      expect {
        described_class.add_continuous_aggregate_policy_sql('activity_counts')
      }.to raise_error(ArgumentError, 'missing keyword: :schedule_interval')
    end
  end

  context 'when passing only required params' do
    it 'returns expected SQL' do
      expect(
        described_class.add_continuous_aggregate_policy_sql(
          'activity_counts',
          start_offset: '1 month',
          end_offset: '1 day',
          schedule_interval: '1 hour'
        )
      ).to eq("SELECT add_continuous_aggregate_policy('activity_counts', start_offset => INTERVAL '1 month', end_offset => INTERVAL '1 day', schedule_interval => INTERVAL '1 hour');")
    end

    # NOTE(review): this context is nested inside 'when passing only required
    # params'; it reads as if it was meant to be a sibling context.
    context 'having null offset values' do
      it 'returns expected SQL' do
        # nil offsets are emitted as literal NULL, not omitted.
        expect(
          described_class.add_continuous_aggregate_policy_sql(
            'activity_counts',
            start_offset: nil,
            end_offset: nil,
            schedule_interval: '1 hour'
          )
        ).to eq("SELECT add_continuous_aggregate_policy('activity_counts', start_offset => NULL, end_offset => NULL, schedule_interval => INTERVAL '1 hour');")
      end
    end
  end

  context 'when passing initial_start' do
    it 'returns expected SQL' do
      expect(
        described_class.add_continuous_aggregate_policy_sql(
          'activity_counts',
          start_offset: nil,
          end_offset: nil,
          schedule_interval: '1 hour',
          initial_start: "2023-02-08 20:00:00"
        )
      ).to eq("SELECT add_continuous_aggregate_policy('activity_counts', start_offset => NULL, end_offset => NULL, schedule_interval => INTERVAL '1 hour', initial_start => '2023-02-08 20:00:00');")
    end
  end

  context 'when passing timezone' do
    it 'returns expected SQL' do
      expect(
        described_class.add_continuous_aggregate_policy_sql(
          'activity_counts',
          start_offset: nil,
          end_offset: nil,
          schedule_interval: '1 hour',
          timezone: "America/Montevideo"
        )
      ).to eq("SELECT add_continuous_aggregate_policy('activity_counts', start_offset => NULL, end_offset => NULL, schedule_interval => INTERVAL '1 hour', timezone => 'America/Montevideo');")
    end
  end
end
# .remove_continuous_aggregate_policy_sql builds the SELECT that detaches
# a refresh policy from a continuous aggregate.
describe '.remove_continuous_aggregate_policy_sql' do
  it 'returns expected SQL' do
    sql = described_class.remove_continuous_aggregate_policy_sql('activity_counts')
    expect(sql).to eq("SELECT remove_continuous_aggregate_policy('activity_counts');")
  end

  context 'when passing if_not_exists' do
    it 'returns expected SQL' do
      sql = described_class.remove_continuous_aggregate_policy_sql('activity_counts', if_not_exists: true)
      expect(sql).to eq("SELECT remove_continuous_aggregate_policy('activity_counts', if_not_exists => 'TRUE');")
    end
  end
end
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/spec/timescaledb/database/hypertable_statements_spec.rb | spec/timescaledb/database/hypertable_statements_spec.rb | # frozen_string_literal: true
require 'spec_helper'
require 'timescaledb/database'
# SQL builders for the hypertable size/statistics functions.
RSpec.describe Timescaledb::Database do
  describe '.hypertable_size_sql' do
    it 'returns expected SQL' do
      generated = described_class.hypertable_size_sql('events')
      expect(generated).to eq("SELECT hypertable_size('events');")
    end
  end

  describe '.hypertable_detailed_size_sql' do
    it 'returns expected SQL' do
      generated = described_class.hypertable_detailed_size_sql('events')
      expect(generated).to eq("SELECT * FROM hypertable_detailed_size('events');")
    end
  end

  describe '.hypertable_index_size_sql' do
    it 'returns expected SQL' do
      generated = described_class.hypertable_index_size_sql('second_index')
      expect(generated).to eq("SELECT hypertable_index_size('second_index');")
    end
  end

  describe '.chunks_detailed_size_sql' do
    it 'returns expected SQL' do
      generated = described_class.chunks_detailed_size_sql('events')
      expect(generated).to eq("SELECT * FROM chunks_detailed_size('events');")
    end
  end
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/examples/toolkit-demo/ohlc.rb | examples/toolkit-demo/ohlc.rb | # ruby ohlc.rb postgres://user:pass@host:port/db_name
# @see https://timescale.github.io/timescaledb-ruby/toolkit_ohlc/
require 'bundler/inline' #require only what you need
gemfile(true) do
gem 'timescaledb', path: '../..'
gem 'pry'
end
ActiveRecord::Base.establish_connection ARGV.last
# Compare ohlc processing in Ruby vs SQL.
# Raw trade ticks, stored in a TimescaleDB hypertable partitioned on "time".
# segment_by/value_column feed the toolkit helpers (e.g. the candlestick scope).
class Tick < ActiveRecord::Base
  acts_as_hypertable time_column: "time",
    segment_by: "symbol",
    value_column: "price"
end
require "active_support/concern"
# Shared behavior for the OHLC (open/high/low/close) view-backed models.
# Assumes the underlying view exposes `symbol`, `time` and an `ohlc`
# aggregate column built with toolkit_experimental.
module Ohlc
  extend ActiveSupport::Concern

  included do
    # Cast the unpacked accessors so ActiveRecord returns decimals/times
    # instead of raw strings.
    %w[open high low close].each do |name|
      attribute name, :decimal
      attribute "#{name}_time", :time
    end

    # Unpacks the ohlc aggregate into scalar columns.
    scope :attributes, -> do
      select("symbol, time,
toolkit_experimental.open(ohlc),
toolkit_experimental.high(ohlc),
toolkit_experimental.low(ohlc),
toolkit_experimental.close(ohlc),
toolkit_experimental.open_time(ohlc),
toolkit_experimental.high_time(ohlc),
toolkit_experimental.low_time(ohlc),
toolkit_experimental.close_time(ohlc)")
    end

    # Re-buckets the ohlc aggregates into a coarser timeframe.
    scope :rollup, -> (timeframe: '1h') do
      select("symbol, time_bucket('#{timeframe}', time) as time,
toolkit_experimental.rollup(ohlc) as ohlc")
        .group(1,2)
    end

    # These models are backed by views; never write through them.
    def readonly?
      true
    end
  end

  # No class-level methods yet; kept for the Concern structure.
  class_methods do
  end
end
# 1-minute candlesticks (continuous aggregate created below).
class Ohlc1m < ActiveRecord::Base
  self.table_name = 'ohlc_1m'
  include Ohlc
end

# Hourly rollup view over ohlc_1m.
class Ohlc1h < ActiveRecord::Base
  self.table_name = 'ohlc_1h'
  include Ohlc
end

# Daily rollup view over ohlc_1h.
class Ohlc1d < ActiveRecord::Base
  self.table_name = 'ohlc_1d'
  include Ohlc
end
# Toolkit functions live in the toolkit_experimental schema; expose them.
ActiveRecord::Base.connection.add_toolkit_to_search_path!

# Idempotent schema setup: hypertable, 1-minute continuous aggregate, and
# plain views rolling it up to 1 hour and 1 day.
ActiveRecord::Base.connection.instance_exec do
  ActiveRecord::Base.logger = Logger.new(STDOUT)

  unless Tick.table_exists?
    hypertable_options = {
      time_column: 'time',
      chunk_time_interval: '1 week',
      compress_segmentby: 'symbol',
      compress_orderby: 'time',
      compress_after: '1 month'
    }
    create_table :ticks, hypertable: hypertable_options, id: false do |t|
      t.column :time , 'timestamp with time zone'
      t.string :symbol
      t.decimal :price
      t.integer :volume
    end

    options = {
      with_data: false,
      refresh_policies: {
        start_offset: "INTERVAL '1 month'",
        end_offset: "INTERVAL '1 minute'",
        schedule_interval: "INTERVAL '1 minute'"
      }
    }

    # _candlestick builds the toolkit ohlc query for the given timeframe.
    create_continuous_aggregate('ohlc_1m', Tick._candlestick(timeframe: '1m'), **options)
    # Coarser timeframes are plain views rolling up the finer one.
    execute "CREATE VIEW ohlc_1h AS #{ Ohlc1m.rollup(timeframe: '1 hour').to_sql}"
    execute "CREATE VIEW ohlc_1d AS #{ Ohlc1h.rollup(timeframe: '1 day').to_sql}"
  end
end

# Seed a month of secondly synthetic ticks on first run.
if Tick.count.zero?
  ActiveRecord::Base.connection.execute(<<~SQL)
    INSERT INTO ticks
    SELECT time, 'SYMBOL', 1 + (random()*30)::int, 100*(random()*10)::int
    FROM generate_series(TIMESTAMP '2022-01-01 00:00:00',
    TIMESTAMP '2022-02-01 00:01:00',
    INTERVAL '1 second') AS time;
  SQL
end
# Fetch attributes
# Unpack the 1-minute candlesticks into scalar columns.
Ohlc1m.attributes

# Rollup demo
# Attributes from rollup
Ohlc1m.attributes.from(Ohlc1m.rollup(timeframe: '1 day'))

# Nesting several levels: each rollup re-buckets the previous one.
Ohlc1m.attributes.from(
  Ohlc1m.rollup(timeframe: '1 week').from(
    Ohlc1m.rollup(timeframe: '1 day')
  )
)
Ohlc1m.attributes.from(
  Ohlc1m.rollup(timeframe: '1 month').from(
    Ohlc1m.rollup(timeframe: '1 week').from(
      Ohlc1m.rollup(timeframe: '1 day')
    )
  )
)

# Drop into an interactive console for further exploration.
Pry.start
=begin
TODO: implement the ohlc_ruby
Benchmark.bm do |x|
x.report("ruby") { Tick.ohlc_ruby }
x.report("sql") { Tick.ohlc.map(&:attributes) }
end
=end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/examples/toolkit-demo/compare_volatility.rb | examples/toolkit-demo/compare_volatility.rb | # ruby compare_volatility.rb postgres://user:pass@host:port/db_name
require 'bundler/inline' #require only what you need
gemfile(true) do
gem 'timescaledb', path: '../..'
gem 'pry'
end
# TODO: get the volatility using the window function with plain postgresql
ActiveRecord::Base.establish_connection ARGV.last
# Compare volatility processing in Ruby vs SQL.
# Hypertable model used to compare computing per-device "volatility"
# (sum of absolute deltas between consecutive readings) in SQL vs. Ruby.
class Measurement < ActiveRecord::Base
  acts_as_hypertable time_column: "ts",
    segment_by: "device_id",
    value_column: "val"

  # SQL version: toolkit timevector pipeline — sort, delta, abs, sum.
  scope :volatility_sql, -> do
    select("device_id, timevector(#{time_column}, #{value_column}) -> sort() -> delta() -> abs() -> sum() as volatility")
      .group("device_id")
  end

  # Ruby version: iterates every row (loads the whole table into memory)
  # accumulating absolute deltas per device.
  # NOTE(review): relies on find_all yielding rows in time order — verify.
  scope :volatility_ruby, -> {
    volatility = Hash.new(0)
    previous = Hash.new
    find_all do |measurement|
      device_id = measurement.device_id
      if previous[device_id]
        delta = (measurement.val - previous[device_id]).abs
        volatility[device_id] += delta
      end
      previous[device_id] = measurement.val
    end
    volatility
  }

  # { device_id => [val, ...] } with values ordered by ts, in one query.
  scope :values_from_devices, -> {
    ordered_values = select(:val, :device_id).order(:ts)
    Hash[
      from(ordered_values)
        .group(:device_id)
        .pluck("device_id, array_agg(val)")
    ]
  }
end
# Pure-Ruby reference implementation of the "volatility" aggregate:
# the sum of absolute deltas between consecutive values.
#
# Fix: the original accumulated a leading nil into the deltas array and
# shifted it off afterwards; replaced with the idiomatic each_cons(2).sum,
# which behaves identically (returns 0 for empty/single-element input).
class Volatility
  # Sum of absolute differences between each consecutive pair of values.
  def self.process(values)
    values.each_cons(2).sum { |previous, current| (current - previous).abs }
  end

  # Applies .process to every value of the hash, preserving keys.
  def self.process_values(map)
    map.transform_values(&method(:process))
  end
end
# Toolkit pipeline functions live in toolkit_experimental; add to search path.
ActiveRecord::Base.connection.add_toolkit_to_search_path!

# Create the hypertable on first run.
ActiveRecord::Base.connection.instance_exec do
  ActiveRecord::Base.logger = Logger.new(STDOUT)

  unless Measurement.table_exists?
    hypertable_options = {
      time_column: 'ts',
      chunk_time_interval: '1 day',
    }
    create_table :measurements, hypertable: hypertable_options, id: false do |t|
      t.integer :device_id
      t.decimal :val
      t.timestamp :ts
    end
  end
end

# Seed a month of 5-minute readings for six devices on first run.
if Measurement.count.zero?
  ActiveRecord::Base.connection.execute(<<~SQL)
    INSERT INTO measurements (ts, device_id, val)
    SELECT ts, device_id, random()*80
    FROM generate_series(TIMESTAMP '2022-01-01 00:00:00',
    TIMESTAMP '2022-02-01 00:00:00',
    INTERVAL '5 minutes') AS g1(ts),
    generate_series(0, 5) AS g2(device_id);
  SQL
end
# Benchmark the approaches. "fetch" and "process" split the Ruby version
# into data transfer vs. computation; "process" reuses what "fetch" stored,
# so the report order matters.
volatilities = nil
#ActiveRecord::Base.logger = nil
Benchmark.bm do |x|
  x.report("sql") { Measurement.volatility_sql.map(&:attributes) }
  x.report("ruby") { Measurement.volatility_ruby }
  x.report("fetch") { volatilities = Measurement.values_from_devices }
  x.report("process") { Volatility.process_values(volatilities) }
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/examples/toolkit-demo/candlestick.rb | examples/toolkit-demo/candlestick.rb | # ruby candlestick.rb postgres://user:pass@host:port/db_name
# @see https://timescale.github.io/timescaledb-ruby/toolkit_candlestick/
require 'bundler/inline' #require only what you need
gemfile(true) do
gem 'timescaledb', path: '../..'
gem 'pry'
gem 'puma'
gem 'sinatra'
gem 'sinatra-contrib'
gem 'sinatra-reloader'
end
ActiveRecord::Base.establish_connection ARGV.first
# Runs the given block in the context of the ActiveRecord connection with
# SQL logging to STDOUT, then silences the logger again.
#
# Fix: the logger is reset in an ensure clause so an exception raised inside
# the block no longer leaves STDOUT logging switched on.
def db(&block)
  ActiveRecord::Base.logger = Logger.new(STDOUT)
  ActiveRecord::Base.connection.instance_exec(&block)
ensure
  ActiveRecord::Base.logger = nil
end
class Tick < ActiveRecord::Base
extend Timescaledb::ActsAsHypertable
include Timescaledb:: ContinuousAggregatesHelper
acts_as_hypertable time_column: "time",
segment_by: "symbol",
value_column: "price"
scope :plotly_candlestick, -> (from: nil) do
data = ohlcv.to_a
{
type: 'candlestick',
xaxis: 'x',
yaxis: 'y',
x: data.map(&:time),
open: data.map(&:open),
high: data.map(&:high),
low: data.map(&:low),
close: data.map(&:close),
volume: data.map(&:volume)
}
end
continuous_aggregates(
timeframes: [:minute, :hour, :day, :month],
scopes: [:_candlestick]
)
descendants.each do |cagg|
cagg.class_eval do
self.time_vector_options = time_vector_options.merge(value_column: :close)
[:open, :high, :low, :close].each do |attr|
attribute attr, :decimal, precision: 10, scale: 2
end
[:volume, :vwap].each do |attr|
attribute attr, :integer
end
[:open_time, :high_time, :low_time, :close_time].each do |attr|
attribute attr, :time
end
scope :ohlcv, -> do
unscoped
.from("(#{to_sql}) AS candlestick")
.select(time_column, *segment_by_column,
"open(candlestick),
high(candlestick),
low(candlestick),
close(candlestick),
open_time(candlestick),
high_time(candlestick),
low_time(candlestick),
close_time(candlestick),
volume(candlestick),
vwap(candlestick)")
end
end
end
end
# Schema + seed, executed with SQL logging via the db helper above.
db do
  # NOTE(review): `if true` looks like a leftover toggle (see the commented
  # drop statements) guarding the schema setup.
  if true
    #Tick.drop_continuous_aggregates
    #drop_table :ticks, if_exists: true, force: :cascade
    hypertable_options = {
      time_column: "time",
      chunk_time_interval: "1 day",
      compress_segmentby: "symbol",
      compress_orderby: "time",
      compress_after: "1 week"
    }
    create_table :ticks, id: false, hypertable: hypertable_options, if_not_exists: true do |t|
      t.timestamptz :time, null: false
      t.string :symbol, null: false
      t.decimal :price
      t.integer :volume
    end
    add_index :ticks, [:time, :symbol], if_not_exists: true
  end

  # Seed the last week of synthetic ticks at 10s resolution; :from/:to are
  # bound via sanitize_sql_for_conditions.
  execute(ActiveRecord::Base.sanitize_sql_for_conditions( [<<~SQL, {from: 1.week.ago.to_date, to: 1.day.from_now.to_date}]))
    INSERT INTO ticks
    SELECT time, 'SYMBOL', 1 + (random()*30)::int, 100*(random()*10)::int
    FROM generate_series(TIMESTAMP :from,
    TIMESTAMP :to,
    INTERVAL '10 second') AS time;
  SQL

  # Build and populate the candlestick aggregates declared on Tick.
  Tick.create_continuous_aggregates
  Tick.refresh_aggregates
end
# With --pry, open a console instead of booting the web UI. The top-level
# `return` (Ruby 2.4+) stops executing the rest of the file.
if ARGV.include?("--pry")
  Pry.start
  return
end
require 'sinatra/base'
require "sinatra/json"
class App < Sinatra::Base
register Sinatra::Reloader
get '/candlestick.js' do
send_file 'candlestick.js'
end
get '/daily_close_price' do
json({
title: "Daily",
data: Tick::CandlestickPerDay.previous_week.plotly_candlestick
})
end
get '/candlestick_1m' do
json({
title: "Candlestick 1 minute last hour",
data: Tick::CandlestickPerMinute.last_hour.plotly_candlestick
})
end
get '/candlestick_1h' do
json({
title: "Candlestick yesterday hourly",
data:Tick::CandlestickPerHour.yesterday.plotly_candlestick
})
end
get '/' do
<<~HTML
<head>
<script src="https://cdn.jsdelivr.net/npm/jquery@3.6.1/dist/jquery.min.js"></script>
<script src='https://cdn.plot.ly/plotly-2.17.1.min.js'></script>
<script src='/candlestick.js'></script>
</head>
<body>
<div id='charts'>
</body>
HTML
end
end
App.run!
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/examples/toolkit-demo/lttb-zoom/lttb_zoomable.rb | examples/toolkit-demo/lttb-zoom/lttb_zoomable.rb | # ruby lttb_zoomable.rb postgres://user:pass@host:port/db_name
require 'bundler/inline' #require only what you need
gemfile(true) do
gem 'timescaledb', path: '../../..'
gem 'pry'
gem 'sinatra', require: false
gem 'sinatra-reloader'
gem 'sinatra-cross_origin'
gem 'puma'
end
require 'timescaledb/toolkit'
require 'sinatra'
require 'sinatra/json'
require 'sinatra/contrib'
register Sinatra::Reloader
register Sinatra::Contrib
PG_URI = ARGV.last
VALID_SIZES = %i[small med big]
# Downloads the weather dataset tarball for the given size into the current
# directory via wget. Raises for sizes outside VALID_SIZES.
def download_weather_dataset size: :small
  raise "Invalid size: #{size}. Valids are #{VALID_SIZES}" unless VALID_SIZES.include?(size)

  puts "fetching #{size} weather dataset..."
  system %(wget "https://assets.timescale.com/docs/downloads/weather_#{size}.tar.gz")
  puts "done!"
end
# Downloads (if needed), extracts, and loads the weather dataset into the
# database pointed at by PG_URI via psql.
#
# Fix: File.exists? was deprecated and removed in Ruby 3.2 — use File.exist?.
def setup size: :small
  file = "weather_#{size}.tar.gz"
  download_weather_dataset(size: size) unless File.exist?(file)
  puts "extracting #{file}"
  system "tar -xvzf #{file} "
  puts "creating data structures"
  system "psql #{PG_URI} < weather.sql"
  system %|psql #{PG_URI} -c "\\COPY conditions FROM weather_#{size}_conditions.csv CSV"|
  system %|psql #{PG_URI} -c "\\COPY locations FROM weather_#{size}_locations.csv CSV"|
end
ActiveRecord::Base.establish_connection(PG_URI)
# Weather readings hypertable; the time-vector options drive the toolkit
# lttb downsampling scope used by the /lttb_sql route.
class Condition < ActiveRecord::Base
  extend Timescaledb::ActsAsHypertable
  extend Timescaledb::ActsAsTimeVector

  acts_as_hypertable time_column: "time",
    segment_by: "device_id",
    value_column: "temperature"
end
# Setup Hypertable as in a migration
# Creates/loads the dataset when the table is missing or empty.
ActiveRecord::Base.connection.instance_exec do
  ActiveRecord::Base.logger = Logger.new(STDOUT)

  if !Condition.table_exists? || Condition.count.zero?
    setup size: :big
    # NOTE(review): leftover debugging breakpoint — drops into Pry after
    # the first-run setup.
    binding.pry
  end
end
# Builds the WHERE conditions for the current request: always scoped to one
# fixed device, plus an optional time range from ?filter=<from>,<to>.
def filter_by_request_params
  conditions = {device_id: "weather-pro-000001"}
  raw = params[:filter]
  if raw && raw != "null"
    lower, upper = raw.split(",").map { |stamp| Time.parse(stamp) }
    conditions[:time] = lower..upper
  end
  conditions
end
# Readings matching the current request filters, oldest first.
def conditions
  scope = Condition.where(filter_by_request_params)
  scope.order('time')
end

# Downsampling target (number of points); defaults to 50.
def threshold
  requested = params[:threshold]&.to_i
  requested || 50
end
# Allow cross-origin requests so the chart frontend can hit the JSON route.
configure do
  enable :cross_origin
end

# Serves the page that renders the chart (views/index.erb).
get '/' do
  erb :index
end

# Downsampled series via the toolkit's SQL lttb. segment_by: nil because
# the scope is already filtered to a single device.
get "/lttb_sql" do
  downsampled = conditions.lttb(threshold: threshold, segment_by: nil)
  json downsampled
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/examples/toolkit-demo/lttb/lttb_sinatra.rb | examples/toolkit-demo/lttb/lttb_sinatra.rb | # ruby lttb.rb postgres://user:pass@host:port/db_name
require 'bundler/inline' #require only what you need
gemfile(true) do
gem 'timescaledb', path: '../../..'
gem 'pry'
gem 'sinatra', require: false
gem 'sinatra-reloader', require: false
gem 'sinatra-cross_origin', require: false
gem 'chartkick'
gem 'puma'
end
require 'timescaledb/toolkit'
require 'sinatra'
require 'sinatra/json'
require 'sinatra/cross_origin'
require 'chartkick'
require_relative 'lttb'
PG_URI = ARGV.last
VALID_SIZES = %i[small med big]
# Downloads the weather dataset tarball for the given size into the current
# directory via wget. Raises for sizes outside VALID_SIZES.
def download_weather_dataset size: :small
  raise "Invalid size: #{size}. Valids are #{VALID_SIZES}" unless VALID_SIZES.include?(size)

  puts "fetching #{size} weather dataset..."
  system %(wget "https://timescaledata.blob.core.windows.net/datasets/weather_#{size}.tar.gz")
  puts "done!"
end
# Downloads (if needed), extracts, and loads the weather dataset into the
# database pointed at by PG_URI via psql.
#
# Fix: File.exists? was deprecated and removed in Ruby 3.2 — use File.exist?.
def setup size: :small
  file = "weather_#{size}.tar.gz"
  download_weather_dataset(size: size) unless File.exist?(file)
  puts "extracting #{file}"
  system "tar -xvzf #{file} "
  puts "creating data structures"
  system "psql #{PG_URI} < weather.sql"
  system %|psql #{PG_URI} -c "\\COPY locations FROM weather_#{size}_locations.csv CSV"|
  system %|psql #{PG_URI} -c "\\COPY conditions FROM weather_#{size}_conditions.csv CSV"|
end
ActiveRecord::Base.establish_connection(PG_URI)
# Weather station metadata, keyed by device_id.
class Location < ActiveRecord::Base
  self.primary_key = "device_id"

  has_many :conditions, foreign_key: "device_id"
end

# Weather readings hypertable; value_column feeds the toolkit lttb scope.
class Condition < ActiveRecord::Base
  acts_as_hypertable time_column: "time",
    segment_by: "device_id",
    value_column: "temperature"

  belongs_to :location, foreign_key: "device_id"
end
# Setup Hypertable as in a migration
# Downloads and loads the dataset on first run.
ActiveRecord::Base.connection.instance_exec do
  ActiveRecord::Base.logger = Logger.new(STDOUT)

  unless Condition.table_exists?
    setup size: :big
  end
end

require 'sinatra/reloader'
require 'sinatra/contrib'

register Sinatra::Reloader
register Sinatra::Contrib
include Chartkick::Helper

# Listen on all interfaces, port 9999.
set :bind, '0.0.0.0'
set :port, 9999
# Readings for the demo device, oldest first.
#
# Fix: the original built an array of nine device ids and then used only the
# first one; the dead computation is removed (the queried id is unchanged).
def conditions
  Condition
    .where(device_id: "weather-pro-000001")
    .order('time')
end
# Downsampling target (number of points); defaults to 50.
def threshold
  requested = params[:threshold]
  requested ? requested.to_i : 50
end
# Allow cross-origin requests so the chart page can call the JSON routes.
configure do
  enable :cross_origin
end

before do
  response.headers['Access-Control-Allow-Origin'] = '*'
end

# routes...
# CORS preflight handler for every path.
options "*" do
  response.headers["Allow"] = "GET, PUT, POST, DELETE, OPTIONS"
  response.headers["Access-Control-Allow-Headers"] = "Authorization,
Content-Type, Accept, X-User-Email, X-Auth-Token"
  response.headers["Access-Control-Allow-Origin"] = "*"
  200
end

# Chart page (views/index.erb), loading assets from the jsdelivr CDN.
get '/' do
  headers 'Access-Control-Allow-Origin' => 'https://cdn.jsdelivr.net/'
  erb :index
end
# Downsample in Ruby: pull raw rows, group by device, run Lttb per group.
get '/lttb_ruby' do
  payload = conditions
    .pluck(:device_id, :time, :temperature)
    .group_by(&:first)
    .map do |device_id, data|
      # Each plucked row is [device_id, time, temperature]; shift drops the
      # device_id in place, leaving [time, temperature] pairs for Lttb.
      data.each(&:shift)
      {
        name: device_id,
        data: Lttb.downsample(data, threshold)
      }
    end
  json payload
end
# Downsample in SQL via the toolkit lttb scope; one series per device.
get "/lttb_sql" do
  downsampled = conditions
    .lttb(threshold: threshold)
    .map do |device_id, data|
      {
        name: device_id,
        # Points come back per device; order them by time for the chart.
        data: data.sort_by(&:first)
      }
    end
  json downsampled
end
# Full-resolution series, for visually comparing against the downsampled ones.
get '/all_data' do
  data = conditions.pluck(:time, :temperature)
  json [ { name: "All data", data: data} ]
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
# Geometry helper used by the LTTB downsampler.
module Triangle
  module_function

  # Area of the triangle with vertices a, b and c (each an [x, y] pair),
  # computed via the cross-product (shoelace) formula.
  def area(a, b, c)
    ax, ay = a
    bx, by = b
    cx, cy = c
    cross = (ax - cx).to_f * (by - ay) - (ax - bx).to_f * (cy - ay)
    cross.abs * 0.5
  end
end
# Pure-Ruby Largest-Triangle-Three-Buckets downsampling. Reduces an array
# of [x, y] pairs (x may be Time/DateTime/Date) to `threshold` points while
# preserving the visual shape of the series.
class Lttb
  class << self
    # Arithmetic mean of an array of numbers.
    def avg(array)
      array.sum.to_f / array.size
    end

    # Convenience wrapper: Lttb.downsample(data, threshold).
    def downsample(data, threshold)
      new(data, threshold).downsample
    end
  end

  attr_reader :data, :threshold

  # data      - array of [x, y] pairs, assumed sorted by x.
  # threshold - target number of output points; must be >= 2.
  def initialize(data, threshold)
    fail 'data is not an array' unless data.is_a? Array
    fail "threshold should be >= 2. It's #{threshold}." if threshold < 2
    @data = data
    @threshold = threshold
  end

  # Runs the downsampling. Temporal x values are converted to numeric
  # offsets first and restored on the sampled points afterwards.
  # NOTE(review): dates_to_numbers rewrites every row in place but
  # numbers_to_dates restores only the sampled rows — non-sampled rows of
  # the caller's array are left as numeric offsets.
  def downsample
    case @data.first.first
    when Time, DateTime, Date
      transformed_dates = true
      dates_to_numbers()
    end
    process.tap do |downsampled|
      numbers_to_dates(downsampled) if transformed_dates
    end
  end

  private

  # Core LTTB loop: always keep the first and last points; for each of the
  # threshold-2 buckets pick the point forming the largest triangle with
  # the previously chosen point and an averaged reference point.
  def process
    return data if threshold >= data.size || threshold == 0

    sampled = [data.first, data.last] # Keep first and last point. append in the middle.
    point_index = 0

    (threshold - 2).times do |i|
      # Exclusive upper bound of the current bucket's candidate range.
      step = [((i+1.0) * bucket_size).to_i, data.size].min
      next_point = (i * bucket_size).to_i + 1
      break if next_point > data.size - 2

      # Averaged reference vertex for the triangle.
      # NOTE(review): classic LTTB averages the *next* bucket; confirm that
      # data[step, slice] is the intended window here.
      points = data[step, slice]
      avg_x = Lttb.avg(points.map(&:first)).to_i
      avg_y = Lttb.avg(points.map(&:last))

      max_area = -1.0
      (next_point...(step + 1)).each do |idx|
        area = Triangle.area(data[point_index], data[idx], [avg_x, avg_y])
        if area > max_area
          max_area = area
          next_point = idx
        end
      end

      sampled.insert(-2, data[next_point]) # insert just before the final point
      point_index = next_point
    end

    sampled
  end

  # Fractional bucket width covering the interior (non-endpoint) points.
  def bucket_size
    @bucket_size ||= ((data.size - 2.0) / (threshold - 2.0))
  end

  # Integer window length used when averaging a bucket.
  def slice
    @slice ||= bucket_size.to_i
  end

  # Replaces temporal x values with numeric offsets from the first x (mutates).
  def dates_to_numbers
    @start_date = data[0][0].dup
    data.each{|d| d[0] = d[0] - @start_date }
  end

  # Converts sampled x offsets back into temporal values (mutates).
  def numbers_to_dates(downsampled)
    downsampled.each{|d| d[0] = @start_date + d[0]}
  end
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/examples/toolkit-demo/lttb/lttb_test.rb | examples/toolkit-demo/lttb/lttb_test.rb | require_relative 'lttb'
require 'pp'
require 'date'
# Smoke test: downsample 11 daily points to 5 and print the result.
samples = [
  ['2020-1-1', 10],
  ['2020-1-2', 21],
  ['2020-1-3', 19],
  ['2020-1-4', 32],
  ['2020-1-5', 12],
  ['2020-1-6', 14],
  ['2020-1-7', 18],
  ['2020-1-8', 29],
  ['2020-1-9', 23],
  ['2020-1-10', 27],
  ['2020-1-11', 14]
].map { |date, value| [Time.mktime(*date.split('-')), value] }

pp Lttb.downsample(samples, 5)
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/examples/ranking/app/jobs/application_job.rb | examples/ranking/app/jobs/application_job.rb | class ApplicationJob < ActiveJob::Base
# Automatically retry jobs that encountered a deadlock
# retry_on ActiveRecord::Deadlocked
# Most jobs are safe to ignore if the underlying records are no longer available
# discard_on ActiveJob::DeserializationError
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/examples/ranking/app/controllers/application_controller.rb | examples/ranking/app/controllers/application_controller.rb | class ApplicationController < ActionController::API
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/examples/ranking/app/models/game.rb | examples/ranking/app/models/game.rb | class Game < ApplicationRecord
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/examples/ranking/app/models/play.rb | examples/ranking/app/models/play.rb | class Play < ApplicationRecord
belongs_to :game
self.primary_key = "created_at"
acts_as_hypertable
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/examples/ranking/app/models/application_record.rb | examples/ranking/app/models/application_record.rb | class ApplicationRecord < ActiveRecord::Base
primary_abstract_class
end
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
timescale/timescaledb-ruby | https://github.com/timescale/timescaledb-ruby/blob/e15cabd97d1b0901abedea39c4443386b305f184/examples/ranking/db/seeds.rb | examples/ranking/db/seeds.rb | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the bin/rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: "Star Wars" }, { name: "Lord of the Rings" }])
# Character.create(name: "Luke", movie: movies.first)
| ruby | MIT | e15cabd97d1b0901abedea39c4443386b305f184 | 2026-01-04T17:50:22.541182Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.