repo stringlengths 5 92 | file_url stringlengths 80 287 | file_path stringlengths 5 197 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:37:27 2026-01-04 17:58:21 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/forecasting_test.rb | test/forecasting_test.rb | require_relative "test_helper"
class ForecastingTest < ActionDispatch::IntegrationTest
def setup
Blazer::Query.delete_all
end
def test_prophet
skip unless ENV["TEST_PROPHET"]
assert_forecast("prophet")
end
def test_trend
skip unless ENV["TEST_TREND"]
assert_forecast("trend")
end
def assert_forecast(forecasting)
skip unless postgresql?
with_option(:forecasting, forecasting) do
query = create_query(statement: "SELECT current_date + n AS day, n FROM generate_series(1, 30) n")
run_query query.statement, query_id: query.id, forecast: "t"
assert_match %{"name":"forecast"}, response.body
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/queries_test.rb | test/queries_test.rb | require_relative "test_helper"
class QueriesTest < ActionDispatch::IntegrationTest
def setup
Blazer::Audit.delete_all
Blazer::Query.delete_all
end
def test_index
get blazer.root_path
assert_response :success
end
def test_create
post blazer.queries_path, params: {query: {name: "Test", statement: "SELECT 1", data_source: "main"}}
assert_response :redirect
query = Blazer::Query.last
get blazer.query_path(query)
assert_response :success
post blazer.run_queries_path, params: {statement: query.statement, data_source: query.data_source, query_id: query.id}, xhr: true
assert_response :success
audit = Blazer::Audit.last
assert_equal query.id, audit.query_id
assert_equal query.statement, audit.statement
assert_equal query.data_source, audit.data_source
end
def test_create_error
post blazer.queries_path, params: {query: {name: "Test", statement: "", data_source: "main"}}
assert_response :unprocessable_entity
assert_match(/Statement can('|’)t be blank/, response.body)
end
def test_destroy
query = create_query
delete blazer.query_path(query)
assert_response :redirect
end
def test_rollback
create_query
run_query "DELETE FROM blazer_queries"
assert_equal 1, Blazer::Query.count
end
def test_tables
get blazer.tables_queries_path(data_source: "main")
assert_response :success
tables = JSON.parse(response.body)
tables = tables.map { |v| v["table"] } if postgresql?
assert_includes tables, "blazer_queries"
end
def test_schema
get blazer.schema_queries_path(data_source: "main")
assert_response :success
end
def test_docs
get blazer.docs_queries_path(data_source: "main")
assert_response :success
end
def test_refresh
query = create_query
post blazer.refresh_query_path(query)
assert_response :redirect
end
def test_variables_time
query = create_query(statement: "SELECT {created_at}")
get blazer.query_path(query)
assert_response :success
assert_match "singleDatePicker", response.body
end
def test_variables_time_range
query = create_query(statement: "SELECT {start_time}, {end_time}")
get blazer.query_path(query)
assert_response :success
assert_match "daterangepicker", response.body
end
def test_variable_defaults
query = create_query(statement: "SELECT {default_var}")
get blazer.query_path(query)
assert_response :success
assert_match %{value="default_value"}, response.body
end
def test_variables_id
query = create_query(statement: "SELECT {id}")
get blazer.query_path(query), params: {id: 123}
assert_response :success
assert_match %!"variables":{"id":"123"}!, response.body
end
def test_variables_zero
query = create_query(statement: "SELECT {id}")
get blazer.query_path(query), params: {id: "0"}
assert_response :success
assert_match "SELECT 0", response.body
end
def test_variables_leading_zeros
query = create_query(statement: "SELECT {id}")
get blazer.query_path(query), params: {id: "0123"}
assert_response :success
assert_match "SELECT '0123'", response.body
end
def test_smart_variables
query = create_query(statement: "SELECT {period}")
get blazer.query_path(query)
assert_response :success
assert_match "day", response.body
assert_match "week", response.body
assert_match "month", response.body
end
def test_linked_columns
run_query "SELECT 123 AS user_id"
assert_match "/admin/users/123", response.body
end
def test_smart_columns
run_query "SELECT 0 AS status"
assert_match "Active", response.body
end
def test_csv
run_query("SELECT 1 AS id, 'Chicago' AS city", format: "csv")
assert_equal "id,city\n1,Chicago\n", response.body
assert_equal "attachment; filename=\"query.csv\"; filename*=UTF-8''query.csv", response.headers["Content-Disposition"]
assert_equal "text/csv; charset=utf-8", response.headers["Content-Type"]
end
def test_csv_query
query = create_query(name: "All Cities", statement: "SELECT 1 AS id, 'Chicago' AS city")
run_query(query.statement, format: "csv", query_id: query.id)
assert_equal "id,city\n1,Chicago\n", response.body
assert_equal "attachment; filename=\"all-cities.csv\"; filename*=UTF-8''all-cities.csv", response.headers["Content-Disposition"]
assert_equal "text/csv; charset=utf-8", response.headers["Content-Type"]
end
def test_csv_query_variables
query = create_query(name: "Cities", statement: "SELECT 1 AS id, {name} AS city")
run_query(query.statement, format: "csv", query_id: query.id, variables: {name: "Chicago"})
assert_equal "id,city\n1,Chicago\n", response.body
assert_equal "attachment; filename=\"cities.csv\"; filename*=UTF-8''cities.csv", response.headers["Content-Disposition"]
assert_equal "text/csv; charset=utf-8", response.headers["Content-Type"]
end
def test_url
run_query "SELECT 'http://localhost:3000/'"
assert_match %{<a target="_blank" href="http://localhost:3000/">http://localhost:3000/</a>}, response.body
end
def test_images_default
run_query("SELECT 'http://localhost:3000/image.png'")
refute_match %{<img referrerpolicy="no-referrer" src="http://localhost:3000/image.png" />}, response.body
end
def test_images
with_option(:images, true) do
run_query("SELECT 'http://localhost:3000/image.png'")
assert_match %{<img referrerpolicy="no-referrer" src="http://localhost:3000/image.png" }, response.body
end
end
def test_async
with_option(:async, true) do
perform_enqueued_jobs do
run_query "SELECT 123"
end
assert_match "123", response.body
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/checks_test.rb | test/checks_test.rb | require_relative "test_helper"
class ChecksTest < ActionDispatch::IntegrationTest
def setup
Blazer::Check.delete_all
Blazer::Query.delete_all
end
def test_index
get blazer.checks_path
assert_response :success
end
def test_bad_data
query = create_query
check = create_check(query: query, check_type: "bad_data")
Blazer.run_checks(schedule: "5 minutes")
check.reload
assert_equal "failing", check.state
query.update!(statement: "SELECT 1 LIMIT 0")
Blazer.run_checks(schedule: "5 minutes")
check.reload
assert_equal "passing", check.state
end
def test_missing_data
query = create_query
check = create_check(query: query, check_type: "missing_data")
Blazer.run_checks(schedule: "5 minutes")
check.reload
assert_equal "passing", check.state
query.update!(statement: "SELECT 1 LIMIT 0")
Blazer.run_checks(schedule: "5 minutes")
check.reload
assert_equal "failing", check.state
end
def test_error
query = create_query(statement: "invalid")
check = create_check(query: query, check_type: "bad_data")
Blazer.run_checks(schedule: "5 minutes")
check.reload
assert_equal "error", check.state
end
def test_emails
query = create_query
create_check(query: query, check_type: "bad_data", emails: "hi@example.org,hi2@example.org")
assert_emails 0 do
Blazer.send_failing_checks
end
assert_emails 1 do
Blazer.run_checks(schedule: "5 minutes")
end
assert_emails 2 do
Blazer.send_failing_checks
end
end
def test_slack
query = create_query
create_check(query: query, check_type: "bad_data", slack_channels: "#general,#random")
assert_slack_messages 0 do
Blazer.send_failing_checks
end
assert_slack_messages 2 do
Blazer.run_checks(schedule: "5 minutes")
end
assert_slack_messages 2 do
Blazer.send_failing_checks
end
end
def assert_slack_messages(expected)
count = 0
Blazer::SlackNotifier.stub :post_api, ->(*) { count += 1 } do
yield
end
assert_equal expected, count
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/test_helper.rb | test/test_helper.rb | require "bundler/setup"
require "combustion"
Bundler.require(:default)
require "minitest/autorun"
logger = ActiveSupport::Logger.new(ENV["VERBOSE"] ? STDERR : nil)
Combustion.path = "test/internal"
Combustion.initialize! :active_record, :action_controller, :action_mailer, :active_job do
config.load_defaults Rails::VERSION::STRING.to_f
config.action_controller.logger = logger
config.action_mailer.logger = logger
config.active_job.logger = logger
config.active_record.logger = logger
config.cache_store = :memory_store
# fixes warning with adapter tests
config.action_dispatch.show_exceptions = :none
end
Rails.cache.logger = logger
class ActionDispatch::IntegrationTest
def run_query(statement, format: nil, **params)
post blazer.run_queries_path(format: format), params: {statement: statement, data_source: "main"}.merge(params), xhr: true
assert_response :success
end
def create_query(statement: "SELECT 1", **attributes)
Blazer::Query.create!(statement: statement, data_source: "main", status: "active", **attributes)
end
def create_check(**attributes)
Blazer::Check.create!(schedule: "5 minutes", **attributes)
end
def postgresql?
ENV["ADAPTER"].nil?
end
def with_option(name, value)
previous_value = Blazer.send(name)
begin
Blazer.send("#{name}=", value)
yield
ensure
Blazer.send("#{name}=", previous_value)
end
end
end
require_relative "support/adapter_test"
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/cohort_analysis_test.rb | test/cohort_analysis_test.rb | require_relative "test_helper"
class CohortAnalysisTest < ActionDispatch::IntegrationTest
def test_works
run_query "SELECT 1 AS user_id, NOW() AS conversion_time /* cohort analysis */", query_id: 1
assert_match "1 cohort", response.body
end
def test_cohort_time
run_query "SELECT 1 AS user_id, NOW() AS cohort_time, NOW() AS conversion_time /* cohort analysis */", query_id: 1
assert_match "1 cohort", response.body
end
def test_cohort_period_default
query = create_query(statement: "SELECT 1 AS user_id, NOW() AS conversion_time /* cohort analysis */")
get blazer.query_path(query)
assert_response :success
assert_match %{selected="selected" value="week"}, response.body
end
def test_missing_columns
run_query "SELECT 1 /* cohort analysis */", query_id: 1
assert_match "alert-danger", response.body
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/charts_test.rb | test/charts_test.rb | require_relative "test_helper"
class ChartsTest < ActionDispatch::IntegrationTest
def test_line_chart_format1
run_query "SELECT NOW(), 1"
assert_match "LineChart", response.body
end
def test_line_chart_format2
run_query "SELECT NOW(), 'Label', 1"
assert_match "LineChart", response.body
end
def test_column_chart_format1
run_query "SELECT 'Label' AS label, 1"
assert_match "ColumnChart", response.body
end
def test_column_chart_format2
run_query "SELECT 'Label' AS label, 'Group' AS group2, 1"
assert_match "ColumnChart", response.body
assert_match %{"name":"Group"}, response.body
end
def test_scatter_chart
run_query "SELECT 1, 2"
assert_match "ScatterChart", response.body
end
def test_pie_chart
run_query "SELECT 'Label', 1 AS pie"
assert_match "PieChart", response.body
end
def test_target
run_query "SELECT NOW(), 1, 2 AS target"
assert_match %{"name":"target"}, response.body
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/archive_test.rb | test/archive_test.rb | require_relative "test_helper"
class ArchiveTest < ActionDispatch::IntegrationTest
def setup
Blazer::Audit.delete_all
Blazer::Query.delete_all
end
def test_archive_queries
query = create_query
query2 = create_query
query2.audits.create!
Blazer.archive_queries
query.reload
assert_equal "archived", query.status
query2.reload
assert_equal "active", query2.status
get blazer.query_path(query)
assert_response :success
query.reload
assert_equal "active", query.status
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/dashboards_test.rb | test/dashboards_test.rb | require_relative "test_helper"
class DashboardsTest < ActionDispatch::IntegrationTest
def setup
Blazer::Query.delete_all
Blazer::Dashboard.delete_all
end
def test_new
get blazer.new_dashboard_path
assert_response :success
end
def test_show
dashboard = create_dashboard
get blazer.dashboard_path(dashboard)
assert_response :success
end
def test_destroy
dashboard = create_dashboard
delete blazer.dashboard_path(dashboard)
assert_response :redirect
end
def test_refresh
dashboard = create_dashboard
dashboard.queries << create_query
post blazer.refresh_dashboard_path(dashboard)
assert_response :redirect
end
def create_dashboard
Blazer::Dashboard.create!(name: "Test")
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/uploads_test.rb | test/uploads_test.rb | require_relative "test_helper"
class UploadsTest < ActionDispatch::IntegrationTest
def setup
skip unless postgresql?
Blazer::Upload.delete_all
Blazer::UploadsConnection.connection.execute("DROP SCHEMA IF EXISTS uploads CASCADE")
Blazer::UploadsConnection.connection.execute("CREATE SCHEMA uploads")
end
def test_index
get blazer.uploads_path
assert_response :success
end
def test_new
get blazer.new_upload_path
assert_response :success
end
def test_create
create_upload
assert_response :redirect
upload = Blazer::Upload.last
assert_equal "line_items", upload.table
assert_equal "Billing line items", upload.description
run_query "SELECT * FROM uploads.line_items"
assert_response :success
column_types = Blazer::UploadsConnection.connection.select_all("SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = 'uploads' AND table_name = 'line_items'").rows.to_h
assert_equal "bigint", column_types["a"]
assert_equal "numeric", column_types["b"]
assert_equal "timestamp with time zone", column_types["c"]
assert_equal "date", column_types["d"]
assert_equal "text", column_types["e"]
assert_equal "text", column_types["f"]
end
def test_create_duplicate_table
create_upload
assert_response :redirect
Blazer::Upload.delete_all
create_upload
assert_response :unprocessable_entity
assert_match "Table already exists", response.body
end
def test_rename
create_upload
assert_response :redirect
upload = Blazer::Upload.last
patch blazer.upload_path(upload), params: {upload: {table: "items"}}
assert_response :redirect
tables = Blazer::UploadsConnection.connection.select_all("SELECT table_name FROM information_schema.tables WHERE table_schema = 'uploads'").rows.map(&:first)
assert_equal ["items"], tables
end
def test_bad_content_type
create_upload(content_type: "text/plain")
assert_response :unprocessable_entity
assert_match "File is not a CSV", response.body
end
def test_malformed_csv
create_upload(file: "malformed.csv")
assert_response :unprocessable_entity
assert_match "Unclosed quoted field in line 1", response.body
end
def test_duplicate_columns
create_upload(file: "duplicate_columns.csv")
assert_response :unprocessable_entity
assert_match "Duplicate column name: a", response.body
end
def create_upload(file: "line_items.csv", content_type: "text/csv")
post blazer.uploads_path, params: {upload: {table: "line_items", description: "Billing line items", file: fixture_file_upload("test/support/#{file}", content_type)}}
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/anomaly_checks_test.rb | test/anomaly_checks_test.rb | require_relative "test_helper"
class AnomalyChecksTest < ActionDispatch::IntegrationTest
def setup
Blazer::Check.delete_all
Blazer::Query.delete_all
end
def test_prophet
skip unless ENV["TEST_PROPHET"]
assert_anomaly("prophet")
end
def test_trend
skip unless ENV["TEST_TREND"]
assert_anomaly("trend")
end
def test_anomaly_detection
assert_anomaly("anomaly_detection")
end
def assert_anomaly(anomaly_checks)
skip if !postgresql? || RUBY_ENGINE == "truffleruby"
with_option(:anomaly_checks, anomaly_checks) do
query = create_query(statement: "SELECT current_date + n AS day, 0.1 FROM generate_series(1, 30) n")
check = create_check(query: query, check_type: "anomaly")
Blazer.run_checks(schedule: "5 minutes")
check.reload
assert_equal "passing", check.state
query.update!(statement: "SELECT current_date + n AS day, 0.1 * random() FROM generate_series(1, 30) n UNION ALL SELECT current_date + 31, 2")
Blazer.run_checks(schedule: "5 minutes")
check.reload
assert_equal "failing", check.state
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/maps_test.rb | test/maps_test.rb | require_relative "test_helper"
class MapsTest < ActionDispatch::IntegrationTest
def test_latitude_longitude
run_query "SELECT 1.2 AS latitude, 3.4 AS longitude"
assert_match "Map", response.body
end
def test_lat_lon
run_query "SELECT 1.2 AS lat, 3.4 AS lon"
assert_match "Map", response.body
end
def test_lat_lng
run_query "SELECT 1.2 AS lat, 3.4 AS lng"
assert_match "Map", response.body
end
def test_geojson
run_query "SELECT '{}' AS geojson"
assert_match "AreaMap", response.body
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/cache_test.rb | test/cache_test.rb | require_relative "test_helper"
class CacheTest < ActionDispatch::IntegrationTest
def setup
Rails.cache.clear
end
def test_all
with_caching({"mode" => "all"}) do
run_query "SELECT 1"
refute_match "Cached", response.body
run_query "SELECT 1"
assert_match "Cached", response.body
end
end
def test_slow_under_threshold
with_caching({"mode" => "slow"}) do
run_query "SELECT 1"
refute_match "Cached", response.body
run_query "SELECT 1"
refute_match "Cached", response.body
end
end
def test_slow_over_threshold
skip unless postgresql?
with_caching({"mode" => "slow", "slow_threshold" => 0.01}) do
run_query "SELECT pg_sleep(0.01)::text"
refute_match "Cached", response.body
run_query "SELECT pg_sleep(0.01)::text"
assert_match "Cached", response.body
end
end
def test_variables
with_caching({"mode" => "all"}) do
run_query "SELECT {str_var}, {int_var}", variables: {str_var: "hello", int_var: 1}
assert_match "hello", response.body
end
end
private
def with_caching(value)
data_source = Blazer.data_sources["main"]
begin
data_source.instance_variable_set(:@cache, value)
yield
ensure
data_source.remove_instance_variable(:@cache)
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/permissions_test.rb | test/permissions_test.rb | require_relative "test_helper"
class PermissionsTest < ActionDispatch::IntegrationTest
def setup
Blazer::Query.delete_all
User.delete_all
end
def test_list
with_new_user do |user|
create_query(name: "# Test", creator: user)
get blazer.root_path
assert_response :success
assert_match "# Test", response.body
end
with_new_user do
get blazer.root_path
assert_response :success
refute_match "# Test", response.body
end
end
def test_edit
query =
with_new_user do |user|
create_query(name: "* Test", creator: user)
end
with_new_user do
patch blazer.query_path(query), params: {query: {name: "Renamed"}}
assert_response :unprocessable_entity
assert_match "Sorry, permission denied", response.body
delete blazer.query_path(query)
# TODO error response
assert_response :redirect
assert Blazer::Query.exists?(query.id)
end
end
def test_change_creator
with_new_user do |user|
query = create_query(name: "Test", creator: user)
patch blazer.query_path(query), params: {query: {name: "* Test"}}
assert_response :redirect
patch blazer.query_path(query), params: {query: {name: "# Test"}}
assert_response :redirect
end
end
private
def with_new_user
user = User.create!
yield user
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/support/adapter_test.rb | test/support/adapter_test.rb | module AdapterTest
def setup
settings = YAML.load_file("test/support/adapters.yml")
Blazer.instance_variable_set(:@settings, settings)
end
# some adapter tests override this method
def test_tables
assert_kind_of Array, tables
end
def test_schema
get blazer.schema_queries_path(data_source: data_source)
assert_response :success
end
private
def tables
get blazer.tables_queries_path(data_source: data_source)
assert_response :success
JSON.parse(response.body)
end
def assert_result(expected, statement, **variables)
assert_equal expected, run_statement(statement, **variables)
end
def assert_audit(expected, statement, **variables)
run_statement(statement, **variables)
assert_equal expected, Blazer::Audit.last.statement
end
def assert_error(message, statement, **variables)
error = assert_raises(Blazer::Error) do
run_statement(statement, **variables)
end
assert_match message, error.message
end
def assert_bad_position(statement, **variables)
assert_error "Variable cannot be used in this position", statement, **variables
end
def run_statement(statement, format: "csv", **variables)
run_query statement, data_source: data_source, format: format, variables: variables
CSV.parse(response.body, headers: true).map(&:to_h) if format == "csv"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/internal/app/controllers/application_controller.rb | test/internal/app/controllers/application_controller.rb | class ApplicationController < ActionController::Base
def current_user
User.last
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/internal/app/models/user.rb | test/internal/app/models/user.rb | class User < ActiveRecord::Base
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/internal/db/schema.rb | test/internal/db/schema.rb | ActiveRecord::Schema.define do
create_table :blazer_queries do |t|
t.references :creator
t.string :name
t.text :description
t.text :statement
t.string :data_source
t.string :status
t.timestamps null: false
end
create_table :blazer_audits do |t|
t.references :user
t.references :query
t.text :statement
t.string :data_source
t.datetime :created_at
end
create_table :blazer_dashboards do |t|
t.references :creator
t.string :name
t.timestamps null: false
end
create_table :blazer_dashboard_queries do |t|
t.references :dashboard
t.references :query
t.integer :position
t.timestamps null: false
end
create_table :blazer_checks do |t|
t.references :creator
t.references :query
t.string :state
t.string :schedule
t.text :emails
t.text :slack_channels
t.string :check_type
t.text :message
t.datetime :last_run_at
t.timestamps null: false
end
create_table :blazer_uploads do |t|
t.references :creator
t.string :table
t.text :description
t.timestamps null: false
end
create_table :users do |t|
t.string :name
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/internal/config/routes.rb | test/internal/config/routes.rb | Rails.application.routes.draw do
mount Blazer::Engine, at: "/"
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/opensearch_test.rb | test/adapters/opensearch_test.rb | require_relative "../test_helper"
class OpensearchTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"opensearch"
end
def test_run
assert_result [{"'world'" => "world"}], "SELECT 'world' AS hello"
end
def test_single_quote
assert_error "Quoting not specified", "SELECT {var} AS hello", var: "'"
end
def test_double_quote
assert_error "Quoting not specified", "SELECT {var} AS hello", var: '"'
end
def test_backslash
assert_error "Quoting not specified", "SELECT {var} AS hello", var: "\\"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/snowflake_test.rb | test/adapters/snowflake_test.rb | require_relative "../test_helper"
class SnowflakeTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"snowflake"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/druid_test.rb | test/adapters/druid_test.rb | require_relative "../test_helper"
class DruidTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"druid"
end
def test_run
assert_result [{"hello" => "world"}], "SELECT 'world' AS hello"
end
def test_audit
assert_audit "SELECT ? AS hello\n\n[\"world\"]", "SELECT {var} AS hello", var: "world"
end
def test_string
assert_result [{"hello" => "world"}], "SELECT {var} AS hello", var: "world"
end
def test_integer
assert_result [{"hello" => "1"}], "SELECT {var} AS hello", var: "1"
end
def test_float
assert_result [{"hello" => "1.5"}], "SELECT {var} AS hello", var: "1.5"
end
def test_time
assert_result [{"hello" => "2022-01-01 08:00:00 UTC"}], "SELECT {created_at} AS hello", created_at: "2022-01-01 08:00:00"
end
# TODO fix
def test_nil
# assert_result [{"hello" => nil}], "SELECT {var} AS hello", var: ""
end
def test_single_quote
assert_result [{"hello" => "'"}], "SELECT {var} AS hello", var: "'"
end
def test_double_quote
assert_result [{"hello" => '"'}], "SELECT {var} AS hello", var: '"'
end
def test_backslash
assert_result [{"hello" => "\\"}], "SELECT {var} AS hello", var: "\\"
end
def test_bad_position
assert_bad_position "SELECT hello AS {var}", var: "world"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/postgresql_test.rb | test/adapters/postgresql_test.rb | require_relative "../test_helper"
class PostgresqlTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"postgresql"
end
def test_run
assert_result [{"hello" => "world"}], "SELECT 'world' AS hello"
end
def test_audit
assert_audit "SELECT $1 AS hello\n\n[\"world\"]", "SELECT {var} AS hello", var: "world"
end
def test_string
assert_result [{"hello" => "world"}], "SELECT {var} AS hello", var: "world"
end
def test_integer
assert_result [{"hello" => "1"}], "SELECT {var} AS hello", var: "1"
end
def test_leading_zeros
assert_result [{"hello" => "0123"}], "SELECT {var} AS hello", var: "0123"
end
def test_float
assert_result [{"hello" => "1.5"}], "SELECT {var} AS hello", var: "1.5"
end
def test_time
assert_result [{"hello" => "2022-01-01 08:00:00"}], "SELECT {created_at} AS hello", created_at: "2022-01-01 08:00:00"
end
def test_nil
assert_result [{"hello" => nil}], "SELECT {var} AS hello", var: ""
end
def test_single_quote
assert_result [{"hello" => "'"}], "SELECT {var} AS hello", var: "'"
end
def test_double_quote
assert_result [{"hello" => '"'}], "SELECT {var} AS hello", var: '"'
end
def test_backslash
assert_result [{"hello" => "\\"}], "SELECT {var} AS hello", var: "\\"
end
def test_multiple_variables
assert_result [{"c1" => "one", "c2" => "two", "c3" => "one"}], "SELECT {var} AS c1, {var2} AS c2, {var} AS c3", var: "one", var2: "two"
end
def test_bad_position
assert_bad_position "SELECT 'world' AS {var}", var: "hello"
end
def test_bad_position_before
assert_error "syntax error at or near \"SELECT$1\"", "SELECT{var}", var: "world"
end
def test_bad_position_after
assert_error "syntax error at or near \"456\"\nLINE 1: SELECT $1 456", "SELECT {var}456", var: "world"
assert_equal "SELECT $1 456\n\n[\"world\"]", Blazer::Audit.last.statement
end
def test_quoted
assert_error "could not determine data type of parameter $1", "SELECT '{var}' AS hello", var: "world"
end
def test_binary_output
assert_result [{"bytea" => "\\x68656c6c6f"}], "SELECT 'hello'::bytea"
end
def test_json_output
assert_result [{"json" => '{"hello": "world"}'}], %!SELECT '{"hello": "world"}'::json!
end
def test_jsonb_output
assert_result [{"jsonb" => '{"hello": "world"}'}], %!SELECT '{"hello": "world"}'::jsonb!
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/spark_test.rb | test/adapters/spark_test.rb | require_relative "../test_helper"
# bin/beeline -u jdbc:hive2://localhost:10000 -e 'CREATE DATABASE blazer_test;'
class SparkTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"spark"
end
def test_run
assert_result [{"hello" => "world"}], "SELECT 'world' AS hello"
end
def test_audit
assert_audit "SELECT 'world' AS hello", "SELECT {var} AS hello", var: "world"
end
def test_string
assert_result [{"hello" => "world"}], "SELECT {var} AS hello", var: "world"
end
def test_integer
assert_result [{"hello" => "1"}], "SELECT {var} AS hello", var: "1"
end
def test_float
assert_result [{"hello" => "1.5"}], "SELECT {var} AS hello", var: "1.5"
end
def test_time
assert_result [{"hello" => "2022-01-01 08:00:00"}], "SELECT {created_at} AS hello", created_at: "2022-01-01 08:00:00"
end
def test_nil
assert_result [{"hello" => nil}], "SELECT {var} AS hello", var: ""
end
def test_single_quote
assert_result [{"hello" => "'"}], "SELECT {var} AS hello", var: "'"
end
def test_double_quote
assert_result [{"hello" => '"'}], "SELECT {var} AS hello", var: '"'
end
def test_backslash
assert_result [{"hello" => "\\"}], "SELECT {var} AS hello", var: "\\"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/elasticsearch_test.rb | test/adapters/elasticsearch_test.rb | require_relative "../test_helper"
class ElasticsearchTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"elasticsearch"
end
def test_run
assert_result [{"hello" => "world"}], "SELECT 'world' AS hello"
end
def test_audit
assert_audit "SELECT ? AS hello\n\n[\"world\"]", "SELECT {var} AS hello", var: "world"
end
def test_string
assert_result [{"hello" => "world"}], "SELECT {var} AS hello", var: "world"
end
def test_integer
assert_result [{"hello" => "1"}], "SELECT {var} AS hello", var: "1"
end
def test_float
assert_result [{"hello" => "1.5"}], "SELECT {var} AS hello", var: "1.5"
end
def test_time
assert_result [{"hello" => "2022-01-01T08:00:00.000Z"}], "SELECT {created_at} AS hello", created_at: "2022-01-01 08:00:00"
end
def test_nil
assert_result [{"hello" => nil}], "SELECT {var} AS hello", var: ""
end
def test_single_quote
assert_result [{"hello" => "'"}], "SELECT {var} AS hello", var: "'"
end
def test_double_quote
assert_result [{"hello" => '"'}], "SELECT {var} AS hello", var: '"'
end
def test_backslash
assert_result [{"hello" => "\\"}], "SELECT {var} AS hello", var: "\\"
end
def test_bad_position
assert_bad_position "SELECT 'world' AS {var}", var: "hello"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/hive_test.rb | test/adapters/hive_test.rb | require_relative "../test_helper"
# bin/beeline -u jdbc:hive2://localhost:10000 -e 'CREATE DATABASE blazer_test;'
class HiveTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"hive"
end
def test_run
assert_result [{"hello" => "world"}], "SELECT 'world' AS hello"
end
def test_audit
assert_audit "SELECT 'world' AS hello", "SELECT {var} AS hello", var: "world"
end
def test_string
assert_result [{"hello" => "world"}], "SELECT {var} AS hello", var: "world"
end
def test_integer
assert_result [{"hello" => "1"}], "SELECT {var} AS hello", var: "1"
end
def test_float
assert_result [{"hello" => "1.5"}], "SELECT {var} AS hello", var: "1.5"
end
def test_time
assert_result [{"hello" => "2022-01-01 08:00:00"}], "SELECT {created_at} AS hello", created_at: "2022-01-01 08:00:00"
end
def test_nil
assert_result [{"hello" => nil}], "SELECT {var} AS hello", var: ""
end
def test_single_quote
assert_result [{"hello" => "'"}], "SELECT {var} AS hello", var: "'"
end
def test_double_quote
assert_result [{"hello" => '"'}], "SELECT {var} AS hello", var: '"'
end
def test_backslash
assert_result [{"hello" => "\\"}], "SELECT {var} AS hello", var: "\\"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/sqlserver_test.rb | test/adapters/sqlserver_test.rb | require_relative "../test_helper"
# brew install freetds
# docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=YourStrong!Passw0rd' -p 1433:1433 -d mcr.microsoft.com/mssql/server:2019-latest
# docker exec -it <container-id> /opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P YourStrong\!Passw0rd -Q "CREATE DATABASE blazer_test"
class SqlserverTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"sqlserver"
end
def test_run
assert_result [{"hello" => "world"}], "SELECT 'world' AS hello"
end
def test_audit
assert_audit "SELECT @0 AS hello\n\n[\"world\"]", "SELECT {var} AS hello", var: "world"
end
def test_string
assert_result [{"hello" => "world"}], "SELECT {var} AS hello", var: "world"
end
def test_integer
assert_result [{"hello" => "1"}], "SELECT {var} AS hello", var: "1"
end
# https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/issues/643
# should be 1.5
def test_float
assert_result [{"hello" => "1"}], "SELECT {var} AS hello", var: "1.5"
end
def test_time
assert_result [{"hello" => "01-01-2022 08:00:00.0"}], "SELECT {created_at} AS hello", created_at: "2022-01-01 08:00:00"
end
def test_nil
assert_result [{"hello" => nil}], "SELECT {var} AS hello", var: ""
end
def test_single_quote
assert_result [{"hello" => "'"}], "SELECT {var} AS hello", var: "'"
end
def test_double_quote
assert_result [{"hello" => '"'}], "SELECT {var} AS hello", var: '"'
end
def test_backslash
assert_result [{"hello" => "\\"}], "SELECT {var} AS hello", var: "\\"
end
def test_bad_position
assert_bad_position "SELECT 'world' AS {var}", var: "hello"
end
def test_quoted
assert_result [{"hello"=>"@0 "}], "SELECT '{var}' AS hello", var: "world"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/soda_test.rb | test/adapters/soda_test.rb | require_relative "../test_helper"
class SodaTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"soda"
end
def test_tables
assert_equal ["all"], tables
end
def test_run
assert_result [{"hello" => "world"}], "SELECT 'world' AS hello LIMIT 1"
end
def test_single_quote
assert_result [{"hello" => "'"}], "SELECT {var} AS hello LIMIT 1", var: "'"
end
def test_double_quote
assert_result [{"hello" => '"'}], "SELECT {var} AS hello LIMIT 1", var: '"'
end
def test_backslash
assert_result [{"hello" => "\\"}], "SELECT {var} AS hello LIMIT 1", var: "\\"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/salesforce_test.rb | test/adapters/salesforce_test.rb | require_relative "../test_helper"
# https://stackoverflow.com/questions/12794302/salesforce-authentication-failing/29112224#29112224
# create accounts named world, ', ", and \
# ENV["SALESFORCE_USERNAME"] = "username"
# ENV["SALESFORCE_PASSWORD"] = "password"
# ENV["SALESFORCE_SECURITY_TOKEN"] = "security token"
# ENV["SALESFORCE_CLIENT_ID"] = "client id"
# ENV["SALESFORCE_CLIENT_SECRET"] = "client secret"
# ENV["SALESFORCE_API_VERSION"] = "41.0"
class SalesforceTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"salesforce"
end
def test_run
assert_result [{"Name" => "world"}], "SELECT Name FROM Account WHERE Name = 'world'"
end
def test_single_quote
assert_result [{"Name" => "'"}], "SELECT Name FROM Account WHERE Name = {var}", var: "'"
end
def test_double_quote
assert_result [{"Name" => '"'}], "SELECT Name FROM Account WHERE Name = {var}", var: '"'
end
def test_backslash
assert_result [{"Name" => "\\"}], "SELECT Name FROM Account WHERE Name = {var}", var: "\\"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/athena_test.rb | test/adapters/athena_test.rb | require_relative "../test_helper"
class AthenaTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"athena"
end
def test_run
assert_result [{"hello" => "world"}], "SELECT 'world' AS hello"
end
def test_audit
if engine_version > 1
assert_audit "SELECT ? AS hello\n\n[\"world\"]", "SELECT {var} AS hello", var: "world"
else
assert_audit "SELECT 'world' AS hello", "SELECT {var} AS hello", var: "world"
end
end
def test_string
assert_result [{"hello" => "world"}], "SELECT {var} AS hello", var: "world"
end
def test_integer
assert_result [{"hello" => "1"}], "SELECT {var} AS hello", var: "1"
end
def test_float
assert_result [{"hello" => "1.5"}], "SELECT {var} AS hello", var: "1.5"
end
def test_time
assert_result [{"hello" => "2022-01-01 08:00:00"}], "SELECT {created_at} AS hello", created_at: "2022-01-01 08:00:00"
end
def test_nil
assert_result [{"hello" => nil}], "SELECT {var} AS hello", var: ""
end
def test_single_quote
assert_result [{"hello" => "'"}], "SELECT {var} AS hello", var: "'"
end
def test_double_quote
assert_result [{"hello" => '"'}], "SELECT {var} AS hello", var: '"'
end
def test_backslash
assert_result [{"hello" => "\\"}], "SELECT {var} AS hello", var: "\\"
end
def test_bad_position
if engine_version > 1
assert_error "Exception parsing query", "SELECT 'world' AS {var}", var: "hello"
else
assert_error "mismatched input", "SELECT 'world' AS {var}", var: "hello"
end
end
def test_quoted
if engine_version > 1
assert_error "Incorrect number of parameters: expected 0 but found 1", "SELECT '{var}' AS hello", var: "world"
end
end
private
def engine_version
Blazer.data_sources[data_source].settings["engine_version"].to_i
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/drill_test.rb | test/adapters/drill_test.rb | require_relative "../test_helper"
class DrillTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"drill"
end
def test_run
assert_result [{"hello" => "world"}], "SELECT 'world' AS hello"
end
def test_audit
assert_audit "SELECT 'world' AS hello", "SELECT {var} AS hello", var: "world"
end
def test_single_quote
assert_result [{"hello" => "'"}], "SELECT {var} AS hello", var: "'"
end
def test_double_quote
assert_result [{"hello" => '"'}], "SELECT {var} AS hello", var: '"'
end
def test_backslash
assert_result [{"hello" => "\\"}], "SELECT {var} AS hello", var: "\\"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/bigquery_test.rb | test/adapters/bigquery_test.rb | require_relative "../test_helper"
class BigqueryTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"bigquery"
end
def test_run
assert_result [{"hello" => "world"}], "SELECT 'world' AS hello"
end
def test_audit
assert_audit "SELECT ? AS hello\n\n[\"world\"]", "SELECT {var} AS hello", var: "world"
end
def test_string
assert_result [{"hello" => "world"}], "SELECT {var} AS hello", var: "world"
end
def test_integer
assert_result [{"hello" => "1"}], "SELECT {var} AS hello", var: "1"
end
def test_float
assert_result [{"hello" => "1.5"}], "SELECT {var} AS hello", var: "1.5"
end
def test_time
assert_result [{"hello" => "2022-01-01 08:00:00 UTC"}], "SELECT {created_at} AS hello", created_at: "2022-01-01 08:00:00"
end
# TODO fix
def test_nil
assert_error "nil params are not supported, must assign optional type", "SELECT {var} AS hello", var: ""
end
def test_single_quote
assert_result [{"hello" => "'"}], "SELECT {var} AS hello", var: "'"
end
def test_double_quote
assert_result [{"hello" => '"'}], "SELECT {var} AS hello", var: '"'
end
def test_backslash
assert_result [{"hello" => "\\"}], "SELECT {var} AS hello", var: "\\"
end
def test_bad_position
assert_bad_position "SELECT 'world' AS {var}", var: "hello"
end
# does not raise error for too many params
def test_quoted
assert_result [{"hello" => "?"}], "SELECT '{var}' AS hello", var: "world"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/sqlite_test.rb | test/adapters/sqlite_test.rb | require_relative "../test_helper"
class SqliteTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"sqlite"
end
def test_run
assert_result [{"hello" => "world"}], "SELECT 'world' AS hello"
end
def test_audit
assert_audit "SELECT $1 AS hello\n\n[\"world\"]", "SELECT {var} AS hello", var: "world"
end
def test_string
assert_result [{"hello" => "world"}], "SELECT {var} AS hello", var: "world"
end
def test_integer
assert_result [{"hello" => "1"}], "SELECT {var} AS hello", var: "1"
end
def test_float
assert_result [{"hello" => "1.5"}], "SELECT {var} AS hello", var: "1.5"
end
def test_time
assert_result [{"hello" => "2022-01-01 08:00:00"}], "SELECT {created_at} AS hello", created_at: "2022-01-01 08:00:00"
end
def test_nil
assert_result [{"hello" => nil}], "SELECT {var} AS hello", var: ""
end
def test_single_quote
assert_result [{"hello" => "'"}], "SELECT {var} AS hello", var: "'"
end
def test_double_quote
assert_result [{"hello" => '"'}], "SELECT {var} AS hello", var: '"'
end
def test_backslash
assert_result [{"hello" => "\\"}], "SELECT {var} AS hello", var: "\\"
end
def test_multiple_variables
assert_result [{"c1" => "one", "c2" => "two", "c3" => "one"}], "SELECT {var} AS c1, {var2} AS c2, {var} AS c3", var: "one", var2: "two"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/mysql_test.rb | test/adapters/mysql_test.rb | require_relative "../test_helper"
class MysqlTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
ENV["MYSQL_ADAPTER"] || "mysql2"
end
def test_run
assert_result [{"hello" => "world"}], "SELECT 'world' AS hello"
end
def test_audit
if prepared_statements?
assert_audit "SELECT ? AS hello\n\n[\"world\"]", "SELECT {var} AS hello", var: "world"
else
assert_audit "SELECT 'world' AS hello", "SELECT {var} AS hello", var: "world"
end
end
def test_string
assert_result [{"hello" => "world"}], "SELECT {var} AS hello", var: "world"
end
def test_integer
assert_result [{"hello" => "1"}], "SELECT {var} AS hello", var: "1"
end
def test_float
assert_result [{"hello" => "1.5"}], "SELECT {var} AS hello", var: "1.5"
end
def test_time
assert_result [{"hello" => "2022-01-01 08:00:00"}], "SELECT {created_at} AS hello", created_at: "2022-01-01 08:00:00"
end
def test_nil
assert_result [{"hello" => nil}], "SELECT {var} AS hello", var: ""
end
def test_single_quote
assert_result [{"hello" => "'"}], "SELECT {var} AS hello", var: "'"
end
def test_double_quote
assert_result [{"hello" => '"'}], "SELECT {var} AS hello", var: '"'
end
def test_backslash
assert_result [{"hello" => "\\"}], "SELECT {var} AS hello", var: "\\"
end
def test_multiple_variables
assert_result [{"c1" => "one", "c2" => "two", "c3" => "one"}], "SELECT {var} AS c1, {var2} AS c2, {var} AS c3", var: "one", var2: "two"
end
def test_bad_position
if prepared_statements?
assert_bad_position "SELECT 'world' AS {var}", var: "hello"
else
assert_result [{"hello"=>"world"}], "SELECT 'world' AS {var}", var: "hello"
end
end
def test_bad_position_before
if prepared_statements?
assert_result [{"?" => "world"}], "SELECT{var}", var: "world"
else
assert_result [{"world" => "world"}], "SELECT{var}", var: "world"
end
end
def test_bad_position_after
if prepared_statements?
assert_bad_position "SELECT {var}456", var: "world"
else
assert_error "You have an error in your SQL syntax", "SELECT {var}456", var: "world"
end
end
def test_quoted
if prepared_statements?
assert_error "Bind parameter count (0) doesn't match number of arguments (1)", "SELECT '{var}' AS hello", var: "world"
else
assert_error "You have an error in your SQL syntax", "SELECT '{var}' AS hello", var: "world"
end
end
def test_binary
# checks for successful response
run_statement "SELECT UNHEX('F6'), 1", format: "html"
end
def test_binary_output
assert_raises(CSV::InvalidEncodingError) do
assert_result [{"hello" => "0xF6"}], "SELECT UNHEX('F6') AS hello"
end
end
def test_json_output
assert_result [{"json" => '{"hello": "world"}'}], %!SELECT JSON_OBJECT('hello', 'world') AS json!
end
private
def prepared_statements?
Blazer.data_sources[data_source].settings["url"].include?("prepared_statements=true")
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/ignite_test.rb | test/adapters/ignite_test.rb | require_relative "../test_helper"
class IgniteTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"ignite"
end
def test_run
assert_result [{"HELLO" => "world"}], "SELECT 'world' AS hello"
end
def test_audit
assert_audit "SELECT ? AS hello\n\n[\"world\"]", "SELECT {var} AS hello", var: "world"
end
def test_string
assert_result [{"HELLO" => "world"}], "SELECT {var} AS hello", var: "world"
end
def test_integer
assert_result [{"HELLO" => "1"}], "SELECT {var} AS hello", var: "1"
end
def test_float
assert_result [{"HELLO" => "1.5"}], "SELECT {var} AS hello", var: "1.5"
end
def test_time
assert_result [{"HELLO" => "2022-01-01 08:00:00 UTC"}], "SELECT {created_at} AS hello", created_at: "2022-01-01 08:00:00"
end
def test_nil
assert_result [{"HELLO" => nil}], "SELECT {var} AS hello", var: ""
end
def test_single_quote
assert_result [{"HELLO" => "'"}], "SELECT {var} AS hello", var: "'"
end
def test_double_quote
assert_result [{"HELLO" => '"'}], "SELECT {var} AS hello", var: '"'
end
def test_backslash
assert_result [{"HELLO" => "\\"}], "SELECT {var} AS hello", var: "\\"
end
def test_bad_position
assert_error "Syntax error in SQL statement", "SELECT 'world' AS {var}", var: "hello"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/redshift_test.rb | test/adapters/redshift_test.rb | require_relative "../test_helper"
class RedshiftTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"redshift"
end
def test_run
assert_result [{"hello" => "world"}], "SELECT 'world' AS hello"
end
def test_audit
assert_audit "SELECT 'world' AS hello", "SELECT {var} AS hello", var: "world"
end
def test_string
assert_result [{"hello" => "world"}], "SELECT {var} AS hello", var: "world"
end
def test_integer
assert_result [{"hello" => "1"}], "SELECT {var} AS hello", var: "1"
end
def test_float
assert_result [{"hello" => "1.5"}], "SELECT {var} AS hello", var: "1.5"
end
def test_time
assert_result [{"hello" => "2022-01-01 08:00:00"}], "SELECT {created_at} AS hello", created_at: "2022-01-01 08:00:00"
end
def test_nil
assert_result [{"hello" => nil}], "SELECT {var} AS hello", var: ""
end
def test_single_quote
assert_result [{"hello" => "'"}], "SELECT {var} AS hello", var: "'"
end
def test_double_quote
assert_result [{"hello" => '"'}], "SELECT {var} AS hello", var: '"'
end
def test_backslash
assert_result [{"hello" => "\\"}], "SELECT {var} AS hello", var: "\\"
end
def test_multiple_variables
assert_result [{"c1" => "one", "c2" => "two", "c3" => "one"}], "SELECT {var} AS c1, {var2} AS c2, {var} AS c3", var: "one", var2: "two"
end
def test_bad_position
assert_error "syntax error at or near", "SELECT 'world' AS {var}", var: "hello"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/cassandra_test.rb | test/adapters/cassandra_test.rb | require_relative "../test_helper"
class CassandraTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"cassandra"
end
def setup
@@once ||= begin
require "cassandra"
cluster = Cassandra.cluster(hosts: ["localhost"])
session = cluster.connect("system")
session.execute("CREATE KEYSPACE IF NOT EXISTS blazer_test WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }")
session = cluster.connect("blazer_test")
session.execute("DROP TABLE IF EXISTS items")
session.execute("CREATE TABLE items (id int, hello text, PRIMARY KEY (id))")
session.execute("INSERT INTO items (id, hello) VALUES (1, 'world')")
session.execute("INSERT INTO items (id, hello) VALUES (2, '''')")
session.execute("INSERT INTO items (id, hello) VALUES (3, '\"')")
session.execute("INSERT INTO items (id, hello) VALUES (4, '\\')")
true
end
end
def test_tables
assert_equal ["items"], tables
end
def test_run
expected = [{"hello" => 'world'}]
assert_result expected, "SELECT hello FROM items WHERE hello = 'world' ALLOW FILTERING"
end
def test_audit
expected = "SELECT hello FROM items WHERE hello = ? ALLOW FILTERING\n\n[\"world\"]"
assert_audit expected, "SELECT hello FROM items WHERE hello = {var} ALLOW FILTERING", var: "world"
end
def test_single_quote
expected = [{"hello" => "'"}]
assert_result expected, "SELECT hello FROM items WHERE hello = {var} ALLOW FILTERING", var: "'"
end
def test_double_quote
expected = [{"hello" => '"'}]
assert_result expected, "SELECT hello FROM items WHERE hello = {var} ALLOW FILTERING", var: '"'
end
def test_backslash
expected = [{"hello" => "\\"}]
assert_result expected, "SELECT hello FROM items WHERE hello = {var} ALLOW FILTERING", var: "\\"
end
def test_bad_position
assert_bad_position "SELECT hello FROM items WHERE hello {var} 'world' ALLOW FILTERING", var: "="
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/presto_test.rb | test/adapters/presto_test.rb | require_relative "../test_helper"
class PrestoTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"presto"
end
def test_tables
# needs different connector
end
def test_run
assert_result [{"hello" => "world"}], "SELECT 'world' AS hello"
end
def test_audit
assert_audit "SELECT 'world' AS hello", "SELECT {var} AS hello", var: "world"
end
def test_single_quote
assert_result [{"hello" => "'"}], "SELECT {var} AS hello", var: "'"
end
def test_double_quote
assert_result [{"hello" => '"'}], "SELECT {var} AS hello", var: '"'
end
def test_backslash
assert_result [{"hello" => "\\"}], "SELECT {var} AS hello", var: "\\"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/neo4j_test.rb | test/adapters/neo4j_test.rb | require_relative "../test_helper"
class Neo4jTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"neo4j"
end
def test_run
assert_result [{"hello" => "world"}], "OPTIONAL MATCH () RETURN 'world' AS `hello`"
end
def test_audit
assert_audit "OPTIONAL MATCH () RETURN $var AS `hello`\n\n{\"var\":\"world\"}", "OPTIONAL MATCH () RETURN {var} AS `hello`", var: "world"
end
def test_string
assert_result [{"hello" => "world"}], "OPTIONAL MATCH () RETURN {var} AS `hello`", var: "world"
end
def test_integer
assert_result [{"hello" => "1"}], "OPTIONAL MATCH () RETURN {var} AS `hello`", var: "1"
end
def test_float
assert_result [{"hello" => "1.5"}], "OPTIONAL MATCH () RETURN {var} AS `hello`", var: "1.5"
end
def test_time
assert_result [{"hello" => "2022-01-01 08:00:00 UTC"}], "OPTIONAL MATCH () RETURN {created_at} AS `hello`", created_at: "2022-01-01 08:00:00"
end
def test_nil
assert_result [{"hello" => nil}], "OPTIONAL MATCH () RETURN {var} AS `hello`", var: ""
end
def test_single_quote
assert_result [{"hello" => "'"}], "OPTIONAL MATCH () RETURN {var} AS `hello`", var: "'"
end
def test_double_quote
assert_result [{"hello" => '"'}], "OPTIONAL MATCH () RETURN {var} AS `hello`", var: '"'
end
def test_backslash
assert_result [{"hello" => "\\"}], "OPTIONAL MATCH () RETURN {var} AS `hello`", var: "\\"
end
def test_bad_position
assert_bad_position "OPTIONAL MATCH () RETURN 'world' AS {var}", var: "hello"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/test/adapters/influxdb_test.rb | test/adapters/influxdb_test.rb | require_relative "../test_helper"
class InfluxdbTest < ActionDispatch::IntegrationTest
include AdapterTest
def data_source
"influxdb"
end
def setup
@@once ||= begin
client = InfluxDB::Client.new(url: "http://localhost:8086/blazer_test")
client.delete_series("items")
client.write_point("items", {values: {value: 1}, tags: {hello: "world"}, timestamp: 0})
client.write_point("items", {values: {value: 1}, tags: {hello: "'"}, timestamp: 0})
client.write_point("items", {values: {value: 1}, tags: {hello: '"'}, timestamp: 0})
# InfluxDB does not like trailing backslashes
# https://github.com/influxdata/influxdb/issues/5231
# https://github.com/influxdata/influxdb-ruby/issues/225
client.write_point("items", {values: {value: 1}, tags: {hello: "\\a"}, timestamp: 0})
true
end
end
def test_run
expected = [{"time" => "1970-01-01 00:00:00 UTC", "hello" => "world", "value" => "1"}]
assert_result expected, "SELECT * FROM items WHERE hello = 'world'"
end
def test_audit
assert_audit "SELECT * FROM items WHERE hello = 'world'", "SELECT * FROM items WHERE hello = {var}", var: "world"
end
def test_single_quote
expected = [{"time" => "1970-01-01 00:00:00 UTC", "hello" => "'", "value" => "1"}]
assert_result expected, "SELECT * FROM items WHERE hello = {var}", var: "'"
end
def test_double_quote
expected = [{"time" => "1970-01-01 00:00:00 UTC", "hello" => '"', "value" => "1"}]
assert_result expected, "SELECT * FROM items WHERE hello = {var}", var: '"'
end
def test_backslash
expected = [{"time" => "1970-01-01 00:00:00 UTC", "hello" => "\\a", "value" => "1"}]
assert_result expected, "SELECT * FROM items WHERE hello = {var}", var: "\\a"
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer.rb | lib/blazer.rb | # dependencies
require "chartkick"
require "safely/core"
# stdlib
require "csv"
require "digest/sha2"
require "json"
require "yaml"
# modules
require_relative "blazer/version"
require_relative "blazer/data_source"
require_relative "blazer/result"
require_relative "blazer/result_cache"
require_relative "blazer/run_statement"
require_relative "blazer/statement"
# adapters
require_relative "blazer/adapters/base_adapter"
require_relative "blazer/adapters/athena_adapter"
require_relative "blazer/adapters/bigquery_adapter"
require_relative "blazer/adapters/cassandra_adapter"
require_relative "blazer/adapters/drill_adapter"
require_relative "blazer/adapters/druid_adapter"
require_relative "blazer/adapters/elasticsearch_adapter"
require_relative "blazer/adapters/hive_adapter"
require_relative "blazer/adapters/ignite_adapter"
require_relative "blazer/adapters/influxdb_adapter"
require_relative "blazer/adapters/neo4j_adapter"
require_relative "blazer/adapters/opensearch_adapter"
require_relative "blazer/adapters/presto_adapter"
require_relative "blazer/adapters/salesforce_adapter"
require_relative "blazer/adapters/soda_adapter"
require_relative "blazer/adapters/spark_adapter"
require_relative "blazer/adapters/sql_adapter"
require_relative "blazer/adapters/snowflake_adapter"
# engine
require_relative "blazer/engine"
module Blazer
class Error < StandardError; end
class UploadError < Error; end
class TimeoutNotSupported < Error; end
# actionmailer optional
autoload :CheckMailer, "blazer/check_mailer"
# net/http optional
autoload :SlackNotifier, "blazer/slack_notifier"
# activejob optional
autoload :RunStatementJob, "blazer/run_statement_job"
class << self
attr_accessor :audit
attr_reader :time_zone
attr_accessor :user_name
attr_writer :user_class
attr_writer :user_method
attr_accessor :before_action
attr_accessor :from_email
attr_accessor :cache
attr_accessor :transform_statement
attr_accessor :transform_variable
attr_accessor :check_schedules
attr_accessor :anomaly_checks
attr_accessor :forecasting
attr_accessor :async
attr_accessor :images
attr_accessor :override_csp
attr_accessor :slack_oauth_token
attr_accessor :slack_webhook_url
attr_accessor :mapbox_access_token
end
self.audit = true
self.user_name = :name
self.check_schedules = ["5 minutes", "1 hour", "1 day"]
self.anomaly_checks = false
self.forecasting = false
self.async = false
self.images = false
self.override_csp = false
VARIABLE_MESSAGE = "Variable cannot be used in this position"
TIMEOUT_MESSAGE = "Query timed out :("
TIMEOUT_ERRORS = [
"canceling statement due to statement timeout", # postgres
"canceling statement due to conflict with recovery", # postgres
"cancelled on user's request", # redshift
"canceled on user's request", # redshift
"system requested abort", # redshift
"maximum statement execution time exceeded" # mysql
]
def self.time_zone=(time_zone)
@time_zone = time_zone.is_a?(ActiveSupport::TimeZone) ? time_zone : ActiveSupport::TimeZone[time_zone.to_s]
end
def self.user_class
if !defined?(@user_class)
@user_class = settings.key?("user_class") ? settings["user_class"] : (User.name rescue nil)
end
@user_class
end
def self.user_method
if !defined?(@user_method)
@user_method = settings["user_method"]
if user_class
@user_method ||= "current_#{user_class.to_s.downcase.singularize}"
end
end
@user_method
end
def self.settings
@settings ||= begin
path = Rails.root.join("config", "blazer.yml").to_s
if File.exist?(path)
YAML.safe_load(ERB.new(File.read(path)).result, aliases: true)
else
{}
end
end
end
def self.data_sources
@data_sources ||= begin
ds = Hash.new { |hash, key| raise Blazer::Error, "Unknown data source: #{key}" }
settings["data_sources"].each do |id, s|
ds[id] = Blazer::DataSource.new(id, s)
end
ds
end
end
def self.run_checks(schedule: nil)
checks = Blazer::Check.includes(:query)
checks = checks.where(schedule: schedule) if schedule
checks.find_each do |check|
next if check.state == "disabled"
Safely.safely { run_check(check) }
end
end
def self.run_check(check)
tries = 1
ActiveSupport::Notifications.instrument("run_check.blazer", check_id: check.id, query_id: check.query.id, state_was: check.state) do |instrument|
# try 3 times on timeout errors
statement = check.query.statement_object
data_source = statement.data_source
while tries <= 3
result = data_source.run_statement(statement, refresh_cache: true, check: check, query: check.query)
if result.timed_out?
Rails.logger.info "[blazer timeout] query=#{check.query.name}"
tries += 1
sleep(10)
elsif result.error.to_s.start_with?("PG::ConnectionBad")
data_source.reconnect
Rails.logger.info "[blazer reconnect] query=#{check.query.name}"
tries += 1
sleep(10)
else
break
end
end
begin
check.reload # in case state has changed since job started
check.update_state(result)
rescue ActiveRecord::RecordNotFound
# check deleted
end
# TODO use proper logfmt
Rails.logger.info "[blazer check] query=#{check.query.name} state=#{check.state} rows=#{result.rows.try(:size)} error=#{result.error}"
# should be no variables
instrument[:statement] = statement.bind_statement
instrument[:data_source] = data_source
instrument[:state] = check.state
instrument[:rows] = result.rows.try(:size)
instrument[:error] = result.error
instrument[:tries] = tries
end
end
def self.send_failing_checks
emails = {}
slack_channels = {}
Blazer::Check.includes(:query).where(state: ["failing", "error", "timed out", "disabled"]).find_each do |check|
check.split_emails.each do |email|
(emails[email] ||= []) << check
end
check.split_slack_channels.each do |channel|
(slack_channels[channel] ||= []) << check
end
end
emails.each do |email, checks|
Safely.safely do
Blazer::CheckMailer.failing_checks(email, checks).deliver_now
end
end
slack_channels.each do |channel, checks|
Safely.safely do
Blazer::SlackNotifier.failing_checks(channel, checks)
end
end
end
def self.slack?
slack_oauth_token.present? || slack_webhook_url.present?
end
# TODO show warning on invalid access token
def self.maps?
mapbox_access_token.present? && mapbox_access_token.start_with?("pk.")
end
def self.uploads?
settings.key?("uploads")
end
def self.uploads_connection
raise "Empty url for uploads" unless settings.dig("uploads", "url")
Blazer::UploadsConnection.connection
end
def self.uploads_schema
settings.dig("uploads", "schema") || "uploads"
end
def self.uploads_table_name(name)
uploads_connection.quote_table_name("#{uploads_schema}.#{name}")
end
def self.adapters
@adapters ||= {}
end
def self.register_adapter(name, adapter)
adapters[name] = adapter
end
# Registry of anomaly detectors, keyed by detector name.
def self.anomaly_detectors
  @anomaly_detectors = {} if @anomaly_detectors.nil?
  @anomaly_detectors
end
# Registers an anomaly detector. The block receives a sorted series of
# [time, value] pairs and returns truthy when the latest point is anomalous.
def self.register_anomaly_detector(name, &anomaly_detector)
  anomaly_detectors.store(name, anomaly_detector)
end
# Registry of forecasters, keyed by forecaster name.
def self.forecasters
  @forecasters = {} if @forecasters.nil?
  @forecasters
end
# Registers a forecaster. The block receives a series and a count: of
# future points to generate.
def self.register_forecaster(name, &forecaster)
  forecasters.store(name, forecaster)
end
# Marks queries that have not been viewed (audited) in the last 90 days
# as archived. Requires auditing and the status column from Blazer 2.3+.
def self.archive_queries
  raise "Audits must be enabled to archive" unless Blazer.audit
  unless Blazer::Query.column_names.include?("status")
    raise "Missing status column - see https://github.com/ankane/blazer#23"
  end

  recently_viewed_ids = Blazer::Audit.where("created_at > ?", 90.days.ago).group(:query_id).count.keys.compact
  Blazer::Query.active.where.not(id: recently_viewed_ids).update_all(status: "archived")
end
# private
# Monotonic clock reading in (float) seconds — used for measuring
# durations, since it is unaffected by system clock adjustments.
def self.monotonic_time
  Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_second)
end
end
require_relative "blazer/adapters"
require_relative "blazer/anomaly_detectors"
require_relative "blazer/forecasters"
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/generators/blazer/install_generator.rb | lib/generators/blazer/install_generator.rb | require "rails/generators/active_record"
module Blazer
module Generators
# Generator for `rails generate blazer:install`.
# Creates the Blazer database migration and the default config file.
class InstallGenerator < Rails::Generators::Base
  include ActiveRecord::Generators::Migration

  source_root File.join(__dir__, "templates")

  # Copies the install migration, stamped with the current Active Record version.
  def copy_migration
    migration_template "install.rb", "db/migrate/install_blazer.rb", migration_version: migration_version
  end

  # Copies the default Blazer configuration into the host app.
  def copy_config
    template "config.yml", "config/blazer.yml"
  end

  # Migration superclass version string, e.g. "[7.1]".
  def migration_version
    "[#{ActiveRecord::VERSION::MAJOR}.#{ActiveRecord::VERSION::MINOR}]"
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/generators/blazer/uploads_generator.rb | lib/generators/blazer/uploads_generator.rb | require "rails/generators/active_record"
module Blazer
module Generators
# Generator for `rails generate blazer:uploads`.
# Creates the migration for the optional uploads feature.
class UploadsGenerator < Rails::Generators::Base
  include ActiveRecord::Generators::Migration

  source_root File.join(__dir__, "templates")

  # Copies the uploads migration, stamped with the current Active Record version.
  def copy_migration
    migration_template "uploads.rb", "db/migrate/create_blazer_uploads.rb", migration_version: migration_version
  end

  # Migration superclass version string, e.g. "[7.1]".
  def migration_version
    "[#{ActiveRecord::VERSION::MAJOR}.#{ActiveRecord::VERSION::MINOR}]"
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/run_statement_job.rb | lib/blazer/run_statement_job.rb | module Blazer
# Background job that runs a statement and stores the result in the
# run cache (keyed by options[:run_id]) for later retrieval.
class RunStatementJob < ActiveJob::Base
  # always use the in-process async adapter, regardless of the app's default
  self.queue_adapter = :async

  # @param data_source_id [String] id of the configured data source
  # @param statement [String] the statement text
  # @param options [Hash] run options; :values (variable values) is consumed
  #   here, :run_id identifies the cache slot for the result
  def perform(data_source_id, statement, options)
    statement = Blazer::Statement.new(statement, data_source_id)
    statement.values = options.delete(:values)
    data_source = statement.data_source
    begin
      # check out an Active Record connection for the duration of the run
      ActiveRecord::Base.connection_pool.with_connection do
        Blazer::RunStatement.new.perform(statement, options)
      end
    rescue Exception => e
      # deliberately rescues Exception (not just StandardError) so the
      # waiting client always sees a result; the error is re-raised after
      result = Blazer::Result.new(data_source, [], [], "Unknown error", nil, false)
      data_source.result_cache.write_run(options[:run_id], result)
      raise e
    end
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/version.rb | lib/blazer/version.rb | module Blazer
# Gem version.
VERSION = "3.3.0"
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/slack_notifier.rb | lib/blazer/slack_notifier.rb | require "net/http"
module Blazer
# Delivers check notifications to Slack, via either an incoming webhook
# or the chat.postMessage API (whichever credential is configured).
class SlackNotifier
  # Notifies each of the check's Slack channels about a state change.
  def self.state_change(check, state, state_was, rows_count, error, check_type)
    check.split_slack_channels.each do |channel|
      text =
        if error
          error
        elsif rows_count > 0 && check_type == "bad_data"
          pluralize(rows_count, "row")
        end

      payload = {
        channel: channel,
        attachments: [
          {
            title: escape("Check #{state.titleize}: #{check.query.name}"),
            title_link: query_url(check.query_id),
            text: escape(text),
            color: state == "passing" ? "good" : "danger"
          }
        ]
      }

      post(payload)
    end
  end

  # Sends a digest of failing checks to a single channel, one line per
  # check, each linking to its query.
  def self.failing_checks(channel, checks)
    text =
      checks.map do |check|
        "<#{query_url(check.query_id)}|#{escape(check.query.name)}> #{escape(check.state)}"
      end

    payload = {
      channel: channel,
      attachments: [
        {
          title: escape("#{pluralize(checks.size, "Check")} Failing"),
          text: text.join("\n"),
          color: "warning"
        }
      ]
    }

    post(payload)
  end

  # https://api.slack.com/docs/message-formatting#how_to_escape_characters
  # - Replace the ampersand, &, with &amp;
  # - Replace the less-than sign, < with &lt;
  # - Replace the greater-than sign, > with &gt;
  # That's it. Don't HTML entity-encode the entire message.
  # & is replaced first so the entities added here aren't double-escaped.
  # Returns nil when str is nil.
  def self.escape(str)
    str.gsub("&", "&amp;").gsub("<", "&lt;").gsub(">", "&gt;") if str
  end

  def self.pluralize(*args)
    ActionController::Base.helpers.pluralize(*args)
  end

  # checks shouldn't have variables, but in any case,
  # avoid passing variable params to url helpers
  # (known unsafe parameters are removed, but still not ideal)
  def self.query_url(id)
    Blazer::Engine.routes.url_helpers.query_url(id, ActionMailer::Base.default_url_options)
  end

  # TODO use return value
  # Returns true when Slack accepted the message.
  def self.post(payload)
    if Blazer.slack_webhook_url.present?
      response = post_api(Blazer.slack_webhook_url, payload, {})
      response.is_a?(Net::HTTPSuccess) && response.body == "ok"
    else
      headers = {
        "Authorization" => "Bearer #{Blazer.slack_oauth_token}",
        "Content-type" => "application/json"
      }
      response = post_api("https://slack.com/api/chat.postMessage", payload, headers)
      response.is_a?(Net::HTTPSuccess) && (JSON.parse(response.body)["ok"] rescue false)
    end
  end

  # POSTs JSON to url with short timeouts so check runs don't hang on Slack.
  def self.post_api(url, payload, headers)
    uri = URI.parse(url)
    http = Net::HTTP.new(uri.host, uri.port)
    http.use_ssl = true
    http.open_timeout = 3
    http.read_timeout = 5
    http.post(uri.request_uri, payload.to_json, headers)
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/check_mailer.rb | lib/blazer/check_mailer.rb | module Blazer
# Mailer for check notifications.
class CheckMailer < ActionMailer::Base
  include ActionView::Helpers::TextHelper

  default from: Blazer.from_email if Blazer.from_email
  layout false

  # Notification that a check changed state (e.g. passing -> failing),
  # including the result data for the email template.
  def state_change(check, state, state_was, rows_count, error, columns, rows, column_types, check_type)
    @check = check
    @state = state
    @state_was = state_was
    @rows_count = rows_count
    @error = error
    @columns = columns
    @rows = rows
    @column_types = column_types
    @check_type = check_type
    mail to: check.emails, reply_to: check.emails, subject: "Check #{state.titleize}: #{check.query.name}"
  end

  # Digest listing all currently failing checks for a recipient.
  def failing_checks(email, checks)
    @checks = checks
    # add reply_to for mailing lists
    mail to: email, reply_to: email, subject: "#{pluralize(checks.size, "Check")} Failing"
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/data_source.rb | lib/blazer/data_source.rb | module Blazer
# Wraps one configured data source (an entry in blazer.yml): exposes its
# settings, caching policy, and variable quoting/binding rules, and
# forwards engine-specific operations to its adapter instance.
class DataSource
  extend Forwardable

  attr_reader :id, :settings

  # engine-specific operations are handled by the adapter
  def_delegators :adapter_instance, :schema, :tables, :preview_statement, :reconnect, :cost, :explain, :cancel, :supports_cohort_analysis?, :cohort_analysis_statement

  def initialize(id, settings)
    @id = id
    @settings = settings
  end

  # Adapter name from settings, or detected from the url scheme.
  def adapter
    settings["adapter"] || detect_adapter
  end

  # Display name (falls back to the data source id).
  def name
    settings["name"] || @id
  end

  def linked_columns
    settings["linked_columns"] || {}
  end

  def smart_columns
    settings["smart_columns"] || {}
  end

  def smart_variables
    settings["smart_variables"] || {}
  end

  def variable_defaults
    settings["variable_defaults"] || {}
  end

  # Statement timeout (nil means no Blazer-level timeout).
  def timeout
    settings["timeout"]
  end

  # Normalized cache settings. Accepts a hash ("mode", "expires_in", ...),
  # a bare value (treated as expires_in with mode "all"), or nothing (off).
  def cache
    @cache ||= begin
      if settings["cache"].is_a?(Hash)
        settings["cache"]
      elsif settings["cache"]
        {
          "mode" => "all",
          "expires_in" => settings["cache"]
        }
      else
        {
          "mode" => "off"
        }
      end
    end
  end

  # "all", "slow", or "off"
  def cache_mode
    cache["mode"]
  end

  # Minutes to keep cached results (default 60).
  def cache_expires_in
    (cache["expires_in"] || 60).to_f
  end

  # Seconds a query must run before being cached in "slow" mode (default 15).
  def cache_slow_threshold
    (cache["slow_threshold"] || 15).to_f
  end

  def local_time_suffix
    @local_time_suffix ||= Array(settings["local_time_suffix"])
  end

  def result_cache
    @result_cache ||= Blazer::ResultCache.new(self)
  end

  def run_results(run_id)
    result_cache.read_run(run_id)
  end

  def delete_results(run_id)
    result_cache.delete_run(run_id)
  end

  # Replaces {var} placeholders with quoted values (used for display and
  # for adapters without parameter binding).
  def sub_variables(statement, vars)
    statement = statement.dup
    vars.each do |var, value|
      # use block form to disable back-references
      statement.gsub!("{#{var}}") { quote(value) }
    end
    statement
  end

  # Runs a statement, consulting/updating the result cache per the cache
  # mode, and writes the result to the run cache for async requests.
  def run_statement(statement, options = {})
    statement = Statement.new(statement, self) if statement.is_a?(String)
    statement.bind unless statement.bind_statement

    result = nil
    if cache_mode != "off"
      if options[:refresh_cache]
        clear_cache(statement) # for checks
      else
        result = result_cache.read_statement(statement)
      end
    end

    unless result
      # comment is appended to the statement so runs are attributable
      # in the database's query logs / process list
      comment = "blazer".dup
      if options[:user].respond_to?(:id)
        comment << ",user_id:#{options[:user].id}"
      end
      if options[:user].respond_to?(Blazer.user_name)
        # only include letters, numbers, and spaces to prevent injection
        comment << ",user_name:#{options[:user].send(Blazer.user_name).to_s.gsub(/[^a-zA-Z0-9 ]/, "")}"
      end
      if options[:query].respond_to?(:id)
        comment << ",query_id:#{options[:query].id}"
      end
      if options[:check]
        comment << ",check_id:#{options[:check].id},check_emails:#{options[:check].emails}"
      end
      if options[:run_id]
        comment << ",run_id:#{options[:run_id]}"
      end
      result = run_statement_helper(statement, comment, options)
    end

    if options[:async] && options[:run_id]
      # store the result so the polling request can pick it up;
      # fall back to an error result if storing fails
      run_id = options[:run_id]
      begin
        result_cache.write_run(run_id, result)
      rescue
        result = Blazer::Result.new(self, [], [], "Error storing the results of this query :(", nil, false)
        result_cache.write_run(run_id, result)
      end
    end

    result
  end

  def clear_cache(statement)
    result_cache.delete_statement(statement)
  end

  # Quotes a value for inline SQL interpolation, per the adapter's
  # quoting style (symbol, callable, or nil when unsupported).
  def quote(value)
    if quoting == :backslash_escape || quoting == :single_quote_escape
      # only need to support types generated by process_vars
      if value.is_a?(Integer) || value.is_a?(Float)
        value.to_s
      elsif value.nil?
        "NULL"
      else
        value = value.to_formatted_s(:db) if value.is_a?(ActiveSupport::TimeWithZone)
        if quoting == :backslash_escape
          "'#{value.gsub("\\") { "\\\\" }.gsub("'") { "\\'" }}'"
        else
          "'#{value.gsub("'", "''")}'"
        end
      end
    elsif quoting.respond_to?(:call)
      quoting.call(value)
    elsif quoting.nil?
      raise Blazer::Error, "Quoting not specified"
    else
      raise Blazer::Error, "Unknown quoting"
    end
  end

  # Converts {var} placeholders to the adapter's bind-parameter style.
  # Returns [statement, bind values].
  def bind_params(statement, variables)
    if parameter_binding == :positional
      # "?" placeholders; values ordered by their position in the statement
      locations = []
      variables.each do |k, v|
        i = 0
        while (idx = statement.index("{#{k}}", i))
          locations << [v, idx]
          i = idx + 1
        end
      end
      variables.each do |k, v|
        statement = statement.gsub("{#{k}}", "?")
      end
      [statement, locations.sort_by(&:last).map(&:first)]
    elsif parameter_binding == :numeric
      # "$1", "$2", ... placeholders; one value per variable
      variables.each_with_index do |(k, v), i|
        # add trailing space if followed by digit
        # try to keep minimal to avoid fixing invalid queries like SELECT{var}
        statement = statement.gsub(/#{Regexp.escape("{#{k}}")}(\d)/, "$#{i + 1} \\1").gsub("{#{k}}", "$#{i + 1}")
      end
      [statement, variables.values]
    elsif parameter_binding.respond_to?(:call)
      parameter_binding.call(statement, variables)
    elsif parameter_binding.nil?
      # no binding support: substitute quoted values directly
      [sub_variables(statement, variables), []]
    else
      raise Blazer::Error, "Unknown bind parameters"
    end
  end

  protected

  def adapter_instance
    @adapter_instance ||= begin
      # TODO add required settings to adapters
      unless settings["url"] || Rails.env.development? || ["bigquery", "athena", "snowflake", "salesforce"].include?(settings["adapter"])
        raise Blazer::Error, "Empty url for data source: #{id}"
      end
      unless Blazer.adapters[adapter]
        raise Blazer::Error, "Unknown adapter"
      end
      Blazer.adapters[adapter].new(self)
    end
  end

  def quoting
    @quoting ||= adapter_instance.quoting
  end

  def parameter_binding
    @parameter_binding ||= adapter_instance.parameter_binding
  end

  # Runs the statement against the adapter, timing it and caching the
  # result when the cache policy applies.
  def run_statement_helper(statement, comment, options)
    start_time = Blazer.monotonic_time
    columns, rows, error =
      if adapter_instance.parameter_binding
        adapter_instance.run_statement(statement.bind_statement, comment, statement.bind_values)
      else
        adapter_instance.run_statement(statement.bind_statement, comment)
      end
    duration = Blazer.monotonic_time - start_time

    cache = !error && (cache_mode == "all" || (cache_mode == "slow" && duration >= cache_slow_threshold))
    result = Blazer::Result.new(self, columns, rows, error, cache ? Time.now : nil, false)

    if cache && adapter_instance.cachable?(statement.bind_statement)
      begin
        result_cache.write_statement(statement, result, expires_in: cache_expires_in.to_f * 60)
        # set just_cached after caching
        result.just_cached = true
      rescue
        # do nothing
      end
    end

    # the freshly-run result is returned without a cached_at timestamp,
    # even when it was just written to the cache
    result.cached_at = nil
    result
  end

  # TODO check for adapter with same name, default to sql
  def detect_adapter
    scheme = settings["url"].to_s.split("://").first
    case scheme
    when "presto", "trino", "cassandra", "ignite"
      scheme
    else
      "sql"
    end
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/run_statement.rb | lib/blazer/run_statement.rb | module Blazer
# Executes a bound statement against its data source, recording an audit
# entry (when enabled) and updating any checks attached to the query.
class RunStatement
  # Returns the Blazer::Result of running the statement.
  def perform(statement, options = {})
    query = options[:query]
    data_source = statement.data_source
    statement.bind

    # audit
    if Blazer.audit
      audit_statement = statement.bind_statement
      audit_statement += "\n\n#{statement.bind_values.to_json}" if statement.bind_values.any?
      audit = Blazer::Audit.new(statement: audit_statement)
      audit.query = query
      audit.data_source = data_source.id
      # only set user if present to avoid error with Rails 7.1 when no user model
      audit.user = options[:user] unless options[:user].nil?
      audit.save!
    end

    start_time = Blazer.monotonic_time
    result = data_source.run_statement(statement, options)
    duration = Blazer.monotonic_time - start_time

    if Blazer.audit
      # optional audit columns are only written when the schema has them
      audit.duration = duration if audit.respond_to?(:duration=)
      audit.error = result.error if audit.respond_to?(:error=)
      audit.timed_out = result.timed_out? if audit.respond_to?(:timed_out=)
      audit.cached = result.cached? if audit.respond_to?(:cached=)
      if !result.cached? && duration >= 10
        # only fetch the query cost for slow, uncached runs
        audit.cost = data_source.cost(statement) if audit.respond_to?(:cost=)
      end
      audit.save! if audit.changed?
    end

    # update check state for fresh, variable-free query runs
    if query && !result.timed_out? && !result.cached? && !query.variables.any?
      query.checks.each do |check|
        check.update_state(result)
      end
    end

    result
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/anomaly_detectors.rb | lib/blazer/anomaly_detectors.rb | Blazer.register_anomaly_detector "anomaly_detection" do |series|
anomalies = AnomalyDetection.detect(series.to_h, period: :auto)
anomalies.include?(series.last[0])
end
# Built-in anomaly detectors. Each block receives a sorted series of
# [time, value] pairs and returns true when the latest point is anomalous.

Blazer.register_anomaly_detector "prophet" do |series|
  # fit Prophet on all but the last point, then flag the last point when
  # it falls outside the 99% prediction interval
  df = Rover::DataFrame.new(series[0..-2].map { |v| {"ds" => v[0], "y" => v[1]} })
  m = Prophet.new(interval_width: 0.99)
  m.logger.level = ::Logger::FATAL # no logging
  m.fit(df)
  future = Rover::DataFrame.new(series[-1..-1].map { |v| {"ds" => v[0]} })
  forecast = m.predict(future).to_a[0]
  lower = forecast["yhat_lower"]
  upper = forecast["yhat_upper"]
  value = series.last[1]
  value < lower || value > upper
end

Blazer.register_anomaly_detector "trend" do |series|
  # anomalous when the Trend service reports the latest timestamp
  anomalies = Trend.anomalies(series.to_h)
  anomalies.include?(series.last[0])
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/result.rb | lib/blazer/result.rb | module Blazer
# The outcome of running a statement: columns, rows, and any error, plus
# chart-type detection, smart-column labeling, forecasting, and anomaly
# detection over the result data.
class Result
  attr_reader :data_source, :columns, :rows, :error, :forecast_error
  attr_accessor :cached_at, :just_cached

  def initialize(data_source, columns, rows, error, cached_at, just_cached)
    @data_source = data_source
    @columns = columns.dup
    @rows = rows
    @error = error
    @cached_at = cached_at
    @just_cached = just_cached
  end

  def timed_out?
    error == Blazer::TIMEOUT_MESSAGE
  end

  def cached?
    cached_at.present?
  end

  # Maps column name => {value => label} for columns configured as smart
  # columns on this data source (or a data source it inherits smart
  # settings from). Labels come from a hash or a lookup query.
  def smart_values
    @smart_values ||= begin
      smart_values = {}
      columns.each_with_index do |key, i|
        smart_columns_data_source =
          ([data_source] + Array(data_source.settings["inherit_smart_settings"]).map { |ds| Blazer.data_sources[ds] }).find { |ds| ds.smart_columns[key] }
        if smart_columns_data_source
          query = smart_columns_data_source.smart_columns[key]
          res =
            if query.is_a?(Hash)
              query
            else
              values = rows.map { |r| r[i] }.compact.uniq
              result = smart_columns_data_source.run_statement(ActiveRecord::Base.send(:sanitize_sql_array, [query.sub("{value}", "(?)"), values]))
              result.rows
            end
          smart_values[key] = res.to_h { |k, v| [k.nil? ? k : k.to_s, v] }
        end
      end
      smart_values
    end
  end

  # Per-column type ("numeric", "time", "string", "binary", or nil when
  # the column has no non-nil values), inferred from the first non-nil
  # value in each column.
  def column_types
    @column_types ||= begin
      columns.each_with_index.map do |k, i|
        v = (rows.find { |r| r[i] } || {})[i]
        if smart_values[k]
          "string"
        elsif v.is_a?(Numeric)
          "numeric"
        elsif v.is_a?(Time) || v.is_a?(Date)
          "time"
        elsif v.nil?
          nil
        elsif v.is_a?(String) && v.encoding == Encoding::BINARY
          "binary"
        else
          "string"
        end
      end
    end
  end

  # Chooses a chart type from the column type signature
  # (nil when no chart applies).
  def chart_type
    @chart_type ||= begin
      if column_types.compact.size >= 2 && column_types.compact == ["time"] + (column_types.compact.size - 1).times.map { "numeric" }
        "line"
      elsif column_types == ["time", "string", "numeric"]
        "line2"
      elsif column_types == ["string", "numeric"] && @columns.last == "pie"
        "pie"
      elsif column_types.compact.size >= 2 && column_types == ["string"] + (column_types.compact.size - 1).times.map { "numeric" }
        "bar"
      elsif column_types == ["string", "string", "numeric"]
        "bar2"
      elsif column_types == ["numeric", "numeric"]
        "scatter"
      end
    end
  end

  # Forecasting applies to time/numeric pairs with at least 10 rows.
  def forecastable?
    @forecastable ||= Blazer.forecasting && column_types == ["time", "numeric"] && @rows.size >= 10
  end

  # TODO cache it?
  # don't want to put result data (even hashed version)
  # into cache without developer opt-in
  #
  # Generates a forecast (25% of the series length, clamped to 30..365
  # points) and prepends the forecasted points to @rows in a third
  # "forecast" column. Returns nil and sets forecast_error on failure.
  def forecast
    count = (@rows.size * 0.25).round.clamp(30, 365)
    forecaster = Blazer.forecasters.fetch(Blazer.forecasting)
    forecast = forecaster.call(@rows.to_h, count: count)

    # round integers
    if @rows[0][1].is_a?(Integer)
      forecast = forecast.map { |k, v| [k, v.round] }.to_h
    end

    @rows.each do |row|
      row[2] = nil
    end
    @rows.unshift(*forecast.map { |k, v| [k, nil, v] })
    @columns << "forecast"

    # reset cache
    @column_types = nil
    @chart_type = nil

    forecast
  rescue => e
    @forecast_error = String.new("Error generating forecast")
    @forecast_error << ": #{e.message.sub("Invalid parameter: ", "")}"
    nil
  end

  # Runs anomaly detection over each series of a line chart.
  # Returns [anomaly (true/false/nil), message].
  def detect_anomaly
    anomaly = nil
    message = nil

    if rows.empty?
      message = "No data"
    else
      if chart_type == "line" || chart_type == "line2"
        # build one series per numeric column (line) or per group (line2)
        series = []
        if chart_type == "line"
          columns[1..-1].each_with_index.each do |k, i|
            series << {name: k, data: rows.map { |r| [r[0], r[i + 1]] }}
          end
        else
          rows.group_by { |r| v = r[1]; (smart_values[columns[1]] || {})[v.to_s] || v }.each_with_index.map do |(name, v), i|
            series << {name: name, data: v.map { |v2| [v2[0], v2[2]] }}
          end
        end

        current_series = nil
        begin
          anomalies = []
          series.each do |s|
            current_series = s[:name]
            anomalies << s[:name] if anomaly?(s[:data])
          end
          anomaly = anomalies.any?
          if anomaly
            if anomalies.size == 1
              message = "Anomaly detected in #{anomalies.first}"
            else
              message = "Anomalies detected in #{anomalies.to_sentence}"
            end
          else
            message = "No anomalies detected"
          end
        rescue => e
          # report which series failed; surface the error in development
          message = "#{current_series}: #{e.message}"
          raise e if Rails.env.development?
        end
      else
        message = "Bad format"
      end
    end

    [anomaly, message]
  end

  # Runs the configured anomaly detector on a single series, after
  # dropping nil timestamps and sorting by time.
  def anomaly?(series)
    series = series.reject { |v| v[0].nil? }.sort_by { |v| v[0] }
    anomaly_detector = Blazer.anomaly_detectors.fetch(Blazer.anomaly_checks)
    anomaly_detector.call(series)
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/statement.rb | lib/blazer/statement.rb | module Blazer
# A statement (possibly containing {variable} placeholders) tied to a
# data source, plus the values bound to those variables.
class Statement
  attr_reader :statement, :data_source, :bind_statement, :bind_values
  attr_accessor :values

  # data_source may be a Blazer::DataSource or a data source id string
  def initialize(statement, data_source = nil)
    @statement = statement
    @data_source = data_source.is_a?(String) ? Blazer.data_sources[data_source] : data_source
    @values = {}
  end

  # Unique variable names found in the statement.
  def variables
    # strip commented out lines
    # and regex {1} or {1,2}
    @variables ||= statement.to_s.gsub(/\-\-.+/, "").gsub(/\/\*.+\*\//m, "").scan(/\{\w*?\}/i).map { |v| v[1...-1] }.reject { |v| /\A\d+(\,\d+)?\z/.match(v) || v.empty? }.uniq
  end

  # Populates @values from request params: only string params are used;
  # *_at variables are parsed as times and numeric-looking strings are
  # coerced to Integer/Float.
  def add_values(var_params)
    variables.each do |var|
      value = var_params[var].presence
      value = nil unless value.is_a?(String) # ignore arrays and hashes
      if value
        if ["start_time", "end_time"].include?(var)
          value = value.to_s.gsub(" ", "+") # fix for Quip bug
        end

        if var.end_with?("_at")
          begin
            value = Blazer.time_zone.parse(value)
          rescue
            # do nothing
          end
        end

        unless value.is_a?(ActiveSupport::TimeWithZone)
          if value.match?(/\A\d+\z/)
            # check no leading zeros (when not zero)
            if value == value.to_i.to_s
              value = value.to_i
            end
          elsif value.match?(/\A\d+\.\d+\z/)
            value = value.to_f
          end
        end
      end
      value = Blazer.transform_variable.call(var, value) if Blazer.transform_variable
      @values[var] = value
    end
  end

  # True when the statement opts into cohort analysis via a
  # /* cohort analysis */ comment.
  def cohort_analysis?
    /\/\*\s*cohort analysis\s*\*\//i.match?(statement)
  end

  # Wraps the statement in the adapter's cohort analysis SQL.
  def apply_cohort_analysis(period:, days:)
    @statement = data_source.cohort_analysis_statement(statement, period: period, days: days).sub("{placeholder}") { statement }
  end

  # should probably transform before cohort analysis
  # but keep previous order for now
  def transformed_statement
    statement = self.statement.dup
    Blazer.transform_statement.call(data_source, statement) if Blazer.transform_statement
    statement
  end

  # Computes bind_statement/bind_values using the data source's
  # parameter binding style.
  def bind
    @bind_statement, @bind_values = data_source.bind_params(transformed_statement, values)
  end

  # Statement with values substituted inline (for display only).
  def display_statement
    data_source.sub_variables(transformed_statement, values)
  end

  def clear_cache
    bind if bind_statement.nil?
    data_source.clear_cache(self)
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/forecasters.rb | lib/blazer/forecasters.rb | Blazer.register_forecaster "prophet" do |series, count:|
Prophet.forecast(series, count: count)
end
# Forecaster backed by the Trend service: receives a {time => value}
# series and returns a hash of `count` forecasted future points.
Blazer.register_forecaster "trend" do |series, count:|
  Trend.forecast(series, count: count)
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/engine.rb | lib/blazer/engine.rb | module Blazer
# Rails engine: registers assets for precompilation and applies
# configuration from Blazer.settings (with ENV fallbacks) at boot.
class Engine < ::Rails::Engine
  isolate_namespace Blazer

  initializer "blazer" do |app|
    if app.config.respond_to?(:assets) && defined?(Sprockets)
      if Sprockets::VERSION.to_i >= 4
        app.config.assets.precompile += [
          "blazer/application.js",
          "blazer/application.css",
          "blazer/glyphicons-halflings-regular.eot",
          "blazer/glyphicons-halflings-regular.svg",
          "blazer/glyphicons-halflings-regular.ttf",
          "blazer/glyphicons-halflings-regular.woff",
          "blazer/glyphicons-halflings-regular.woff2",
          "blazer/favicon.png"
        ]
      else
        # use a proc instead of a string
        app.config.assets.precompile << proc { |path| path =~ /\Ablazer\/application\.(js|css)\z/ }
        app.config.assets.precompile << proc { |path| path =~ /\Ablazer\/.+\.(eot|svg|ttf|woff|woff2)\z/ }
        app.config.assets.precompile << proc { |path| path == "blazer/favicon.png" }
      end
    end

    # copy settings onto the Blazer module, defaulting where unset
    Blazer.time_zone ||= Blazer.settings["time_zone"] || Time.zone
    Blazer.audit = Blazer.settings.key?("audit") ? Blazer.settings["audit"] : true
    Blazer.user_name = Blazer.settings["user_name"] if Blazer.settings["user_name"]
    Blazer.from_email = Blazer.settings["from_email"] if Blazer.settings["from_email"]
    Blazer.before_action = Blazer.settings["before_action_method"] if Blazer.settings["before_action_method"]
    Blazer.check_schedules = Blazer.settings["check_schedules"] if Blazer.settings.key?("check_schedules")
    Blazer.cache ||= Rails.cache
    Blazer.anomaly_checks = Blazer.settings["anomaly_checks"] || false
    Blazer.forecasting = Blazer.settings["forecasting"] || false
    Blazer.async = Blazer.settings["async"] || false
    Blazer.images = Blazer.settings["images"] || false
    Blazer.override_csp = Blazer.settings["override_csp"] || false
    # credentials may come from settings or the environment
    Blazer.slack_oauth_token = Blazer.settings["slack_oauth_token"] || ENV["BLAZER_SLACK_OAUTH_TOKEN"]
    Blazer.slack_webhook_url = Blazer.settings["slack_webhook_url"] || ENV["BLAZER_SLACK_WEBHOOK_URL"]
    Blazer.mapbox_access_token = Blazer.settings["mapbox_access_token"] || ENV["MAPBOX_ACCESS_TOKEN"]
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/result_cache.rb | lib/blazer/result_cache.rb | module Blazer
# Stores results in Blazer.cache: short-lived async "run" results
# (30 second expiry) and cached statement results (per cache policy).
class ResultCache
  def initialize(data_source)
    @data_source = data_source
  end

  # -- async run results (30s expiry) --

  def write_run(run_id, result)
    write(run_cache_key(run_id), result, expires_in: 30.seconds)
  end

  def read_run(run_id)
    read(run_cache_key(run_id))
  end

  def delete_run(run_id)
    delete(run_cache_key(run_id))
  end

  # -- cached statement results (no-ops when caching is off) --

  def write_statement(statement, result, expires_in:)
    write(statement_cache_key(statement), result, expires_in: expires_in) if caching?
  end

  def read_statement(statement)
    read(statement_cache_key(statement)) if caching?
  end

  def delete_statement(statement)
    delete(statement_cache_key(statement)) if caching?
  end

  private

  # Stores the result's fields as a plain array (not the Result object).
  def write(key, result, expires_in:)
    raise ArgumentError, "expected Blazer::Result" unless result.is_a?(Blazer::Result)
    value = [result.columns, result.rows, result.error, result.cached_at, result.just_cached]
    cache.write(key, value, expires_in: expires_in)
  end

  # Rehydrates a Result from the stored array, or nil on a cache miss.
  def read(key)
    value = cache.read(key)
    if value
      columns, rows, error, cached_at, just_cached = value
      Blazer::Result.new(@data_source, columns, rows, error, cached_at, just_cached)
    end
  end

  def delete(key)
    cache.delete(key)
  end

  def caching?
    @data_source.cache_mode != "off"
  end

  # "v5" is the cache format version; bump to invalidate old entries.
  def cache_key(key)
    (["blazer", "v5", @data_source.id] + key).join("/")
  end

  # Key includes a digest of the bound statement (line endings
  # normalized) and the bind values.
  def statement_cache_key(statement)
    cache_key(["statement", Digest::SHA256.hexdigest(statement.bind_statement.to_s.gsub("\r\n", "\n") + statement.bind_values.to_json)])
  end

  def run_cache_key(run_id)
    cache_key(["run", run_id])
  end

  def cache
    Blazer.cache
  end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters.rb | lib/blazer/adapters.rb | Blazer.register_adapter "athena", Blazer::Adapters::AthenaAdapter
# Registers the built-in data source adapters by name.
Blazer.register_adapter "bigquery", Blazer::Adapters::BigQueryAdapter
Blazer.register_adapter "cassandra", Blazer::Adapters::CassandraAdapter
Blazer.register_adapter "drill", Blazer::Adapters::DrillAdapter
Blazer.register_adapter "druid", Blazer::Adapters::DruidAdapter
Blazer.register_adapter "elasticsearch", Blazer::Adapters::ElasticsearchAdapter
Blazer.register_adapter "hive", Blazer::Adapters::HiveAdapter
Blazer.register_adapter "ignite", Blazer::Adapters::IgniteAdapter
Blazer.register_adapter "influxdb", Blazer::Adapters::InfluxdbAdapter
Blazer.register_adapter "neo4j", Blazer::Adapters::Neo4jAdapter
Blazer.register_adapter "opensearch", Blazer::Adapters::OpensearchAdapter
Blazer.register_adapter "presto", Blazer::Adapters::PrestoAdapter
Blazer.register_adapter "salesforce", Blazer::Adapters::SalesforceAdapter
Blazer.register_adapter "soda", Blazer::Adapters::SodaAdapter
Blazer.register_adapter "spark", Blazer::Adapters::SparkAdapter
Blazer.register_adapter "sql", Blazer::Adapters::SqlAdapter
Blazer.register_adapter "snowflake", Blazer::Adapters::SnowflakeAdapter
# Trino reuses the Presto adapter
Blazer.register_adapter "trino", Blazer::Adapters::PrestoAdapter
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/elasticsearch_adapter.rb | lib/blazer/adapters/elasticsearch_adapter.rb | module Blazer
module Adapters
# Adapter for Elasticsearch SQL, using the _sql endpoint
# (_xpack/sql on pre-7 clusters).
class ElasticsearchAdapter < BaseAdapter
  # Returns [columns, rows, error]; date columns are parsed into Times.
  def run_statement(statement, comment, bind_params)
    columns = []
    rows = []
    error = nil

    begin
      response = client.transport.perform_request("POST", endpoint, {}, {query: "#{statement} /*#{comment}*/", params: bind_params}).body
      columns = response["columns"].map { |v| v["name"] }
      # Elasticsearch does not differentiate between dates and times
      date_indexes = response["columns"].each_index.select { |i| ["date", "datetime"].include?(response["columns"][i]["type"]) }
      if columns.any?
        rows = response["rows"]
        date_indexes.each do |i|
          rows.each do |row|
            row[i] &&= Time.parse(row[i])
          end
        end
      end
    rescue => e
      error = e.message
      # surface a friendlier message when a bind placeholder had no value
      error = Blazer::VARIABLE_MESSAGE if error.include?("mismatched input '?'")
    end

    [columns, rows, error]
  end

  # Index and alias names, for the schema sidebar.
  def tables
    indices = client.cat.indices(format: "json").map { |v| v["index"] }
    aliases = client.cat.aliases(format: "json").map { |v| v["alias"] }
    (indices + aliases).uniq.sort
  end

  def preview_statement
    "SELECT * FROM \"{table}\" LIMIT 10"
  end

  # https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-lexical-structure.html#sql-syntax-string-literals
  def quoting
    :single_quote_escape
  end

  # https://www.elastic.co/guide/en/elasticsearch/reference/current/sql-rest-params.html
  def parameter_binding
    :positional
  end

  protected

  # SQL endpoint path, chosen once based on the cluster's major version.
  def endpoint
    @endpoint ||= client.info["version"]["number"].to_i >= 7 ? "_sql" : "_xpack/sql"
  end

  def client
    @client ||= Elasticsearch::Client.new(url: settings["url"])
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/athena_adapter.rb | lib/blazer/adapters/athena_adapter.rb | module Blazer
module Adapters
class AthenaAdapter < BaseAdapter
def run_statement(statement, comment, bind_params = [])
require "digest/md5"
columns = []
rows = []
error = nil
begin
# use empty? since any? doesn't work for [nil]
if !bind_params.empty?
request_token = Digest::MD5.hexdigest([statement, bind_params.to_json, data_source.id, settings["workgroup"]].compact.join("/"))
statement_name = "blazer_#{request_token}"
begin
client.create_prepared_statement({
statement_name: statement_name,
work_group: settings["workgroup"],
query_statement: statement
})
rescue Aws::Athena::Errors::InvalidRequestException => e
raise e unless e.message.include?("already exists in WorkGroup")
end
using_statement = bind_params.map { |v| data_source.quote(v) }.join(", ")
statement = "EXECUTE #{statement_name} USING #{using_statement}"
else
request_token = Digest::MD5.hexdigest([statement, data_source.id, settings["workgroup"]].compact.join("/"))
end
query_options = {
query_string: statement,
# use token so we fetch cached results after query is run
client_request_token: request_token,
query_execution_context: {
database: database
}
}
if settings["output_location"]
query_options[:result_configuration] = {output_location: settings["output_location"]}
end
if settings["workgroup"]
query_options[:work_group] = settings["workgroup"]
end
resp = client.start_query_execution(**query_options)
query_execution_id = resp.query_execution_id
timeout = data_source.timeout || 300
stop_at = Time.now + timeout
resp = nil
begin
resp = client.get_query_results(
query_execution_id: query_execution_id
)
rescue Aws::Athena::Errors::InvalidRequestException => e
unless e.message.start_with?("Query has not yet finished.")
raise e
end
if Time.now < stop_at
sleep(3)
retry
end
end
if resp && resp.result_set
column_info = resp.result_set.result_set_metadata.column_info
columns = column_info.map(&:name)
column_types = column_info.map(&:type)
untyped_rows = []
# paginated
resp.each do |page|
untyped_rows.concat page.result_set.rows.map { |r| r.data.map(&:var_char_value) }
end
utc = ActiveSupport::TimeZone['Etc/UTC']
rows = untyped_rows[1..-1] || []
rows = untyped_rows[0..-1] unless column_info.present?
column_types.each_with_index do |ct, i|
# TODO more column_types
case ct
when "timestamp", "timestamp with time zone"
rows.each do |row|
row[i] &&= utc.parse(row[i])
end
when "date"
rows.each do |row|
row[i] &&= Date.parse(row[i])
end
when "bigint"
rows.each do |row|
row[i] &&= row[i].to_i
end
when "double"
rows.each do |row|
row[i] &&= row[i].to_f
end
end
end
elsif resp
error = fetch_error(query_execution_id)
else
error = Blazer::TIMEOUT_MESSAGE
end
rescue Aws::Athena::Errors::InvalidRequestException => e
error = e.message
if error == "Query did not finish successfully. Final query state: FAILED"
error = fetch_error(query_execution_id)
end
end
[columns, rows, error]
end
def tables
  # List table names for the configured database from the Glue catalog.
  table_list = glue.get_tables(database_name: database).table_list
  table_list.map { |table| table.name }.sort
end
def schema
  # Describe each table's columns using Glue catalog metadata.
  glue.get_tables(database_name: database).table_list.map do |table|
    columns = table.storage_descriptor.columns.map do |column|
      {name: column.name, data_type: column.type}
    end
    {table: table.name, columns: columns}
  end
end
def preview_statement
  # Template the UI uses to preview a table's contents.
  "SELECT * FROM {table} LIMIT 10"
end
# https://docs.aws.amazon.com/athena/latest/ug/select.html#select-escaping
def quoting
  # Athena escapes an embedded single quote by doubling it
  :single_quote_escape
end
# https://docs.aws.amazon.com/athena/latest/ug/querying-with-prepared-statements.html
def parameter_binding
  # positional (?) parameters require Athena engine version 2+;
  # nil disables binding entirely on version 1
  :positional if engine_version > 1
end
private
def database
  # memoized; falls back to Athena's built-in "default" database when unset
  @database ||= (settings["database"] || "default")
end
# note: this setting is experimental
# it does *not* need to be set to use engine version 2
# prepared statements must be manually deleted if enabled
def engine_version
  @engine_version ||= begin
    configured = settings["engine_version"] || 1
    configured.to_i
  end
end
def fetch_error(query_execution_id)
  # Athena reports failure details on the execution status,
  # not on the get_query_results call
  execution = client.get_query_execution(query_execution_id: query_execution_id).query_execution
  execution.status.state_change_reason
end
def client
  # lazily-built Athena API client
  @client ||= Aws::Athena::Client.new(**client_options)
end
def glue
  # lazily-built Glue API client (used for catalog metadata)
  @glue ||= Aws::Glue::Client.new(**client_options)
end
def client_options
  # Options shared by the Athena and Glue clients, built once.
  @client_options ||= begin
    opts = {}
    access_key_id = settings["access_key_id"]
    secret_access_key = settings["secret_access_key"]
    if access_key_id || secret_access_key
      opts[:credentials] = Aws::Credentials.new(access_key_id, secret_access_key)
    end
    region = settings["region"]
    opts[:region] = region if region
    opts
  end
end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/bigquery_adapter.rb | lib/blazer/adapters/bigquery_adapter.rb | module Blazer
module Adapters
# Blazer data source adapter for Google BigQuery, backed by the
# google-cloud-bigquery gem configured via the data source settings
# ("project", "keyfile").
class BigQueryAdapter < BaseAdapter
  # Runs a SQL statement and returns [columns, rows, error].
  # bind_params are forwarded to BigQuery as query parameters.
  def run_statement(statement, comment, bind_params)
    columns = []
    rows = []
    error = nil
    begin
      results = bigquery.query(statement, params: bind_params)
      # complete? was removed in google-cloud-bigquery 0.29.0
      # code is for backward compatibility
      if !results.respond_to?(:complete?) || results.complete?
        columns = results.first.keys.map(&:to_s) if results.size > 0
        rows = results.all.map(&:values)
      else
        error = Blazer::TIMEOUT_MESSAGE
      end
    rescue => e
      error = e.message
      # an unreplaced ? placeholder surfaces as this BigQuery syntax error
      error = Blazer::VARIABLE_MESSAGE if error.include?("Syntax error: Unexpected \"?\"")
    end
    [columns, rows, error]
  end

  # Fully-qualified names (project.dataset.table) for every visible table.
  def tables
    table_refs.map { |t| "#{t.project_id}.#{t.dataset_id}.#{t.table_id}" }
  end

  # Table/column metadata for the schema viewer; the dataset acts as the schema.
  def schema
    table_refs.map do |table_ref|
      {
        schema: table_ref.dataset_id,
        table: table_ref.table_id,
        columns: table_columns(table_ref)
      }
    end
  end

  def preview_statement
    "SELECT * FROM `{table}` LIMIT 10"
  end

  # https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical#string_and_bytes_literals
  def quoting
    :backslash_escape
  end

  # https://cloud.google.com/bigquery/docs/parameterized-queries
  def parameter_binding
    :positional
  end

  private

  # Lazily-initialized BigQuery client.
  def bigquery
    @bigquery ||= begin
      require "google/cloud/bigquery"
      Google::Cloud::Bigquery.new(
        project: settings["project"],
        keyfile: settings["keyfile"]
      )
    end
  end

  # Table references across all datasets in the configured project.
  def table_refs
    bigquery.datasets.map(&:tables).flat_map { |table_list| table_list.map(&:table_ref) }
  end

  # Column name/type pairs for one table; empty when the table has no schema.
  def table_columns(table_ref)
    schema = bigquery.service.get_table(table_ref.dataset_id, table_ref.table_id).schema
    return [] if schema.nil?
    schema.fields.map { |field| {name: field.name, data_type: field.type} }
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/spark_adapter.rb | lib/blazer/adapters/spark_adapter.rb | module Blazer
module Adapters
# Spark Thrift Server adapter; reuses the Hive connection logic but
# differs in the SHOW TABLES result column and string escaping rules.
class SparkAdapter < HiveAdapter
  def tables
    # Spark returns the table name under "tableName" (Hive uses "tab_name")
    result = client.execute("SHOW TABLES")
    result.map { |row| row["tableName"] }
  end

  # https://spark.apache.org/docs/latest/sql-ref-literals.html
  def quoting
    # Spark SQL escapes ' and \ with a backslash
    :backslash_escape
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/base_adapter.rb | lib/blazer/adapters/base_adapter.rb | module Blazer
module Adapters
# Abstract base class for all Blazer data source adapters.
# Subclasses implement run_statement and quoting (required) and may
# override the optional hooks below. Each comment marks the contract.
class BaseAdapter
  # the Blazer::DataSource this adapter serves
  attr_reader :data_source

  def initialize(data_source)
    @data_source = data_source
  end

  # Executes a statement and returns [columns, rows, error].
  # NOTE(review): several subclasses accept an extra bind_params argument —
  # confirm the expected arity against the caller in Blazer::DataSource.
  def run_statement(statement, comment)
    # required
  end

  def quoting
    # required, how to quote variables
    # :backslash_escape - single quote strings and convert ' to \' and \ to \\
    # :single_quote_escape - single quote strings and convert ' to ''
    # ->(value) { ... } - custom method
  end

  def parameter_binding
    # optional, but recommended when possible for security
    # if specified, quoting is only used for display
    # :positional - ?
    # :numeric - $1
    # ->(statement, values) { ... } - custom method
  end

  def tables
    [] # optional, but nice to have
  end

  def schema
    [] # optional, but nice to have
  end

  def preview_statement
    "" # also optional, but nice to have
  end

  def reconnect
    # optional
  end

  def cost(statement)
    # optional
  end

  def explain(statement)
    # optional
  end

  def cancel(run_id)
    # optional
  end

  def cachable?(statement)
    true # optional
  end

  def supports_cohort_analysis?
    false # optional
  end

  def cohort_analysis_statement(statement, period:, days:)
    # optional
  end

  protected

  # convenience accessor for the data source's settings hash
  def settings
    @data_source.settings
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/soda_adapter.rb | lib/blazer/adapters/soda_adapter.rb | module Blazer
module Adapters
# Blazer adapter for the Socrata Open Data API (SODA), which executes
# SoQL queries over HTTP.
class SodaAdapter < BaseAdapter
  # Runs a SoQL query and returns [columns, rows, error].
  #
  # Fix: SODA rows can omit values, so cells may be nil. The previous type
  # casting crashed on nil timestamps (nil.end_with?) and silently coerced
  # nil numbers to 0/0.0; missing values are now detected safely and
  # preserved as nil.
  def run_statement(statement, comment)
    require "json"
    require "net/http"
    require "uri"

    columns = []
    rows = []
    error = nil

    # remove comments manually
    statement = statement.gsub(/--.+/, "")
    # only supports single line /* */ comments
    # regex not perfect, but should be good enough
    statement = statement.gsub(/\/\*.+\*\//, "")
    # remove trailing semicolon
    statement = statement.sub(/;\s*\z/, "")
    # remove whitespace
    statement = statement.squish

    uri = URI(settings["url"])
    uri.query = URI.encode_www_form("$query" => statement)
    req = Net::HTTP::Get.new(uri)
    req["X-App-Token"] = settings["app_token"] if settings["app_token"]

    options = {
      use_ssl: uri.scheme == "https",
      open_timeout: 3,
      read_timeout: 30
    }
    begin
      # use Net::HTTP instead of soda-ruby for types and better error messages
      res = Net::HTTP.start(uri.hostname, uri.port, options) do |http|
        http.request(req)
      end
      if res.is_a?(Net::HTTPSuccess)
        body = JSON.parse(res.body)
        columns = JSON.parse(res["x-soda2-fields"])
        column_types = columns.zip(JSON.parse(res["x-soda2-types"])).to_h
        columns.reject! { |f| f.start_with?(":@") }
        # rows can be missing some keys in JSON, so need to map by column
        rows = body.map { |r| columns.map { |c| r[c] } }
        columns.each_with_index do |column, i|
          # nothing to do for boolean
          case column_types[column]
          when "number"
            # check if likely an integer column (ignore missing values)
            if rows.all? { |r| r[i].nil? || r[i].to_i == r[i].to_f }
              rows.each do |row|
                # &&= preserves nil instead of coercing it to 0
                row[i] &&= row[i].to_i
              end
            else
              rows.each do |row|
                row[i] &&= row[i].to_f
              end
            end
          when "floating_timestamp"
            # check if likely a date column (ignore missing values)
            if rows.all? { |r| r[i].nil? || r[i].end_with?("T00:00:00.000") }
              rows.each do |row|
                row[i] &&= Date.parse(row[i])
              end
            else
              utc = ActiveSupport::TimeZone["Etc/UTC"]
              rows.each do |row|
                row[i] &&= utc.parse(row[i])
              end
            end
          end
        end
      else
        error = JSON.parse(res.body)["message"] rescue "Bad response: #{res.code}"
      end
    rescue => e
      error = e.message
    end

    [columns, rows, error]
  end

  def preview_statement
    "SELECT * LIMIT 10"
  end

  # a SODA endpoint exposes a single dataset
  def tables
    ["all"]
  end

  # https://dev.socrata.com/docs/datatypes/text.html
  def quoting
    :single_quote_escape
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/drill_adapter.rb | lib/blazer/adapters/drill_adapter.rb | module Blazer
module Adapters
# Blazer adapter for Apache Drill via the drill-sergeant gem.
class DrillAdapter < BaseAdapter
  # Executes a statement and returns [columns, rows, error].
  def run_statement(statement, comment)
    columns = []
    rows = []
    error = nil
    begin
      # Drill rejects trailing semicolons, so strip one before submitting
      response = drill.query(statement.sub(/;\s*\z/, ""))
      response.each do |record|
        rows << record.values
      end
      columns = response.first.keys if rows.any?
    rescue => e
      error = e.message
    end
    [columns, rows, error]
  end

  # https://drill.apache.org/docs/lexical-structure/#string
  def quoting
    :single_quote_escape
  end

  # https://issues.apache.org/jira/browse/DRILL-5079
  def parameter_binding
    # not supported
  end

  private

  def drill
    @drill ||= ::Drill.new(url: settings["url"])
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/presto_adapter.rb | lib/blazer/adapters/presto_adapter.rb | module Blazer
module Adapters
# Blazer adapter for Presto and Trino (selected by URL scheme).
class PrestoAdapter < BaseAdapter
  # Runs a statement and returns [columns, rows, error].
  def run_statement(statement, comment)
    columns = []
    rows = []
    error = nil
    begin
      columns, rows = client.run("#{statement} /*#{comment}*/")
      columns = columns.map(&:name)
    rescue => e
      error = e.message
    end
    [columns, rows, error]
  end

  def tables
    _, rows = client.run("SHOW TABLES")
    rows.map(&:first)
  end

  def preview_statement
    "SELECT * FROM {table} LIMIT 10"
  end

  def quoting
    :single_quote_escape
  end

  # TODO support prepared statements - https://prestodb.io/docs/current/sql/prepare.html
  # feature request for variables - https://github.com/prestodb/presto/issues/5918
  def parameter_binding
  end

  protected

  def client
    @client ||= begin
      uri = URI.parse(settings["url"])
      query = uri.query ? CGI.parse(uri.query) : {}
      # CGI.parse returns each value as an array and defaults missing keys
      # to a (truthy) frozen empty array, so `query["schema"] || "public"`
      # never fell back and passed an Array when present; unwrap the first
      # element before applying the default
      schema = Array(query["schema"]).first || "public"
      cls = uri.scheme == "trino" ? Trino::Client : Presto::Client
      cls.new(
        server: "#{uri.host}:#{uri.port}",
        catalog: uri.path.to_s.delete_prefix("/"),
        schema: schema,
        user: uri.user,
        http_debug: false
      )
    end
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/salesforce_adapter.rb | lib/blazer/adapters/salesforce_adapter.rb | module Blazer
module Adapters
# Blazer adapter for Salesforce (SOQL) via the restforce gem.
class SalesforceAdapter < BaseAdapter
  # Runs a SOQL statement and returns [columns, rows, error].
  def run_statement(statement, comment)
    columns = []
    rows = []
    error = nil
    # SOQL has no comment syntax, so strip SQL-style comments before sending:
    # -- line comments, single-line /* */ comments (regex is approximate),
    # and a trailing semicolon
    soql =
      statement
        .gsub(/--.+/, "")
        .gsub(/\/\*.+\*\//, "")
        .sub(/;\s*\z/, "")
    begin
      response = client.query(soql)
      response.each do |record|
        rows << record.to_hash.except("attributes").values
      end
      columns = response.first.to_hash.except("attributes").keys if rows.any?
    rescue => e
      error = e.message
    end
    [columns, rows, error]
  end

  # queryable sObjects act as tables; memoized since describe is slow
  def tables
    @tables ||= client.describe.select(&:queryable).map(&:name)
  end

  def preview_statement
    "SELECT Id FROM {table} LIMIT 10"
  end

  # https://developer.salesforce.com/docs/atlas.en-us.soql_sosl.meta/soql_sosl/sforce_api_calls_soql_select_quotedstringescapes.htm
  def quoting
    :backslash_escape
  end

  protected

  # Restforce picks up its configuration from environment variables.
  def client
    @client ||= Restforce.new
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/ignite_adapter.rb | lib/blazer/adapters/ignite_adapter.rb | module Blazer
module Adapters
# Blazer adapter for Apache Ignite, using its binary client protocol.
class IgniteAdapter < BaseAdapter
  # Executes SQL with positional arguments; returns [columns, rows, error].
  def run_statement(statement, comment, bind_params)
    columns = []
    rows = []
    error = nil
    begin
      result = client.query("#{statement} /*#{comment}*/", bind_params, schema: default_schema, statement_type: :select, timeout: data_source.timeout)
      rows = result.map(&:values)
      columns = result.first.keys unless result.empty?
    rescue => e
      error = e.message
    end
    [columns, rows, error]
  end

  def preview_statement
    "SELECT * FROM {table} LIMIT 10"
  end

  # User tables, schema-qualified unless in the default schema, with
  # Ignite-internal (double-underscore) tables filtered out.
  def tables
    sql = "SELECT table_schema, table_name FROM information_schema.tables WHERE table_schema NOT IN ('INFORMATION_SCHEMA', 'SYS')"
    result = data_source.run_statement(sql)
    visible = result.rows.reject { |schema_name, table_name| table_name.start_with?("__") }
    visible.map do |schema_name, table_name|
      full_name = schema_name == default_schema ? table_name : "#{schema_name}.#{table_name}"
      full_name.downcase
    end
  end

  # TODO figure out error
  # Table `__T0` can be accessed only within Ignite query context.
  # def schema
  #   sql = "SELECT table_schema, table_name, column_name, data_type, ordinal_position FROM information_schema.columns WHERE table_schema NOT IN ('INFORMATION_SCHEMA', 'SYS')"
  #   result = data_source.run_statement(sql)
  #   result.rows.group_by { |r| [r[0], r[1]] }.map { |k, vs| {schema: k[0], table: k[1], columns: vs.sort_by { |v| v[2] }.map { |v| {name: v[2], data_type: v[3]} }} }.sort_by { |t| [t[:schema] == default_schema ? "" : t[:schema], t[:table]] }
  # end

  def quoting
    :single_quote_escape
  end

  # query arguments
  # https://ignite.apache.org/docs/latest/binary-client-protocol/sql-and-scan-queries#op_query_sql
  def parameter_binding
    :positional
  end

  private

  def default_schema
    "PUBLIC"
  end

  def client
    @client ||= begin
      uri = URI(settings["url"])
      Ignite::Client.new(host: uri.host, port: uri.port, username: uri.user, password: uri.password)
    end
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/hive_adapter.rb | lib/blazer/adapters/hive_adapter.rb | module Blazer
module Adapters
# Blazer adapter for HiveServer2 via the hexspace gem.
class HiveAdapter < BaseAdapter
  # Executes a statement and returns [columns, rows, error].
  def run_statement(statement, comment)
    columns = []
    rows = []
    error = nil
    begin
      result = client.execute("#{statement} /*#{comment}*/")
      rows = result.map(&:values)
      columns = result.first.keys unless result.empty?
    rescue => e
      error = e.message
    end
    [columns, rows, error]
  end

  def tables
    client.execute("SHOW TABLES").map { |row| row["tab_name"] }
  end

  def preview_statement
    "SELECT * FROM {table} LIMIT 10"
  end

  # https://cwiki.apache.org/confluence/display/Hive/LanguageManual+Types#LanguageManualTypes-StringsstringStrings
  def quoting
    :backslash_escape
  end

  # has variable substitution, but sets for session
  # https://cwiki.apache.org/confluence/display/Hive/LanguageManual+VariableSubstitution
  def parameter_binding
  end

  protected

  # Thrift client built from the configured URL (scheme selects the mode).
  def client
    @client ||= begin
      uri = URI.parse(settings["url"])
      Hexspace::Client.new(
        host: uri.host,
        port: uri.port,
        username: uri.user,
        password: uri.password,
        database: uri.path.delete_prefix("/"),
        mode: uri.scheme.to_sym
      )
    end
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/neo4j_adapter.rb | lib/blazer/adapters/neo4j_adapter.rb | module Blazer
module Adapters
# Blazer adapter for Neo4j. Supports two backends: the Bolt driver
# (neo4j-ruby-driver) when the URL scheme starts with "bolt", otherwise
# the HTTP adaptor from neo4j-core.
class Neo4jAdapter < BaseAdapter
  # Runs a Cypher statement and returns [columns, rows, error].
  def run_statement(statement, comment, bind_params)
    columns = []
    rows = []
    error = nil
    begin
      if bolt?
        result = session.run("#{statement} /*#{comment}*/", bind_params).to_a
        columns = result.any? ? result.first.keys.map(&:to_s) : []
        rows = result.map(&:values)
      else
        result = session.query("#{statement} /*#{comment}*/", bind_params)
        columns = result.columns.map(&:to_s)
        rows = []
        result.each do |row|
          rows << columns.map do |c|
            v = row.send(c)
            # nodes/relationships are flattened to their property hashes
            v = v.properties if v.respond_to?(:properties)
            v
          end
        end
      end
    rescue => e
      error = e.message
      # an unreplaced {variable} surfaces as a Cypher parse error on $
      error = Blazer::VARIABLE_MESSAGE if error.include?("Invalid input '$'")
    end
    [columns, rows, error]
  end

  # Node labels stand in for tables.
  def tables
    if bolt?
      result = session.run("CALL db.labels()").to_a
      result.map { |r| r.values.first }
    else
      result = session.query("CALL db.labels()")
      result.rows.map(&:first)
    end
  end

  def preview_statement
    "MATCH (n:{table}) RETURN n LIMIT 10"
  end

  # https://neo4j.com/docs/cypher-manual/current/syntax/expressions/#cypher-expressions-string-literals
  def quoting
    :backslash_escape
  end

  # Rewrites {var} placeholders to Cypher $var parameters.
  def parameter_binding
    proc do |statement, variables|
      variables.each_key do |k|
        # NOTE(review): the replacement appends a trailing space — presumably
        # to keep the parameter name from fusing with the next character;
        # confirm before changing
        statement = statement.gsub("{#{k}}") { "$#{k} " }
      end
      [statement, variables]
    end
  end

  protected

  # Lazily-built session for the configured backend.
  def session
    @session ||= begin
      if bolt?
        uri = URI.parse(settings["url"])
        auth = Neo4j::Driver::AuthTokens.basic(uri.user, uri.password)
        database = uri.path.delete_prefix("/")
        # strip credentials/path so only host and port reach the driver
        uri.user = nil
        uri.password = nil
        uri.path = ""
        Neo4j::Driver::GraphDatabase.driver(uri, auth).session(database: database)
      else
        require "neo4j/core/cypher_session/adaptors/http"
        http_adaptor = Neo4j::Core::CypherSession::Adaptors::HTTP.new(settings["url"])
        Neo4j::Core::CypherSession.new(http_adaptor)
      end
    end
  end

  # true when the configured URL uses the Bolt protocol (memoized,
  # including a false result)
  def bolt?
    if !defined?(@bolt)
      @bolt = settings["url"].start_with?("bolt")
    end
    @bolt
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/cassandra_adapter.rb | lib/blazer/adapters/cassandra_adapter.rb | module Blazer
module Adapters
# Blazer adapter for Cassandra via the cassandra-driver gem.
class CassandraAdapter < BaseAdapter
  # Executes CQL with positional arguments; returns [columns, rows, error].
  def run_statement(statement, comment, bind_params)
    columns = []
    rows = []
    error = nil
    begin
      response = session.execute("#{statement} /*#{comment}*/", arguments: bind_params)
      response.each do |result_row|
        rows << result_row.values
      end
      columns = response.first.keys if rows.any?
    rescue => e
      error = e.message
      # an unbound ? marker produces a CQL parse error
      error = Blazer::VARIABLE_MESSAGE if error.include?("no viable alternative at input '?'")
    end
    [columns, rows, error]
  end

  # Tables in the configured keyspace.
  def tables
    cql = "SELECT table_name FROM system_schema.tables WHERE keyspace_name = #{data_source.quote(keyspace)}"
    session.execute(cql).map { |row| row["table_name"] }
  end

  # Column metadata grouped by (keyspace, table).
  def schema
    cql = "SELECT keyspace_name, table_name, column_name, type, position FROM system_schema.columns WHERE keyspace_name = #{data_source.quote(keyspace)}"
    result = session.execute(cql)
    grouped = result.map(&:values).group_by { |row| [row[0], row[1]] }
    grouped.map do |(keyspace_name, table_name), table_rows|
      columns = table_rows.sort_by { |row| row[2] }.map { |row| {name: row[2], data_type: row[3]} }
      {schema: keyspace_name, table: table_name, columns: columns}
    end
  end

  def preview_statement
    "SELECT * FROM {table} LIMIT 10"
  end

  # https://docs.datastax.com/en/cql-oss/3.3/cql/cql_reference/escape_char_r.html
  def quoting
    :single_quote_escape
  end

  # https://docs.datastax.com/en/developer/nodejs-driver/3.0/features/parameterized-queries/
  def parameter_binding
    :positional
  end

  private

  # Cluster connection built lazily from the URL settings.
  def cluster
    @cluster ||= begin
      require "cassandra"
      options = {hosts: [uri.host]}
      options[:port] = uri.port if uri.port
      options[:username] = uri.user if uri.user
      options[:password] = uri.password if uri.password
      ::Cassandra.cluster(options)
    end
  end

  def session
    @session ||= cluster.connect(keyspace)
  end

  def uri
    @uri ||= URI.parse(data_source.settings["url"])
  end

  # keyspace comes from the URL path, with the leading slash removed
  def keyspace
    @keyspace ||= uri.path[1..-1]
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/druid_adapter.rb | lib/blazer/adapters/druid_adapter.rb | module Blazer
module Adapters
# Blazer adapter for Apache Druid's SQL API (POST /druid/v2/sql/).
class DruidAdapter < BaseAdapter
  # matches Druid's ISO 8601 millisecond timestamp serialization in JSON
  TIMESTAMP_REGEX = /\A\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z\z/

  # Runs a SQL statement with typed dynamic parameters; returns
  # [columns, rows, error].
  def run_statement(statement, comment, bind_params)
    require "json"
    require "net/http"
    require "uri"

    columns = []
    rows = []
    error = nil

    # map Ruby values to Druid SQL parameter types
    params =
      bind_params.map do |v|
        type =
          case v
          when Integer
            "BIGINT"
          when Float
            "DOUBLE"
          when ActiveSupport::TimeWithZone
            # Druid takes TIMESTAMP parameters as epoch milliseconds
            v = (v.to_f * 1000).round
            "TIMESTAMP"
          else
            "VARCHAR"
          end
        {type: type, value: v}
      end

    header = {"Content-Type" => "application/json", "Accept" => "application/json"}
    timeout = data_source.timeout ? data_source.timeout.to_i : 300
    data = {
      query: statement,
      parameters: params,
      context: {
        timeout: timeout * 1000
      }
    }

    uri = URI.parse("#{settings["url"]}/druid/v2/sql/")
    http = Net::HTTP.new(uri.host, uri.port)
    http.read_timeout = timeout
    begin
      response = JSON.parse(http.post(uri.request_uri, data.to_json, header).body)
      # a Hash response is an error payload; successful results are an Array
      if response.is_a?(Hash)
        error = response["errorMessage"] || "Unknown error: #{response.inspect}"
        if error.include?("timed out")
          error = Blazer::TIMEOUT_MESSAGE
        elsif error.include?("Encountered \"?\" at")
          error = Blazer::VARIABLE_MESSAGE
        end
      else
        columns = (response.first || {}).keys
        rows = response.map { |r| r.values }
        # Druid doesn't return column types
        # and no timestamp type in JSON
        rows.each do |row|
          row.each_with_index do |v, i|
            if v.is_a?(String) && TIMESTAMP_REGEX.match(v)
              row[i] = Time.parse(v)
            end
          end
        end
      end
    rescue => e
      error = e.message
    end

    [columns, rows, error]
  end

  def tables
    result = data_source.run_statement("SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA NOT IN ('INFORMATION_SCHEMA') ORDER BY TABLE_NAME")
    result.rows.map(&:first)
  end

  # Column metadata grouped by (schema, table) for the schema viewer.
  def schema
    result = data_source.run_statement("SELECT TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, DATA_TYPE, ORDINAL_POSITION FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA NOT IN ('INFORMATION_SCHEMA') ORDER BY 1, 2")
    result.rows.group_by { |r| [r[0], r[1]] }.map { |k, vs| {schema: k[0], table: k[1], columns: vs.sort_by { |v| v[2] }.map { |v| {name: v[2], data_type: v[3]} }} }
  end

  def preview_statement
    "SELECT * FROM {table} LIMIT 10"
  end

  # https://druid.apache.org/docs/latest/querying/sql.html#identifiers-and-literals
  # docs only mention double quotes
  def quoting
    :single_quote_escape
  end

  # https://druid.apache.org/docs/latest/querying/sql.html#dynamic-parameters
  def parameter_binding
    :positional
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/influxdb_adapter.rb | lib/blazer/adapters/influxdb_adapter.rb | module Blazer
module Adapters
# Blazer adapter for InfluxDB (InfluxQL) via the influxdb gem.
class InfluxdbAdapter < BaseAdapter
  # Executes a query and returns [columns, rows, error].
  def run_statement(statement, comment)
    columns = []
    rows = []
    error = nil
    begin
      series = client.query(statement, denormalize: false).first
      if series
        columns = series["columns"]
        rows = series["values"]
        # parse time columns
        # current approach isn't ideal, but result doesn't include types
        # another approach would be to check the format
        time_index = columns.index("time")
        if time_index
          rows.each do |row|
            row[time_index] &&= Time.parse(row[time_index])
          end
        end
      end
    rescue => e
      error = e.message
    end
    [columns, rows, error]
  end

  def tables
    client.list_series
  end

  def preview_statement
    "SELECT * FROM {table} LIMIT 10"
  end

  # https://docs.influxdata.com/influxdb/v1.8/query_language/spec/#strings
  def quoting
    :backslash_escape
  end

  def parameter_binding
    # not supported
  end

  protected

  def client
    @client ||= InfluxDB::Client.new(url: settings["url"])
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/opensearch_adapter.rb | lib/blazer/adapters/opensearch_adapter.rb | module Blazer
module Adapters
# Blazer adapter for OpenSearch's SQL plugin.
class OpensearchAdapter < BaseAdapter
  # Executes SQL via the _plugins/_sql endpoint; returns [columns, rows, error].
  def run_statement(statement, comment)
    columns = []
    rows = []
    error = nil
    begin
      response = client.transport.perform_request("POST", "_plugins/_sql", {}, {query: "#{statement} /*#{comment}*/"}).body
      fields = response["schema"]
      columns = fields.map { |field| field["name"] }
      # TODO typecast more types
      # https://github.com/opensearch-project/sql/blob/main/docs/user/general/datatypes.rst
      date_indexes = fields.each_index.select { |i| fields[i]["type"] == "timestamp" }
      if columns.any?
        rows = response["datarows"]
        utc = ActiveSupport::TimeZone["Etc/UTC"]
        rows.each do |row|
          date_indexes.each do |i|
            row[i] &&= utc.parse(row[i])
          end
        end
      end
    rescue => e
      error = e.message
    end
    [columns, rows, error]
  end

  # index and alias names both act as queryable "tables"
  def tables
    names = client.cat.indices(format: "json").map { |entry| entry["index"] }
    names += client.cat.aliases(format: "json").map { |entry| entry["alias"] }
    names.uniq.sort
  end

  def preview_statement
    "SELECT * FROM `{table}` LIMIT 10"
  end

  def quoting
    # unknown
  end

  protected

  def client
    @client ||= OpenSearch::Client.new(url: settings["url"])
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/sql_adapter.rb | lib/blazer/adapters/sql_adapter.rb | module Blazer
module Adapters
# Blazer adapter for relational databases accessed through Active Record
# (Postgres, MySQL/MariaDB, SQLite, SQL Server, Redshift, and others).
# A throwaway connection class is created per data source so each source
# gets its own connection pool.
class SqlAdapter < BaseAdapter
  # anonymous Blazer::Connection subclass holding this source's pool
  attr_reader :connection_model

  def initialize(data_source)
    super
    @connection_model =
      Class.new(Blazer::Connection) do
        # stable-ish name for Active Record logging/inspection
        def self.name
          "Blazer::Connection::Adapter#{object_id}"
        end
        establish_connection(data_source.settings["url"]) if data_source.settings["url"]
      end
  end

  # Runs a statement (with optional binds) inside a rolled-back transaction
  # unless disabled; returns [columns, rows, error].
  def run_statement(statement, comment, bind_params = [])
    columns = []
    rows = []
    error = nil
    begin
      types = []
      in_transaction do |connection|
        set_timeout(data_source.timeout) if data_source.timeout
        binds = bind_params.map { |v| ActiveRecord::Relation::QueryAttribute.new(nil, v, ActiveRecord::Type::Value.new) }
        if sqlite?
          # go through the raw connection so bind values and result types work
          type_map = connection.send(:type_map)
          connection.raw_connection.prepare("#{statement} /*#{comment}*/") do |stmt|
            stmt.bind_params(connection.send(:type_casted_binds, binds))
            columns = stmt.columns
            rows = stmt.to_a
            types = stmt.types.map { |t| type_map.lookup(t) }
          end
        else
          result = connection.select_all("#{statement} /*#{comment}*/", nil, binds)
          columns = result.columns
          rows = result.rows
          if result.column_types.any?
            types = columns.size.times.map { |i| result.column_types[i] }
          end
        end
      end
      # cast values
      if types.any?
        rows =
          rows.map do |row|
            row.map.with_index do |v, i|
              v && (t = types[i]) ? t.send(:cast_value, v) : v
            end
          end
      end
      # fix for non-ASCII column names and charts
      if adapter_name == "Trilogy"
        columns = columns.map { |k| k.dup.force_encoding(Encoding::UTF_8) }
      end
    rescue => e
      error = e.message.sub(/.+ERROR: /, "")
      error = Blazer::TIMEOUT_MESSAGE if Blazer::TIMEOUT_ERRORS.any? { |e| error.include?(e) }
      # unreplaced variables show up as driver-specific syntax errors
      error = Blazer::VARIABLE_MESSAGE if error.include?("syntax error at or near \"$") || error.include?("Incorrect syntax near '@") || error.include?("your MySQL server version for the right syntax to use near '?")
      if error.include?("could not determine data type of parameter")
        error += " - try adding casting to variables and make sure none are inside a string literal"
      end
      reconnect if error.include?("PG::ConnectionBad")
    end
    [columns, rows, error]
  end

  # Table names for the source; schema-qualified (and quoted) where the
  # database distinguishes schemas.
  def tables
    sql =
      if sqlite?
        "SELECT NULL, name FROM sqlite_master WHERE type IN ('table', 'view') ORDER BY name"
      else
        add_schemas("SELECT table_schema, table_name FROM information_schema.tables")
      end
    result = data_source.run_statement(sql, refresh_cache: true)
    if postgresql? || redshift? || snowflake?
      result.rows.sort_by { |r| [r[0] == default_schema ? "" : r[0], r[1]] }.map do |row|
        table =
          if row[0] == default_schema
            row[1]
          else
            "#{row[0]}.#{row[1]}"
          end
        table = table.downcase if snowflake?
        {
          table: table,
          value: connection_model.connection.quote_table_name(table)
        }
      end
    else
      result.rows.map(&:second).sort
    end
  end

  # Column metadata grouped by (schema, table), default schema first.
  def schema
    sql =
      if sqlite?
        "SELECT NULL, t.name, c.name, c.type, c.cid FROM sqlite_master t INNER JOIN pragma_table_info(t.name) c WHERE t.type IN ('table', 'view')"
      else
        add_schemas("SELECT table_schema, table_name, column_name, data_type, ordinal_position FROM information_schema.columns")
      end
    result = data_source.run_statement(sql)
    result.rows.group_by { |r| [r[0], r[1]] }.map { |k, vs| {schema: k[0], table: k[1], columns: vs.sort_by { |v| v[2] }.map { |v| {name: v[2], data_type: v[3]} }} }.sort_by { |t| [t[:schema] == default_schema ? "" : t[:schema], t[:table]] }
  end

  def preview_statement
    if sqlserver?
      "SELECT TOP (10) * FROM {table}"
    else
      "SELECT * FROM {table} LIMIT 10"
    end
  end

  def reconnect
    connection_model.establish_connection(settings["url"])
  end

  # Estimated query cost extracted from the planner output (nil on failure).
  def cost(statement)
    result = explain(statement)
    if sqlserver?
      result["TotalSubtreeCost"]
    else
      match = /cost=\d+\.\d+..(\d+\.\d+) /.match(result)
      match[1] if match
    end
  end

  # Planner output for supported databases; nil elsewhere or on error.
  def explain(statement)
    if postgresql? || redshift?
      select_all("EXPLAIN #{statement}").rows.first.first
    elsif sqlserver?
      begin
        execute("SET SHOWPLAN_ALL ON")
        result = select_all(statement).each.first
      ensure
        execute("SET SHOWPLAN_ALL OFF")
      end
      result
    end
  rescue
    nil
  end

  # Cancels a running query by the run_id embedded in its comment.
  def cancel(run_id)
    if postgresql?
      select_all("SELECT pg_cancel_backend(pid) FROM pg_stat_activity WHERE pid <> pg_backend_pid() AND query LIKE ?", ["%,run_id:#{run_id}%"])
    elsif redshift?
      first_row = select_all("SELECT pid FROM stv_recents WHERE status = 'Running' AND query LIKE ?", ["%,run_id:#{run_id}%"]).first
      if first_row
        select_all("CANCEL #{first_row["pid"].to_i}")
      end
    end
  end

  # only cache reads; write statements should re-run every time
  def cachable?(statement)
    !%w[CREATE ALTER UPDATE INSERT DELETE].include?(statement.split.first.to_s.upcase)
  end

  def supports_cohort_analysis?
    postgresql? || mysql?
  end

  # Wraps the user's query (which must expose user_id and cohort_time /
  # conversion_time) in cohort-bucketing SQL for the configured period.
  # TODO treat date columns as already in time zone
  def cohort_analysis_statement(statement, period:, days:)
    raise "Cohort analysis not supported" unless supports_cohort_analysis?

    cohort_column = statement.match?(/\bcohort_time\b/) ? "cohort_time" : "conversion_time"
    tzname = Blazer.time_zone.tzinfo.name

    if mysql?
      time_sql = "CONVERT_TZ(cohorts.cohort_time, '+00:00', ?)"
      case period
      when "day"
        date_sql = "CAST(DATE_FORMAT(#{time_sql}, '%Y-%m-%d') AS DATE)"
        date_params = [tzname]
      when "week"
        date_sql = "CAST(DATE_FORMAT(#{time_sql} - INTERVAL ((5 + DAYOFWEEK(#{time_sql})) % 7) DAY, '%Y-%m-%d') AS DATE)"
        date_params = [tzname, tzname]
      else
        date_sql = "CAST(DATE_FORMAT(#{time_sql}, '%Y-%m-01') AS DATE)"
        date_params = [tzname]
      end
      bucket_sql = "CAST(CEIL(TIMESTAMPDIFF(SECOND, cohorts.cohort_time, query.conversion_time) / ?) AS SIGNED)"
    else
      date_sql = "date_trunc(?, cohorts.cohort_time::timestamptz AT TIME ZONE ?)::date"
      date_params = [period, tzname]
      bucket_sql = "CEIL(EXTRACT(EPOCH FROM query.conversion_time - cohorts.cohort_time) / ?)::int"
    end

    # WITH not an optimization fence in Postgres 12+
    statement = <<~SQL
      WITH query AS (
        {placeholder}
      ),
      cohorts AS (
        SELECT user_id, MIN(#{cohort_column}) AS cohort_time FROM query
        WHERE user_id IS NOT NULL AND #{cohort_column} IS NOT NULL
        GROUP BY 1
      )
      SELECT
        #{date_sql} AS period,
        0 AS bucket,
        COUNT(DISTINCT cohorts.user_id)
      FROM cohorts GROUP BY 1
      UNION ALL
      SELECT
        #{date_sql} AS period,
        #{bucket_sql} AS bucket,
        COUNT(DISTINCT query.user_id)
      FROM cohorts INNER JOIN query ON query.user_id = cohorts.user_id
      WHERE query.conversion_time IS NOT NULL
      AND query.conversion_time >= cohorts.cohort_time
      #{cohort_column == "conversion_time" ? "AND query.conversion_time != cohorts.cohort_time" : ""}
      GROUP BY 1, 2
    SQL

    # date_params appears twice: once per SELECT in the UNION
    params = [statement] + date_params + date_params + [days.to_i * 86400]
    connection_model.send(:sanitize_sql_array, params)
  end

  # delegate quoting to the underlying Active Record connection
  def quoting
    ->(value) { connection_model.connection.quote(value) }
  end

  # Redshift adapter silently ignores binds
  def parameter_binding
    if postgresql?
      :numeric
    elsif sqlite? && prepared_statements?
      # Active Record silently ignores binds with SQLite when prepared statements are disabled
      :numeric
    elsif mysql? && prepared_statements?
      # Active Record silently ignores binds with MySQL when prepared statements are disabled
      :positional
    elsif sqlserver?
      proc do |statement, variables|
        variables.each_with_index do |(k, _), i|
          statement = statement.gsub("{#{k}}", "@#{i} ")
        end
        [statement, variables.values]
      end
    end
  end

  protected

  def select_all(statement, params = [])
    statement = connection_model.send(:sanitize_sql_array, [statement] + params) if params.any?
    connection_model.connection.select_all(statement)
  end

  # separate from select_all to prevent mysql error
  def execute(statement)
    connection_model.connection.execute(statement)
  end

  def postgresql?
    ["PostgreSQL", "PostGIS"].include?(adapter_name)
  end

  def redshift?
    ["Redshift"].include?(adapter_name)
  end

  def mysql?
    ["MySQL", "Mysql2", "Mysql2Spatial", "Trilogy"].include?(adapter_name)
  end

  def sqlite?
    ["SQLite"].include?(adapter_name)
  end

  def sqlserver?
    ["SQLServer", "tinytds", "mssql"].include?(adapter_name)
  end

  def snowflake?
    data_source.adapter == "snowflake"
  end

  def adapter_name
    # prevent bad data source from taking down queries/new
    connection_model.connection.adapter_name rescue nil
  end

  # database-specific default schema; for MySQL-style databases it is the
  # database name itself
  def default_schema
    @default_schema ||= begin
      if postgresql? || redshift?
        "public"
      elsif sqlserver?
        "dbo"
      elsif sqlite?
        nil
      elsif connection_model.respond_to?(:connection_db_config)
        connection_model.connection_db_config.database
      else
        connection_model.connection_config[:database]
      end
    end
  end

  # Appends a schema filter (allow-list from settings, or a deny-list of
  # system schemas) to an information_schema query.
  def add_schemas(query)
    if settings["schemas"]
      where = "table_schema IN (?)"
      schemas = settings["schemas"]
    elsif mysql?
      where = "table_schema IN (?)"
      schemas = [default_schema]
    else
      where = "table_schema NOT IN (?)"
      schemas = ["information_schema"]
      schemas.map!(&:upcase) if snowflake?
      schemas << "pg_catalog" if postgresql? || redshift?
    end
    connection_model.send(:sanitize_sql_array, ["#{query} WHERE #{where}", schemas])
  end

  # Applies a per-statement timeout; raises for databases without support.
  def set_timeout(timeout)
    if postgresql? || redshift?
      execute("SET #{use_transaction? ? "LOCAL " : ""}statement_timeout = #{timeout.to_i * 1000}")
    elsif mysql?
      mariadb = connection_model.connection.mariadb? rescue false
      if mariadb
        execute("SET max_statement_time = #{timeout.to_i * 1000}")
      else
        execute("SET max_execution_time = #{timeout.to_i * 1000}")
      end
    else
      raise Blazer::TimeoutNotSupported, "Timeout not supported for #{adapter_name} adapter"
    end
  end

  def use_transaction?
    settings.key?("use_transaction") ? settings["use_transaction"] : true
  end

  # Yields a pooled connection, inside a transaction that is always rolled
  # back (so statements cannot persist changes) unless disabled in settings.
  def in_transaction
    connection_model.connection_pool.with_connection do |connection|
      if use_transaction?
        connection_model.transaction do
          yield connection
          raise ActiveRecord::Rollback
        end
      else
        yield connection
      end
    end
  end

  def prepared_statements?
    connection_model.connection.prepared_statements
  end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/lib/blazer/adapters/snowflake_adapter.rb | lib/blazer/adapters/snowflake_adapter.rb | module Blazer
module Adapters
class SnowflakeAdapter < SqlAdapter
def initialize(data_source)
@data_source = data_source
@@registered ||= begin
require "active_record/connection_adapters/odbc_adapter"
require "odbc_adapter/adapters/postgresql_odbc_adapter"
ODBCAdapter.register(/snowflake/, ODBCAdapter::Adapters::PostgreSQLODBCAdapter) do
# Explicitly turning off prepared statements as they are not yet working with
# snowflake + the ODBC ActiveRecord adapter
def prepared_statements
false
end
# Quoting needs to be changed for snowflake
def quote_column_name(name)
name.to_s
end
private
# Override dbms_type_cast to get the values encoded in UTF-8
def dbms_type_cast(columns, values)
int_column = {}
columns.each_with_index do |c, i|
int_column[i] = c.type == 3 && c.scale == 0
end
float_column = {}
columns.each_with_index do |c, i|
float_column[i] = c.type == 3 && c.scale != 0
end
values.each do |row|
row.each_index do |idx|
val = row[idx]
if val
if int_column[idx]
row[idx] = val.to_i
elsif float_column[idx]
row[idx] = val.to_f
elsif val.is_a?(String)
row[idx] = val.force_encoding('UTF-8')
end
end
end
end
end
end
end
@connection_model =
Class.new(Blazer::Connection) do
def self.name
"Blazer::Connection::SnowflakeAdapter#{object_id}"
end
if data_source.settings["conn_str"]
establish_connection(adapter: "odbc", conn_str: data_source.settings["conn_str"])
elsif data_source.settings["dsn"]
establish_connection(adapter: "odbc", dsn: data_source.settings["dsn"])
end
end
end
def cancel(run_id)
# todo
end
# https://docs.snowflake.com/en/sql-reference/data-types-text.html#escape-sequences
def quoting
:backslash_escape
end
def parameter_binding
# TODO
end
end
end
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
ankane/blazer | https://github.com/ankane/blazer/blob/e5515ecca17ed2f2d3f87347147549353983f24b/config/routes.rb | config/routes.rb | Blazer::Engine.routes.draw do
resources :queries do
post :run, on: :collection # err on the side of caution
post :cancel, on: :collection
post :refresh, on: :member
get :tables, on: :collection
get :schema, on: :collection
get :docs, on: :collection
end
resources :checks, except: [:show] do
get :run, on: :member
end
resources :dashboards, except: [:index] do
post :refresh, on: :member
end
if Blazer.uploads?
resources :uploads do
end
end
root to: "queries#home"
end
| ruby | MIT | e5515ecca17ed2f2d3f87347147549353983f24b | 2026-01-04T15:45:43.211083Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/test/youplot_test.rb | test/youplot_test.rb | # frozen_string_literal: true
require_relative 'test_helper'
class YouPlotTest < Test::Unit::TestCase
def teardown
YouPlot.run_as_executable = false
end
test :it_has_a_version_number do
assert_kind_of String, ::YouPlot::VERSION
end
test :run_as_executable do
assert_equal false, YouPlot.run_as_executable
assert_equal false, YouPlot.run_as_executable?
YouPlot.run_as_executable = true
assert_equal true, YouPlot.run_as_executable
assert_equal true, YouPlot.run_as_executable?
end
end
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/test/test_helper.rb | test/test_helper.rb | # frozen_string_literal: true
require 'simplecov'
SimpleCov.start
require 'youplot'
require 'test/unit'
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/test/unicode_plot_test.rb | test/unicode_plot_test.rb | # frozen_string_literal: true
require_relative 'test_helper'
require 'unicode_plot'
# Check the UnicodePlot constants that YouPlot depends on.
# Prepare for UnicodePlot version upgrades.
class UnicodePlotTest < Test::Unit::TestCase
test 'VERSION' do
assert UnicodePlot::VERSION
end
test 'BORDER_MAP' do
assert_instance_of Hash, UnicodePlot::BORDER_MAP
end
test 'PREDEFINED_TRANSFORM_FUNCTIONS' do
assert_instance_of Hash, UnicodePlot::ValueTransformer::PREDEFINED_TRANSFORM_FUNCTIONS
end
end
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/test/youplot/dsv_test.rb | test/youplot/dsv_test.rb | # frozen_string_literal: true
require_relative '../test_helper'
class YouPlotDSVTest < Test::Unit::TestCase
def setup
@m = YouPlot::DSV
end
test :transpose2 do
n = nil
assert_equal([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]], @m.transpose2([[1, 4, 7],
[2, 5, 8],
[3, 6, 9]]))
assert_equal([[1, 2, 3],
[4, 5, n],
[6, n, n]], @m.transpose2([[1, 4, 6],
[2, 5],
[3]]))
assert_equal([[1, 2, 3],
[n, 4, 5],
[n, n, 6]], @m.transpose2([[1],
[2, 4],
[3, 5, 6]]))
end
test :get_headers do
assert_equal([1, 4, 7], @m.get_headers([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]], true, true))
assert_equal([1, 2, 3], @m.get_headers([[1, 4, 6],
[2, 5],
[3]], true, true))
assert_equal([1, 2, 3], @m.get_headers([[1],
[2, 4],
[3, 5, 6]], true, true))
assert_equal([1, 2, 3], @m.get_headers([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]], true, false))
assert_equal([1, 4, 6], @m.get_headers([[1, 4, 6],
[2, 5],
[3]], true, false))
assert_equal([1], @m.get_headers([[1],
[2, 4],
[3, 5, 6]], true, false))
assert_equal(nil, @m.get_headers([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]], false, true))
assert_equal(nil, @m.get_headers([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]], false, false))
assert_equal([1, 2, 3], @m.get_headers([[1, 2, 3]], true, false))
end
test :get_series do
n = nil
assert_equal([[2, 3], [5, 6], [8, 9]], @m.get_series([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]], true, true))
assert_equal([[4, 6], [5], []], @m.get_series([[1, 4, 6],
[2, 5],
[3]], true, true))
assert_equal([[], [4], [5, 6]], @m.get_series([[1],
[2, 4],
[3, 5, 6]], true, true))
assert_equal([[4, 7], [5, 8], [6, 9]], @m.get_series([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]], true, false))
assert_equal([[2, 3], [5, nil]], @m.get_series([[1, 4, 6],
[2, 5],
[3]], true, false))
assert_equal([[2, 3], [4, 5], [nil, 6]], @m.get_series([[1],
[2, 4],
[3, 5, 6]], true, false))
assert_equal([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]], @m.get_series([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]], false, true))
assert_equal([[1, 4, 6],
[2, 5],
[3]], @m.get_series([[1, 4, 6],
[2, 5],
[3]], false, true))
assert_equal([[1],
[2, 4],
[3, 5, 6]], @m.get_series([[1],
[2, 4],
[3, 5, 6]], false, true))
assert_equal([[1, 4, 7],
[2, 5, 8],
[3, 6, 9]], @m.get_series([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]], false, false))
assert_equal([[1, 2, 3],
[4, 5, n],
[6, n, n]], @m.get_series([[1, 4, 6],
[2, 5],
[3]], false, false))
assert_equal([[1, 2, 3],
[n, 4, 5],
[n, n, 6]], @m.get_series([[1],
[2, 4],
[3, 5, 6]], false, false))
assert_equal([[], [], []], @m.get_series([[1, 2, 3]], true, false))
end
end
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/test/youplot/simple_test.rb | test/youplot/simple_test.rb | # frozen_string_literal: true
require 'tempfile'
require_relative '../test_helper'
class YouPlotSimpleTest < Test::Unit::TestCase
class << self
def startup
@stdin = $stdin.dup
@stdout = $stdout.dup
@stderr = $stderr.dup
end
def shutdown
$stdin = @stdin
$stdout = @stdout
$stderr = @stderr
end
end
def setup
$stdin = File.open(File.expand_path('../fixtures/simple.tsv', __dir__), 'r')
@stderr_file = Tempfile.new
@stdout_file = Tempfile.new
$stderr = @stderr_file
$stdout = @stdout_file
end
def teardown
@stderr_file.close
@stdout_file.close
end
def fixture(fname)
File.read(File.expand_path("../fixtures/#{fname}", __dir__))
end
# Single command
# The goal is to verify that the command works without any options.
test :barplot do
assert_raise(ArgumentError) do
YouPlot::Command.new(['barplot']).run
end
end
test :bar do
assert_raise(ArgumentError) do
YouPlot::Command.new(['bar']).run
end
end
test :histogram do
YouPlot::Command.new(['histogram']).run
assert_equal fixture('simple-histogram.txt'), @stderr_file.read
end
test :hist do
YouPlot::Command.new(['hist']).run
assert_equal fixture('simple-histogram.txt'), @stderr_file.read
end
test :lineplot do
YouPlot::Command.new(['lineplot']).run
assert_equal fixture('simple-lineplot.txt'), @stderr_file.read
end
test :line do
YouPlot::Command.new(['line']).run
assert_equal fixture('simple-lineplot.txt'), @stderr_file.read
end
test :lineplots do
assert_raise(YouPlot::Backends::UnicodePlot::Error) do
YouPlot::Command.new(['lineplots']).run
end
end
test :lines do
assert_raise(YouPlot::Backends::UnicodePlot::Error) do
YouPlot::Command.new(['lines']).run
end
end
test :scatter do
assert_raise(YouPlot::Backends::UnicodePlot::Error) do
YouPlot::Command.new(['scatter']).run
end
end
test :s do
assert_raise(YouPlot::Backends::UnicodePlot::Error) do
YouPlot::Command.new(['s']).run
end
end
test :density do
assert_raise(YouPlot::Backends::UnicodePlot::Error) do
YouPlot::Command.new(['density']).run
end
end
test :d do
assert_raise(YouPlot::Backends::UnicodePlot::Error) do
YouPlot::Command.new(['d']).run
end
end
test :boxplot do
YouPlot::Command.new(['boxplot']).run
assert_equal fixture('simple-boxplot.txt'), @stderr_file.read
end
test :box do
YouPlot::Command.new(['box']).run
assert_equal fixture('simple-boxplot.txt'), @stderr_file.read
end
test :count do
YouPlot::Command.new(['c']).run
assert_equal fixture('simple-count.txt'), @stderr_file.read
end
test :c do
YouPlot::Command.new(['count']).run
assert_equal fixture('simple-count.txt'), @stderr_file.read
end
test :plot_output_stdout do
YouPlot::Command.new(['line', '-o']).run
assert_equal '', @stderr_file.read
assert_equal fixture('simple-lineplot.txt'), @stdout_file.read
end
test :data_output_stdout do
YouPlot::Command.new(['box', '-O']).run
assert_equal fixture('simple-boxplot.txt'), @stderr_file.read
assert_equal fixture('simple.tsv'), @stdout_file.read
end
test :line_transpose do
$stdin = File.open(File.expand_path('../fixtures/simpleT.tsv', __dir__), 'r')
YouPlot::Command.new(['line', '--transpose']).run
assert_equal fixture('simple-lineplot.txt'), @stderr_file.read
end
test :line_T do
$stdin = File.open(File.expand_path('../fixtures/simpleT.tsv', __dir__), 'r')
YouPlot::Command.new(['line', '-T']).run
assert_equal fixture('simple-lineplot.txt'), @stderr_file.read
end
test :line_xlabel do
YouPlot::Command.new(['line', '--xlabel', 'X-LABEL']).run
assert_equal fixture('simple-lineplot-xlabel.txt'), @stderr_file.read
end
test :line_ylabel do
YouPlot::Command.new(['line', '--ylabel', 'Y-LABEL']).run
assert_equal fixture('simple-lineplot-ylabel.txt'), @stderr_file.read
end
test :line_width do
YouPlot::Command.new(['line', '--width', '17']).run
assert_equal fixture('simple-lineplot-width-17.txt'), @stderr_file.read
end
test :line_w do
YouPlot::Command.new(['line', '-w', '17']).run
assert_equal fixture('simple-lineplot-width-17.txt'), @stderr_file.read
end
test :line_height do
YouPlot::Command.new(['line', '--height', '17']).run
assert_equal fixture('simple-lineplot-height-17.txt'), @stderr_file.read
end
test :line_h do
YouPlot::Command.new(['line', '-h', '17']).run
assert_equal fixture('simple-lineplot-height-17.txt'), @stderr_file.read
end
test :line_margin do
YouPlot::Command.new(['line', '--margin', '17']).run
assert_equal fixture('simple-lineplot-margin-17.txt'), @stderr_file.read
end
test :line_m do
YouPlot::Command.new(['line', '-m', '17']).run
assert_equal fixture('simple-lineplot-margin-17.txt'), @stderr_file.read
end
test :line_padding do
YouPlot::Command.new(['line', '--padding', '17']).run
assert_equal fixture('simple-lineplot-padding-17.txt'), @stderr_file.read
end
test :line_border_corners do
YouPlot::Command.new(['line', '--border', 'corners']).run
assert_equal fixture('simple-lineplot-border-corners.txt'), @stderr_file.read
end
test :line_b_corners do
YouPlot::Command.new(['line', '-b', 'corners']).run
assert_equal fixture('simple-lineplot-border-corners.txt'), @stderr_file.read
end
test :line_border_barplot do
YouPlot::Command.new(['line', '--border', 'barplot']).run
assert_equal fixture('simple-lineplot-border-barplot.txt'), @stderr_file.read
end
test :line_b_barplot do
YouPlot::Command.new(['line', '-b', 'barplot']).run
assert_equal fixture('simple-lineplot-border-barplot.txt'), @stderr_file.read
end
test :line_canvas_ascii do
YouPlot::Command.new(['line', '--canvas', 'ascii']).run
assert_equal fixture('simple-lineplot-canvas-ascii.txt'), @stderr_file.read
end
test :line_canvas_braille do
YouPlot::Command.new(['line', '--canvas', 'braille']).run
assert_equal fixture('simple-lineplot.txt'), @stderr_file.read
end
test :line_canvas_density do
YouPlot::Command.new(['line', '--canvas', 'density']).run
assert_equal fixture('simple-lineplot-canvas-density.txt'), @stderr_file.read
end
test :line_canvas_dot do
YouPlot::Command.new(['line', '--canvas', 'dot']).run
assert_equal fixture('simple-lineplot-canvas-dot.txt'), @stderr_file.read
end
# test :line_canvas_block do
# YouPlot::Command.new(['line', '--canvas', 'block']).run
# assert_equal fixture('simple-lineplot-canvas-dot.txt'), @stderr_file.read
# end
test :hist_symbol_atmark do
YouPlot::Command.new(['hist', '--symbol', '@']).run
assert_equal fixture('simple-histogram-symbol-@.txt'), @stderr_file.read
end
test :line_xlim do
YouPlot::Command.new(['line', '--xlim', '-1,5']).run
assert_equal fixture('simple-lineplot-xlim--1-5.txt'), @stderr_file.read
end
test :line_ylim do
YouPlot::Command.new(['line', '--ylim', '-25,50']).run
assert_equal fixture('simple-lineplot-ylim--25-50.txt'), @stderr_file.read
end
test :line_xlim_and_ylim do
YouPlot::Command.new(['line', '--xlim', '-1,5', '--ylim', '-25,50']).run
assert_equal fixture('simple-lineplot-xlim--1-5-ylim--25-50.txt'), @stderr_file.read
end
test :line_grid do
YouPlot::Command.new(['line', '--grid']).run
assert_equal fixture('simple-lineplot.txt'), @stderr_file.read
end
test :line_no_grid do
YouPlot::Command.new(['line', '--no-grid']).run
assert_equal fixture('simple-lineplot-no-grid.txt'), @stderr_file.read
end
end
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/test/youplot/iris_test.rb | test/youplot/iris_test.rb | # frozen_string_literal: true
require 'tempfile'
require_relative '../test_helper'
class YouPlotIRISTest < Test::Unit::TestCase
class << self
def startup
@stdin = $stdin.dup
@stdout = $stdout.dup
@stderr = $stderr.dup
end
def shutdown
$stdin = @stdin
$stdout = @stdout
$stderr = @stderr
end
end
def setup
$stdin = File.open(File.expand_path('../fixtures/iris.csv', __dir__), 'r')
@stderr_file = Tempfile.new
@stdout_file = Tempfile.new
$stderr = @stderr_file
$stdout = @stdout_file
end
def teardown
@stderr_file.close
@stdout_file.close
end
def fixture(fname)
File.read(File.expand_path("../fixtures/#{fname}", __dir__))
end
test :barplot do
YouPlot::Command.new(['barplot', '-H', '-d,', '-t', 'IRIS-BARPLOT']).run
assert_equal fixture('iris-barplot.txt'), @stderr_file.read
end
# barplot doesn't make sense, but just to make sure it works.
test :bar do
YouPlot::Command.new(['bar', '-H', '-d,', '-t', 'IRIS-BARPLOT']).run
assert_equal fixture('iris-barplot.txt'), @stderr_file.read
end
test :histogram do
YouPlot::Command.new(['histogram', '-H', '-d,', '-t', 'IRIS-HISTOGRAM']).run
assert_equal fixture('iris-histogram.txt'), @stderr_file.read
end
test :hist do
YouPlot::Command.new(['hist', '-H', '-d,', '-t', 'IRIS-HISTOGRAM']).run
assert_equal fixture('iris-histogram.txt'), @stderr_file.read
end
# Yeah, lineplot/lineplots don't make sense too.
test :lineplot do
YouPlot::Command.new(['lineplot', '-H', '-d,', '-t', 'IRIS-LINEPLOT']).run
assert_equal fixture('iris-lineplot.txt'), @stderr_file.read
end
test :line do
YouPlot::Command.new(['line', '-H', '-d,', '-t', 'IRIS-LINEPLOT']).run
assert_equal fixture('iris-lineplot.txt'), @stderr_file.read
end
# l is an undocumented alias of lineplot.
test :l do
YouPlot::Command.new(['l', '-H', '-d,', '-t', 'IRIS-LINEPLOT']).run
assert_equal fixture('iris-lineplot.txt'), @stderr_file.read
end
test :lineplots do
YouPlot::Command.new(['lineplots', '-H', '-d,', '-t', 'IRIS-LINEPLOTS']).run
assert_equal fixture('iris-lineplots.txt'), @stderr_file.read
end
test :lines do
YouPlot::Command.new(['lines', '-H', '-d,', '-t', 'IRIS-LINEPLOTS']).run
assert_equal fixture('iris-lineplots.txt'), @stderr_file.read
end
# ls is an undocumented alias of lineplots.
test :ls do
YouPlot::Command.new(['lines', '-H', '-d,', '-t', 'IRIS-LINEPLOTS']).run
assert_equal fixture('iris-lineplots.txt'), @stderr_file.read
end
test :scatter do
YouPlot::Command.new(['scatter', '-H', '-d,', '-t', 'IRIS-SCATTER']).run
assert_equal fixture('iris-scatter.txt'), @stderr_file.read
end
test :s do
YouPlot::Command.new(['s', '-H', '-d,', '-t', 'IRIS-SCATTER']).run
assert_equal fixture('iris-scatter.txt'), @stderr_file.read
end
test :density do
YouPlot::Command.new(['density', '-H', '-d,', '-t', 'IRIS-DENSITY']).run
assert_equal fixture('iris-density.txt'), @stderr_file.read
end
test :d do
YouPlot::Command.new(['d', '-H', '-d,', '-t', 'IRIS-DENSITY']).run
assert_equal fixture('iris-density.txt'), @stderr_file.read
end
test :boxplot do
YouPlot::Command.new(['boxplot', '-H', '-d,', '-t', 'IRIS-BOXPLOT']).run
assert_equal fixture('iris-boxplot.txt'), @stderr_file.read
end
test :box do
YouPlot::Command.new(['box', '-H', '-d,', '-t', 'IRIS-BOXPLOT']).run
assert_equal fixture('iris-boxplot.txt'), @stderr_file.read
end
# Yeah, lineplot/lineplots don't make sense too.
# Just checking the behavior.
test :c do
YouPlot::Command.new(['count', '-H', '-d,']).run
assert_equal fixture('iris-count.txt'), @stderr_file.read
end
test :count do
YouPlot::Command.new(['c', '-H', '-d,']).run
assert_equal fixture('iris-count.txt'), @stderr_file.read
end
# Output options.
test :plot_output_stdout do
YouPlot::Command.new(['bar', '-o', '-H', '-d,', '-t', 'IRIS-BARPLOT']).run
assert_equal '', @stderr_file.read
assert_equal fixture('iris-barplot.txt'), @stdout_file.read
end
test :data_output_stdout do
YouPlot::Command.new(['bar', '-O', '-H', '-d,', '-t', 'IRIS-BARPLOT']).run
assert_equal fixture('iris-barplot.txt'), @stderr_file.read
assert_equal fixture('iris.csv'), @stdout_file.read
end
%i[colors color colours colour].each do |cmd_name|
test cmd_name do
YouPlot::Command.new([cmd_name.to_s]).run
assert_equal fixture('colors.txt'), @stderr_file.read
assert_equal '', @stdout_file.read
end
end
test :colors_output_stdout do
YouPlot::Command.new(['colors', '-o']).run
assert_equal '', @stderr_file.read
assert_equal fixture('colors.txt'), @stdout_file.read
end
test :unrecognized_command do
assert_raise(YouPlot::Parser::Error) do
YouPlot::Command.new(['abracadabra', '--hadley', '--wickham']).run
end
assert_equal '', @stderr_file.read
assert_equal '', @stdout_file.read
end
test :encoding do
$stdin = File.open(File.expand_path('../fixtures/iris_utf16.csv', __dir__), 'r')
YouPlot::Command.new(['s', '--encoding', 'UTF-16', '-H', '-d,', '-t', 'IRIS-SCATTER']).run
assert_equal fixture('iris-scatter.txt'), @stderr_file.read
end
end
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/test/youplot/backends/processing_test.rb | test/youplot/backends/processing_test.rb | # frozen_string_literal: true
require_relative '../../test_helper'
class ProcessingTest < Test::Unit::TestCase
test :count_values do
@m = YouPlot::Backends::Processing
assert_equal([%i[a b c], [3, 2, 1]], @m.count_values(%i[a a a b b c]))
assert_equal([%i[c b a], [3, 2, 1]], @m.count_values(%i[a b b c c c]))
end
test :count_values_non_tally do
@m = YouPlot::Backends::Processing
assert_equal([%i[a b c], [3, 2, 1]], @m.count_values(%i[a a a b b c], tally: false))
assert_equal([%i[c b a], [3, 2, 1]], @m.count_values(%i[a b b c c c], tally: false))
end
end
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/lib/youplot.rb | lib/youplot.rb | # frozen_string_literal: true
require_relative 'youplot/version'
require_relative 'youplot/dsv'
require_relative 'youplot/parameters'
require_relative 'youplot/command'
module YouPlot
# @run_as_executable = true / false
# YouPlot behaves slightly differently when run as a command line tool
# and when run as a script (e.g. for testing). In the event of an error,
# when run as a command line tool, YouPlot will display a short error message
# and exit abnormally. When run as a script, it will just raise an error.
@run_as_executable = false
class << self
attr_accessor :run_as_executable
def run_as_executable?
@run_as_executable
end
end
end
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/lib/youplot/command.rb | lib/youplot/command.rb | # frozen_string_literal: true
require_relative 'dsv'
require_relative 'parser'
require_relative 'backends/unicode_plot'
module YouPlot
Data = Struct.new(:headers, :series)
class Command
attr_accessor :command, :params, :options
attr_reader :data, :parser
def initialize(argv = ARGV)
@argv = argv
@parser = Parser.new
@command = nil
@params = nil
@options = nil
@backend = YouPlot::Backends::UnicodePlot
end
def run_as_executable
YouPlot.run_as_executable = true
run
end
def run
parser.parse_options(@argv)
@command ||= parser.command
@options ||= parser.options
@params ||= parser.params
# color command
if %i[colors color colours colour].include? @command
plot = create_plot
output_plot(plot)
return
end
# progressive mode
if options[:progressive]
stop = false
Signal.trap(:INT) { stop = true }
# make cursor invisible
options[:output].print "\e[?25l"
# mainloop
while (input = Kernel.gets)
n = main_progressive(input)
break if stop
options[:output].print "\e[#{n}F"
end
options[:output].print "\e[0J"
# make cursor visible
options[:output].print "\e[?25h"
# normal mode
else
# Sometimes the input file does not end with a newline code.
begin
begin
input = Kernel.gets(nil)
rescue Errno::ENOENT => e
warn e.message
next
end
main(input)
end until input
end
end
private
def main(input)
# Outputs input data to a file or stdout.
output_data(input)
@data = parse_dsv(input)
# Debug mode, show parsed results
pp @data if options[:debug]
# When run as a program instead of a library
if YouPlot.run_as_executable?
begin
plot = create_plot
rescue ArgumentError => e
# Show only one line of error.
warn e.backtrace[0]
# Show error message in purple
warn "\e[35m#{e}\e[0m"
# Explicitly terminated with exit code: 1
exit 1
end
# When running YouPlot as a library (e.g. for testing)
else
plot = create_plot
end
output_plot(plot)
end
def main_progressive(input)
output_data(input)
# FIXME
# Worked around the problem of not being able to draw
# plots when there is only one header line.
if @raw_data.nil?
@raw_data = String.new
if options[:headers]
@raw_data << input
return
end
end
@raw_data << input
# FIXME
@data = parse_dsv(@raw_data)
plot = create_plot
output_plot_progressive(plot)
end
def parse_dsv(input)
# If encoding is specified, convert to UTF-8
if options[:encoding]
input.force_encoding(options[:encoding])
.encode!('utf-8')
end
begin
data = DSV.parse(input, options[:delimiter], options[:headers], options[:transpose])
rescue CSV::MalformedCSVError => e
warn 'Failed to parse the text. '
warn 'Please try to set the correct character encoding with --encoding option.'
warn e.backtrace.grep(/youplot/).first
exit 1
rescue ArgumentError => e
warn 'Failed to parse the text. '
warn e.backtrace.grep(/youplot/).first
exit 1
end
data
end
def create_plot
case command
when :bar, :barplot
@backend.barplot(data, params, options[:fmt])
when :count, :c
@backend.barplot(data, params, count: true, reverse: options[:reverse])
when :hist, :histogram
@backend.histogram(data, params)
when :line, :lineplot, :l
@backend.line(data, params, options[:fmt])
when :lines, :lineplots, :ls
@backend.lines(data, params, options[:fmt])
when :scatter, :s
@backend.scatter(data, params, options[:fmt])
when :density, :d
@backend.density(data, params, options[:fmt])
when :box, :boxplot
@backend.boxplot(data, params)
when :colors, :color, :colours, :colour
@backend.colors(options[:color_names])
else
raise "unrecognized plot_type: #{command}"
end
end
def output_data(input)
# Pass the input to subsequent pipelines
case options[:pass]
when IO, StringIO
options[:pass].print(input)
else
if options[:pass]
File.open(options[:pass], 'w') do |f|
f.print(input)
end
end
end
end
def output_plot(plot)
case options[:output]
when IO, StringIO
plot.render(options[:output])
when String, Tempfile
File.open(options[:output], 'w') do |f|
plot.render(f)
end
end
end
def output_plot_progressive(plot)
case options[:output]
when IO, StringIO
# RefactorMe
out = StringIO.new(String.new)
def out.tty?
true
end
plot.render(out)
lines = out.string.lines
lines.each do |line|
options[:output].print line.chomp
options[:output].print "\e[0K"
options[:output].puts
end
options[:output].print "\e[0J"
options[:output].flush
out.string.lines.size
else
raise 'In progressive mode, output to a file is not possible.'
end
end
end
end
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/lib/youplot/dsv.rb | lib/youplot/dsv.rb | # frozen_string_literal: true
require 'csv'
module YouPlot
# Module to handle DSV (Delimiter-separated values) format.
# Extract header and series.
module DSV
module_function
def parse(input, delimiter, headers, transpose)
# Parse as CSV
arr = CSV.parse(input, col_sep: delimiter)
# Remove blank lines
arr.delete_if do |i|
i == [] or i.all?(&:nil?)
end
# get header
headers = get_headers(arr, headers, transpose)
# get series
series = get_series(arr, headers, transpose)
# Return if No header
return Data.new(headers, series) if headers.nil?
# Warn if header contains nil
warn "\e[35mHeaders contains nil in it.\e[0m" if headers.include?(nil)
# Warn if header contains ''
warn "\e[35mHeaders contains \"\" in it.\e[0m" if headers.include? ''
# Make sure the number of elements in the header matches the number of series.
h_size = headers.size
s_size = series.size
if h_size > s_size
warn "\e[35mThe number of headers is greater than the number of series.\e[0m"
exit 1 if YouPlot.run_as_executable?
elsif h_size < s_size
warn "\e[35mThe number of headers is less than the number of series.\e[0m"
exit 1 if YouPlot.run_as_executable?
end
Data.new(headers, series) if h_size == s_size
end
# Transpose different sized ruby arrays
# https://stackoverflow.com/q/26016632
def transpose2(arr)
Array.new(arr.map(&:length).max) { |i| arr.map { |e| e[i] } }
end
def get_headers(arr, headers, transpose)
# header(-)
return nil unless headers
# header(+) trenspose(+)
return arr.map(&:first) if transpose
# header(+) transpose(-)
arr[0]
end
def get_series(arr, headers, transpose)
# header(-)
unless headers
return arr if transpose
return transpose2(arr)
end
# header(+) but no element in the series.
# TODO: should raise error?
return Array.new(arr[0].size, []) if arr.size == 1
# header(+) transpose(+)
return arr.map { |row| row[1..-1] } if transpose
# header(+) transpose(-)
transpose2(arr[1..-1])
end
end
end
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/lib/youplot/version.rb | lib/youplot/version.rb | # frozen_string_literal: true
module YouPlot
VERSION = '0.4.6'
end
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/lib/youplot/options.rb | lib/youplot/options.rb | # frozen_string_literal: true
module YouPlot
# Command line options that are not Plot parameters
Options = Struct.new(
:delimiter,
:transpose,
:headers,
:pass,
:output,
:fmt,
:progressive,
:encoding,
:reverse, # count
:color_names, # color
:debug
)
end
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/lib/youplot/parser.rb | lib/youplot/parser.rb | # frozen_string_literal: true
require 'optparse'
require_relative 'options'
module YouPlot
  # Class for parsing command line options.
  # Splits ARGV into a sub-command plus Options (I/O behavior) and
  # Parameters (plot appearance), optionally merging a YAML config file.
  class Parser
    class Error < StandardError; end

    attr_reader :command, :options, :params,
                :main_parser, :sub_parser,
                :config_file, :config

    def initialize
      @command = nil
      @options = Options.new(
        "\t",    # delimiter
        false,   # transpose
        nil,     # headers
        false,   # pass
        $stderr, # output
        'xyy',   # fmt
        false,   # progressive
        nil,     # encoding
        false,   # reverse
        false    # color_names
        # NOTE(review): only 10 of the 11 Options members are initialized,
        # so :debug starts as nil (falsy — same effect as false). The
        # labels previously attached to the last two values were off by one.
      )

      @params = Parameters.new
    end

    # Load and merge a config file when one was given via --config
    # or can be discovered in a default location. No-op otherwise.
    def apply_config_file
      return if !config_file && find_config_file.nil?

      read_config_file
      configure
    end

    # Candidate config-file paths, highest priority first.
    def config_file_candidate_paths
      # keep the order of the paths
      paths = []
      paths << ENV['MYYOUPLOTRC'] if ENV['MYYOUPLOTRC']
      paths << '.youplot.yml'
      paths << '.youplotrc'
      if ENV['HOME']
        paths << File.join(ENV['HOME'], '.youplotrc')
        paths << File.join(ENV['HOME'], '.youplot.yml')
        paths << File.join(ENV['HOME'], '.config', 'youplot', 'youplotrc')
        paths << File.join(ENV['HOME'], '.config', 'youplot', 'youplot.yml')
      end
      paths
    end

    # Return the first existing candidate path (memoized in @config_file
    # and exported to ENV['MYYOUPLOTRC'] for `--config` display), or nil.
    def find_config_file
      config_file_candidate_paths.each do |file|
        path = File.expand_path(file)
        next unless File.exist?(path)

        @config_file = path
        ENV['MYYOUPLOTRC'] = path
        return @config_file
      end
      nil
    end

    # Parse the located config file as YAML into @config.
    def read_config_file
      require 'yaml'
      @config = YAML.load_file(config_file)
    end

    # Copy config-file values into @options/@params.
    # NOTE(review): `||=` only fills members that are currently nil/false,
    # so a config value can never override a truthy default such as
    # delimiter ("\t") or fmt ('xyy') — confirm this is intended.
    def configure
      option_members = @options.members
      param_members = @params.members
      # It would be more useful to be able to configure by plot type
      config.each do |k, v|
        k = k.to_sym
        if option_members.include?(k)
          @options[k] ||= v
        elsif param_members.include?(k)
          @params[k] ||= v
        else
          raise Error, "Unknown option/param in config file: #{k}"
        end
      end
    end

    # Build an OptionParser carrying the options shared by every sub-command.
    def create_base_parser
      OptionParser.new do |parser|
        parser.program_name = 'YouPlot'
        parser.version = YouPlot::VERSION
        parser.summary_width = 23
        parser.on_tail('') # Add a blank line at the end
        parser.separator('')
        parser.on('Common options:') # description-only entry acts as a heading
        parser.on('-O', '--pass [FILE]', 'file to output input data to [stdout]',
                  'for inserting YouPlot in the middle of Unix pipes') do |v|
          options[:pass] = v || $stdout
        end
        parser.on('-o', '--output [FILE]', 'file to output plots to [stdout]',
                  'If no option is specified, plot will print to stderr') do |v|
          options[:output] = v || $stdout
        end
        parser.on('-d', '--delimiter DELIM', String, 'use DELIM instead of [TAB] for field delimiter') do |v|
          options[:delimiter] = v
        end
        parser.on('-H', '--headers', TrueClass, 'specify that the input has header row') do |v|
          options[:headers] = v
        end
        parser.on('-T', '--transpose', TrueClass, 'transpose the axes of the input data') do |v|
          options[:transpose] = v
        end
        parser.on('-t', '--title STR', String, 'print string on the top of plot') do |v|
          params.title = v
        end
        parser.on('--xlabel STR', String, 'print string on the bottom of the plot') do |v|
          params.xlabel = v
        end
        parser.on('--ylabel STR', String, 'print string on the far left of the plot') do |v|
          params.ylabel = v
        end
        parser.on('-w', '--width INT', Numeric, 'number of characters per row') do |v|
          params.width = v
        end
        parser.on('-h', '--height INT', Numeric, 'number of rows') do |v|
          params.height = v
        end
        border_options = UnicodePlot::BORDER_MAP.keys.join(', ')
        parser.on('-b', '--border STR', String, 'specify the style of the bounding box', "(#{border_options})") do |v|
          params.border = v.to_sym
        end
        parser.on('-m', '--margin INT', Numeric, 'number of spaces to the left of the plot') do |v|
          params.margin = v
        end
        parser.on('--padding INT', Numeric, 'space of the left and right of the plot') do |v|
          params.padding = v
        end
        # Numeric strings become Integer color codes, anything else a Symbol name.
        parser.on('-c', '--color VAL', String, 'color of the drawing') do |v|
          params.color = v =~ /\A[0-9]+\z/ ? v.to_i : v.to_sym
        end
        parser.on('--[no-]labels', TrueClass, 'hide the labels') do |v|
          params.labels = v
        end
        parser.on('-p', '--progress', TrueClass, 'progressive mode [experimental]') do |v|
          options[:progressive] = v
        end
        # NOTE(review): these two redefine UnicodePlot::IOContext#color?
        # globally for the process.
        parser.on('-C', '--color-output', TrueClass, 'colorize even if writing to a pipe') do |_v|
          UnicodePlot::IOContext.define_method(:color?) { true } # FIXME
        end
        parser.on('-M', '--monochrome', TrueClass, 'no colouring even if writing to a tty') do |_v|
          UnicodePlot::IOContext.define_method(:color?) { false } # FIXME
        end
        parser.on('--encoding STR', String, 'specify the input encoding') do |v|
          options[:encoding] = v
        end
        # Optparse adds the help option, but it doesn't show up in usage.
        # This is why you need the code below.
        parser.on('--help', 'print sub-command help menu') do
          puts parser.help
          exit if YouPlot.run_as_executable?
        end
        parser.on('--config FILE', 'specify a config file') do |v|
          @config_file = v
        end
        parser.on('--debug', TrueClass, 'print preprocessed data') do |v|
          options[:debug] = v
        end
        # yield opt if block_given?
      end
    end

    # Build the top-level parser whose banner lists every sub-command.
    def create_main_parser
      @main_parser = create_base_parser
      main_parser.banner = \
        <<~MSG
          Program: YouPlot (Tools for plotting on the terminal)
          Version: #{YouPlot::VERSION} (using UnicodePlot #{UnicodePlot::VERSION})
          Source: https://github.com/red-data-tools/YouPlot
          Usage: uplot <command> [options] <in.tsv>
          Commands:
          barplot bar draw a horizontal barplot
          histogram hist draw a horizontal histogram
          lineplot line draw a line chart
          lineplots lines draw a line chart with multiple series
          scatter s draw a scatter plot
          density d draw a density plot
          boxplot box draw a horizontal boxplot
          count c draw a barplot based on the number of
          occurrences (slow)
          colors color show the list of available colors
          General options:
          --config print config file info
          --help print command specific help menu
          --version print the version of YouPlot
        MSG
      # Help for the main parser is simple.
      # Simply show the banner above.
      main_parser.on('--help', 'print sub-command help menu') do
        show_main_help
      end
      main_parser.on('--config', 'show config file info') do
        show_config_info
      end
    end

    # Print the main banner; exits when running as the `uplot` executable.
    def show_main_help(out = $stdout)
      out.puts main_parser.banner
      out.puts
      exit if YouPlot.run_as_executable?
    end

    # Print the active config file and its contents, or a hint on where
    # config files are searched for; exits when running as the executable.
    def show_config_info
      if ENV['MYYOUPLOTRC']
        puts "config file : #{ENV['MYYOUPLOTRC']}"
        puts config.inspect
      else
        puts <<~EOS
          Configuration file not found.
          It should be a YAML file, like this example:
          width : 40
          height : 20
          By default, YouPlot will look for the configuration file in these locations:
          #{config_file_candidate_paths.map { |s| ' ' + s }.join("\n")}
          If you have the file elsewhere, you can specify its location with the `MYYOUPLOTRC` environment variable.
        EOS
      end
      exit if YouPlot.run_as_executable?
    end

    # The sub_parser_add_* helpers below attach one shared option each to
    # the current sub-command parser.

    def sub_parser_add_symbol
      sub_parser.on_head('--symbol STR', String, 'character to be used to plot the bars') do |v|
        params.symbol = v
      end
    end

    def sub_parser_add_xscale
      xscale_options = UnicodePlot::ValueTransformer::PREDEFINED_TRANSFORM_FUNCTIONS.keys.join(', ')
      sub_parser.on_head('--xscale STR', String, "axis scaling (#{xscale_options})") do |v|
        params.xscale = v.to_sym
      end
    end

    def sub_parser_add_canvas
      canvas_types = UnicodePlot::Canvas::CANVAS_CLASS_MAP.keys.join(', ')
      sub_parser.on_head('--canvas STR', String, 'type of canvas', "(#{canvas_types})") do |v|
        params.canvas = v.to_sym
      end
    end

    def sub_parser_add_xlim
      sub_parser.on_head('--xlim FLOAT,FLOAT', Array, 'plotting range for the x coordinate') do |v|
        params.xlim = v.map(&:to_f)
      end
    end

    def sub_parser_add_ylim
      sub_parser.on_head('--ylim FLOAT,FLOAT', Array, 'plotting range for the y coordinate') do |v|
        params.ylim = v.map(&:to_f)
      end
    end

    def sub_parser_add_grid
      sub_parser.on_head('--[no-]grid', TrueClass, 'draws grid-lines at the origin') do |v|
        params.grid = v
      end
    end

    def sub_parser_add_fmt_xyxy
      sub_parser.on_head('--fmt STR', String,
                         'xyxy : header is like x1, y1, x2, y2, x3, y3...',
                         'xyy : header is like x, y1, y2, y2, y3...') do |v|
        options[:fmt] = v
      end
    end

    def sub_parser_add_fmt_yx
      sub_parser.on_head('--fmt STR', String,
                         'xy : header is like x, y...',
                         'yx : header is like y, x...') do |v|
        options[:fmt] = v
      end
    end

    # Build the per-sub-command parser for @command, wiring in only the
    # options each plot type supports. Unknown commands warn/raise.
    def create_sub_parser
      @sub_parser = create_base_parser
      sub_parser.banner = \
        <<~MSG
          Usage: YouPlot #{command} [options] <in.tsv>
          Options for #{command}:
        MSG

      case command
      # If you type only `uplot` in the terminal.
      # Output help to standard error output.
      when nil
        show_main_help($stderr)

      # Output help to standard output.
      when :help
        show_main_help

      when :barplot, :bar
        sub_parser_add_symbol
        sub_parser_add_fmt_yx
        sub_parser_add_xscale

      when :count, :c
        sub_parser.on_head('-r', '--reverse', TrueClass, 'reverse the result of comparisons') do |v|
          options.reverse = v
        end
        sub_parser_add_symbol
        sub_parser_add_xscale

      when :histogram, :hist
        sub_parser_add_symbol
        sub_parser.on_head('--closed STR', String, 'side of the intervals to be closed [left]') do |v|
          params.closed = v
        end
        sub_parser.on_head('-n', '--nbins INT', Numeric, 'approximate number of bins') do |v|
          params.nbins = v
        end

      when :lineplot, :line, :l
        sub_parser_add_canvas
        sub_parser_add_grid
        sub_parser_add_fmt_yx
        sub_parser_add_ylim
        sub_parser_add_xlim

      when :lineplots, :lines, :ls
        sub_parser_add_canvas
        sub_parser_add_grid
        sub_parser_add_fmt_xyxy
        sub_parser_add_ylim
        sub_parser_add_xlim

      when :scatter, :s
        sub_parser_add_canvas
        sub_parser_add_grid
        sub_parser_add_fmt_xyxy
        sub_parser_add_ylim
        sub_parser_add_xlim

      when :density, :d
        sub_parser_add_canvas
        sub_parser_add_grid
        sub_parser_add_fmt_xyxy
        sub_parser_add_ylim
        sub_parser_add_xlim

      when :boxplot, :box
        sub_parser_add_xlim

      when :colors, :color, :colours, :colour
        sub_parser.on_head('-n', '--names', TrueClass, 'show color names only') do |v|
          options[:color_names] = v
        end

      # Currently it simply displays the configuration file,
      # but in the future this may be changed to open a text editor like Vim
      # to edit the configuration file.
      when :config
        show_config_info

      else
        error_message = "YouPlot: unrecognized command '#{command}'"
        raise Error, error_message unless YouPlot.run_as_executable?

        warn error_message
        exit 1
      end
    end

    # Entry point: parse common options, pull the sub-command off argv,
    # parse sub-command options, then apply the config file. Errors warn
    # and (when running as the executable) exit 1.
    def parse_options(argv = ARGV)
      begin
        create_main_parser.order!(argv)
      rescue OptionParser::ParseError => e
        warn "YouPlot: #{e.message}"
        exit 1 if YouPlot.run_as_executable?
      end

      @command = argv.shift&.to_sym

      begin
        create_sub_parser&.parse!(argv)
      rescue OptionParser::ParseError => e
        warn "YouPlot: #{e.message}"
        exit 1 if YouPlot.run_as_executable?
      end

      begin
        apply_config_file
      rescue StandardError => e
        warn "YouPlot: #{e.message}"
        exit 1 if YouPlot.run_as_executable?
      end
    end
  end
end
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/lib/youplot/parameters.rb | lib/youplot/parameters.rb | # frozen_string_literal: true
module YouPlot
  # UnicodePlot parameters.
  # Why Struct, not Hash?
  # * The keys are static in Struct.
  # * Struct does not conflict with keyword arguments. Hash dose.
  Parameters = Struct.new(
    # Sort me!
    :title,
    :width,
    :height,
    :border,
    :margin,
    :padding,
    :color,
    :xlabel,
    :ylabel,
    :labels,
    :symbol,
    :xscale,
    :nbins,
    :closed,
    :canvas,
    :xlim,
    :ylim,
    :grid,
    :name
  ) do
    # "to hash, compact": members that are still nil are dropped so they
    # can be splatted into UnicodePlot calls as keyword arguments without
    # overriding UnicodePlot's own defaults.
    def to_hc
      to_h.compact
    end
  end
end
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/lib/youplot/backends/processing.rb | lib/youplot/backends/processing.rb | # frozen_string_literal: true
module YouPlot
  # plotting functions.
  module Backends
    module Processing
      module_function

      # Count occurrences of each distinct value in +arr+ and return a
      # two-row array ([values, counts]) ready for a barplot.
      # Rows are ordered by descending count, ties broken by ascending
      # value; +reverse+ flips the final order. When +tally+ is false
      # (or on Rubies without Enumerable#tally), falls back to
      # Enumerable::Statistics#value_counts.
      def count_values(arr, tally: true, reverse: false)
        counted =
          if tally && Enumerable.method_defined?(:tally)
            arr.tally
          else
            # value_counts Enumerable::Statistics
            arr.value_counts(dropna: false)
          end

        # Descending by count; equal counts fall back to comparing values.
        ordered = counted.sort do |x, y|
          by_count = y[1] <=> x[1]
          by_count.zero? ? (x[0] <=> y[0]) : by_count
        end

        # --reverse option
        ordered.reverse! if reverse

        # prepare for barplot: [[value, count], ...] -> [values, counts]
        ordered.transpose
      end
    end
  end
end
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
red-data-tools/YouPlot | https://github.com/red-data-tools/YouPlot/blob/e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397/lib/youplot/backends/unicode_plot.rb | lib/youplot/backends/unicode_plot.rb | # frozen_string_literal: true
# UnicodePlot - Plot your data by Unicode characters
# https://github.com/red-data-tools/unicode_plot.rb
require_relative 'processing'
require 'unicode_plot'
# If the line color is specified as a number, the program will display an error
# message to the user and exit. Remove this patch when UnicodePlot is improved.
# Reopens UnicodePlot to reject numeric line colors with a friendly
# message instead of the gem's own failure mode.
module UnicodePlot
  class << self
    alias lineplot_original lineplot
    def lineplot(*args, **kw)
      if kw[:color].is_a? Numeric
        warn <<~EOS
          YouPlot: Line colors cannot be specified by numerical values.
          For more information, please see the following issue.
          https://github.com/red-data-tools/unicode_plot.rb/issues/34
        EOS
        # NOTE(review): uses `YouPlot.run_as_executable` (no `?`) unlike the
        # parser code, and `Error` is not defined inside module UnicodePlot
        # here — confirm both resolve at runtime (the raise path may only be
        # reachable when not running as the executable).
        YouPlot.run_as_executable ? exit(1) : raise(Error)
      end
      # Delegate to the original implementation for valid colors.
      lineplot_original(*args, **kw)
    end
  end
end
module YouPlot
  # plotting functions.
  module Backends
    # Adapters that translate preprocessed DSV data (a struct exposing
    # `headers` and `series`) into ::UnicodePlot calls. Each method
    # returns a renderable plot object (or a render-able String).
    module UnicodePlot
      class Error < StandardError; end

      module_function

      # Horizontal barplot. With +count+ true the single input series is
      # tallied via Processing.count_values (`uplot count`); +fmt+ 'yx'
      # swaps which columns are labels and values.
      def barplot(data, params, fmt = nil, count: false, reverse: false)
        headers = data.headers
        series = data.series
        # `uplot count`
        if count
          series = Processing.count_values(series[0], reverse: reverse)
          params.title = headers[0] if headers
        end
        if series.size == 1
          # If there is only one series, use the line number for the label.
          params.title ||= headers[0] if headers
          labels = Array.new(series[0].size) { |i| (i + 1).to_s }
          values = series[0].map(&:to_f)
        else
          # If there are 2 or more series...
          if fmt == 'yx'
            # assume that the first 2 series are the y and x series respectively.
            x_col = 1
            y_col = 0
          else
            # assume that the first 2 series are the x and y series respectively.
            x_col = 0
            y_col = 1
          end
          params.title ||= headers[y_col] if headers
          labels = series[x_col]
          values = series[y_col].map(&:to_f)
        end
        ::UnicodePlot.barplot(labels, values, **params.to_hc)
      end

      # Histogram of the first series; the header (if any) becomes the title.
      def histogram(data, params)
        headers = data.headers
        series = data.series
        params.title ||= data.headers[0] if headers
        values = series[0].map(&:to_f)
        ::UnicodePlot.histogram(values, **params.to_hc)
      end

      # Single line chart. One series is plotted against its index;
      # two or more use the first two columns as x/y (order per +fmt+).
      def line(data, params, fmt = nil)
        headers = data.headers
        series = data.series
        if series.size == 1
          # If there is only one series, it is assumed to be sequential data.
          params.ylabel ||= headers[0] if headers
          y = series[0].map(&:to_f)
          ::UnicodePlot.lineplot(y, **params.to_hc)
        else
          # If there are 2 or more series...
          if fmt == 'yx'
            # assume that the first 2 series are the y and x series respectively.
            x_col = 1
            y_col = 0
          else
            # assume that the first 2 series are the x and y series respectively.
            x_col = 0
            y_col = 1
          end
          if headers
            params.xlabel ||= headers[x_col]
            params.ylabel ||= headers[y_col]
          end
          x = series[x_col].map(&:to_f)
          y = series[y_col].map(&:to_f)
          ::UnicodePlot.lineplot(x, y, **params.to_hc)
        end
      end

      # Derive the mutating variant (e.g. :lineplot -> :lineplot!) used to
      # overlay additional series onto an existing plot.
      def get_method2(method1)
        "#{method1}!".to_sym
      end

      # Plot 'xyy' data: column 0 is the shared x axis, every later column
      # is a y series overlaid onto the same plot.
      def plot_xyy(data, method1, params)
        headers = data.headers
        series = data.series
        method2 = get_method2(method1)
        series.map! { |s| s.map(&:to_f) }
        if headers
          params.name ||= headers[1]
          params.xlabel ||= headers[0]
        end
        params.xlim ||= series[0].flatten.minmax # why need?
        params.ylim ||= series[1..-1].flatten.minmax # why need?
        plot = ::UnicodePlot.public_send(method1, series[0], series[1], **params.to_hc)
        2.upto(series.size - 1) do |i|
          ::UnicodePlot.public_send(method2, plot, series[0], series[i], name: headers&.[](i))
        end
        plot
      end

      # Plot 'xyxy' data: columns are consumed in (x, y) pairs, each pair
      # becoming one overlaid series.
      def plot_xyxy(data, method1, params)
        headers = data.headers
        series2 = data.series
                      .map { |s| s.map(&:to_f) }
                      .each_slice(2).to_a
        method2 = get_method2(method1)
        params.name ||= headers[0] if headers
        params.xlim ||= series2.map(&:first).flatten.minmax # why need?
        params.ylim ||= series2.map(&:last).flatten.minmax # why need?
        x1, y1 = series2.shift
        plot = ::UnicodePlot.public_send(method1, x1, y1, **params.to_hc)
        series2.each_with_index do |(xi, yi), i|
          # name comes from the x-header of each subsequent pair
          ::UnicodePlot.public_send(method2, plot, xi, yi, name: headers&.[]((i + 1) * 2))
        end
        plot
      end

      # Dispatch on the data layout string; 'yx' is only valid for the
      # two-column commands and is rejected here.
      def plot_fmt(data, fmt, method1, params)
        case fmt
        when 'xyy'
          plot_xyy(data, method1, params)
        when 'xyxy'
          plot_xyxy(data, method1, params)
        when 'yx'
          raise "Incorrect format: #{fmt}"
        else
          raise "Unknown format: #{fmt}"
        end
      end

      # Multi-series line chart (`uplot lineplots`).
      def lines(data, params, fmt = 'xyy')
        check_series_size(data, fmt)
        plot_fmt(data, fmt, :lineplot, params)
      end

      # Scatter plot (`uplot scatter`).
      def scatter(data, params, fmt = 'xyy')
        check_series_size(data, fmt)
        plot_fmt(data, fmt, :scatterplot, params)
      end

      # Density plot (`uplot density`).
      def density(data, params, fmt = 'xyy')
        check_series_size(data, fmt)
        plot_fmt(data, fmt, :densityplot, params)
      end

      # Boxplot of every series; missing headers fall back to "1", "2", ...
      def boxplot(data, params)
        headers = data.headers
        series = data.series
        headers ||= (1..series.size).map(&:to_s)
        series.map! { |s| s.map(&:to_f) }
        ::UnicodePlot.boxplot(headers, series, **params.to_hc)
      end

      # Build a swatch list of every color UnicodePlot knows.
      # With +color_names+ true, only names are shown (no "●" swatch).
      def colors(color_names = false)
        # FIXME
        s = String.new
        ::UnicodePlot::StyledPrinter::TEXT_COLORS.each do |k, v|
          s << v
          s << k.to_s
          unless color_names
            s << "\t"
            s << ' ●'
          end
          s << "\033[0m"
          s << "\t"
        end
        s << "\n"
        # Give the string a `render` singleton so callers can treat it
        # like a plot object.
        def s.render(obj)
          obj.print(self)
        end
        s
      end

      # Guard against common input mistakes: a single column usually means
      # the delimiter was wrong; 'xyxy' needs an even number of columns.
      # Warns and exits 1 when running as the executable, raises otherwise.
      def check_series_size(data, fmt)
        series = data.series
        if series.size == 1
          warn <<~EOS
            YouPlot: There is only one series of input data. Please check the delimiter.
            Headers: \e[35m#{data.headers.inspect}\e[0m
            The first item is: \e[35m\"#{series[0][0]}\"\e[0m
            The last item is : \e[35m\"#{series[0][-1]}\"\e[0m
          EOS
          # NOTE: Error messages cannot be colored.
          YouPlot.run_as_executable ? exit(1) : raise(Error)
        end
        if fmt == 'xyxy' && series.size.odd?
          warn <<~EOS
            YouPlot: In the xyxy format, the number of series must be even.
            Number of series: \e[35m#{series.size}\e[0m
            Headers: \e[35m#{data.headers.inspect}\e[0m
          EOS
          # NOTE: Error messages cannot be colored.
          YouPlot.run_as_executable ? exit(1) : raise(Error)
        end
      end
    end
  end
end
| ruby | MIT | e2f82e9c5b18bab477b86d8b7cbdbc3cf4df5397 | 2026-01-04T15:45:57.869385Z | false |
rmosolgo/graphql-ruby | https://github.com/rmosolgo/graphql-ruby/blob/fa2ba4e489f8475b194f7c6985e0b25681a442c2/graphql-c_parser/ext/graphql_c_parser_ext/extconf.rb | graphql-c_parser/ext/graphql_c_parser_ext/extconf.rb | # frozen_string_literal: true
require 'mkmf'

# Generate the Makefile that compiles the native parser extension and
# installs it as graphql/graphql_c_parser_ext.
create_makefile 'graphql/graphql_c_parser_ext'
| ruby | MIT | fa2ba4e489f8475b194f7c6985e0b25681a442c2 | 2026-01-04T15:43:02.089024Z | false |
rmosolgo/graphql-ruby | https://github.com/rmosolgo/graphql-ruby/blob/fa2ba4e489f8475b194f7c6985e0b25681a442c2/graphql-c_parser/lib/graphql-c_parser.rb | graphql-c_parser/lib/graphql-c_parser.rb | # frozen_string_literal: true
require "graphql/c_parser"
| ruby | MIT | fa2ba4e489f8475b194f7c6985e0b25681a442c2 | 2026-01-04T15:43:02.089024Z | false |
rmosolgo/graphql-ruby | https://github.com/rmosolgo/graphql-ruby/blob/fa2ba4e489f8475b194f7c6985e0b25681a442c2/graphql-c_parser/lib/graphql/c_parser.rb | graphql-c_parser/lib/graphql/c_parser.rb | # frozen_string_literal: true
require "graphql"
require "graphql/c_parser/version"
require "graphql/graphql_c_parser_ext"
module GraphQL
  # Ruby-side glue for the C (Ragel lexer + Bison parser) extension.
  # The extension supplies `tokenize_with_c_internal` and `Parser#c_parse`.
  module CParser
    # Parse a GraphQL string into an AST document.
    def self.parse(query_str, filename: nil, trace: GraphQL::Tracing::NullTrace, max_tokens: nil)
      Parser.parse(query_str, filename: filename, trace: trace, max_tokens: max_tokens)
    end

    # Read +filename+ and parse its contents, tagging errors with the filename.
    def self.parse_file(filename)
      contents = File.read(filename)
      parse(contents, filename: filename)
    end

    # Tokenize without interning identifiers; honors the global
    # `reject_numbers_followed_by_names` setting when present.
    def self.tokenize_with_c(str)
      reject_numbers_followed_by_names = GraphQL.respond_to?(:reject_numbers_followed_by_names) && GraphQL.reject_numbers_followed_by_names
      tokenize_with_c_internal(str, false, reject_numbers_followed_by_names)
    end

    # Convert a raw Bison error +message+ into a GraphQL::ParseError,
    # attaching the offending token's location and text when available.
    def self.prepare_parse_error(message, parser)
      query_str = parser.query_string
      filename = parser.filename
      if message.start_with?("memory exhausted")
        # Bison's stack overflowed — treat as a too-large query.
        return GraphQL::ParseError.new("This query is too large to execute.", nil, nil, query_str, filename: filename)
      end
      # The token the parser stopped on (the last one consumed).
      token = parser.tokens[parser.next_token_index - 1]
      if token
        # There might not be a token if it's a comments-only string
        line = token[1]
        col = token[2]
        if line && col
          location_str = " at [#{line}, #{col}]"
          if !message.include?(location_str)
            message += location_str
          end
        end

        if !message.include?("end of file")
          # Append the token's source text to Bison's "unexpected <kind>" phrase.
          message.sub!(/, unexpected ([a-zA-Z ]+)(,| at)/, ", unexpected \\1 (#{token[3].inspect})\\2")
        end
      end
      # NOTE(review): when there was no token, `line`/`col` are nil here
      # (locals assigned only in the unexecuted branch default to nil),
      # so the error carries no position — confirm that is intended.
      GraphQL::ParseError.new(message, line, col, query_str, filename: filename)
    end

    # Raised by the lexer when a number is immediately followed by a name
    # and that is configured to be rejected.
    def self.prepare_number_name_parse_error(line, col, query_str, number_part, name_part)
      raise GraphQL::ParseError.new("Name after number is not allowed (in `#{number_part}#{name_part}`)", line, col, query_str)
    end

    # Build a ParseError for an invalid Unicode escape token.
    def self.prepare_bad_unicode_error(parser)
      token = parser.tokens[parser.next_token_index - 1]
      line = token[1]
      col = token[2]
      GraphQL::ParseError.new(
        "Parse error on bad Unicode escape sequence: #{token[3].inspect} (error) at [#{line}, #{col}]",
        line,
        col,
        parser.query_string,
        filename: parser.filename
      )
    end

    module Lexer
      # Tokenize +graphql_string+ with the C lexer.
      # Non-UTF-8/non-ASCII input is reinterpreted as UTF-8 first; input
      # with invalid bytes yields a single BAD_UNICODE_ESCAPE token.
      def self.tokenize(graphql_string, intern_identifiers: false, max_tokens: nil)
        if !(graphql_string.encoding == Encoding::UTF_8 || graphql_string.ascii_only?)
          graphql_string = graphql_string.dup.force_encoding(Encoding::UTF_8)
        end

        if !graphql_string.valid_encoding?
          return [
            [
              :BAD_UNICODE_ESCAPE,
              1,
              1,
              graphql_string,
              241 # BAD_UNICODE_ESCAPE in lexer.rl
            ]
          ]
        end

        reject_numbers_followed_by_names = GraphQL.respond_to?(:reject_numbers_followed_by_names) && GraphQL.reject_numbers_followed_by_names

        # -1 indicates that there is no limit
        lexer_max_tokens = max_tokens.nil? ? -1 : max_tokens
        tokenize_with_c_internal(graphql_string, intern_identifiers, reject_numbers_followed_by_names, lexer_max_tokens)
      end
    end

    # Stateful wrapper around one lex+parse run. `c_parse` (defined by the
    # native extension) reads @tokens and stores the AST in @result.
    class Parser
      def self.parse(query_str, filename: nil, trace: GraphQL::Tracing::NullTrace, max_tokens: nil)
        self.new(query_str, filename, trace, max_tokens).result
      end

      def self.parse_file(filename)
        contents = File.read(filename)
        parse(contents, filename: filename)
      end

      def initialize(query_string, filename, trace, max_tokens)
        if query_string.nil?
          raise GraphQL::ParseError.new("No query string was present", nil, nil, query_string)
        end
        @query_string = query_string
        @filename = filename
        @tokens = nil
        @next_token_index = 0
        @result = nil
        @trace = trace
        @intern_identifiers = false
        @max_tokens = max_tokens
      end

      # Lex then parse (each wrapped in the trace), memoizing the document.
      def result
        if @result.nil?
          @tokens = @trace.lex(query_string: @query_string) do
            GraphQL::CParser::Lexer.tokenize(@query_string, intern_identifiers: @intern_identifiers, max_tokens: @max_tokens)
          end
          @trace.parse(query_string: @query_string) do
            c_parse
            @result
          end
        end
        @result
      end

      # Number of tokens produced for the query (forces a parse).
      def tokens_count
        result
        @tokens.length
      end

      attr_reader :tokens, :next_token_index, :query_string, :filename
    end

    # Variant used for SDL parsing: identifiers are interned as symbols.
    class SchemaParser < Parser
      def initialize(*args)
        super
        @intern_identifiers = true
      end
    end
  end

  # Convenience wrapper exposed for benchmarking/comparison with the
  # pure-Ruby lexer.
  def self.scan_with_c(graphql_string)
    GraphQL::CParser::Lexer.tokenize(graphql_string)
  end

  # Parse with the C parser; rejects nil and empty documents.
  def self.parse_with_c(string, filename: nil, trace: GraphQL::Tracing::NullTrace)
    if string.nil?
      raise GraphQL::ParseError.new("No query string was present", nil, nil, string)
    end
    document = GraphQL::CParser.parse(string, filename: filename, trace: trace)
    if document.definitions.size == 0
      raise GraphQL::ParseError.new("Unexpected end of document", 1, 1, string)
    end
    document
  end

  # Loading this gem makes the C parser the default for GraphQL.parse.
  self.default_parser = GraphQL::CParser
end
| ruby | MIT | fa2ba4e489f8475b194f7c6985e0b25681a442c2 | 2026-01-04T15:43:02.089024Z | false |
rmosolgo/graphql-ruby | https://github.com/rmosolgo/graphql-ruby/blob/fa2ba4e489f8475b194f7c6985e0b25681a442c2/graphql-c_parser/lib/graphql/c_parser/version.rb | graphql-c_parser/lib/graphql/c_parser/version.rb | # frozen_string_literal: true
module GraphQL
  module CParser
    # Version of the graphql-c_parser gem.
    VERSION = "1.1.3"
  end
end
| ruby | MIT | fa2ba4e489f8475b194f7c6985e0b25681a442c2 | 2026-01-04T15:43:02.089024Z | false |
rmosolgo/graphql-ruby | https://github.com/rmosolgo/graphql-ruby/blob/fa2ba4e489f8475b194f7c6985e0b25681a442c2/benchmark/run.rb | benchmark/run.rb | # frozen_string_literal: true
require "graphql"
ADD_WARDEN = false
require "jazz"
require "benchmark/ips"
require "stackprof"
require "memory_profiler"
require "graphql/batch"
require "securerandom"
module GraphQLBenchmark
# Shared fixtures: the introspection query, sample schemas, and
# pathological queries loaded from files next to this script.
QUERY_STRING = GraphQL::Introspection::INTROSPECTION_QUERY
DOCUMENT = GraphQL.parse(QUERY_STRING)
SCHEMA = Jazz::Schema

BENCHMARK_PATH = File.expand_path("../", __FILE__)
CARD_SCHEMA = GraphQL::Schema.from_definition(File.read(File.join(BENCHMARK_PATH, "schema.graphql")))
ABSTRACT_FRAGMENTS = GraphQL.parse(File.read(File.join(BENCHMARK_PATH, "abstract_fragments.graphql")))
ABSTRACT_FRAGMENTS_2_QUERY_STRING = File.read(File.join(BENCHMARK_PATH, "abstract_fragments_2.graphql"))
ABSTRACT_FRAGMENTS_2 = GraphQL.parse(ABSTRACT_FRAGMENTS_2_QUERY_STRING)

BIG_SCHEMA = GraphQL::Schema.from_definition(File.join(BENCHMARK_PATH, "big_schema.graphql"))
BIG_QUERY_STRING = File.read(File.join(BENCHMARK_PATH, "big_query.graphql"))
BIG_QUERY = GraphQL.parse(BIG_QUERY_STRING)

# 5000 duplicate selections to stress the fields-will-merge validator.
FIELDS_WILL_MERGE_SCHEMA = GraphQL::Schema.from_definition("type Query { hello: String }")
FIELDS_WILL_MERGE_QUERY = GraphQL.parse("{ #{Array.new(5000, "hello").join(" ")} }")

# NOTE(review): redundant here since all methods use `def self.`.
module_function
# Run a Benchmark.ips comparison chosen by +task+:
# "query", "validate", "scan", or "parse". Raises on anything else.
def self.run(task)
  Benchmark.ips do |x|
    case task
    when "query"
      x.report("query") { SCHEMA.execute(document: DOCUMENT) }
    when "validate"
      x.report("validate - introspection ") { CARD_SCHEMA.validate(DOCUMENT) }
      x.report("validate - abstract fragments") { CARD_SCHEMA.validate(ABSTRACT_FRAGMENTS) }
      x.report("validate - abstract fragments 2") { CARD_SCHEMA.validate(ABSTRACT_FRAGMENTS_2) }
      x.report("validate - big query") { BIG_SCHEMA.validate(BIG_QUERY) }
      x.report("validate - fields will merge") { FIELDS_WILL_MERGE_SCHEMA.validate(FIELDS_WILL_MERGE_QUERY) }
    when "scan"
      # Compares the C lexer against the pure-Ruby lexer.
      require "graphql/c_parser"
      x.report("scan c - introspection") { GraphQL.scan_with_c(QUERY_STRING) }
      x.report("scan - introspection") { GraphQL.scan_with_ruby(QUERY_STRING) }
      x.report("scan c - fragments") { GraphQL.scan_with_c(ABSTRACT_FRAGMENTS_2_QUERY_STRING) }
      x.report("scan - fragments") { GraphQL.scan_with_ruby(ABSTRACT_FRAGMENTS_2_QUERY_STRING) }
      x.report("scan c - big query") { GraphQL.scan_with_c(BIG_QUERY_STRING) }
      x.report("scan - big query") { GraphQL.scan_with_ruby(BIG_QUERY_STRING) }
    when "parse"
      # Uncomment this to use the C parser:
      # require "graphql/c_parser"
      x.report("parse - introspection") { GraphQL.parse(QUERY_STRING) }
      x.report("parse - fragments") { GraphQL.parse(ABSTRACT_FRAGMENTS_2_QUERY_STRING) }
      x.report("parse - big query") { GraphQL.parse(BIG_QUERY_STRING) }
    else
      raise("Unexpected task #{task}")
    end
  end
end
# Print a MemoryProfiler allocation report for parsing the three
# benchmark queries.
def self.profile_parse
  # To profile the C parser instead:
  # require "graphql/c_parser"
  report = MemoryProfiler.report do
    GraphQL.parse(BIG_QUERY_STRING)
    GraphQL.parse(QUERY_STRING)
    GraphQL.parse(ABSTRACT_FRAGMENTS_2_QUERY_STRING)
  end

  report.pretty_print
end
# Print a MemoryProfiler report for the fields-will-merge validation.
# The first call outside the block warms caches so the report measures
# steady-state allocations only.
def self.validate_memory
  FIELDS_WILL_MERGE_SCHEMA.validate(FIELDS_WILL_MERGE_QUERY)

  report = MemoryProfiler.report do
    FIELDS_WILL_MERGE_SCHEMA.validate(FIELDS_WILL_MERGE_QUERY)
    nil
  end

  report.pretty_print
end
# Print a StackProf wall-time profile of executing the introspection query.
def self.profile
  # Warm up any caches:
  SCHEMA.execute(document: DOCUMENT)
  # CARD_SCHEMA.validate(ABSTRACT_FRAGMENTS)
  res = nil
  result = StackProf.run(mode: :wall) do
    # CARD_SCHEMA.validate(ABSTRACT_FRAGMENTS)
    res = SCHEMA.execute(document: DOCUMENT)
  end

  StackProf::Report.new(result).print_text
end
# Programmatically build a wide anonymous schema — 5 interfaces,
# 100 object types (each implementing all interfaces), 100 input
# objects, and 10 unions — used by the boot/introspection profiles.
def self.build_large_schema
  Class.new(GraphQL::Schema) do
    query_t = Class.new(GraphQL::Schema::Object) do
      graphql_name("Query")
      # 5 interfaces, each with 5 string fields taking one argument.
      int_ts = 5.times.map do |i|
        int_t = Module.new do
          include GraphQL::Schema::Interface
          graphql_name "Interface#{i}"
          5.times do |n2|
            field :"field#{n2}", String do
              argument :arg, String
            end
          end
        end
        field :"int_field_#{i}", int_t
        int_t
      end

      # 100 object types, each with 20 fields taking an input object,
      # plus self-referential and interface-typed fields.
      obj_ts = 100.times.map do |n|
        input_obj_t = Class.new(GraphQL::Schema::InputObject) do
          graphql_name("Input#{n}")
          argument :arg, String
        end

        obj_t = Class.new(GraphQL::Schema::Object) do
          graphql_name("Object#{n}")
          implements(*int_ts)
          20.times do |n2|
            field :"field#{n2}", String do
              argument :input, input_obj_t
            end
          end

          field :self_field, self
          field :int_0_field, int_ts[0]
        end

        field :"rootfield#{n}", obj_t
        obj_t
      end

      # 10 unions over random samples of the object types.
      10.times do |n|
        union_t = Class.new(GraphQL::Schema::Union) do
          graphql_name "Union#{n}"
          possible_types(*obj_ts.sample(10))
        end
        field :"unionfield#{n}", union_t
      end
    end

    query(query_t)
  end
end
# Measure schema boot cost three ways: Benchmark.ips throughput,
# a StackProf wall-time profile, and a MemoryProfiler report.
def self.profile_boot
  Benchmark.ips do |x|
    x.config(time: 10)
    x.report("Booting large schema") {
      build_large_schema
    }
  end

  result = StackProf.run(mode: :wall, interval: 1) do
    build_large_schema
  end
  StackProf::Report.new(result).print_text

  # Assigned (not just built) so retained-object stats are meaningful.
  retained_schema = nil
  report = MemoryProfiler.report do
    retained_schema = build_large_schema
  end
  report.pretty_print
end
# Built once at load time and shared by the profiling methods below.
SILLY_LARGE_SCHEMA = build_large_schema
# Profile the fixed overhead of running a trivial query (`{ __typename }`)
# against a subclass of the large schema: ips, StackProf (text + dump
# file), and MemoryProfiler.
def self.profile_small_query_on_large_schema
  schema = Class.new(SILLY_LARGE_SCHEMA)
  Benchmark.ips do |x|
    x.report("Run small query") {
      schema.execute("{ __typename }")
    }
  end

  result = StackProf.run(mode: :wall, interval: 1) do
    schema.execute("{ __typename }")
  end
  StackProf::Report.new(result).print_text

  # Also write a dump for interactive inspection (e.g. stackprof CLI).
  StackProf.run(mode: :wall, out: "tmp/small_query.dump", interval: 1) do
    schema.execute("{ __typename }")
  end

  report = MemoryProfiler.report do
    schema.execute("{ __typename }")
  end

  puts "\n\n"
  report.pretty_print
end
# Profile serializing the large schema's introspection result
# (Schema.to_json): ips, StackProf, and MemoryProfiler.
def self.profile_large_introspection
  schema = SILLY_LARGE_SCHEMA
  Benchmark.ips do |x|
    x.config(time: 10)
    x.report("Run large introspection") {
      schema.to_json
    }
  end

  result = StackProf.run(mode: :wall) do
    schema.to_json
  end
  StackProf::Report.new(result).print_text

  report = MemoryProfiler.report do
    schema.to_json
  end

  puts "\n\n"
  report.pretty_print
end
# Profile AST analysis (field usage, depth, complexity) over a generated
# query that touches interface fields and 20 fragments of 20 fields each.
def self.profile_large_analysis
  # Build the query text: 5 interface-field selections spreading all
  # 20 object fragments...
  query_str = "query {\n".dup
  5.times do |n|
    query_str << " intField#{n} { "
    20.times do |o|
      query_str << "...Obj#{o}Fields "
    end
    query_str << "}\n"
  end
  query_str << "}"
  # ...then the 20 fragment definitions themselves.
  20.times do |o|
    query_str << "fragment Obj#{o}Fields on Object#{o} { "
    20.times do |f|
      query_str << " field#{f}(arg: \"a\")\n"
    end
    query_str << " selfField { selfField { selfField { __typename } } }\n"
    # query_str << " int0Field { ...Int0Fields }"
    query_str << "}\n"
  end
  # query_str << "fragment Int0Fields on Interface0 { __typename }"
  query = GraphQL::Query.new(SILLY_LARGE_SCHEMA, query_str)
  analyzers = [
    GraphQL::Analysis::AST::FieldUsage,
    GraphQL::Analysis::AST::QueryDepth,
    GraphQL::Analysis::AST::QueryComplexity
  ]
  Benchmark.ips do |x|
    x.report("Running introspection") {
      GraphQL::Analysis::AST.analyze_query(query, analyzers)
    }
  end

  # Dump for interactive inspection, then a printed text profile.
  StackProf.run(mode: :wall, out: "last-stackprof.dump", interval: 1) do
    GraphQL::Analysis::AST.analyze_query(query, analyzers)
  end

  result = StackProf.run(mode: :wall, interval: 1) do
    GraphQL::Analysis::AST.analyze_query(query, analyzers)
  end
  StackProf::Report.new(result).print_text

  report = MemoryProfiler.report do
    GraphQL::Analysis::AST.analyze_query(query, analyzers)
  end

  puts "\n\n"
  report.pretty_print
end
# Adapted from https://github.com/rmosolgo/graphql-ruby/issues/861
# Profile executing a query that selects every field on 1000 objects:
# ips, StackProf, and MemoryProfiler.
def self.profile_large_result
  schema = ProfileLargeResult::Schema
  document = ProfileLargeResult::ALL_FIELDS
  Benchmark.ips do |x|
    x.config(time: 10)
    x.report("Querying for #{ProfileLargeResult::DATA.size} objects") {
      schema.execute(document: document)
    }
  end

  result = StackProf.run(mode: :wall, interval: 1) do
    schema.execute(document: document)
  end
  StackProf::Report.new(result).print_text

  report = MemoryProfiler.report do
    schema.execute(document: document)
  end
  report.pretty_print
end
# Profile a small nested query (5x5 objects) against the large-result
# schema: ips, StackProf (dump + text), and MemoryProfiler.
def self.profile_small_result
  schema = ProfileLargeResult::Schema
  document = GraphQL.parse <<-GRAPHQL
    query {
      foos(first: 5) {
        __typename
        id
        int1
        int2
        string1
        string2
        foos(first: 5) {
          __typename
          string1
          string2
          foo {
            __typename
            int1
          }
        }
      }
    }
  GRAPHQL
  Benchmark.ips do |x|
    x.config(time: 10)
    x.report("Querying for #{ProfileLargeResult::DATA.size} objects") {
      schema.execute(document: document)
    }
  end

  StackProf.run(mode: :wall, interval: 1, out: "tmp/small.dump") do
    schema.execute(document: document)
  end

  result = StackProf.run(mode: :wall, interval: 1) do
    schema.execute(document: document)
  end
  StackProf::Report.new(result).print_text

  report = MemoryProfiler.report do
    schema.execute(document: document)
  end
  report.pretty_print
end
# Profile executing the full introspection query against the
# large-result schema: ips, StackProf, and MemoryProfiler.
def self.profile_small_introspection
  schema = ProfileLargeResult::Schema
  document = GraphQL.parse(GraphQL::Introspection::INTROSPECTION_QUERY)
  Benchmark.ips do |x|
    x.config(time: 5)
    x.report("Introspection") {
      schema.execute(document: document)
    }
  end

  result = StackProf.run(mode: :wall, interval: 1) do
    schema.execute(document: document)
  end
  StackProf::Report.new(result).print_text

  report = MemoryProfiler.report do
    schema.execute(document: document)
  end
  report.pretty_print
end
# Fixture schema for the large/small result profiles: a Foo type with
# many scalar fields backed by 1000 random records. Unless EAGER is set,
# values are wrapped in lambdas to exercise lazy resolution
# (`lazy_resolve Proc, :call` below).
module ProfileLargeResult
  # Wrap +value+ in a lambda unless ENV["EAGER"] is set.
  def self.eager_or_proc(value)
    ENV["EAGER"] ? value : -> { value }
  end

  DATA_SIZE = 1000
  # 1000 pseudo-random records; some fields are doubly lazy.
  # NOTE(review): SecureRandom.random_number(1) always returns 0, so the
  # boolean fields are always true — possibly random_number(2) was meant.
  DATA = DATA_SIZE.times.map {
    eager_or_proc({
      id: SecureRandom.uuid,
      int1: SecureRandom.random_number(100000),
      int2: SecureRandom.random_number(100000),
      string1: eager_or_proc(SecureRandom.base64),
      string2: SecureRandom.base64,
      boolean1: SecureRandom.random_number(1) == 0,
      boolean2: SecureRandom.random_number(1) == 0,
      int_array: eager_or_proc(10.times.map { eager_or_proc(SecureRandom.random_number(100000)) } ),
      string_array: 10.times.map { SecureRandom.base64 },
      boolean_array: 10.times.map { SecureRandom.random_number(1) == 0 },
    })
  }

  module Bar
    include GraphQL::Schema::Interface
    field :string_array, [String], null: false
  end

  # Inherits string_array from Bar via interface inheritance.
  module Baz
    include GraphQL::Schema::Interface
    implements Bar
    field :int_array, [Integer], null: false
    field :boolean_array, [Boolean], null: false
  end

  # No-op extension; present to measure field-extension overhead.
  class ExampleExtension < GraphQL::Schema::FieldExtension
  end

  class FooType < GraphQL::Schema::Object
    implements Baz
    field :id, ID, null: false, extensions: [ExampleExtension]
    field :int1, Integer, null: false, extensions: [ExampleExtension]
    field :int2, Integer, null: false, extensions: [ExampleExtension]
    # The four unused optional arguments add argument-processing overhead.
    field :string1, String, null: false do
      argument :arg1, String, required: false
      argument :arg2, String, required: false
      argument :arg3, String, required: false
      argument :arg4, String, required: false
    end
    field :string2, String, null: false do
      argument :arg1, String, required: false
      argument :arg2, String, required: false
      argument :arg3, String, required: false
      argument :arg4, String, required: false
    end
    field :boolean1, Boolean, null: false do
      argument :arg1, String, required: false
      argument :arg2, String, required: false
      argument :arg3, String, required: false
      argument :arg4, String, required: false
    end
    field :boolean2, Boolean, null: false do
      argument :arg1, String, required: false
      argument :arg2, String, required: false
      argument :arg3, String, required: false
      argument :arg4, String, required: false
    end

    field :foos, [FooType], null: false, description: "Return a list of Foo objects" do
      argument :first, Integer, default_value: DATA_SIZE
    end
    def foos(first:)
      DATA.first(first)
    end

    field :foo, FooType
    def foo
      DATA.sample
    end
  end

  class QueryType < GraphQL::Schema::Object
    description "Query root of the system"
    field :foos, [FooType], null: false, description: "Return a list of Foo objects" do
      argument :first, Integer, default_value: DATA_SIZE
    end
    def foos(first:)
      DATA.first(first)
    end
  end

  class Schema < GraphQL::Schema
    query QueryType
    # use GraphQL::Dataloader
    # Lambdas in DATA are resolved lazily by calling them.
    lazy_resolve Proc, :call
  end

  # Selects every scalar field on all objects.
  ALL_FIELDS = GraphQL.parse <<-GRAPHQL
    query($skip: Boolean = false) {
      foos {
        id @skip(if: $skip)
        int1
        int2
        string1
        string2
        boolean1
        boolean2
        stringArray
        intArray
        booleanArray
      }
    }
  GRAPHQL
end
# Benchmark and profile printing the example schema as SDL
# (`Schema#to_definition`).
# (Removed a dead `require_relative "./batch_loading"` — BatchLoading is
# never referenced in this method; loading it only added unrelated schema
# definitions before profiling.)
def self.profile_to_definition
  schema = ProfileLargeResult::Schema
  # Warm-up call so lazily-built caches don't skew the first measurement.
  schema.to_definition
  Benchmark.ips do |x|
    x.report("to_definition") { schema.to_definition }
  end
  result = StackProf.run(mode: :wall, interval: 1) do
    schema.to_definition
  end
  StackProf::Report.new(result).print_text
  report = MemoryProfiler.report do
    schema.to_definition
  end
  report.pretty_print
end
# Benchmark and profile rebuilding a schema from its SDL string
# (`GraphQL::Schema.from_definition`).
def self.profile_from_definition
  # require "graphql/c_parser"
  sdl = SILLY_LARGE_SCHEMA.to_definition
  build_schema = -> { GraphQL::Schema.from_definition(sdl) }

  Benchmark.ips do |bm|
    bm.report("from_definition", &build_schema)
  end

  stackprof_data = StackProf.run(mode: :wall, interval: 1, &build_schema)
  StackProf::Report.new(stackprof_data).print_text

  MemoryProfiler.report(&build_schema).pretty_print
end
# Compare GraphQL::Batch, GraphQL::Dataloader, and plain per-field lookups
# on the same nested query: first verify all three schemas return identical
# data, then benchmark throughput and memory for each.
def self.profile_batch_loaders
  require_relative "./batch_loading"
  # Runtime `include` on the enclosing module: makes BatchLoading's
  # constants (GraphQLBatchSchema etc.) resolvable below.
  include BatchLoading
  document = GraphQL.parse <<-GRAPHQL
  {
    braves: team(name: "Braves") { ...TeamFields }
    bulls: team(name: "Bulls") { ...TeamFields }
  }
  fragment TeamFields on Team {
    players {
      team {
        players {
          team {
            name
          }
        }
      }
    }
  }
  GRAPHQL
  batch_result = GraphQLBatchSchema.execute(document: document).to_h
  dataloader_result = GraphQLDataloaderSchema.execute(document: document).to_h
  no_batch_result = GraphQLNoBatchingSchema.execute(document: document).to_h

  # Sanity check: the numbers only mean something if all three strategies
  # produce the same result.
  results = [batch_result, dataloader_result, no_batch_result].uniq
  if results.size > 1
    puts "Batch result:"
    pp batch_result
    puts "Dataloader result:"
    pp dataloader_result
    puts "No-batch result:"
    pp no_batch_result
    raise "Got different results -- fix implementation before benchmarking."
  end

  Benchmark.ips do |x|
    x.report("GraphQL::Batch") { GraphQLBatchSchema.execute(document: document) }
    x.report("GraphQL::Dataloader") { GraphQLDataloaderSchema.execute(document: document) }
    x.report("No Batching") { GraphQLNoBatchingSchema.execute(document: document) }
    x.compare!
  end

  puts "========== GraphQL-Batch Memory =============="
  report = MemoryProfiler.report do
    GraphQLBatchSchema.execute(document: document)
  end
  report.pretty_print

  puts "========== Dataloader Memory ================="
  report = MemoryProfiler.report do
    GraphQLDataloaderSchema.execute(document: document)
  end
  report.pretty_print

  puts "========== No Batch Memory =============="
  report = MemoryProfiler.report do
    GraphQLNoBatchingSchema.execute(document: document)
  end
  report.pretty_print
end
# Measure how much memory is retained just by *defining* a schema with
# 100 object types and 100 Relay-classic mutations — no queries executed.
def self.profile_schema_memory_footprint
  # Assigned inside the report block; keeps the schema referenced so it
  # isn't garbage-collected while the report is being built/printed.
  schema = nil
  report = MemoryProfiler.report do
    query_type = Class.new(GraphQL::Schema::Object) do
      graphql_name "Query"
      100.times do |i|
        type = Class.new(GraphQL::Schema::Object) do
          graphql_name "Object#{i}"
          field :f, Integer
        end
        field "f#{i}", type
      end
    end

    thing_type = Class.new(GraphQL::Schema::Object) do
      graphql_name "Thing"
      field :name, String
    end

    mutation_type = Class.new(GraphQL::Schema::Object) do
      graphql_name "Mutation"
      100.times do |i|
        mutation_class = Class.new(GraphQL::Schema::RelayClassicMutation) do
          graphql_name "Do#{i}"
          argument :id, "ID"
          field :thing, thing_type
          field :things, thing_type.connection_type
        end
        field "f#{i}", mutation: mutation_class
      end
    end

    schema = Class.new(GraphQL::Schema) do
      query(query_type)
      mutation(mutation_type)
    end
  end
  report.pretty_print
end
# Schema used to compare interpreter stack depth between eager and lazy
# field resolution (driven by `.profile_stack_depth` below).
class StackDepthSchema < GraphQL::Schema
  class Thing < GraphQL::Schema::Object
    # Self-referential field so queries can nest arbitrarily deep.
    field :thing, self do
      argument :lazy, Boolean, default_value: false
    end

    def thing(lazy:)
      # A Proc counts as a lazy value (see `lazy_resolve` below).
      if lazy
        -> { :something }
      else
        :something
      end
    end

    # Reports how many graphql-ruby frames are on the stack when this
    # field is resolved (eagerly or lazily).
    field :stack_trace_depth, Integer do
      argument :lazy, Boolean, default_value: false
    end

    def stack_trace_depth(lazy:)
      # `count` with a block avoids building the intermediate array that
      # `select { ... }.size` allocated.
      get_depth = -> { caller.count { |c| c.include?("graphql") } }
      if lazy
        get_depth
      else
        get_depth.call
      end
    end
  end

  class Query < GraphQL::Schema::Object
    field :thing, Thing

    def thing
      :something
    end
  end

  query(Query)
  lazy_resolve(Proc, :call)
end
# Print a small table comparing graphql-ruby stack depth at a deeply
# nested field for eager vs. lazy resolution (see StackDepthSchema).
def self.profile_stack_depth
  query_str = <<-GRAPHQL
    query($lazyThing: Boolean!, $lazyStackTrace: Boolean!) {
      thing {
        thing(lazy: $lazyThing) {
          thing(lazy: $lazyThing) {
            thing(lazy: $lazyThing) {
              thing(lazy: $lazyThing) {
                stackTraceDepth(lazy: $lazyStackTrace)
              }
            }
          }
        }
      }
    }
  GRAPHQL
  eager_res = StackDepthSchema.execute(query_str, variables: { lazyThing: false, lazyStackTrace: false })
  lazy_res = StackDepthSchema.execute(query_str, variables: { lazyThing: true, lazyStackTrace: false })
  very_lazy_res = StackDepthSchema.execute(query_str, variables: { lazyThing: true, lazyStackTrace: true })
  # Dig the measured depth out of the innermost field of each result.
  get_depth = ->(result) { result["data"]["thing"]["thing"]["thing"]["thing"]["thing"]["stackTraceDepth"] }
  puts <<~RESULT
    Result Depth
    ---------------------
    Eager #{get_depth.call(eager_res)}
    Lazy #{get_depth.call(lazy_res)}
    Very Lazy #{get_depth.call(very_lazy_res)}
  RESULT
end
end
| ruby | MIT | fa2ba4e489f8475b194f7c6985e0b25681a442c2 | 2026-01-04T15:43:02.089024Z | false |
rmosolgo/graphql-ruby | https://github.com/rmosolgo/graphql-ruby/blob/fa2ba4e489f8475b194f7c6985e0b25681a442c2/benchmark/batch_loading.rb | benchmark/batch_loading.rb | # frozen_string_literal: true
# The same tiny basketball dataset exposed through three schemas so the
# batching strategies can be compared head-to-head
# (see GraphQLBenchmark.profile_batch_loaders).
module BatchLoading
  class GraphQLBatchSchema < GraphQL::Schema
    # Teams have player_ids; players have a team_id. Shared by all schemas.
    DATA = [
      { id: "1", name: "Bulls", player_ids: ["2", "3"] },
      { id: "2", name: "Michael Jordan", team_id: "1" },
      { id: "3", name: "Scottie Pippin", team_id: "1" },
      { id: "4", name: "Braves", player_ids: ["5", "6"] },
      { id: "5", name: "Chipper Jones", team_id: "4" },
      { id: "6", name: "Tom Glavine", team_id: "4" },
    ]

    # graphql-batch loader: looks up DATA rows by a configurable column.
    class DataLoader < GraphQL::Batch::Loader
      def initialize(column: :id)
        @column = column
      end

      # Called once per batch of keys; fulfills each key with its row (or nil).
      def perform(keys)
        keys.each do |key|
          record = DATA.find { |d| d[@column] == key }
          fulfill(key, record)
        end
      end
    end

    class Team < GraphQL::Schema::Object
      field :name, String, null: false
      field :players, "[BatchLoading::GraphQLBatchSchema::Player]", null: false

      def players
        DataLoader.load_many(object[:player_ids])
      end
    end

    class Player < GraphQL::Schema::Object
      field :name, String, null: false
      field :team, Team, null: false

      def team
        DataLoader.load(object[:team_id])
      end
    end

    class Query < GraphQL::Schema::Object
      field :team, Team do
        argument :name, String
      end

      def team(name:)
        DataLoader.for(column: :name).load(name)
      end
    end

    query(Query)
    use GraphQL::Batch
  end

  # Same shape, built on the built-in GraphQL::Dataloader.
  class GraphQLDataloaderSchema < GraphQL::Schema
    # Dataloader source; `options` mirrors DataLoader's `column:` option.
    class DataSource < GraphQL::Dataloader::Source
      def initialize(options = {column: :id})
        @column = options[:column]
      end

      def fetch(keys)
        keys.map { |key|
          # NOTE(review): the block parameter `d` shadows the local `d`
          # being assigned on this line (`ruby -w` warns about this).
          d = GraphQLBatchSchema::DATA.find { |d| d[@column] == key }
          # p [key, @column, d]
          d
        }
      end
    end

    class Team < GraphQL::Schema::Object
      field :name, String, null: false
      field :players, "[BatchLoading::GraphQLDataloaderSchema::Player]", null: false

      def players
        dataloader.with(DataSource).load_all(object[:player_ids])
      end
    end

    class Player < GraphQL::Schema::Object
      field :name, String, null: false
      field :team, Team, null: false

      def team
        dataloader.with(DataSource).load(object[:team_id])
      end
    end

    class Query < GraphQL::Schema::Object
      field :team, Team do
        argument :name, String
      end

      def team(name:)
        dataloader.with(DataSource, column: :name).load(name)
      end
    end

    query(Query)
    use GraphQL::Dataloader
  end

  # Control group: naive per-field lookups, no batching at all.
  class GraphQLNoBatchingSchema < GraphQL::Schema
    DATA = GraphQLBatchSchema::DATA

    class Team < GraphQL::Schema::Object
      field :name, String, null: false
      field :players, "[BatchLoading::GraphQLNoBatchingSchema::Player]", null: false

      def players
        object[:player_ids].map { |id| DATA.find { |d| d[:id] == id } }
      end
    end

    class Player < GraphQL::Schema::Object
      field :name, String, null: false
      field :team, Team, null: false

      def team
        DATA.find { |d| d[:id] == object[:team_id] }
      end
    end

    class Query < GraphQL::Schema::Object
      field :team, Team do
        argument :name, String
      end

      def team(name:)
        DATA.find { |d| d[:name] == name }
      end
    end

    query(Query)
  end
end
| ruby | MIT | fa2ba4e489f8475b194f7c6985e0b25681a442c2 | 2026-01-04T15:43:02.089024Z | false |
rmosolgo/graphql-ruby | https://github.com/rmosolgo/graphql-ruby/blob/fa2ba4e489f8475b194f7c6985e0b25681a442c2/spec/graphql_spec.rb | spec/graphql_spec.rb | # frozen_string_literal: true
require "spec_helper"
require "open3"
describe GraphQL do
  # Spawns a fresh ruby process (clean constant table) that installs the gem
  # from the working tree and eager-loads it; any stdout/stderr output —
  # including warnings — fails the assertion.
  it "loads without warnings" do
    stderr_and_stdout, _status = Open3.capture2e(%|ruby -Ilib -e "require 'bundler/inline'; gemfile(true, quiet: true) { source('https://rubygems.org'); gem('graphql', path: './') }; GraphQL.eager_load!"|)
    assert_equal "", stderr_and_stdout
  end

  # Regression test: a bare `Rails` module (defined, but without `Rails.env`)
  # must not break schema definition or eager loading.
  it "loads without warnings when Rails is defined but `Rails.env` isn't" do
    script = <<~RUBY
      require 'bundler/inline'
      module Rails
      end
      gemfile(true, quiet: true) do
        source('https://rubygems.org')
        gem 'graphql', path: './'
      end
      class MySchema < GraphQL::Schema
        use GraphQL::Schema::AlwaysVisible
      end
      GraphQL.eager_load!
    RUBY
    stderr_and_stdout, _status = Open3.capture2e(%|ruby -Ilib -e "#{script}"|)
    assert_equal "", stderr_and_stdout
  end
end
| ruby | MIT | fa2ba4e489f8475b194f7c6985e0b25681a442c2 | 2026-01-04T15:43:02.089024Z | false |
rmosolgo/graphql-ruby | https://github.com/rmosolgo/graphql-ruby/blob/fa2ba4e489f8475b194f7c6985e0b25681a442c2/spec/spec_helper.rb | spec/spec_helper.rb | # frozen_string_literal: true
require 'rubygems'
require 'bundler'
require 'simplecov'
require 'simplecov-lcov'

# Emit one combined lcov file (for CI coverage upload) alongside the HTML report.
SimpleCov::Formatter::LcovFormatter.config.report_with_single_file = true
SimpleCov.formatters = [
  SimpleCov::Formatter::HTMLFormatter,
  SimpleCov::Formatter::LcovFormatter
]
SimpleCov.start do
  enable_coverage :branch
  add_filter "spec/"
  # Group coverage output by library area for easier reading.
  add_group "Generators", "lib/generators"
  add_group "Execution", [/analysis/, /language/, /execution/, /static_validation/, /pagination/, /subscriptions/, /query/, /tracing/, /introspection/, /backtrace/]
  add_group "Helpers", [/rake_task/, /testing/, /rubocop/]
  add_group "Definition", [/types/, /relay/, /schema/]
  add_group "Dataloader", [/dataloader/]
end

Bundler.require

# Print full backtrace for failures:
ENV["BACKTRACE"] = "1"

require "graphql"

# GRAPHQL_CPARSER opts the suite into the C implementation of the parser.
if ENV["GRAPHQL_CPARSER"]
  USING_C_PARSER = true
  puts "Opting in to GraphQL::CParser"
  require "graphql-c_parser"
else
  USING_C_PARSER = false
end

# Opt-in flags for newer, stricter behaviors (exercised on some CI jobs).
if ENV["GRAPHQL_REJECT_NUMBERS_FOLLOWED_BY_NAMES"]
  puts "Opting into GraphQL.reject_numbers_followed_by_names"
  GraphQL.reject_numbers_followed_by_names = true
  puts "Opting into GraphQL::Schema::Visibility::Profile"
  GraphQL::Schema.use(GraphQL::Schema::Visibility, migration_errors: true)
  ADD_WARDEN = false
else
  ADD_WARDEN = true
end

# C methods aren't fair game in non-main Ractors
RUN_RACTOR_TESTS = defined?(::Ractor) && !USING_C_PARSER

require "rake"
require "graphql/rake_task"
require "pry"
require "minitest/autorun"
require "minitest/focus"
require "minitest/reporters"
require "graphql/batch"

# RubyMine provides its own reporter; the colored default reporter breaks its UI.
running_in_rubymine = ENV["RM_INFO"]
unless running_in_rubymine
  Minitest::Reporters.use! Minitest::Reporters::DefaultReporter.new(color: true)
end
# Minitest::Reporters.use! Minitest::Reporters::SpecReporter.new(color: true)

Minitest::Spec.make_my_diffs_pretty!
# Uses GC finalizers to assert that GraphQL::Schema::Warden instances keep a
# stable object shape: each warden should end its life with exactly the same
# instance-variable set as a freshly-built warden, which keeps Ruby's
# object-shape optimization effective.
module CheckWardenShape
  # Ivar list of a pristine warden, captured at load time as the baseline shape.
  DEFAULT_SHAPE = GraphQL::Schema::Warden.new(context: {}, schema: GraphQL::Schema).instance_variables

  # Finalizer callable; holds the warden and compares its ivars at GC time.
  class CheckShape
    def initialize(warden)
      @warden = warden
    end

    # Invoked by the GC when the owning query is collected; raises if the
    # warden gained or lost instance variables during its lifetime.
    def call(_obj_id)
      ivars = @warden.instance_variables
      if ivars != DEFAULT_SHAPE
        raise <<-ERR
Object Shape Failed (#{@warden.class}):

- Expected: #{DEFAULT_SHAPE.inspect}
- Actual: #{ivars.inspect}
ERR
      # else # To make sure it's running properly:
      #   puts "OK Warden #{@warden.object_id}"
      end
    end
  end

  # Prepended into GraphQL::Query (below), so this runs around the
  # library's own prepare_ast.
  def prepare_ast
    super
    setup_finalizer
  end

  private

  # Attach the shape-checking finalizer once per query.
  def setup_finalizer
    if !@finalizer_defined
      @finalizer_defined = true
      if warden.is_a?(GraphQL::Schema::Warden)
        ObjectSpace.define_finalizer(self, CheckShape.new(warden))
      end
    end
  end
end

GraphQL::Query.prepend(CheckWardenShape)
# Filter out Minitest backtrace while allowing backtrace from other libraries
# to be shown. (Minitest's own frames are noise in failure output.)
Minitest.backtrace_filter = Minitest::BacktraceFilter.new
# Can be used as a GraphQL::Schema::Warden for some purposes, but allows nothing
module NothingWarden
  module_function

  # Always reports an enum as having no visible values.
  def enum_values(_enum_type)
    []
  end
end
# Use this when a schema requires a `resolve_type` hook
# but you know it won't be called; it raises if it ever is.
NO_OP_RESOLVE_TYPE = lambda do |type, obj, ctx|
  raise "this should never be called"
end
# NOTE: these predicates return the truthy string from `defined?` (or nil),
# not true/false — callers only ever use them in boolean context.

# Is the optional Rails integration loaded in this run?
def testing_rails?
  defined?(::Rails)
end

# Is the optional Mongoid integration loaded in this run?
def testing_mongoid?
  defined?(::Mongoid)
end

# Is the optional Redis integration loaded in this run?
def testing_redis?
  defined?(::Redis)
end
# Pull in integration fixtures only when the optional dependency is present.
if testing_rails?
  require "integration/rails/spec_helper"
end

if testing_mongoid?
  require "integration/mongoid/star_trek/data"
  require "integration/mongoid/star_trek/schema"
end

# Load support files
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each do |f|
  require f
end
# Shortcut for executing a query against the StarTrek test schema.
def star_trek_query(string, variables = {}, context: {})
  execute_options = { variables: variables, context: context }
  StarTrek::Schema.execute(string, **execute_options)
end
# Shortcut for executing a query against the StarWars test schema.
def star_wars_query(string, variables = {}, context: {})
  execute_options = { variables: variables, context: context }
  StarWars::Schema.execute(string, **execute_options)
end
# Temporarily enable legacy bidirectional pagination for the duration of the
# block, restoring the previous setting even if the block raises.
def with_bidirectional_pagination
  prev_value = GraphQL::Relay::ConnectionType.bidirectional_pagination
  GraphQL::Relay::ConnectionType.bidirectional_pagination = true
  yield
ensure
  GraphQL::Relay::ConnectionType.bidirectional_pagination = prev_value
end
# Collects tracer payloads during a test run so specs can inspect them.
module TestTracing
  # Accumulated trace payloads (lazily initialized, shared per process).
  def self.traces
    @traces ||= []
  end

  # Drop everything collected so far.
  def self.clear
    traces.clear
  end

  # Run the block against a fresh buffer and return whatever was collected.
  def self.with_trace
    clear
    yield
    traces
  end

  # Record one trace event: mutates `data` with the key and the block's
  # result, stores it, and returns the block's result.
  def self.trace(key, data)
    data[:key] = key
    data[:result] = returned = yield
    traces << data
    returned
  end
end
# Sanity check: the C parser must never be loaded unless explicitly opted in,
# otherwise the "pure Ruby" CI jobs would silently test the wrong parser.
if !USING_C_PARSER && defined?(GraphQL::CParser::Parser)
  raise "Load error: didn't opt in to C parser but GraphQL::CParser::Parser was defined"
end
# Assert that running the block writes exactly `warning` to stderr and
# `printing` to stdout; returns the block's return value.
def assert_warns(warning, printing = "")
  return_val = nil
  stdout, stderr = capture_io { return_val = yield }
  assert_equal warning, stderr, "It produced the expected stderr"
  # Expected value goes first: previously the arguments were swapped
  # (`assert_equal stdout, printing`), which produced misleading
  # expected/actual labels in failure messages.
  assert_equal printing, stdout, "It produced the expected stdout"
  return_val
end
module Minitest
  class Test
    # Define a spec that runs inside a dataloader; the block receives the
    # GraphQL::Dataloader instance as its argument.
    def self.it_dataloads(message, &block)
      it(message) do
        GraphQL::Dataloader.with_dataloading do |d|
          self.instance_exec(d, &block)
        end
      end
    end
  end

  module Assertions
    # Like assert_equal, but for GraphQL results: also asserts that hash keys
    # appear in the same order, and recurses element-by-element into arrays.
    def assert_graphql_equal(data1, data2, message = "GraphQL Result was equal")
      case data1
      when Hash
        assert_equal(data1, data2, message)
        assert_equal(data1.keys, data2.keys, "Order of keys matched (#{message})")
      when Array
        data1.each_with_index do |item1, idx|
          assert_graphql_equal(item1, data2[idx], message + "[Item #{idx + 1}] ")
        end
        # Also catch a longer `data2`, which per-item recursion would miss.
        assert_equal(data1.size, data2.size, "Array sizes matched (#{message})")
      else
        # Scalars (strings, numbers, booleans, nil) compare directly.
        # (Previously any non-Hash/Array value raised ArgumentError, so even
        # an array of leaf values could not be compared.)
        assert_equal(data1, data2, message)
      end
    end
  end
end
| ruby | MIT | fa2ba4e489f8475b194f7c6985e0b25681a442c2 | 2026-01-04T15:43:02.089024Z | false |
rmosolgo/graphql-ruby | https://github.com/rmosolgo/graphql-ruby/blob/fa2ba4e489f8475b194f7c6985e0b25681a442c2/spec/support/global_id.rb | spec/support/global_id.rb | # frozen_string_literal: true
if defined?(GlobalID)
  GlobalID.app = "graphql-ruby-test"

  # Minimal GlobalID-compatible record used by node-field specs.
  class GlobalIDUser
    include GlobalID::Identification

    attr_reader :id

    def initialize(id, located_many: false)
      @id = id
      @located_many = located_many
    end

    # True when this instance came from a batched `find([...])` lookup.
    def located_many?
      @located_many
    end

    # ActiveRecord-style lookup: accepts a single id or an array of ids.
    def self.find(id_or_ids)
      case id_or_ids
      when Array
        id_or_ids.map { |one_id| new(one_id, located_many: true) }
      else
        new(id_or_ids)
      end
    end

    # Two users are equal when their ids match.
    def ==(other)
      id == other.id
    end
  end
end
| ruby | MIT | fa2ba4e489f8475b194f7c6985e0b25681a442c2 | 2026-01-04T15:43:02.089024Z | false |
rmosolgo/graphql-ruby | https://github.com/rmosolgo/graphql-ruby/blob/fa2ba4e489f8475b194f7c6985e0b25681a442c2/spec/support/connection_assertions.rb | spec/support/connection_assertions.rb | # frozen_string_literal: true
# A shared module for testing ArrayConnection, RelationConnection,
# DatasetConnection and MongoRelationConnection.
#
# The test must implement `schema` to serve the queries below with the expected results.
module ConnectionAssertions
MAX_PAGE_SIZE = 6
DEFAULT_PAGE_SIZE = 4
NAMES = [
"Avocado",
"Beet",
"Cucumber",
"Dill",
"Eggplant",
"Fennel",
"Ginger",
"Horseradish",
"I Can't Believe It's Not Butter",
"Jicama",
]
class NonceEnabledEncoder
class << self
def encode(value, nonce: false)
"#{JSON.dump([value])}#{nonce ? "+nonce" : ""}"
end
def decode(value, nonce: false)
if nonce
value = value.sub(/\+nonce$/, "")
end
JSON.parse(value).first
end
end
end
def self.build_schema(get_items:, connection_class:, total_count_connection_class:)
base_schema = Class.new(GraphQL::Schema)
Class.new(base_schema) do
default_max_page_size ConnectionAssertions::MAX_PAGE_SIZE
default_page_size ConnectionAssertions::DEFAULT_PAGE_SIZE
cursor_encoder(NonceEnabledEncoder)
# Make a way to get local variables (passed in as args)
# into method resolvers below
class << self
attr_accessor :get_items, :connection_class, :total_count_connection_class, :custom_connection_class_with_custom_edge
end
self.get_items = get_items
self.connection_class = connection_class
self.total_count_connection_class = total_count_connection_class
item = Class.new(GraphQL::Schema::Object) do
graphql_name "Item"
field :name, String, null: false
end
custom_item_edge = Class.new(GraphQL::Types::Relay::BaseEdge) do
node_type item
graphql_name "CustomItemEdge"
field :parent_class, String, null: false
def parent_class
object.parent.class.inspect
end
field :node_class_name, String, null: false
end
custom_edge_class = Class.new(GraphQL::Pagination::Connection::Edge) do
def node_class_name
node.class.name
end
end
custom_item_connection = Class.new(GraphQL::Types::Relay::BaseConnection) do
graphql_name "CustomItemConnection"
edge_type custom_item_edge, edge_class: custom_edge_class
field :total_count, Integer, null: false
end
if connection_class
self.custom_connection_class_with_custom_edge = Class.new(connection_class) do
const_set(:Edge, custom_edge_class)
end
end
custom_items_with_custom_edge = Class.new(GraphQL::Types::Relay::BaseConnection) do
graphql_name "AnotherCustomItemConnection"
edge_type custom_item_edge
end
query = Class.new(GraphQL::Schema::Object) do
graphql_name "Query"
field :items, item.connection_type, null: false do
argument :max_page_size_override, Integer, required: false
argument :default_page_size_override, Integer, required: false
end
def items(max_page_size_override: :no_value, default_page_size_override: :no_value)
if max_page_size_override == :no_value && default_page_size_override == :no_value
# don't manually apply the wrapper when it's not required -- check automatic wrapping.
get_items
else
args = {}
args[:max_page_size] = max_page_size_override if max_page_size_override != :no_value
args[:default_page_size] = default_page_size_override if default_page_size_override != :no_value
context.schema.connection_class.new(get_items, **args)
end
end
field :custom_items, custom_item_connection, null: false
def custom_items
context.schema.total_count_connection_class.new(get_items)
end
if connection_class
field :custom_items_with_custom_edge, custom_items_with_custom_edge, null: false
def custom_items_with_custom_edge
context.schema.custom_connection_class_with_custom_edge.new(get_items)
end
end
field :limited_items, item.connection_type, null: false, max_page_size: 2
def limited_items
get_items
end
field :preloaded_items, item.connection_type
def preloaded_items
relation = get_items
relation.load # force the unbounded relation to load from the database
relation
end
field :unbounded_items, item.connection_type, max_page_size: nil, default_page_size: nil
def unbounded_items
get_items
end
field :offset_items, item.connection_type
def offset_items
get_items.offset(2)
end
private
def get_items
context.schema.get_items.call
end
end
query(query)
end
end
def self.included(child_module)
child_module.class_exec do
def exec_query(query_str, variables)
schema.execute(query_str, variables: variables)
end
def get_page_info(result, page_info_field)
result["data"]["items"]["pageInfo"][page_info_field]
end
def assert_names(expected_names, result)
nodes_names = result["data"]["items"]["nodes"].map { |n| n["name"] }
assert_equal expected_names, nodes_names, "The nodes shortcut field has expected names"
edges_names = result["data"]["items"]["edges"].map { |n| n["node"]["name"] }
assert_equal expected_names, edges_names, "The edges.node has expected names"
end
describe "cursor-based pagination" do
let(:query_str) { <<-GRAPHQL
query($first: Int, $after: String, $last: Int, $before: String) {
items(first: $first, after: $after, last: $last, before: $before) {
nodes {
name
}
edges {
node {
name
}
cursor
}
pageInfo {
hasNextPage
hasPreviousPage
startCursor
endCursor
}
}
}
GRAPHQL
}
it "works with first/after" do
res = exec_query(query_str, first: 3)
assert_names(["Avocado", "Beet", "Cucumber"], res)
assert get_page_info(res, "hasNextPage")
refute get_page_info(res, "hasPreviousPage")
after_cursor = get_page_info(res, "endCursor")
res2 = exec_query(query_str, first: 3, after: after_cursor)
assert_names(["Dill", "Eggplant", "Fennel"], res2)
assert get_page_info(res2, "hasNextPage")
assert get_page_info(res2, "hasPreviousPage")
after_cursor2 = get_page_info(res2, "endCursor")
res3 = exec_query(query_str, first: 30, after: after_cursor2)
assert_names(["Ginger", "Horseradish", "I Can't Believe It's Not Butter", "Jicama"], res3)
refute get_page_info(res3, "hasNextPage")
assert get_page_info(res3, "hasPreviousPage")
end
it "works with last/before" do
res = exec_query(query_str, last: 3)
assert_names(["Horseradish", "I Can't Believe It's Not Butter", "Jicama"], res)
refute get_page_info(res, "hasNextPage")
assert get_page_info(res, "hasPreviousPage")
before_cursor = get_page_info(res, "startCursor")
res2 = exec_query(query_str, last: 3, before: before_cursor)
assert_names(["Eggplant", "Fennel", "Ginger"], res2)
assert get_page_info(res2, "hasNextPage")
assert get_page_info(res2, "hasPreviousPage")
before_cursor2 = get_page_info(res2, "startCursor")
res3 = exec_query(query_str, last: 10, before: before_cursor2)
assert_names(["Avocado", "Beet", "Cucumber", "Dill"], res3)
assert get_page_info(res3, "hasNextPage")
refute get_page_info(res3, "hasPreviousPage")
end
it "returns empty lists for `after: 1` and `before: 2`" do
res = exec_query(query_str, first: 2)
assert_names(["Avocado", "Beet"], res)
after_cursor = get_page_info(res, "startCursor")
before_cursor = get_page_info(res, "endCursor")
res = exec_query(query_str, after: after_cursor, before: before_cursor)
assert_equal true, get_page_info(res, "hasNextPage")
assert_equal true, get_page_info(res, "hasPreviousPage")
assert_names [], res
res = exec_query(query_str, after: after_cursor, before: before_cursor, first: 3)
assert_equal true, get_page_info(res, "hasNextPage")
assert_equal true, get_page_info(res, "hasPreviousPage")
assert_names [], res
end
it "handles out-of-bounds cursors" do
# It treats negative cursors like zero
bogus_negative_cursor = NonceEnabledEncoder.encode("-10")
res = exec_query(query_str, first: 3, after: bogus_negative_cursor)
assert_names(["Avocado", "Beet", "Cucumber"], res)
# It returns nothing for cursors beyond the array
bogus_huge_cursor = NonceEnabledEncoder.encode("100")
res = exec_query(query_str, first: 3, after: bogus_huge_cursor)
assert_names([], res)
# It returns nothing before the first cursor
first_cursor = NonceEnabledEncoder.encode("1")
res = exec_query(query_str, first: 3, before: first_cursor)
assert_names([], res)
end
it "handles negative firsts and lasts by treating them as zero" do
res = exec_query(query_str, first: -3)
assert_names([], res)
res = exec_query(query_str, last: -9)
assert_names([], res)
end
it "handles blank cursors by treating them as nil" do
res = exec_query(query_str, first: 3, after: "")
assert_names(["Avocado", "Beet", "Cucumber"], res)
res = exec_query(query_str, last: 3, before: "")
assert_names(["Horseradish", "I Can't Believe It's Not Butter", "Jicama"], res)
end
it "builds cursors with nonce" do
res = exec_query(query_str, first: 3, after: "")
end_cursor = get_page_info(res, "endCursor")
assert end_cursor.end_with?("+nonce"), "it added nonce to #{end_cursor.inspect}"
end
it "applies max_page_size to first and last" do
# max_page_size overrides first
res = exec_query(query_str, first: 10)
assert_names(["Avocado", "Beet", "Cucumber", "Dill", "Eggplant", "Fennel"], res)
assert_equal true, get_page_info(res, "hasNextPage")
assert_equal false, get_page_info(res, "hasPreviousPage")
# max_page_size overrides last
res = exec_query(query_str, last: 10)
assert_names(["Eggplant", "Fennel", "Ginger", "Horseradish", "I Can't Believe It's Not Butter", "Jicama"], res)
assert_equal false, get_page_info(res, "hasNextPage")
assert_equal true, get_page_info(res, "hasPreviousPage")
end
it "applies default_page_size to first when first and last are unspecified" do
res = exec_query(query_str, {})
# Neither first nor last was provided, so default_page_size was applied.
assert_names(["Avocado", "Beet", "Cucumber", "Dill"], res)
assert_equal true, get_page_info(res, "hasNextPage")
assert_equal false, get_page_info(res, "hasPreviousPage")
end
it "returns unbounded lists" do
query_str = <<-GRAPHQL
query($first: Int, $after: String, $last: Int, $before: String) {
unboundedItems(first: $first, after: $after, last: $last, before: $before) {
nodes {
name
}
edges {
node {
name
}
cursor
}
pageInfo {
hasNextPage
hasPreviousPage
startCursor
endCursor
}
}
}
GRAPHQL
res = exec_query(query_str, {})
assert_equal 10, res["data"]["unboundedItems"]["nodes"].size
end
end
describe "customizing" do
it "serves custom fields" do
res = schema.execute <<-GRAPHQL, root_value: :something
{
items: customItems(first: 3) {
nodes {
name
}
edges {
node {
name
}
parentClass
nodeClassName
}
totalCount
}
}
GRAPHQL
assert_names(["Avocado", "Beet", "Cucumber"], res)
assert_equal 10, res["data"]["items"]["totalCount"]
edge = res["data"]["items"]["edges"][0]
# Since this connection hangs off `Query`, the root value is the parent.
assert_equal "Symbol", edge["parentClass"]
if schema.get_items
node_class_name = schema.get_items.call.first.class.name
assert_instance_of String, node_class_name
assert_equal node_class_name, edge["nodeClassName"]
end
end
it "uses custom ::Edge classes" do
skip "Not supported" if schema.connection_class.nil?
res = schema.execute <<-GRAPHQL, root_value: :something
{
items: customItemsWithCustomEdge(first: 3) {
nodes {
name
}
edges {
node {
name
}
nodeClassName
}
}
}
GRAPHQL
assert_names(["Avocado", "Beet", "Cucumber"], res)
edge = res["data"]["items"]["edges"][0]
node_class_name = schema.get_items.call.first.class.name
assert_instance_of String, node_class_name
assert_equal node_class_name, edge["nodeClassName"]
end
it "applies local max-page-size settings" do
# Smaller default:
res = schema.execute <<-GRAPHQL
{
items(first: 10, maxPageSizeOverride: 3) {
nodes {
name
}
edges {
node {
name
}
}
}
}
GRAPHQL
assert_names(["Avocado", "Beet", "Cucumber"], res)
# Larger than the default:
res = schema.execute <<-GRAPHQL
{
items(first: 10, maxPageSizeOverride: 7) {
nodes {
name
}
edges {
node {
name
}
}
}
}
GRAPHQL
assert_names(["Avocado", "Beet", "Cucumber", "Dill", "Eggplant", "Fennel", "Ginger"], res)
# Unlimited
res = schema.execute <<-GRAPHQL
{
items(first: 100, maxPageSizeOverride: null) {
nodes {
name
}
edges {
node {
name
}
}
}
}
GRAPHQL
assert_names(NAMES, res)
end
it "applies a field-level max-page-size configuration" do
res = schema.execute <<-GRAPHQL
{
items: limitedItems(first: 10) {
nodes {
name
}
edges {
node {
name
}
}
}
}
GRAPHQL
assert_names(["Avocado", "Beet"], res)
end
end
end
end
end
| ruby | MIT | fa2ba4e489f8475b194f7c6985e0b25681a442c2 | 2026-01-04T15:43:02.089024Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.