CombinedText stringlengths 4 3.42M |
|---|
require 'nokogiri'
require 'cdo/user_agent_parser'
require 'cdo/graphics/certificate_image'
# View helpers shared across the dashboard application: browser sniffing,
# progress/level display, flash rendering, sharing-image URLs, and
# signup/certificate helpers.
module ApplicationHelper
  include LocaleHelper
  include ScriptLevelsHelper

  # One shared parser instance; constructing UserAgentParser::Parser is costly.
  USER_AGENT_PARSER = UserAgentParser::Parser.new

  # Parses (and memoizes per-request) the browser from the User-Agent header.
  def browser
    @browser ||= USER_AGENT_PARSER.parse request.headers["User-Agent"]
  end

  # Short human-readable relative time, e.g. "5 minutes ago".
  def ago(from_time)
    s = distance_of_time_in_words_to_now(from_time)
    # XXX This is horribly broken for localization.
    s = s.gsub("about ", "")
    s = s.gsub("less than ", "")
    s = s.gsub("a minute", "1 minute")
    "#{s} ago"
  end

  # Pretty-prints an XML string as XHTML via Nokogiri.
  def format_xml(xml)
    doc = Nokogiri::XML(xml)
    doc.to_xhtml
  end

  # [label, value] pairs for a gender <select>; a nil key yields a blank label.
  def gender_options
    User::GENDER_OPTIONS.map do |key, value|
      [(key ? t(key) : ''), value]
    end
  end

  # [translated label, value] pairs for a user-type <select>.
  def user_type_options
    User::USER_TYPE_OPTIONS.map do |user_type|
      [t("user_type.#{user_type}"), user_type]
    end
  end

  # Check-mark image used in progress displays.
  def check_mark_html
    #raw "✔"
    image_tag(image_url('white-checkmark.png'))
  end

  # Maps a numeric activity result onto the CSS class used for progress dots.
  def activity_css_class(result)
    if result.nil?
      'not_tried'
    elsif result >= Activity::FREE_PLAY_RESULT
      'perfect'
    elsif result >= Activity::MINIMUM_PASS_RESULT
      'passed'
    else
      'attempted'
    end
  end

  # Returns [css_class, url] for a script level, using the signed-in user's
  # best result or, for anonymous visitors, progress stored in the session.
  def level_info(user, script_level)
    result =
      if user
        script_level.try(:user_level).try(:best_result)
      elsif session[:progress] && session[:progress][script_level.level_id]
        session[:progress][script_level.level_id]
      end
    link = build_script_level_url(script_level)
    [activity_css_class(result), link]
  end

  # Renders flash notice/alert divs, clearing each so it displays only once.
  def show_flashes
    ret = ''
    if notice.present?
      ret += content_tag(:div, flash.notice, {class: 'alert alert-success'})
      flash.notice = nil
    end
    if alert.present?
      ret += content_tag(:div, flash.alert, {class: 'alert alert-danger'})
      flash.alert = nil
    end
    ret
  end

  def code_org_root_path
    CDO.code_org_url
  end

  def teacher_dashboard_url
    CDO.code_org_url '/teacher-dashboard'
  end

  # used by devise to redirect user after signing in
  def signed_in_root_path(resource_or_scope)
    if session[:return_to]
      return session.delete(:return_to)
    elsif resource_or_scope.is_a?(User) && resource_or_scope.teacher?
      return teacher_dashboard_url
    end
    '/'
  end

  # Logout URL for a third-party OAuth provider; nil for unknown providers.
  def external_oauth_sign_out_url(provider)
    case provider.to_sym
    when :facebook
      'https://www.facebook.com/logout.php'
    when :windowslive
      'http://login.live.com/logout.srf'
    when :google_oauth2
      'https://accounts.google.com/logout'
    end
  end

  # Image URL for social-sharing meta tags: the level-source image when one
  # exists, otherwise a per-game or generic sharing drawing.
  def meta_image_url(opts = {})
    app = opts[:level_source].try(:level).try(:game).try(:app) || opts[:level].try(:game).try(:app)
    # playlab/studio and artist/turtle can have images
    if opts[:level_source].try(:level_source_image).try(:image)
      # BUG FIX: level_source was referenced below without ever being
      # assigned (NameError at runtime); bind it to the option the guard
      # above just checked.
      level_source = opts[:level_source]
      if level_source.level_source_image.s3?
        if app == Game::ARTIST
          level_source.level_source_image.s3_framed_url
        else
          level_source.level_source_image.s3_url
        end
      else
        url_for(controller: 'level_sources', action: 'generate_image', id: level_source.id, only_path: false)
      end
    elsif [Game::FLAPPY, Game::BOUNCE, Game::STUDIO].include? app
      asset_url "#{app}_sharing_drawing.png"
    else
      asset_url 'sharing_drawing.png'
    end
  end

  # Prefers the S3 copy of a level-source image; falls back to the
  # dynamically generated original-image route.
  def original_image_url(level_source)
    if level_source.try(:level_source_image).try(:s3?)
      level_source.level_source_image.s3_url
    else
      original_image_level_source_path(level_source.id)
    end
  end

  # Builds the HTML error summary shown on the signup form.
  # See also https://github.com/plataformatec/devise/blob/master/app/helpers/devise_helper.rb
  def signup_error_messages!
    return "" if resource.errors.empty?
    messages = resource.errors.full_messages.map { |msg| content_tag(:li, msg) }.join
    sentence = resource.oauth? ?
      I18n.t("signup_form.additional_information") :
      I18n.t("errors.messages.not_saved",
             count: resource.errors.count,
             resource: resource.class.model_name.human.downcase)
    html = <<-HTML
<div id="error_explanation">
<h2>#{sentence}</h2>
<ul>#{messages}</ul>
</div>
    HTML
    html.html_safe
  end

  # True when the current script (or the user's primary script) targets K-1.
  def is_k1?
    is_k1 = @script.try(:is_k1?)
    is_k1 = current_user.try(:primary_script).try(:is_k1?) if is_k1.nil?
    is_k1
  end

  def playlab_freeplay_path
    script_stage_script_level_path(*is_k1? ? ['course1', 16, 6] : ['playlab', 1, 10])
  end

  def artist_freeplay_path
    script_stage_script_level_path(*is_k1? ? ['course1', 18, 10] : ['artist', 1, 10])
  end

  # URL of the completion-certificate image for a user/script pair.
  def script_certificate_image_url(user, script)
    if script.hoc?
      script_name = 'hoc'
    elsif script.twenty_hour?
      script_name = '20hours'
    else
      script_name = data_t_suffix('script.name', script.name, "title")
    end
    certificate_image_url(name: user.name, course: script_name)
  end
end
fix /c/ urls
require 'nokogiri'
require 'cdo/user_agent_parser'
require 'cdo/graphics/certificate_image'
# View helpers shared across the dashboard application: browser sniffing,
# progress/level display, flash rendering, sharing-image URLs, and
# signup/certificate helpers.
module ApplicationHelper
include LocaleHelper
include ScriptLevelsHelper
# One shared parser instance; constructing UserAgentParser::Parser is costly.
USER_AGENT_PARSER = UserAgentParser::Parser.new
# Parses (and memoizes per-request) the browser from the User-Agent header.
def browser
@browser ||= USER_AGENT_PARSER.parse request.headers["User-Agent"]
end
# Short human-readable relative time, e.g. "5 minutes ago".
def ago(from_time)
s = distance_of_time_in_words_to_now(from_time)
# XXX This is horribly broken for localization.
s = s.gsub("about ", "")
s = s.gsub("less than ", "")
s = s.gsub("a minute", "1 minute")
"#{s} ago"
end
# Pretty-prints an XML string as XHTML via Nokogiri.
def format_xml(xml)
doc = Nokogiri::XML(xml)
doc.to_xhtml
end
# [label, value] pairs for a gender select; a nil key yields a blank label.
def gender_options
User::GENDER_OPTIONS.map do |key, value|
[(key ? t(key) : ''), value]
end
end
# [translated label, value] pairs for a user-type select.
def user_type_options
User::USER_TYPE_OPTIONS.map do |user_type|
[t("user_type.#{user_type}"), user_type]
end
end
# Check-mark image used in progress displays.
def check_mark_html
#raw "✔"
image_tag(image_url('white-checkmark.png'))
end
# Maps a numeric activity result onto the CSS class used for progress dots.
def activity_css_class(result)
if result.nil?
'not_tried'
elsif result >= Activity::FREE_PLAY_RESULT
'perfect'
elsif result >= Activity::MINIMUM_PASS_RESULT
'passed'
else
'attempted'
end
end
# Returns [css_class, url] for a script level, using the signed-in user's
# best result or, for anonymous visitors, progress stored in the session.
def level_info(user, script_level)
result =
if user
script_level.try(:user_level).try(:best_result)
elsif session[:progress] && session[:progress][script_level.level_id]
session[:progress][script_level.level_id]
end
link = build_script_level_url(script_level)
[activity_css_class(result), link]
end
# Renders flash notice/alert divs, clearing each so it displays only once.
def show_flashes
ret = ''
if notice.present?
ret += content_tag(:div, flash.notice, {class: 'alert alert-success'})
flash.notice = nil
end
if alert.present?
ret += content_tag(:div, flash.alert, {class: 'alert alert-danger'})
flash.alert = nil
end
ret
end
def code_org_root_path
CDO.code_org_url
end
def teacher_dashboard_url
CDO.code_org_url '/teacher-dashboard'
end
# used by devise to redirect user after signing in
def signed_in_root_path(resource_or_scope)
if session[:return_to]
return session.delete(:return_to)
elsif resource_or_scope.is_a?(User) && resource_or_scope.teacher?
return teacher_dashboard_url
end
'/'
end
# Logout URL for a third-party OAuth provider; nil for unknown providers.
def external_oauth_sign_out_url(provider)
case provider.to_sym
when :facebook
'https://www.facebook.com/logout.php'
when :windowslive
'http://login.live.com/logout.srf'
when :google_oauth2
'https://accounts.google.com/logout'
end
end
# Image URL for social-sharing meta tags: the level-source image when one
# exists, otherwise a per-game or generic sharing drawing.
def meta_image_url(opts = {})
app = opts[:level_source].try(:level).try(:game).try(:app) || opts[:level].try(:game).try(:app)
# playlab/studio and artist/turtle can have images
if opts[:level_source].try(:level_source_image).try(:image)
level_source = opts[:level_source]
if level_source.level_source_image.s3?
if app == Game::ARTIST then
level_source.level_source_image.s3_framed_url
else
level_source.level_source_image.s3_url
end
else
url_for(controller: 'level_sources', action: 'generate_image', id: level_source.id, only_path: false)
end
elsif [Game::FLAPPY, Game::BOUNCE, Game::STUDIO].include? app
asset_url "#{app}_sharing_drawing.png"
else
asset_url 'sharing_drawing.png'
end
end
# Prefers the S3 copy of a level-source image; falls back to the
# dynamically generated original-image route.
def original_image_url(level_source)
if level_source.try(:level_source_image).try(:s3?)
level_source.level_source_image.s3_url
else
original_image_level_source_path(level_source.id)
end
end
# Builds the HTML error summary shown on the signup form.
def signup_error_messages!
# See also https://github.com/plataformatec/devise/blob/master/app/helpers/devise_helper.rb
return "" if resource.errors.empty?
messages = resource.errors.full_messages.map { |msg| content_tag(:li, msg) }.join
sentence = resource.oauth? ?
I18n.t("signup_form.additional_information") :
I18n.t("errors.messages.not_saved",
count: resource.errors.count,
resource: resource.class.model_name.human.downcase)
html = <<-HTML
<div id="error_explanation">
<h2>#{sentence}</h2>
<ul>#{messages}</ul>
</div>
HTML
html.html_safe
end
# True when the current script (or the user's primary script) targets K-1.
def is_k1?
is_k1 = @script.try(:is_k1?)
is_k1 = current_user.try(:primary_script).try(:is_k1?) if is_k1.nil?
is_k1
end
def playlab_freeplay_path
script_stage_script_level_path(*is_k1? ? ['course1', 16, 6] : ['playlab', 1, 10])
end
def artist_freeplay_path
script_stage_script_level_path(*is_k1? ? ['course1', 18, 10] : ['artist', 1, 10])
end
# URL of the completion-certificate image for a user/script pair.
def script_certificate_image_url(user, script)
if script.hoc?
script_name = 'hoc'
elsif script.twenty_hour?
script_name = '20hours'
else
script_name = data_t_suffix('script.name', script.name, "title")
end
certificate_image_url(name: user.name, course: script_name)
end
end
|
# Initial database layout: contributors, commits, and the contributions
# join table that links them, with uniqueness indexes on natural keys.
class InitialSchema < ActiveRecord::Migration
  def self.up
    create_table :contributors do |t|
      t.string :name
      t.string :url_id, null: false
      t.integer :contributions_count
    end
    add_index :contributors, :url_id, unique: true

    create_table :commits do |t|
      t.string :object_id, null: false
      t.string :author
      t.timestamp :authored_timestamp
      t.string :committer
      t.timestamp :committed_timestamp
      t.text :message
      t.boolean :imported_from_svn
      t.text :changelog
    end
    add_index :commits, :object_id, unique: true

    create_table :contributions do |t|
      t.references :contributor, null: false
      t.references :commit, null: false
    end
    add_index :contributions, :contributor_id
    add_index :contributions, :commit_id
  end

  def self.down
    drop_table :contributors
    drop_table :commits
    drop_table :contributions
  end
end
add index for contributor names
# Initial database layout plus an index on contributors.name for
# name-based lookups.
class InitialSchema < ActiveRecord::Migration
def self.up
create_table :contributors do |t|
t.string :name
t.string :url_id, :null => false
t.integer :contributions_count
end
# Supports lookups and sorting by contributor name (non-unique).
add_index :contributors, :name
add_index :contributors, :url_id, :unique => true
create_table :commits do |t|
# NOTE(review): a column named object_id shadows Ruby's Object#object_id
# on the model — confirm the model handles this before renaming.
t.string :object_id, :null => false
t.string :author
t.timestamp :authored_timestamp
t.string :committer
t.timestamp :committed_timestamp
t.text :message
t.boolean :imported_from_svn
t.text :changelog
end
add_index :commits, :object_id, :unique => true
create_table :contributions do |t|
t.references :contributor, :null => false
t.references :commit, :null => false
end
add_index :contributions, :contributor_id
add_index :contributions, :commit_id
end
def self.down
drop_table :contributors
drop_table :commits
drop_table :contributions
end
end
|
# Code-golfed board-walk solver. NOTE(review): intentionally golfed; the
# comments below are reviewer inferences from the tests that follow —
# confirm before relying on them.
F=
# s recurses row by row: l = remaining board lines, w = column chosen on the
# previous row (p, i.e. nil, on the first row, where ?L marks the start).
->b{s=->l,w=p{c,*x=l.map &:dup
y=w||c.index(?L)
n=x[0]
# Piece values by letter: P=1, B=3, N=3, R=5, Q=9; anything else scores 0.
v=[1,3,3,5,9,0]['PBNRQ'.index(c[y])||5]
# Mark the visited square with 'X' (only after the first row).
w&&c[y]=?X
# Try straight down and both diagonals; keep the highest-scoring path.
n ?(m=[]
n[y]<?.&&m<<s[x,y]
y<8&&n[y+1]>?-&&m<<s[x,y+1]
y>0&&n[y-1]>?-&&m<<s[x,y-1]
b=m.max_by{|m|m&&m[0]||0}
b&&[b[0]+v,c+b[1]]):[v, c]}
# Return only the annotated board (element 1), not the score.
s[b.lines][1]}
require 'minitest/autorun'
# Regression tests for F: the output board must mark the chosen path with
# 'X' below the starting 'L'. NOTE(review): bare `obj.must_equal`
# expectations require an older Minitest; newer versions need
# `_(obj).must_equal`.
describe F do
def test_case_1
input = <<-EOS
----L---
-----P--
------P-
--R--P-Q
----P-P-
---P-P-P
--P-N---
-P------
EOS
F[input].must_equal <<-EOS
----L---
-----X--
------X-
--R--P-X
----P-X-
---P-X-P
--P-X---
-P--X---
EOS
end
def test_case_2
input = <<-EOS
--L-----
-P------
P-------
-P------
P--Q----
-P------
P-------
-P------
EOS
F[input].must_equal <<-EOS
--L-----
-PX-----
P-X-----
-PX-----
P--X----
-P-X----
P--X----
-P-X----
EOS
end
end
Combined the assignment and first use of the variable `y` into a single expression to save two characters.
# Code-golfed board-walk solver; this revision inlines the assignment of
# the column index `y` into its first use. NOTE(review): intentionally
# golfed; comments are reviewer inferences from the tests that follow.
F=
# s recurses row by row: l = remaining board lines, w = column chosen on the
# previous row (p, i.e. nil, on the first row, where ?L marks the start).
->b{s=->l,w=p{c,*x=l.map &:dup
n=x[0]
# Assigns y (w, or the 'L' column on row one) while scoring the square:
# P=1, B=3, N=3, R=5, Q=9; anything else scores 0.
v=[1,3,3,5,9,0]['PBNRQ'.index(c[y=w||c.index(?L)])||5]
# Mark the visited square with 'X' (only after the first row).
w&&c[y]=?X
# Try straight down and both diagonals; keep the highest-scoring path.
n ?(m=[]
n[y]<?.&&m<<s[x,y]
y<8&&n[y+1]>?-&&m<<s[x,y+1]
y>0&&n[y-1]>?-&&m<<s[x,y-1]
b=m.max_by{|m|m&&m[0]||0}
b&&[b[0]+v,c+b[1]]):[v, c]}
# Return only the annotated board (element 1), not the score.
s[b.lines][1]}
require 'minitest/autorun'
# Regression tests for F: the output board must mark the chosen path with
# 'X' below the starting 'L'. NOTE(review): bare `obj.must_equal`
# expectations require an older Minitest; newer versions need
# `_(obj).must_equal`.
describe F do
def test_case_1
input = <<-EOS
----L---
-----P--
------P-
--R--P-Q
----P-P-
---P-P-P
--P-N---
-P------
EOS
F[input].must_equal <<-EOS
----L---
-----X--
------X-
--R--P-X
----P-X-
---P-X-P
--P-X---
-P--X---
EOS
end
def test_case_2
input = <<-EOS
--L-----
-P------
P-------
-P------
P--Q----
-P------
P-------
-P------
EOS
F[input].must_equal <<-EOS
--L-----
-PX-----
P-X-----
-PX-----
P--X----
-P-X----
P--X----
-P-X----
EOS
end
end
|
# Initial database layout: contributors, commits, and the contributions
# join table. NOTE(review): this version names the timestamp columns
# author_timestamp / committer_timestamp, while other revisions of this
# migration use authored_timestamp / committed_timestamp — confirm which
# naming the models expect.
class InitialSchema < ActiveRecord::Migration
def self.up
create_table :contributors do |t|
t.string :name
t.string :url_id, :null => false
end
add_index :contributors, :url_id, :unique => true
create_table :commits do |t|
# NOTE(review): a column named object_id shadows Ruby's Object#object_id
# on the model — confirm the model handles this before renaming.
t.string :object_id, :null => false
t.string :author
t.timestamp :author_timestamp
t.string :committer
t.timestamp :committer_timestamp
t.text :message
t.boolean :imported_from_svn
t.text :changelog
end
add_index :commits, :object_id, :unique => true
create_table :contributions do |t|
t.references :contributor
t.references :commit
end
add_index :contributions, :contributor_id
add_index :contributions, :commit_id
end
def self.down
drop_table :contributors
drop_table :commits
drop_table :contributions
end
end
fix commits schema in initial migration
# Creates the three core tables — contributors, commits, and the
# contributions join table — with uniqueness indexes on their natural keys.
class InitialSchema < ActiveRecord::Migration
  def self.up
    create_table :contributors do |t|
      t.string :name
      t.string :url_id, null: false
    end
    add_index :contributors, :url_id, unique: true

    create_table :commits do |t|
      t.string :object_id, null: false
      t.string :author
      t.timestamp :authored_timestamp
      t.string :committer
      t.timestamp :committed_timestamp
      t.text :message
      t.boolean :imported_from_svn
      t.text :changelog
    end
    add_index :commits, :object_id, unique: true

    create_table :contributions do |t|
      t.references :contributor
      t.references :commit
    end
    add_index :contributions, :contributor_id
    add_index :contributions, :commit_id
  end

  def self.down
    drop_table :contributors
    drop_table :commits
    drop_table :contributions
  end
end
|
# Seed roles groups and permissions
# roles
puts ''
puts '---- Adding Roles ----'
# Role names are stored CamelCased, e.g. 'site_administrator' ->
# 'SiteAdministrator'.
admin_role = Role.create(:name => 'admin'.camelize)
owner_role = Role.create(:name => 'owner'.camelize)
user_role = Role.create(:name => 'user'.camelize)
developer_role = Role.create(:name => 'developer'.camelize)
collaborator_role = Role.create(:name => 'collaborator'.camelize)
site_admin_role = Role.create(:name => 'site_administrator'.camelize)
app_admin_role = Role.create(:name => 'application_administrator'.camelize)
app_manager_role = Role.create(:name => 'application_manager'.camelize)
workflow_developer_role = Role.create(:name => 'workflow_developer'.camelize)
project_manager_role = Role.create(:name => 'project_manager'.camelize)
project_developer_role = Role.create(:name => 'project_developer'.camelize)
contributor_role = Role.create(:name => 'contributor'.camelize)
data_scientist_role = Role.create(:name => 'data_scientist'.camelize)
puts ''
puts '---- Adding permissions ----'
# Bootstrap: grant the built-in chorusadmin account the site-administrator
# role, but only if that account exists.
chorusadmin = User.find_by_username("chorusadmin")
site_admin_role.users << chorusadmin if chorusadmin
# Groups
puts '---- Adding Default Group ----'
default_group = Group.create(:name => 'default_group')
# Scope
puts ''
puts '---- Adding application_realm as Default Scope ----'
application_realm = ChorusScope.create(:name => 'application_realm')
# add application_realm to default group
default_group.chorus_scope = application_realm
site_admin_role.groups << default_group
#Role.all.each do |role|
# role.groups << default_group
#end
# permissions
puts ''
puts '---- Adding Chorus object classes ----'
# Register a ChorusClass row for every permission-bearing model class.
# Names are stored CamelCased to match the Ruby class names.
ChorusClass.create(
  [
    {:name => 'activity'.camelize},
    {:name => 'account'.camelize},
    {:name => 'alpine_workfile'.camelize},
    {:name => 'associated_dataset'.camelize},
    {:name => 'chorus_scope'.camelize},
    {:name => 'chorus_view'.camelize},
    {:name => 'chorus_workfile'.camelize},
    {:name => 'comment'.camelize},
    {:name => 'csv_file'.camelize},
    {:name => 'csv_import'.camelize},
    {:name => 'dashboard'.camelize},
    {:name => 'dashboard_config'.camelize},
    {:name => 'dashboard_item'.camelize},
    {:name => 'data_source'.camelize},
    {:name => 'data_source_account'.camelize},
    {:name => 'database'.camelize},
    {:name => 'dataset'.camelize},
    {:name => 'database_column'.camelize},
    {:name => 'datasets_note'.camelize},
    {:name => 'external_table'.camelize},
    {:name => 'gnip_data_source'.camelize},
    {:name => 'gnip_import'.camelize},
    {:name => 'gpdb_column_statistics'.camelize},
    {:name => 'gpdb_data_source'.camelize},
    {:name => 'gpdb_dataset'.camelize},
    {:name => 'gpdb_database'.camelize},
    {:name => 'gpdb_dataset_column'.camelize},
    {:name => 'gpdb_schema'.camelize},
    {:name => 'gpdb_table'.camelize},
    {:name => 'gpdb_view'.camelize},
    {:name => 'greenplum_sql_result'.camelize},
    {:name => 'group'.camelize},
    {:name => 'hdfs_data_source'.camelize},
    {:name => 'hdfs_dataset'.camelize},
    {:name => 'hdfs_dataset_statistics'.camelize},
    {:name => 'hdfs_entry'.camelize},
    {:name => 'hdfs_entry_statistics'.camelize},
    {:name => 'hdfs_file'.camelize},
    {:name => 'hdfs_import'.camelize},
    {:name => 'insight'.camelize},
    {:name => 'import'.camelize},
    {:name => 'import_source_data_task'.camelize},
    {:name => 'import_source_task_result'.camelize},
    # BUG FIX: was 'imoort_template', which would have registered a
    # nonexistent "ImoortTemplate" class.
    {:name => 'import_template'.camelize},
    {:name => 'jdbc_data_source'.camelize},
    {:name => 'jdbc_dataset'.camelize},
    {:name => 'jdbc_dataset_column'.camelize},
    {:name => 'jdbc_hive_data_source'.camelize},
    {:name => 'jdbc_schema'.camelize},
    {:name => 'jdbc_sql_result'.camelize},
    {:name => 'jdbc_table'.camelize},
    {:name => 'jdbc_view'.camelize},
    {:name => 'job'.camelize},
    {:name => 'job_result'.camelize},
    {:name => 'job_task'.camelize},
    {:name => 'job_task_result'.camelize},
    {:name => 'ldap_config'.camelize},
    {:name => 'license'.camelize},
    {:name => 'linked_tableau_workfile'.camelize},
    {:name => 'membership'.camelize},
    {:name => 'milestone'.camelize},
    {:name => 'my_workspace_search'.camelize},
    {:name => 'note'.camelize},
    {:name => 'notes_workflow_result'.camelize},
    {:name => 'notes_workfile'.camelize},
    {:name => 'notification'.camelize},
    {:name => 'open_workfile_event'.camelize},
    {:name => 'operation'.camelize},
    {:name => 'oracle_data_source'.camelize},
    {:name => 'oracle_dataset'.camelize},
    {:name => 'oracle_dataset_column'.camelize},
    {:name => 'oracle_schema'.camelize},
    {:name => 'oracle_sql_result'.camelize},
    {:name => 'oracle_table'.camelize},
    {:name => 'oracle_view'.camelize},
    {:name => 'permission'.camelize},
    {:name => 'pg_data_source'.camelize},
    {:name => 'pg_database'.camelize},
    {:name => 'pg_dataset'.camelize},
    {:name => 'pg_dataset_column'.camelize},
    {:name => 'pg_schema'.camelize},
    {:name => 'pg_table'.camelize},
    {:name => 'pg_view'.camelize},
    {:name => 'relational_dataset'.camelize},
    {:name => 'role'.camelize},
    {:name => 'run_sql_workfile_task'.camelize},
    {:name => 'run_workflow_task'.camelize},
    {:name => 'run_workflow_task_result'.camelize},
    {:name => 'sandbox'.camelize},
    {:name => 'schema'.camelize},
    {:name => 'schema_function'.camelize},
    {:name => 'schema_import'.camelize},
    {:name => 'search'.camelize},
    {:name => 'session'.camelize},
    {:name => 'sql_result'.camelize},
    {:name => 'sql_value_parser'.camelize},
    {:name => 'system_status'.camelize},
    {:name => 'tableau_publisher'.camelize},
    {:name => 'tableau_workbook_publication'.camelize},
    {:name => 'tag'.camelize},
    {:name => 'tagging'.camelize},
    {:name => 'task'.camelize},
    {:name => 'type_ahead_search'.camelize},
    {:name => 'upload'.camelize},
    {:name => 'user'.camelize},
    {:name => 'visualization'.camelize},
    {:name => 'workfile'.camelize},
    {:name => 'workfile_draft'.camelize},
    {:name => 'workfile_execution_location'.camelize},
    {:name => 'workfile_version'.camelize},
    {:name => 'workflow'.camelize},
    {:name => 'workspace'.camelize},
    {:name => 'workspace_import'.camelize},
    {:name => 'workspace_search'.camelize},
  ]
)
#models/dashboard
ChorusClass.create(
[
{:name => 'recent_workfiles'.camelize},
{:name => 'site_snapshot'.camelize},
{:name => 'workspace_activity'.camelize}
]
)
#models/events
ChorusClass.create(
[
{:name => 'events::Base'.camelize},
{:name => 'chorus_view_changed'.camelize},
{:name => 'chorus_view_created'.camelize},
{:name => 'credentials_invalid'.camelize},
{:name => 'data_source_changed_name'.camelize},
{:name => 'data_source_changed_owner'.camelize},
{:name => 'data_source_created'.camelize},
{:name => 'data_source_deleted'.camelize},
{:name => 'file_import_created'.camelize},
{:name => 'file_import_failed'.camelize},
# TBD. Can these event types be handle in better way?
]
)
#model/visualization
# Register the visualization model names as ChorusClass rows.
ChorusClass.create(
  [
    {:name => 'boxplot'.camelize},
    {:name => 'frequency'.camelize},
    {:name => 'heatmap'.camelize},
    # Fixed typo: was 'histograp', which camelizes to 'Histograp' and would
    # never match the histogram visualization model's class name.
    {:name => 'histogram'.camelize},
    {:name => 'timeseries'.camelize}
  ]
)
# Resolve the ChorusClass rows seeded above, by camelized name, so later
# sections can reference their ids. The original repeated the same
# where(...).first expression forty times and looked up 'data_source'
# twice in a row; the duplicate was removed and the lookup is DRYed up.
lookup_chorus_class = lambda { |name| ChorusClass.where(:name => name.camelize).first }
role_class = lookup_chorus_class.call('role')
chorus_scope_class = lookup_chorus_class.call('chorus_scope')
workspace_class = lookup_chorus_class.call('workspace')
user_class = lookup_chorus_class.call('user')
account_class = lookup_chorus_class.call('account')
datasource_class = lookup_chorus_class.call('data_source')
group_class = lookup_chorus_class.call('group')
database_class = lookup_chorus_class.call('database')
job_class = lookup_chorus_class.call('job')
gpdb_view_class = lookup_chorus_class.call('gpdb_view')
gpdb_table_class = lookup_chorus_class.call('gpdb_table')
gpdb_dataset_class = lookup_chorus_class.call('gpdb_dataset')
gpdb_schema_class = lookup_chorus_class.call('gpdb_schema')
hdfs_entry_class = lookup_chorus_class.call('hdfs_entry')
hdfs_data_source_class = lookup_chorus_class.call('hdfs_data_source')
milestone_class = lookup_chorus_class.call('milestone')
membership_class = lookup_chorus_class.call('membership')
workfile_class = lookup_chorus_class.call('workfile')
workflow_class = lookup_chorus_class.call('workflow')
activity_class = lookup_chorus_class.call('activity')
event_class = lookup_chorus_class.call('events::Base')
note_class = lookup_chorus_class.call('note')
comment_class = lookup_chorus_class.call('comment')
chorus_view_class = lookup_chorus_class.call('chorus_view')
sandbox_class = lookup_chorus_class.call('sandbox')
csv_file_class = lookup_chorus_class.call('csv_file')
dataset_class = lookup_chorus_class.call('dataset')
associated_dataset_class = lookup_chorus_class.call('associated_dataset')
import_class = lookup_chorus_class.call('import')
pg_table_class = lookup_chorus_class.call('pg_table')
pg_view_class = lookup_chorus_class.call('pg_view')
pg_schema_class = lookup_chorus_class.call('pg_schema')
hdfs_dataset_class = lookup_chorus_class.call('hdfs_dataset')
jdbc_dataset_class = lookup_chorus_class.call('jdbc_dataset')
tag_class = lookup_chorus_class.call('tag')
schema_class = lookup_chorus_class.call('schema')
task_class = lookup_chorus_class.call('task')
insight_class = lookup_chorus_class.call('insight')
upload_class = lookup_chorus_class.call('upload')
# Link every workspace-scoped class to the Workspace class as its parent.
# BUG FIX: the original called update_attributes with TWO hashes, e.g.
#   update_attributes({:parent_class_name => ...}, {:parent_class_id => ...})
# update_attributes takes a single attributes hash (the second argument is
# the mass-assignment options hash in Rails 3.2), so :parent_class_id was
# silently never persisted. Both attributes now go in one hash.
[job_class, milestone_class, membership_class, workfile_class, workflow_class,
 activity_class, event_class, note_class, comment_class, chorus_view_class,
 sandbox_class, csv_file_class, dataset_class, associated_dataset_class,
 import_class].each do |child_class|
  child_class.update_attributes(:parent_class_name => 'workspace'.camelize,
                                :parent_class_id => workspace_class.id)
end
puts ''
puts '---- Adding Operations ----'
# Ordered operation names for each class. An operation's position in its
# list is stored as its :sequence, which later doubles as the bit index
# used when building permission masks.
class_operations = {
  'Events::Base' => %w(create show update destroy create_comment_on create_attachment_on),
  'Role' => %w(create show update destroy manage_application_roles manage_workspace_roles),
  'ChorusScope' => %w(create show update destroy manage_scopes),
  'User' => %w(create show update destroy change_password edit_dashboard manage_notifications manage_comments manage_notes manage_insights manage_data_source_credentials ldap),
  'Account' => %w(create show update destroy change_password lock unlock),
  'Group' => %w(create show update destroy),
  'Workspace' => %w(create show update destroy admin create_workflow edit_settings add_members delete_members add_to_scope remove_from_scope add_sandbox delete_sandbox change_status add_data remove_data explore_data transform_data download_data),
  'DataSource' => %w(create show update destroy add_credentials edit_credentials delete_credentials add_data remove_data explore_data download_data),
  'Note' => %w(create show update destroy create_attachment_on demote_from_insight),
  'Schema' => %w(create show update destroy),
  'Sandbox' => %w(create show update destroy add_to_workspace delete_from_workspace),
  'Comment' => %w(create show update destroy promote_to_insight),
  'Workfile' => %w(create show update destroy create_workflow run_workflow),
  'Workflow' => %w(create show update destroy run stop open),
  'Job' => %w(create show update destroy run stop),
  'Task' => %w(create show update destroy run stop),
  'Milestone' => %w(create show update destroy complete restart),
  'Tag' => %w(create show update destroy apply remove),
  'Upload' => %w(create show update destroy),
  'Import' => %w(create show update destroy),
  'Notification' => %w(create show update destroy),
  'CsvFile' => %w(create show update destroy)
}
class_operations.each do |klass_name, op_names|
  klass = ChorusClass.find_by_name(klass_name)
  op_names.each_with_index do |op_name, position|
    klass.operations << Operation.create(:name => op_name, :sequence => position)
  end
end
puts ''
puts '=================== Adding permissions to Roles ======================'
# Per-role map of class name => operation names that role may perform.
# Each list is later folded into an integer bitmask (see
# create_permission_mask_for) against the operation ordering seeded in
# class_operations above.
role_permissions = {
# NOTE(review): the 'Admin' entry below was already commented out in this
# seed; it is kept as a reference for the full operation set.
# 'Admin' => {
# 'Events::Base' => %w(create show update destroy create_comment_on create_attachment_on),
# 'ChorusScope' => %w(create show update destroy manage_scopes),
# 'Role' => %w(create show update destroy manage_application_roles manage_workspace_roles),
# 'User' => %w(create show update destroy change_password edit_dashboard manage_notifications manage_comments manage_notes manage_insights manage_data_source_credentials ldap),
# 'Account' => %w(create show update destroy change_password lock unlock),
# 'Group' => %w(create show update destroy),
# 'Workspace' => %w(create show update destroy admin create_workflow edit_settings add_members delete_members add_to_scope remove_from_scope add_sandbox delete_sandbox change_status add_data remove_data explore_data transform_data download_data),
# 'DataSource' => %w(create show update destroy add_credentials edit_credentials delete_credentials add_data remove_data explore_data download_data),
# 'Note' => %w(create show update destroy create_attachment_on demote_from_insight),
# 'Schema' => %w(create show update destroy),
# 'Sandbox' => %w(create show update destroy add_to_workspace delete_from_workspace),
# 'Comment' => %w(create show update destroy promote_to_insight),
# 'Workfile' => %w(create show update destroy create_workflow run_workflow),
# 'Workflow' => %w(create show update destroy run stop open),
# 'Job' => %w(create show update destroy run stop),
# 'Task' => %w(create show update destroy run stop),
# 'Milestone' => %w(create show update destroy complete restart),
# 'Tag' => %w(create show update destroy apply remove),
# 'Upload' => %w(create show update destroy),
# 'Import' => %w(create show update destroy),
# 'Notification' => %w(create show update destroy),
# 'CsvFile' => %w(create show update destroy)
#
# },
# Object owners: near-full control over the objects they own.
'Owner' => {
'Events::Base' => %w(create show update destroy create_comment_on create_attachment_on),
'ChorusScope' => %w(create show update destroy manage_scopes),
'Role' => %w(create show update destroy manage_application_roles manage_workspace_roles),
'User' => %w(show change_password edit_dashboard manage_notifications manage_comments manage_notes manage_insights manage_data_source_credentials),
'Account' => %w(create show update destroy change_password lock unlock),
'Group' => %w(show update destroy create),
'Workspace' => %w(show update destroy admin create edit_settings add_members delete_members add_to_scope remove_from_scope change_status explore_data transform_data download_data),
'DataSource' => %w(show update destroy create add_credentials edit_credentials delete_credentials add_data remove_data explore_data download_data),
'Note' => %w(show update destroy create),
'Schema' => %w(show update destroy create),
'Sandbox' => %w(show update destroy create add_to_workspace delete_from_workspace),
'Comment' => %w(show update destroy create promote_to_insight),
'Workfile' => %w(show update destroy create create_workflow run_workflow ),
'Workflow' => %w(create show update destroy run stop open),
'Job' => %w(show update destroy create run stop),
'Task' => %w(show update destroy create run stop),
'Milestone' => %w(show update destroy create complete restart),
'Tag' => %w(show update destroy create apply remove),
'Upload' => %w(create show update destroy),
'Import' => %w(create show update destroy),
'Notification' => %w(create show update destroy),
'CsvFile' => %w(create show update destroy)
},
'Developer' => {
'Events::Base' => %w(create show update destroy create_comment_on create_attachment_on),
'ChorusScope' => %w(create show update destroy manage_scopes),
'Role' => %w(create show update destroy manage_application_roles manage_workspace_roles),
'User' => %w(show change_password edit_dashboard manage_notifications manage_comments manage_notes manage_insights manage_data_source_credentials),
'Account' => %w(create show update destroy change_password lock unlock),
'Group' => %w(show update destroy create),
'Workspace' => %w(show update destroy admin create edit_settings add_members delete_members add_to_scope remove_from_scope change_status explore_data transform_data download_data),
'Workflow' => %w(create show update destroy run stop open),
'DataSource' => %w(show update destroy create add_credentials edit_credentials delete_credentials add_data remove_data explore_data download_data),
'Note' => %w(show update destroy create),
'Schema' => %w(show update destroy create),
'Sandbox' => %w(show update destroy create add_to_workspace delete_from_workspace),
'Comment' => %w(show update destroy create promote_to_insight),
'Workfile' => %w(show update destroy create create_workflow run_workflow ),
'Workflow' => %w(create show update destroy run stop open),
'Job' => %w(show update destroy create run stop),
'Task' => %w(show update destroy create run stop),
'Milestone' => %w(show update destroy create complete restart),
'Tag' => %w(show update destroy create apply remove),
'Upload' => %w(create show update destroy),
'Import' => %w(create show update destroy),
'Notification' => %w(create show update destroy),
'CsvFile' => %w(create show update destroy)
},
# Collaborators: read/comment access only; empty lists grant nothing.
'Collaborator' => {
'Events::Base' => %w(create_comment_on create_attachment_on),
'ChorusScope' => %w(),
'Role' => %w(),
'User' => %w(),
'Account' => %w(),
'Group' => %w(),
'Workspace' => %w(),
'DataSource' => %w(show),
'Note' => %w(show create),
'Schema' => %w(),
'Sandbox' => %w(),
'Comment' => %w(show create promote_to_insight),
'Workfile' => %w(),
'Workflow' => %w(),
'Job' => %w(),
'Task' => %w(),
'Milestone' => %w(),
'Tag' => %w(show create apply remove),
'Upload' => %w(),
'Import' => %w(),
'Notification' => %w(),
'CsvFile' => %w()
},
'SiteAdministrator' => {
'Events::Base' => %w(create show update destroy create_comment_on create_attachment_on),
'ChorusScope' => %w(create show update destroy manage_scopes),
'Role' => %w(create create update destroy manage_application_roles manage_workspace_roles),
'User' => %w(show update destroy create change_password edit_dashboard manage_notifications manage_comments manage_notes manage_insights manage_data_source_credentials),
'Account' => %w(create show update destroy change_password lock unlock),
'Group' => %w(show update destroy create),
'Workspace' => %w(show update destroy admin create edit_settings add_members delete_members add_to_scope remove_from_scope change_status explore_data transform_data download_data),
'DataSource' => %w(show update destroy create add_credentials edit_credentials delete_credentials add_data remove_data explore_data download_data),
'Note' => %w(show update destroy create),
'Schema' => %w(show update destroy create),
'Sandbox' => %w(show update destroy create add_to_workspace delete_from_workspace),
'Comment' => %w(show update destroy create promote_to_insight),
'Workfile' => %w(show update destroy create create_workflow run_workflow),
'Workflow' => %w(create show update destroy run stop open),
'Job' => %w(show update destroy create run stop),
'Task' => %w(show update destroy create run stop),
'Milestone' => %w(show update destroy create complete restart),
'Tag' => %w(show update destroy create apply remove),
'Upload' => %w(create show update destroy),
'Import' => %w(create show update destroy),
'Notification' => %w(create show update destroy),
'CsvFile' => %w(create show update destroy)
},
'ApplicationAdministrator' => {
'Events::Base' => %w(create show update destroy create_comment_on create_attachment_on),
'ChorusScope' => %w(create show update destroy manage_scopes),
'Role' => %w(create create update destroy manage_application_roles manage_workspace_roles),
'User' => %w(show update destroy create change_password edit_dashboard manage_notifications manage_comments manage_notes manage_insights manage_data_source_credentials),
'Account' => %w(create show update destroy change_password lock unlock),
'Group' => %w(show update destroy create),
'Workspace' => %w(show update destroy admin create edit_settings add_members delete_members add_to_scope remove_from_scope change_status explore_data transform_data download_data),
'DataSource' => %w(show update destroy create add_credentials edit_credentials delete_credentials add_data remove_data explore_data download_data),
'Note' => %w(show update destroy create),
'Schema' => %w(show update destroy create),
'Sandbox' => %w(show update destroy create add_to_workspace delete_from_workspace),
'Comment' => %w(show update destroy create promote_to_insight),
'Workfile' => %w(show update destroy create run_workflow ),
'Workflow' => %w(create show update destroy run stop open),
'Job' => %w(show update destroy create run stop),
'Task' => %w(show update destroy create run stop),
'Milestone' => %w(show update destroy create complete restart),
'Tag' => %w(show update destroy create apply remove),
'Upload' => %w(create show update destroy),
'Import' => %w(create show update destroy),
'Notification' => %w(create show update destroy),
'CsvFile' => %w(create show update destroy)
},
# Application managers: group/workspace administration without user or
# credential management.
'ApplicationManager' => {
'Events::Base' => %w(create_comment_on create_attachment_on),
'ChorusScope' => %w(),
'Role' => %w(manage_workspace_roles),
'User' => %w(),
'Account' => %w(),
'Group' => %w(show update destroy create),
'Workspace' => %w(show update destroy admin create edit_settings add_members delete_members add_to_scope remove_from_scope change_status explore_data transform_data download_data),
'DataSource' => %w(show update destroy create add_data remove_data explore_data download_data),
'Note' => %w(show update destroy create),
'Schema' => %w(show),
'Sandbox' => %w(show),
'Comment' => %w(show update destroy create promote_to_insight),
'Workfile' => %w(),
'Workflow' => %w(),
'Job' => %w(),
'Task' => %w(),
'Milestone' => %w(),
'Tag' => %w(show update destroy create apply remove),
'Upload' => %w(create show update destroy),
'Import' => %w(),
'Notification' => %w(create show update destroy),
'CsvFile' => %w(create show update destroy)
},
# Workflow developers: limited to workfile/workflow creation and execution.
'WorkflowDeveloper' => {
'Events::Base' => %w(),
'ChorusScope' => %w(),
'Role' => %w(),
'User' => %w(),
'Account' => %w(),
'Group' => %w(),
'Workspace' => %w(),
'DataSource' => %w(),
'Note' => %w(),
'Schema' => %w(),
'Sandbox' => %w(),
'Comment' => %w(),
'Workfile' => %w(create create_workflow run_workflow),
'Workflow' => %w(create show update destroy run stop open),
'Job' => %w(),
'Task' => %w(),
'Milestone' => %w(),
'Tag' => %w(),
'Upload' => %w(),
'Import' => %w(),
'Notification' => %w(),
'CsvFile' => %w(create show update destroy)
},
# Project managers: manage a workspace's membership, content and schedule.
'ProjectManager' => {
'Events::Base' => %w(show create create_comment_on create_attachment_on),
'ChorusScope' => %w(),
'Role' => %w(),
'User' => %w(),
'Account' => %w(),
'Group' => %w(),
'Workspace' => %w(show update create edit_settings add_members delete_members add_to_scope remove_from_scope change_status explore_data transform_data download_data),
'DataSource' => %w(),
'Note' => %w(show update destroy create),
'Schema' => %w(),
'Sandbox' => %w(),
'Comment' => %w(show update destroy create promote_to_insight),
'Workfile' => %w(show update destroy create run_workflow),
'Workflow' => %w(create show update destroy run stop open),
'Job' => %w(show update destroy create run stop),
'Task' => %w(show update destroy create run stop),
'Milestone' => %w(show update destroy create complete restart),
'Tag' => %w(show update destroy create apply remove),
'Upload' => %w(create show update destroy),
'Import' => %w(create show update destroy),
'Notification' => %w(create show update destroy),
'CsvFile' => %w(create show update destroy)
},
# Contributors: add content and run jobs/tasks inside a workspace.
'Contributor' => {
'Events::Base' => %w(show create create_comment_on create_attachment_on),
'ChorusScope' => %w(),
'Role' => %w(),
'User' => %w(),
'Account' => %w(),
'Group' => %w(),
'Workspace' => %w(show explore_data transform_data download_data),
'DataSource' => %w(),
'Note' => %w(show create),
'Schema' => %w(),
'Sandbox' => %w(),
'Comment' => %w(show create promote_to_insight),
'Workfile' => %w(show create),
'Workflow' => %w(),
'Job' => %w(show update destroy create run stop),
'Task' => %w(show update destroy create run stop),
'Milestone' => %w(),
'Tag' => %w(show update destroy create apply remove),
'Upload' => %w(create show update destroy),
'Import' => %w(create show update destroy),
'Notification' => %w(create show update destroy),
'CsvFile' => %w(create show update destroy)
},
# Project developers: contributor grants plus data-source and workflow use.
'ProjectDeveloper' => {
'Events::Base' => %w(show create create_comment_on create_attachment_on),
'ChorusScope' => %w(),
'Role' => %w(),
'User' => %w(),
'Account' => %w(),
'Group' => %w(),
'Workspace' => %w(show create explore_data transform_data download_data),
'DataSource' => %w(show explore_data download_data),
'Note' => %w(show create),
'Schema' => %w(),
'Sandbox' => %w(),
'Comment' => %w(show create promote_to_insight),
'Workfile' => %w(show create),
'Workflow' => %w(create show update),
'Job' => %w(show update destroy create run stop),
'Task' => %w(show update destroy create run stop),
'Milestone' => %w(),
'Tag' => %w(show update destroy create apply remove),
'Upload' => %w(create show update destroy),
'Import' => %w(create show update destroy),
'Notification' => %w(create show update destroy),
'CsvFile' => %w(create show update destroy)
}
}
# Given an array of permission names and the ordered operation symbols for
# a class, returns an integer bitmask with one bit set per granted
# permission; each bit's position equals the operation's index in
# +operations+. Returns 0 for a nil permission list.
#
# BUG FIX: previously an unknown permission only printed a warning and then
# executed (1 << nil), raising TypeError; unknown permissions are now
# reported and skipped.
def create_permission_mask_for(permissions, operations)
  bits = 0
  return bits if permissions.nil?
  permissions.each do |permission|
    index = operations.index(permission.to_sym)
    if index.nil?
      puts "#{permission} operation not found"
      next
    end
    bits |= (1 << index)
  end
  bits
end
# For every role, create one Permission row per class, encoding the
# granted operation list as a bitmask.
role_permissions.each do |role_name, permissions_hash|
  role = Role.find_by_name(role_name)
  #puts "---- Adding permissions for #{role_name} role ----"
  permissions_hash.each do |class_name, permission_names|
    chorus_class = ChorusClass.find_by_name(class_name)
    puts "---- Adding permissions for #{role_name} role and #{class_name} ----"
    # NOTE(review): create_permission_mask_for resolves bits via
    # operations.index(permission.to_sym), so class_operations is assumed
    # to return an ordered array of symbols -- TODO confirm against the
    # ChorusClass model (the Operation rows above were created with string
    # names and a :sequence column).
    role.permissions << Permission.create(:role_id => role.id,
                                          :chorus_class_id => chorus_class.id,
                                          :permissions_mask => create_permission_mask_for(permission_names, chorus_class.class_operations))
  end
end
puts ''
puts "===================== Migrating users to new roles =========================="
puts ''
# Map the legacy boolean flags (user.admin / user.developer) onto role
# records; every user also receives the collaborator role. Grants are
# idempotent -- a role is only appended if the user does not have it yet.
User.all.each do |user|
  grant = lambda do |role|
    user.roles << role unless user.roles.include?(role)
  end
  grant.call(site_admin_role) if user.admin
  if user.developer
    grant.call(project_developer_role)
    grant.call(workflow_developer_role)
  end
  grant.call(collaborator_role)
end
puts ''
puts "===================== Adding Chorus Object =========================="
puts ''
puts '--- Adding Users and children objects ----'

# Returns the ChorusClass row named +name+, creating it on first sight.
# (Replaces the repeated "if find_by_name(...) == nil then create" blocks.)
def ensure_chorus_class(name)
  ChorusClass.find_by_name(name) || ChorusClass.create(:name => name)
end

# Registers +child+ as a ChorusObject owned by +owner+ and parented to
# +parent+, resolving both class ids through ensure_chorus_class.
def create_child_chorus_object(child, owner, parent)
  child_class = ensure_chorus_class(child.class.name)
  ChorusObject.create(:chorus_class_id => child_class.id,
                      :instance_id => child.id,
                      :owner_id => owner.id,
                      :parent_class_name => parent.class.name,
                      :parent_class_id => ensure_chorus_class(parent.class.name).id,
                      :parent_id => parent.id)
end

# User associations whose members each get a child ChorusObject. Order
# matches the original hand-unrolled loops.
USER_CHILD_ASSOCIATIONS = %w(
  gpdb_data_sources oracle_data_sources jdbc_data_sources pg_data_sources
  hdfs_data_sources gnip_data_sources data_source_accounts memberships
  owned_jobs activities events notifications open_workfile_events
)

User.all.each do |user|
  user_chorus_class = ensure_chorus_class(user.class.name)
  user_object = ChorusObject.create(:chorus_class_id => user_chorus_class.id, :instance_id => user.id)
  user_object.chorus_object_roles << ChorusObjectRole.create(:chorus_object_id => user_object.id, :user_id => user.id, :role_id => user_role.id)
  # add all users to default scope (application realm) by adding user to the default group
  user.groups << default_group
  # The original repeated an identical create-class-then-create-object loop
  # for each of these thirteen associations; they are now driven by name.
  USER_CHILD_ASSOCIATIONS.each do |association|
    user.send(association).each do |child|
      create_child_chorus_object(child, user, user)
    end
  end
end
puts ''
puts '--- Adding Workspace and children objects ----'
Workspace.all.each do |workspace|
if ChorusClass.find_by_name(workspace.class.name) == nil
ChorusClass.create(:name => workspace.class.name)
end
# Add owner as workspace role
workspace_object = ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(workspace.class.name).id, :instance_id => workspace.id, :owner_id => workspace.owner.id)
workspace_object.chorus_object_roles << ChorusObjectRole.create(:chorus_object_id => workspace_object.id, :user_id => workspace.owner.id, :role_id => owner_role.id)
#workspace_object_role = ChorusObjectRole.create(:chorus_object_id => workspace_object.id, :user_id => workspace.owner.id, :role_id => owner_role.id)
#workspace.owner.object_roles << workspace_object_role
#children = %w(jobs milestones memberships workfiles activities events owned_notes comments chorus_views csv_files associated_datasets source_datasets all_imports imports tags)
workspace.jobs.each do |job|
if ChorusClass.find_by_name(job.class.name) == nil
ChorusClass.create(:name => job.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(job.class.name).id, :instance_id => job.id, :owner_id => workspace.owner.id, :parent_class_name => workspace.class.name, :parent_class_id => ChorusClass.find_by_name(workspace.class.name).id, :parent_id => workspace.id)
end
workspace.milestones.each do |milestone|
if ChorusClass.find_by_name(milestone.class.name) == nil
ChorusClass.create(:name => milestone.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(milestone.class.name).id, :instance_id => milestone.id, :owner_id => workspace.owner.id, :parent_class_name => workspace.class.name, :parent_class_id => ChorusClass.find_by_name(workspace.class.name).id, :parent_id => workspace.id)
end
workspace.memberships.each do |membership|
if ChorusClass.find_by_name(membership.class.name) == nil
ChorusClass.create(:name => membership.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(membership.class.name).id, :instance_id => membership.id, :owner_id => workspace.owner.id, :parent_class_name => workspace.class.name, :parent_class_id => ChorusClass.find_by_name(workspace.class.name).id, :parent_id => workspace.id)
end
workspace.workfiles.each do |workfile|
if ChorusClass.find_by_name(workfile.class.name) == nil
ChorusClass.create(:name => workfile.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(workfile.class.name).id, :instance_id => workfile.id, :owner_id => workspace.owner.id, :parent_class_name => workspace.class.name, :parent_class_id => ChorusClass.find_by_name(workspace.class.name).id, :parent_id => workspace.id)
workfile.activities.each do |activity|
if ChorusClass.find_by_name(activity.class.name) == nil
ChorusClass.create(:name => activity.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(activity.class.name).id, :instance_id => activity.id, :owner_id => workspace.owner.id, :parent_class_name => workfile.class.name, :parent_class_id => ChorusClass.find_by_name(workfile.class.name).id, :parent_id => workfile.id)
end
workfile.events.each do |event|
if ChorusClass.find_by_name(event.class.name) == nil
ChorusClass.create(:name => event.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(event.class.name).id, :instance_id => event.id, :owner_id => workspace.owner.id, :parent_class_name => workfile.class.name, :parent_class_id => ChorusClass.find_by_name(workfile.class.name).id, :parent_id => workfile.id)
end
workfile.open_workfile_events.each do |event|
if ChorusClass.find_by_name(event.class.name) == nil
ChorusClass.create(:name => event.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(event.class.name).id, :instance_id => event.id, :owner_id => workspace.owner.id, :parent_class_name => workfile.class.name, :parent_class_id => ChorusClass.find_by_name(workfile.class.name).id, :parent_id => workfile.id)
end
workfile.comments.each do |comment|
if ChorusClass.find_by_name(comment.class.name) == nil
ChorusClass.create(:name => comment.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(comment.class.name).id, :instance_id => comment.id, :owner_id => workfile.owner.id, :parent_class_name => workfile.class.name, :parent_class_id => ChorusClass.find_by_name(workfile.class.name).id, :parent_id => workfile.id)
end
end
workspace.activities.each do |activity|
if ChorusClass.find_by_name(activity.class.name) == nil
ChorusClass.create(:name => activity.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(activity.class.name).id, :instance_id => activity.id, :owner_id => workspace.owner.id, :parent_class_name => workspace.class.name, :parent_class_id => ChorusClass.find_by_name(workspace.class.name).id, :parent_id => workspace.id)
end
#TODO: RPG. Don't know how to deal with events of differnt types in permissions framework. For now adding them as sub classes of (Events::Base)
workspace.events.each do |event|
if ChorusClass.find_by_name(event.class.name) == nil
ChorusClass.create(:name => event.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(event.class.name).id, :instance_id => event.id, :owner_id => workspace.owner.id, :parent_class_name => workspace.class.name, :parent_class_id => ChorusClass.find_by_name(workspace.class.name).id, :parent_id => workspace.id)
end
workspace.owned_notes.each do |note|
if ChorusClass.find_by_name(note.class.name) == nil
ChorusClass.create(:name => note.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(note.class.name).id, :instance_id => note.id, :owner_id => workspace.owner.id, :parent_class_name => workspace.class.name, :parent_class_id => ChorusClass.find_by_name(workspace.class.name).id, :parent_id => workspace.id)
end
workspace.comments.each do |comment|
if ChorusClass.find_by_name(comment.class.name) == nil
ChorusClass.create(:name => comment.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(comment.class.name).id, :instance_id => comment.id, :owner_id => workspace.owner.id, :parent_class_name => workspace.class.name, :parent_class_id => ChorusClass.find_by_name(workspace.class.name).id, :parent_id => workspace.id)
end
workspace.chorus_views.each do |view|
if ChorusClass.find_by_name(view.class.name) == nil
ChorusClass.create(:name => view.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(view.class.name).id, :instance_id => view.id, :owner_id => workspace.owner.id, :parent_class_name => workspace.class.name, :parent_class_id => ChorusClass.find_by_name(workspace.class.name).id, :parent_id => workspace.id)
end
workspace.csv_files.each do |file|
if ChorusClass.find_by_name(file.class.name) == nil
ChorusClass.create(:name => file.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(file.class.name).id, :instance_id => file.id, :owner_id => workspace.owner.id, :parent_class_name => workspace.class.name, :parent_class_id => ChorusClass.find_by_name(workspace.class.name).id, :parent_id => workspace.id)
end
workspace.associated_datasets.each do |dataset|
if ChorusClass.find_by_name(dataset.class.name) == nil
ChorusClass.create(:name => dataset.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(dataset.class.name).id, :instance_id => dataset.id, :owner_id => workspace.owner.id, :parent_class_name => workspace.class.name, :parent_class_id => ChorusClass.find_by_name(workspace.class.name).id, :parent_id => workspace.id)
end
workspace.source_datasets.each do |dataset|
if ChorusClass.find_by_name(dataset.class.name) == nil
ChorusClass.create(:name => dataset.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(dataset.class.name).id, :instance_id => dataset.id, :owner_id => workspace.owner.id, :parent_class_name => workspace.class.name, :parent_class_id => ChorusClass.find_by_name(workspace.class.name).id, :parent_id => workspace.id)
end
workspace.all_imports.each do |import|
if ChorusClass.find_by_name(import.class.name) == nil
ChorusClass.create(:name => import.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(import.class.name).id, :instance_id => import.id, :owner_id => workspace.owner.id, :parent_class_name => workspace.class.name, :parent_class_id => ChorusClass.find_by_name(workspace.class.name).id, :parent_id => workspace.id)
end
workspace.imports.each do |import|
if ChorusClass.find_by_name(import.class.name) == nil
ChorusClass.create(:name => import.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(import.class.name).id, :instance_id => import.id, :owner_id => workspace.owner.id, :parent_class_name => workspace.class.name, :parent_class_id => ChorusClass.find_by_name(workspace.class.name).id, :parent_id => workspace.id)
end
workspace.tags.each do |tag|
if ChorusClass.find_by_name(tag.class.name) == nil
ChorusClass.create(:name => tag.class.name)
end
ChorusObject.create(:chorus_class_id => ChorusClass.find_by_name(tag.class.name).id, :instance_id => tag.id, :owner_id => workspace.owner.id, :parent_class_name => workspace.class.name, :parent_class_id => ChorusClass.find_by_name(workspace.class.name).id, :parent_id => workspace.id)
end
end
puts ''
puts '--- Adding Data Sources and it children objects ----'
# One ChorusObject per data source, owned by the data source's owner.
DataSource.all.each do |ds|
  ChorusObject.create(
    :chorus_class_id => datasource_class.id,
    :instance_id => ds.id,
    :owner_id => ds.owner.id
  )
end
puts ''
puts '--- Adding Data Sets and it children objects ----'
# Each dataset row gets a ChorusObject. When the dataset belongs to a
# workspace, the object is owned by the workspace owner and parented to the
# workspace. The original repeated the same loop body for eleven models;
# the workspace-scoped ones are now driven from a table (iteration order is
# preserved: Ruby hashes keep insertion order).
workspace_scoped_datasets = {
  ChorusView => chorus_view_class,
  GpdbView => gpdb_view_class,
  GpdbTable => gpdb_table_class,
  PgTable => pg_table_class,
  PgView => pg_view_class,
  HdfsDataset => hdfs_dataset_class,
  GpdbDataset => gpdb_dataset_class,
  JdbcDataset => jdbc_dataset_class
}
workspace_scoped_datasets.each do |model, chorus_class|
  model.all.each do |dataset|
    co = ChorusObject.create(:chorus_class_id => chorus_class.id, :instance_id => dataset.id)
    if dataset.workspace
      co.update_attributes(:owner_id => dataset.workspace.owner.id, :parent_class_name => workspace_class.name, :parent_class_id => workspace_class.id, :parent_id => dataset.workspace.id)
    end
  end
end
# HDFS entries are parented to their HDFS data source instead of a workspace.
HdfsEntry.all.each do |entry|
  co = ChorusObject.create(:chorus_class_id => hdfs_entry_class.id, :instance_id => entry.id)
  if entry.hdfs_data_source
    co.update_attributes(:owner_id => entry.hdfs_data_source.owner.id, :parent_class_name => hdfs_data_source_class.name, :parent_class_id => hdfs_data_source_class.id, :parent_id => entry.hdfs_data_source.id)
  end
end
# Schemas are parented to their parent object (e.g. a database).
# NOTE(review): :owner_id is set to the parent's id, not the parent owner's
# id — looks suspicious, but preserved as-is; confirm intent before changing.
[[GpdbSchema, gpdb_schema_class], [PgSchema, pg_schema_class]].each do |model, chorus_class|
  model.all.each do |schema|
    co = ChorusObject.create(:chorus_class_id => chorus_class.id, :instance_id => schema.id)
    if schema.parent
      co.update_attributes(:owner_id => schema.parent.id, :parent_class_name => schema.parent.class.name, :parent_id => schema.parent.id)
    end
  end
end
puts ''
puts '--- Assign application_realm scope to all objects ---'
# Stamp every ChorusObject created above with the application-wide scope.
# NOTE(review): `application_realm` appears to be assigned further down in
# this script (ChorusScope.create(:name => 'application_realm')) — confirm
# it is actually in scope by the time this loop runs.
ChorusObject.all.each do |chorus_object|
  chorus_object.chorus_scope = application_realm
  chorus_object.save!
end
# Added legacy admin role back for backward compatibility
# Seed roles groups and permissions
# roles
puts ''
puts '---- Adding Roles ----'
# Role names are stored camelized ('site_administrator' -> 'SiteAdministrator');
# the role_permissions hash below is keyed by these camelized names.
admin_role = Role.create(:name => 'admin'.camelize)
owner_role = Role.create(:name => 'owner'.camelize)
user_role = Role.create(:name => 'user'.camelize)
developer_role = Role.create(:name => 'developer'.camelize)
collaborator_role = Role.create(:name => 'collaborator'.camelize)
site_admin_role = Role.create(:name => 'site_administrator'.camelize)
app_admin_role = Role.create(:name => 'application_administrator'.camelize)
app_manager_role = Role.create(:name => 'application_manager'.camelize)
workflow_developer_role = Role.create(:name => 'workflow_developer'.camelize)
project_manager_role = Role.create(:name => 'project_manager'.camelize)
project_developer_role = Role.create(:name => 'project_developer'.camelize)
contributor_role = Role.create(:name => 'contributor'.camelize)
data_scientist_role = Role.create(:name => 'data_scientist'.camelize)
puts ''
puts '---- Adding permissions ----'
# Give the built-in 'chorusadmin' account (when present) both the new
# site-administrator role and the legacy admin role.
chorusadmin = User.find_by_username("chorusadmin")
site_admin_role.users << chorusadmin if chorusadmin
admin_role.users << chorusadmin if chorusadmin
# Groups
puts '---- Adding Default Group ----'
default_group = Group.create(:name => 'default_group')
# Scope
puts ''
puts '---- Adding application_realm as Default Scope ----'
application_realm = ChorusScope.create(:name => 'application_realm')
# add application_realm to default group
# NOTE(review): this association is assigned but never saved explicitly;
# confirm the subsequent `role.groups <<` pushes persist it, or add a save.
default_group.chorus_scope = application_realm
site_admin_role.groups << default_group
admin_role.groups << default_group
#Role.all.each do |role|
#  role.groups << default_group
#end
# permissions
puts ''
puts '---- Adding Chorus object classes ----'
# Register one ChorusClass row per model participating in the permissions
# framework. Names are stored camelized ('data_source' -> 'DataSource').
# Bug fix: 'imoort_template' was a typo and would have registered a class
# named 'ImoortTemplate'; corrected to 'import_template'.
ChorusClass.create(
  [
    {:name => 'activity'.camelize},
    {:name => 'account'.camelize},
    {:name => 'alpine_workfile'.camelize},
    {:name => 'associated_dataset'.camelize},
    {:name => 'chorus_scope'.camelize},
    {:name => 'chorus_view'.camelize},
    {:name => 'chorus_workfile'.camelize},
    {:name => 'comment'.camelize},
    {:name => 'csv_file'.camelize},
    {:name => 'csv_import'.camelize},
    {:name => 'dashboard'.camelize},
    {:name => 'dashboard_config'.camelize},
    {:name => 'dashboard_item'.camelize},
    {:name => 'data_source'.camelize},
    {:name => 'data_source_account'.camelize},
    {:name => 'database'.camelize},
    {:name => 'dataset'.camelize},
    {:name => 'database_column'.camelize},
    {:name => 'datasets_note'.camelize},
    {:name => 'external_table'.camelize},
    {:name => 'gnip_data_source'.camelize},
    {:name => 'gnip_import'.camelize},
    {:name => 'gpdb_column_statistics'.camelize},
    {:name => 'gpdb_data_source'.camelize},
    {:name => 'gpdb_dataset'.camelize},
    {:name => 'gpdb_database'.camelize},
    {:name => 'gpdb_dataset_column'.camelize},
    {:name => 'gpdb_schema'.camelize},
    {:name => 'gpdb_table'.camelize},
    {:name => 'gpdb_view'.camelize},
    {:name => 'greenplum_sql_result'.camelize},
    {:name => 'group'.camelize},
    {:name => 'hdfs_data_source'.camelize},
    {:name => 'hdfs_dataset'.camelize},
    {:name => 'hdfs_dataset_statistics'.camelize},
    {:name => 'hdfs_entry'.camelize},
    {:name => 'hdfs_entry_statistics'.camelize},
    {:name => 'hdfs_file'.camelize},
    {:name => 'hdfs_import'.camelize},
    {:name => 'insight'.camelize},
    {:name => 'import'.camelize},
    {:name => 'import_source_data_task'.camelize},
    {:name => 'import_source_task_result'.camelize},
    {:name => 'import_template'.camelize},
    {:name => 'jdbc_data_source'.camelize},
    {:name => 'jdbc_dataset'.camelize},
    {:name => 'jdbc_dataset_column'.camelize},
    {:name => 'jdbc_hive_data_source'.camelize},
    {:name => 'jdbc_schema'.camelize},
    {:name => 'jdbc_sql_result'.camelize},
    {:name => 'jdbc_table'.camelize},
    {:name => 'jdbc_view'.camelize},
    {:name => 'job'.camelize},
    {:name => 'job_result'.camelize},
    {:name => 'job_task'.camelize},
    {:name => 'job_task_result'.camelize},
    {:name => 'ldap_config'.camelize},
    {:name => 'license'.camelize},
    {:name => 'linked_tableau_workfile'.camelize},
    {:name => 'membership'.camelize},
    {:name => 'milestone'.camelize},
    {:name => 'my_workspace_search'.camelize},
    {:name => 'note'.camelize},
    {:name => 'notes_workflow_result'.camelize},
    {:name => 'notes_workfile'.camelize},
    {:name => 'notification'.camelize},
    {:name => 'open_workfile_event'.camelize},
    {:name => 'operation'.camelize},
    {:name => 'oracle_data_source'.camelize},
    {:name => 'oracle_dataset'.camelize},
    {:name => 'oracle_dataset_column'.camelize},
    {:name => 'oracle_schema'.camelize},
    {:name => 'oracle_sql_result'.camelize},
    {:name => 'oracle_table'.camelize},
    {:name => 'oracle_view'.camelize},
    {:name => 'permission'.camelize},
    {:name => 'pg_data_source'.camelize},
    {:name => 'pg_database'.camelize},
    {:name => 'pg_dataset'.camelize},
    {:name => 'pg_dataset_column'.camelize},
    {:name => 'pg_schema'.camelize},
    {:name => 'pg_table'.camelize},
    {:name => 'pg_view'.camelize},
    {:name => 'relational_dataset'.camelize},
    {:name => 'role'.camelize},
    {:name => 'run_sql_workfile_task'.camelize},
    {:name => 'run_workflow_task'.camelize},
    {:name => 'run_workflow_task_result'.camelize},
    {:name => 'sandbox'.camelize},
    {:name => 'schema'.camelize},
    {:name => 'schema_function'.camelize},
    {:name => 'schema_import'.camelize},
    {:name => 'search'.camelize},
    {:name => 'session'.camelize},
    {:name => 'sql_result'.camelize},
    {:name => 'sql_value_parser'.camelize},
    {:name => 'system_status'.camelize},
    {:name => 'tableau_publisher'.camelize},
    {:name => 'tableau_workbook_publication'.camelize},
    {:name => 'tag'.camelize},
    {:name => 'tagging'.camelize},
    {:name => 'task'.camelize},
    {:name => 'type_ahead_search'.camelize},
    {:name => 'upload'.camelize},
    {:name => 'user'.camelize},
    {:name => 'visualization'.camelize},
    {:name => 'workfile'.camelize},
    {:name => 'workfile_draft'.camelize},
    {:name => 'workfile_execution_location'.camelize},
    {:name => 'workfile_version'.camelize},
    {:name => 'workflow'.camelize},
    {:name => 'workspace'.camelize},
    {:name => 'workspace_import'.camelize},
    {:name => 'workspace_search'.camelize},
  ]
)
#models/dashboard
# Dashboard widget types tracked by the permissions framework.
ChorusClass.create(
  [
    {:name => 'recent_workfiles'.camelize},
    {:name => 'site_snapshot'.camelize},
    {:name => 'workspace_activity'.camelize}
  ]
)
#models/events
# NOTE(review): only 'events::Base' camelizes into the namespaced
# 'Events::Base'; the remaining event names camelize without the 'Events::'
# prefix (e.g. 'ChorusViewChanged') — confirm that lookups elsewhere expect
# these un-namespaced names.
ChorusClass.create(
  [
    {:name => 'events::Base'.camelize},
    {:name => 'chorus_view_changed'.camelize},
    {:name => 'chorus_view_created'.camelize},
    {:name => 'credentials_invalid'.camelize},
    {:name => 'data_source_changed_name'.camelize},
    {:name => 'data_source_changed_owner'.camelize},
    {:name => 'data_source_created'.camelize},
    {:name => 'data_source_deleted'.camelize},
    {:name => 'file_import_created'.camelize},
    {:name => 'file_import_failed'.camelize},
    # TBD. Can these event types be handle in better way?
  ]
)
#model/visualization
# Visualization types. Bug fix: 'histograp' was a typo and would have
# registered a class named 'Histograp'; corrected to 'histogram'.
ChorusClass.create(
  [
    {:name => 'boxplot'.camelize},
    {:name => 'frequency'.camelize},
    {:name => 'heatmap'.camelize},
    {:name => 'histogram'.camelize},
    {:name => 'timeseries'.camelize}
  ]
)
# Look up the ChorusClass rows seeded above so the statements below can wire
# up parent relationships and build ChorusObjects.
# Fix: a duplicated `datasource_class` lookup (identical line appeared twice)
# was removed — pure redundancy, no behavior change.
role_class = ChorusClass.where(:name => 'role'.camelize).first
chorus_scope_class = ChorusClass.where(:name => 'chorus_scope'.camelize).first
workspace_class = ChorusClass.where(:name => 'workspace'.camelize).first
user_class = ChorusClass.where(:name => 'user'.camelize).first
account_class = ChorusClass.where(:name => 'account'.camelize).first
datasource_class = ChorusClass.where(:name => 'data_source'.camelize).first
group_class = ChorusClass.where(:name => 'group'.camelize).first
database_class = ChorusClass.where(:name => 'database'.camelize).first
job_class = ChorusClass.where(:name => 'job'.camelize).first
gpdb_view_class = ChorusClass.where(:name => 'gpdb_view'.camelize).first
gpdb_table_class = ChorusClass.where(:name => 'gpdb_table'.camelize).first
gpdb_dataset_class = ChorusClass.where(:name => 'gpdb_dataset'.camelize).first
gpdb_schema_class = ChorusClass.where(:name => 'gpdb_schema'.camelize).first
hdfs_entry_class = ChorusClass.where(:name => 'hdfs_entry'.camelize).first
hdfs_data_source_class = ChorusClass.where(:name => 'hdfs_data_source'.camelize).first
milestone_class = ChorusClass.where(:name => 'milestone'.camelize).first
membership_class = ChorusClass.where(:name => 'membership'.camelize).first
workfile_class = ChorusClass.where(:name => 'workfile'.camelize).first
workflow_class = ChorusClass.where(:name => 'workflow'.camelize).first
activity_class = ChorusClass.where(:name => 'activity'.camelize).first
event_class = ChorusClass.where(:name => 'events::Base'.camelize).first
note_class = ChorusClass.where(:name => 'note'.camelize).first
comment_class = ChorusClass.where(:name => 'comment'.camelize).first
chorus_view_class = ChorusClass.where(:name => 'chorus_view'.camelize).first
sandbox_class = ChorusClass.where(:name => 'sandbox'.camelize).first
csv_file_class = ChorusClass.where(:name => 'csv_file'.camelize).first
dataset_class = ChorusClass.where(:name => 'dataset'.camelize).first
associated_dataset_class = ChorusClass.where(:name => 'associated_dataset'.camelize).first
import_class = ChorusClass.where(:name => 'import'.camelize).first
pg_table_class = ChorusClass.where(:name => 'pg_table'.camelize).first
pg_view_class = ChorusClass.where(:name => 'pg_view'.camelize).first
pg_schema_class = ChorusClass.where(:name => 'pg_schema'.camelize).first
hdfs_dataset_class = ChorusClass.where(:name => 'hdfs_dataset'.camelize).first
jdbc_dataset_class = ChorusClass.where(:name => 'jdbc_dataset'.camelize).first
tag_class = ChorusClass.where(:name => 'tag'.camelize).first
schema_class = ChorusClass.where(:name => 'schema'.camelize).first
task_class = ChorusClass.where(:name => 'task'.camelize).first
insight_class = ChorusClass.where(:name => 'insight'.camelize).first
upload_class = ChorusClass.where(:name => 'upload'.camelize).first
# Wire each workspace-scoped class to Workspace in the class hierarchy.
# Bug fix: update_attributes takes a SINGLE attributes hash; the original
# passed two separate hashes to every call, which raises ArgumentError
# (wrong number of arguments) and also means :parent_class_id was never set.
[
  job_class, milestone_class, membership_class, workfile_class,
  workflow_class, activity_class, event_class, note_class, comment_class,
  chorus_view_class, sandbox_class, csv_file_class, dataset_class,
  associated_dataset_class, import_class
].each do |klass|
  klass.update_attributes(
    :parent_class_name => 'workspace'.camelize,
    :parent_class_id => workspace_class.id
  )
end
puts ''
puts '---- Adding Operations ----'
# Maps camelized class name -> ordered list of operation names. The position
# of each operation becomes its :sequence (and hence its bit index in
# permission masks), so the ORDER of these lists is significant.
class_operations = {
  'Events::Base' => %w(create show update destroy create_comment_on create_attachment_on),
  'Role' => %w(create show update destroy manage_application_roles manage_workspace_roles),
  'ChorusScope' => %w(create show update destroy manage_scopes),
  'User' => %w(create show update destroy change_password edit_dashboard manage_notifications manage_comments manage_notes manage_insights manage_data_source_credentials ldap),
  'Account' => %w(create show update destroy change_password lock unlock),
  'Group' => %w(create show update destroy),
  'Workspace' => %w(create show update destroy admin create_workflow edit_settings add_members delete_members add_to_scope remove_from_scope add_sandbox delete_sandbox change_status add_data remove_data explore_data transform_data download_data),
  'DataSource' => %w(create show update destroy add_credentials edit_credentials delete_credentials add_data remove_data explore_data download_data),
  'Note' => %w(create show update destroy create_attachment_on demote_from_insight),
  'Schema' => %w(create show update destroy),
  'Sandbox' => %w(create show update destroy add_to_workspace delete_from_workspace),
  'Comment' => %w(create show update destroy promote_to_insight),
  'Workfile' => %w(create show update destroy create_workflow run_workflow),
  'Workflow' => %w(create show update destroy run stop open),
  'Job' => %w(create show update destroy run stop),
  'Task' => %w(create show update destroy run stop),
  'Milestone' => %w(create show update destroy complete restart),
  'Tag' => %w(create show update destroy apply remove),
  'Upload' => %w(create show update destroy),
  'Import' => %w(create show update destroy),
  'Notification' => %w(create show update destroy),
  'CsvFile' => %w(create show update destroy)
}
# Register each class's operations; :sequence doubles as the bit index used
# by create_permission_mask_for below.
class_operations.each do |class_name, operations|
  chorus_class = ChorusClass.find_by_name(class_name)
  if chorus_class.nil?
    # Guard: the original raised NoMethodError if a class was not seeded.
    puts "#{class_name} chorus class not found; skipping its operations"
    next
  end
  operations.each_with_index do |operation, index|
    chorus_class.operations << Operation.create(:name => operation, :sequence => index)
  end
end
puts ''
puts '=================== Adding permissions to Roles ======================'
# Maps camelized role name -> { chorus class name -> granted operations }.
# Each operation list is converted to a bitmask against the class's
# operation order (see create_permission_mask_for below).
role_permissions = {
  'Admin' => {
    'Events::Base' => %w(create show update destroy create_comment_on create_attachment_on),
    'ChorusScope' => %w(create show update destroy manage_scopes),
    'Role' => %w(create show update destroy manage_application_roles manage_workspace_roles),
    'User' => %w(create show update destroy change_password edit_dashboard manage_notifications manage_comments manage_notes manage_insights manage_data_source_credentials ldap),
    'Account' => %w(create show update destroy change_password lock unlock),
    'Group' => %w(create show update destroy),
    'Workspace' => %w(create show update destroy admin create_workflow edit_settings add_members delete_members add_to_scope remove_from_scope add_sandbox delete_sandbox change_status add_data remove_data explore_data transform_data download_data),
    'DataSource' => %w(create show update destroy add_credentials edit_credentials delete_credentials add_data remove_data explore_data download_data),
    'Note' => %w(create show update destroy create_attachment_on demote_from_insight),
    'Schema' => %w(create show update destroy),
    'Sandbox' => %w(create show update destroy add_to_workspace delete_from_workspace),
    'Comment' => %w(create show update destroy promote_to_insight),
    'Workfile' => %w(create show update destroy create_workflow run_workflow),
    'Workflow' => %w(create show update destroy run stop open),
    'Job' => %w(create show update destroy run stop),
    'Task' => %w(create show update destroy run stop),
    'Milestone' => %w(create show update destroy complete restart),
    'Tag' => %w(create show update destroy apply remove),
    'Upload' => %w(create show update destroy),
    'Import' => %w(create show update destroy),
    'Notification' => %w(create show update destroy),
    'CsvFile' => %w(create show update destroy)
  },
  # Owner: full control of owned resources; limited User self-management.
  'Owner' => {
    'Events::Base' => %w(create show update destroy create_comment_on create_attachment_on),
    'ChorusScope' => %w(create show update destroy manage_scopes),
    'Role' => %w(create show update destroy manage_application_roles manage_workspace_roles),
    'User' => %w(show change_password edit_dashboard manage_notifications manage_comments manage_notes manage_insights manage_data_source_credentials),
    'Account' => %w(create show update destroy change_password lock unlock),
    'Group' => %w(show update destroy create),
    'Workspace' => %w(show update destroy admin create edit_settings add_members delete_members add_to_scope remove_from_scope change_status explore_data transform_data download_data),
    'DataSource' => %w(show update destroy create add_credentials edit_credentials delete_credentials add_data remove_data explore_data download_data),
    'Note' => %w(show update destroy create),
    'Schema' => %w(show update destroy create),
    'Sandbox' => %w(show update destroy create add_to_workspace delete_from_workspace),
    'Comment' => %w(show update destroy create promote_to_insight),
    'Workfile' => %w(show update destroy create create_workflow run_workflow ),
    'Workflow' => %w(create show update destroy run stop open),
    'Job' => %w(show update destroy create run stop),
    'Task' => %w(show update destroy create run stop),
    'Milestone' => %w(show update destroy create complete restart),
    'Tag' => %w(show update destroy create apply remove),
    'Upload' => %w(create show update destroy),
    'Import' => %w(create show update destroy),
    'Notification' => %w(create show update destroy),
    'CsvFile' => %w(create show update destroy)
  },
'Developer' => {
'Events::Base' => %w(create show update destroy create_comment_on create_attachment_on),
'ChorusScope' => %w(create show update destroy manage_scopes),
'Role' => %w(create show update destroy manage_application_roles manage_workspace_roles),
'User' => %w(show change_password edit_dashboard manage_notifications manage_comments manage_notes manage_insights manage_data_source_credentials),
'Account' => %w(create show update destroy change_password lock unlock),
'Group' => %w(show update destroy create),
'Workspace' => %w(show update destroy admin create edit_settings add_members delete_members add_to_scope remove_from_scope change_status explore_data transform_data download_data),
'Workflow' => %w(create show update destroy run stop open),
'DataSource' => %w(show update destroy create add_credentials edit_credentials delete_credentials add_data remove_data explore_data download_data),
'Note' => %w(show update destroy create),
'Schema' => %w(show update destroy create),
'Sandbox' => %w(show update destroy create add_to_workspace delete_from_workspace),
'Comment' => %w(show update destroy create promote_to_insight),
'Workfile' => %w(show update destroy create create_workflow run_workflow ),
'Workflow' => %w(create show update destroy run stop open),
'Job' => %w(show update destroy create run stop),
'Task' => %w(show update destroy create run stop),
'Milestone' => %w(show update destroy create complete restart),
'Tag' => %w(show update destroy create apply remove),
'Upload' => %w(create show update destroy),
'Import' => %w(create show update destroy),
'Notification' => %w(create show update destroy),
'CsvFile' => %w(create show update destroy)
},
  # Collaborator: read/annotate only — comments, notes, tags.
  'Collaborator' => {
    'Events::Base' => %w(create_comment_on create_attachment_on),
    'ChorusScope' => %w(),
    'Role' => %w(),
    'User' => %w(),
    'Account' => %w(),
    'Group' => %w(),
    'Workspace' => %w(),
    'DataSource' => %w(show),
    'Note' => %w(show create),
    'Schema' => %w(),
    'Sandbox' => %w(),
    'Comment' => %w(show create promote_to_insight),
    'Workfile' => %w(),
    'Workflow' => %w(),
    'Job' => %w(),
    'Task' => %w(),
    'Milestone' => %w(),
    'Tag' => %w(show create apply remove),
    'Upload' => %w(),
    'Import' => %w(),
    'Notification' => %w(),
    'CsvFile' => %w()
  },
'SiteAdministrator' => {
'Events::Base' => %w(create show update destroy create_comment_on create_attachment_on),
'ChorusScope' => %w(create show update destroy manage_scopes),
'Role' => %w(create create update destroy manage_application_roles manage_workspace_roles),
'User' => %w(show update destroy create change_password edit_dashboard manage_notifications manage_comments manage_notes manage_insights manage_data_source_credentials),
'Account' => %w(create show update destroy change_password lock unlock),
'Group' => %w(show update destroy create),
'Workspace' => %w(show update destroy admin create edit_settings add_members delete_members add_to_scope remove_from_scope change_status explore_data transform_data download_data),
'DataSource' => %w(show update destroy create add_credentials edit_credentials delete_credentials add_data remove_data explore_data download_data),
'Note' => %w(show update destroy create),
'Schema' => %w(show update destroy create),
'Sandbox' => %w(show update destroy create add_to_workspace delete_from_workspace),
'Comment' => %w(show update destroy create promote_to_insight),
'Workfile' => %w(show update destroy create create_workflow run_workflow),
'Workflow' => %w(create show update destroy run stop open),
'Job' => %w(show update destroy create run stop),
'Task' => %w(show update destroy create run stop),
'Milestone' => %w(show update destroy create complete restart),
'Tag' => %w(show update destroy create apply remove),
'Upload' => %w(create show update destroy),
'Import' => %w(create show update destroy),
'Notification' => %w(create show update destroy),
'CsvFile' => %w(create show update destroy)
},
'ApplicationAdministrator' => {
'Events::Base' => %w(create show update destroy create_comment_on create_attachment_on),
'ChorusScope' => %w(create show update destroy manage_scopes),
'Role' => %w(create create update destroy manage_application_roles manage_workspace_roles),
'User' => %w(show update destroy create change_password edit_dashboard manage_notifications manage_comments manage_notes manage_insights manage_data_source_credentials),
'Account' => %w(create show update destroy change_password lock unlock),
'Group' => %w(show update destroy create),
'Workspace' => %w(show update destroy admin create edit_settings add_members delete_members add_to_scope remove_from_scope change_status explore_data transform_data download_data),
'DataSource' => %w(show update destroy create add_credentials edit_credentials delete_credentials add_data remove_data explore_data download_data),
'Note' => %w(show update destroy create),
'Schema' => %w(show update destroy create),
'Sandbox' => %w(show update destroy create add_to_workspace delete_from_workspace),
'Comment' => %w(show update destroy create promote_to_insight),
'Workfile' => %w(show update destroy create run_workflow ),
'Workflow' => %w(create show update destroy run stop open),
'Job' => %w(show update destroy create run stop),
'Task' => %w(show update destroy create run stop),
'Milestone' => %w(show update destroy create complete restart),
'Tag' => %w(show update destroy create apply remove),
'Upload' => %w(create show update destroy),
'Import' => %w(create show update destroy),
'Notification' => %w(create show update destroy),
'CsvFile' => %w(create show update destroy)
},
  # ApplicationManager: manages workspaces, groups and workspace roles.
  'ApplicationManager' => {
    'Events::Base' => %w(create_comment_on create_attachment_on),
    'ChorusScope' => %w(),
    'Role' => %w(manage_workspace_roles),
    'User' => %w(),
    'Account' => %w(),
    'Group' => %w(show update destroy create),
    'Workspace' => %w(show update destroy admin create edit_settings add_members delete_members add_to_scope remove_from_scope change_status explore_data transform_data download_data),
    'DataSource' => %w(show update destroy create add_data remove_data explore_data download_data),
    'Note' => %w(show update destroy create),
    'Schema' => %w(show),
    'Sandbox' => %w(show),
    'Comment' => %w(show update destroy create promote_to_insight),
    'Workfile' => %w(),
    'Workflow' => %w(),
    'Job' => %w(),
    'Task' => %w(),
    'Milestone' => %w(),
    'Tag' => %w(show update destroy create apply remove),
    'Upload' => %w(create show update destroy),
    'Import' => %w(),
    'Notification' => %w(create show update destroy),
    'CsvFile' => %w(create show update destroy)
  },
  # WorkflowDeveloper: workflow/workfile development only.
  'WorkflowDeveloper' => {
    'Events::Base' => %w(),
    'ChorusScope' => %w(),
    'Role' => %w(),
    'User' => %w(),
    'Account' => %w(),
    'Group' => %w(),
    'Workspace' => %w(),
    'DataSource' => %w(),
    'Note' => %w(),
    'Schema' => %w(),
    'Sandbox' => %w(),
    'Comment' => %w(),
    'Workfile' => %w(create create_workflow run_workflow),
    'Workflow' => %w(create show update destroy run stop open),
    'Job' => %w(),
    'Task' => %w(),
    'Milestone' => %w(),
    'Tag' => %w(),
    'Upload' => %w(),
    'Import' => %w(),
    'Notification' => %w(),
    'CsvFile' => %w(create show update destroy)
  },
  # ProjectManager: manages workspace membership, content and milestones.
  'ProjectManager' => {
    'Events::Base' => %w(show create create_comment_on create_attachment_on),
    'ChorusScope' => %w(),
    'Role' => %w(),
    'User' => %w(),
    'Account' => %w(),
    'Group' => %w(),
    'Workspace' => %w(show update create edit_settings add_members delete_members add_to_scope remove_from_scope change_status explore_data transform_data download_data),
    'DataSource' => %w(),
    'Note' => %w(show update destroy create),
    'Schema' => %w(),
    'Sandbox' => %w(),
    'Comment' => %w(show update destroy create promote_to_insight),
    'Workfile' => %w(show update destroy create run_workflow),
    'Workflow' => %w(create show update destroy run stop open),
    'Job' => %w(show update destroy create run stop),
    'Task' => %w(show update destroy create run stop),
    'Milestone' => %w(show update destroy create complete restart),
    'Tag' => %w(show update destroy create apply remove),
    'Upload' => %w(create show update destroy),
    'Import' => %w(create show update destroy),
    'Notification' => %w(create show update destroy),
    'CsvFile' => %w(create show update destroy)
  },
  # Contributor: adds content (notes, comments, workfiles, jobs, tasks).
  'Contributor' => {
    'Events::Base' => %w(show create create_comment_on create_attachment_on),
    'ChorusScope' => %w(),
    'Role' => %w(),
    'User' => %w(),
    'Account' => %w(),
    'Group' => %w(),
    'Workspace' => %w(show explore_data transform_data download_data),
    'DataSource' => %w(),
    'Note' => %w(show create),
    'Schema' => %w(),
    'Sandbox' => %w(),
    'Comment' => %w(show create promote_to_insight),
    'Workfile' => %w(show create),
    'Workflow' => %w(),
    'Job' => %w(show update destroy create run stop),
    'Task' => %w(show update destroy create run stop),
    'Milestone' => %w(),
    'Tag' => %w(show update destroy create apply remove),
    'Upload' => %w(create show update destroy),
    'Import' => %w(create show update destroy),
    'Notification' => %w(create show update destroy),
    'CsvFile' => %w(create show update destroy)
  },
  # ProjectDeveloper: Contributor plus data-source reads and workflow edits.
  'ProjectDeveloper' => {
    'Events::Base' => %w(show create create_comment_on create_attachment_on),
    'ChorusScope' => %w(),
    'Role' => %w(),
    'User' => %w(),
    'Account' => %w(),
    'Group' => %w(),
    'Workspace' => %w(show create explore_data transform_data download_data),
    'DataSource' => %w(show explore_data download_data),
    'Note' => %w(show create),
    'Schema' => %w(),
    'Sandbox' => %w(),
    'Comment' => %w(show create promote_to_insight),
    'Workfile' => %w(show create),
    'Workflow' => %w(create show update),
    'Job' => %w(show update destroy create run stop),
    'Task' => %w(show update destroy create run stop),
    'Milestone' => %w(),
    'Tag' => %w(show update destroy create apply remove),
    'Upload' => %w(create show update destroy),
    'Import' => %w(create show update destroy),
    'Notification' => %w(create show update destroy),
    'CsvFile' => %w(create show update destroy)
  }
}
# Given an array of permission names (strings or symbols) and the ordered
# list of operation symbols for a class, returns an integer bitmask with one
# bit set per granted operation (bit position == operation's index).
# Returns 0 for a nil permission list; unknown operations are reported and
# skipped.
def create_permission_mask_for(permissions, operations)
  bits = 0
  return bits if permissions.nil?
  permissions.each do |permission|
    index = operations.index(permission.to_sym)
    if index.nil?
      # Bug fix: the original printed this warning and then fell through to
      # `1 << nil`, raising TypeError; skip unknown operations instead.
      puts "#{permission} operation not found"
      next
    end
    bits |= (1 << index)
  end
  bits
end
# Attach a Permission row (with its computed operation bitmask) to every
# role, for each chorus class listed in role_permissions.
role_permissions.each do |name_of_role, class_permissions|
  role = Role.find_by_name(name_of_role)
  class_permissions.each do |klass_name, permitted_operations|
    klass = ChorusClass.find_by_name(klass_name)
    puts "---- Adding permissions for #{name_of_role} role and #{klass_name} ----"
    mask = create_permission_mask_for(permitted_operations, klass.class_operations)
    permission = Permission.create(:role_id => role.id,
                                   :chorus_class_id => klass.id,
                                   :permissions_mask => mask)
    role.permissions << permission
  end
end
puts ''
puts "===================== Migrating users to new roles =========================="
puts ''
# Translate the legacy boolean flags on User into role memberships;
# every user additionally becomes a collaborator.
User.all.each do |u|
  u.roles << site_admin_role if u.admin && !u.roles.include?(site_admin_role)
  if u.developer
    u.roles << project_developer_role unless u.roles.include?(project_developer_role)
    u.roles << workflow_developer_role unless u.roles.include?(workflow_developer_role)
  end
  u.roles << collaborator_role unless u.roles.include?(collaborator_role)
end
puts ''
puts "===================== Adding Chorus Object =========================="
puts ''
puts '--- Adding Users and children objects ----'
# Returns the ChorusClass row named after +klass+, creating it on first use.
# Replaces the copy-pasted "find, create if nil, find again" pattern that was
# repeated for every child collection below.
ensure_chorus_class = lambda do |klass|
  ChorusClass.find_by_name(klass.name) || ChorusClass.create(:name => klass.name)
end
User.all.each do |user|
  user_class = ensure_chorus_class.call(user.class)
  user_object = ChorusObject.create(:chorus_class_id => user_class.id, :instance_id => user.id)
  user_object.chorus_object_roles << ChorusObjectRole.create(:chorus_object_id => user_object.id, :user_id => user.id, :role_id => user_role.id)
  # Add the user to the default scope (application realm) by adding the user
  # to the default group.
  user.groups << default_group
  # Registers +child+ as a ChorusObject owned by and parented to this user.
  register_child = lambda do |child|
    child_class = ensure_chorus_class.call(child.class)
    ChorusObject.create(:chorus_class_id => child_class.id,
                        :instance_id => child.id,
                        :owner_id => user.id,
                        :parent_class_name => user.class.name,
                        :parent_class_id => user_class.id,
                        :parent_id => user.id)
  end
  # Every owned/associated child collection gets the identical treatment;
  # order preserved from the original hand-unrolled version.
  [user.gpdb_data_sources, user.oracle_data_sources, user.jdbc_data_sources,
   user.pg_data_sources, user.hdfs_data_sources, user.gnip_data_sources,
   user.data_source_accounts, user.memberships, user.owned_jobs,
   user.activities, user.events, user.notifications,
   user.open_workfile_events].each do |children|
    children.each { |child| register_child.call(child) }
  end
end
puts ''
puts '--- Adding Workspace and children objects ----'
# Returns the ChorusClass row named after +klass+, creating it on first use.
# Replaces the copy-pasted "find, create if nil, find again" pattern.
ensure_chorus_class = lambda do |klass|
  ChorusClass.find_by_name(klass.name) || ChorusClass.create(:name => klass.name)
end
# Registers +child+ as a ChorusObject parented under +parent+ and owned by
# the user identified by +owner_id+.
register_under = lambda do |child, parent, owner_id|
  child_class = ensure_chorus_class.call(child.class)
  ChorusObject.create(:chorus_class_id => child_class.id,
                      :instance_id => child.id,
                      :owner_id => owner_id,
                      :parent_class_name => parent.class.name,
                      :parent_class_id => ChorusClass.find_by_name(parent.class.name).id,
                      :parent_id => parent.id)
end
Workspace.all.each do |workspace|
  workspace_klass = ensure_chorus_class.call(workspace.class)
  # The workspace owner gets the owner role on the workspace object.
  workspace_object = ChorusObject.create(:chorus_class_id => workspace_klass.id, :instance_id => workspace.id, :owner_id => workspace.owner.id)
  workspace_object.chorus_object_roles << ChorusObjectRole.create(:chorus_object_id => workspace_object.id, :user_id => workspace.owner.id, :role_id => owner_role.id)
  [workspace.jobs, workspace.milestones, workspace.memberships].each do |children|
    children.each { |child| register_under.call(child, workspace, workspace.owner.id) }
  end
  workspace.workfiles.each do |workfile|
    register_under.call(workfile, workspace, workspace.owner.id)
    [workfile.activities, workfile.events, workfile.open_workfile_events].each do |children|
      children.each { |child| register_under.call(child, workfile, workspace.owner.id) }
    end
    # NOTE(review): comments are owned by workfile.owner while every other
    # workfile child is owned by workspace.owner. Preserved as-is from the
    # original; confirm whether this asymmetry is intentional.
    workfile.comments.each { |comment| register_under.call(comment, workfile, workfile.owner.id) }
  end
  # TODO: RPG. Don't know how to deal with events of different types in the
  # permissions framework. For now they are added as subclasses of Events::Base.
  [workspace.activities, workspace.events, workspace.owned_notes,
   workspace.comments, workspace.chorus_views, workspace.csv_files,
   workspace.associated_datasets, workspace.source_datasets,
   workspace.all_imports, workspace.imports, workspace.tags].each do |children|
    children.each { |child| register_under.call(child, workspace, workspace.owner.id) }
  end
end
puts ''
puts '--- Adding Data Sources and it children objects ----'
# Every data source becomes a top-level ChorusObject owned by its owner.
DataSource.all.each do |source|
  ChorusObject.create(:chorus_class_id => datasource_class.id,
                      :instance_id => source.id,
                      :owner_id => source.owner.id)
end
puts ''
puts '--- Adding Data Sets and it children objects ----'
# All dataset models share identical registration logic: create the object,
# then — if the dataset belongs to a workspace — parent it under the
# workspace and mark the workspace owner as its owner. Table-driven instead
# of eight hand-unrolled copies; insertion order matches the original.
{
  ChorusView  => chorus_view_class,
  GpdbView    => gpdb_view_class,
  GpdbTable   => gpdb_table_class,
  PgTable     => pg_table_class,
  PgView      => pg_view_class,
  HdfsDataset => hdfs_dataset_class,
  GpdbDataset => gpdb_dataset_class,
  JdbcDataset => jdbc_dataset_class
}.each do |model, chorus_class|
  model.all.each do |dataset|
    co = ChorusObject.create(:chorus_class_id => chorus_class.id, :instance_id => dataset.id)
    workspace = dataset.workspace
    next if workspace.nil?
    co.update_attributes(:owner_id => workspace.owner.id,
                         :parent_class_name => workspace_class.name,
                         :parent_class_id => workspace_class.id,
                         :parent_id => workspace.id)
  end
end
# HDFS entries are parented under their HDFS data source rather than a workspace.
HdfsEntry.all.each do |entry|
  chorus_object = ChorusObject.create(:chorus_class_id => hdfs_entry_class.id, :instance_id => entry.id)
  source = entry.hdfs_data_source
  unless source.nil?
    chorus_object.update_attributes(:owner_id => source.owner.id,
                                    :parent_class_name => hdfs_data_source_class.name,
                                    :parent_class_id => hdfs_data_source_class.id,
                                    :parent_id => source.id)
  end
end
# Schemas are parented under their parent (database) object. Table-driven to
# remove the duplicated GpdbSchema/PgSchema loops.
# NOTE(review): owner_id is set to the parent record's id, not a user id —
# this mirrors the original code exactly but looks suspicious; confirm intent.
[[GpdbSchema, gpdb_schema_class], [PgSchema, pg_schema_class]].each do |model, chorus_class|
  model.all.each do |schema|
    co = ChorusObject.create(:chorus_class_id => chorus_class.id, :instance_id => schema.id)
    parent = schema.parent
    next if parent.nil?
    co.update_attributes(:owner_id => parent.id,
                         :parent_class_name => parent.class.name,
                         :parent_id => parent.id)
  end
end
puts ''
puts '--- Assign application_realm scope to all objects ---'
# Every chorus object ends up in the application-wide realm scope.
ChorusObject.all.each do |obj|
  obj.chorus_scope = application_realm
  obj.save!
end
|
#! /usr/bin/env ruby -S rspec
require 'spec_helper_acceptance'

describe 'fqdn_rotate function', :unless => UNSUPPORTED_PLATFORMS.include?(fact('operatingsystem')) do
  describe 'success' do
    # Platform-dependent location of facter's external facts directory
    # (Puppet Enterprise vs. open source, Windows vs. everything else).
    let(:facts_d) do
      if fact('is_pe', '--puppet') == "true"
        if fact('osfamily') =~ /windows/i
          if fact('kernelmajversion').to_f < 6.0
            'C:/Documents and Settings/All Users/Application Data/PuppetLabs/facter/facts.d'
          else
            'C:/ProgramData/PuppetLabs/facter/facts.d'
          end
        else
          '/etc/puppetlabs/facter/facts.d'
        end
      else
        '/etc/facter/facts.d'
      end
    end
    after :each do
      shell("if [ -f '#{facts_d}/fqdn.txt' ] ; then rm '#{facts_d}/fqdn.txt' ; fi")
    end
    # Changed from `before :all` to `before :each`: the directory must exist
    # for every example, and this also stops rspec 3 from complaining.
    before :each do
      # No need to create on windows, PE creates by default
      if fact('osfamily') !~ /windows/i
        shell("mkdir -p '#{facts_d}'")
      end
    end
    it 'fqdn_rotates floats' do
      # Pin the fqdn fact so the rotation offset is deterministic.
      shell("echo fqdn=fakehost.localdomain > '#{facts_d}/fqdn.txt'")
      pp = <<-EOS
      $a = ['a','b','c','d']
      $o = fqdn_rotate($a)
      notice(inline_template('fqdn_rotate is <%= @o.inspect %>'))
      EOS
      apply_manifest(pp, :catch_failures => true) do |r|
        expect(r.stdout).to match(/fqdn_rotate is \["c", "d", "a", "b"\]/)
      end
    end
  end
  describe 'failure' do
    it 'handles improper argument counts'
    it 'handles non-numbers'
  end
end
Change `before :all` to `before :each`.
Ensuring this directory exists is required behavior for each test, and using
`before :each` will also stop rspec 3 from complaining.
#! /usr/bin/env ruby -S rspec
require 'spec_helper_acceptance'

describe 'fqdn_rotate function', unless: UNSUPPORTED_PLATFORMS.include?(fact('operatingsystem')) do
  describe 'success' do
    # Platform-dependent location of facter's external facts directory.
    let(:facts_d) do
      if fact('is_pe', '--puppet') == "true"
        case
        when fact('osfamily') !~ /windows/i
          '/etc/puppetlabs/facter/facts.d'
        when fact('kernelmajversion').to_f < 6.0
          'C:/Documents and Settings/All Users/Application Data/PuppetLabs/facter/facts.d'
        else
          'C:/ProgramData/PuppetLabs/facter/facts.d'
        end
      else
        '/etc/facter/facts.d'
      end
    end

    after :each do
      shell("if [ -f '#{facts_d}/fqdn.txt' ] ; then rm '#{facts_d}/fqdn.txt' ; fi")
    end

    before :each do
      # No need to create on windows, PE creates by default
      shell("mkdir -p '#{facts_d}'") if fact('osfamily') !~ /windows/i
    end

    it 'fqdn_rotates floats' do
      # Pin the fqdn fact so the rotation offset is deterministic.
      shell("echo fqdn=fakehost.localdomain > '#{facts_d}/fqdn.txt'")
      manifest = <<-EOS
      $a = ['a','b','c','d']
      $o = fqdn_rotate($a)
      notice(inline_template('fqdn_rotate is <%= @o.inspect %>'))
      EOS
      apply_manifest(manifest, catch_failures: true) do |result|
        expect(result.stdout).to match(/fqdn_rotate is \["c", "d", "a", "b"\]/)
      end
    end
  end

  describe 'failure' do
    it 'handles improper argument counts'
    it 'handles non-numbers'
  end
end
|
require 'spec_helper'

describe AwsSnsManager::Client do
  describe '#send' do
    context 'when client arn is valid' do
      let(:client) do
        manager = AwsSnsManager::Client.new(stub_responses: true)
        manager.client.stub_responses(
          :publish, message_id: '606781ee-ff46-5f93-bd04-cc4ac4dd68c3')
        manager
      end

      it 'should send SNS successfully' do
        # without message
        expect(client.send).to be true
        # with message
        expect(client.send('Hi')).to be true
      end
    end

    context 'when client arn is not valid' do
      let(:client) do
        manager = AwsSnsManager::Client.new(stub_responses: true)
        # Stub the error by name: the aws-sdk-ruby stubbing API accepts a
        # service error name string and raises the corresponding error class
        # (passing the error class constant directly is not supported).
        manager.client.stub_responses(
          :publish, 'InvalidParameter')
        manager
      end

      # Capture (rather than propagate) the error raised by #send.
      let(:error) do
        begin
          client.send
        rescue => error
          # suppress for testing
        end
        error
      end

      it 'should raise Aws::SNS::Errors::InvalidParameter' do
        expect(error.class.name).to eq('Aws::SNS::Errors::InvalidParameter')
      end
    end
  end
end
Update the error stub_responses call to pass the error name string.
See
https://github.com/aws/aws-sdk-ruby/blob/master/aws-sdk-core/spec/aws/client_spec.rb
The documents at http://docs.aws.amazon.com/sdkforruby/api/index.html are out of date.
require 'spec_helper'

describe AwsSnsManager::Client do
  describe '#send' do
    context 'when client arn is valid' do
      let(:client) do
        instance = described_class.new(stub_responses: true)
        instance.client.stub_responses(
          :publish, message_id: '606781ee-ff46-5f93-bd04-cc4ac4dd68c3')
        instance
      end

      it 'should send SNS successfully' do
        # Succeeds both without and with an explicit message.
        expect(client.send).to be true
        expect(client.send('Hi')).to be true
      end
    end

    context 'when client arn is not valid' do
      let(:client) do
        instance = described_class.new(stub_responses: true)
        instance.client.stub_responses(:publish, 'InvalidParameter')
        instance
      end

      # Capture (rather than propagate) the error raised by #send so the
      # example can assert on its class.
      let(:error) do
        captured = nil
        begin
          client.send
        rescue => captured
        end
        captured
      end

      it 'should raise Aws::SNS::Errors::InvalidParameter' do
        expect(error.class.name).to eq('Aws::SNS::Errors::InvalidParameter')
      end
    end
  end
end
|
require 'spec_helper'
# Specs for Koala::Facebook::API::GraphCollection — the Array subclass that
# wraps a paged Graph API response and exposes paging/summary/raw_response
# plus helpers for fetching adjacent pages.
describe Koala::Facebook::API::GraphCollection do
let(:paging){ {"paging" => true} }
before(:each) do
# Build a canned Graph API response and wrap it in a collection.
@data = {
"data" => [1, 2, 'three'],
"paging" => paging,
"summary" => [3]
}
@result = Koala::HTTPService::Response.new(200, @data.to_json, {})
@api = Koala::Facebook::API.new("123")
@collection = Koala::Facebook::API::GraphCollection.new(@result, @api)
end
it "subclasses Array" do
expect(Koala::Facebook::API::GraphCollection.ancestors).to include(Array)
end
it "creates an array-like object" do
expect(Koala::Facebook::API::GraphCollection.new(@result, @api)).to be_an(Array)
end
it "contains the result data" do
@data["data"].each_with_index {|r, i| expect(@collection[i]).to eq(r)}
end
it "has a read-only paging attribute" do
expect(@collection.methods.map(&:to_sym)).to include(:paging)
expect(@collection.methods.map(&:to_sym)).not_to include(:paging=)
end
it "sets paging to results['paging']" do
expect(@collection.paging).to eq(@data["paging"])
end
it "sets summary to results['summary']" do
expect(@collection.summary).to eq(@data["summary"])
end
it "sets raw_response to the original results" do
expect(@collection.raw_response).to eq(@result.data)
end
it "sets the API to the provided API" do
expect(@collection.api).to eq(@api)
end
# Fetching adjacent pages: the collection asks *_page_params for the request,
# forwards it through the owning API, and wraps the result in a new collection.
describe "when getting a whole page" do
before(:each) do
@second_page = {
"data" => ["second", "page", "data"],
"paging" => {}
}
@base = double("base")
@args = {"a" => 1}
@page_of_results = double("page of results")
@result = Koala::HTTPService::Response.new(200, @second_page.to_json, {})
@result.data
end
it "should return the previous page of results" do
expect(@collection).to receive(:previous_page_params).and_return([@base, @args])
expect(@api).to receive(:api).with(@base, @args, anything, anything).and_return(@result)
expect(Koala::Facebook::API::GraphCollection).to receive(:new).with(@result, @api).and_return(@page_of_results)
expect(@collection.previous_page).to eq(@page_of_results)
end
it "should return the next page of results" do
expect(@collection).to receive(:next_page_params).and_return([@base, @args])
expect(@api).to receive(:api).with(@base, @args, anything, anything).and_return(@result)
expect(Koala::Facebook::API::GraphCollection).to receive(:new).with(@result, @api).and_return(@page_of_results)
expect(@collection.next_page).to eq(@page_of_results)
end
it "should return nil it there are no other pages" do
%w{next previous}.each do |this|
expect(@collection).to receive("#{this}_page_params".to_sym).and_return(nil)
expect(@collection.send("#{this}_page")).to eq(nil)
end
end
end
# NOTE(review): "paramters" below is a typo in the example-group description
# string; it is runtime-visible spec output, so it is left unchanged here.
describe "when parsing page paramters" do
describe "#parse_page_url" do
it "should pass the url to the class method" do
url = double("url")
expect(Koala::Facebook::API::GraphCollection).to receive(:parse_page_url).with(url)
@collection.parse_page_url(url)
end
it "should return the result of the class method" do
parsed_content = double("parsed_content")
allow(Koala::Facebook::API::GraphCollection).to receive(:parse_page_url).and_return(parsed_content)
expect(@collection.parse_page_url(double("url"))).to eq(parsed_content)
end
end
# The class method splits a paging URL into [path_base, query_args_hash].
describe ".parse_page_url" do
it "should return the base as the first array entry" do
base = "url_path"
expect(Koala::Facebook::API::GraphCollection.parse_page_url("http://facebook.com/#{base}?anything").first).to eq(base)
end
it "should return the arguments as a hash as the last array entry" do
args_hash = {"one" => "val_one", "two" => "val_two"}
expect(Koala::Facebook::API::GraphCollection.parse_page_url("http://facebook.com/anything?#{args_hash.map {|k,v| "#{k}=#{v}" }.join("&")}").last).to eq(args_hash)
end
it "works with non-.com addresses" do
base = "url_path"
args_hash = {"one" => "val_one", "two" => "val_two"}
expect(Koala::Facebook::API::GraphCollection.parse_page_url("http://facebook.com/#{base}?#{args_hash.map {|k,v| "#{k}=#{v}" }.join("&")}")).to eq([base, args_hash])
end
it "works with addresses with irregular characters" do
access_token = "appid123a|fdcba"
base, args_hash = Koala::Facebook::API::GraphCollection.parse_page_url("http://facebook.com/foo?token=#{access_token}")
expect(args_hash["token"]).to eq(access_token)
end
end
end
# .evaluate decides whether a raw response becomes a GraphCollection or is
# passed through/unwrapped unchanged.
describe ".evaluate" do
it "returns the body of the original response if it's provided a Response with a non-hash data key" do
result = double('fake response')
allow(result).to receive(:is_a?).with(Hash).and_return(false)
allow(result).to receive(:data).and_return([])
expect(Koala::Facebook::API::GraphCollection.evaluate(result, @api)).to eq([])
end
it "returns the original result if it's provided a nil result" do
result = nil
expect(Koala::Facebook::API::GraphCollection.evaluate(result, @api)).to eq(result)
end
it "returns the original result body if the result doesn't have a data key" do
paging = {"paging" => {}}
result = Koala::HTTPService::Response.new(200, paging.to_json, {})
expect(Koala::Facebook::API::GraphCollection.evaluate(result, @api)).to eq(paging)
end
it "returns the original result if the result's data key isn't an array" do
body = {"data" => {}, "paging" => {}}
result = Koala::HTTPService::Response.new(200, body.to_json, {})
expect(Koala::Facebook::API::GraphCollection.evaluate(result, @api)).to eq(body)
end
it "returns a new GraphCollection of the result if it has an array data key and a paging key" do
body = {"data" => [], "paging" => {}}
result = Koala::HTTPService::Response.new(200, body.to_json, {})
expected = :foo
expect(Koala::Facebook::API::GraphCollection).to receive(:new).with(result, @api).and_return(expected)
expect(Koala::Facebook::API::GraphCollection.evaluate(result, @api)).to eq(expected)
end
end
# next_page/previous_page parse the paging URLs into get_page arguments and
# merge any extra parameters passed by the caller.
describe "#next_page" do
let(:paging){ {"next" => "http://example.com/abc?a=2&b=3"} }
it "should get next page" do
expect(@api).to receive(:get_page).with(["abc", {"a" => "2", "b" => "3"}])
@collection.next_page
end
it "should get next page with extra parameters" do
expect(@api).to receive(:get_page).with(["abc", {"a" => "2", "b" => "3", "c" => "4"}])
@collection.next_page("c" => "4")
end
end
describe "#previous_page" do
let(:paging){ {"previous" => "http://example.com/?a=2&b=3"} }
it "should get previous page" do
expect(@api).to receive(:get_page).with(["", {"a" => "2", "b" => "3"}])
@collection.previous_page
end
it "should get previous page with extra parameters" do
expect(@api).to receive(:get_page).with(["", {"a" => "2", "b" => "3", "c" => "4"}])
@collection.previous_page("c" => "4")
end
end
end
Test that collection recognizes headers
require 'spec_helper'
describe Koala::Facebook::API::GraphCollection do
let(:paging){ {"paging" => true} }
before(:each) do
@headers = {'Content-Type' => 'application/json'}
@data = {
"data" => [1, 2, 'three'],
"paging" => paging,
"summary" => [3]
}
@result = Koala::HTTPService::Response.new(200, @data.to_json, @headers)
@api = Koala::Facebook::API.new("123")
@collection = Koala::Facebook::API::GraphCollection.new(@result, @api)
end
it "subclasses Array" do
expect(Koala::Facebook::API::GraphCollection.ancestors).to include(Array)
end
it "creates an array-like object" do
expect(Koala::Facebook::API::GraphCollection.new(@result, @api)).to be_an(Array)
end
it "contains the result data" do
@data["data"].each_with_index {|r, i| expect(@collection[i]).to eq(r)}
end
it "has a read-only paging attribute" do
expect(@collection.methods.map(&:to_sym)).to include(:paging)
expect(@collection.methods.map(&:to_sym)).not_to include(:paging=)
end
it "sets paging to results['paging']" do
expect(@collection.paging).to eq(@data["paging"])
end
it "sets summary to results['summary']" do
expect(@collection.summary).to eq(@data["summary"])
end
it "sets raw_response to the original results" do
expect(@collection.raw_response).to eq(@result.data)
end
it "sets the API to the provided API" do
expect(@collection.api).to eq(@api)
end
it "sets the headers correctly" do
expect(@collection.headers).to eq(@headers)
end
describe "when getting a whole page" do
before(:each) do
@second_page = {
"data" => ["second", "page", "data"],
"paging" => {}
}
@base = double("base")
@args = {"a" => 1}
@page_of_results = double("page of results")
@result = Koala::HTTPService::Response.new(200, @second_page.to_json, {})
@result.data
end
it "should return the previous page of results" do
expect(@collection).to receive(:previous_page_params).and_return([@base, @args])
expect(@api).to receive(:api).with(@base, @args, anything, anything).and_return(@result)
expect(Koala::Facebook::API::GraphCollection).to receive(:new).with(@result, @api).and_return(@page_of_results)
expect(@collection.previous_page).to eq(@page_of_results)
end
it "should return the next page of results" do
expect(@collection).to receive(:next_page_params).and_return([@base, @args])
expect(@api).to receive(:api).with(@base, @args, anything, anything).and_return(@result)
expect(Koala::Facebook::API::GraphCollection).to receive(:new).with(@result, @api).and_return(@page_of_results)
expect(@collection.next_page).to eq(@page_of_results)
end
it "should return nil it there are no other pages" do
%w{next previous}.each do |this|
expect(@collection).to receive("#{this}_page_params".to_sym).and_return(nil)
expect(@collection.send("#{this}_page")).to eq(nil)
end
end
end
describe "when parsing page paramters" do
describe "#parse_page_url" do
it "should pass the url to the class method" do
url = double("url")
expect(Koala::Facebook::API::GraphCollection).to receive(:parse_page_url).with(url)
@collection.parse_page_url(url)
end
it "should return the result of the class method" do
parsed_content = double("parsed_content")
allow(Koala::Facebook::API::GraphCollection).to receive(:parse_page_url).and_return(parsed_content)
expect(@collection.parse_page_url(double("url"))).to eq(parsed_content)
end
end
describe ".parse_page_url" do
it "should return the base as the first array entry" do
base = "url_path"
expect(Koala::Facebook::API::GraphCollection.parse_page_url("http://facebook.com/#{base}?anything").first).to eq(base)
end
it "should return the arguments as a hash as the last array entry" do
args_hash = {"one" => "val_one", "two" => "val_two"}
expect(Koala::Facebook::API::GraphCollection.parse_page_url("http://facebook.com/anything?#{args_hash.map {|k,v| "#{k}=#{v}" }.join("&")}").last).to eq(args_hash)
end
it "works with non-.com addresses" do
base = "url_path"
args_hash = {"one" => "val_one", "two" => "val_two"}
expect(Koala::Facebook::API::GraphCollection.parse_page_url("http://facebook.com/#{base}?#{args_hash.map {|k,v| "#{k}=#{v}" }.join("&")}")).to eq([base, args_hash])
end
it "works with addresses with irregular characters" do
access_token = "appid123a|fdcba"
base, args_hash = Koala::Facebook::API::GraphCollection.parse_page_url("http://facebook.com/foo?token=#{access_token}")
expect(args_hash["token"]).to eq(access_token)
end
end
end
describe ".evaluate" do
it "returns the body of the original response if it's provided a Response with a non-hash data key" do
result = double('fake response')
allow(result).to receive(:is_a?).with(Hash).and_return(false)
allow(result).to receive(:data).and_return([])
expect(Koala::Facebook::API::GraphCollection.evaluate(result, @api)).to eq([])
end
it "returns the original result if it's provided a nil result" do
result = nil
expect(Koala::Facebook::API::GraphCollection.evaluate(result, @api)).to eq(result)
end
it "returns the original result body if the result doesn't have a data key" do
paging = {"paging" => {}}
result = Koala::HTTPService::Response.new(200, paging.to_json, {})
expect(Koala::Facebook::API::GraphCollection.evaluate(result, @api)).to eq(paging)
end
it "returns the original result if the result's data key isn't an array" do
body = {"data" => {}, "paging" => {}}
result = Koala::HTTPService::Response.new(200, body.to_json, {})
expect(Koala::Facebook::API::GraphCollection.evaluate(result, @api)).to eq(body)
end
it "returns a new GraphCollection of the result if it has an array data key and a paging key" do
body = {"data" => [], "paging" => {}}
result = Koala::HTTPService::Response.new(200, body.to_json, {})
expected = :foo
expect(Koala::Facebook::API::GraphCollection).to receive(:new).with(result, @api).and_return(expected)
expect(Koala::Facebook::API::GraphCollection.evaluate(result, @api)).to eq(expected)
end
end
describe "#next_page" do
let(:paging){ {"next" => "http://example.com/abc?a=2&b=3"} }
it "should get next page" do
expect(@api).to receive(:get_page).with(["abc", {"a" => "2", "b" => "3"}])
@collection.next_page
end
it "should get next page with extra parameters" do
expect(@api).to receive(:get_page).with(["abc", {"a" => "2", "b" => "3", "c" => "4"}])
@collection.next_page("c" => "4")
end
end
describe "#previous_page" do
let(:paging){ {"previous" => "http://example.com/?a=2&b=3"} }
it "should get previous page" do
expect(@api).to receive(:get_page).with(["", {"a" => "2", "b" => "3"}])
@collection.previous_page
end
it "should get previous page with extra parameters" do
expect(@api).to receive(:get_page).with(["", {"a" => "2", "b" => "3", "c" => "4"}])
@collection.previous_page("c" => "4")
end
end
end
|
require 'spec_helper'
describe 'managedmac::mcx', :type => 'class' do
let(:facts) do
{ :macosx_productversion_major => "10.9" }
end
context "when passed NO params" do
it { should_not contain_exec('refresh_mcx') }
end
context "when passed ANY valid param" do
let(:params) do
{ :bluetooth => 'on' }
end
it { should contain_exec('refresh_mcx') }
end
context "when passed a BAD param" do
let(:params) do
{ :bluetooth => 'foo' }
end
specify do
expect {
should compile
}.to raise_error(Puppet::Error, /not a boolean/)
end
end
context "when $bluetooth == on" do
let(:params) do
{ :bluetooth => 'on' }
end
it do
should contain_computer('mcx_puppet').with_ensure(
'present')
end
it do
should contain_mcx('/Computers/mcx_puppet').with_content(
/DisableBluetooth/)
end
end
context "when $bluetooth == off" do
let(:params) do
{ :bluetooth => 'off' }
end
it do
should contain_computer('mcx_puppet').with_ensure(
'present')
end
it do
should contain_mcx('/Computers/mcx_puppet').with_content(
/DisableBluetooth/)
end
end
context "when $bluetooth == enable" do
let(:params) do
{ :bluetooth => 'enable' }
end
it do
should contain_computer('mcx_puppet').with_ensure(
'present')
end
it do
should contain_mcx('/Computers/mcx_puppet').with_content(
/DisableBluetooth/)
end
end
context "when $bluetooth == disable" do
let(:params) do
{ :bluetooth => 'disable' }
end
it do
should contain_computer('mcx_puppet').with_ensure(
'present')
end
it do
should contain_mcx('/Computers/mcx_puppet').with_content(
/DisableBluetooth/)
end
end
context "when $bluetooth == true" do
let(:params) do
{ :bluetooth => true }
end
it do
should contain_computer('mcx_puppet').with_ensure(
'present')
end
it do
should contain_mcx('/Computers/mcx_puppet').with_content(
/DisableBluetooth/)
end
end
context "when $bluetooth == false" do
let(:params) do
{ :bluetooth => false }
end
it do
should contain_computer('mcx_puppet').with_ensure(
'present')
end
it do
should contain_mcx('/Computers/mcx_puppet').with_content(
/DisableBluetooth/)
end
end
context "when $wifi == true" do
let(:params) do
{ :wifi => true }
end
it do
should contain_computer('mcx_puppet').with_ensure(
'present')
end
it do
should contain_mcx('/Computers/mcx_puppet').with_content(
/DisableAirPort/)
end
end
context "when $wifi == ''" do
let(:params) do
{ :wifi => '' }
end
it do
should contain_computer('mcx_puppet').with_ensure(
'absent')
end
it do
should contain_mcx('/Computers/mcx_puppet').with_content('')
end
end
context "when $bluetooth == ''" do
let(:params) do
{ :bluetooth => '' }
end
it do
should contain_computer('mcx_puppet').with_ensure(
'absent')
end
it do
should contain_mcx('/Computers/mcx_puppet').with_content('')
end
end
context "when $logintitems are defined" do
let(:params) do
{ :loginitems => ['/path/to/some/file'] }
end
it do
should contain_computer('mcx_puppet').with_ensure(
'present')
end
it do
should contain_mcx('/Computers/mcx_puppet').with_content(
/\/path\/to\/some\/file/)
end
end
context "when NO $logintitems are defined" do
let(:params) do
{ :loginitems => [] }
end
it do
should contain_computer('mcx_puppet').with_ensure(
'absent')
end
it do
should contain_mcx('/Computers/mcx_puppet').with_content('')
end
end
context "when $logintitems is not an Array" do
let(:params) do
{ :loginitems => 'foo' }
end
specify do
expect {
should compile
}.to raise_error(Puppet::Error, /not an Array/)
end
end
context "when $suppress_icloud_setup == false" do
let(:params) do
{ :suppress_icloud_setup => false }
end
it do
should contain_computer('mcx_puppet').with_ensure(
'absent')
end
it do
should contain_mcx('/Computers/mcx_puppet').with_content('')
end
end
context "when $suppress_icloud_setup == true" do
let(:params) do
{ :suppress_icloud_setup => true }
end
it do
should contain_computer('mcx_puppet').with_ensure(
'present')
end
it do
should contain_mcx('/Computers/mcx_puppet').with_content(
/DidSeeCloudSetup/)
end
end
context "when $hidden_preference_panes are defined" do
let(:params) do
{ :hidden_preference_panes => ['com.apple.preferences.icloud'] }
end
it do
should contain_computer('mcx_puppet').with_ensure(
'present')
end
it do
should contain_mcx('/Computers/mcx_puppet').with_content(
/com\.apple\.preferences\.icloud/)
end
end
context "when NO $hidden_preference_panes are defined" do
let(:params) do
{ :hidden_preference_panes => [] }
end
it do
should contain_computer('mcx_puppet').with_ensure(
'absent')
end
it do
should contain_mcx('/Computers/mcx_puppet').with_content('')
end
end
context "when $hidden_preference_panes is not an Array" do
let(:params) do
{ :hidden_preference_panes => 'foo' }
end
specify do
expect {
should compile
}.to raise_error(Puppet::Error, /not an Array/)
end
end
end
overhaul the mcx specs
- forgot to do this in 0.4.7
require 'spec_helper'
describe 'managedmac::mcx', :type => 'class' do
let(:facts) do
{ :macosx_productversion_major => "10.9" }
end
context "when passed NO params" do
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_ensure(
'absent')
}
end
context "when passed ANY valid param" do
let(:params) do
{ :bluetooth => 'on' }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_ensure(
'present')
}
end
context "when passed a BAD param" do
let(:params) do
{ :bluetooth => 'foo' }
end
specify do
expect {
should compile
}.to raise_error(Puppet::Error, /not a boolean/)
end
end
context "when $bluetooth == on" do
let(:params) do
{ :bluetooth => 'on' }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_ensure(
'present')
}
end
context "when $bluetooth == off" do
let(:params) do
{ :bluetooth => 'off' }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_ensure(
'present')
}
end
context "when $bluetooth == enable" do
let(:params) do
{ :bluetooth => 'enable' }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_ensure(
'present')
}
end
context "when $bluetooth == disable" do
let(:params) do
{ :bluetooth => 'disable' }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_ensure(
'present')
}
end
context "when $bluetooth == true" do
let(:params) do
{ :bluetooth => true }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_ensure(
'present')
}
end
context "when $bluetooth == false" do
let(:params) do
{ :bluetooth => false }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_ensure(
'present')
}
end
context "when $wifi == true" do
let(:params) do
{ :wifi => true }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_ensure(
'present')
}
end
context "when $wifi == ''" do
let(:params) do
{ :wifi => '' }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_ensure(
'absent')
}
end
context "when $bluetooth == ''" do
let(:params) do
{ :bluetooth => '' }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_ensure(
'absent')
}
end
context "when $logintitems are defined" do
let(:params) do
{ :loginitems => ['/path/to/some/file'] }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_content(
/\/path\/to\/some\/file/)
}
end
context "when NO $logintitems are defined" do
let(:params) do
{ :loginitems => [] }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_ensure(
'absent')
}
end
context "when $logintitems is not an Array" do
let(:params) do
{ :loginitems => 'foo' }
end
specify do
expect {
should compile
}.to raise_error(Puppet::Error, /not an Array/)
end
end
context "when $suppress_icloud_setup == false" do
let(:params) do
{ :suppress_icloud_setup => false }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_ensure(
'absent')
}
end
context "when $suppress_icloud_setup == true" do
let(:params) do
{ :suppress_icloud_setup => true }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_content(
/DidSeeCloudSetup/)
}
end
context "$suppress_icloud_setup == true, NO $logintitems are defined" do
let(:params) do
{
:suppress_icloud_setup => true,
:loginitems => [],
}
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_content(
/DidSeeCloudSetup/)
}
it { should_not contain_mobileconfig('managedmac.mcx.alacarte').with_content(
/AutoLaunchedApplicationDictionary-managed/)
}
end
context "when $hidden_preference_panes are defined" do
let(:params) do
{ :hidden_preference_panes => ['com.apple.preferences.icloud'] }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_content(
/com\.apple\.preferences\.icloud/)
}
end
context "when NO $hidden_preference_panes are defined" do
let(:params) do
{ :hidden_preference_panes => [] }
end
it { should contain_mobileconfig('managedmac.mcx.alacarte').with_ensure(
'absent')
}
end
context "when $hidden_preference_panes is not an Array" do
let(:params) do
{ :hidden_preference_panes => 'foo' }
end
specify do
expect {
should compile
}.to raise_error(Puppet::Error, /not an Array/)
end
end
end
|
require 'spec_helper'
describe 'zabbix::server' do
context 'on gentoo' do
let(:facts) {
{
:operatingsystem => 'Gentoo',
:osfamily => 'gentoo',
}
}
it {
should contain_class('zabbix::server::gentoo')
should contain_service('zabbix-server').with({
:ensure => 'running',
:enable => 'true',
})
should contain_file('/etc/zabbix/zabbix_server.conf')
}
end
context 'it should have params' do
let(:params) {
{
:ensure => 'present',
:conf_file => 'undef',
:template => 'undef',
:node_id => 'undef',
:db_server => 'undef',
:db_database => 'undef',
:db_user => 'undef',
:db_password => 'undef',
:export => 'undef',
:hostname => 'undef',
}
}
end
context 'test zabbix::server::template call with args', :broken => true do
# broken due to dependency on rodjek/rspec-puppet#51
let(:exported_resources) {
{
'zabbix::server::template' => {
'test_template' => {
'ensure' => 'present',
}
}
}
}
it {
should contain_zabbix__server__template('test_template').with({
:ensure => 'present'
})
}
end
context 'with export present' do
let(:facts) {
{
'fqdn' => 'server_host'
}
}
let(:params) {
{
'export' => 'present',
}
}
it {
should contain_zabbix__agent('zabbix::agent').with({
:ensure => 'present',
:server => 'server_host'
})
}
end
end
need a define to facilitate things
require 'spec_helper'
describe 'zabbix::server' do
context 'on gentoo' do
let(:facts) {
{
:operatingsystem => 'Gentoo',
:osfamily => 'gentoo',
}
}
it {
should contain_class('zabbix::server::gentoo')
should contain_service('zabbix-server').with({
:ensure => 'running',
:enable => 'true',
})
should contain_file('/etc/zabbix/zabbix_server.conf')
}
end
context 'it should have params' do
let(:params) {
{
:ensure => 'present',
:conf_file => 'undef',
:template => 'undef',
:node_id => 'undef',
:db_server => 'undef',
:db_database => 'undef',
:db_user => 'undef',
:db_password => 'undef',
:export => 'undef',
:hostname => 'undef',
}
}
end
context 'test zabbix::server::template call with args', :broken => true do
# broken due to dependency on rodjek/rspec-puppet#51
let(:exported_resources) {
{
'zabbix::server::template' => {
'test_template' => {
'ensure' => 'present',
}
}
}
}
it {
should contain_zabbix__server__template('test_template').with({
:ensure => 'present'
})
}
end
context 'with export present' do
let(:facts) {
{
'fqdn' => 'server_host'
}
}
let(:params) {
{
'export' => 'present',
}
}
it {
should contain_zabbix__agent__server('server_host').with({
:ensure => 'present',
:server => 'server_host'
})
}
end
end |
Fabricator(:user) do
provider "github"
uid 123
name { Faker::Name.name }
verified false
gravatar_id "7194e8d48fa1d2b689f99443b767316c"
end
changed to Faker::Number
Fabricator(:user) do
provider "github"
uid {Faker::Number.number(5)}
name { Faker::Name.name }
verified false
gravatar_id "7194e8d48fa1d2b689f99443b767316c"
end
|
Add AWS::File#copy_to integration test
This does not currently test if the storage class is copied, because the
AWS Ruby SDK doesn't seem to expose a method to query the storage class.
require 'spec_helper'
describe 'Copying Files', type: :feature do
it 'copies an existing file to the specified path' do
uploader = Class.new(CarrierWave::Uploader::Base) do
def filename; 'image.png'; end
end
image = File.open('spec/fixtures/image.png', 'r')
original = uploader.new
original.store!(image)
original.retrieve_from_store!('image.png')
original.file.copy_to('uploads/image2.png')
copy = uploader.new
copy.retrieve_from_store!('image2.png')
original_attributes = original.file.attributes
original_attributes.reject! { |k,v| k == :last_modified }
copy_attributes = copy.file.attributes
copy_attributes.reject! { |k,v| k == :last_modified }
expect(copy_attributes).to eq(original_attributes)
image.close
original.file.delete
copy.file.delete
end
end
|
require 'rails_helper'
describe 'a user with invalid oauth credentials', type: :feature do
before do
user = create(:user, wiki_token: 'invalid')
login_as user
create(:cohort, slug: Figaro.env.default_cohort)
end
it 'should get logged out and see a message about the problem' do
error_message = I18n.t('error.oauth_invalid')
visit '/courses'
expect(page).to have_content error_message
expect(page).to have_content 'Login'
end
end
Update auth spec to look for 'Log in' instead of 'Login'
require 'rails_helper'
describe 'a user with invalid oauth credentials', type: :feature do
before do
user = create(:user, wiki_token: 'invalid')
login_as user
create(:cohort, slug: Figaro.env.default_cohort)
end
it 'should get logged out and see a message about the problem' do
error_message = I18n.t('error.oauth_invalid')
visit '/courses'
expect(page).to have_content error_message
expect(page).to have_content 'Log in'
end
end
|
require 'rails_helper'
describe "Questionnaire tests for instructor interface" do
before(:each) do
create(:assignment)
create_list(:participant, 3)
create(:assignment_node)
create(:deadline_type,name:"submission")
create(:deadline_type,name:"review")
create(:deadline_type,name:"resubmission")
create(:deadline_type,name:"rereview")
create(:deadline_type,name:"metareview")
create(:deadline_type,name:"drop_topic")
create(:deadline_type,name:"signup")
create(:deadline_type,name:"team_formation")
create(:deadline_right)
create(:deadline_right, name: 'Late')
create(:deadline_right, name: 'OK')
create(:due_date)
create(:due_date, deadline_type: DeadlineType.where(name: 'review').first, due_at: Time.now + (100*24*60*60))
end
describe "Instructor login" do
it "with valid username and password" do
login_as("instructor6")
visit '/tree_display/list'
expect(page).to have_content("Manage content")
end
it "with invalid username and password" do
visit root_path
fill_in 'login_name', with: 'instructor6'
fill_in 'login_password', with: 'something'
click_button 'SIGN IN'
expect(page).to have_content('Incorrect Name/Password')
end
end
describe "Create a review questionnaire", :type => :controller do
it "is able to create a public review questionnaire" do
login_as("instructor6")
visit '/questionnaires/new?model=ReviewQuestionnaire&private=0'
fill_in('questionnaire_name', :with => 'Review 1')
fill_in('questionnaire_min_question_score', :with =>'0')
fill_in('questionnaire_max_question_score', :with => '5')
select('no', :from=> 'questionnaire_private')
click_button "Create"
expect(Questionnaire.where(name: "Review 1")).to exist
end
end
def load_questionnaire
login_as("instructor6")
visit '/questionnaires/new?model=ReviewQuestionnaire&private=0'
fill_in('questionnaire_name', :with => 'Review n')
fill_in('questionnaire_min_question_score', :with =>'0')
fill_in('questionnaire_max_question_score', :with => '5')
select('no', :from=> 'questionnaire_private')
click_button "Create"
end
describe "Create a review question", :type => :controller do
it "is able to create a public review question" do
load_questionnaire
fill_in('question_total_num', :with => '1')
select('Criterion', :from=> 'question_type')
click_button "Add"
expect(page).to have_content('Remove')
first("textarea[placeholder='Edit question content here']").set "Question 1"
click_button "Save review questionnaire"
expect(page).to have_content('All questions has been saved successfully!')
expect(page).to have_content('Question 1')
click_on('Remove')
expect(page).to have_content('You have successfully deleted one question!')
end
end
def load_question
load_questionnaire
fill_in('question_total_num', :with => '1')
select('Criterion', :from=> 'question_type')
click_button "Add"
click_button "Save review questionnaire"
end
describe "Create a review advice", :type => :controller do
it "is able to create a public review advice" do
load_question
click_button "Edit/View advice"
expect(page).to have_content('Edit an existing questionnaire')
first(:css, "textarea[id^='horizontal_'][id$='advice']").set("Advice 1")
click_button "Save and redisplay advice"
expect(page).to have_content('advice was successfully saved')
expect(page).to have_content('Advice 1')
end
end
end
Questionnaire Private
require 'rails_helper'
describe "Questionnaire tests for instructor interface" do
before(:each) do
create(:assignment)
create_list(:participant, 3)
create(:assignment_node)
create(:deadline_type,name:"submission")
create(:deadline_type,name:"review")
create(:deadline_type,name:"resubmission")
create(:deadline_type,name:"rereview")
create(:deadline_type,name:"metareview")
create(:deadline_type,name:"drop_topic")
create(:deadline_type,name:"signup")
create(:deadline_type,name:"team_formation")
create(:deadline_right)
create(:deadline_right, name: 'Late')
create(:deadline_right, name: 'OK')
create(:due_date)
create(:due_date, deadline_type: DeadlineType.where(name: 'review').first, due_at: Time.now + (100*24*60*60))
end
describe "Instructor login" do
it "with valid username and password" do
login_as("instructor6")
visit '/tree_display/list'
expect(page).to have_content("Manage content")
end
it "with invalid username and password" do
visit root_path
fill_in 'login_name', with: 'instructor6'
fill_in 'login_password', with: 'something'
click_button 'SIGN IN'
expect(page).to have_content('Incorrect Name/Password')
end
end
describe "Create a review questionnaire", :type => :controller do
it "is able to create a public review questionnaire" do
login_as("instructor6")
visit '/questionnaires/new?model=ReviewQuestionnaire&private=0'
fill_in('questionnaire_name', :with => 'Review 1')
fill_in('questionnaire_min_question_score', :with =>'0')
fill_in('questionnaire_max_question_score', :with => '5')
select('no', :from=> 'questionnaire_private')
click_button "Create"
expect(Questionnaire.where(name: "Review 1")).to exist
end
end
describe "Create a review questionnaire", :type => :controller do
it "is able to create a private review questionnaire" do
login_as("instructor6")
visit '/questionnaires/new?model=ReviewQuestionnaire&private=1'
fill_in('questionnaire_name', :with => 'Review 1')
fill_in('questionnaire_min_question_score', :with =>'0')
fill_in('questionnaire_max_question_score', :with => '5')
select('yes', :from=> 'questionnaire_private')
click_button "Create"
expect(Questionnaire.where(name: "Review 1")).to exist
end
end
def load_questionnaire
login_as("instructor6")
visit '/questionnaires/new?model=ReviewQuestionnaire&private=0'
fill_in('questionnaire_name', :with => 'Review n')
fill_in('questionnaire_min_question_score', :with =>'0')
fill_in('questionnaire_max_question_score', :with => '5')
select('no', :from=> 'questionnaire_private')
click_button "Create"
end
describe "Create a review question", :type => :controller do
it "is able to create a public review question" do
load_questionnaire
fill_in('question_total_num', :with => '1')
select('Criterion', :from=> 'question_type')
click_button "Add"
expect(page).to have_content('Remove')
first("textarea[placeholder='Edit question content here']").set "Question 1"
click_button "Save review questionnaire"
expect(page).to have_content('All questions has been saved successfully!')
expect(page).to have_content('Question 1')
click_on('Remove')
expect(page).to have_content('You have successfully deleted one question!')
end
end
def load_question
load_questionnaire
fill_in('question_total_num', :with => '1')
select('Criterion', :from=> 'question_type')
click_button "Add"
click_button "Save review questionnaire"
end
describe "Create a review advice", :type => :controller do
it "is able to create a public review advice" do
load_question
click_button "Edit/View advice"
expect(page).to have_content('Edit an existing questionnaire')
first(:css, "textarea[id^='horizontal_'][id$='advice']").set("Advice 1")
click_button "Save and redisplay advice"
expect(page).to have_content('advice was successfully saved')
expect(page).to have_content('Advice 1')
end
end
end
|
require 'spec_helper'
feature 'Signing up:' do
let(:user_email) { generate :user_email }
let(:user_password) { 'démarches-simplifiées-pwd' }
let(:procedure) { create :simple_procedure, :with_service }
scenario 'a new user can sign-up' do
visit commencer_path(path: procedure.path)
click_on 'Créer un compte demarches-simplifiees.fr'
sign_up_with user_email, user_password
expect(page).to have_content "nous avons besoin de vérifier votre adresse #{user_email}"
click_confirmation_link_for user_email
expect(page).to have_content 'Votre compte a été activé'
expect(page).to have_current_path commencer_path(path: procedure.path)
end
context 'when the user makes a typo in their email address' do
let(:procedure) { create :simple_procedure, :with_service }
before do
visit commencer_path(path: procedure.path)
click_on 'Créer un compte demarches-simplifiees.fr'
expect(page).to have_selector('.suspect-email', visible: false)
fill_in 'Email', with: 'bidou@yahoo.rf'
fill_in 'Mot de passe', with: '12345'
end
scenario 'they can accept the suggestion', js: true do
expect(page).to have_selector('.suspect-email', visible: true)
click_on 'Oui'
expect(page).to have_field("Email", :with => 'bidou@yahoo.fr')
expect(page).to have_selector('.suspect-email', visible: false)
end
scenario 'they can discard the suggestion', js: true do
expect(page).to have_selector('.suspect-email', visible: true)
click_on 'Non'
expect(page).to have_field("Email", :with => 'bidou@yahoo.rf')
expect(page).to have_selector('.suspect-email', visible: false)
end
scenario 'they can fix the typo themselves', js: true do
expect(page).to have_selector('.suspect-email', visible: true)
fill_in 'Email', with: 'bidou@yahoo.fr'
blur
expect(page).to have_selector('.suspect-email', visible: false)
end
end
scenario 'a new user can’t sign-up with too short password when visiting a procedure' do
visit commencer_path(path: procedure.path)
click_on 'Créer un compte demarches-simplifiees.fr'
expect(page).to have_current_path new_user_registration_path
sign_up_with user_email, '1234567'
expect(page).to have_current_path user_registration_path
expect(page).to have_content 'Le mot de passe est trop court'
# Then with a good password
sign_up_with user_email, user_password
expect(page).to have_current_path new_user_confirmation_path user: { email: user_email }
expect(page).to have_content "nous avons besoin de vérifier votre adresse #{user_email}"
end
context 'when visiting a procedure' do
let(:procedure) { create :simple_procedure, :with_service }
before do
visit commencer_path(path: procedure.path)
end
scenario 'a new user can sign-up and fill the procedure' do
click_on 'Créer un compte'
expect(page).to have_current_path new_user_registration_path
expect(page).to have_procedure_description(procedure)
sign_up_with user_email, user_password
expect(page).to have_content "nous avons besoin de vérifier votre adresse #{user_email}"
click_confirmation_link_for user_email
expect(page).to have_current_path(commencer_path(path: procedure.path))
expect(page).to have_content 'Votre compte a été activé'
click_on 'Commencer la démarche'
expect(page).to have_current_path identite_dossier_path(procedure.reload.dossiers.last)
expect(page).to have_procedure_description(procedure)
end
end
context 'when a user is not confirmed yet' do
before do
visit commencer_path(path: procedure.path)
click_on 'Créer un compte demarches-simplifiees.fr'
sign_up_with user_email, user_password
end
# Ideally, when signing-in with an unconfirmed account,
# the user would be redirected to the "resend email confirmation" page.
#
# However the check for unconfirmed accounts is made by Warden every time a page is loaded –
# and much earlier than SessionsController#create.
#
# For now only test the default behavior (an error message is displayed).
scenario 'they get an error message' do
visit root_path
click_on 'Connexion'
sign_in_with user_email, user_password
expect(page).to have_content 'Vous devez confirmer votre adresse email pour continuer'
end
end
# Signing up with an address that already has a *confirmed* account must not
# reveal that the account exists; instead a warning email is sent to the owner.
context 'when the user already has a confirmed account' do
before do
create(:user, email: user_email, password: user_password)
end
scenario 'they get a warning email, containing a link to the procedure' do
visit commencer_path(path: procedure.path)
click_on 'Créer un compte'
sign_up_with user_email, user_password
# The same page as for initial sign-ups is displayed, to avoid leaking
# information about the account existence.
expect(page).to have_content "nous avons besoin de vérifier votre adresse #{user_email}"
# A warning email is sent
warning_email = open_email(user_email)
expect(warning_email.body).to have_text('Votre compte existe déjà')
# When clicking the main button, the user has a link to directly sign-in
# for the procedure they were initially starting
click_procedure_sign_in_link_for user_email
expect(page).to have_current_path new_user_session_path
expect(page).to have_procedure_description(procedure)
end
end
end
specs: fix the sign-up-with-unconfirmed-account test
It was exercising the sign-in flow instead of the sign-up flow.
require 'spec_helper'
# End-to-end coverage of the sign-up flow: happy path, email-typo suggestions,
# password validation, and the edge cases of pre-existing (confirmed or
# unconfirmed) accounts.
feature 'Signing up:' do
# Shared fixtures: a fresh email, a valid password, and a published procedure.
let(:user_email) { generate :user_email }
let(:user_password) { 'démarches-simplifiées-pwd' }
let(:procedure) { create :simple_procedure, :with_service }
scenario 'a new user can sign-up' do
visit commencer_path(path: procedure.path)
click_on 'Créer un compte demarches-simplifiees.fr'
sign_up_with user_email, user_password
# A "check your inbox" page is shown until the address is confirmed.
expect(page).to have_content "nous avons besoin de vérifier votre adresse #{user_email}"
click_confirmation_link_for user_email
expect(page).to have_content 'Votre compte a été activé'
# After confirmation the user is sent back to the procedure start page.
expect(page).to have_current_path commencer_path(path: procedure.path)
end
context 'when the user makes a typo in their email address' do
let(:procedure) { create :simple_procedure, :with_service }
before do
visit commencer_path(path: procedure.path)
click_on 'Créer un compte demarches-simplifiees.fr'
# The email-suggestion hint starts out hidden.
expect(page).to have_selector('.suspect-email', visible: false)
fill_in 'Email', with: 'bidou@yahoo.rf'
fill_in 'Mot de passe', with: '12345'
end
scenario 'they can accept the suggestion', js: true do
expect(page).to have_selector('.suspect-email', visible: true)
click_on 'Oui'
# Accepting replaces the typo'd domain with the suggested one.
expect(page).to have_field("Email", :with => 'bidou@yahoo.fr')
expect(page).to have_selector('.suspect-email', visible: false)
end
scenario 'they can discard the suggestion', js: true do
expect(page).to have_selector('.suspect-email', visible: true)
click_on 'Non'
# Discarding keeps the address exactly as typed.
expect(page).to have_field("Email", :with => 'bidou@yahoo.rf')
expect(page).to have_selector('.suspect-email', visible: false)
end
scenario 'they can fix the typo themselves', js: true do
expect(page).to have_selector('.suspect-email', visible: true)
fill_in 'Email', with: 'bidou@yahoo.fr'
blur
expect(page).to have_selector('.suspect-email', visible: false)
end
end
scenario 'a new user can’t sign-up with too short password when visiting a procedure' do
visit commencer_path(path: procedure.path)
click_on 'Créer un compte demarches-simplifiees.fr'
expect(page).to have_current_path new_user_registration_path
sign_up_with user_email, '1234567'
# Too-short password: the registration form is re-rendered with an error.
expect(page).to have_current_path user_registration_path
expect(page).to have_content 'Le mot de passe est trop court'
# Then with a good password
sign_up_with user_email, user_password
expect(page).to have_current_path new_user_confirmation_path user: { email: user_email }
expect(page).to have_content "nous avons besoin de vérifier votre adresse #{user_email}"
end
context 'when visiting a procedure' do
let(:procedure) { create :simple_procedure, :with_service }
before do
visit commencer_path(path: procedure.path)
end
scenario 'a new user can sign-up and fill the procedure' do
click_on 'Créer un compte'
expect(page).to have_current_path new_user_registration_path
expect(page).to have_procedure_description(procedure)
sign_up_with user_email, user_password
expect(page).to have_content "nous avons besoin de vérifier votre adresse #{user_email}"
click_confirmation_link_for user_email
expect(page).to have_current_path(commencer_path(path: procedure.path))
expect(page).to have_content 'Votre compte a été activé'
click_on 'Commencer la démarche'
# Starting the procedure creates a dossier and lands on its identity page.
expect(page).to have_current_path identite_dossier_path(procedure.reload.dossiers.last)
expect(page).to have_procedure_description(procedure)
end
end
context 'when the user is not confirmed yet' do
before do
create(:user, :unconfirmed, email: user_email, password: user_password)
end
scenario 'the email confirmation page is displayed' do
visit commencer_path(path: procedure.path)
click_on 'Créer un compte'
sign_up_with user_email, user_password
# The same page as for initial sign-ups is displayed, to avoid leaking
# information about the account existence.
expect(page).to have_content "nous avons besoin de vérifier votre adresse #{user_email}"
# The confirmation email is sent again
confirmation_email = open_email(user_email)
expect(confirmation_email.body).to have_text('Pour activer votre compte')
end
end
context 'when the user already has a confirmed account' do
before do
create(:user, email: user_email, password: user_password)
end
scenario 'they get a warning email, containing a link to the procedure' do
visit commencer_path(path: procedure.path)
click_on 'Créer un compte'
sign_up_with user_email, user_password
# The same page as for initial sign-ups is displayed, to avoid leaking
# information about the account existence.
expect(page).to have_content "nous avons besoin de vérifier votre adresse #{user_email}"
# A warning email is sent
warning_email = open_email(user_email)
expect(warning_email.body).to have_text('Votre compte existe déjà')
# When clicking the main button, the user has a link to directly sign-in
# for the procedure they were initially starting
click_procedure_sign_in_link_for user_email
expect(page).to have_current_path new_user_session_path
expect(page).to have_procedure_description(procedure)
end
end
end
|
# coding: utf-8
require 'rails_helper'
describe 'Welcome page', js: true do
let (:dances) {[:dance, :box_the_gnat_contra, :call_me].map {|d| FactoryGirl.create(d)}}
it 'has a link to help on filters' do
  # The landing page carries an (icon-only) link to the advanced-search docs.
  help_href = "https://github.com/contradb/contra/blob/master/doc/search.md#advanced-search-on-contradb"
  visit '/'
  expect(page).to have_link('', href: help_href)
end
context 'datatable' do
# A publicly-visible dance with distinct created_at/updated_at timestamps,
# used to exercise which datatable columns are (and are not) rendered.
let (:dance) {FactoryGirl.create(:box_the_gnat_contra, created_at: DateTime.now - 10.years, updated_at: DateTime.now - 1.week, publish: :all)}
it 'displays dance columns' do
dance
visit '/'
expect(page).to have_link(dance.title, href: dance_path(dance))
expect(page).to have_link(dance.choreographer.name, href: choreographer_path(dance.choreographer))
expect(page).to have_text(dance.start_type)
expect(page).to have_text(dance.hook)
expect(page).to have_link(dance.user.name, href: user_path(dance.user))
expect(page).to have_text(dance.created_at.strftime('%Y-%m-%d'))
expect(page).to_not have_text(dance.updated_at.strftime('%Y-%m-%d')) # column invisible by default, it's not hidden, it's simply not there
expect(page).to_not have_css('th', text: 'Sharing') # column invisible by default, it's not hidden, it's simply not there
expect(page).to_not have_css('td', text: 'whole dance') # column invisible by default, it's not hidden, it's simply not there
end
it 'displays in descending created_at order by default' do
dance1 = FactoryGirl.create(:box_the_gnat_contra, title: "The Middle Dance", created_at: DateTime.now - 1.minute)
dance2 = FactoryGirl.create(:box_the_gnat_contra, title: "The First Dance")
dance3 = FactoryGirl.create(:box_the_gnat_contra, title: "The Last Dance", created_at: DateTime.now - 2.minutes)
visit '/'
expect(page).to have_content(dance1.title) # js wait
txt = page.text
# check order dance2 dance1 dance3
# (Regexp#=~ returns the match offset, so comparing offsets checks page order.)
expect((/#{dance2.title}/ =~ txt) < (/#{dance1.title}/ =~ txt)).to eq(true)
expect((/#{dance1.title}/ =~ txt) < (/#{dance3.title}/ =~ txt)).to eq(true)
end
it 'shows only dances visible to current user' do
with_login do |user|
# One's own unpublished dance is visible; another user's sketchbook dance is not.
dance2 = FactoryGirl.create(:box_the_gnat_contra, title: "this dance should be visible", publish: :off, user: user)
dance3 = FactoryGirl.create(:box_the_gnat_contra, title: "this dance should be invisible", publish: :sketchbook)
visit '/'
expect(page).to have_content(dance2.title)
expect(page).to_not have_content(dance3.title)
end
end
# The figure filter UI starts as a single wildcard figure row.
it 'figure filter is initially just one figure set to wildcard' do
visit '/'
expect(page).to have_css("#figure-filter-root>.figure-filter-op")
expect(find("#figure-filter-root>.figure-filter-op").value).to eq('figure')
expect(page).to have_css("#figure-filter-root>.figure-filter-move")
expect(find("#figure-filter-root>.figure-filter-move").value).to eq('*')
end
it "changing figure filter from 'figure' to 'and' installs two subfilters" do
visit '/'
select('and')
# Root 'and' plus two child 'figure' rows = 3 filters, 2 move selects.
expect(page).to have_css('.figure-filter', count: 3)
expect(page).to have_css('.figure-filter-move', count: 2)
end
it "searches for the problematicly named figure \"Rory O'More\" work" do
rory = FactoryGirl.create(:dance_with_a_rory_o_more)
box = FactoryGirl.create(:box_the_gnat_contra)
visit '/'
select "Rory O'More"
expect(page).to_not have_content(box.title) # js wait
expect(page).to have_content(rory.title)
expect(rory.title).to eq("Just Rory")
end
it "'not' filter works" do
dance
only_a_swing = FactoryGirl.create(:dance_with_a_swing)
with_retries do
visit '/'
expect(page).to have_text(only_a_swing.title)
expect(page).to have_text(dance.title)
select('not')
select('swing', match: :first)
expect(page).to_not have_text(only_a_swing.title)
expect(page).to have_text(dance.title) # because it has a figure that's not a swing
end
end
it "'&' and 'progression' filters work" do
  dances
  with_retries do
    visit '/'
    select('&')
    select('slide along set', match: :first)
    all('.figure-filter-op').last.select('progression')
    # Positive match first: this also waits for the js-filtered results to render.
    expect(page).to have_text('The Rendevouz')
    # Only 'The Rendevouz' pairs 'slide along set' with a progression.
    expect(page).to_not have_text('Box the Gnat Contra')
    expect(page).to_not have_text('Call Me')
    # (fix: a second, redundant `have_text('The Rendevouz')` assertion removed)
  end
end
it "'formation' filters work" do
  becket = FactoryGirl.create(:call_me, start_type: 'Becket', title: 'Becket')
  square = FactoryGirl.create(:dance, start_type: 'square dance', title: 'square')
  dances2 = dances + [becket, square]
  visit '/'
  select 'formation'
  select 'improper'
  # Sanity-check the fixtures: these are the four start types filtered below.
  expect(Set.new(dances2.map(&:start_type))).to eq(Set['improper', 'Becket ccw', 'Becket', 'square dance'])
  expect(page).to_not have_text('Call Me')
  expect(page).to_not have_css('#dances-table', text: 'Becket')
  # Fix: was `text: 'sqaure'` (typo) — the dance is titled 'square', so the
  # misspelled assertion could never match anything and passed vacuously.
  expect(page).to_not have_css('#dances-table', text: 'square')
  dances2.each do |dance|
    expect(page).to have_link(dance.title) if dance.start_type == 'improper'
  end
  # 'Becket *' matches both plain 'Becket' and 'Becket ccw' formations.
  select 'Becket *'
  expect(page).to have_link(becket.title)
  expect(page).to have_link('Call Me')
  dances2.each do |dance|
    if dance.title.in?([becket.title, 'Call Me'])
      expect(page).to have_link(dance.title)
    else
      expect(page).to_not have_link(dance.title)
    end
  end
  # 'Becket cw' matches the unqualified 'Becket' start type only.
  select 'Becket cw'
  expect(page).to_not have_link('Call Me')
  expect(page).to have_link(becket.title)
  dances2.each do |dance|
    if dance.title.in?([becket.title])
      expect(page).to have_link(dance.title)
    else
      expect(page).to_not have_link(dance.title)
    end
  end
  select 'Becket ccw'
  expect(page).to_not have_link(becket.title)
  expect(page).to have_link('Call Me')
  dances2.each do |dance|
    if dance.title.in?(['Call Me'])
      expect(page).to have_link(dance.title)
    else
      expect(page).to_not have_link(dance.title)
    end
  end
  # 'everything else' catches formations outside the known contra set.
  select 'everything else'
  expect(page).to_not have_link('Call Me')
  expect(page).to have_link(square.title)
  dances2.each do |dance|
    if dance.title.in?([square.title])
      expect(page).to have_link(dance.title)
    else
      expect(page).to_not have_link(dance.title)
    end
  end
end
describe 'figure filter machinantions' do
# Shared setup for the filter-machination tests: create the standard dances,
# load the page, and reduce the filter tree to (and (figure '*')).
def setup_and_filter
dances
visit '/'
# get down to (and (filter '*')):
select('and')
all('.figure-filter-remove').last.click
end
it 'the precondition of all these other tests is fulfilled' do
with_retries do
setup_and_filter
expect(page).to have_css('.figure-filter', count: 2)
expect(page).to have_css('.figure-filter-move', count: 1)
expect(page).to have_css("#figure-filter-root>.figure-filter-op") # js wait for...
expect(find("#figure-filter-root>.figure-filter-op").value).to eq('and')
expect(find("#figure-filter-root>.figure-filter .figure-filter-op").value).to eq('figure')
expect(find("#figure-filter-root>.figure-filter .figure-filter-move").value).to eq('*')
expect(page).to have_content('Call Me')
expect(page).to have_content('Box the Gnat Contra')
expect(page).to have_content('The Rendevouz')
end
end
it "changing move changes values" do
with_retries do
setup_and_filter
select('circle')
expect(page).to have_content('The Rendevouz')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to have_content('Call Me')
end
end
it "clicking 'add and' inserts a figure filter that responds to change events" do
with_retries do
setup_and_filter
expect(page).to have_css('.figure-filter', count: 2)
expect(page).to have_css('.figure-filter-move', count: 1)
click_button('add and')
expect(page).to have_css('.figure-filter', count: 3)
expect(page).to have_css('.figure-filter-move', count: 2)
all('.figure-filter-move').first.select('chain')
all('.figure-filter-move').last.select('circle')
expect(page).to have_content('Call Me')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to_not have_content('The Rendevouz')
end
end
it "changing from 'and' to 'figure' purges subfilters and installs a new working move select" do
with_retries do
setup_and_filter
select('circle') # rendevous and call me
expect(page).to have_css('.figure-filter', count: 2)
expect(page).to have_css('.figure-filter-move', count: 1)
first('.figure-filter-op').select('figure')
select('chain')
expect(page).to have_css('.figure-filter', count: 1)
expect(page).to have_css('.figure-filter-move', count: 1)
expect(page).to_not have_content('The Rendevouz')
expect(page).to have_content('Box the Gnat Contra')
expect(page).to have_content('Call Me')
end
end
it "changing from 'and' to 'number of' installs a new working number filter" do
with_retries do
setup_and_filter
select('circle') # rendevous and call me
first('.figure-filter-op').select('number of')
expect(page).to have_css('.figure-filter', count: 2)
expect(page).to have_css('.figure-filter-move', count: 1)
expect(page).to have_css('.figure-filter-count-comparison', count: 1)
expect(page).to have_css('.figure-filter-count-number', count: 1)
select('0')
select('>')
expect(page).to_not have_content('Processing')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to have_content('Call Me')
expect(page).to have_content('The Rendevouz')
select('≥')
select('2')
expect(page).to_not have_content('Processing')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
expect(page).to have_content('The Rendevouz')
end
end
it "change from an empty 'or' to 'no'" do
with_retries do
setup_and_filter
all('.figure-filter-op').last.select('or');
expect(page).to have_css('.figure-filter-op', count: 4)
expect(page).to have_css('.figure-filter', count: 4)
expect(page).to have_css('.figure-filter-add', count: 2)
expect(page).to have_css('.figure-filter-move', count: 2)
all('.figure-filter-remove').last.click
expect(page).to have_css('.figure-filter', count: 3) # css wait
all('.figure-filter-remove').last.click
expect(page).to have_css('.figure-filter', count: 2) # css wait
all('.figure-filter-op').last.select('no');
expect(page).to have_css('.figure-filter', count: 3) # <- the main point here
end
end
it "change from binary 'and' to 'no'" do
with_retries do
setup_and_filter
click_button('add and')
all('.figure-filter-move').first.select('chain')
all('.figure-filter-move').last.select('circle')
expect(page).to_not have_content('The Rendevouz')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to have_content('Call Me')
all('.figure-filter-op').first.select('no'); # have no chain
expect(page).to have_content('The Rendevouz')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
end
end
it "change from 'figure' to 'no'" do
with_retries do
setup_and_filter
# now we're ['and', ['figure', '*']]
first('.figure-filter-op').select('figure')
# now we're just ['figure', '*']
select('no')
# now we're ['no', ['figure', '*']]
select('circle')
# now we're ['no', ['figure', 'circle']]
expect(page).to_not have_content('The Rendevouz')
expect(page).to have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
end
end
it "change from 'figure' to 'or'" do
with_retries do
setup_and_filter
all('.figure-filter-op').last.select('or')
expect(page).to have_css('.figure-filter', count: 4)
expect(page).to have_css('.figure-filter-add', count: 2)
expect(find("#figure-query-buffer", visible: false).value).to eq('["and",["or",["figure","*"],["figure","*"]]]')
end
end
it "it adds/removes 'add' button depending on arity of the filter, and 'add' button works" do
with_retries do
setup_and_filter
expect(page).to have_css('.figure-filter-add', count: 1)
all('.figure-filter-op').first.select('no')
expect(page).to have_css('.figure-filter-add', count: 0)
all('.figure-filter-op').first.select('and')
expect(page).to have_css('.figure-filter-add', count: 1)
expect(page).to have_css('.figure-filter', count: 3)
click_button('add and')
expect(page).to have_css('.figure-filter', count: 4)
end
end
describe 'filter remove button' do
it "root filter does not have a remove button" do
with_retries do
setup_and_filter
expect(page).to_not have_css('#figure-filter-root > button.figure-filter-remove')
end
end
it "initial subfilter has a working remove button" do
with_retries do
setup_and_filter
expect(page).to have_css('.figure-filter > button.figure-filter-remove', count: 1)
find('.figure-filter-remove').click
expect(page).to have_css('.figure-filter', count: 1)
end
end
it "another subfilter has a working remove button" do
with_retries do
setup_and_filter
select('circle')
click_button('add and') # adds a '*'
expect(page).to have_css('.figure-filter > button.figure-filter-remove', count: 2)
all('.figure-filter-remove').last.click
expect(page).to have_css('.figure-filter', count: 2)
expect(find("#figure-query-buffer", visible: false).value).to eq('["and",["figure","circle"]]')
end
end
it "changing my op still allows my remove button" do # this was a bug at one point
with_retries do
setup_and_filter
all('.figure-filter-op').last.select('or')
expect(page).to have_css('#figure-filter-root > .figure-filter > .figure-filter-remove')
expect(page).to have_css('.figure-filter-remove', count: 3)
end
end
it "changing my op removes illegal remove buttons among my children, and adds them back in when they are legal" do
with_retries do
setup_and_filter
first('.figure-filter-op').select('no')
# [no, [figure *]]
expect(page).to_not have_css('.figure-filter-remove') # remove illegal X buttons
first('.figure-filter-op').select('and')
expect(page).to have_css('#figure-filter-root > .figure-filter > .figure-filter-remove') # re-add X button
expect(page).to have_css('.figure-filter-remove', count: 2)
end
end
end
describe 'figure query sentence' do
# "figure: chain" => "dances with a chain"
# "no (figure: chain)" => "dances with no chain"
# "no ((figure: chain) or (figure: hey))" => "dances with no chain or hey"
it 'works with precondition' do
with_retries do
setup_and_filter
expect(page).to have_content('dances with any figure')
end
end
end
end
describe 'figure ... button' do
it "is visible initially, when figure is 'any figure'" do
visit '/'
expect(page).to have_button('...')
end
it 'changing figure filter hides this one but creates two more' do
visit '/'
select('then')
expect(page).to have_button('...', count: 2)
end
it "clicking '...' toggles 'ellipsis-expanded' class" do
visit '/'
select('chain')
expect(page).to_not have_css('.figure-filter-ellipsis.ellipsis-expanded')
click_button '...'
expect(page).to have_css('.figure-filter-ellipsis.ellipsis-expanded')
click_button '...'
expect(page).to_not have_css('.figure-filter-ellipsis.ellipsis-expanded')
end
context 'accordion' do
it 'lurks invisibly' do
visit '/'
expect(page).to_not have_css('.figure-filter-accordion')
expect(page).to have_css('.figure-filter-accordion', visible: false)
end
it 'pops forth when clicked' do
visit '/'
select('chain')
expect(page).to have_css('.figure-filter-accordion', visible: false)
click_button('...')
expect(page).to have_css('.figure-filter-accordion')
end
it "circle 4 places finds only 'The Rendevouz'" do
with_retries do
visit '/'
dances
select('circle')
click_button('...')
select('4 places')
expect(page).to have_content('The Rendevouz') # has circle left 3 & 4 places
expect(page).to_not have_content('Box the Gnat Contra') # no circles
expect(page).to_not have_content('Call Me') # has circle left 3 places
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","circle","*","360","*"]')
end
end
it "circle right finds only a dance with circle right" do
with_retries do
visit '/'
right = FactoryGirl.create(:dance_with_a_circle_right)
dances
select('circle')
click_button('...')
choose('right')
expect(page).to_not have_content('The Rendevouz') # has circle left 3 & 4 places
expect(page).to_not have_content('Box the Gnat Contra') # no circles
expect(page).to_not have_content('Call Me') # has circle left 3 places
expect(page).to have_content(right.title) # has circle right
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","circle","false","*","*"]')
end
end
it "A slightly different query is sent if the ... is clicked" do
dances
with_retries do
visit '/'
select('circle')
expect(page).to have_content('The Rendevouz') # has circle left 3 & 4 places
expect(page).to_not have_content('Box the Gnat Contra') # no circles
expect(page).to have_content('Call Me') # has circle left 3 places
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","circle"]') # first query
click_button('...')
expect(page).to have_content('The Rendevouz') # has circle left 3 & 4 places
expect(page).to_not have_content('Box the Gnat Contra') # no circles
expect(page).to have_content('Call Me') # has circle left 3 places
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","circle","*","*","*"]') # second query
end
end
it 'circle has an angle select box with the right options' do
with_retries do
visit '/'
select('circle')
click_button('...')
angles = JSLibFigure.angles_for_move('circle')
too_small_angle = angles.min - 90
too_big_angle = angles.max + 90
expect(page).to_not have_css("#figure-filter-root select option", text: JSLibFigure.degrees_to_words(too_small_angle, 'circle'))
expect(page).to_not have_css("#figure-filter-root select option", text: JSLibFigure.degrees_to_words(too_big_angle, 'circle'))
angles.each do |angle|
expect(page).to have_css("#figure-filter-root select option[value='#{angle}']", text: JSLibFigure.degrees_to_words(angle, 'circle'))
end
expect(page).to have_css("#figure-filter-root .figure-filter-accordion select option[value='*']")
end
end
it "swing for 8 doesn't find 'The Rendevouz', which features only long swings" do
dances
with_retries do
visit '/'
select('swing', match: :first)
click_button('...')
select('8', match: :prefer_exact) # '8' is in the menu twice, and also in 'figure 8'
expect(page).to_not have_content('The Rendevouz') # has circle left 3 & 4 places
expect(page).to have_content('Box the Gnat Contra') # no circles
expect(page).to have_content('Call Me') # has circle left 3 places
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","swing","*","*","8"]')
end
end
it "non-balance & swing doesn't find 'The Rendevouz', which features only balance & swings" do
dances
with_retries do
visit '/'
select('swing', match: :first)
click_button('...')
choose('none')
expect(page).to_not have_content('The Rendevouz') # has circle left 3 & 4 places
expect(page).to have_content('Box the Gnat Contra') # no circles
expect(page).to have_content('Call Me') # has circle left 3 places
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","swing","*","none","*"]')
end
end
it 'labels appear on chooser elements' do
visit '/'
with_retries do
click_button('...')
select('swing', match: :first) # swing uses simple label system
expect(page).to have_css('.chooser-label-text', text: 'who')
expect(page).to have_css('.chooser-label-text', text: 'prefix')
expect(page).to have_css('.chooser-label-text', text: 'beats')
select('allemande orbit') # allemande orbit uses fancier label system
expect(page).to have_css('.chooser-label-text', text: 'who')
expect(page).to have_css('.chooser-label-text', text: 'allemande')
expect(page).to have_css('.chooser-label-text', text: 'inner')
expect(page).to have_css('.chooser-label-text', text: 'outer')
expect(page).to have_css('.chooser-label-text', text: 'for')
end
end
it "allemande with ladles finds only 'Box the Gnat'" do
dances
with_retries do
visit '/'
allemande = FactoryGirl.create(:dance_with_a_gentlespoons_allemande_left_once)
select('allemande')
click_button('...')
select('ladles')
expect(page).to_not have_content('The Rendevouz')
expect(page).to have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
expect(page).to_not have_content(allemande.title)
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","allemande","ladles","*","*","*"]')
end
end
it "allemande has the right dancer chooser menu entries" do
dances
with_retries do
visit '/'
select('allemande')
click_button('...')
expect(page).to have_css("option[value='ladles']")
expect(page).to have_css("option[value='gentlespoons']")
expect(page).to have_css("option[value='neighbors']")
expect(page).to have_css("option[value='partners']")
expect(page).to have_css("option[value='same roles']")
expect(page).to have_css("option[value='shadows']")
expect(page).to have_css("option[value='ones']")
expect(page).to have_css("option[value='twos']")
expect(page).to have_css("option[value='first corners']")
expect(page).to have_css("option[value='second corners']")
expect(page).to have_css("option[value='shadows']")
expect(page).to_not have_css("option[value='prev neighbors']")
expect(page).to_not have_css("option[value='next neighbors']")
expect(page).to_not have_css("option[value='2nd neighbors']")
expect(page).to_not have_css("option[value='3rd neighbors']")
expect(page).to_not have_css("option[value='4th neighbors']")
expect(page).to_not have_css("option[value='1st shadows']")
expect(page).to_not have_css("option[value='2nd shadows']")
end
end
it "allemande with allemande left finds only 'Just Allemande'" do
dances
with_retries do
visit '/'
allemande = FactoryGirl.create(:dance_with_a_gentlespoons_allemande_left_once)
select('allemande')
click_button('...')
choose('left', exact: true)
expect(page).to_not have_content('The Rendevouz')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
expect(page).to have_content(allemande.title)
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","allemande","*","false","*","*"]')
end
end
it "allemande once around works" do
dances
with_retries do
visit '/'
allemande = FactoryGirl.create(:dance_with_a_gentlespoons_allemande_left_once)
select('allemande')
click_button('...')
select('once')
expect(page).to_not have_content('The Rendevouz')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
expect(page).to have_content(allemande.title)
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","allemande","*","*","360","*"]')
end
end
it "text input keywords work" do
dances
with_retries do
visit '/'
apple = FactoryGirl.create(:dance_with_a_custom, custom_text: 'apple', title: 'just apple')
banana = FactoryGirl.create(:dance_with_a_custom, custom_text: 'banana', title: 'just banana')
orange = FactoryGirl.create(:dance_with_a_custom, custom_text: 'orange', title: 'just orange')
apple_banana = FactoryGirl.create(:dance_with_a_custom, custom_text: 'apple banana', title: 'apple_banana')
select('custom')
click_button('...')
find(:css, "input.chooser-argument[type=string]").set('apple orange')
blur
dances.each do |dance|
expect(page).to_not have_content(dance.title)
end
expect(page).to have_content(apple.title)
expect(page).to_not have_content(banana.title)
expect(page).to have_content(orange.title)
expect(page).to have_content(apple_banana.title)
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","custom","apple orange","*"]')
end
end
it "wrist grip filter works" do
dances
with_retries do
visit '/'
grip = FactoryGirl.create(:dance_with_a_wrist_grip_star)
select('star')
click_button('...')
select('unspecified')
expect(page).to_not have_content('The Rendevouz')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to have_content('Call Me')
expect(page).to_not have_content(grip.title)
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","star","*","*","","*"]')
end
end
it "facing filter works" do
fb = FactoryGirl.create(:dance_with_a_down_the_hall, march_facing: 'forward then backward')
f = FactoryGirl.create(:dance_with_a_down_the_hall, march_facing: 'forward')
b = FactoryGirl.create(:dance_with_a_down_the_hall, march_facing: 'backward')
with_retries do
visit '/'
select('down the hall')
click_button('...')
select('forward then backward')
expect(page).to_not have_content(f.title)
expect(page).to_not have_content(b.title)
expect(page).to have_content(fb.title)
end
end
it "down the hall ender filter works" do
ta = FactoryGirl.create(:dance_with_a_down_the_hall, down_the_hall_ender: 'turn-alone', title: 'dth_alone')
tc = FactoryGirl.create(:dance_with_a_down_the_hall, down_the_hall_ender: 'turn-couple', title: 'dth_couples')
circle = FactoryGirl.create(:dance_with_a_down_the_hall, down_the_hall_ender: 'circle', title: 'dth_circle')
unspec = FactoryGirl.create(:dance_with_a_down_the_hall, down_the_hall_ender: '', title: 'dth_unspec')
with_retries(15) do
visit '/'
select('down the hall')
click_button('...')
select('turn as a couple')
# select('turn alone') # hard because multiple
select('bend into a ring')
expect(page).to_not have_content(tc.title)
expect(page).to_not have_content(unspec.title)
expect(page).to_not have_content(ta.title)
expect(page).to have_content(circle.title)
end
end
it "half_or_full filter works" do
poussette = FactoryGirl.create(:dance_with_a_full_poussette)
dances
with_retries do
visit '/'
select('poussette')
click_button('...')
choose('full')
dances.each do |dance|
expect(page).to_not have_content(dance.title)
end
expect(page).to have_content(poussette.title)
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","poussette","1","*","*","*","*"]')
end
end
it "hey_length filter works" do
hey_dances = %w(ladles%%1 half ladles%%2 full).map {|hey_length| FactoryGirl.create(:dance_with_a_hey, hey_length: hey_length)}
hey_lengths = ['less than half',
'half',
'between half and full',
'full']
with_retries do
visit '/'
select('hey')
click_button('...')
hey_dances.each_with_index do |dance, i|
hey_length = hey_lengths[i]
select(hey_length)
hey_dances.each do |dance2|
if dance == dance2
expect(page).to have_content(dance2.title)
else
expect(page).to_not have_content(dance2.title)
end
end
expect(find("#figure-query-buffer", visible: false).value).to eq(%{["figure","hey","*","*","*",#{hey_length.inspect},"*","*","*","*","*","*"]})
end
end
end
it 'aliases are subsets' do
do_si_do = FactoryGirl.create(:dance_with_a_do_si_do)
see_saw = FactoryGirl.create(:dance_with_a_see_saw)
with_retries do
visit '/'
select('see saw')
expect(page).to have_content(see_saw.title)
expect(page).to_not have_content(do_si_do.title)
click_button('...')
choose('*')
expect(page).to have_content(do_si_do.title)
expect(page).to have_content(see_saw.title)
expect(page).to have_content("Showing dances with a * do si do *")
expect(find(".figure-filter-move").value).to eq('do si do')
choose('left')
expect(page).to have_content(see_saw.title)
expect(page).to_not have_content(do_si_do.title)
expect(find(".figure-filter-move").value).to eq('see saw')
expect(page).to have_content("Showing dances with a * see saw *")
end
end
end
end
# Dialect support: figure names and dancer terms are displayed through the
# user's dialect (test dialect maps allemande→almond, gyre→darcy,
# ladles→ravens), while queries are still sent with canonical terms.
describe 'dialect' do
it 'figure filter move' do
expect_any_instance_of(WelcomeController).to receive(:dialect).and_return(JSLibFigure.test_dialect)
with_retries do
visit '/'
dances
# Option labels are dialect-transformed, but option values stay canonical.
expect(page).to_not have_css('option', text: exactly('allemande'))
expect(page).to have_css('option[value=allemande]', text: exactly('almond'))
expect(page).to_not have_css('option', text: exactly('gyre'))
expect(page).to have_css('option[value=gyre]', text: exactly('darcy'))
select('almond')
expect(page).to have_content('Box the Gnat Contra')
expect(page).to_not have_content('The Rendevouz')
expect(page).to_not have_content('Call Me')
end
end
it 'figure filter dancers' do
expect_any_instance_of(WelcomeController).to receive(:dialect).and_return(JSLibFigure.test_dialect)
with_retries do
visit '/'
dances
allemande = FactoryGirl.create(:dance_with_a_gentlespoons_allemande_left_once)
expect(page).to_not have_content('Processing...')
select('almond')
click_button('...')
select('ravens')
expect(page).to_not have_content('The Rendevouz')
expect(page).to have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
expect(page).to_not have_content(allemande.title)
# The query buffer carries canonical terms ('allemande', 'ladles'), not dialect.
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","allemande","ladles","*","*","*"]')
end
end
it "displays dialect-transformed hooks" do
expect_any_instance_of(DancesController).to receive(:dialect).and_return(JSLibFigure.test_dialect)
dance2 = FactoryGirl.create(:dance, hook: 'hook allemande ladles hook')
visit '/'
expect(page).to have_content('hook almond ravens hook')
expect(page).to_not have_content(dance2.hook)
end
end
describe 'back button' do
# End-to-end back-button test: build a compound filter (swing AND detailed
# allemande AND improper formation), click through to a dance, go back, and
# verify the entire filter UI state (ops, choosers, accordion, results) is restored.
it 'works' do
dances
visit '/'
select('and')
expect(page).to have_css('.figure-filter-move', count: 2) # js wait
all('.figure-filter-move').first.select('swing', match: :first)
all('.figure-filter-move').last.select('allemande')
all('.figure-filter-ellipsis').last.click
select('ladles') # ladles allemande right 1½ for '*' beats
choose('right')
select('1½')
click_button('add and')
expect(page).to have_css('.figure-filter-op', count: 4) # js wait
all('.figure-filter-op').last.select('formation')
select('improper')
click_link('Box the Gnat Contra')
expect(page).to have_content('partners swing') # wait for page to load
page.go_back
move_selector = '.figure-filter-move'
expect(page).to have_css(move_selector, count: 2)
expect(all(move_selector).first.value).to eq('swing')
expect(all(move_selector).last.value).to eq('allemande')
expect(page).to_not have_content('Processing...')
expect(page).to have_css('.figure-filter-ellipsis.ellipsis-expanded', count: 1)
expect(page).to have_css('.figure-filter-accordion', count: 1, visible: true)
expect(page).to have_css('.chooser-argument', count: 4)
expect(all(".chooser-argument")[0].value).to eq('ladles')
expect(find(".chooser-argument [type=radio][value='*']")).to_not be_checked
expect(find(".chooser-argument [type=radio][value='true']")).to be_checked
expect(find(".chooser-argument [type=radio][value='false']")).to_not be_checked
# 540 degrees corresponds to the restored '1½' turns selection (1.5 * 360)
expect(all(".chooser-argument")[2].value.to_s).to eq(540.to_s)
expect(all(".chooser-argument")[3].value).to eq('*')
expect(page).to have_css('.figure-filter-formation', count: 1)
op_values = find_all('.figure-filter-op').map(&:value)
expect(op_values.count('and')).to eq(1)
expect(op_values.count('figure')).to eq(2)
expect(op_values.count('formation')).to eq(1)
expect(page).to have_content('Box the Gnat Contra')
expect(page).to_not have_content('The Rendevouz')
expect(page).to_not have_content('Call Me')
expect(page).to have_content('Showing dances with a swing and a ladles allemande right 1½ and an improper formation.')
end
# Smoke test: every top-level filter op should still render results after a
# page refresh (reload used to break some filters, leaving an empty table).
it 'all filters at least load when back-ed' do
dances
visit '/'
# Collect the human-readable filter names from the op <select>. Option
# innerHTML is HTML-escaped, so decode "&amp;" back to "&" (there is a
# literal '&' filter in the menu). The previous `.gsub('&', '&')` was a
# no-op and left the escaped name unselectable.
filters = page.find('.figure-filter-op').all('option').map {|elt| elt['innerHTML'].gsub('&amp;', '&') }
filters.each do |filter|
page.select(filter, match: :first)
expect(page).to have_text(/Showing \d+ to \d+ of \d+ entries/)
page.refresh
# when filters are broken on reload, it has an empty table and doesn't show this text:
expect(page).to have_text(/Showing \d+ to \d+ of \d+ entries/)
end
end
# Back-button restore of the 'number of' filter: comparison operator, count,
# and move selections should all survive navigating away and back.
it 'number of' do
dances
visit '/'
select('number of')
select('contra corners') # clear out search results
expect(page).to_not have_content('Processing')
expect(page).to have_css('.figure-filter-op', count: 2)
expect(page).to have_css('.figure-filter-move', count: 1)
expect(page).to have_css('.figure-filter-count-comparison', count: 1)
expect(page).to have_css('.figure-filter-count-number', count: 1)
select('>')
select('1')
select('circle')
# these are flaky and not really important to the back button testing:
# expect(page).to_not have_content('Processing')
# expect(page).to_not have_content('Box the Gnat Contra')
# expect(page).to_not have_content('Call Me')
# expect(page).to have_content('The Rendevouz')
click_link('The Rendevouz')
expect(page).to have_css('h1', text: 'The Rendevouz') # wait for page to load
page.go_back
expect(page).to have_css('.figure-filter-op', count: 2)
expect(page).to have_css('.figure-filter-move', count: 1)
expect(page).to have_css('.figure-filter-count-comparison', count: 1)
expect(page).to have_css('.figure-filter-count-number', count: 1)
expect(page).to_not have_content('Processing')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
expect(page).to have_content('The Rendevouz')
# the restored widgets keep the values chosen before navigating away
expect(find('.figure-filter-count-comparison').value).to eq('>')
expect(find('.figure-filter-count-number').value).to eq('1')
expect(find('.figure-filter-move').value).to eq('circle')
end
# Back-button restore of an 'or' filter's decorations: [x] remove buttons,
# the conjunction label, and a still-working 'add or' button.
it 'decorates subfilters with [x] buttons, conjunctions, and "add or"' do
dances
visit '/'
select('or')
click_link(dances.first.title)
expect(page).to have_css('h1', text: dances.first.title)
page.go_back
expect(page).to have_css('.figure-filter[data-op=or]', count: 2)
expect(page).to have_css('.figure-filter-remove', count: 2)
expect(page).to have_css('button.figure-filter-add', text: 'add or')
click_button('add or')
expect(page).to have_css('.figure-filter-remove', count: 3)
all('.figure-filter-remove').last.click
expect(page).to have_css('.figure-filter-remove', count: 2)
end
end
# Column visibility toggles and the content of the optional columns.
describe 'columns' do
# Default-visible columns have an active toggle that hides them; default-hidden
# columns have an inactive toggle that shows them. Each click flips both the
# table header and the button's active/inactive class.
it "Clicking vis toggles buttons cause columns to disappear" do
dances
visit '/'
%w[Title Choreographer Formation Hook User Entered].each do |col|
expect(page).to have_css('#dances-table th', text: col)
expect(page).to have_css('button.toggle-vis-active', text: col)
expect(page).to_not have_css('button.toggle-vis-inactive', text: col)
click_button col
expect(page).to_not have_css('#dances-table th', text: col)
expect(page).to have_css('button.toggle-vis-inactive', text: col)
expect(page).to_not have_css('button.toggle-vis-active', text: col)
click_button col
expect(page).to have_css('#dances-table th', text: col)
expect(page).to have_css('button.toggle-vis-active', text: col)
expect(page).to_not have_css('button.toggle-vis-inactive', text: col)
end
%w[Updated Sharing Figures].each do |col|
expect(page).to_not have_css('#dances-table th', text: col)
expect(page).to_not have_css('button.toggle-vis-active', text: col)
expect(page).to have_css('button.toggle-vis-inactive', text: col)
click_button col
expect(page).to have_css('#dances-table th', text: col)
expect(page).to_not have_css('button.toggle-vis-inactive', text: col)
expect(page).to have_css('button.toggle-vis-active', text: col)
click_button col
expect(page).to_not have_css('#dances-table th', text: col)
expect(page).to have_css('button.toggle-vis-inactive', text: col)
expect(page).to_not have_css('button.toggle-vis-active', text: col)
end
end
include DancesHelper # provides dance_publish_word, used below
# Each publish setting (off / sketchbook / all) renders its word in the
# Sharing column of the owning user's row.
it 'published column cells' do
with_login do |user|
dances.each_with_index do |dance, i|
publish = [:off, :sketchbook, :all][i]
publish_string = dance_publish_word(publish)
dance.update!(publish: publish, user: user)
visit '/'
click_button 'Sharing'
expect(page).to have_css('tr', text: /#{dance.title}.*#{publish_string}/)
end
end
end
# The hidden Figures column shows per-dance matched figures ('whole dance'
# when no figure filter narrows the match).
it 'figures column' do
dances
visit '/'
expect(page).to_not have_content('whole dance')
click_button 'Figures'
expect(page).to have_content('whole dance', count: 3)
select('circle')
expect(page).to have_css('tr', text: /The Rendevouz.*circle left 4 places\ncircle left 3 places/)
expect(page).to have_css('tr', text: /Call Me.*circle left 3 places/)
end
end
end
private
# Build an anchored regexp that matches +string+ as the entire text
# (used to assert on exact <option> labels rather than substrings).
def exactly(string)
  Regexp.new('\A' + string + '\z')
end
end
# comment out broken specs for #611
# coding: utf-8
require 'rails_helper'
describe 'Welcome page', js: true do
# Seed dances shared across examples: a generic :dance plus two named contras.
let (:dances) {[:dance, :box_the_gnat_contra, :call_me].map {|d| FactoryGirl.create(d)}}
# The (icon-only, hence blank-text) help link should point at the
# advanced-search documentation.
it 'has a link to help on filters' do
visit '/'
expect(page).to have_link('', href: "https://github.com/contradb/contra/blob/master/doc/search.md#advanced-search-on-contradb")
end
context 'datatable' do
# A publicly-shared dance with distinct created_at/updated_at for column tests.
let (:dance) {FactoryGirl.create(:box_the_gnat_contra, created_at: DateTime.now - 10.years, updated_at: DateTime.now - 1.week, publish: :all)}
# Default columns show title/choreographer/formation/hook/user/created date;
# Updated, Sharing, and Figures columns are absent (not merely hidden).
it 'displays dance columns' do
dance
visit '/'
expect(page).to have_link(dance.title, href: dance_path(dance))
expect(page).to have_link(dance.choreographer.name, href: choreographer_path(dance.choreographer))
expect(page).to have_text(dance.start_type)
expect(page).to have_text(dance.hook)
expect(page).to have_link(dance.user.name, href: user_path(dance.user))
expect(page).to have_text(dance.created_at.strftime('%Y-%m-%d'))
expect(page).to_not have_text(dance.updated_at.strftime('%Y-%m-%d')) # column invisible by default, it's not hidden, it's simply not there
expect(page).to_not have_css('th', text: 'Sharing') # column invisible by default, it's not hidden, it's simply not there
expect(page).to_not have_css('td', text: 'whole dance') # column invisible by default, it's not hidden, it's simply not there
end
# Newest dances come first: compare the positions of the three titles in the
# rendered page text (=~ returns the match index).
it 'displays in descending created_at order by default' do
dance1 = FactoryGirl.create(:box_the_gnat_contra, title: "The Middle Dance", created_at: DateTime.now - 1.minute)
dance2 = FactoryGirl.create(:box_the_gnat_contra, title: "The First Dance")
dance3 = FactoryGirl.create(:box_the_gnat_contra, title: "The Last Dance", created_at: DateTime.now - 2.minutes)
visit '/'
expect(page).to have_content(dance1.title) # js wait
txt = page.text
# check order dance2 dance1 dance3
expect((/#{dance2.title}/ =~ txt) < (/#{dance1.title}/ =~ txt)).to eq(true)
expect((/#{dance1.title}/ =~ txt) < (/#{dance3.title}/ =~ txt)).to eq(true)
end
# Visibility rules: a user sees their own unpublished dances but not other
# users' sketchbook dances.
it 'shows only dances visible to current user' do
with_login do |user|
dance2 = FactoryGirl.create(:box_the_gnat_contra, title: "this dance should be visible", publish: :off, user: user)
dance3 = FactoryGirl.create(:box_the_gnat_contra, title: "this dance should be invisible", publish: :sketchbook)
visit '/'
expect(page).to have_content(dance2.title)
expect(page).to_not have_content(dance3.title)
end
end
# Initial state: a single 'figure' filter with the wildcard move.
it 'figure filter is initially just one figure set to wildcard' do
visit '/'
expect(page).to have_css("#figure-filter-root>.figure-filter-op")
expect(find("#figure-filter-root>.figure-filter-op").value).to eq('figure')
expect(page).to have_css("#figure-filter-root>.figure-filter-move")
expect(find("#figure-filter-root>.figure-filter-move").value).to eq('*')
end
it "changing figure filter from 'figure' to 'and' installs two subfilters" do
visit '/'
select('and')
expect(page).to have_css('.figure-filter', count: 3)
expect(page).to have_css('.figure-filter-move', count: 2)
end
# Regression: the apostrophe in "Rory O'More" must not break select/search.
it "searches for the problematicly named figure \"Rory O'More\" work" do
rory = FactoryGirl.create(:dance_with_a_rory_o_more)
box = FactoryGirl.create(:box_the_gnat_contra)
visit '/'
select "Rory O'More"
expect(page).to_not have_content(box.title) # js wait
expect(page).to have_content(rory.title)
expect(rory.title).to eq("Just Rory")
end
# 'not' keeps any dance having at least one figure that is not the named move.
it "'not' filter works" do
dance
only_a_swing = FactoryGirl.create(:dance_with_a_swing)
with_retries do
visit '/'
expect(page).to have_text(only_a_swing.title)
expect(page).to have_text(dance.title)
select('not')
select('swing', match: :first)
expect(page).to_not have_text(only_a_swing.title)
expect(page).to have_text(dance.title) # because it has a figure that's not a swing
end
end
# '&' requires both subfilters to match the *same* figure; pairing
# 'slide along set' with 'progression' should match only The Rendevouz.
# (Removed an assertion that duplicated the have_text('The Rendevouz') check.)
it "'&' and 'progression' filters work" do
dances
with_retries do
visit '/'
select('&')
select('slide along set', match: :first)
all('.figure-filter-op').last.select('progression')
expect(page).to have_text('The Rendevouz')
expect(page).to_not have_text('Box the Gnat Contra')
expect(page).to_not have_text('Call Me')
end
end
# The 'formation' filter: 'improper' matches only improper dances; 'Becket *'
# matches both Becket variants; exact variants and 'everything else' partition
# the rest. (Fixed the 'sqaure' typo, which made that absence check vacuous --
# the seeded square dance is titled 'square'.)
it "'formation' filters work" do
becket = FactoryGirl.create(:call_me, start_type: 'Becket', title: 'Becket')
square = FactoryGirl.create(:dance, start_type: 'square dance', title: 'square')
dances2 = dances + [becket, square]
visit '/'
select 'formation'
select 'improper'
# sanity check that the fixtures cover all four formation buckets
expect(Set.new(dances2.map(&:start_type))).to eq(Set['improper', 'Becket ccw', 'Becket', 'square dance'])
expect(page).to_not have_text('Call Me')
expect(page).to_not have_css('#dances-table', text: 'Becket')
expect(page).to_not have_css('#dances-table', text: 'square')
dances2.each do |dance|
expect(page).to have_link(dance.title) if dance.start_type == 'improper'
end
select 'Becket *'
expect(page).to have_link(becket.title)
expect(page).to have_link('Call Me')
dances2.each do |dance|
if dance.title.in?([becket.title, 'Call Me'])
expect(page).to have_link(dance.title)
else
expect(page).to_not have_link(dance.title)
end
end
select 'Becket cw'
expect(page).to_not have_link('Call Me')
expect(page).to have_link(becket.title)
dances2.each do |dance|
if dance.title.in?([becket.title])
expect(page).to have_link(dance.title)
else
expect(page).to_not have_link(dance.title)
end
end
select 'Becket ccw'
expect(page).to_not have_link(becket.title)
expect(page).to have_link('Call Me')
dances2.each do |dance|
if dance.title.in?(['Call Me'])
expect(page).to have_link(dance.title)
else
expect(page).to_not have_link(dance.title)
end
end
select 'everything else'
expect(page).to_not have_link('Call Me')
expect(page).to have_link(square.title)
dances2.each do |dance|
if dance.title.in?([square.title])
expect(page).to have_link(dance.title)
else
expect(page).to_not have_link(dance.title)
end
end
end
describe 'figure filter machinantions' do
# Shared setup: seed dances, then reduce the filter tree to (and (figure '*'))
# by selecting 'and' and removing one of the two generated subfilters.
def setup_and_filter
dances
visit '/'
# get down to (and (filter '*')):
select('and')
all('.figure-filter-remove').last.click
end
# Verifies the filter-tree shape that every other example in this group assumes.
it 'the precondition of all these other tests is fulfilled' do
with_retries do
setup_and_filter
expect(page).to have_css('.figure-filter', count: 2)
expect(page).to have_css('.figure-filter-move', count: 1)
expect(page).to have_css("#figure-filter-root>.figure-filter-op") # js wait for...
expect(find("#figure-filter-root>.figure-filter-op").value).to eq('and')
expect(find("#figure-filter-root>.figure-filter .figure-filter-op").value).to eq('figure')
expect(find("#figure-filter-root>.figure-filter .figure-filter-move").value).to eq('*')
expect(page).to have_content('Call Me')
expect(page).to have_content('Box the Gnat Contra')
expect(page).to have_content('The Rendevouz')
end
end
# Changing the move select re-runs the search with the new move.
it "changing move changes values" do
with_retries do
setup_and_filter
select('circle')
expect(page).to have_content('The Rendevouz')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to have_content('Call Me')
end
end
# 'add and' appends a live subfilter; both moves then constrain the search.
it "clicking 'add and' inserts a figure filter that responds to change events" do
with_retries do
setup_and_filter
expect(page).to have_css('.figure-filter', count: 2)
expect(page).to have_css('.figure-filter-move', count: 1)
click_button('add and')
expect(page).to have_css('.figure-filter', count: 3)
expect(page).to have_css('.figure-filter-move', count: 2)
all('.figure-filter-move').first.select('chain')
all('.figure-filter-move').last.select('circle')
expect(page).to have_content('Call Me')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to_not have_content('The Rendevouz')
end
end
# Collapsing 'and' to 'figure' drops the subfilters and leaves one working
# move select behind.
it "changing from 'and' to 'figure' purges subfilters and installs a new working move select" do
with_retries do
setup_and_filter
select('circle') # rendevous and call me
expect(page).to have_css('.figure-filter', count: 2)
expect(page).to have_css('.figure-filter-move', count: 1)
first('.figure-filter-op').select('figure')
select('chain')
expect(page).to have_css('.figure-filter', count: 1)
expect(page).to have_css('.figure-filter-move', count: 1)
expect(page).to_not have_content('The Rendevouz')
expect(page).to have_content('Box the Gnat Contra')
expect(page).to have_content('Call Me')
end
end
# Switching to 'number of' installs comparison + count widgets that filter
# by how many matching figures a dance contains.
it "changing from 'and' to 'number of' installs a new working number filter" do
with_retries do
setup_and_filter
select('circle') # rendevous and call me
first('.figure-filter-op').select('number of')
expect(page).to have_css('.figure-filter', count: 2)
expect(page).to have_css('.figure-filter-move', count: 1)
expect(page).to have_css('.figure-filter-count-comparison', count: 1)
expect(page).to have_css('.figure-filter-count-number', count: 1)
select('0')
select('>')
expect(page).to_not have_content('Processing')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to have_content('Call Me')
expect(page).to have_content('The Rendevouz')
select('≥')
select('2')
expect(page).to_not have_content('Processing')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
expect(page).to have_content('The Rendevouz')
end
end
# Regression: an 'or' whose subfilters have all been removed should still
# morph into a unary 'no' and regrow its implicit child filter.
# (Removed superfluous trailing semicolons.)
it "change from an empty 'or' to 'no'" do
with_retries do
setup_and_filter
all('.figure-filter-op').last.select('or')
expect(page).to have_css('.figure-filter-op', count: 4)
expect(page).to have_css('.figure-filter', count: 4)
expect(page).to have_css('.figure-filter-add', count: 2)
expect(page).to have_css('.figure-filter-move', count: 2)
all('.figure-filter-remove').last.click
expect(page).to have_css('.figure-filter', count: 3) # css wait
all('.figure-filter-remove').last.click
expect(page).to have_css('.figure-filter', count: 2) # css wait
all('.figure-filter-op').last.select('no')
expect(page).to have_css('.figure-filter', count: 3) # <- the main point here
end
end
# A binary 'and' switched to unary 'no' drops the extra subfilter and negates:
# only dances *without* a chain remain. (Removed superfluous semicolon.)
it "change from binary 'and' to 'no'" do
with_retries do
setup_and_filter
click_button('add and')
all('.figure-filter-move').first.select('chain')
all('.figure-filter-move').last.select('circle')
expect(page).to_not have_content('The Rendevouz')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to have_content('Call Me')
all('.figure-filter-op').first.select('no') # have no chain
expect(page).to have_content('The Rendevouz')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
end
end
# Wrapping a bare 'figure' filter in 'no' negates it in place; the filter-tree
# shape at each step is traced in the inline comments.
it "change from 'figure' to 'no'" do
with_retries do
setup_and_filter
# now we're ['and', ['figure', '*']]
first('.figure-filter-op').select('figure')
# now we're just ['figure', '*']
select('no')
# now we're ['no', ['figure', '*']]
select('circle')
# now we're ['no', ['figure', 'circle']]
expect(page).to_not have_content('The Rendevouz')
expect(page).to have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
end
end
# A 'figure' switched to 'or' grows two wildcard children; the emitted query
# shows the full nested tree.
it "change from 'figure' to 'or'" do
with_retries do
setup_and_filter
all('.figure-filter-op').last.select('or')
expect(page).to have_css('.figure-filter', count: 4)
expect(page).to have_css('.figure-filter-add', count: 2)
expect(find("#figure-query-buffer", visible: false).value).to eq('["and",["or",["figure","*"],["figure","*"]]]')
end
end
# The 'add' button exists only for variable-arity ops ('and'/'or'), not for
# unary 'no', and clicking it appends a subfilter.
it "it adds/removes 'add' button depending on arity of the filter, and 'add' button works" do
with_retries do
setup_and_filter
expect(page).to have_css('.figure-filter-add', count: 1)
all('.figure-filter-op').first.select('no')
expect(page).to have_css('.figure-filter-add', count: 0)
all('.figure-filter-op').first.select('and')
expect(page).to have_css('.figure-filter-add', count: 1)
expect(page).to have_css('.figure-filter', count: 3)
click_button('add and')
expect(page).to have_css('.figure-filter', count: 4)
end
end
# [x] remove-button rules: never on the root, present on removable subfilters,
# and re-evaluated whenever a parent op changes arity.
describe 'filter remove button' do
it "root filter does not have a remove button" do
with_retries do
setup_and_filter
expect(page).to_not have_css('#figure-filter-root > button.figure-filter-remove')
end
end
it "initial subfilter has a working remove button" do
with_retries do
setup_and_filter
expect(page).to have_css('.figure-filter > button.figure-filter-remove', count: 1)
find('.figure-filter-remove').click
expect(page).to have_css('.figure-filter', count: 1)
end
end
it "another subfilter has a working remove button" do
with_retries do
setup_and_filter
select('circle')
click_button('add and') # adds a '*'
expect(page).to have_css('.figure-filter > button.figure-filter-remove', count: 2)
all('.figure-filter-remove').last.click
expect(page).to have_css('.figure-filter', count: 2)
expect(find("#figure-query-buffer", visible: false).value).to eq('["and",["figure","circle"]]')
end
end
it "changing my op still allows my remove button" do # this was a bug at one point
with_retries do
setup_and_filter
all('.figure-filter-op').last.select('or')
expect(page).to have_css('#figure-filter-root > .figure-filter > .figure-filter-remove')
expect(page).to have_css('.figure-filter-remove', count: 3)
end
end
it "changing my op removes illegal remove buttons among my children, and adds them back in when they are legal" do
with_retries do
setup_and_filter
first('.figure-filter-op').select('no')
# [no, [figure *]]
expect(page).to_not have_css('.figure-filter-remove') # remove illegal X buttons
first('.figure-filter-op').select('and')
expect(page).to have_css('#figure-filter-root > .figure-filter > .figure-filter-remove') # re-add X button
expect(page).to have_css('.figure-filter-remove', count: 2)
end
end
end
# The English rendering of the current query, e.g.:
describe 'figure query sentence' do
# "figure: chain" => "dances with a chain"
# "no (figure: chain)" => "dances with no chain"
# "no ((figure: chain) or (figure: hey))" => "dances with no chain or hey"
it 'works with precondition' do
with_retries do
setup_and_filter
# the wildcard move reads as "any figure"
expect(page).to have_content('dances with any figure')
end
end
end
end
describe 'figure ... button' do
# The '...' button appears for every figure filter, multiplies with
# subfilters, and toggles the ellipsis-expanded class when clicked.
it "is visible initially, when figure is 'any figure'" do
visit '/'
expect(page).to have_button('...')
end
it 'changing figure filter hides this one but creates two more' do
visit '/'
select('then')
expect(page).to have_button('...', count: 2)
end
it "clicking '...' toggles 'ellipsis-expanded' class" do
visit '/'
select('chain')
expect(page).to_not have_css('.figure-filter-ellipsis.ellipsis-expanded')
click_button '...'
expect(page).to have_css('.figure-filter-ellipsis.ellipsis-expanded')
click_button '...'
expect(page).to_not have_css('.figure-filter-ellipsis.ellipsis-expanded')
end
context 'accordion' do
# The accordion (detailed chooser panel) exists in the DOM but is hidden until
# the '...' button is clicked.
it 'lurks invisibly' do
visit '/'
expect(page).to_not have_css('.figure-filter-accordion')
expect(page).to have_css('.figure-filter-accordion', visible: false)
end
it 'pops forth when clicked' do
visit '/'
select('chain')
expect(page).to have_css('.figure-filter-accordion', visible: false)
click_button('...')
expect(page).to have_css('.figure-filter-accordion')
end
# Angle chooser: '4 places' narrows circle matches (360 degrees in the query).
it "circle 4 places finds only 'The Rendevouz'" do
with_retries do
visit '/'
dances
select('circle')
click_button('...')
select('4 places')
expect(page).to have_content('The Rendevouz') # has circle left 3 & 4 places
expect(page).to_not have_content('Box the Gnat Contra') # no circles
expect(page).to_not have_content('Call Me') # has circle left 3 places
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","circle","*","360","*"]')
end
end
# Direction chooser: 'right' maps to the "false" argument in the circle query.
it "circle right finds only a dance with circle right" do
with_retries do
visit '/'
right = FactoryGirl.create(:dance_with_a_circle_right)
dances
select('circle')
click_button('...')
choose('right')
expect(page).to_not have_content('The Rendevouz') # has circle left 3 & 4 places
expect(page).to_not have_content('Box the Gnat Contra') # no circles
expect(page).to_not have_content('Call Me') # has circle left 3 places
expect(page).to have_content(right.title) # has circle right
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","circle","false","*","*"]')
end
end
# Expanding '...' pads the query with explicit wildcard arguments but must not
# change which dances match.
it "A slightly different query is sent if the ... is clicked" do
dances
with_retries do
visit '/'
select('circle')
expect(page).to have_content('The Rendevouz') # has circle left 3 & 4 places
expect(page).to_not have_content('Box the Gnat Contra') # no circles
expect(page).to have_content('Call Me') # has circle left 3 places
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","circle"]') # first query
click_button('...')
expect(page).to have_content('The Rendevouz') # has circle left 3 & 4 places
expect(page).to_not have_content('Box the Gnat Contra') # no circles
expect(page).to have_content('Call Me') # has circle left 3 places
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","circle","*","*","*"]') # second query
end
end
# The angle menu offers exactly the angles JSLibFigure defines for the move,
# plus a wildcard; out-of-range angles (min-90, max+90) are absent.
it 'circle has an angle select box with the right options' do
with_retries do
visit '/'
select('circle')
click_button('...')
angles = JSLibFigure.angles_for_move('circle')
too_small_angle = angles.min - 90
too_big_angle = angles.max + 90
expect(page).to_not have_css("#figure-filter-root select option", text: JSLibFigure.degrees_to_words(too_small_angle, 'circle'))
expect(page).to_not have_css("#figure-filter-root select option", text: JSLibFigure.degrees_to_words(too_big_angle, 'circle'))
angles.each do |angle|
expect(page).to have_css("#figure-filter-root select option[value='#{angle}']", text: JSLibFigure.degrees_to_words(angle, 'circle'))
end
expect(page).to have_css("#figure-filter-root .figure-filter-accordion select option[value='*']")
end
end
it "swing for 8 doesn't find 'The Rendevouz', which features only long swings" do
dances
with_retries do
visit '/'
select('swing', match: :first)
click_button('...')
select('8', match: :prefer_exact) # '8' is in the menu twice, and also in 'figure 8'
expect(page).to_not have_content('The Rendevouz') # has circle left 3 & 4 places
expect(page).to have_content('Box the Gnat Contra') # no circles
expect(page).to have_content('Call Me') # has circle left 3 places
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","swing","*","*","8"]')
end
end
it "non-balance & swing doesn't find 'The Rendevouz', which features only balance & swings" do
dances
with_retries do
visit '/'
select('swing', match: :first)
click_button('...')
choose('none')
expect(page).to_not have_content('The Rendevouz') # has circle left 3 & 4 places
expect(page).to have_content('Box the Gnat Contra') # no circles
expect(page).to have_content('Call Me') # has circle left 3 places
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","swing","*","none","*"]')
end
end
it 'labels appear on chooser elements' do
visit '/'
with_retries do
click_button('...')
select('swing', match: :first) # swing uses simple label system
expect(page).to have_css('.chooser-label-text', text: 'who')
expect(page).to have_css('.chooser-label-text', text: 'prefix')
expect(page).to have_css('.chooser-label-text', text: 'beats')
select('allemande orbit') # allemande orbit uses fancier label system
expect(page).to have_css('.chooser-label-text', text: 'who')
expect(page).to have_css('.chooser-label-text', text: 'allemande')
expect(page).to have_css('.chooser-label-text', text: 'inner')
expect(page).to have_css('.chooser-label-text', text: 'outer')
expect(page).to have_css('.chooser-label-text', text: 'for')
end
end
it "allemande with ladles finds only 'Box the Gnat'" do
dances
with_retries do
visit '/'
allemande = FactoryGirl.create(:dance_with_a_gentlespoons_allemande_left_once)
select('allemande')
click_button('...')
select('ladles')
expect(page).to_not have_content('The Rendevouz')
expect(page).to have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
expect(page).to_not have_content(allemande.title)
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","allemande","ladles","*","*","*"]')
end
end
it "allemande has the right dancer chooser menu entries" do
dances
with_retries do
visit '/'
select('allemande')
click_button('...')
expect(page).to have_css("option[value='ladles']")
expect(page).to have_css("option[value='gentlespoons']")
expect(page).to have_css("option[value='neighbors']")
expect(page).to have_css("option[value='partners']")
expect(page).to have_css("option[value='same roles']")
expect(page).to have_css("option[value='shadows']")
expect(page).to have_css("option[value='ones']")
expect(page).to have_css("option[value='twos']")
expect(page).to have_css("option[value='first corners']")
expect(page).to have_css("option[value='second corners']")
expect(page).to have_css("option[value='shadows']")
expect(page).to_not have_css("option[value='prev neighbors']")
expect(page).to_not have_css("option[value='next neighbors']")
expect(page).to_not have_css("option[value='2nd neighbors']")
expect(page).to_not have_css("option[value='3rd neighbors']")
expect(page).to_not have_css("option[value='4th neighbors']")
expect(page).to_not have_css("option[value='1st shadows']")
expect(page).to_not have_css("option[value='2nd shadows']")
end
end
# Rotation chooser on allemande: 'left' maps to "false" in the query.
it "allemande with allemande left finds only 'Just Allemande'" do
dances
with_retries do
visit '/'
allemande = FactoryGirl.create(:dance_with_a_gentlespoons_allemande_left_once)
select('allemande')
click_button('...')
choose('left', exact: true)
expect(page).to_not have_content('The Rendevouz')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
expect(page).to have_content(allemande.title)
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","allemande","*","false","*","*"]')
end
end
# Angle chooser on allemande: 'once' maps to 360 degrees in the query.
it "allemande once around works" do
dances
with_retries do
visit '/'
allemande = FactoryGirl.create(:dance_with_a_gentlespoons_allemande_left_once)
select('allemande')
click_button('...')
select('once')
expect(page).to_not have_content('The Rendevouz')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
expect(page).to have_content(allemande.title)
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","allemande","*","*","360","*"]')
end
end
# Free-text chooser on 'custom': space-separated keywords OR together, so
# 'apple orange' matches any dance whose custom text contains either word.
it "text input keywords work" do
dances
with_retries do
visit '/'
apple = FactoryGirl.create(:dance_with_a_custom, custom_text: 'apple', title: 'just apple')
banana = FactoryGirl.create(:dance_with_a_custom, custom_text: 'banana', title: 'just banana')
orange = FactoryGirl.create(:dance_with_a_custom, custom_text: 'orange', title: 'just orange')
apple_banana = FactoryGirl.create(:dance_with_a_custom, custom_text: 'apple banana', title: 'apple_banana')
select('custom')
click_button('...')
find(:css, "input.chooser-argument[type=string]").set('apple orange')
blur
dances.each do |dance|
expect(page).to_not have_content(dance.title)
end
expect(page).to have_content(apple.title)
expect(page).to_not have_content(banana.title)
expect(page).to have_content(orange.title)
expect(page).to have_content(apple_banana.title)
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","custom","apple orange","*"]')
end
end
# Grip chooser on star: 'unspecified' (empty-string argument) excludes the
# wrist-grip star dance.
it "wrist grip filter works" do
dances
with_retries do
visit '/'
grip = FactoryGirl.create(:dance_with_a_wrist_grip_star)
select('star')
click_button('...')
select('unspecified')
expect(page).to_not have_content('The Rendevouz')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to have_content('Call Me')
expect(page).to_not have_content(grip.title)
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","star","*","*","","*"]')
end
end
# March-facing chooser on down the hall distinguishes forward / backward /
# forward-then-backward dances.
it "facing filter works" do
fb = FactoryGirl.create(:dance_with_a_down_the_hall, march_facing: 'forward then backward')
f = FactoryGirl.create(:dance_with_a_down_the_hall, march_facing: 'forward')
b = FactoryGirl.create(:dance_with_a_down_the_hall, march_facing: 'backward')
with_retries do
visit '/'
select('down the hall')
click_button('...')
select('forward then backward')
expect(page).to_not have_content(f.title)
expect(page).to_not have_content(b.title)
expect(page).to have_content(fb.title)
end
end
# Ender chooser narrows results: after toggling the ender options only the
# circle-ender ('bend into a ring') dance should remain visible.
it "down the hall ender filter works" do
ta = FactoryGirl.create(:dance_with_a_down_the_hall, down_the_hall_ender: 'turn-alone', title: 'dth_alone')
tc = FactoryGirl.create(:dance_with_a_down_the_hall, down_the_hall_ender: 'turn-couple', title: 'dth_couples')
circle = FactoryGirl.create(:dance_with_a_down_the_hall, down_the_hall_ender: 'circle', title: 'dth_circle')
unspec = FactoryGirl.create(:dance_with_a_down_the_hall, down_the_hall_ender: '', title: 'dth_unspec')
# flakier than most examples, so allow extra retries
with_retries(15) do
visit '/'
select('down the hall')
click_button('...')
select('turn as a couple')
# select('turn alone') # hard because multiple
select('bend into a ring')
expect(page).to_not have_content(tc.title)
expect(page).to_not have_content(unspec.title)
expect(page).to_not have_content(ta.title)
expect(page).to have_content(circle.title)
end
end
# half_or_full radio: choosing 'full' keeps only the full-poussette dance;
# the fraction is encoded as "1" in the query buffer.
it "half_or_full filter works" do
poussette = FactoryGirl.create(:dance_with_a_full_poussette)
dances
with_retries do
visit '/'
select('poussette')
click_button('...')
choose('full')
# none of the baseline dances contain a poussette
dances.each do |dance|
expect(page).to_not have_content(dance.title)
end
expect(page).to have_content(poussette.title)
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","poussette","1","*","*","*","*"]')
end
end
# hey_length chooser: each stored hey_length value corresponds to one of the
# four human-readable labels; selecting a label shows exactly its dance.
it "hey_length filter works" do
# stored values (e.g. 'ladles%%1') pair positionally with the labels below
hey_dances = %w(ladles%%1 half ladles%%2 full).map {|hey_length| FactoryGirl.create(:dance_with_a_hey, hey_length: hey_length)}
hey_lengths = ['less than half',
'half',
'between half and full',
'full']
with_retries do
visit '/'
select('hey')
click_button('...')
hey_dances.each_with_index do |dance, i|
hey_length = hey_lengths[i]
select(hey_length)
# only the dance matching the selected length may appear
hey_dances.each do |dance2|
if dance == dance2
expect(page).to have_content(dance2.title)
else
expect(page).to_not have_content(dance2.title)
end
end
expect(find("#figure-query-buffer", visible: false).value).to eq(%{["figure","hey","*","*","*",#{hey_length.inspect},"*","*","*","*","*","*"]})
end
end
end
# A move alias (see saw reads as a left-shoulder do si do) behaves as a
# subset of its base move: widening the shoulder chooser to '*' swaps the
# move chooser to the base move, and narrowing back to 'left' restores it.
it 'aliases are subsets' do
do_si_do = FactoryGirl.create(:dance_with_a_do_si_do)
see_saw = FactoryGirl.create(:dance_with_a_see_saw)
with_retries do
visit '/'
select('see saw')
expect(page).to have_content(see_saw.title)
expect(page).to_not have_content(do_si_do.title)
click_button('...')
choose('*')
expect(page).to have_content(do_si_do.title)
expect(page).to have_content(see_saw.title)
expect(page).to have_content("Showing dances with a * do si do *")
expect(find(".figure-filter-move").value).to eq('do si do')
choose('left')
expect(page).to have_content(see_saw.title)
expect(page).to_not have_content(do_si_do.title)
expect(find(".figure-filter-move").value).to eq('see saw')
expect(page).to have_content("Showing dances with a * see saw *")
end
end
end
end
describe 'dialect' do
# With the test dialect active, move options display dialect names
# (allemande->almond, gyre->darcy) while option values keep canonical names.
it 'figure filter move' do
expect_any_instance_of(WelcomeController).to receive(:dialect).and_return(JSLibFigure.test_dialect)
with_retries do
visit '/'
dances
expect(page).to_not have_css('option', text: exactly('allemande'))
expect(page).to have_css('option[value=allemande]', text: exactly('almond'))
expect(page).to_not have_css('option', text: exactly('gyre'))
expect(page).to have_css('option[value=gyre]', text: exactly('darcy'))
# selecting by dialect label still filters on the canonical move
select('almond')
expect(page).to have_content('Box the Gnat Contra')
expect(page).to_not have_content('The Rendevouz')
expect(page).to_not have_content('Call Me')
end
end
# Dancer-role terms are dialect-mapped too (ladles->ravens); the query
# buffer still receives the canonical terms.
it 'figure filter dancers' do
expect_any_instance_of(WelcomeController).to receive(:dialect).and_return(JSLibFigure.test_dialect)
with_retries do
visit '/'
dances
# gentlespoons allemande must NOT match a ladles ('ravens') filter
allemande = FactoryGirl.create(:dance_with_a_gentlespoons_allemande_left_once)
expect(page).to_not have_content('Processing...')
select('almond')
click_button('...')
select('ravens')
expect(page).to_not have_content('The Rendevouz')
expect(page).to have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
expect(page).to_not have_content(allemande.title)
expect(find("#figure-query-buffer", visible: false).value).to eq('["figure","allemande","ladles","*","*","*"]')
end
end
# Dance hooks rendered in the index are run through the dialect transform,
# so the raw hook text must not appear.
it "displays dialect-transformed hooks" do
expect_any_instance_of(DancesController).to receive(:dialect).and_return(JSLibFigure.test_dialect)
dance2 = FactoryGirl.create(:dance, hook: 'hook allemande ladles hook')
visit '/'
expect(page).to have_content('hook almond ravens hook')
expect(page).to_not have_content(dance2.hook)
end
end
xdescribe 'back button' do
# these tests are xited because they found a real bug on chrome
# that I can't fix right now. https://github.com/contradb/contra/issues/611
# Builds a three-clause filter (swing AND ladles allemande right 1½ AND
# improper formation), navigates into a dance page, then asserts the browser
# Back button restores every chooser value and the filtered result set.
it 'works' do
dances
visit '/'
select('and')
expect(page).to have_css('.figure-filter-move', count: 2) # js wait
all('.figure-filter-move').first.select('swing', match: :first)
all('.figure-filter-move').last.select('allemande')
all('.figure-filter-ellipsis').last.click
select('ladles') # ladles allemande right 1½ for '*' beats
choose('right')
select('1½')
click_button('add and')
expect(page).to have_css('.figure-filter-op', count: 4) # js wait
all('.figure-filter-op').last.select('formation')
select('improper')
click_link('Box the Gnat Contra')
expect(page).to have_content('partners swing') # wait for page to load
page.go_back
move_selector = '.figure-filter-move'
expect(page).to have_css(move_selector, count: 2)
expect(all(move_selector).first.value).to eq('swing')
expect(all(move_selector).last.value).to eq('allemande')
expect(page).to_not have_content('Processing...')
# the expanded ellipsis/accordion state must survive the back navigation
expect(page).to have_css('.figure-filter-ellipsis.ellipsis-expanded', count: 1)
expect(page).to have_css('.figure-filter-accordion', count: 1, visible: true)
expect(page).to have_css('.chooser-argument', count: 4)
expect(all(".chooser-argument")[0].value).to eq('ladles')
expect(find(".chooser-argument [type=radio][value='*']")).to_not be_checked
expect(find(".chooser-argument [type=radio][value='true']")).to be_checked
expect(find(".chooser-argument [type=radio][value='false']")).to_not be_checked
# 1½ rotations are stored as 540 degrees
expect(all(".chooser-argument")[2].value.to_s).to eq(540.to_s)
expect(all(".chooser-argument")[3].value).to eq('*')
expect(page).to have_css('.figure-filter-formation', count: 1)
op_values = find_all('.figure-filter-op').map(&:value)
expect(op_values.count('and')).to eq(1)
expect(op_values.count('figure')).to eq(2)
expect(op_values.count('formation')).to eq(1)
expect(page).to have_content('Box the Gnat Contra')
expect(page).to_not have_content('The Rendevouz')
expect(page).to_not have_content('Call Me')
expect(page).to have_content('Showing dances with a swing and a ladles allemande right 1½ and an improper formation.')
end
# Smoke-tests every operator in the first filter dropdown: selecting it and
# reloading the page must still render a populated results table.
it 'all filters at least load when back-ed' do
  dances
  visit '/'
  # Collect the visible label of each operator option. innerHTML is
  # HTML-escaped, so decode '&amp;' back to a literal '&' before handing the
  # label to Capybara's select(). (The previous gsub('&', '&') was a no-op.)
  filters = page.find('.figure-filter-op').all('option').map {|elt| elt['innerHTML'].gsub('&amp;', '&') }
  filters.each do |filter|
    page.select(filter, match: :first)
    expect(page).to have_text(/Showing \d+ to \d+ of \d+ entries/)
    page.refresh
    # when filters are broken on reload, it has an empty table and doesn't show this text:
    expect(page).to have_text(/Showing \d+ to \d+ of \d+ entries/)
  end
end
# 'number of' operator: the comparison, count, and move choosers must all
# survive a back-button round trip with their selected values intact.
it 'number of' do
dances
visit '/'
select('number of')
select('contra corners') # clear out search results
expect(page).to_not have_content('Processing')
expect(page).to have_css('.figure-filter-op', count: 2)
expect(page).to have_css('.figure-filter-move', count: 1)
expect(page).to have_css('.figure-filter-count-comparison', count: 1)
expect(page).to have_css('.figure-filter-count-number', count: 1)
select('>')
select('1')
select('circle')
# these are flaky and not really important to the back button testing:
# expect(page).to_not have_content('Processing')
# expect(page).to_not have_content('Box the Gnat Contra')
# expect(page).to_not have_content('Call Me')
# expect(page).to have_content('The Rendevouz')
click_link('The Rendevouz')
expect(page).to have_css('h1', text: 'The Rendevouz') # wait for page to load
page.go_back
expect(page).to have_css('.figure-filter-op', count: 2)
expect(page).to have_css('.figure-filter-move', count: 1)
expect(page).to have_css('.figure-filter-count-comparison', count: 1)
expect(page).to have_css('.figure-filter-count-number', count: 1)
expect(page).to_not have_content('Processing')
expect(page).to_not have_content('Box the Gnat Contra')
expect(page).to_not have_content('Call Me')
expect(page).to have_content('The Rendevouz')
expect(find('.figure-filter-count-comparison').value).to eq('>')
expect(find('.figure-filter-count-number').value).to eq('1')
expect(find('.figure-filter-move').value).to eq('circle')
end
# After back-navigation, 'or' subfilters keep their [x] remove buttons and
# the 'add or' button must still append (and remove) working subfilters.
it 'decorates subfilters with [x] buttons, conjunctions, and "add or"' do
dances
visit '/'
select('or')
click_link(dances.first.title)
expect(page).to have_css('h1', text: dances.first.title)
page.go_back
expect(page).to have_css('.figure-filter[data-op=or]', count: 2)
expect(page).to have_css('.figure-filter-remove', count: 2)
expect(page).to have_css('button.figure-filter-add', text: 'add or')
click_button('add or')
expect(page).to have_css('.figure-filter-remove', count: 3)
all('.figure-filter-remove').last.click
expect(page).to have_css('.figure-filter-remove', count: 2)
end
end
describe 'columns' do
# Column visibility toggles: the first group of columns starts visible, the
# second starts hidden; each click flips both the table header and the
# button's active/inactive styling, and a second click flips them back.
it "Clicking vis toggles buttons cause columns to disappear" do
dances
visit '/'
%w[Title Choreographer Formation Hook User Entered].each do |col|
expect(page).to have_css('#dances-table th', text: col)
expect(page).to have_css('button.toggle-vis-active', text: col)
expect(page).to_not have_css('button.toggle-vis-inactive', text: col)
click_button col
expect(page).to_not have_css('#dances-table th', text: col)
expect(page).to have_css('button.toggle-vis-inactive', text: col)
expect(page).to_not have_css('button.toggle-vis-active', text: col)
click_button col
expect(page).to have_css('#dances-table th', text: col)
expect(page).to have_css('button.toggle-vis-active', text: col)
expect(page).to_not have_css('button.toggle-vis-inactive', text: col)
end
# these columns are hidden by default
%w[Updated Sharing Figures].each do |col|
expect(page).to_not have_css('#dances-table th', text: col)
expect(page).to_not have_css('button.toggle-vis-active', text: col)
expect(page).to have_css('button.toggle-vis-inactive', text: col)
click_button col
expect(page).to have_css('#dances-table th', text: col)
expect(page).to_not have_css('button.toggle-vis-inactive', text: col)
expect(page).to have_css('button.toggle-vis-active', text: col)
click_button col
expect(page).to_not have_css('#dances-table th', text: col)
expect(page).to have_css('button.toggle-vis-inactive', text: col)
expect(page).to_not have_css('button.toggle-vis-active', text: col)
end
end
# dance_publish_word comes from DancesHelper
include DancesHelper
# Sharing column: each dance row shows the human-readable word for its
# publish level (:off / :sketchbook / :all).
it 'published column cells' do
with_login do |user|
dances.each_with_index do |dance, i|
publish = [:off, :sketchbook, :all][i]
publish_string = dance_publish_word(publish)
dance.update!(publish: publish, user: user)
visit '/'
click_button 'Sharing'
expect(page).to have_css('tr', text: /#{dance.title}.*#{publish_string}/)
end
end
end
# Figures column is hidden by default; once shown it lists each dance's
# matching figures and reacts to the active figure filter.
it 'figures column' do
dances
visit '/'
expect(page).to_not have_content('whole dance')
click_button 'Figures'
# with no filter, each of the 3 dances shows 'whole dance'
expect(page).to have_content('whole dance', count: 3)
select('circle')
expect(page).to have_css('tr', text: /The Rendevouz.*circle left 4 places\ncircle left 3 places/)
expect(page).to have_css('tr', text: /Call Me.*circle left 3 places/)
end
end
end
private
# Builds a whole-string anchor around +string+: exactly('almond') matches
# the text "almond" and nothing longer. Note: +string+ is interpolated as a
# regexp fragment, not escaped.
def exactly(string)
  Regexp.new("\\A#{string}\\z")
end
end
|
require "spec_helper"
describe FlatironLabGenerator::TemplateMaker do
let(:lab_generator_object) {FlatironLabGenerator::TemplateMaker.new("fundamental-ruby","test_lab","git_repo")}
describe "#git_init" do
it "initializes a new local git repo" do
lab_generator_object.git_init
# NOTE(review): `git rev-parse` runs in the spec's working directory, which
# is likely already inside a git repo, so this may pass even if git_init
# did nothing — confirm the intended directory.
expect(system('git rev-parse')).to eq(true)
end
end
# Each example generates a lab skeleton on disk, asserts the exact directory
# layout by comparing the output of the external `tree` command, then removes
# the generated directory. Requires `tree` to be installed.
describe "::run" do
let(:lab_name) {"test_lab"}
it "creates a lab template with expected name and returns nil" do
expect(FlatironLabGenerator::TemplateMaker.run("fundamental-ruby",lab_name, "git repo")).to be_nil
expect(File.directory?(lab_name)).to eq(true)
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── lib\n│ └── test_lab.rb\n└── spec\n ├── spec_helper.rb\n └── test_lab_spec.rb\n\n2 directories, 5 files\n")
FileUtils.rm_rf(lab_name)
end
it "creates a command-line lab template" do
expect(FlatironLabGenerator::TemplateMaker.run("command-line",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── bin\n│ └── runner.rb\n├── lib\n│ ├── environment.rb\n│ └── test_lab\n│ └── cli.rb\n└── spec\n ├── spec_helper.rb\n └── test_lab_spec.rb\n\n4 directories, 7 files\n")
FileUtils.rm_rf(lab_name)
end
it "creates a SQL lab" do
expect(FlatironLabGenerator::TemplateMaker.run("sql",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── bin\n│ ├── environment.rb\n│ ├── run.rb\n│ └── sql_runner.rb\n├── lib\n│ └── sample.sql\n└── spec\n ├── spec_helper.rb\n └── test_lab_spec.rb\n\n3 directories, 8 files\n")
FileUtils.rm_rf(lab_name)
end
it "creates an activerecord lab" do
expect(FlatironLabGenerator::TemplateMaker.run("activerecord",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── Rakefile\n├── app\n│ └── models\n│ └── sample-class.rb\n├── config\n│ └── environment.rb\n├── db\n│ └── migrations\n├── lib\n│ └── support\n│ ├── connection_adapter.rb\n│ └── db_registry.rb\n└── spec\n ├── spec_helper.rb\n └── test_lab_spec.rb\n\n8 directories, 9 files\n")
FileUtils.rm_rf(lab_name)
end
it "creates a rake lab" do
expect(FlatironLabGenerator::TemplateMaker.run("rake",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── Rakefile\n├── bin\n│ └── console\n├── config\n│ └── environment.rb\n├── lib\n│ └── test_lab.rb\n└── spec\n ├── rakefile_spec.rb\n ├── spec_helper.rb\n └── test_lab_spec.rb\n\n4 directories, 9 files\n")
FileUtils.rm_rf(lab_name)
end
# NOTE(review): identical expected tree as the rake lab — confirm the
# erb-static-site template is really supposed to share that layout.
it "creates a erb-static-site lab" do
expect(FlatironLabGenerator::TemplateMaker.run("erb-static-site",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── Rakefile\n├── bin\n│ └── console\n├── config\n│ └── environment.rb\n├── lib\n│ └── test_lab.rb\n└── spec\n ├── rakefile_spec.rb\n ├── spec_helper.rb\n └── test_lab_spec.rb\n\n4 directories, 9 files\n")
FileUtils.rm_rf(lab_name)
end
it "creates a rack lab" do
expect(FlatironLabGenerator::TemplateMaker.run("rack",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── app\n│ ├── application.rb\n│ └── controllers\n│ └── erb_maker.rb\n├── config\n│ └── environment.rb\n├── config.ru\n├── lib\n│ └── templates\n│ └── my_cool_response.html.erb\n└── spec\n ├── controllers\n │ └── 01_server_port_spec.rb\n ├── spec_helper.rb\n ├── support\n │ └── an_ok_request.rb\n └── test_lab_spec.rb\n\n8 directories, 11 files\n")
FileUtils.rm_rf(lab_name)
end
it "creates a sinatra-classic lab" do
expect(FlatironLabGenerator::TemplateMaker.run("sinatra-classic",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── Rakefile\n├── app.rb\n├── config\n│ └── environment.rb\n├── config.ru\n├── models\n├── public\n│ ├── images\n│ ├── javascripts\n│ └── stylesheets\n├── spec\n│ ├── spec_helper.rb\n│ └── test_lab_spec.rb\n└── views\n\n8 directories, 8 files\n")
FileUtils.rm_rf(lab_name)
end
it "creates a sinatra-mvc lab" do
expect(FlatironLabGenerator::TemplateMaker.run("sinatra-mvc",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── Rakefile\n├── app\n│ ├── controllers\n│ │ └── application_controller.rb\n│ ├── models\n│ └── views\n│ └── layout.erb\n├── config\n│ └── environment.rb\n├── config.ru\n├── db\n│ ├── migrate\n│ └── seeds.rb\n├── public\n│ └── stylesheets\n└── spec\n ├── controllers\n ├── features\n ├── models\n └── spec_helper.rb\n\n13 directories, 9 files\n")
FileUtils.rm_rf(lab_name)
end
end
describe "#create" do
# TODO(review): placeholder — asserts nothing about #create; replace with a
# real expectation or delete.
it "does something" do
expect(true).to eq(true)
end
end
end
The JS lab template is now tested.
require "spec_helper"
describe FlatironLabGenerator::TemplateMaker do
let(:lab_generator_object) {FlatironLabGenerator::TemplateMaker.new("fundamental-ruby","test_lab template","git_repo")}
describe "#git_init" do
it "initializes a new local git repo" do
lab_generator_object.git_init
# NOTE(review): `git rev-parse` runs in the spec's working directory, which
# is likely already inside a git repo, so this may pass even if git_init
# did nothing — confirm the intended directory.
expect(system('git rev-parse')).to eq(true)
end
end
# Each example generates a lab skeleton on disk, asserts the exact directory
# layout by comparing the output of the external `tree` command, then removes
# the generated directory. Requires `tree` to be installed.
describe "::run" do
let(:lab_name) {"test_lab"}
it "creates a lab template with expected name and returns nil" do
expect(FlatironLabGenerator::TemplateMaker.run("fundamental-ruby",lab_name, "git repo")).to be_nil
expect(File.directory?(lab_name)).to eq(true)
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── lib\n│ └── test_lab.rb\n└── spec\n ├── spec_helper.rb\n └── test_lab_spec.rb\n\n2 directories, 5 files\n")
FileUtils.rm_rf(lab_name)
end
it "creates a command-line lab template" do
expect(FlatironLabGenerator::TemplateMaker.run("command-line",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── bin\n│ └── runner.rb\n├── lib\n│ ├── environment.rb\n│ └── test_lab\n│ └── cli.rb\n└── spec\n ├── spec_helper.rb\n └── test_lab_spec.rb\n\n4 directories, 7 files\n")
FileUtils.rm_rf(lab_name)
end
it "creates a SQL lab template" do
expect(FlatironLabGenerator::TemplateMaker.run("sql",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── bin\n│ ├── environment.rb\n│ ├── run.rb\n│ └── sql_runner.rb\n├── lib\n│ └── sample.sql\n└── spec\n ├── spec_helper.rb\n └── test_lab_spec.rb\n\n3 directories, 8 files\n")
FileUtils.rm_rf(lab_name)
end
it "creates an activerecord lab template" do
expect(FlatironLabGenerator::TemplateMaker.run("activerecord",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── Rakefile\n├── app\n│ └── models\n│ └── sample-class.rb\n├── config\n│ └── environment.rb\n├── db\n│ └── migrations\n├── lib\n│ └── support\n│ ├── connection_adapter.rb\n│ └── db_registry.rb\n└── spec\n ├── spec_helper.rb\n └── test_lab_spec.rb\n\n8 directories, 9 files\n")
FileUtils.rm_rf(lab_name)
end
it "creates a rake lab template" do
expect(FlatironLabGenerator::TemplateMaker.run("rake",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── Rakefile\n├── bin\n│ └── console\n├── config\n│ └── environment.rb\n├── lib\n│ └── test_lab.rb\n└── spec\n ├── rakefile_spec.rb\n ├── spec_helper.rb\n └── test_lab_spec.rb\n\n4 directories, 9 files\n")
FileUtils.rm_rf(lab_name)
end
# NOTE(review): identical expected tree as the rake lab — confirm the
# erb-static-site template is really supposed to share that layout.
it "creates a erb-static-site lab template" do
expect(FlatironLabGenerator::TemplateMaker.run("erb-static-site",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── Rakefile\n├── bin\n│ └── console\n├── config\n│ └── environment.rb\n├── lib\n│ └── test_lab.rb\n└── spec\n ├── rakefile_spec.rb\n ├── spec_helper.rb\n └── test_lab_spec.rb\n\n4 directories, 9 files\n")
FileUtils.rm_rf(lab_name)
end
it "creates a rack lab template" do
expect(FlatironLabGenerator::TemplateMaker.run("rack",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── app\n│ ├── application.rb\n│ └── controllers\n│ └── erb_maker.rb\n├── config\n│ └── environment.rb\n├── config.ru\n├── lib\n│ └── templates\n│ └── my_cool_response.html.erb\n└── spec\n ├── controllers\n │ └── 01_server_port_spec.rb\n ├── spec_helper.rb\n ├── support\n │ └── an_ok_request.rb\n └── test_lab_spec.rb\n\n8 directories, 11 files\n")
FileUtils.rm_rf(lab_name)
end
it "creates a sinatra-classic lab template" do
expect(FlatironLabGenerator::TemplateMaker.run("sinatra-classic",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── Rakefile\n├── app.rb\n├── config\n│ └── environment.rb\n├── config.ru\n├── models\n├── public\n│ ├── images\n│ ├── javascripts\n│ └── stylesheets\n├── spec\n│ ├── spec_helper.rb\n│ └── test_lab_spec.rb\n└── views\n\n8 directories, 8 files\n")
FileUtils.rm_rf(lab_name)
end
it "creates a sinatra-mvc lab template" do
expect(FlatironLabGenerator::TemplateMaker.run("sinatra-mvc",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── Gemfile\n├── README.md\n├── Rakefile\n├── app\n│ ├── controllers\n│ │ └── application_controller.rb\n│ ├── models\n│ └── views\n│ └── layout.erb\n├── config\n│ └── environment.rb\n├── config.ru\n├── db\n│ ├── migrate\n│ └── seeds.rb\n├── public\n│ └── stylesheets\n└── spec\n ├── controllers\n ├── features\n ├── models\n └── spec_helper.rb\n\n13 directories, 9 files\n")
FileUtils.rm_rf(lab_name)
end
it "creates a js lab template" do
expect(FlatironLabGenerator::TemplateMaker.run("js",lab_name, "git repo")).to be_nil
expect(`tree #{lab_name}`).to eq("test_lab\n├── README.md\n├── css\n├── images\n├── index.html\n├── js\n│ ├── jquery-1.8.3.min.js\n│ └── test_lab.js\n├── requires.yml\n└── spec\n\n4 directories, 5 files\n" )
FileUtils.rm_rf(lab_name)
end
end
describe "#create" do
# TODO(review): placeholder — asserts nothing about #create; replace with a
# real expectation or delete.
it "does something" do
expect(true).to eq(true)
end
end
end |
# encoding: utf-8
require 'spec_helper'
# Intentionally empty: the legacy follower specs were dropped.
describe Github::Users::Followers do
end # Github::Users::Followers
Drop old followers tests.
|
# Integration-test recipe exercised by Itamae's spec suite; each resource
# below is verified afterwards against the converged host.
node.reverse_merge!({
message: "Hello, Itamae"
})
execute 'apt-get update'
# clean slate: remove itamae2 if a previous run created it
execute 'deluser --remove-home itamae2' do
only_if "id itamae2"
end
include_recipe "./included.rb"
include_recipe "./included.rb" # including the same recipe is expected to be skipped.
user "create itamae user" do
uid 123
username "itamae"
password "$1$ltOY8bZv$iZ57f1KAp8jwKViNm3pze."
home '/home/foo'
shell '/bin/sh'
end
# re-declare the same username with different attributes to exercise updates
user "update itamae user" do
uid 1234
username "itamae"
password "$1$TQz9gPMl$nHYrsA5W2ZdZ0Yn021BQH1"
home '/home/itamae'
shell '/bin/dash'
end
directory "/home/itamae" do
mode "755"
owner "itamae"
group "itamae"
end
user "create itamae2 user with create home directory" do
username "itamae2"
create_home true
home "/home/itamae2"
shell "/bin/sh"
end
###### package resources: install, pin a version, remove
package 'dstat' do
action :install
end
package 'sl' do
version '3.03-17'
end
package 'resolvconf' do
action :remove
end
######
package "ruby"
# two versions of the same gem exercise version-aware idempotence
gem_package 'tzinfo' do
version '1.1.0'
end
gem_package 'tzinfo' do
version '1.2.2'
end
gem_package 'bundler' do
options ['--no-ri', '--no-rdoc']
end
###### notifies: delayed (default) vs. :immediately
execute "echo -n > /tmp/notifies"
execute "echo -n 1 >> /tmp/notifies" do
action :nothing
end
execute "echo -n 2 >> /tmp/notifies" do
notifies :run, "execute[echo -n 1 >> /tmp/notifies]"
end
execute "echo -n 3 >> /tmp/notifies" do
action :nothing
end
execute "echo -n 4 >> /tmp/notifies" do
notifies :run, "execute[echo -n 3 >> /tmp/notifies]", :immediately
end
###### subscribes: :nothing resources run when their subscribed target runs
execute "echo -n > /tmp/subscribes"
execute "echo -n 1 >> /tmp/subscribes" do
action :nothing
subscribes :run, "execute[echo -n 2 >> /tmp/subscribes]"
end
execute "echo -n 2 >> /tmp/subscribes"
execute "echo -n 3 >> /tmp/subscribes" do
action :nothing
subscribes :run, "execute[echo -n 4 >> /tmp/subscribes]", :immediately
end
execute "echo -n 4 >> /tmp/subscribes"
###### remote_file: explicit source vs. :auto (source derived from the path)
remote_file "/tmp/remote_file" do
source "hello.txt"
end
remote_file "/tmp/remote_file_auto" do
source :auto
end
######
directory "/tmp/directory" do
mode "700"
owner "itamae"
group "itamae"
end
# create-then-delete: the path must not exist after the run
directory "/tmp/directory_never_exist1" do
action :create
end
directory "/tmp/directory_never_exist1" do
action :delete
end
template "/tmp/template" do
source "hello.erb"
variables goodbye: "Good bye"
end
template "/tmp/template_auto" do
source :auto
variables goodbye: "Good bye"
end
file "/tmp/file" do
content "Hello World"
mode "777"
end
execute "echo 'Hello Execute' > /tmp/execute"
# only_if/not_if guards: neither file should be created
file "/tmp/never_exist1" do
only_if "exit 1"
end
file "/tmp/never_exist2" do
not_if "exit 0"
end
###### http_request: one resource per HTTP verb, plus headers and redirects
http_request "/tmp/http_request.html" do
url "https://httpbin.org/get?from=itamae"
end
http_request "/tmp/http_request_delete.html" do
action :delete
url "https://httpbin.org/delete?from=itamae"
end
http_request "/tmp/http_request_post.html" do
action :post
message "love=sushi"
url "https://httpbin.org/post?from=itamae"
end
http_request "/tmp/http_request_put.html" do
action :put
message "love=sushi"
url "https://httpbin.org/put?from=itamae"
end
http_request "/tmp/http_request_headers.html" do
headers "User-Agent" => "Itamae"
url "https://httpbin.org/get"
end
http_request "/tmp/http_request_redirect.html" do
redirect_limit 1
url "https://httpbin.org/redirect-to?url=https%3A%2F%2Fhttpbin.org%2Fget%3Ffrom%3Ditamae"
end
###### service start/stop; '; true' keeps the ps probe from failing the run
service "cron" do
action :stop
end
execute "ps -C cron > /tmp/cron_stopped; true"
service "cron" do
action :start
end
execute "ps -C cron > /tmp/cron_running; true"
###### combined enable/start and disable/stop actions
package "nginx" do
options "--force-yes"
end
service "nginx" do
action [:enable, :start]
end
execute "test -f /etc/rc3.d/S20nginx" # test
execute "test $(ps h -C nginx | wc -l) -gt 0" # test
service "nginx" do
action [:disable, :stop]
end
execute "test ! -f /etc/rc3.d/S20nginx" # test
execute "test $(ps h -C nginx | wc -l) -eq 0" # test
###### symlinks; force overwrites the regular file created just below
link "/tmp-link" do
to "/tmp"
end
execute "touch /tmp-link-force"
link "/tmp-link-force" do
to "/tmp"
force true
end
#####
local_ruby_block "greeting" do
block do
# "itamae" written in Japanese, exercising non-ASCII log output
Itamae.logger.info "板前"
end
end
#####
package "git"
git "/tmp/git_repo" do
repository "https://github.com/ryotarai/infrataster.git"
revision "v0.1.0"
end
git "/tmp/git_repo_submodule" do
repository "https://github.com/mmasaki/fake_repo_including_submodule.git"
recursive true
end
##### run as a non-root user; records $HOME and cwd for later assertions
execute "echo -n \"$HOME\n$(pwd)\" > /tmp/created_by_itamae_user" do
user "itamae"
end
##### notification target is declared in another recipe file (default2.rb)
execute "echo 'notify to resource in default2.rb'" do
notifies :create, "file[put file in default2.rb]"
end
#####
file "/tmp/never_exist3" do
action :create
end
file "/tmp/never_exist3" do
action :delete
end
##### definitions: load the definition file, then invoke it
include_recipe "define/default.rb"
definition_example "name" do
key 'value'
end
##### re-declaring /tmp/file1 unchanged — presumably must NOT fire the
##### notification (so /tmp/never_exist4 stays absent); verified by the specs
file "/tmp/never_exist4" do
action :nothing
end
file "/tmp/file1" do
content "Hello, World"
end
file "/tmp/file1" do
content "Hello, World"
notifies :create, "file[/tmp/never_exist4]"
end
##### second declaration changes only metadata, not content
file "/tmp/file_create_without_content" do
content "Hello, World"
end
file "/tmp/file_create_without_content" do
owner "itamae"
group "itamae"
mode "600"
end
##### verify: post-condition command must succeed for the resource to pass
execute 'true' do
verify 'true'
end
##### chained delayed notifications: each notified resource notifies the next
execute 'echo 1 > /tmp/multi_delayed_notifies' do
notifies :run, "execute[echo 2 >> /tmp/multi_delayed_notifies]"
end
execute 'echo 2 >> /tmp/multi_delayed_notifies' do
action :nothing
notifies :run, "execute[echo 3 >> /tmp/multi_delayed_notifies]"
end
execute 'echo 3 >> /tmp/multi_delayed_notifies' do
action :nothing
notifies :run, "execute[echo 4 >> /tmp/multi_delayed_notifies]"
end
execute 'echo 4 >> /tmp/multi_delayed_notifies' do
action :nothing
end
##### chained immediate notifications
execute 'echo 1 > /tmp/multi_immediately_notifies' do
notifies :run, "execute[echo 2 >> /tmp/multi_immediately_notifies]", :immediately
end
execute 'echo 2 >> /tmp/multi_immediately_notifies' do
action :nothing
notifies :run, "execute[echo 3 >> /tmp/multi_immediately_notifies]", :immediately
end
execute 'echo 3 >> /tmp/multi_immediately_notifies' do
action :nothing
notifies :run, "execute[echo 4 >> /tmp/multi_immediately_notifies]", :immediately
end
execute 'echo 4 >> /tmp/multi_immediately_notifies' do
action :nothing
end
##### file :edit — mutate content via block, with owner/group/mode changes
execute 'echo -n 1 > /tmp/file_edit_notifies' do
action :nothing
end
file '/tmp/file_edit_sample' do
content 'Hello, world'
owner 'itamae'
group 'itamae'
mode '444'
end
file '/tmp/file_edit_sample' do
action :edit
owner 'itamae2'
group 'itamae2'
mode '400'
block do |content|
content.gsub!('world', 'Itamae')
end
notifies :run, "execute[echo -n 1 > /tmp/file_edit_notifies]"
end
##### :edit without owner/group/mode must leave existing metadata untouched
file '/tmp/file_edit_keeping_mode_owner' do
content 'Hello, world'
owner 'itamae'
group 'itamae'
mode '444'
end
file '/tmp/file_edit_keeping_mode_owner' do
action :edit
block do |content|
content.gsub!('world', 'Itamae')
end
end
### :edit that changes content should bump the mtime pinned by touch below
execute "touch -d 2016-05-02T01:23:45 /tmp/file_edit_with_content_change_updates_timestamp"
file "/tmp/file_edit_with_content_change_updates_timestamp" do
action :edit
block do |content|
content[0 .. -1] = "Hello, world"
end
end
### :edit with no content change must keep the pinned timestamp
execute "touch -d 2016-05-02T12:34:56 /tmp/file_edit_without_content_change_keeping_timestamp"
file "/tmp/file_edit_without_content_change_keeping_timestamp" do
action :edit
block do |content|
# no change
end
end
### metadata-only change on identical content
file '/tmp/file_without_content_change_updates_mode_and_owner' do
action :create
content 'Hello, world'
owner 'itamae'
group 'itamae'
mode '444'
end
file '/tmp/file_without_content_change_updates_mode_and_owner' do
action :create
content 'Hello, world' # no change
owner 'itamae2'
group 'itamae2'
mode '666'
end
### :create with changed content should update the pinned timestamp
execute "touch -d 2016-05-01T01:23:45 /tmp/file_with_content_change_updates_timestamp"
file "/tmp/file_with_content_change_updates_timestamp" do
content "Hello, world"
end
### identical content (note the trailing newline matches echo's output) keeps it
execute "echo 'Hello, world' > /tmp/file_without_content_change_keeping_timestamp ; touch -d 2016-05-01T12:34:56 /tmp/file_without_content_change_keeping_timestamp"
file "/tmp/file_without_content_change_keeping_timestamp" do
content "Hello, world\n"
end
### run_command must work in recipes, definitions, resources, and ruby blocks
unless run_command("echo -n Hello").stdout == "Hello"
raise "run_command in a recipe failed"
end
define :run_command_in_definition do
unless run_command("echo -n Hello").stdout == "Hello"
raise "run_command in a definition failed"
end
end
execute "echo Hello" do
unless run_command("echo -n Hello").stdout == "Hello"
raise "run_command in a resource failed"
end
end
local_ruby_block 'execute run_command' do
block do
unless run_command("echo -n Hello").stdout == "Hello"
raise "run_command in local_ruby_block failed"
end
end
end
### host inventory: method, symbol, and string access must all agree
v1 = node.memory.total
v2 = node[:memory][:total]
v3 = node['memory']['total']
unless v1 == v2 && v2 == v3 && v1 =~ /\A\d+kB\z/
raise "failed to fetch host inventory value (#{v1}, #{v2}, #{v3})"
end
Fix the /tmp/file_edit_with_content_change_updates_timestamp recipe to edit its content the same way /tmp/file_edit_sample does, using gsub!.
node.reverse_merge!({
message: "Hello, Itamae"
})
execute 'apt-get update'
execute 'deluser --remove-home itamae2' do
only_if "id itamae2"
end
include_recipe "./included.rb"
include_recipe "./included.rb" # including the same recipe is expected to be skipped.
user "create itamae user" do
uid 123
username "itamae"
password "$1$ltOY8bZv$iZ57f1KAp8jwKViNm3pze."
home '/home/foo'
shell '/bin/sh'
end
user "update itamae user" do
uid 1234
username "itamae"
password "$1$TQz9gPMl$nHYrsA5W2ZdZ0Yn021BQH1"
home '/home/itamae'
shell '/bin/dash'
end
directory "/home/itamae" do
mode "755"
owner "itamae"
group "itamae"
end
user "create itamae2 user with create home directory" do
username "itamae2"
create_home true
home "/home/itamae2"
shell "/bin/sh"
end
######
package 'dstat' do
action :install
end
package 'sl' do
version '3.03-17'
end
package 'resolvconf' do
action :remove
end
######
package "ruby"
gem_package 'tzinfo' do
version '1.1.0'
end
gem_package 'tzinfo' do
version '1.2.2'
end
gem_package 'bundler' do
options ['--no-ri', '--no-rdoc']
end
######
execute "echo -n > /tmp/notifies"
execute "echo -n 1 >> /tmp/notifies" do
action :nothing
end
execute "echo -n 2 >> /tmp/notifies" do
notifies :run, "execute[echo -n 1 >> /tmp/notifies]"
end
execute "echo -n 3 >> /tmp/notifies" do
action :nothing
end
execute "echo -n 4 >> /tmp/notifies" do
notifies :run, "execute[echo -n 3 >> /tmp/notifies]", :immediately
end
######
execute "echo -n > /tmp/subscribes"
execute "echo -n 1 >> /tmp/subscribes" do
action :nothing
subscribes :run, "execute[echo -n 2 >> /tmp/subscribes]"
end
execute "echo -n 2 >> /tmp/subscribes"
execute "echo -n 3 >> /tmp/subscribes" do
action :nothing
subscribes :run, "execute[echo -n 4 >> /tmp/subscribes]", :immediately
end
execute "echo -n 4 >> /tmp/subscribes"
######
remote_file "/tmp/remote_file" do
source "hello.txt"
end
remote_file "/tmp/remote_file_auto" do
source :auto
end
######
directory "/tmp/directory" do
  mode "700"
  owner "itamae"
  group "itamae"
end

# Create then delete the same path: it must not exist after the run.
directory "/tmp/directory_never_exist1" do
  action :create
end

directory "/tmp/directory_never_exist1" do
  action :delete
end

template "/tmp/template" do
  source "hello.erb"
  variables goodbye: "Good bye"
end

template "/tmp/template_auto" do
  source :auto
  variables goodbye: "Good bye"
end

file "/tmp/file" do
  content "Hello World"
  mode "777"
end

execute "echo 'Hello Execute' > /tmp/execute"

# Guards: only_if runs the resource only when the command succeeds;
# not_if skips it when the command succeeds.
file "/tmp/never_exist1" do
  only_if "exit 1"
end

file "/tmp/never_exist2" do
  not_if "exit 0"
end
######
# http_request writes the body of an HTTP response to the named file.
http_request "/tmp/http_request.html" do
  url "https://httpbin.org/get?from=itamae"
end

http_request "/tmp/http_request_delete.html" do
  action :delete
  url "https://httpbin.org/delete?from=itamae"
end

http_request "/tmp/http_request_post.html" do
  action :post
  message "love=sushi" # request body
  url "https://httpbin.org/post?from=itamae"
end

http_request "/tmp/http_request_put.html" do
  action :put
  message "love=sushi"
  url "https://httpbin.org/put?from=itamae"
end

http_request "/tmp/http_request_headers.html" do
  headers "User-Agent" => "Itamae"
  url "https://httpbin.org/get"
end

# Follows at most one redirect.
http_request "/tmp/http_request_redirect.html" do
  redirect_limit 1
  url "https://httpbin.org/redirect-to?url=https%3A%2F%2Fhttpbin.org%2Fget%3Ffrom%3Ditamae"
end
######
# Stop/start cron and dump process listings so the suite can assert on
# the resulting service state ("; true" keeps the run from failing).
service "cron" do
  action :stop
end

execute "ps -C cron > /tmp/cron_stopped; true"

service "cron" do
  action :start
end

execute "ps -C cron > /tmp/cron_running; true"

######
package "nginx" do
  options "--force-yes"
end

# Multiple actions in one declaration: enable at boot and start now.
service "nginx" do
  action [:enable, :start]
end

execute "test -f /etc/rc3.d/S20nginx" # test
execute "test $(ps h -C nginx | wc -l) -gt 0" # test

service "nginx" do
  action [:disable, :stop]
end

execute "test ! -f /etc/rc3.d/S20nginx" # test
execute "test $(ps h -C nginx | wc -l) -eq 0" # test
######
link "/tmp-link" do
  to "/tmp"
end

# force true replaces an existing regular file with the symlink.
execute "touch /tmp-link-force"
link "/tmp-link-force" do
  to "/tmp"
  force true
end
#####
# local_ruby_block runs arbitrary Ruby during the itamae run.
local_ruby_block "greeting" do
  block do
    Itamae.logger.info "板前"
  end
end

#####
package "git"

git "/tmp/git_repo" do
  repository "https://github.com/ryotarai/infrataster.git"
  revision "v0.1.0"
end

# recursive true also checks out git submodules.
git "/tmp/git_repo_submodule" do
  repository "https://github.com/mmasaki/fake_repo_including_submodule.git"
  recursive true
end
#####
# Run a command as another user; $HOME and the working directory in the
# output file should reflect that user.
execute "echo -n \"$HOME\n$(pwd)\" > /tmp/created_by_itamae_user" do
  user "itamae"
end

#####
# Cross-recipe notification: the notified file resource is declared in
# default2.rb.
execute "echo 'notify to resource in default2.rb'" do
  notifies :create, "file[put file in default2.rb]"
end

#####
file "/tmp/never_exist3" do
  action :create
end

file "/tmp/never_exist3" do
  action :delete
end

#####
# definition_example is a custom definition provided by define/default.rb.
include_recipe "define/default.rb"

definition_example "name" do
  key 'value'
end
#####
file "/tmp/never_exist4" do
  action :nothing
end

file "/tmp/file1" do
  content "Hello, World"
end

# The second declaration makes no change, so its notification must not
# fire and /tmp/never_exist4 must not be created.
file "/tmp/file1" do
  content "Hello, World"
  notifies :create, "file[/tmp/never_exist4]"
end

#####
file "/tmp/file_create_without_content" do
  content "Hello, World"
end

# Updating only owner/group/mode must leave the content untouched.
file "/tmp/file_create_without_content" do
  owner "itamae"
  group "itamae"
  mode "600"
end

#####
# verify runs a check command; the resource fails when it does not succeed.
execute 'true' do
  verify 'true'
end
#####
# Chained delayed notifications: each triggered resource may itself
# notify the next one.
execute 'echo 1 > /tmp/multi_delayed_notifies' do
  notifies :run, "execute[echo 2 >> /tmp/multi_delayed_notifies]"
end

execute 'echo 2 >> /tmp/multi_delayed_notifies' do
  action :nothing
  notifies :run, "execute[echo 3 >> /tmp/multi_delayed_notifies]"
end

execute 'echo 3 >> /tmp/multi_delayed_notifies' do
  action :nothing
  notifies :run, "execute[echo 4 >> /tmp/multi_delayed_notifies]"
end

execute 'echo 4 >> /tmp/multi_delayed_notifies' do
  action :nothing
end

#####
# Same chain, but with immediate notifications.
execute 'echo 1 > /tmp/multi_immediately_notifies' do
  notifies :run, "execute[echo 2 >> /tmp/multi_immediately_notifies]", :immediately
end

execute 'echo 2 >> /tmp/multi_immediately_notifies' do
  action :nothing
  notifies :run, "execute[echo 3 >> /tmp/multi_immediately_notifies]", :immediately
end

execute 'echo 3 >> /tmp/multi_immediately_notifies' do
  action :nothing
  notifies :run, "execute[echo 4 >> /tmp/multi_immediately_notifies]", :immediately
end

execute 'echo 4 >> /tmp/multi_immediately_notifies' do
  action :nothing
end
#####
execute 'echo -n 1 > /tmp/file_edit_notifies' do
  action :nothing
end

# action :edit modifies an existing file in place: content via the block,
# plus optional owner/group/mode changes.
file '/tmp/file_edit_sample' do
  content 'Hello, world'
  owner 'itamae'
  group 'itamae'
  mode '444'
end

file '/tmp/file_edit_sample' do
  action :edit
  owner 'itamae2'
  group 'itamae2'
  mode '400'
  block do |content|
    content.gsub!('world', 'Itamae')
  end
  notifies :run, "execute[echo -n 1 > /tmp/file_edit_notifies]"
end

file '/tmp/file_edit_keeping_mode_owner' do
  content 'Hello, world'
  owner 'itamae'
  group 'itamae'
  mode '444'
end

# An edit without owner/group/mode attributes must keep the old ones.
file '/tmp/file_edit_keeping_mode_owner' do
  action :edit
  block do |content|
    content.gsub!('world', 'Itamae')
  end
end

###
# An edit that changes content must update the mtime.
execute "f=/tmp/file_edit_with_content_change_updates_timestamp && echo 'Hello, world' > $f && touch -d 2016-05-02T01:23:45 $f"

file "/tmp/file_edit_with_content_change_updates_timestamp" do
  action :edit
  block do |content|
    content.gsub!('world', 'Itamae')
  end
end

###
# An edit that changes nothing must keep the mtime.
execute "touch -d 2016-05-02T12:34:56 /tmp/file_edit_without_content_change_keeping_timestamp"

file "/tmp/file_edit_without_content_change_keeping_timestamp" do
  action :edit
  block do |content|
    # no change
  end
end

###
file '/tmp/file_without_content_change_updates_mode_and_owner' do
  action :create
  content 'Hello, world'
  owner 'itamae'
  group 'itamae'
  mode '444'
end

file '/tmp/file_without_content_change_updates_mode_and_owner' do
  action :create
  content 'Hello, world' # no change
  owner 'itamae2'
  group 'itamae2'
  mode '666'
end

###
execute "touch -d 2016-05-01T01:23:45 /tmp/file_with_content_change_updates_timestamp"

file "/tmp/file_with_content_change_updates_timestamp" do
  content "Hello, world"
end

###
# Content already matches (including the trailing newline), so the mtime
# must be preserved.
execute "echo 'Hello, world' > /tmp/file_without_content_change_keeping_timestamp ; touch -d 2016-05-01T12:34:56 /tmp/file_without_content_change_keeping_timestamp"

file "/tmp/file_without_content_change_keeping_timestamp" do
  content "Hello, world\n"
end
###
# run_command must be available in recipes, definitions, resource bodies
# and local_ruby_block blocks alike.
unless run_command("echo -n Hello").stdout == "Hello"
  raise "run_command in a recipe failed"
end

define :run_command_in_definition do
  unless run_command("echo -n Hello").stdout == "Hello"
    raise "run_command in a definition failed"
  end
end

execute "echo Hello" do
  unless run_command("echo -n Hello").stdout == "Hello"
    raise "run_command in a resource failed"
  end
end

local_ruby_block 'execute run_command' do
  block do
    unless run_command("echo -n Hello").stdout == "Hello"
      raise "run_command in local_ruby_block failed"
    end
  end
end

###
# Host inventory values must be reachable via method call, symbol keys
# and string keys, and the memory total should look like "<digits>kB".
v1 = node.memory.total
v2 = node[:memory][:total]
v3 = node['memory']['total']
unless v1 == v2 && v2 == v3 && v1 =~ /\A\d+kB\z/
  raise "failed to fetch host inventory value (#{v1}, #{v2}, #{v3})"
end
|
require 'spec_helper'
# Generates the emoji JSON fixture consumed by the frontend (Karma) specs.
describe 'Emojis (JavaScript fixtures)' do
  include JavaScriptFixturesHelpers

  before(:all) do
    clean_frontend_fixtures('emojis/')
  end

  it 'emojis/emojis.json' do |example|
    # Copying the emojis.json from the public folder
    fixture_file_name = File.expand_path('emojis/emojis.json', JavaScriptFixturesHelpers::FIXTURE_PATH)
    FileUtils.mkdir_p(File.dirname(fixture_file_name))
    FileUtils.cp(Rails.root.join('public/-/emojis/1/emojis.json'), fixture_file_name)
  end
end
Fix mismatch with EE: rename FIXTURE_PATH to FIXTURE_PATHS and copy the fixture into every configured path
Signed-off-by: Rémy Coutable <4ea0184b9df19e0786dd00b28e6daa4d26baeb3e@rymai.me>
require 'spec_helper'
# Generates the emoji JSON fixture in every configured fixture path
# (e.g. CE and EE) for the frontend (Karma) specs.
describe 'Emojis (JavaScript fixtures)' do
  include JavaScriptFixturesHelpers

  before(:all) do
    clean_frontend_fixtures('emojis/')
  end

  it 'emojis/emojis.json' do |example|
    JavaScriptFixturesHelpers::FIXTURE_PATHS.each do |fixture_path|
      # Copying the emojis.json from the public folder
      fixture_file_name = File.expand_path('emojis/emojis.json', fixture_path)
      FileUtils.mkdir_p(File.dirname(fixture_file_name))
      FileUtils.cp(Rails.root.join('public/-/emojis/1/emojis.json'), fixture_file_name)
    end
  end
end
|
require "spec_helper"
# Specs for Kotoba::Document: recurring page elements (header/footer,
# page numbering), positioning helpers, per-page layouts, rendering,
# PDF metadata and the document outline.
describe Kotoba::Document do
  let(:document) { Kotoba::Document.new(Kotoba.config.to_h) }

  # A 10x20cm page with asymmetric margins so the position expectations
  # below all have distinct values.
  before do
    Kotoba.config do |c|
      c.layout do |l|
        l.width = 10.cm
        l.height = 20.cm
        l.margin do |m|
          m.top = 1.cm
          m.bottom = 2.cm
          m.outer = 3.cm
          m.inner = 4.cm
        end
      end
    end
  end
  after { Kotoba.clear_config! }

  describe ".add_recurring_elements!" do
    before do
      2.times { document.start_new_page }
    end

    it "should open all pages" do
      [1, 2, 3].each do |i|
        document.should_receive(:go_to_page).with(i).and_call_original
      end
      document.add_recurring_elements!
    end

    it "should add recurring elements to the open page" do
      [:header, :footer].each do |v|
        document.should_receive(:add_recurring_element).with(v).
          exactly(3).times.and_call_original
      end
      document.add_recurring_elements!
    end

    describe "page numbering" do
      before do
        Kotoba.config.layout.header.page_numbering do |n|
          n.active = true
        end
      end

      it "should increment page numbers" do
        counters = document.instance_variable_get("@page_counters")
        counters.should be_nil
        document.add_recurring_elements!
        counters = document.instance_variable_get("@page_counters")
        # After visiting all 3 pages the counter is one past the last page.
        counters[counters.keys.first].should == { number: 4, total: 3 }
      end

      it "should add page numbering" do
        [1, 2, 3].each do |i|
          document.should_receive(:text).with(i.to_s, kind_of(Hash))
        end
        document.add_recurring_elements!
      end

      context "with start_count_at set" do
        before do
          Kotoba.config.layout.header.page_numbering do |n|
            n.active = true
            n.start_count_at = 10
          end
        end

        it "should start counting at start_count_at value" do
          [10, 11, 12].each do |i|
            document.should_receive(:text).with(i.to_s, kind_of(Hash))
          end
          document.add_recurring_elements!
        end
      end
    end
  end

  describe ".add_recurring_element" do
    describe "custom content" do
      context "header" do
        before do
          Kotoba.config.layout.header.content do |p|
            p.text "Test header"
          end
        end

        it "should position the header" do
          document.should_receive(:canvas).and_call_original
          document.should_receive(:bounding_box).with(
            [be_within(0.001).of(4.cm), be_within(0.001).of(20.cm)],
            {
              top: be_instance_of(Proc),
              height: be_within(0.001).of(1.cm),
              width: be_within(0.001).of(3.cm)
            }
          ).and_call_original
        end

        it "should call the custom content block" do
          document.should_receive(:text).with("Test header")
        end

        # The element is added after each example, exercising the message
        # expectations that the examples set up.
        after { document.add_recurring_element(:header) }
      end

      context "footer" do
        before do
          Kotoba.config.layout.footer.content do |p|
            p.text "Test footer"
          end
        end

        it "should position the footer" do
          document.should_receive(:canvas).and_call_original
          document.should_receive(:bounding_box).with(
            [be_within(0.001).of(4.cm), be_within(0.001).of(2.cm)],
            {
              top: kind_of(Proc),
              height: be_within(0.001).of(2.cm),
              width: be_within(0.001).of(3.cm)
            }
          ).and_call_original
        end

        it "should call the custom content block" do
          document.should_receive(:text).with("Test footer")
        end

        after { document.add_recurring_element(:footer) }
      end
    end

    describe "page numbering" do
      before do
        Kotoba.config.layout.footer.page_numbering do |n|
          n.active = true
        end
      end

      it "should position the page numbering" do
        document.should_receive(:canvas).and_call_original
        document.should_receive(:bounding_box).with(
          [be_within(0.001).of(4.cm), be_within(0.001).of(2.cm)],
          {
            top: kind_of(Proc),
            height: be_within(0.001).of(2.cm),
            width: be_within(0.001).of(3.cm)
          }
        ).and_call_original
        document.add_recurring_element(:footer)
      end
    end
  end

  describe ".set_page_counter" do
    let(:numbering) { Kotoba::Layout::RecurringElement::PageNumbering.new }
    subject { document.send(:set_page_counter, numbering) }

    context "with page numbers based on start_count_at" do
      before do
        document.stub(page_count: 3).stub(page_number: 1)
        numbering.start_count_at = 5
      end
      its([:number]) { should == 5 }
      its([:total]) { should == 7 }
    end

    context "with prawn page numbers" do
      before do
        document.stub(page_count: 2).stub(page_number: 1)
        # start_count_at 0 means: fall back to Prawn's own page numbers.
        numbering.start_count_at = 0
      end
      its([:number]) { should == 1 }
      its([:total]) { should == 2 }
    end
  end

  describe ".header_top_position" do
    subject { document.send(:header_top_position) }
    it { should == 17.cm }
  end

  describe ".footer_top_position" do
    subject { document.send(:footer_top_position) }
    it { should == 2.cm }
  end

  describe ".left_position" do
    subject { document.send(:left_position) }

    context "on odd page number" do
      before { document.stub(:page_number => 1) }
      it "should left align based on default location of page" do
        should == 4.cm
      end
    end

    context "on even page number" do
      before { document.stub(:page_number => 2) }
      it "should left align based on difference in inner and outer margin" do
        should == 3.cm
      end
    end
  end

  describe "page size" do
    before do
      Kotoba.clear_config!
      Kotoba.config do |c|
        @first_layout = c.layout_for 1 do |l|
          l.width = 10.cm
          l.height = 10.cm
        end
        @second_layout = c.layout_for 2 do |l|
          l.width = 20.cm
          l.height = 20.cm
        end
        @default_layout = c.layout do |l|
          l.width = 30.cm
          l.height = 30.cm
        end
        @third_layout = c.layout_for 4..5 do |l|
          l.orientation = :landscape
          l.width = 40.cm
          l.height = 50.cm
        end
      end
    end

    describe ".layout" do
      context "with layout for specific page" do
        it "should retrieve layout for page" do
          # First specific layout
          document.page_number.should == 1
          document.layout.should == @first_layout
          document.start_new_page
          # Second specific layout
          document.page_number.should == 2
          document.layout.should == @second_layout
          document.start_new_page
          # Default layout
          document.page_number.should == 3
          document.layout.should == @default_layout
          document.start_new_page
          # Ranged layout
          document.page_number.should == 4
          document.layout.should == @third_layout
        end
      end
    end

    describe ".render" do
      before do
        document.text "first page"
        document.start_new_page
        document.text "second page"
        document.start_new_page
        document.text "third page"
        document.start_new_page
        document.text "fourth page"
        document.start_new_page
        document.text "fifth page"
      end
      subject { PDF::Inspector::Page.analyze(document.render).pages }

      it "should use the configured layout per page" do
        subject[0][:size].should == [10.cm, 10.cm]
        subject[1][:size].should == [20.cm, 20.cm]
        subject[2][:size].should == [30.cm, 30.cm]
      end

      it "should put page in correct orientation" do
        subject[3][:size].should == [40.cm, 50.cm]
        subject[4][:size].should == [40.cm, 50.cm]
      end
    end
  end

  describe "metadata" do
    let(:document) { Kotoba::Document.new(Kotoba.config.to_h) }
    before do
      Time.stub(:now => "stubbed time")
      Kotoba.config do |c|
        c.title = "Test title"
        c.subject = "Test subject"
        c.keywords = "Test keywords"
        c.creator = "The creator"
        c.authors = ["Tom de Bruijn", "John Doe"]
      end
      # Render the document and read its info dictionary back.
      reader = read_document(document)
      @metadata = PDF::Reader.new(reader).info
    end
    subject { @metadata }

    it "should add standard values" do
      subject[:CreationDate].should == "stubbed time"
      subject[:Title].should == "Test title"
      subject[:Subject].should == "Test subject"
      subject[:Keywords].should == "Test keywords"
      subject[:Creator].should == "The creator"
      subject[:Author].should == "Tom de Bruijn, John Doe"
      subject[:Producer].should == "Kotoba"
    end

    context "with custom metadata" do
      before(:all) { Kotoba.config.metadata = {:Grok => "Test property"} }
      its([:Grok]) { should == "Test property" }
    end
  end

  describe "document outline" do
    let(:headings) do
      [
        { name: "Chapter 1", page: 1, level: 1, children: [] },
        { name: "Chapter 2", page: 2, level: 1, children: [
            { name: "Chapter 3", page: 3, level: 2, children: [] }
          ]
        }
      ]
    end
    before do
      document.headings = headings
      3.times { document.start_new_page }
    end

    describe ".outline!" do
      it "should call outline generation method" do
        document.should_receive(:outline_chapter_headings).with(kind_of(Array))
        document.outline!
      end
    end

    describe ".outline_chapter_headings" do
      before { document.send(:outline_chapter_headings, headings) }

      it "should add a chapter to the outline" do
        find_chapter_by_title(document, "Chapter 1").should_not be_nil
      end

      it "should add a parent chapter to the outline" do
        find_chapter_by_title(document, "Chapter 2").should_not be_nil
      end

      it "should add nested chapters to the outline" do
        find_chapter_by_title(document, "Chapter 3").should_not be_nil
      end
    end
  end
end
# Renders the document and wraps the resulting PDF bytes in a
# read/write StringIO so they can be consumed like a file.
#
# @param document [Prawn::Document]
# @return [StringIO]
#
def read_document(document)
  rendered_pdf = document.render
  StringIO.new(rendered_pdf, "r+")
end
# Renders the Prawn document to a PDF which is then read to extract
# details about the end result.
#
# @param document [Prawn::Document]
# @return [PDF::Reader::ObjectHash] PDF as an object
#
def find_objects(document)
  PDF::Reader::ObjectHash.new(read_document(document))
end
# Outline titles are stored as UTF-16. This method accepts a UTF-8 outline title
# and returns the PDF Object that contains an outline with that name, or nil
# when no outline entry matches.
# https://github.com/prawnpdf/prawn/blob/master/spec/outline_spec.rb#L410
#
def find_chapter_by_title(document, title)
  hash = find_objects(document)
  # detect (not select): select returns [] when nothing matches, which is
  # truthy/non-nil, so the callers' `should_not be_nil` could never fail.
  hash.values.detect do |o|
    next unless o.is_a?(Hash) && o[:Title]
    title_codepoints = o[:Title].unpack("n*")
    title_codepoints.shift # drop the UTF-16 byte order mark
    title_codepoints.pack("U*") == title
  end
end
Remove the separate test for the protected outline method
Speed up the tests by not rendering the PDF for every spec
require "spec_helper"
# Specs for Kotoba::Document: recurring page elements (header/footer,
# page numbering), positioning helpers, per-page layouts, rendering,
# PDF metadata and the document outline.
describe Kotoba::Document do
  let(:document) { Kotoba::Document.new(Kotoba.config.to_h) }

  # A 10x20cm page with asymmetric margins so the position expectations
  # below all have distinct values.
  before do
    Kotoba.config do |c|
      c.layout do |l|
        l.width = 10.cm
        l.height = 20.cm
        l.margin do |m|
          m.top = 1.cm
          m.bottom = 2.cm
          m.outer = 3.cm
          m.inner = 4.cm
        end
      end
    end
  end
  after { Kotoba.clear_config! }

  describe ".add_recurring_elements!" do
    before do
      2.times { document.start_new_page }
    end

    it "should open all pages" do
      [1, 2, 3].each do |i|
        document.should_receive(:go_to_page).with(i).and_call_original
      end
      document.add_recurring_elements!
    end

    it "should add recurring elements to the open page" do
      [:header, :footer].each do |v|
        document.should_receive(:add_recurring_element).with(v).
          exactly(3).times.and_call_original
      end
      document.add_recurring_elements!
    end

    describe "page numbering" do
      before do
        Kotoba.config.layout.header.page_numbering do |n|
          n.active = true
        end
      end

      it "should increment page numbers" do
        counters = document.instance_variable_get("@page_counters")
        counters.should be_nil
        document.add_recurring_elements!
        counters = document.instance_variable_get("@page_counters")
        # After visiting all 3 pages the counter is one past the last page.
        counters[counters.keys.first].should == { number: 4, total: 3 }
      end

      it "should add page numbering" do
        [1, 2, 3].each do |i|
          document.should_receive(:text).with(i.to_s, kind_of(Hash))
        end
        document.add_recurring_elements!
      end

      context "with start_count_at set" do
        before do
          Kotoba.config.layout.header.page_numbering do |n|
            n.active = true
            n.start_count_at = 10
          end
        end

        it "should start counting at start_count_at value" do
          [10, 11, 12].each do |i|
            document.should_receive(:text).with(i.to_s, kind_of(Hash))
          end
          document.add_recurring_elements!
        end
      end
    end
  end

  describe ".add_recurring_element" do
    describe "custom content" do
      context "header" do
        before do
          Kotoba.config.layout.header.content do |p|
            p.text "Test header"
          end
        end

        it "should position the header" do
          document.should_receive(:canvas).and_call_original
          document.should_receive(:bounding_box).with(
            [be_within(0.001).of(4.cm), be_within(0.001).of(20.cm)],
            {
              top: be_instance_of(Proc),
              height: be_within(0.001).of(1.cm),
              width: be_within(0.001).of(3.cm)
            }
          ).and_call_original
        end

        it "should call the custom content block" do
          document.should_receive(:text).with("Test header")
        end

        # The element is added after each example, exercising the message
        # expectations that the examples set up.
        after { document.add_recurring_element(:header) }
      end

      context "footer" do
        before do
          Kotoba.config.layout.footer.content do |p|
            p.text "Test footer"
          end
        end

        it "should position the footer" do
          document.should_receive(:canvas).and_call_original
          document.should_receive(:bounding_box).with(
            [be_within(0.001).of(4.cm), be_within(0.001).of(2.cm)],
            {
              top: kind_of(Proc),
              height: be_within(0.001).of(2.cm),
              width: be_within(0.001).of(3.cm)
            }
          ).and_call_original
        end

        it "should call the custom content block" do
          document.should_receive(:text).with("Test footer")
        end

        after { document.add_recurring_element(:footer) }
      end
    end

    describe "page numbering" do
      before do
        Kotoba.config.layout.footer.page_numbering do |n|
          n.active = true
        end
      end

      it "should position the page numbering" do
        document.should_receive(:canvas).and_call_original
        document.should_receive(:bounding_box).with(
          [be_within(0.001).of(4.cm), be_within(0.001).of(2.cm)],
          {
            top: kind_of(Proc),
            height: be_within(0.001).of(2.cm),
            width: be_within(0.001).of(3.cm)
          }
        ).and_call_original
        document.add_recurring_element(:footer)
      end
    end
  end

  describe ".set_page_counter" do
    let(:numbering) { Kotoba::Layout::RecurringElement::PageNumbering.new }
    subject { document.send(:set_page_counter, numbering) }

    context "with page numbers based on start_count_at" do
      before do
        document.stub(page_count: 3).stub(page_number: 1)
        numbering.start_count_at = 5
      end
      its([:number]) { should == 5 }
      its([:total]) { should == 7 }
    end

    context "with prawn page numbers" do
      before do
        document.stub(page_count: 2).stub(page_number: 1)
        # start_count_at 0 means: fall back to Prawn's own page numbers.
        numbering.start_count_at = 0
      end
      its([:number]) { should == 1 }
      its([:total]) { should == 2 }
    end
  end

  describe ".header_top_position" do
    subject { document.send(:header_top_position) }
    it { should == 17.cm }
  end

  describe ".footer_top_position" do
    subject { document.send(:footer_top_position) }
    it { should == 2.cm }
  end

  describe ".left_position" do
    subject { document.send(:left_position) }

    context "on odd page number" do
      before { document.stub(:page_number => 1) }
      it "should left align based on default location of page" do
        should == 4.cm
      end
    end

    context "on even page number" do
      before { document.stub(:page_number => 2) }
      it "should left align based on difference in inner and outer margin" do
        should == 3.cm
      end
    end
  end

  describe "page size" do
    before do
      Kotoba.clear_config!
      Kotoba.config do |c|
        @first_layout = c.layout_for 1 do |l|
          l.width = 10.cm
          l.height = 10.cm
        end
        @second_layout = c.layout_for 2 do |l|
          l.width = 20.cm
          l.height = 20.cm
        end
        @default_layout = c.layout do |l|
          l.width = 30.cm
          l.height = 30.cm
        end
        @third_layout = c.layout_for 4..5 do |l|
          l.orientation = :landscape
          l.width = 40.cm
          l.height = 50.cm
        end
      end
    end

    describe ".layout" do
      context "with layout for specific page" do
        it "should retrieve layout for page" do
          # First specific layout
          document.page_number.should == 1
          document.layout.should == @first_layout
          document.start_new_page
          # Second specific layout
          document.page_number.should == 2
          document.layout.should == @second_layout
          document.start_new_page
          # Default layout
          document.page_number.should == 3
          document.layout.should == @default_layout
          document.start_new_page
          # Ranged layout
          document.page_number.should == 4
          document.layout.should == @third_layout
        end
      end
    end

    describe ".render" do
      before do
        document.text "first page"
        document.start_new_page
        document.text "second page"
        document.start_new_page
        document.text "third page"
        document.start_new_page
        document.text "fourth page"
        document.start_new_page
        document.text "fifth page"
      end
      subject { PDF::Inspector::Page.analyze(document.render).pages }

      it "should use the configured layout per page" do
        subject[0][:size].should == [10.cm, 10.cm]
        subject[1][:size].should == [20.cm, 20.cm]
        subject[2][:size].should == [30.cm, 30.cm]
      end

      it "should put page in correct orientation" do
        subject[3][:size].should == [40.cm, 50.cm]
        subject[4][:size].should == [40.cm, 50.cm]
      end
    end
  end

  describe "metadata" do
    let(:document) { Kotoba::Document.new(Kotoba.config.to_h) }
    before do
      Time.stub(:now => "stubbed time")
      Kotoba.config do |c|
        c.title = "Test title"
        c.subject = "Test subject"
        c.keywords = "Test keywords"
        c.creator = "The creator"
        c.authors = ["Tom de Bruijn", "John Doe"]
      end
      # Render the document and read its info dictionary back.
      reader = read_document(document)
      @metadata = PDF::Reader.new(reader).info
    end
    subject { @metadata }

    it "should add standard values" do
      subject[:CreationDate].should == "stubbed time"
      subject[:Title].should == "Test title"
      subject[:Subject].should == "Test subject"
      subject[:Keywords].should == "Test keywords"
      subject[:Creator].should == "The creator"
      subject[:Author].should == "Tom de Bruijn, John Doe"
      subject[:Producer].should == "Kotoba"
    end

    context "with custom metadata" do
      before(:all) { Kotoba.config.metadata = {:Grok => "Test property"} }
      its([:Grok]) { should == "Test property" }
    end
  end

  describe "document outline" do
    # Built once in before(:all): rendering and parsing the PDF is
    # expensive, so the object hash is shared across the examples below.
    before :all do
      document = Kotoba::Document.new
      document.headings = [
        { name: "Chapter 1", page: 1, level: 1, children: [] },
        { name: "Chapter 2", page: 2, level: 1, children: [
            { name: "Chapter 3", page: 3, level: 2, children: [] }
          ]
        }
      ]
      3.times { document.start_new_page }
      document.outline!
      @objects = find_objects(document)
    end

    it "should add a chapter to the outline" do
      find_chapter_by_title(@objects, "Chapter 1").should_not be_nil
    end

    it "should add a parent chapter to the outline" do
      find_chapter_by_title(@objects, "Chapter 2").should_not be_nil
    end

    it "should add nested chapters to the outline" do
      find_chapter_by_title(@objects, "Chapter 3").should_not be_nil
    end
  end
end
# Renders the document and wraps the resulting PDF bytes in a
# read/write StringIO so they can be consumed like a file.
#
# @param document [Prawn::Document]
# @return [StringIO]
#
def read_document(document)
  rendered_pdf = document.render
  StringIO.new(rendered_pdf, "r+")
end
# Renders the Prawn document to a PDF which is then read to extract
# details about the end result.
#
# @param document [Prawn::Document]
# @return [PDF::Reader::ObjectHash] PDF as an object
#
def find_objects(document)
  PDF::Reader::ObjectHash.new(read_document(document))
end
# Outline titles are stored as UTF-16. This method accepts a UTF-8 outline title
# and returns the PDF Object that contains an outline with that name, or nil
# when no outline entry matches.
# https://github.com/prawnpdf/prawn/blob/master/spec/outline_spec.rb#L410
#
def find_chapter_by_title(objects, title)
  # detect (not select): select returns [] when nothing matches, which is
  # truthy/non-nil, so the callers' `should_not be_nil` could never fail.
  objects.values.detect do |o|
    next unless o.is_a?(Hash) && o[:Title]
    title_codepoints = o[:Title].unpack("n*")
    title_codepoints.shift # drop the UTF-16 byte order mark
    title_codepoints.pack("U*") == title
  end
end
|
Add model spec.
require 'spec_helper'
module Overriaktion
  # Specs for Overriaktion::Model: access to the API client and handling
  # of attributes passed at construction time.
  describe Model do
    subject { model }
    let(:model) { Model.new }

    it 'grabs an instance of the client' do
      # #client is not public, hence the send.
      subject.send(:client).should be_an_instance_of(Client)
    end

    context 'with an attribute' do
      let(:model) { Model.new(:id => 1) }
      specify { subject.id.should == 1 }
    end
  end
end
|
require "spec_helper"
# Specs for the Package repository model: path bookkeeping (#paths),
# snippet.json loading, #glob matching and persistence round-trips.
describe ::SocialSnippet::Repository::Models::Package do
  let(:repo_name) { "my-repo" }
  let(:rev_hash) { "commit-id" }
  let(:package) do
    ::SocialSnippet::Repository::Models::Package.new(
      :repo_name => repo_name,
      :rev_hash => rev_hash,
    )
  end
  before { ::SocialSnippet::Repository::Models::Package.core = fake_core }

  describe "#paths" do
    context "add file" do
      before { package.add_file "file", "file-data" }
      subject { package.paths }
      it { should include "file" }
      context "check filesystem" do
        # The file must also exist on the (fake) filesystem.
        subject { ::FileTest.file? fake_core.config.package_path(repo_name, rev_hash, "file") }
        it { should be_truthy }
      end
    end

    context "add dir" do
      before { package.add_directory "dir" }
      subject { package.paths }
      # Directories are recorded with a trailing slash.
      it { should include "dir/" }
      context "check filesystem" do
        subject { ::FileTest.directory? fake_core.config.package_path(repo_name, rev_hash, "dir") }
        it { should be_truthy }
      end
      context "add dir/file" do
        before { package.add_file "dir/file", "dir/file-data" }
        subject { package.paths }
        it { should include "dir/file" }
        context "check filesystem" do
          subject { ::FileTest.file? fake_core.config.package_path(repo_name, rev_hash, "dir/file") }
          it { should be_truthy }
        end
      end
    end

    # A trailing slash on the input must be normalized.
    context "add dir/" do
      before { package.add_directory "dir/" }
      subject { package.paths }
      it { should include "dir/" }
      context "check filesystem" do
        subject { ::FileTest.directory? fake_core.config.package_path(repo_name, rev_hash, "dir") }
        it { should be_truthy }
      end
    end
  end # files

  describe "#load_snippet_json" do
    context "add snippet.json" do
      before do
        package.add_file "snippet.json", {
          :name => "package-name",
          :desc => "package-desc",
          :main => "package-main"
        }.to_json
      end
      context "check snippet.json" do
        it { expect(package.snippet_json["name"]).to eq "package-name" }
        it { expect(package.snippet_json["desc"]).to eq "package-desc" }
        it { expect(package.snippet_json["main"]).to eq "package-main" }
      end
    end
  end #load_snippet_json

  describe "#glob" do
    context "prepare files" do
      before do
        package.add_file "file1.cpp", ""
        package.add_file "file2.rb", ""
        package.add_file "file3.cpp", ""
        package.add_directory "subdir"
        package.add_file "subdir/file4.cpp", ""
        package.add_file "subdir/file5.rb", ""
      end

      # Patterns must not match across directory boundaries.
      context "glob *.cpp" do
        subject { package.glob "*.cpp" }
        it { should include "file1.cpp" }
        it { should_not include "file2.rb" }
        it { should include "file3.cpp" }
        it { should_not include "subdir/file4.cpp" }
        it { should_not include "subdir/file5.rb" }
      end

      context "glob subdir/*.rb" do
        subject { package.glob "subdir/*.rb" }
        it { should_not include "file1.cpp" }
        it { should_not include "file2.rb" }
        it { should_not include "file3.cpp" }
        it { should_not include "subdir/file4.cpp" }
        it { should include "subdir/file5.rb" }
      end
    end
  end #glob

  describe "serialization" do
    context "prepare files" do
      before do
        package.add_file "file1.cpp", ""
        package.add_file "file2.rb", ""
        package.add_file "file3.cpp", ""
        package.add_directory "subdir"
        package.add_file "subdir/file4.cpp", ""
        package.add_file "subdir/file5.rb", ""
      end

      context "save package" do
        before { package.save! }

        context "load package" do
          # Reload from the datastore to prove paths survive a round-trip.
          let(:loaded_package) do
            ::SocialSnippet::Repository::Models::Package.find_by(
              :repo_name => "my-repo",
              :rev_hash => "commit-id",
            )
          end

          context "glob *.cpp" do
            subject { loaded_package.glob "*.cpp" }
            it { should include "file1.cpp" }
            it { should_not include "file2.rb" }
            it { should include "file3.cpp" }
            it { should_not include "subdir/file4.cpp" }
            it { should_not include "subdir/file5.rb" }
          end

          context "glob subdir/*.rb" do
            subject { loaded_package.glob "subdir/*.rb" }
            it { should_not include "file1.cpp" }
            it { should_not include "file2.rb" }
            it { should_not include "file3.cpp" }
            it { should_not include "subdir/file4.cpp" }
            it { should include "subdir/file5.rb" }
          end
        end
      end
    end # save package
  end # serialization
end # ::SocialSnippet::Repository::Models::Package
package_spec: fix tests
require "spec_helper"
# Specs for the Package repository model. Files live under the package's
# "main" directory (taken from snippet.json's "main" key), so a
# snippet.json is installed first via add_system_file.
describe ::SocialSnippet::Repository::Models::Package do
  let(:repo_name) { "my-repo" }
  let(:rev_hash) { "commit-id" }
  let(:package) do
    ::SocialSnippet::Repository::Models::Package.new(
      :repo_name => repo_name,
      :rev_hash => rev_hash,
    )
  end
  before { ::SocialSnippet::Repository::Models::Package.core = fake_core }

  context "add snippet.json" do
    before do
      package.add_system_file "snippet.json", {
        :name => "package-name",
        :desc => "package-desc",
        :main => "package-main"
      }.to_json
    end

    context "check snippet.json" do
      it { expect(package.snippet_json["name"]).to eq "package-name" }
      it { expect(package.snippet_json["desc"]).to eq "package-desc" }
      it { expect(package.snippet_json["main"]).to eq "package-main" }
    end

    describe "#paths" do
      context "add file" do
        before { package.add_file "file", "file-data" }
        subject { package.paths }
        it { should include "file" }
        context "check filesystem" do
          # Files are stored below the package's main directory.
          subject { ::FileTest.file? fake_core.config.package_path(repo_name, rev_hash, ::File.join("package-main", "file")) }
          it { should be_truthy }
        end
      end

      context "add dir" do
        before { package.add_directory "dir" }
        subject { package.paths }
        # Directories are recorded with a trailing slash.
        it { should include "dir/" }
        context "check filesystem" do
          subject { ::FileTest.directory? fake_core.config.package_path(repo_name, rev_hash, ::File.join("package-main", "dir")) }
          it { should be_truthy }
        end
        context "add dir/file" do
          before { package.add_file "dir/file", "dir/file-data" }
          subject { package.paths }
          it { should include "dir/file" }
          context "check filesystem" do
            subject { ::FileTest.file? fake_core.config.package_path(repo_name, rev_hash, ::File.join("package-main", "dir/file")) }
            it { should be_truthy }
          end
        end
      end

      # A trailing slash on the input must be normalized.
      context "add dir/" do
        before { package.add_directory "dir/" }
        subject { package.paths }
        it { should include "dir/" }
        context "check filesystem" do
          subject { ::FileTest.directory? fake_core.config.package_path(repo_name, rev_hash, ::File.join("package-main", "dir")) }
          it { should be_truthy }
        end
      end
    end # files

    describe "#glob" do
      context "prepare files" do
        before do
          package.add_file "file1.cpp", ""
          package.add_file "file2.rb", ""
          package.add_file "file3.cpp", ""
          package.add_directory "subdir"
          package.add_file "subdir/file4.cpp", ""
          package.add_file "subdir/file5.rb", ""
        end

        # Patterns must not match across directory boundaries.
        context "glob *.cpp" do
          subject { package.glob "*.cpp" }
          it { should include "file1.cpp" }
          it { should_not include "file2.rb" }
          it { should include "file3.cpp" }
          it { should_not include "subdir/file4.cpp" }
          it { should_not include "subdir/file5.rb" }
        end

        context "glob subdir/*.rb" do
          subject { package.glob "subdir/*.rb" }
          it { should_not include "file1.cpp" }
          it { should_not include "file2.rb" }
          it { should_not include "file3.cpp" }
          it { should_not include "subdir/file4.cpp" }
          it { should include "subdir/file5.rb" }
        end
      end
    end #glob

    describe "serialization" do
      context "prepare files" do
        before do
          package.add_file "file1.cpp", ""
          package.add_file "file2.rb", ""
          package.add_file "file3.cpp", ""
          package.add_directory "subdir"
          package.add_file "subdir/file4.cpp", ""
          package.add_file "subdir/file5.rb", ""
        end

        context "save package" do
          before { package.save! }

          context "load package" do
            # Reload from the datastore to prove paths survive a round-trip.
            let(:loaded_package) do
              ::SocialSnippet::Repository::Models::Package.find_by(
                :repo_name => "my-repo",
                :rev_hash => "commit-id",
              )
            end

            context "glob *.cpp" do
              subject { loaded_package.glob "*.cpp" }
              it { should include "file1.cpp" }
              it { should_not include "file2.rb" }
              it { should include "file3.cpp" }
              it { should_not include "subdir/file4.cpp" }
              it { should_not include "subdir/file5.rb" }
            end

            context "glob subdir/*.rb" do
              subject { loaded_package.glob "subdir/*.rb" }
              it { should_not include "file1.cpp" }
              it { should_not include "file2.rb" }
              it { should_not include "file3.cpp" }
              it { should_not include "subdir/file4.cpp" }
              it { should include "subdir/file5.rb" }
            end
          end
        end
      end
    end # save package
  end # add snippet.json
end # ::SocialSnippet::Repository::Models::Package
|
require 'spec_helper'

module Build
  # Integration specs for the bower-to-gem Converter pipeline:
  # run! -> install! -> convert! -> build! -> index! -> process!
  describe Converter do
    ## Need to enable more than one connection on travis (pool)
    # context '#run!' do
    #   it 'should produce the same output when executed in parallel' do
    #     threads = (0..2).map do
    #       Thread.new do
    #         Converter.run!('moment-timezone', 'latest')
    #       end
    #     end
    #     values = threads.map(&:join).map(&:value)
    #     expect(values[0]).to_not eq(nil)
    #     expect(values[1]).to_not eq(nil)
    #     expect(values[2]).to_not eq(nil)
    #     expect(values[1]).to eq(values[0])
    #     expect(values[2]).to eq(values[0])
    #   end
    #   it 'should include builded component in first each request' do
    #     threads = (0..2).map do
    #       Thread.new do
    #         Converter.run!('flight', 'latest')
    #       end
    #     end
    #     values = threads.map(&:join).map(&:value)
    #     expect(values[0].component.name).to eq("flight")
    #     expect(values[1].component.name).to eq("flight")
    #     expect(values[2].component.name).to eq("flight")
    #   end
    # end
    #

    context '#run!' do
      # Converting a component must persist its resolved dependency map.
      it 'should fill component dependencies' do
        Converter.run!('angular-route', '1.2.2')
        _, angular_route = Component.get('angular-route', '1.2.2')
        expect(angular_route.reload.dependencies.keys).to_not be_empty
      end

      it 'should not mess with component dependencies afterwards' do
        Converter.run!('angular-route', '1.2.2')
        Converter.run!('angular-route', '1.2.3')
        Converter.run!('angular-route', 'latest')
        _, angular_route = Component.get('angular-route', '1.2.3')
        expect(angular_route.reload.dependencies.keys).to_not be_empty
      end

      # Re-running the converter must not overwrite stored fields unless the
      # version is explicitly flagged for rebuild.
      it 'leaves component fields if component.rebuild = false' do
        Converter.run!('angular-route', '1.2.2')
        Version.all.each do |c|
          c.update_attributes(
            :dependencies => { "yolo" => "1.2.3" }
          )
        end
        Converter.run!('angular-route', '1.2.2')
        Version.all.reload.map(&:dependencies).each do |d|
          expect(d).to eq("yolo" => "1.2.3")
        end
      end

      it 'updates component fields if component.rebuild = true' do
        Converter.run!('angular-route', '1.2.2')
        Version.all.each do |c|
          c.update_attributes(
            :rebuild => true,
            :dependencies => { "yolo" => "1.2.3" }
          )
        end
        Converter.run!('angular-route', '1.2.2')
        Version.all.each do |d|
          expect(d.dependencies).to_not eq("yolo" => "1.2.3")
          # The rebuild flag must be cleared once the component is rebuilt.
          expect(d.rebuild).to eq(false)
        end
      end
    end

    context '#install!' do
      it 'installs component and return all dependencies but not persists' do
        expect {
          Converter.install! 'jquery', '2.1.0' do |dependencies|
            expect(dependencies.size).to eq(2)
            expect(dependencies.first).to be_a(BowerComponent)
            # Dir.exists? is deprecated; Dir.exist? is the supported spelling.
            expect(Dir.exist?(dependencies.first.component_dir)).to eq(true)
          end
        }.to_not change { Component.count + Version.count }
      end
    end

    context '#convert!' do
      it 'converts component to new temp directory and yields it' do
        # NOTE(review): the version is unpinned here, so this resolves to the
        # latest jquery release and can make the spec flaky — consider
        # freezing it like the '#install!' example above.
        Converter.install!('jquery') do |dependencies|
          Converter.convert!(dependencies.first) do |dir, paths, mains|
            expect(Dir.exist?(dir.to_s)).to be_true
            expect(paths.all? { |p| File.exist?(dir.join(p)) }).to be_true
            expect(mains.all? { |p| File.exist?(dir.join(p)) }).to be_true
          end
        end
      end

      it 'raises an BuildError if converted component has no files in it' do
        Dir.mktmpdir do |dir|
          # Probably replace with fixtures
          Utils.sh(dir, 'git init')
          Utils.sh(dir, 'touch .gitignore')
          Utils.sh(dir, 'git add -f .gitignore')
          Utils.sh(dir, 'git config user.email "you@example.com"')
          Utils.sh(dir, 'git config user.name "Your Name"')
          Utils.sh(dir, 'git commit -m init')
          component = BowerComponent.new(dir, {
            'endpoint' => { 'name' => 'foobar', 'source' => 'https://github.com/sheerun/foobar' },
            'pkgMeta' => { 'name' => 'foobar' }
          })
          expect {
            Converter.convert!(component) do; end
          }.to raise_error(BuildError)
        end
      end
    end

    context '#build!' do
      it 'builds gem to given directory and returns path to it' do
        Dir.mktmpdir do |tmpdir|
          gem_path = Converter.install!('jquery') do |dependencies|
            Converter.convert!(dependencies.first) do |dir, paths, mains|
              Converter.build!(dependencies.first, dir, tmpdir)
            end
          end
          expect(File.exist?(gem_path.to_s)).to be_true
        end
      end
    end

    context '#index!' do
      it 'moves generated gems to data_dir and reindexes' do
        Dir.mktmpdir do |install_dir|
          Dir.mktmpdir do |tmpdir|
            gem_path = Converter.install!('jquery') do |dependencies|
              Converter.convert!(dependencies.first) do |dir, paths, mains|
                Converter.build!(dependencies.first, dir, tmpdir)
              end
            end
            Converter.index!
          end
        end
      end
    end

    context '#process!' do
      it 'processes given bower component' do
        Converter.process!('jquery', '2.0.3') do |version_paths|
          version_paths.each do |version, path|
            expect(version).to be_a(Version)
            expect(path).to be_a(Path)
          end
        end
      end
    end
  end
end
chore(spec): freeze jQuery versions in converter spec
require 'spec_helper'

module Build
  # Integration specs for the bower-to-gem Converter pipeline:
  # run! -> install! -> convert! -> build! -> index! -> process!
  # jQuery versions are pinned so results don't drift with upstream releases.
  describe Converter do
    ## Need to enable more than one connection on travis (pool)
    # context '#run!' do
    #   it 'should produce the same output when executed in parallel' do
    #     threads = (0..2).map do
    #       Thread.new do
    #         Converter.run!('moment-timezone', 'latest')
    #       end
    #     end
    #     values = threads.map(&:join).map(&:value)
    #     expect(values[0]).to_not eq(nil)
    #     expect(values[1]).to_not eq(nil)
    #     expect(values[2]).to_not eq(nil)
    #     expect(values[1]).to eq(values[0])
    #     expect(values[2]).to eq(values[0])
    #   end
    #   it 'should include builded component in first each request' do
    #     threads = (0..2).map do
    #       Thread.new do
    #         Converter.run!('flight', 'latest')
    #       end
    #     end
    #     values = threads.map(&:join).map(&:value)
    #     expect(values[0].component.name).to eq("flight")
    #     expect(values[1].component.name).to eq("flight")
    #     expect(values[2].component.name).to eq("flight")
    #   end
    # end
    #

    context '#run!' do
      # Converting a component must persist its resolved dependency map.
      it 'should fill component dependencies' do
        Converter.run!('angular-route', '1.2.2')
        _, angular_route = Component.get('angular-route', '1.2.2')
        expect(angular_route.reload.dependencies.keys).to_not be_empty
      end

      it 'should not mess with component dependencies afterwards' do
        Converter.run!('angular-route', '1.2.2')
        Converter.run!('angular-route', '1.2.3')
        Converter.run!('angular-route', 'latest')
        _, angular_route = Component.get('angular-route', '1.2.3')
        expect(angular_route.reload.dependencies.keys).to_not be_empty
      end

      # Re-running the converter must not overwrite stored fields unless the
      # version is explicitly flagged for rebuild.
      it 'leaves component fields if component.rebuild = false' do
        Converter.run!('angular-route', '1.2.2')
        Version.all.each do |c|
          c.update_attributes(
            :dependencies => { "yolo" => "1.2.3" }
          )
        end
        Converter.run!('angular-route', '1.2.2')
        Version.all.reload.map(&:dependencies).each do |d|
          expect(d).to eq("yolo" => "1.2.3")
        end
      end

      it 'updates component fields if component.rebuild = true' do
        Converter.run!('angular-route', '1.2.2')
        Version.all.each do |c|
          c.update_attributes(
            :rebuild => true,
            :dependencies => { "yolo" => "1.2.3" }
          )
        end
        Converter.run!('angular-route', '1.2.2')
        Version.all.each do |d|
          expect(d.dependencies).to_not eq("yolo" => "1.2.3")
          # The rebuild flag must be cleared once the component is rebuilt.
          expect(d.rebuild).to eq(false)
        end
      end
    end

    context '#install!' do
      it 'installs component and return all dependencies but not persists' do
        expect {
          Converter.install! 'jquery', '2.1.0' do |dependencies|
            expect(dependencies.size).to eq(2)
            expect(dependencies.first).to be_a(BowerComponent)
            # Dir.exists? is deprecated; Dir.exist? is the supported spelling.
            expect(Dir.exist?(dependencies.first.component_dir)).to eq(true)
          end
        }.to_not change { Component.count + Version.count }
      end
    end

    context '#convert!' do
      it 'converts component to new temp directory and yields it' do
        Converter.install!('jquery', '2.0.3') do |dependencies|
          Converter.convert!(dependencies.first) do |dir, paths, mains|
            expect(Dir.exist?(dir.to_s)).to be_true
            expect(paths.all? { |p| File.exist?(dir.join(p)) }).to be_true
            expect(mains.all? { |p| File.exist?(dir.join(p)) }).to be_true
          end
        end
      end

      it 'raises an BuildError if converted component has no files in it' do
        Dir.mktmpdir do |dir|
          # Probably replace with fixtures
          Utils.sh(dir, 'git init')
          Utils.sh(dir, 'touch .gitignore')
          Utils.sh(dir, 'git add -f .gitignore')
          Utils.sh(dir, 'git config user.email "you@example.com"')
          Utils.sh(dir, 'git config user.name "Your Name"')
          Utils.sh(dir, 'git commit -m init')
          component = BowerComponent.new(dir, {
            'endpoint' => { 'name' => 'foobar', 'source' => 'https://github.com/sheerun/foobar' },
            'pkgMeta' => { 'name' => 'foobar' }
          })
          expect {
            Converter.convert!(component) do; end
          }.to raise_error(BuildError)
        end
      end
    end

    context '#build!' do
      it 'builds gem to given directory and returns path to it' do
        Dir.mktmpdir do |tmpdir|
          gem_path = Converter.install!('jquery', '2.0.3') do |dependencies|
            Converter.convert!(dependencies.first) do |dir, paths, mains|
              Converter.build!(dependencies.first, dir, tmpdir)
            end
          end
          expect(File.exist?(gem_path.to_s)).to be_true
        end
      end
    end

    context '#index!' do
      it 'moves generated gems to data_dir and reindexes' do
        Dir.mktmpdir do |install_dir|
          Dir.mktmpdir do |tmpdir|
            gem_path = Converter.install!('jquery', '2.0.3') do |dependencies|
              Converter.convert!(dependencies.first) do |dir, paths, mains|
                Converter.build!(dependencies.first, dir, tmpdir)
              end
            end
            Converter.index!
          end
        end
      end
    end

    context '#process!' do
      it 'processes given bower component' do
        Converter.process!('jquery', '2.0.3') do |version_paths|
          version_paths.each do |version, path|
            expect(version).to be_a(Version)
            expect(path).to be_a(Path)
          end
        end
      end
    end
  end
end
|
Add tests for FolderSearcher locked-folder access restrictions
require 'rails_helper'

describe FolderSearcher do
  describe '.search' do
    # Two users: one may only see unlocked resources, the other may also see
    # locked resources.
    let!(:some_access_user) { create(:user, dl_resources: true, dl_locked_resources: false) }
    let!(:all_access_user) { create(:user, dl_resources: true, dl_locked_resources: true) }
    let(:restricted_search) { FolderSearcher.search(some_access_user, params: params) }
    let(:unrestricted_search) { FolderSearcher.search(all_access_user, params: params) }
    # Matching attachments, one in an unlocked folder and one in a locked one.
    let!(:dog_folder_unlocked) { create(:folder, name: 'dog', locked: false) }
    let!(:doggie_file) { create(:attachment, attachment_file_name: 'Doggies.pdf', attachable_id: dog_folder_unlocked.id) }
    let!(:dog_folder_locked) { create(:folder, name: 'dog', locked: true) }
    let!(:doggie2_file) { create(:attachment, attachment_file_name: 'Doggies2.pdf', attachable_id: dog_folder_locked.id) }
    let(:params) { { search: 'Doggies' } }

    context 'normal user searches for file' do
      it 'and results include the correct file in unlocked folder' do
        # match_array expects an array; contain_exactly is the matcher meant
        # for enumerating individual elements.
        expect(restricted_search).to contain_exactly(doggie_file)
      end
      it 'and results do not include file found in locked folder' do
        # A negated match_array passes whenever the collections merely differ,
        # which made the old assertion vacuous; `include` checks membership.
        expect(restricted_search).not_to include(doggie2_file)
      end
    end

    context 'all_access user searches for file' do
      it 'and results include all correct files' do
        expect(unrestricted_search).to match_array([doggie_file, doggie2_file])
      end
    end
  end
end
|
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Jekyll
  # {% link_sample path %}text{% endlink_sample %}
  # Wraps the block's rendered content in a link to the hosted sample for
  # `path`, built from the site's `sample_link_base` config value.
  class LinkSampleBlock < Liquid::Block
    def initialize(tag_name, markup, tokens)
      super
      @file = markup
    end

    def render(context)
      page = context.environments.first["page"]
      # Samples are published outside the localized source tree, so strip the
      # language and section prefixes before joining with the link base.
      relpath = File.dirname(page["path"]).sub("_en/", "").sub("fundamentals/", "")
      # Plain assignment: the original `String filepath = ...` was a Java-ism
      # that actually invoked Kernel#String on the assignment's value.
      filepath = File.join(relpath, @file).sub("/_code", "")
      url = File.join(context.registers[:site].config["sample_link_base"], filepath).strip
      out = super(context)
      "<a href=\"#{url}\">#{out}</a>"
    end
  end

  # {% link_sample_button path %}text{% endlink_sample_button %}
  # Same as LinkSampleBlock, but renders the link styled as a primary button.
  class LinkSampleButton < Liquid::Block
    def initialize(tag_name, markup, tokens)
      super
      @file = markup
    end

    def render(context)
      page = context.environments.first["page"]
      relpath = File.dirname(page["path"]).sub("_en/", "").sub("fundamentals/", "")
      filepath = File.join(relpath, @file).sub("/_code", "")
      url = File.join(context.registers[:site].config["sample_link_base"], filepath).strip
      out = super(context)
      "<a class=\"button--primary\" href=\"#{url}\">#{out}</a>"
    end
  end

  # {% include_code file section [lang] %}
  # Extracts the snippet between "[START section]" and "[END section]"
  # markers from a sample file and renders it highlighted via Pygments.
  class IncludeCodeTag < Liquid::Tag
    include Liquid::StandardFilters

    # Comment delimiters (regex-escaped) used to locate snippet markers,
    # keyed by language.
    @@comment_formats = {
      "html" => ["<!--", "-->"],
      "css" => ["\\\/\\\*", "\\\*\\\/"],
      "javascript" => ["\\\/\\\*", "\\\*\\\/"],
    }

    def initialize(tag_name, markup, tokens)
      super
      @options = {}
      @file, @section, @lang = markup.strip.split(' ', 3)
      @lang = "html" if @lang.nil?
      if @@comment_formats[@lang].nil?
        Jekyll.logger.warn "Include_code doesn't know #{@lang}"
      end
      # NOTE(review): @character is assigned here and in
      # render_codehighlighter but never read in this file — candidate for
      # removal once confirmed unused elsewhere.
      @character = '/'
    end

    # Regex source matching a "[<tag> <section>]" marker wrapped in the
    # language's comment delimiters (plus an optional trailing newline).
    def getmatcher_tag(lang, section, tag)
      startc, endc = @@comment_formats[lang]
      "#{startc} \\/\\/ \\[#{tag} #{section}\\] #{endc}\n?"
    end

    # Returns the text between the START and END markers for `section`.
    def getmatch(contents, lang, section)
      start = getmatcher_tag(lang, section, "START")
      endt = getmatcher_tag(lang, section, "END")
      contents.match(/#{start}(.*)#{endt}/im)[1]
    end

    def render(context)
      page = context.environments.first["page"]
      site = context.registers[:site]
      path = site.source
      lang = site.config["lang"]
      if !lang && page.has_key?('langcode')
        lang = page["langcode"]
      elsif !lang
        lang = "en"
      end
      filepath = File.join(File.dirname(page["path"]), @file)
      # Localized pages share the English sample sources.
      filepath.sub!("_" + lang + "/", "_en/") if lang != "en"
      file = File.join(path, filepath)
      contents = File.read(file)
      snippet = getmatch(contents, @lang, @section)
      # Strip any remaining markers for all known languages so nested or
      # unrelated sections don't leak into the output. (each_key also avoids
      # the original block param shadowing the local `lang` above.)
      @@comment_formats.each_key do |fmt_lang|
        marker = getmatcher_tag(fmt_lang, "[^\\]]+", "\\w+")
        snippet.gsub!(/#{marker}/mi, "")
      end
      render_codehighlighter(context, snippet, filepath)
    end

    def render_codehighlighter(context, code, filepath)
      require 'pygments'
      # TODO(ianbarber): This is a bit of a fudge. We should know the definitive sample
      # path. I think we may want to have a central shared "code sample" object that
      # knows how to get such paths for this and the sample_builder.
      filepath.sub!("_code/", "")
      snippet = ""
      # Indenter
      # TODO(ianbarber): Look for multiples of original offset rather than absolute spaces.
      # paulk edit: updated logic. gets first line, works out indent. then uses that as initial offset
      initial = code.lines.first[/\A */].size
      code.each_line do |s|
        if s.size >= initial
          snippet += (" " * 4)
          snippet += s.slice(initial..s.size).rstrip
        end
        snippet += "\n"
      end
      @options[:encoding] = 'utf-8'
      highlighted_code = Pygments.highlight(snippet, :lexer => @lang, :options => @options)
      if highlighted_code.nil?
        Jekyll.logger.error "There was an error highlighting your code."
      end
      @character = '}' if @lang == 'css'
      relpath = filepath.sub("_en/fundamentals/", "")
      <<-HTML
</div>
</div>
<div class="highlight-module highlight-module--code highlight-module--right">
<div class="highlight-module__container">
<code class='html'>#{highlighted_code.strip}</code>
<a class="highlight-module__cta button--primary" href="#{context.registers[:site].baseurl}/fundamentals/resources/samples/#{relpath}">Try full sample</a>
</div>
</div>
<div class="container">
<div class="content" markdown="1">
      HTML
    end
  end
end
# Register the custom tags/blocks with Liquid so templates can use them.
Liquid::Template.register_tag('include_code', Jekyll::IncludeCodeTag)
Liquid::Template.register_tag('link_sample', Jekyll::LinkSampleBlock)
Liquid::Template.register_tag('link_sample_button', Jekyll::LinkSampleButton)
Fixes #842 - uses correct path for samples from config
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Jekyll
  # {% link_sample path %}text{% endlink_sample %}
  # Wraps the block's rendered content in a link to the hosted sample for
  # `path`, built from the site's `sample_link_base` config value.
  class LinkSampleBlock < Liquid::Block
    def initialize(tag_name, markup, tokens)
      super
      @file = markup
    end

    def render(context)
      page = context.environments.first["page"]
      # Samples are published outside the localized source tree, so strip the
      # language and section prefixes before joining with the link base.
      relpath = File.dirname(page["path"]).sub("_en/", "").sub("fundamentals/", "")
      # Plain assignment: the original `String filepath = ...` was a Java-ism
      # that actually invoked Kernel#String on the assignment's value.
      filepath = File.join(relpath, @file).sub("/_code", "")
      url = File.join(context.registers[:site].config["sample_link_base"], filepath).strip
      out = super(context)
      "<a href=\"#{url}\">#{out}</a>"
    end
  end

  # {% link_sample_button path %}text{% endlink_sample_button %}
  # Same as LinkSampleBlock, but renders the link styled as a primary button.
  class LinkSampleButton < Liquid::Block
    def initialize(tag_name, markup, tokens)
      super
      @file = markup
    end

    def render(context)
      page = context.environments.first["page"]
      relpath = File.dirname(page["path"]).sub("_en/", "").sub("fundamentals/", "")
      filepath = File.join(relpath, @file).sub("/_code", "")
      url = File.join(context.registers[:site].config["sample_link_base"], filepath).strip
      out = super(context)
      "<a class=\"button--primary\" href=\"#{url}\">#{out}</a>"
    end
  end

  # {% include_code file section [lang] %}
  # Extracts the snippet between "[START section]" and "[END section]"
  # markers from a sample file and renders it highlighted via Pygments.
  class IncludeCodeTag < Liquid::Tag
    include Liquid::StandardFilters

    # Comment delimiters (regex-escaped) used to locate snippet markers,
    # keyed by language.
    @@comment_formats = {
      "html" => ["<!--", "-->"],
      "css" => ["\\\/\\\*", "\\\*\\\/"],
      "javascript" => ["\\\/\\\*", "\\\*\\\/"],
    }

    def initialize(tag_name, markup, tokens)
      super
      @options = {}
      @file, @section, @lang = markup.strip.split(' ', 3)
      @lang = "html" if @lang.nil?
      if @@comment_formats[@lang].nil?
        Jekyll.logger.warn "Include_code doesn't know #{@lang}"
      end
      # NOTE(review): @character is assigned here and in
      # render_codehighlighter but never read in this file — candidate for
      # removal once confirmed unused elsewhere.
      @character = '/'
    end

    # Regex source matching a "[<tag> <section>]" marker wrapped in the
    # language's comment delimiters (plus an optional trailing newline).
    def getmatcher_tag(lang, section, tag)
      startc, endc = @@comment_formats[lang]
      "#{startc} \\/\\/ \\[#{tag} #{section}\\] #{endc}\n?"
    end

    # Returns the text between the START and END markers for `section`.
    def getmatch(contents, lang, section)
      start = getmatcher_tag(lang, section, "START")
      endt = getmatcher_tag(lang, section, "END")
      contents.match(/#{start}(.*)#{endt}/im)[1]
    end

    def render(context)
      page = context.environments.first["page"]
      site = context.registers[:site]
      path = site.source
      lang = site.config["lang"]
      if !lang && page.has_key?('langcode')
        lang = page["langcode"]
      elsif !lang
        lang = "en"
      end
      filepath = File.join(File.dirname(page["path"]), @file)
      # Localized pages share the English sample sources.
      filepath.sub!("_" + lang + "/", "_en/") if lang != "en"
      file = File.join(path, filepath)
      contents = File.read(file)
      snippet = getmatch(contents, @lang, @section)
      # Strip any remaining markers for all known languages so nested or
      # unrelated sections don't leak into the output. (each_key also avoids
      # the original block param shadowing the local `lang` above.)
      @@comment_formats.each_key do |fmt_lang|
        marker = getmatcher_tag(fmt_lang, "[^\\]]+", "\\w+")
        snippet.gsub!(/#{marker}/mi, "")
      end
      render_codehighlighter(context, snippet, filepath)
    end

    def render_codehighlighter(context, code, filepath)
      require 'pygments'
      # TODO(ianbarber): This is a bit of a fudge. We should know the definitive sample
      # path. I think we may want to have a central shared "code sample" object that
      # knows how to get such paths for this and the sample_builder.
      filepath.sub!("_code/", "")
      snippet = ""
      # Indenter
      # TODO(ianbarber): Look for multiples of original offset rather than absolute spaces.
      # paulk edit: updated logic. gets first line, works out indent. then uses that as initial offset
      initial = code.lines.first[/\A */].size
      code.each_line do |s|
        if s.size >= initial
          snippet += (" " * 4)
          snippet += s.slice(initial..s.size).rstrip
        end
        snippet += "\n"
      end
      @options[:encoding] = 'utf-8'
      highlighted_code = Pygments.highlight(snippet, :lexer => @lang, :options => @options)
      if highlighted_code.nil?
        Jekyll.logger.error "There was an error highlighting your code."
      end
      @character = '}' if @lang == 'css'
      relpath = filepath.sub("_en/fundamentals/", "")
      <<-HTML
</div>
</div>
<div class="highlight-module highlight-module--code highlight-module--right">
<div class="highlight-module__container">
<code class='html'>#{highlighted_code.strip}</code>
<a class="highlight-module__cta button--primary" href="#{context.registers[:site].config["sample_link_base"]}#{relpath}">Try full sample</a>
</div>
</div>
<div class="container">
<div class="content" markdown="1">
      HTML
    end
  end
end
# Register the custom tags/blocks with Liquid so templates can use them.
Liquid::Template.register_tag('include_code', Jekyll::IncludeCodeTag)
Liquid::Template.register_tag('link_sample', Jekyll::LinkSampleBlock)
Liquid::Template.register_tag('link_sample_button', Jekyll::LinkSampleButton)
|
require 'spec_helper'
# Specs for Kaminari's Mongoid integration. These target the pre-1.0 API
# (`num_pages`); the whole suite is skipped unless Mongoid is loaded.
if defined? Mongoid
describe Kaminari::MongoidExtension do
before(:each) do
# 41 records with the default page size of 25 => exactly 2 pages.
41.times do
User.create!({:salary => 1})
end
end
describe '#page' do
context 'page 1' do
subject { User.page 1 }
it { should be_a Mongoid::Criteria }
its(:current_page) { should == 1 }
its(:limit_value) { should == 25 }
its(:num_pages) { should == 2 }
it { should skip(0) }
end
context 'page 2' do
subject { User.page 2 }
it { should be_a Mongoid::Criteria }
its(:current_page) { should == 2 }
its(:limit_value) { should == 25 }
its(:num_pages) { should == 2 }
it { should skip 25 }
end
# A non-numeric page argument must fall back to page 1.
context 'page "foobar"' do
subject { User.page 'foobar' }
it { should be_a Mongoid::Criteria }
its(:current_page) { should == 1 }
its(:limit_value) { should == 25 }
its(:num_pages) { should == 2 }
it { should skip 0 }
end
shared_examples 'complete valid pagination' do
# Mongoid 3 stringifies selector keys; earlier versions keep symbols.
if Mongoid::VERSION =~ /^3/
its(:selector) { should == {'salary' => 1} }
else
its(:selector) { should == {:salary => 1} }
end
its(:current_page) { should == 2 }
its(:limit_value) { should == 25 }
its(:num_pages) { should == 2 }
it { should skip 25 }
end
# Pagination must compose with `where` regardless of call order.
context 'with criteria before' do
subject { User.where(:salary => 1).page 2 }
it_should_behave_like 'complete valid pagination'
end
context 'with criteria after' do
subject { User.page(2).where(:salary => 1) }
it_should_behave_like 'complete valid pagination'
end
end
describe '#per' do
subject { User.page(2).per(10) }
it { should be_a Mongoid::Criteria }
its(:current_page) { should == 2 }
its(:limit_value) { should == 10 }
its(:num_pages) { should == 5 }
it { should skip 10 }
end
describe '#page in embedded documents' do
before do
@mongo_developer = MongoMongoidExtensionDeveloper.new
@mongo_developer.frameworks.new(:name => "rails", :language => "ruby")
@mongo_developer.frameworks.new(:name => "merb", :language => "ruby")
@mongo_developer.frameworks.new(:name => "sinatra", :language => "ruby")
@mongo_developer.frameworks.new(:name => "cakephp", :language => "php")
@mongo_developer.frameworks.new(:name => "tornado", :language => "python")
end
context 'page 1' do
subject { @mongo_developer.frameworks.page(1).per(1) }
it { should be_a Mongoid::Criteria }
its(:total_count) { should == 5 }
its(:limit_value) { should == 1 }
its(:current_page) { should == 1 }
its(:num_pages) { should == 5 }
end
context 'with criteria after' do
subject { @mongo_developer.frameworks.page(1).per(2).where(:language => "ruby") }
it { should be_a Mongoid::Criteria }
its(:total_count) { should == 3 }
its(:limit_value) { should == 2 }
its(:current_page) { should == 1 }
its(:num_pages) { should == 2 }
end
context 'with criteria before' do
subject { @mongo_developer.frameworks.where(:language => "ruby").page(1).per(2) }
it { should be_a Mongoid::Criteria }
its(:total_count) { should == 3 }
its(:limit_value) { should == 2 }
its(:current_page) { should == 1 }
its(:num_pages) { should == 2 }
end
end
end
end
Rename `num_pages` to `total_pages` in the Mongoid extension specs
require 'spec_helper'
# Specs for Kaminari's Mongoid integration, using the renamed `total_pages`
# reader (successor of `num_pages`). Skipped unless Mongoid is loaded.
if defined? Mongoid
describe Kaminari::MongoidExtension do
before(:each) do
# 41 records with the default page size of 25 => exactly 2 pages.
41.times do
User.create!({:salary => 1})
end
end
describe '#page' do
context 'page 1' do
subject { User.page 1 }
it { should be_a Mongoid::Criteria }
its(:current_page) { should == 1 }
its(:limit_value) { should == 25 }
its(:total_pages) { should == 2 }
it { should skip(0) }
end
context 'page 2' do
subject { User.page 2 }
it { should be_a Mongoid::Criteria }
its(:current_page) { should == 2 }
its(:limit_value) { should == 25 }
its(:total_pages) { should == 2 }
it { should skip 25 }
end
# A non-numeric page argument must fall back to page 1.
context 'page "foobar"' do
subject { User.page 'foobar' }
it { should be_a Mongoid::Criteria }
its(:current_page) { should == 1 }
its(:limit_value) { should == 25 }
its(:total_pages) { should == 2 }
it { should skip 0 }
end
shared_examples 'complete valid pagination' do
# Mongoid 3 stringifies selector keys; earlier versions keep symbols.
if Mongoid::VERSION =~ /^3/
its(:selector) { should == {'salary' => 1} }
else
its(:selector) { should == {:salary => 1} }
end
its(:current_page) { should == 2 }
its(:limit_value) { should == 25 }
its(:total_pages) { should == 2 }
it { should skip 25 }
end
# Pagination must compose with `where` regardless of call order.
context 'with criteria before' do
subject { User.where(:salary => 1).page 2 }
it_should_behave_like 'complete valid pagination'
end
context 'with criteria after' do
subject { User.page(2).where(:salary => 1) }
it_should_behave_like 'complete valid pagination'
end
end
describe '#per' do
subject { User.page(2).per(10) }
it { should be_a Mongoid::Criteria }
its(:current_page) { should == 2 }
its(:limit_value) { should == 10 }
its(:total_pages) { should == 5 }
it { should skip 10 }
end
describe '#page in embedded documents' do
before do
@mongo_developer = MongoMongoidExtensionDeveloper.new
@mongo_developer.frameworks.new(:name => "rails", :language => "ruby")
@mongo_developer.frameworks.new(:name => "merb", :language => "ruby")
@mongo_developer.frameworks.new(:name => "sinatra", :language => "ruby")
@mongo_developer.frameworks.new(:name => "cakephp", :language => "php")
@mongo_developer.frameworks.new(:name => "tornado", :language => "python")
end
context 'page 1' do
subject { @mongo_developer.frameworks.page(1).per(1) }
it { should be_a Mongoid::Criteria }
its(:total_count) { should == 5 }
its(:limit_value) { should == 1 }
its(:current_page) { should == 1 }
its(:total_pages) { should == 5 }
end
context 'with criteria after' do
subject { @mongo_developer.frameworks.page(1).per(2).where(:language => "ruby") }
it { should be_a Mongoid::Criteria }
its(:total_count) { should == 3 }
its(:limit_value) { should == 2 }
its(:current_page) { should == 1 }
its(:total_pages) { should == 2 }
end
context 'with criteria before' do
subject { @mongo_developer.frameworks.where(:language => "ruby").page(1).per(2) }
it { should be_a Mongoid::Criteria }
its(:total_count) { should == 3 }
its(:limit_value) { should == 2 }
its(:current_page) { should == 1 }
its(:total_pages) { should == 2 }
end
end
end
end
|
describe ProfilesSearch, type: :model do
  # Published profile that should be found by every query below.
  let!(:profile) { FactoryGirl.create(:published, firstname: 'Gertrud', lastname: 'Mueller', twitter: 'Apfel', city: 'Berlin', languages: 'Englisch') }
  # Published profile that matches none of the searched values.
  let!(:profile1) { FactoryGirl.create(:published, firstname: 'Claudia', email: 'claudia@test.de', city: 'Paris', languages: 'Polnisch', twitter: 'Birne') }
  let!(:profile_language_de) { ProfileLanguage.create!(profile: profile, iso_639_1: 'de') }
  let!(:profile_language_en) { ProfileLanguage.create!(profile: profile, iso_639_1: 'en') }
  let!(:profile_not_matched) { FactoryGirl.create(:published, firstname: 'Claudia', email: 'claudia@test.de', city: 'Paris', twitter: 'Birne') }
  # Unpublished profiles must never appear in search results.
  let!(:profile_not_published) { FactoryGirl.create(:unpublished, firstname: 'Fred') }
  # NOTE: the original file repeated the four declarations above four more
  # times with identical bodies; RSpec `let!` redefinition overrides the
  # helper method, so the duplicates had no effect and were removed.

  describe 'results' do
    it 'does not return unpublished profiles' do
      expect(ProfilesSearch.new(quick: 'Fred').results).to be_empty
    end

    context 'quick search' do
      it 'does not return profiles that do not match the given search string' do
        expect(ProfilesSearch.new(quick: 'Horstin').results).to be_empty
      end
      it 'does return profiles that match the lastname' do
        expect(ProfilesSearch.new(quick: 'Muell').results).to eq [profile]
      end
      it 'does return profiles that match the twittername' do
        expect(ProfilesSearch.new(quick: 'apfel').results).to eq [profile]
      end
      it 'does return profiles that match the topic' do
        profile.topic_list.add("obst")
        profile.save!
        expect(ProfilesSearch.new(quick: 'obst').results).to eq [profile]
      end
      it 'does return nothing if only quick is given and empty' do
        expect(ProfilesSearch.new(quick: '').results).to be_empty
      end
    end

    context 'when doing a detailed search' do
      it 'does return profiles that match the given city search string' do
        expect(ProfilesSearch.new({city: 'Berli'}).results).to eq [profile]
      end
      it 'does return profiles that match the given language search string' do
        expect(ProfilesSearch.new({languages: 'en'}).results).to eq [profile]
      end
      it 'does return profiles that match the given name search string' do
        expect(ProfilesSearch.new({name: 'Gertrud'}).results).to eq [profile]
      end
      it 'does return profiles that match the given twitter search string' do
        expect(ProfilesSearch.new({twitter: 'Apf'}).results).to eq [profile]
      end
      it 'does return profiles that match the given topic search string' do
        profile.topic_list.add("obst")
        profile.save!
        expect(ProfilesSearch.new({topics: 'obst'}).results).to eq [profile]
      end
      it 'returns any profile that matches one of the given languages' do
        expect(ProfilesSearch.new({languages: ['en', 'ar']}).results).to eq [profile]
      end
      # Pending example (no body yet).
      it 'returns any profile that matches one of the given topics'
    end
  end
end
Add an if statement to get topic search working; it seems the SQL
filtering on tags.name excludes every row when no topics are given.
describe ProfilesSearch, type: :model do
# Published profile that should be found by every query in this spec.
let!(:profile) { FactoryGirl.create(:published, firstname: 'Gertrud', lastname: 'Mueller', twitter: 'Apfel', city: 'Berlin', languages: 'Englisch') }
let!(:profile_language_de) { ProfileLanguage.create!(profile: profile, iso_639_1: 'de') }
let!(:profile_language_en) { ProfileLanguage.create!(profile: profile, iso_639_1: 'en') }
# The file originally declared profile_not_matched twice (first with
# languages: 'Polnisch', then repeatedly without) plus four duplicate sets
# of the other helpers; RSpec `let!` redefinition means only the LAST
# definition of each name took effect, so that definition is kept here and
# the dead duplicates are removed.
let!(:profile_not_matched) { FactoryGirl.create(:published, firstname: 'Claudia', email: 'claudia@test.de', city: 'Paris', twitter: 'Birne') }
# Unpublished profiles must never appear in search results.
let!(:profile_not_published) { FactoryGirl.create(:unpublished, firstname: 'Fred') }
describe 'results' do
it 'does not return unpublished profiles' do
expect(ProfilesSearch.new(quick: 'Fred').results).to be_empty
end
context 'quick search' do
it 'does not return profiles that do not match the given search string' do
expect(ProfilesSearch.new(quick: 'Horstin').results).to be_empty
end
it 'does return profiles that match the lastname' do
expect(ProfilesSearch.new(quick: 'Muell').results).to eq [profile]
end
it 'does return profiles that match the twittername' do
expect(ProfilesSearch.new(quick: 'apfel').results).to eq [profile]
end
it 'does return profiles that match the topic' do
profile.topic_list.add("obst")
profile.save!
expect(ProfilesSearch.new(quick: 'obst').results).to eq [profile]
end
it 'does return nothing if only quick is given and empty' do
expect(ProfilesSearch.new(quick: '').results).to be_empty
end
end
context 'when doing a detailed search' do
it 'does return profiles that match the given city search string' do
expect(ProfilesSearch.new({city: 'Berli'}).results).to eq [profile]
end
it 'does return profiles that match the given language search string' do
expect(ProfilesSearch.new({languages: 'en'}).results).to eq [profile]
end
it 'does return profiles that match the given name search string' do
expect(ProfilesSearch.new({name: 'Gertrud'}).results).to eq [profile]
end
it 'does return profiles that match the given twitter search string' do
expect(ProfilesSearch.new({twitter: 'Apf'}).results).to eq [profile]
end
it 'does return profiles that match the given topic search string' do
profile.topic_list.add("obst")
profile.save!
expect(ProfilesSearch.new({topics: 'obst'}).results).to eq [profile]
end
it 'returns any profile that matches one of the given languages' do
expect(ProfilesSearch.new({languages: ['en', 'ar']}).results).to eq [profile]
end
it 'returns any profile that matches one of the given topics'
end
end
end
|
require 'spec_helper'

describe SubmissionRule do
  it { is_expected.to belong_to(:assignment) }

  # True when +actual+ lies within 10 minutes (600 s) of +expected+.
  # Every time expectation below uses this tolerance because the rule
  # computes collection times relative to "now".
  def close_to?(expected, actual)
    (expected - actual).abs < 600
  end

  # Creates one inviter/grouping pair that belongs to @section and one
  # pair without any section. Shared by every due-date context below.
  # (Hound: no empty parens on Student.create.)
  def create_groupings
    @inviter_with_section = Student.create(section: @section)
    @inviter_without_section = Student.create
    @grouping_with_section = Grouping.create(inviter: @inviter_with_section)
    @grouping_without_section = Grouping.create(inviter: @inviter_without_section)
  end

  # Table-driven checks for the four can_collect_* predicates. +expected+
  # carries the boolean each predicate should return in the including
  # context, so each (assignment x section due date) combination reads
  # as one row of a truth table.
  shared_examples 'collection permissions' do |expected|
    it "can_collect_now?(section) should return #{expected[:section]}" do
      expect(@assignment.submission_rule.can_collect_now?(@section))
        .to eq expected[:section]
    end

    it "can_collect_all_now? should return #{expected[:all]}" do
      expect(@assignment.submission_rule.can_collect_all_now?)
        .to eq expected[:all]
    end

    it "can_collect_grouping_now? with section should return #{expected[:with_section]}" do
      expect(@assignment.submission_rule
               .can_collect_grouping_now?(@grouping_with_section))
        .to eq expected[:with_section]
    end

    it "can_collect_grouping_now? without section should return #{expected[:without_section]}" do
      expect(@assignment.submission_rule
               .can_collect_grouping_now?(@grouping_without_section))
        .to eq expected[:without_section]
    end
  end

  # Table-driven checks (accuracy range of 10 minutes) for the computed
  # collection times. Expected times are passed as lambdas so relative
  # dates such as `2.days.ago` are evaluated when each example runs, not
  # when the suite is loaded.
  shared_examples 'collection times' do |section_time, global_time|
    it 'get_collection_time(section) should return correct date value' do
      time_returned = @assignment.submission_rule.get_collection_time(@section)
      expect(close_to?(section_time.call, time_returned)).to eq true
    end

    it 'get_collection_time (global due date) should return correct date value' do
      time_returned = @assignment.submission_rule.get_collection_time
      expect(close_to?(global_time.call, time_returned)).to eq true
    end

    it 'calculate_grouping_collection_time with section should return correct date value' do
      time_returned = @assignment.submission_rule
                                 .calculate_grouping_collection_time(@grouping_with_section)
      expect(close_to?(section_time.call, time_returned)).to eq true
    end

    it 'calculate_grouping_collection_time without section should return correct date value' do
      time_returned = @assignment.submission_rule
                                 .calculate_grouping_collection_time(@grouping_without_section)
      expect(close_to?(global_time.call, time_returned)).to eq true
    end
  end

  context '#calculate_collection_time' do
    let(:assignment) { create(:assignment) }

    it 'should return something other than nil at the end' do
      expect(assignment.submission_rule.calculate_collection_time).to_not be_nil
    end

    it 'should return some date value at the end' do
      expect(assignment.submission_rule.calculate_collection_time.to_date)
        .to be_kind_of(Date)
    end

    # in accuracy range of 10 minutes
    it 'should return a correct time value at the end' do
      time_returned = assignment.submission_rule.calculate_collection_time
      expect(close_to?(Time.now, time_returned)).to eq true
    end
  end

  context '#calculate_grouping_collection_time' do
    let(:assignment) { create(:assignment) }
    let(:grouping_with_inviter) { create(:grouping_with_inviter) }

    it 'should return something other than nil at the end' do
      expect(assignment.submission_rule
               .calculate_grouping_collection_time(grouping_with_inviter))
        .to_not be_nil
    end

    it 'should return some date value at the end' do
      expect(assignment.submission_rule
               .calculate_grouping_collection_time(grouping_with_inviter).to_date)
        .to be_kind_of(Date)
    end

    # in accuracy range of 10 minutes
    it 'should return a correct time value at the end' do
      time_returned = assignment.submission_rule
                                .calculate_grouping_collection_time(grouping_with_inviter)
      expect(close_to?(Time.now, time_returned)).to eq true
    end

    # covers the branch taken when grouping.inviter.section exists
    it 'should return date value if grouping.inviter.section is not nil' do
      expect(assignment.submission_rule
               .calculate_grouping_collection_time(grouping_with_inviter).to_date)
        .to be_kind_of(Date)
    end
  end

  context 'when Section Due Dates are enabled' do
    before :each do
      @assignment = create(:assignment, section_due_dates_type: true)
    end

    context 'and Assignment Due Date is in the past' do
      before :each do
        @assignment.update_attributes(due_date: 1.day.ago)
        @section = create(:section)
      end

      context 'and Section Due Date is in the past' do
        before :each do
          @section_due_date = SectionDueDate.create(section: @section,
                                                    assignment: @assignment,
                                                    due_date: 2.days.ago)
          create_groupings
        end

        include_examples 'collection permissions',
                         section: true, all: true,
                         with_section: true, without_section: true
        include_examples 'collection times',
                         -> { 2.days.ago }, -> { 1.day.ago }
      end

      context 'and Section Due Date is in the future' do
        before :each do
          @section_due_date = SectionDueDate.create(section: @section,
                                                    assignment: @assignment,
                                                    due_date: 2.days.from_now)
          create_groupings
        end

        # A grouping without a section falls back to the (past) global due
        # date, so it is the only thing collectable here.
        include_examples 'collection permissions',
                         section: false, all: false,
                         with_section: false, without_section: true
        include_examples 'collection times',
                         -> { 2.days.from_now }, -> { 1.day.ago }
      end
    end

    context 'and Assignment Due Date is in the future' do
      before :each do
        @assignment.update_attributes(due_date: 1.day.from_now)
        @section = create(:section)
      end

      context 'and Section Due Date is in the past' do
        before :each do
          @section_due_date = SectionDueDate.create(section: @section,
                                                    assignment: @assignment,
                                                    due_date: 2.days.ago)
          create_groupings
        end

        include_examples 'collection permissions',
                         section: true, all: false,
                         with_section: true, without_section: false
        include_examples 'collection times',
                         -> { 2.days.ago }, -> { 1.day.from_now }
      end

      context 'and Section Due Date is in the future' do
        before :each do
          @section_due_date = SectionDueDate.create(section: @section,
                                                    assignment: @assignment,
                                                    due_date: 2.days.from_now)
          create_groupings
        end

        include_examples 'collection permissions',
                         section: false, all: false,
                         with_section: false, without_section: false
        include_examples 'collection times',
                         -> { 2.days.from_now }, -> { 1.day.from_now }
      end
    end
  end

  context 'when Section Due Dates are disabled' do
    before :each do
      @assignment = create(:assignment, section_due_dates_type: false)
    end

    context 'and Assignment Due Date is in the past' do
      before :each do
        @assignment.update_attributes(due_date: 1.day.ago)
        @section = create(:section)
        create_groupings
      end

      # With section due dates disabled, everything collapses onto the
      # global due date, which has passed.
      include_examples 'collection permissions',
                       section: true, all: true,
                       with_section: true, without_section: true
      include_examples 'collection times',
                       -> { 1.day.ago }, -> { 1.day.ago }
    end

    context 'and Assignment Due Date is in the future' do
      before :each do
        @assignment.update_attributes(due_date: 1.day.from_now)
        @section = create(:section)
        create_groupings
      end

      include_examples 'collection permissions',
                       section: false, all: false,
                       with_section: false, without_section: false
      include_examples 'collection times',
                       -> { 1.day.from_now }, -> { 1.day.from_now }
    end
  end
end
Changes as requested by @houndci-bot
require 'spec_helper'
describe SubmissionRule do
it { is_expected.to belong_to(:assignment) }
context '#calculate_collection_time' do
let(:assignment) { create(:assignment) }
it 'should return something other than nil at the end' do
expect(assignment.submission_rule.calculate_collection_time)
.to_not be_nil
end
it 'should return some date value at the end' do
expect(assignment.submission_rule
.calculate_collection_time.to_date)
.to be_kind_of(Date)
end
# in accuracy range of 10 minutes
it 'should return a correct time value at the end' do
time_returned = assignment.submission_rule.calculate_collection_time
time_now = Time.now
time_difference = (time_now - time_returned).abs
expect(time_difference)
.to be < 600
end
end
context '#calculate_grouping_collection_time' do
let(:assignment) { create(:assignment) }
let(:grouping_with_inviter) { create(:grouping_with_inviter) }
it 'should return something other than nil at the end' do
expect(assignment.submission_rule
.calculate_grouping_collection_time(grouping_with_inviter))
.to_not be_nil
end
it 'should return some date value at the end' do
expect(assignment.submission_rule
.calculate_grouping_collection_time(grouping_with_inviter).to_date)
.to be_kind_of(Date)
end
# in accuracy range of 10 minutes
it 'should return a correct time value at the end' do
time_returned = assignment.submission_rule
.calculate_grouping_collection_time(grouping_with_inviter)
time_now = Time.now
time_difference = (time_now - time_returned).abs
expect(time_difference)
.to be < 600
end
# test that is triggered when grouping.inviter.section exists
it 'should return date value if grouping.inviter.section is not nil' do
expect(assignment.submission_rule
.calculate_grouping_collection_time(grouping_with_inviter).to_date)
.to be_kind_of(Date)
end
end
context 'when Section Due Dates are enabled' do
before :each do
@assignment = create(:assignment, section_due_dates_type: true)
end
context 'and Assignment Due Date is in the past' do
before :each do
@assignment.update_attributes(due_date: 1.days.ago)
@section = create(:section)
end
context 'and Section Due Date is in the past' do
before :each do
@section_due_date = SectionDueDate.create(section: @section,
assignment: @assignment,
due_date: 2.days.ago)
@inviter_with_section = Student.create(section: @section)
@inviter_without_section = Student.create
@grouping_with_section = Grouping.create(inviter: @inviter_with_section)
@grouping_without_section = Grouping.create(inviter: @inviter_without_section)
end
context '#can_collect_now?(section)' do
it 'should return true' do
expect(@assignment.submission_rule
.can_collect_now?(@section))
.to eq true
end
end
context '#can_collect_all_now?' do
it 'should return true' do
expect(@assignment.submission_rule
.can_collect_all_now?)
.to eq true
end
end
context '#can_collect_grouping_now?(grouping) with section' do
it 'should return true' do
expect(@assignment.submission_rule
.can_collect_grouping_now?(@grouping_with_section))
.to eq true
end
end
context '#can_collect_grouping_now?(grouping) without section' do
it 'should return true' do
expect(@assignment.submission_rule
.can_collect_grouping_now?(@grouping_without_section))
.to eq true
end
end
# in accuracy range of 10 minutes
context '#get_collection_time(section)' do
it 'should return correct date value' do
time_returned = @assignment.submission_rule
.get_collection_time(@section)
time_difference = (2.days.ago - time_returned).abs
expect(time_difference).to be < 600
end
end
# in accuracy range of 10 minutes
context '#get_collection_time(nil) (i.e. global due date)' do
it 'should return correct date value' do
time_returned = @assignment.submission_rule
.get_collection_time
time_difference = (1.days.ago - time_returned).abs
expect(time_difference).to be < 600
end
end
# in accuracy range of 10 minutes
context '#calculate_grouping_collection_time(grouping) with section' do
it 'should return correct date value' do
time_returned = @assignment
.submission_rule
.calculate_grouping_collection_time(@grouping_with_section)
time_difference = (2.days.ago - time_returned).abs
expect(time_difference).to be < 600
end
end
# in accuracy range of 10 minutes
context '#calculate_grouping_collection_time(grouping) without section' do
it 'should return correct date value' do
time_returned = @assignment
.submission_rule
.calculate_grouping_collection_time(@grouping_without_section)
time_difference = (1.days.ago - time_returned).abs
expect(time_difference).to be < 600
end
end
end
context 'and Section Due Date is in the future' do
before :each do
@section_due_date = SectionDueDate.create(section: @section,
assignment: @assignment,
due_date: 2.days.from_now)
@inviter_with_section = Student.create(section: @section)
@inviter_without_section = Student.create
@grouping_with_section = Grouping.create(inviter: @inviter_with_section)
@grouping_without_section = Grouping.create(inviter: @inviter_without_section)
end
context '#can_collect_now?(section)' do
it 'should return false' do
expect(@assignment.submission_rule
.can_collect_now?(@section))
.to eq false
end
end
context '#can_collect_all_now?' do
it 'should return false' do
expect(@assignment.submission_rule
.can_collect_all_now?)
.to eq false
end
end
context '#can_collect_grouping_now?(grouping) with section' do
it 'should return false' do
expect(@assignment.submission_rule
.can_collect_grouping_now?(@grouping_with_section))
.to eq false
end
end
context '#can_collect_grouping_now?(grouping) without section' do
it 'should return true' do
expect(@assignment.submission_rule
.can_collect_grouping_now?(@grouping_without_section))
.to eq true
end
end
# in accuracy range of 10 minutes
context '#get_collection_time(section)' do
it 'should return correct date value' do
time_returned = @assignment.submission_rule
.get_collection_time(@section)
time_difference = (2.days.from_now - time_returned).abs
expect(time_difference).to be < 600
end
end
# in accuracy range of 10 minutes
context '#get_collection_time(nil) (i.e. global due date)' do
it 'should return correct date value' do
time_returned = @assignment.submission_rule
.get_collection_time
time_difference = (1.days.ago - time_returned).abs
expect(time_difference).to be < 600
end
end
# in accuracy range of 10 minutes
context '#calculate_grouping_collection_time(grouping) with section' do
it 'should return correct date value' do
time_returned = @assignment
.submission_rule
.calculate_grouping_collection_time(@grouping_with_section)
time_difference = (2.days.from_now - time_returned).abs
expect(time_difference).to be < 600
end
end
# in accuracy range of 10 minutes
context '#calculate_grouping_collection_time(grouping) without section' do
it 'should return correct date value' do
time_returned = @assignment
.submission_rule
.calculate_grouping_collection_time(@grouping_without_section)
time_difference = (1.days.ago - time_returned).abs
expect(time_difference).to be < 600
end
end
end
end
context 'and Assignment Due Date is in the future' do
before :each do
@assignment.update_attributes(due_date: 1.days.from_now)
@section = create(:section)
end
context 'and Section Due Date is in the past' do
before :each do
@section_due_date = SectionDueDate.create(section: @section,
assignment: @assignment,
due_date: 2.days.ago)
@inviter_with_section = Student.create(section: @section)
@inviter_without_section = Student.create
@grouping_with_section = Grouping.create(inviter: @inviter_with_section)
@grouping_without_section = Grouping.create(inviter: @inviter_without_section)
end
context '#can_collect_now?(section)' do
it 'should return true' do
expect(@assignment.submission_rule
.can_collect_now?(@section))
.to eq true
end
end
context '#can_collect_all_now?' do
it 'should return false' do
expect(@assignment.submission_rule
.can_collect_all_now?)
.to eq false
end
end
context '#can_collect_grouping_now?(grouping) with section' do
it 'should return true' do
expect(@assignment.submission_rule
.can_collect_grouping_now?(@grouping_with_section))
.to eq true
end
end
context '#can_collect_grouping_now?(grouping) without section' do
it 'should return false' do
expect(@assignment.submission_rule
.can_collect_grouping_now?(@grouping_without_section))
.to eq false
end
end
# in accuracy range of 10 minutes
context '#get_collection_time(section)' do
it 'should return correct date value' do
time_returned = @assignment.submission_rule
.get_collection_time(@section)
time_difference = (2.days.ago - time_returned).abs
expect(time_difference).to be < 600
end
end
# in accuracy range of 10 minutes
context '#get_collection_time(nil) (i.e. global due date)' do
it 'should return correct date value' do
time_returned = @assignment.submission_rule
.get_collection_time
time_difference = (1.days.from_now - time_returned).abs
expect(time_difference).to be < 600
end
end
# in accuracy range of 10 minutes
context '#calculate_grouping_collection_time(grouping) with section' do
it 'should return correct date value' do
time_returned = @assignment
.submission_rule
.calculate_grouping_collection_time(@grouping_with_section)
time_difference = (2.days.ago - time_returned).abs
expect(time_difference).to be < 600
end
end
# in accuracy range of 10 minutes
context '#calculate_grouping_collection_time(grouping) without section' do
it 'should return correct date value' do
time_returned = @assignment
.submission_rule
.calculate_grouping_collection_time(@grouping_without_section)
time_difference = (1.days.from_now - time_returned).abs
expect(time_difference).to be < 600
end
end
end
context 'and Section Due Date is in the future' do
before :each do
@section_due_date = SectionDueDate.create(section: @section,
assignment: @assignment,
due_date: 2.days.from_now)
@inviter_with_section = Student.create(section: @section)
@inviter_without_section = Student.create
@grouping_with_section = Grouping.create(inviter: @inviter_with_section)
@grouping_without_section = Grouping.create(inviter: @inviter_without_section)
end
context '#can_collect_now?(section)' do
it 'should return false' do
expect(@assignment.submission_rule
.can_collect_now?(@section))
.to eq false
end
end
context '#can_collect_all_now?' do
it 'should return false' do
expect(@assignment.submission_rule
.can_collect_all_now?)
.to eq false
end
end
context '#can_collect_grouping_now?(grouping) with section' do
it 'should return false' do
expect(@assignment.submission_rule
.can_collect_grouping_now?(@grouping_with_section))
.to eq false
end
end
context '#can_collect_grouping_now?(grouping) without section' do
it 'should return false' do
expect(@assignment.submission_rule
.can_collect_grouping_now?(@grouping_without_section))
.to eq false
end
end
# in accuracy range of 10 minutes
context '#get_collection_time(section)' do
it 'should return correct date value' do
time_returned = @assignment.submission_rule
.get_collection_time(@section)
time_difference = (2.days.from_now - time_returned).abs
expect(time_difference).to be < 600
end
end
# in accuracy range of 10 minutes
context '#get_collection_time(nil) (i.e. global due date)' do
it 'should return correct date value' do
time_returned = @assignment.submission_rule
.get_collection_time
time_difference = (1.days.from_now - time_returned).abs
expect(time_difference).to be < 600
end
end
# in accuracy range of 10 minutes
context '#calculate_grouping_collection_time(grouping) with section' do
it 'should return correct date value' do
time_returned = @assignment
.submission_rule
.calculate_grouping_collection_time(@grouping_with_section)
time_difference = (2.days.from_now - time_returned).abs
expect(time_difference).to be < 600
end
end
# in accuracy range of 10 minutes
context '#calculate_grouping_collection_time(grouping) without section' do
it 'should return correct date value' do
time_returned = @assignment
.submission_rule
.calculate_grouping_collection_time(@grouping_without_section)
time_difference = (1.days.from_now - time_returned).abs
expect(time_difference).to be < 600
end
end
end
end
end
# When section_due_dates_type is disabled the assignment's single global due
# date applies to every grouping, whether or not the inviter has a section.
context 'when Section Due Dates are disabled' do
  before :each do
    @assignment = create(:assignment, section_due_dates_type: false)
  end

  context 'and Assignment Due Date is in the past' do
    before :each do
      # `update` replaces the deprecated `update_attributes` (alias removed
      # in Rails 6.1); behavior is identical. `1.day` is the grammatical
      # ActiveSupport form of `1.days`.
      @assignment.update(due_date: 1.day.ago)
      @section = create(:section)
      @inviter_with_section = Student.create(section: @section)
      @inviter_without_section = Student.create
      @grouping_with_section = Grouping.create(inviter: @inviter_with_section)
      @grouping_without_section = Grouping.create(inviter: @inviter_without_section)
    end

    context '#can_collect_now?(section)' do
      it 'should return true' do
        expect(@assignment.submission_rule
          .can_collect_now?(@section))
          .to eq true
      end
    end

    context '#can_collect_all_now?' do
      it 'should return true' do
        expect(@assignment.submission_rule
          .can_collect_all_now?)
          .to eq true
      end
    end

    context '#can_collect_grouping_now?(grouping) with section' do
      it 'should return true' do
        expect(@assignment.submission_rule
          .can_collect_grouping_now?(@grouping_with_section))
          .to eq true
      end
    end

    context '#can_collect_grouping_now?(grouping) without section' do
      it 'should return true' do
        expect(@assignment.submission_rule
          .can_collect_grouping_now?(@grouping_without_section))
          .to eq true
      end
    end

    # Collection-time checks allow a 10-minute (600 s) tolerance so the
    # assertions stay robust against slow test runs.
    context '#get_collection_time(section)' do
      it 'should return correct date value' do
        time_returned = @assignment.submission_rule
          .get_collection_time(@section)
        time_difference = (1.day.ago - time_returned).abs
        expect(time_difference).to be < 600
      end
    end

    # in accuracy range of 10 minutes
    context '#get_collection_time(nil) (i.e. global due date)' do
      it 'should return correct date value' do
        time_returned = @assignment.submission_rule
          .get_collection_time
        time_difference = (1.day.ago - time_returned).abs
        expect(time_difference).to be < 600
      end
    end

    # in accuracy range of 10 minutes
    context '#calculate_grouping_collection_time(grouping) with section' do
      it 'should return correct date value' do
        time_returned = @assignment
          .submission_rule
          .calculate_grouping_collection_time(@grouping_with_section)
        time_difference = (1.day.ago - time_returned).abs
        expect(time_difference).to be < 600
      end
    end

    # in accuracy range of 10 minutes
    context '#calculate_grouping_collection_time(grouping) without section' do
      it 'should return correct date value' do
        time_returned = @assignment
          .submission_rule
          .calculate_grouping_collection_time(@grouping_without_section)
        time_difference = (1.day.ago - time_returned).abs
        expect(time_difference).to be < 600
      end
    end
  end

  context 'and Assignment Due Date is in the future' do
    before :each do
      @assignment.update(due_date: 1.day.from_now)
      @section = create(:section)
      @inviter_with_section = Student.create(section: @section)
      @inviter_without_section = Student.create
      @grouping_with_section = Grouping.create(inviter: @inviter_with_section)
      @grouping_without_section = Grouping.create(inviter: @inviter_without_section)
    end

    context '#can_collect_now?(section)' do
      it 'should return false' do
        expect(@assignment.submission_rule
          .can_collect_now?(@section))
          .to eq false
      end
    end

    context '#can_collect_all_now?' do
      it 'should return false' do
        expect(@assignment.submission_rule
          .can_collect_all_now?)
          .to eq false
      end
    end

    context '#can_collect_grouping_now?(grouping) with section' do
      it 'should return false' do
        expect(@assignment.submission_rule
          .can_collect_grouping_now?(@grouping_with_section))
          .to eq false
      end
    end

    context '#can_collect_grouping_now?(grouping) without section' do
      it 'should return false' do
        expect(@assignment.submission_rule
          .can_collect_grouping_now?(@grouping_without_section))
          .to eq false
      end
    end

    # in accuracy range of 10 minutes
    context '#get_collection_time(section)' do
      it 'should return correct date value' do
        time_returned = @assignment.submission_rule
          .get_collection_time(@section)
        time_difference = (1.day.from_now - time_returned).abs
        expect(time_difference).to be < 600
      end
    end

    # in accuracy range of 10 minutes
    context '#get_collection_time(nil) (i.e. global due date)' do
      it 'should return correct date value' do
        time_returned = @assignment.submission_rule
          .get_collection_time
        time_difference = (1.day.from_now - time_returned).abs
        expect(time_difference).to be < 600
      end
    end

    # in accuracy range of 10 minutes
    context '#calculate_grouping_collection_time(grouping) with section' do
      it 'should return correct date value' do
        time_returned = @assignment
          .submission_rule
          .calculate_grouping_collection_time(@grouping_with_section)
        time_difference = (1.day.from_now - time_returned).abs
        expect(time_difference).to be < 600
      end
    end

    # in accuracy range of 10 minutes
    context '#calculate_grouping_collection_time(grouping) without section' do
      it 'should return correct date value' do
        time_returned = @assignment
          .submission_rule
          .calculate_grouping_collection_time(@grouping_without_section)
        time_difference = (1.day.from_now - time_returned).abs
        expect(time_difference).to be < 600
      end
    end
  end
end
end
|
require 'orc/util/option_parser'
# Test double for Orc::Util::OptionParser that records the argv it was
# constructed with instead of reading the real command line.
class MockOptionParser < Orc::Util::OptionParser
  attr_reader :argv

  # argv - Array of command-line tokens to parse in place of ARGV.
  def initialize(argv)
    @argv = argv
    super()
  end
end
# Re-open the parser class to expose its internals (options hash, parsed
# commands, and the underlying ::OptionParser) for inspection by the specs.
class Orc::Util::OptionParser
  attr_accessor :options, :commands, :option_parser
end
# Fake command class used to exercise setup_command_options/parse/execute
# without touching any real orc command.
class Orc::Util::OptionParser::TestOption < Orc::Util::OptionParser::Base
  attr_accessor :have_run, :factory

  # Option keys that must be present on the command line for this command.
  def required
    [:environment, :application, :version]
  end

  # Records that the command ran and which factory it was handed.
  def execute(factory)
    @have_run = true
    @factory = factory
  end

  # Flag definition consumed by ::OptionParser: short flag, long flag, help.
  def self.command_options
    ['-z', '--zzzz', 'ZZZ']
  end
end
describe Orc::Util::OptionParser do
  it 'can be constructed' do
    Orc::Util::OptionParser.new
  end

  it 'Can parse and execute fake option' do
    p = MockOptionParser.new(['-z', '--environment', 'foo', '--application', 'bar', '--version', '2.5'])
    Orc::Util::OptionParser::TestOption.setup_command_options(p.options, p.option_parser, p.commands)
    p.parse
    expect($options).to eql(:environment => 'foo', :application => 'bar', :version => '2.5')
    expect(p.commands.size).to eql(1)
    command = p.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::TestOption')
    expect(command.have_run).to eql(nil)
    p.execute
    expect(command.have_run).to eql(true)
    expect(command.factory.class.name).to eql('Orc::Factory')
  end

  # FIXME: We need tests for each set of command options
  # on the command line - how do you locally override @ARGV
  # inside a test to be able to test this?
  it 'parses options from argv and passes them to option class constructor' do
    parser = MockOptionParser.new(['--environment', 'foo', '--application', 'bar', '-r']).parse
    expect($options).to eql(:environment => 'foo', :application => 'bar')
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::ResolveRequest')
    expect(command.options).to eql(:environment => 'foo', :application => 'bar')
  end

  it 'Works with just --status and --environment' do
    parser = MockOptionParser.new(['--status', '--environment', 'bar']).parse
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::StatusRequest')
  end

  it 'Works for DeployRequest' do
    parser = MockOptionParser.new(['--deploy', '--environment', 'bar', '--application', 'MyApp', '--version', '1']).
      parse
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::DeployRequest')
  end

  it 'Works for PromotionRequest' do
    parser = MockOptionParser.new(['-u', '--promote-from', 'baz', '--environment', 'bar', '--application', 'MyApp']).
      parse
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::PromotionRequest')
  end

  it 'Works for InstallRequest' do
    parser = MockOptionParser.new(['--install', '--environment', 'bar', '--application', 'MyApp', '--version', '1']).
      parse
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::InstallRequest')
  end

  it 'Works for LimitedInstallRequest' do
    parser = MockOptionParser.new(['--limited-install', '--environment', 'bar', '--application', 'MyApp',
                                   '--version', '1']).
      parse
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::LimitedInstallRequest')
  end

  it 'Works for InstallRequest with groups' do
    parser = MockOptionParser.new(['--install', '--environment', 'bar', '--application', 'MyApp', '--version', '1',
                                   '--group', 'blue']).parse
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.options[:group]).to eql 'blue'
    expect(command.class.name).to eql('Orc::Util::OptionParser::InstallRequest')
  end

  it 'Works for SwapRequest' do
    parser = MockOptionParser.new(['--swap', '--environment', 'bar', '--application', 'MyApp']).parse
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::SwapRequest')
  end

  # NOTE(review): a second, byte-identical 'Works for DeployRequest' example
  # used to live here; removed as an exact duplicate of the one above.

  it 'Works for RollingRestartRequest' do
    parser = MockOptionParser.new(['--rolling-restart', '--environment', 'bar', '--application', 'MyApp',
                                   '--group', 'blue']).parse
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::RollingRestartRequest')
  end

  it 'will exit 1 when invalid option provided' do
    # The old begin/rescue form passed silently when no SystemExit was
    # raised; expect { }.to raise_error makes the exit mandatory.
    expect do
      MockOptionParser.new(['--socks', '--environment', 'bar', '--application', 'MyApp', '--group', 'blue']).parse
    end.to raise_error(SystemExit) do |e|
      expect(e.status).to eql(1)
    end
  end
end
Remove the help text for --rolling-restart, since that command is not
implemented (and remove its test as well).
require 'orc/util/option_parser'
# Test double for Orc::Util::OptionParser that records the argv it was
# constructed with instead of reading the real command line.
class MockOptionParser < Orc::Util::OptionParser
  attr_reader :argv

  # argv - Array of command-line tokens to parse in place of ARGV.
  def initialize(argv)
    @argv = argv
    super()
  end
end
# Re-open the parser class to expose its internals (options hash, parsed
# commands, and the underlying ::OptionParser) for inspection by the specs.
class Orc::Util::OptionParser
  attr_accessor :options, :commands, :option_parser
end
# Fake command class used to exercise setup_command_options/parse/execute
# without touching any real orc command.
class Orc::Util::OptionParser::TestOption < Orc::Util::OptionParser::Base
  attr_accessor :have_run, :factory

  # Option keys that must be present on the command line for this command.
  def required
    [:environment, :application, :version]
  end

  # Records that the command ran and which factory it was handed.
  def execute(factory)
    @have_run = true
    @factory = factory
  end

  # Flag definition consumed by ::OptionParser: short flag, long flag, help.
  def self.command_options
    ['-z', '--zzzz', 'ZZZ']
  end
end
describe Orc::Util::OptionParser do
  it 'can be constructed' do
    Orc::Util::OptionParser.new
  end

  it 'Can parse and execute fake option' do
    p = MockOptionParser.new(['-z', '--environment', 'foo', '--application', 'bar', '--version', '2.5'])
    Orc::Util::OptionParser::TestOption.setup_command_options(p.options, p.option_parser, p.commands)
    p.parse
    expect($options).to eql(:environment => 'foo', :application => 'bar', :version => '2.5')
    expect(p.commands.size).to eql(1)
    command = p.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::TestOption')
    expect(command.have_run).to eql(nil)
    p.execute
    expect(command.have_run).to eql(true)
    expect(command.factory.class.name).to eql('Orc::Factory')
  end

  # FIXME: We need tests for each set of command options
  # on the command line - how do you locally override @ARGV
  # inside a test to be able to test this?
  it 'parses options from argv and passes them to option class constructor' do
    parser = MockOptionParser.new(['--environment', 'foo', '--application', 'bar', '-r']).parse
    expect($options).to eql(:environment => 'foo', :application => 'bar')
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::ResolveRequest')
    expect(command.options).to eql(:environment => 'foo', :application => 'bar')
  end

  it 'Works with just --status and --environment' do
    parser = MockOptionParser.new(['--status', '--environment', 'bar']).parse
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::StatusRequest')
  end

  it 'Works for DeployRequest' do
    parser = MockOptionParser.new(['--deploy', '--environment', 'bar', '--application', 'MyApp', '--version', '1']).
      parse
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::DeployRequest')
  end

  it 'Works for PromotionRequest' do
    parser = MockOptionParser.new(['-u', '--promote-from', 'baz', '--environment', 'bar', '--application', 'MyApp']).
      parse
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::PromotionRequest')
  end

  it 'Works for InstallRequest' do
    parser = MockOptionParser.new(['--install', '--environment', 'bar', '--application', 'MyApp', '--version', '1']).
      parse
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::InstallRequest')
  end

  it 'Works for LimitedInstallRequest' do
    parser = MockOptionParser.new(['--limited-install', '--environment', 'bar', '--application', 'MyApp',
                                   '--version', '1']).
      parse
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::LimitedInstallRequest')
  end

  it 'Works for InstallRequest with groups' do
    parser = MockOptionParser.new(['--install', '--environment', 'bar', '--application', 'MyApp', '--version', '1',
                                   '--group', 'blue']).parse
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.options[:group]).to eql 'blue'
    expect(command.class.name).to eql('Orc::Util::OptionParser::InstallRequest')
  end

  it 'Works for SwapRequest' do
    parser = MockOptionParser.new(['--swap', '--environment', 'bar', '--application', 'MyApp']).parse
    expect(parser.commands.size).to eql(1)
    command = parser.commands[0]
    expect(command.class.name).to eql('Orc::Util::OptionParser::SwapRequest')
  end

  # NOTE(review): a second, byte-identical 'Works for DeployRequest' example
  # used to live here; removed as an exact duplicate of the one above.

  it 'will exit 1 when invalid option provided' do
    # The old begin/rescue form passed silently when no SystemExit was
    # raised; expect { }.to raise_error makes the exit mandatory.
    expect do
      MockOptionParser.new(['--socks', '--environment', 'bar', '--application', 'MyApp', '--group', 'blue']).parse
    end.to raise_error(SystemExit) do |e|
      expect(e.status).to eql(1)
    end
  end
end
|
require "spec_helper"
describe Protocop::Fields::Macros do
describe ".optional" do
before(:all) do
class Request
include Protocop::Message
optional :string, :name, 1
end
end
after(:all) do
Object.__send__(:remove_const, :Request)
end
let(:field) do
Request.fields[:name]
end
it "adds the field to the class" do
expect(field).to be_a(Protocop::Fields::String)
end
it "sets the field number" do
expect(field.number).to eq(1)
end
it "does not require the field" do
expect(field).to_not be_required
end
context "when setting the string via the setter" do
let(:message) do
Request.new
end
let!(:string) do
message.name = "testing"
end
it "sets the string in the message" do
expect(message.name).to eq("testing")
end
it "returns the set string" do
expect(string).to eq("testing")
end
it "provides access to the fields from the instance" do
expect(message.fields).to eq(Request.fields)
end
end
end
describe ".repeated" do

  context "when the field is not packed" do

    before(:all) do
      class Request
        include Protocop::Message
        repeated :string, :names, 1
      end
    end

    after(:all) do
      Object.__send__(:remove_const, :Request)
    end

    let(:field) do
      Request.fields[:names]
    end

    it "adds the field to the class" do
      expect(field).to be_a(Protocop::Fields::String)
    end

    it "sets the field number" do
      expect(field.number).to eq(1)
    end

    it "sets the field as repeated" do
      expect(field).to be_repeated
    end

    context "when setting the string via the setter" do

      let(:message) do
        Request.new
      end

      let!(:strings) do
        message.names = [ "test", "testing" ]
      end

      it "sets the strings in the message" do
        expect(message.names).to eq([ "test", "testing" ])
      end

      it "returns the set string" do
        expect(strings).to eq([ "test", "testing" ])
      end

      it "provides access to the fields from the instance" do
        expect(message.fields).to eq(Request.fields)
      end
    end
  end

  # Coverage for the packed wire-format option on repeated fields, which was
  # previously untested.
  context "when the field is packed" do

    before(:all) do
      class Request
        include Protocop::Message
        repeated :string, :names, 1, packed: true
      end
    end

    after(:all) do
      Object.__send__(:remove_const, :Request)
    end

    let(:field) do
      Request.fields[:names]
    end

    it "adds the field to the class" do
      expect(field).to be_a(Protocop::Fields::String)
    end

    it "sets the field number" do
      expect(field.number).to eq(1)
    end

    it "sets the field as repeated" do
      expect(field).to be_repeated
    end

    it "sets the field as packed" do
      expect(field).to be_packed
    end
  end
end
describe ".required" do
context "when providing an embedded message" do
before(:all) do
class Command
include Protocop::Message
required :string, :name, 1
end
class Request
include Protocop::Message
required Command, :command, 2
end
end
after(:all) do
Object.__send__(:remove_const, :Command)
Object.__send__(:remove_const, :Request)
end
let(:field) do
Request.fields[:command]
end
it "adds the field to the class" do
expect(field).to be_a(Protocop::Fields::Embedded)
end
it "sets the field number" do
expect(field.number).to eq(2)
end
it "sets the field type" do
expect(field.type).to eq(Command)
end
it "sets the field to required" do
expect(field).to be_required
end
end
context "when defining an enum" do
before(:all) do
class Request
include Protocop::Message
module Type
QUERY = 0
COUNT = 1
end
end
end
after(:all) do
Object.__send__(:remove_const, :Request)
end
context "when providing a default" do
before(:all) do
Request.required(Request::Type, :message_type, 1, default: Request::Type::COUNT)
end
let(:field) do
Request.fields[:message_type]
end
it "adds the field to the class" do
expect(field).to be_a(Protocop::Fields::Enum)
end
it "sets the field number" do
expect(field.number).to eq(1)
end
it "sets the field type" do
expect(field.type).to eq(Request::Type)
end
it "sets the default options" do
expect(field.default).to eq(Request::Type::COUNT)
end
end
context "when not providing a default" do
before(:all) do
Request.required(Request::Type, :mess_type, 1)
end
let(:field) do
Request.fields[:mess_type]
end
it "adds the field to the class" do
expect(field).to be_a(Protocop::Fields::Enum)
end
it "sets the field number" do
expect(field.number).to eq(1)
end
it "sets the field type" do
expect(field.type).to eq(Request::Type)
end
it "does not set a default option" do
expect(field.default).to be_nil
end
end
end
context "when defining a field with a default" do
before(:all) do
class Request
include Protocop::Message
required :string, :name, 1, default: "testing"
end
end
after(:all) do
Object.__send__(:remove_const, :Request)
end
context "when getting the field value" do
let(:message) do
Request.new
end
it "returns the default" do
expect(message.name).to eq("testing")
end
end
end
context "when defining a string field" do
before(:all) do
class Request
include Protocop::Message
required :string, :name, 1
end
end
after(:all) do
Object.__send__(:remove_const, :Request)
end
let(:field) do
Request.fields[:name]
end
it "adds the field to the class" do
expect(field).to be_a(Protocop::Fields::String)
end
it "sets the field number" do
expect(field.number).to eq(1)
end
context "when setting the string via the setter" do
let(:message) do
Request.new
end
let!(:string) do
message.name = "testing"
end
it "sets the string in the message" do
expect(message.name).to eq("testing")
end
it "returns the set string" do
expect(string).to eq("testing")
end
it "provides access to the fields from the instance" do
expect(message.fields).to eq(Request.fields)
end
end
end
end
end
Add a spec covering the packed option on repeated fields.
require "spec_helper"
describe Protocop::Fields::Macros do
describe ".optional" do
before(:all) do
class Request
include Protocop::Message
optional :string, :name, 1
end
end
after(:all) do
Object.__send__(:remove_const, :Request)
end
let(:field) do
Request.fields[:name]
end
it "adds the field to the class" do
expect(field).to be_a(Protocop::Fields::String)
end
it "sets the field number" do
expect(field.number).to eq(1)
end
it "does not require the field" do
expect(field).to_not be_required
end
context "when setting the string via the setter" do
let(:message) do
Request.new
end
let!(:string) do
message.name = "testing"
end
it "sets the string in the message" do
expect(message.name).to eq("testing")
end
it "returns the set string" do
expect(string).to eq("testing")
end
it "provides access to the fields from the instance" do
expect(message.fields).to eq(Request.fields)
end
end
end
describe ".repeated" do
context "when the field is not packed" do
before(:all) do
class Request
include Protocop::Message
repeated :string, :names, 1
end
end
after(:all) do
Object.__send__(:remove_const, :Request)
end
let(:field) do
Request.fields[:names]
end
it "adds the field to the class" do
expect(field).to be_a(Protocop::Fields::String)
end
it "sets the field number" do
expect(field.number).to eq(1)
end
it "sets the field as repeated" do
expect(field).to be_repeated
end
context "when setting the string via the setter" do
let(:message) do
Request.new
end
let!(:strings) do
message.names = [ "test", "testing" ]
end
it "sets the strings in the message" do
expect(message.names).to eq([ "test", "testing" ])
end
it "returns the set string" do
expect(strings).to eq([ "test", "testing" ])
end
it "provides access to the fields from the instance" do
expect(message.fields).to eq(Request.fields)
end
end
end
context "when the field is packed" do
before(:all) do
class Request
include Protocop::Message
repeated :string, :names, 1, packed: true
end
end
after(:all) do
Object.__send__(:remove_const, :Request)
end
let(:field) do
Request.fields[:names]
end
it "adds the field to the class" do
expect(field).to be_a(Protocop::Fields::String)
end
it "sets the field number" do
expect(field.number).to eq(1)
end
it "sets the field as repeated" do
expect(field).to be_repeated
end
it "sets the field as packed" do
expect(field).to be_packed
end
end
end
describe ".required" do
context "when providing an embedded message" do
before(:all) do
class Command
include Protocop::Message
required :string, :name, 1
end
class Request
include Protocop::Message
required Command, :command, 2
end
end
after(:all) do
Object.__send__(:remove_const, :Command)
Object.__send__(:remove_const, :Request)
end
let(:field) do
Request.fields[:command]
end
it "adds the field to the class" do
expect(field).to be_a(Protocop::Fields::Embedded)
end
it "sets the field number" do
expect(field.number).to eq(2)
end
it "sets the field type" do
expect(field.type).to eq(Command)
end
it "sets the field to required" do
expect(field).to be_required
end
end
context "when defining an enum" do
before(:all) do
class Request
include Protocop::Message
module Type
QUERY = 0
COUNT = 1
end
end
end
after(:all) do
Object.__send__(:remove_const, :Request)
end
context "when providing a default" do
before(:all) do
Request.required(Request::Type, :message_type, 1, default: Request::Type::COUNT)
end
let(:field) do
Request.fields[:message_type]
end
it "adds the field to the class" do
expect(field).to be_a(Protocop::Fields::Enum)
end
it "sets the field number" do
expect(field.number).to eq(1)
end
it "sets the field type" do
expect(field.type).to eq(Request::Type)
end
it "sets the default options" do
expect(field.default).to eq(Request::Type::COUNT)
end
end
context "when not providing a default" do
before(:all) do
Request.required(Request::Type, :mess_type, 1)
end
let(:field) do
Request.fields[:mess_type]
end
it "adds the field to the class" do
expect(field).to be_a(Protocop::Fields::Enum)
end
it "sets the field number" do
expect(field.number).to eq(1)
end
it "sets the field type" do
expect(field.type).to eq(Request::Type)
end
it "does not set a default option" do
expect(field.default).to be_nil
end
end
end
context "when defining a field with a default" do
before(:all) do
class Request
include Protocop::Message
required :string, :name, 1, default: "testing"
end
end
after(:all) do
Object.__send__(:remove_const, :Request)
end
context "when getting the field value" do
let(:message) do
Request.new
end
it "returns the default" do
expect(message.name).to eq("testing")
end
end
end
context "when defining a string field" do
before(:all) do
class Request
include Protocop::Message
required :string, :name, 1
end
end
after(:all) do
Object.__send__(:remove_const, :Request)
end
let(:field) do
Request.fields[:name]
end
it "adds the field to the class" do
expect(field).to be_a(Protocop::Fields::String)
end
it "sets the field number" do
expect(field.number).to eq(1)
end
context "when setting the string via the setter" do
let(:message) do
Request.new
end
let!(:string) do
message.name = "testing"
end
it "sets the string in the message" do
expect(message.name).to eq("testing")
end
it "returns the set string" do
expect(string).to eq("testing")
end
it "provides access to the fields from the instance" do
expect(message.fields).to eq(Request.fields)
end
end
end
end
end
|
require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'spec_helper'))
describe DataMapper::Property, 'Object type' do
before :all do
module ::Blog
class Article
include DataMapper::Resource
property :id, Serial
property :title, String
property :meta, Object, :nullable => false
end
end
@model = Blog::Article
@property = @model.properties[:meta]
end
subject { @property }
it { should respond_to(:typecast) }
describe '#typecast' do
before do
@value = { 'lang' => 'en_CA' }
end
subject { @property.typecast(@value) }
it { should equal(@value) }
end
it { should respond_to(:value) }
describe '#value' do
  describe 'with a value' do
    subject { @property.value('lang' => 'en_CA') }

    # Marshal output differs between Ruby 1.8 and 1.9: on 1.9 strings are
    # dumped with their encoding attached, so the Base64 payload changes.
    if RUBY_VERSION >= '1.9'
      it { should == "BAh7BkkiCWxhbmcGOg1lbmNvZGluZyINVVMtQVNDSUlJIgplbl9DQQY7AEAH\n" }
    else
      it { should == "BAh7BiIJbGFuZyIKZW5fQ0E=\n" }
    end
  end

  describe 'with nil' do
    subject { @property.value(nil) }
    it { should be_nil }
  end
end
it { should respond_to(:valid?) }
describe '#valid?' do
describe 'with a valid primitive' do
subject { @property.valid?('lang' => 'en_CA') }
it { should be_true }
end
describe 'with nil and property is nullable' do
before do
@property = @model.property(:meta, Object, :nullable => true)
end
subject { @property.valid?(nil) }
it { should be_true }
end
describe 'with nil and property is not nullable' do
subject { @property.valid?(nil) }
it { should be_false }
end
describe 'with nil and property is not nullable, but validity is negated' do
subject { @property.valid?(nil, true) }
it { should be_true }
end
end
describe 'persistable' do
supported_by :all do
before :all do
@do_adapter = defined?(DataMapper::Adapters::DataObjectsAdapter) && @adapter.kind_of?(DataMapper::Adapters::DataObjectsAdapter)
end
before :all do
@resource = @model.create(:title => 'Test', :meta => { 'lang' => 'en_CA' })
end
subject { @resource.reload.meta }
it 'should load the correct value' do
pending_if 'Fix adapters to use different serialization methods', !@do_adapter do
should == { 'lang' => 'en_CA' }
end
end
end
end
end
Account for Marshal output differing between Ruby 1.8 and 1.9 (1.9 serializes
string encodings, changing the dumped bytes).
require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'spec_helper'))
describe DataMapper::Property, 'Object type' do
before :all do
module ::Blog
class Article
include DataMapper::Resource
property :id, Serial
property :title, String
property :meta, Object, :nullable => false
end
end
@model = Blog::Article
@property = @model.properties[:meta]
end
subject { @property }
it { should respond_to(:typecast) }
describe '#typecast' do
before do
@value = { 'lang' => 'en_CA' }
end
subject { @property.typecast(@value) }
it { should equal(@value) }
end
it { should respond_to(:value) }
describe '#value' do
describe 'with a value' do
subject { @property.value('lang' => 'en_CA') }
if RUBY_VERSION >= '1.9'
it { should == "BAh7BkkiCWxhbmcGOg1lbmNvZGluZyINVVMtQVNDSUlJIgplbl9DQQY7AEAH\n" }
else
it { should == "BAh7BiIJbGFuZyIKZW5fQ0E=\n" }
end
end
describe 'with nil' do
subject { @property.value(nil) }
it { should be_nil }
end
end
it { should respond_to(:valid?) }
describe '#valid?' do
describe 'with a valid primitive' do
subject { @property.valid?('lang' => 'en_CA') }
it { should be_true }
end
describe 'with nil and property is nullable' do
before do
@property = @model.property(:meta, Object, :nullable => true)
end
subject { @property.valid?(nil) }
it { should be_true }
end
describe 'with nil and property is not nullable' do
subject { @property.valid?(nil) }
it { should be_false }
end
describe 'with nil and property is not nullable, but validity is negated' do
subject { @property.valid?(nil, true) }
it { should be_true }
end
end
describe 'persistable' do
supported_by :all do
before :all do
@do_adapter = defined?(DataMapper::Adapters::DataObjectsAdapter) && @adapter.kind_of?(DataMapper::Adapters::DataObjectsAdapter)
end
before :all do
@resource = @model.create(:title => 'Test', :meta => { 'lang' => 'en_CA' })
end
subject { @resource.reload.meta }
it 'should load the correct value' do
pending_if 'Fix adapters to use different serialization methods', !@do_adapter do
should == { 'lang' => 'en_CA' }
end
end
end
end
end
|
# frozen_string_literal: true
RSpec.describe "double checking sources", :realworld => true do
it "finds already-installed gems" do
create_file("rails.gemspec", <<-RUBY)
Gem::Specification.new do |s|
s.name = "rails"
s.version = "5.1.4"
s.summary = ""
s.description = ""
s.author = ""
s.add_dependency "actionpack", "5.1.4"
end
RUBY
create_file("actionpack.gemspec", <<-RUBY)
Gem::Specification.new do |s|
s.name = "actionpack"
s.version = "5.1.4"
s.summary = ""
s.description = ""
s.author = ""
s.add_dependency "rack", "~> 2.0.0"
end
RUBY
cmd = <<-RUBY
require "bundler"
require #{File.expand_path("../../support/artifice/vcr.rb", __FILE__).dump}
require "bundler/inline"
gemfile(true) do
source "https://rubygems.org"
gem "rails", path: "."
end
RUBY
ruby! cmd
ruby! cmd
end
end
skip old versions for rack dependency problem.
# frozen_string_literal: true
RSpec.describe "double checking sources", :realworld => true do
it "finds already-installed gems" do
create_file("rails.gemspec", <<-RUBY)
Gem::Specification.new do |s|
s.name = "rails"
s.version = "5.1.4"
s.summary = ""
s.description = ""
s.author = ""
s.add_dependency "actionpack", "5.1.4"
end
RUBY
create_file("actionpack.gemspec", <<-RUBY)
Gem::Specification.new do |s|
s.name = "actionpack"
s.version = "5.1.4"
s.summary = ""
s.description = ""
s.author = ""
s.add_dependency "rack", "~> 2.0.0"
end
RUBY
cmd = <<-RUBY
require "bundler"
require #{File.expand_path("../../support/artifice/vcr.rb", __FILE__).dump}
require "bundler/inline"
gemfile(true) do
source "https://rubygems.org"
gem "rails", path: "."
end
RUBY
ruby! cmd
ruby! cmd
end if RUBY_VERSION >= "2.2" # rails 5.x and rack 2.x only supports >= Ruby 2.2.
end
|
RSpec.describe "Snapshots API" do
describe "as a subcollection of VMs" do
describe "GET /api/vms/:c_id/snapshots" do
it "can list the snapshots of a VM" do
api_basic_authorize(subcollection_action_identifier(:vms, :snapshots, :read, :get))
vm = FactoryGirl.create(:vm_vmware)
snapshot = FactoryGirl.create(:snapshot, :vm_or_template => vm)
_other_snapshot = FactoryGirl.create(:snapshot)
run_get("#{vms_url(vm.id)}/snapshots")
expected = {
"count" => 2,
"name" => "snapshots",
"subcount" => 1,
"resources" => [
{"href" => a_string_matching("#{vms_url(vm.id)}/snapshots/#{snapshot.id}")}
]
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
it "will not list snapshots unless authorized" do
api_basic_authorize
vm = FactoryGirl.create(:vm_vmware)
FactoryGirl.create(:snapshot, :vm_or_template => vm)
run_get("#{vms_url(vm.id)}/snapshots")
expect(response).to have_http_status(:forbidden)
end
end
end
describe "GET /api/vms/:c_id/snapshots/:s_id" do
it "can show a VM's snapshot" do
api_basic_authorize(subcollection_action_identifier(:vms, :snapshots, :read, :get))
vm = FactoryGirl.create(:vm_vmware)
create_time = Time.zone.parse("2017-01-11T00:00:00Z")
snapshot = FactoryGirl.create(:snapshot, :vm_or_template => vm, :create_time => create_time)
run_get("#{vms_url(vm.id)}/snapshots/#{snapshot.id}")
expected = {
"create_time" => create_time.iso8601,
"href" => a_string_matching("#{vms_url(vm.id)}/snapshots/#{snapshot.id}"),
"id" => snapshot.id,
"vm_or_template_id" => vm.id
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
it "will not show a snapshot unless authorized" do
api_basic_authorize
vm = FactoryGirl.create(:vm_vmware)
snapshot = FactoryGirl.create(:snapshot, :vm_or_template => vm)
run_get("#{vms_url(vm.id)}/snapshots/#{snapshot.id}")
expect(response).to have_http_status(:forbidden)
end
describe "POST /api/vms/:c_id/snapshots" do
it "can queue the creation of a snapshot" do
api_basic_authorize(subcollection_action_identifier(:vms, :snapshots, :create))
ems = FactoryGirl.create(:ext_management_system)
host = FactoryGirl.create(:host, :ext_management_system => ems)
vm = FactoryGirl.create(:vm_vmware, :name => "Alice's VM", :host => host, :ext_management_system => ems)
run_post("#{vms_url(vm.id)}/snapshots", :name => "Alice's snapshot")
expected = {
"results" => [
a_hash_including(
"success" => true,
"message" => "Creating snapshot Alice's snapshot for Vm id:#{vm.id} name:'Alice's VM'",
"task_id" => anything,
"task_href" => a_string_matching(tasks_url)
)
]
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
it "renders a failed action response if snapshotting is not supported" do
api_basic_authorize(subcollection_action_identifier(:vms, :snapshots, :create))
vm = FactoryGirl.create(:vm_vmware)
run_post("#{vms_url(vm.id)}/snapshots", :name => "Alice's snapsnot")
expected = {
"results" => [
a_hash_including(
"success" => false,
"message" => "The VM is not connected to a Host"
)
]
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
it "renders a failed action response if a name is not provided" do
api_basic_authorize(subcollection_action_identifier(:vms, :snapshots, :create))
ems = FactoryGirl.create(:ext_management_system)
host = FactoryGirl.create(:host, :ext_management_system => ems)
vm = FactoryGirl.create(:vm_vmware, :name => "Alice's VM", :host => host, :ext_management_system => ems)
run_post("#{vms_url(vm.id)}/snapshots", :description => "Alice's snapshot")
expected = {
"results" => [
a_hash_including(
"success" => false,
"message" => "Must specify a name for the snapshot"
)
]
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
it "will not create a snapshot unless authorized" do
api_basic_authorize
vm = FactoryGirl.create(:vm_vmware)
run_post("#{vms_url(vm.id)}/snapshots", :description => "Alice's snapshot")
expect(response).to have_http_status(:forbidden)
end
end
describe "POST /api/vms/:c_id/snapshots/:s_id with delete action" do
it "can queue a snapshot for deletion" do
api_basic_authorize(action_identifier(:snapshots, :delete, :subresource_actions, :delete))
ems = FactoryGirl.create(:ext_management_system)
host = FactoryGirl.create(:host, :ext_management_system => ems)
vm = FactoryGirl.create(:vm_vmware, :name => "Alice's VM", :host => host, :ext_management_system => ems)
snapshot = FactoryGirl.create(:snapshot, :name => "Alice's snapshot", :vm_or_template => vm)
run_post("#{vms_url(vm.id)}/snapshots/#{snapshot.id}", :action => "delete")
expected = {
"message" => "Deleting snapshot Alice's snapshot for Vm id:#{vm.id} name:'Alice's VM'",
"success" => true,
"task_href" => a_string_matching(tasks_url),
"task_id" => anything
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
it "renders a failed action response if deleting is not supported" do
api_basic_authorize(action_identifier(:snapshots, :delete, :subresource_actions, :post))
vm = FactoryGirl.create(:vm_vmware)
snapshot = FactoryGirl.create(:snapshot, :vm_or_template => vm)
run_post("#{vms_url(vm.id)}/snapshots/#{snapshot.id}", :action => "delete")
expected = {
"success" => false,
"message" => "The VM is not connected to a Host"
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
it "will not delete a snapshot unless authorized" do
api_basic_authorize
vm = FactoryGirl.create(:vm_vmware)
snapshot = FactoryGirl.create(:snapshot, :vm_or_template => vm)
run_post("#{vms_url(vm.id)}/snapshots/#{snapshot.id}", :action => "delete")
expect(response).to have_http_status(:forbidden)
end
end
describe "POST /api/vms/:c_id/snapshots with delete action" do
it "can queue multiple snapshots for deletion" do
api_basic_authorize(action_identifier(:snapshots, :delete, :subresource_actions, :delete))
ems = FactoryGirl.create(:ext_management_system)
host = FactoryGirl.create(:host, :ext_management_system => ems)
vm = FactoryGirl.create(:vm_vmware, :name => "Alice and Bob's VM", :host => host, :ext_management_system => ems)
snapshot1 = FactoryGirl.create(:snapshot, :name => "Alice's snapshot", :vm_or_template => vm)
snapshot2 = FactoryGirl.create(:snapshot, :name => "Bob's snapshot", :vm_or_template => vm)
run_post(
"#{vms_url(vm.id)}/snapshots",
:action => "delete",
:resources => [
{:href => "#{vms_url(vm.id)}/snapshots/#{snapshot1.id}"},
{:href => "#{vms_url(vm.id)}/snapshots/#{snapshot2.id}"}
]
)
expected = {
"results" => a_collection_containing_exactly(
a_hash_including(
"message" => "Deleting snapshot Alice's snapshot for Vm id:#{vm.id} name:'Alice and Bob's VM'",
"success" => true,
"task_href" => a_string_matching(tasks_url),
"task_id" => anything
),
a_hash_including(
"message" => "Deleting snapshot Bob's snapshot for Vm id:#{vm.id} name:'Alice and Bob's VM'",
"success" => true,
"task_href" => a_string_matching(tasks_url),
"task_id" => anything
)
)
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
end
describe "DELETE /api/vms/:c_id/snapshots/:s_id" do
it "can delete a snapshot" do
api_basic_authorize(action_identifier(:snapshots, :delete, :subresource_actions, :delete))
vm = FactoryGirl.create(:vm_vmware)
snapshot = FactoryGirl.create(:snapshot, :vm_or_template => vm)
run_delete("#{vms_url(vm.id)}/snapshots/#{snapshot.id}")
expect(response).to have_http_status(:no_content)
end
it "will not delete a snapshot unless authorized" do
api_basic_authorize
vm = FactoryGirl.create(:vm_vmware)
snapshot = FactoryGirl.create(:snapshot, :vm_or_template => vm)
run_delete("#{vms_url(vm.id)}/snapshots/#{snapshot.id}")
expect(response).to have_http_status(:forbidden)
end
end
end
end
Fix nesting of describes in snapshots spec
RSpec.describe "Snapshots API" do
describe "as a subcollection of VMs" do
describe "GET /api/vms/:c_id/snapshots" do
it "can list the snapshots of a VM" do
api_basic_authorize(subcollection_action_identifier(:vms, :snapshots, :read, :get))
vm = FactoryGirl.create(:vm_vmware)
snapshot = FactoryGirl.create(:snapshot, :vm_or_template => vm)
_other_snapshot = FactoryGirl.create(:snapshot)
run_get("#{vms_url(vm.id)}/snapshots")
expected = {
"count" => 2,
"name" => "snapshots",
"subcount" => 1,
"resources" => [
{"href" => a_string_matching("#{vms_url(vm.id)}/snapshots/#{snapshot.id}")}
]
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
it "will not list snapshots unless authorized" do
api_basic_authorize
vm = FactoryGirl.create(:vm_vmware)
FactoryGirl.create(:snapshot, :vm_or_template => vm)
run_get("#{vms_url(vm.id)}/snapshots")
expect(response).to have_http_status(:forbidden)
end
end
describe "GET /api/vms/:c_id/snapshots/:s_id" do
it "can show a VM's snapshot" do
api_basic_authorize(subcollection_action_identifier(:vms, :snapshots, :read, :get))
vm = FactoryGirl.create(:vm_vmware)
create_time = Time.zone.parse("2017-01-11T00:00:00Z")
snapshot = FactoryGirl.create(:snapshot, :vm_or_template => vm, :create_time => create_time)
run_get("#{vms_url(vm.id)}/snapshots/#{snapshot.id}")
expected = {
"create_time" => create_time.iso8601,
"href" => a_string_matching("#{vms_url(vm.id)}/snapshots/#{snapshot.id}"),
"id" => snapshot.id,
"vm_or_template_id" => vm.id
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
it "will not show a snapshot unless authorized" do
api_basic_authorize
vm = FactoryGirl.create(:vm_vmware)
snapshot = FactoryGirl.create(:snapshot, :vm_or_template => vm)
run_get("#{vms_url(vm.id)}/snapshots/#{snapshot.id}")
expect(response).to have_http_status(:forbidden)
end
end
describe "POST /api/vms/:c_id/snapshots" do
it "can queue the creation of a snapshot" do
api_basic_authorize(subcollection_action_identifier(:vms, :snapshots, :create))
ems = FactoryGirl.create(:ext_management_system)
host = FactoryGirl.create(:host, :ext_management_system => ems)
vm = FactoryGirl.create(:vm_vmware, :name => "Alice's VM", :host => host, :ext_management_system => ems)
run_post("#{vms_url(vm.id)}/snapshots", :name => "Alice's snapshot")
expected = {
"results" => [
a_hash_including(
"success" => true,
"message" => "Creating snapshot Alice's snapshot for Vm id:#{vm.id} name:'Alice's VM'",
"task_id" => anything,
"task_href" => a_string_matching(tasks_url)
)
]
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
it "renders a failed action response if snapshotting is not supported" do
api_basic_authorize(subcollection_action_identifier(:vms, :snapshots, :create))
vm = FactoryGirl.create(:vm_vmware)
run_post("#{vms_url(vm.id)}/snapshots", :name => "Alice's snapsnot")
expected = {
"results" => [
a_hash_including(
"success" => false,
"message" => "The VM is not connected to a Host"
)
]
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
it "renders a failed action response if a name is not provided" do
api_basic_authorize(subcollection_action_identifier(:vms, :snapshots, :create))
ems = FactoryGirl.create(:ext_management_system)
host = FactoryGirl.create(:host, :ext_management_system => ems)
vm = FactoryGirl.create(:vm_vmware, :name => "Alice's VM", :host => host, :ext_management_system => ems)
run_post("#{vms_url(vm.id)}/snapshots", :description => "Alice's snapshot")
expected = {
"results" => [
a_hash_including(
"success" => false,
"message" => "Must specify a name for the snapshot"
)
]
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
it "will not create a snapshot unless authorized" do
api_basic_authorize
vm = FactoryGirl.create(:vm_vmware)
run_post("#{vms_url(vm.id)}/snapshots", :description => "Alice's snapshot")
expect(response).to have_http_status(:forbidden)
end
end
describe "POST /api/vms/:c_id/snapshots/:s_id with delete action" do
it "can queue a snapshot for deletion" do
api_basic_authorize(action_identifier(:snapshots, :delete, :subresource_actions, :delete))
ems = FactoryGirl.create(:ext_management_system)
host = FactoryGirl.create(:host, :ext_management_system => ems)
vm = FactoryGirl.create(:vm_vmware, :name => "Alice's VM", :host => host, :ext_management_system => ems)
snapshot = FactoryGirl.create(:snapshot, :name => "Alice's snapshot", :vm_or_template => vm)
run_post("#{vms_url(vm.id)}/snapshots/#{snapshot.id}", :action => "delete")
expected = {
"message" => "Deleting snapshot Alice's snapshot for Vm id:#{vm.id} name:'Alice's VM'",
"success" => true,
"task_href" => a_string_matching(tasks_url),
"task_id" => anything
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
it "renders a failed action response if deleting is not supported" do
api_basic_authorize(action_identifier(:snapshots, :delete, :subresource_actions, :post))
vm = FactoryGirl.create(:vm_vmware)
snapshot = FactoryGirl.create(:snapshot, :vm_or_template => vm)
run_post("#{vms_url(vm.id)}/snapshots/#{snapshot.id}", :action => "delete")
expected = {
"success" => false,
"message" => "The VM is not connected to a Host"
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
it "will not delete a snapshot unless authorized" do
api_basic_authorize
vm = FactoryGirl.create(:vm_vmware)
snapshot = FactoryGirl.create(:snapshot, :vm_or_template => vm)
run_post("#{vms_url(vm.id)}/snapshots/#{snapshot.id}", :action => "delete")
expect(response).to have_http_status(:forbidden)
end
end
describe "POST /api/vms/:c_id/snapshots with delete action" do
it "can queue multiple snapshots for deletion" do
api_basic_authorize(action_identifier(:snapshots, :delete, :subresource_actions, :delete))
ems = FactoryGirl.create(:ext_management_system)
host = FactoryGirl.create(:host, :ext_management_system => ems)
vm = FactoryGirl.create(:vm_vmware, :name => "Alice and Bob's VM", :host => host, :ext_management_system => ems)
snapshot1 = FactoryGirl.create(:snapshot, :name => "Alice's snapshot", :vm_or_template => vm)
snapshot2 = FactoryGirl.create(:snapshot, :name => "Bob's snapshot", :vm_or_template => vm)
run_post(
"#{vms_url(vm.id)}/snapshots",
:action => "delete",
:resources => [
{:href => "#{vms_url(vm.id)}/snapshots/#{snapshot1.id}"},
{:href => "#{vms_url(vm.id)}/snapshots/#{snapshot2.id}"}
]
)
expected = {
"results" => a_collection_containing_exactly(
a_hash_including(
"message" => "Deleting snapshot Alice's snapshot for Vm id:#{vm.id} name:'Alice and Bob's VM'",
"success" => true,
"task_href" => a_string_matching(tasks_url),
"task_id" => anything
),
a_hash_including(
"message" => "Deleting snapshot Bob's snapshot for Vm id:#{vm.id} name:'Alice and Bob's VM'",
"success" => true,
"task_href" => a_string_matching(tasks_url),
"task_id" => anything
)
)
}
expect(response.parsed_body).to include(expected)
expect(response).to have_http_status(:ok)
end
end
describe "DELETE /api/vms/:c_id/snapshots/:s_id" do
it "can delete a snapshot" do
api_basic_authorize(action_identifier(:snapshots, :delete, :subresource_actions, :delete))
vm = FactoryGirl.create(:vm_vmware)
snapshot = FactoryGirl.create(:snapshot, :vm_or_template => vm)
run_delete("#{vms_url(vm.id)}/snapshots/#{snapshot.id}")
expect(response).to have_http_status(:no_content)
end
it "will not delete a snapshot unless authorized" do
api_basic_authorize
vm = FactoryGirl.create(:vm_vmware)
snapshot = FactoryGirl.create(:snapshot, :vm_or_template => vm)
run_delete("#{vms_url(vm.id)}/snapshots/#{snapshot.id}")
expect(response).to have_http_status(:forbidden)
end
end
end
end
|
require "spec_helper"
describe "User profiles" do
let(:user) { FactoryGirl.create(:stewie_with_profile) }
it "should display Stewie's profile" do
visit user_profile_path(user)
current_path.should == "/users/#{user.id}-#{user.name.parameterize}/profile"
page.should have_content(user.name)
end
context "edit", as: :site_user do
before do
visit edit_user_profile_path(current_user)
end
it "should upload a picture" do
attach_file "Picture", profile_photo_path
click_on "Save"
current_user.profile.picture.should be_true
end
it "should set the website address" do
fill_in "Website", with: "www.example.net"
click_on "Save"
current_user.profile.website.should == "http://www.example.net"
end
it "should set the biography" do
fill_in "About", with: lorem_ipsum
click_on "Save"
current_user.profile.about.should == lorem_ipsum
end
end
context "permissions" do
include_context "signed in as a site user"
it "should let you edit your own profile" do
visit edit_user_profile_path(current_user)
page.should have_content("Edit Profile")
end
it "should prevent you editing someone elses" do
visit edit_user_profile_path(user)
page.should have_content("You are not authorised to access that page.")
end
end
end
Tests for adding users to your groups
require "spec_helper"
describe "User profiles" do
let(:user) { FactoryGirl.create(:stewie_with_profile) }
it "should display Stewie's profile" do
visit user_profile_path(user)
current_path.should == "/users/#{user.id}-#{user.name.parameterize}/profile"
page.should have_content(user.name)
end
context "edit", as: :site_user do
before do
visit edit_user_profile_path(current_user)
end
it "should upload a picture" do
attach_file "Picture", profile_photo_path
click_on "Save"
current_user.profile.picture.should be_true
end
it "should set the website address" do
fill_in "Website", with: "www.example.net"
click_on "Save"
current_user.profile.website.should == "http://www.example.net"
end
it "should set the biography" do
fill_in "About", with: lorem_ipsum
click_on "Save"
current_user.profile.about.should == lorem_ipsum
end
end
context "permissions" do
include_context "signed in as a site user"
it "should let you edit your own profile" do
visit edit_user_profile_path(current_user)
page.should have_content("Edit Profile")
end
it "should prevent you editing someone elses" do
visit edit_user_profile_path(user)
page.should have_content("You are not authorised to access that page.")
end
end
context "adding to group" do
include_context "signed in as a committee member"
let(:user) { FactoryGirl.create(:meg) }
before do
visit user_profile_path(user)
end
it "should let you add the user to your group" do
page.should have_content("Add to group")
select "Member", from: "Membership type"
click_on "Invite member"
page.should have_content("Members of #{current_group.name}")
page.should have_content(user.name)
end
end
end
|
initial spec for /participants
require File.dirname(__FILE__) + '/../spec_helper'
describe "GET /_ruote/participants" do
describe "without any participant" do
it_has_an_engine_with_no_participants
it "should give an empty list (HTML)" do
get "/_ruote/participants"
last_response.should be_ok
end
it "should give an empty array (JSON)" do
get "/_ruote/participants.json"
last_response.should be_ok
body = last_response.json_body
body.should have_key("participants")
body["participants"].should be_empty
end
end
describe "with participant" do
it_has_an_engine
it "should give participant information back (HTML)" do
get "/_ruote/participants"
last_response.should be_ok
end
it "should give participant information back (JSON)" do
get "/_ruote/participants.json"
last_response.should be_ok
body = last_response.json_body
body["participants"].should_not be_empty
end
end
end
describe "GET /_ruote/participants/:name", :type => :with_engine do
describe "without registered participants" do
it_has_an_engine_with_no_participants
it "should 404 correctly (HTML)" do
get "/_ruote/participants/foo"
last_response.should_not be_ok
last_response.status.should be(404)
last_response.should match(/Resource not found/)
end
it "should 404 correctly (JSON)" do
get "/_ruote/participants/foo.json"
last_response.should_not be_ok
last_response.status.should be(404)
last_response.json_body.keys.should include("error")
last_response.json_body['error'].should == { "code" => 404, "message" => "Resource not found" }
end
end
describe "with registered participants" do
it_has_an_engine_with_no_participants
before :each do
@name = 'foo'
RuoteKit.engine.register do
participant @name, Ruote::NoOpParticipant
catchall Ruote::StorageParticipant
end
end
it "should give participant information back (HTML)" do
get "/_ruote/participants/#{@name}"
last_response.should be_ok
end
it "should give process information back (JSON)" do
get "/_ruote/participants/#{@name}.json"
last_response.should be_ok
body = last_response.json_body
body.should have_key('participant')
end
end
end |
require 'spec_helper'
RSpec.describe GamesService do
describe '#get_players_for' do
context 'elo points' do
let(:group) { FactoryGirl.create(:group) }
let(:players) do
[
FactoryGirl.create(:player, group: group, name: 'Richard'),
FactoryGirl.create(:player, group: group, name: 'Irenäus'),
FactoryGirl.create(:player, group: group, name: 'Alexander'),
FactoryGirl.create(:player, group: group, name: 'Christoph'),
FactoryGirl.create(:player, group: group, name: 'Philip'),
]
end
def create_game(pl1, pl2, pl3, pl4, g1, g2)
game = FactoryGirl.create(:game, status: 1)
game.teams << FactoryGirl.create(:team, game: game, players: [players[pl1], players[pl2]], goals: g1)
game.teams << FactoryGirl.create(:team, game: game, players: [players[pl3], players[pl4]], goals: g2)
game
end
it 'example 1' do
game = create_game 0,1,2,3, 5,3
GamesService.finish_game(game)
binding.pry
end
end
end
end
removed binding from spec
require 'spec_helper'
RSpec.describe GamesService do
describe '#get_players_for' do
context 'elo points' do
let(:group) { FactoryGirl.create(:group) }
let(:players) do
[
FactoryGirl.create(:player, group: group, name: 'Richard'),
FactoryGirl.create(:player, group: group, name: 'Irenäus'),
FactoryGirl.create(:player, group: group, name: 'Alexander'),
FactoryGirl.create(:player, group: group, name: 'Christoph'),
FactoryGirl.create(:player, group: group, name: 'Philip'),
]
end
def create_game(pl1, pl2, pl3, pl4, g1, g2)
game = FactoryGirl.create(:game, status: 1)
game.teams << FactoryGirl.create(:team, game: game, players: [players[pl1], players[pl2]], goals: g1)
game.teams << FactoryGirl.create(:team, game: game, players: [players[pl3], players[pl4]], goals: g2)
game
end
it 'example 1' do
game = create_game 0,1,2,3, 5,3
GamesService.finish_game(game)
#binding.pry
end
end
end
end
|
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
require File.join(File.dirname(__FILE__), '../spec_helper')
describe Astute::DeploymentEngine do
include SpecHelpers
class Engine < Astute::DeploymentEngine; end
let(:ctx) { mock_ctx }
let(:deployer) { Engine.new(ctx) }
describe '#attrs_ha' do
def only_controllers(nodes)
nodes.select { |node| node['role'] == 'controller' }
end
it 'should set last_controller' do
attrs = deployer.attrs_ha(Fixtures.ha_nodes, {})
attrs['last_controller'].should == only_controllers(Fixtures.ha_nodes).last['fqdn'].split(/\./)[0]
end
it 'should assign primary-controller role for first node' do
attrs = deployer.attrs_ha(Fixtures.ha_nodes, {})
primary = attrs['nodes'].select { |node| node['role'] == 'primary-controller' }[0]
primary.should_not be_nil
primary['fqdn'].should == only_controllers(Fixtures.ha_nodes).first['fqdn']
end
end
end
Fix review comments
* rename spec
* use find instead of select
* add spec for case if primary-controller already assigned
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
require File.join(File.dirname(__FILE__), '../spec_helper')
describe Astute::DeploymentEngine do
include SpecHelpers
class Engine < Astute::DeploymentEngine; end
let(:ctx) { mock_ctx }
let(:deployer) { Engine.new(ctx) }
describe '#attrs_ha' do
def only_controllers(nodes)
nodes.select { |node| node['role'] == 'controller' }
end
it 'should set last_controller' do
attrs = deployer.attrs_ha(Fixtures.ha_nodes, {})
attrs['last_controller'].should == only_controllers(Fixtures.ha_nodes).last['fqdn'].split(/\./)[0]
end
it 'should assign primary-controller role for first node if primary-controller not set directly' do
attrs = deployer.attrs_ha(Fixtures.ha_nodes, {})
primary = attrs['nodes'].find { |node| node['role'] == 'primary-controller' }
primary.should_not be_nil
primary['fqdn'].should == only_controllers(Fixtures.ha_nodes).first['fqdn']
end
it 'should not assign primary-controller role for first node if primary-controller set directly' do
nodes = Fixtures.ha_nodes
last_node = only_controllers(nodes).last
last_node['role'] = 'primary-controller'
attrs = deployer.attrs_ha(deep_copy(nodes), {})
primary = attrs['nodes'].select { |node| node['role'] == 'primary-controller' }
primary.length.should == 1
primary[0]['fqdn'].should == last_node['fqdn']
end
end
end
|
# encoding: utf-8
require 'spec_helper'
describe 'apache-hardening::hardening' do
let(:chef_run) { ChefSpec::SoloRunner.converge(described_recipe) }
before do
stub_command('/usr/sbin/apache2 -t')
stub_command("find /etc/apache2 -perm -o+r -type f -o -perm -o+w -type f | wc -l | egrep '^0$'")
end
it 'creates hardening.conf with correct permissions' do
expect(chef_run).to create_template(File.join(chef_run.node['apache']['dir'], '/conf-enabled/', 'hardening.conf')).with(
user: 'root',
group: chef_run.node['apache']['root_group'],
mode: '0640'
)
end
# Since we cannot test the definition apache_module itself, we have to test for a side effect to happen
# We check for the not present module file in mods_enabled
it 'disables specified apache modules' do
chef_run.node['apache_hardening']['modules_to_disable'].each do |module_to_disable|
expect(chef_run).to_not create_file_if_missing("#{chef_run.node['apache']['dir']}/mods-enabled/#{module_to_disable}.load")
end
end
it 'checks that the correct alias.conf.erb template is being used' do
chef_run.run_context.resource_collection.each do |resource|
next unless resource.name == "#{chef_run.node['apache']['dir']}/mods-available/alias.conf"
expect(resource.cookbook).to eq('apache-hardening')
end
end
it 'changes all the already created resources' do
it 'makes sure that it does not flap on o-rw' do
chef_run.run_context.resource_collection.each do |resource|
next unless resource.respond_to? :mode
expect(resource.mode).to eq('0640') if resource.name == 'apache2.conf'
expect(resource.mode).to eq('0640') if resource.name =~ /#{chef_run.node['apache']['dir']}/ && resource.mode == '0755'
expect(resource.mode).to eq('0750') if resource.name =~ /#{chef_run.node['apache']['dir']}/ && resource.mode == '0644'
end
end
it 'executes "remove world readable files"' do
stub_command("find /etc/apache2 -perm -o+r -type f -o -perm -o+w -type f | wc -l | egrep '^0$'").and_return(false)
expect(chef_run).to run_execute('remove world readable files')
end
it 'does not execute "remove world readable files"' do
stub_command("find /etc/apache2 -perm -o+r -type f -o -perm -o+w -type f | wc -l | egrep '^0$'").and_return(true)
expect(chef_run).to_not run_execute('remove world readable files')
end
end
be more verbose on what to test
TelekomLabs-DCO-1.1-Signed-off-by: Edmund Haselwanter <me@ehaselwanter.com> (github: ehaselwanter)
# encoding: utf-8
require 'spec_helper'
describe 'apache-hardening::hardening' do
let(:chef_run) { ChefSpec::SoloRunner.converge(described_recipe) }
before do
stub_command('/usr/sbin/apache2 -t')
stub_command("find /etc/apache2 -perm -o+r -type f -o -perm -o+w -type f | wc -l | egrep '^0$'")
end
it 'creates hardening.conf with correct permissions' do
expect(chef_run).to create_template(File.join(chef_run.node['apache']['dir'], '/conf-enabled/', 'hardening.conf')).with(
user: 'root',
group: chef_run.node['apache']['root_group'],
mode: '0640'
)
end
# Since we cannot test the definition apache_module itself, we have to test for a side effect to happen
# We check for the not present module file in mods_enabled
it 'disables specified apache modules' do
chef_run.node['apache_hardening']['modules_to_disable'].each do |module_to_disable|
expect(chef_run).to_not create_file_if_missing("#{chef_run.node['apache']['dir']}/mods-enabled/#{module_to_disable}.load")
end
end
it 'checks that the correct alias.conf.erb template is being used' do
chef_run.run_context.resource_collection.each do |resource|
next unless resource.name == "#{chef_run.node['apache']['dir']}/mods-available/alias.conf"
expect(resource.cookbook).to eq('apache-hardening')
end
end
it 'makes sure that it does not flap on o-rw' do
chef_run.run_context.resource_collection.each do |resource|
next unless resource.respond_to? :mode
expect(resource.mode).to eq('0640') if resource.name == 'apache2.conf'
expect(resource.mode).to eq('0640') if resource.name =~ /#{chef_run.node['apache']['dir']}/ && resource.mode == '0755'
expect(resource.mode).to eq('0750') if resource.name =~ /#{chef_run.node['apache']['dir']}/ && resource.mode == '0644'
end
end
it 'executes "remove world readable files"' do
stub_command("find /etc/apache2 -perm -o+r -type f -o -perm -o+w -type f | wc -l | egrep '^0$'").and_return(false)
expect(chef_run).to run_execute('remove world readable files')
end
it 'does not execute "remove world readable files"' do
stub_command("find /etc/apache2 -perm -o+r -type f -o -perm -o+w -type f | wc -l | egrep '^0$'").and_return(true)
expect(chef_run).to_not run_execute('remove world readable files')
end
end
|
#
# Copyright (C) 2011 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA. A copy of the GNU General Public License is
# also available at http://www.gnu.org/copyleft/gpl.html.
require 'spec_helper'
require 'aeolus_image'
require 'pp'
# Controller specs for the images API: XML responses for #index and #show.
# All warehouse lookups are stubbed so no image warehouse service is needed.
describe Api::ImagesController do
  render_views

  before(:each) do
    @admin_permission = FactoryGirl.create :admin_permission
    @admin = @admin_permission.user
    mock_warden(@admin)
  end

  context "XML format responses for " do
    before do
      send_and_accept_xml
    end

    describe "#index" do
      before do
        @os = mock(:OS, :name => 'fedora', :version => '15', :arch => 'x86_64')
        @image = mock(Aeolus::Image::Warehouse::Image,
                      :id => '5',
                      :os => @os,
                      :name => 'test',
                      :description => 'test image')
        Aeolus::Image::Warehouse::Image.stub(:all).and_return([@image])
        Aeolus::Image::Warehouse::ImageBuild.stub(:find_all_by_image_uuid).and_return([])
        get :index
      end

      it { response.should be_success }
      it { response.headers['Content-Type'].should include("application/xml") }
      it {
        resp = Hash.from_xml(response.body)
        resp['images']['image']['name'].should == @image.name
        resp['images']['image']['id'].should == @image.id
        resp['images']['image']['os'].should == @image.os.name
        resp['images']['image']['arch'].should == @image.os.arch
        resp['images']['image']['os_version'].should == @image.os.version
        resp['images']['image']['description'].should == @image.description
      }
    end

    describe "#show" do
      before do
        @os = mock(:OS, :name => 'fedora', :version => '15', :arch => 'x86_64')
        @image = mock(Aeolus::Image::Warehouse::Image,
                      :id => '5',
                      :os => @os,
                      :name => 'test',
                      :description => 'test image')
        Aeolus::Image::Warehouse::Image.stub(:find).and_return(@image)
        # FIX: stub find_all_by_image_uuid (the method #index also stubs), not
        # find_all_by_image_id -- stubbing the wrong method let the real
        # warehouse lookup run and fail two of the examples below.
        Aeolus::Image::Warehouse::ImageBuild.stub(:find_all_by_image_uuid).and_return([])
        get :show, :id => '5'
      end

      it { response.should be_success }
      it { response.headers['Content-Type'].should include("application/xml") }
      it {
        resp = Hash.from_xml(response.body)
        resp['image']['name'].should == @image.name
        resp['image']['id'].should == @image.id
        resp['image']['os'].should == @image.os.name
        resp['image']['arch'].should == @image.os.arch
        resp['image']['os_version'].should == @image.os.version
        resp['image']['description'].should == @image.description
      }
    end
  end
end
Fix two example failures in the API images#show spec by stubbing find_all_by_image_uuid (the method the controller actually calls) instead of find_all_by_image_id
#
# Copyright (C) 2011 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA. A copy of the GNU General Public License is
# also available at http://www.gnu.org/copyleft/gpl.html.
require 'spec_helper'
require 'aeolus_image'
require 'pp'
# Controller specs for the images API: XML responses for #index and #show.
# All warehouse lookups are stubbed so no image warehouse service is needed.
describe Api::ImagesController do
render_views
before(:each) do
@admin_permission = FactoryGirl.create :admin_permission
@admin = @admin_permission.user
mock_warden(@admin)
end
context "XML format responses for " do
before do
send_and_accept_xml
end
describe "#index" do
before do
@os = mock(:OS, :name => 'fedora', :version => '15', :arch => 'x86_64')
@image = mock(Aeolus::Image::Warehouse::Image,
:id => '5',
:os => @os,
:name => 'test',
:description => 'test image')
Aeolus::Image::Warehouse::Image.stub(:all).and_return([@image])
Aeolus::Image::Warehouse::ImageBuild.stub(:find_all_by_image_uuid).and_return([])
get :index
end
it { response.should be_success }
it { response.headers['Content-Type'].should include("application/xml") }
it {
resp = Hash.from_xml(response.body)
resp['images']['image']['name'].should == @image.name
resp['images']['image']['id'].should == @image.id
resp['images']['image']['os'].should == @image.os.name
resp['images']['image']['arch'].should == @image.os.arch
resp['images']['image']['os_version'].should == @image.os.version
resp['images']['image']['description'].should == @image.description
}
end
describe "#show" do
before do
@os = mock(:OS, :name => 'fedora', :version => '15', :arch => 'x86_64')
@image = mock(Aeolus::Image::Warehouse::Image,
:id => '5',
:os => @os,
:name => 'test',
:description => 'test image')
Aeolus::Image::Warehouse::Image.stub(:find).and_return(@image)
# Both actions stub find_all_by_image_uuid so no real build lookup runs.
Aeolus::Image::Warehouse::ImageBuild.stub(:find_all_by_image_uuid).and_return([])
get :show, :id => '5'
end
it { response.should be_success}
it { response.headers['Content-Type'].should include("application/xml") }
it {
resp = Hash.from_xml(response.body)
resp['image']['name'].should == @image.name
resp['image']['id'].should == @image.id
resp['image']['os'].should == @image.os.name
resp['image']['arch'].should == @image.os.arch
resp['image']['os_version'].should == @image.os.version
resp['image']['description'].should == @image.description
}
end
end
end
|
Import students from CSV files
Ed Birmingham provided CSV files with the students for the 2016-2017
school year. This script was written to load those students into the
network application.
require 'csv'
require 'set'
# Map the human-readable names found in the spreadsheet to database IDs.
# The IDs differ between the production database and dev/test, hence the
# environment branch.
if Rails.env.production?
cohorts = {
"Educate Local" => 14,
"GUB" => 3,
"AOE" => 6,
"AOHS" => 7,
"AOAC" => 4,
"AOUE" => 10,
"AOHT" => 11,
"AOBF" => 2
}
graduating_classes = {
"2022" => 7,
"2021" => 6,
"2020" => 5,
"2019" => 3,
"2018" => 2,
"2017" => 1
}
# NOTE(review): both "WJ Christan K-8" (typo) and "WJ Christian K-8" map to
# the same ID -- presumably to tolerate a misspelling in the source CSV.
schools = {
"Hudson K-8" => 12,
"Huffman MS" => 8,
"Ossie Ware MS" => 13,
"Phillips Academy" => 15,
"Wilkerson MS" => 10,
"WJ Christan K-8" => 14,
"Wylam K-8" => 9,
"WJ Christian K-8" => 14,
"Carver HS" => 6,
"Huffman HS" => 7,
"Jackson-Olin HS" => 4,
"Parker HS" => 5,
"Ramsay HS" => 1,
"Wenonah HS" => 3,
"Woodlawn HS" => 2
}
else
cohorts = {
"Educate Local" => 4,
"GUB" => 1,
"AOE" => 4,
"AOHS" => 2,
"AOAC" => 4,
"AOUE" => 3,
"AOHT" => 4,
"AOBF" => 4
}
graduating_classes = {
"2022" => 7,
"2021" => 6,
"2020" => 5,
"2019" => 4,
"2018" => 3,
"2017" => 2
}
schools = {
"Hudson K-8" => 3,
"Huffman MS" => 2,
"Ossie Ware MS" => 2,
"Phillips Academy" => 2,
"Wilkerson MS" => 2,
"WJ Christan K-8" => 2,
"Wylam K-8" => 2,
"WJ Christian K-8" => 2,
"Carver HS" => 1,
"Huffman HS" => 2,
"Jackson-Olin HS" => 2,
"Parker HS" => 2,
"Ramsay HS" => 4,
"Wenonah HS" => 2,
"Woodlawn HS" => 2
}
end
# Scratch set used by the commented-out validation pass below.
values = Set.new
# One CSV per grade level, expected under tmp/.
file_names = %w{ students_7th students_8th students_9th students_10th students_11th students_12th }
file_names.each do |file_name|
# index starts at 1 and is bumped before each create, so the first data row
# gets suffix 2 -- matching the row's position in the file counting headers.
index = 1
CSV.foreach("tmp/#{file_name}.csv", headers: true) do |row|
if row["School"].present?
index += 1
# mongo_id encodes file + row so re-running the import is traceable.
# "Graduating Class" cells look like "...-YYYY"; the year keys the ID map.
Member.create!(
first_name: row["First Name"],
last_name: row["Last Name"],
school_id: schools[row["School"]],
cohort_ids: [cohorts[row["Cohort"]]],
graduating_class_id: graduating_classes[row["Graduating Class"].split('-').last],
mongo_id: "csv.#{file_name}.#{index}"
)
# The following lines were a dry-run validation pass (report unmapped
# schools/classes/cohorts without writing); kept for future imports.
# values.add row["School"]
# puts row["School"] if schools[row["School"]].blank?
# values.add row["Graduating Class"].split('-').last
# puts row["Graduating Class"].split('-').last if graduating_classes[row["Graduating Class"].split('-').last].blank?
# values.add row["Cohort"]
# puts row["Cohort"] if cohorts[row["Cohort"]].blank?
# puts row["First Name"]
# puts row["Last Name"]
# puts "csv.#{file_name}.#{index}"
end
end
end
# coding: utf-8
# Gem manifest for delayed_job_sqs -- an Amazon SQS backend for delayed_job.
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "delayed/backend/version"

Gem::Specification.new do |spec|
  spec.require_paths = ["lib"]
  spec.name          = "delayed_job_sqs"
  spec.version       = Delayed::Backend::Sqs.version
  spec.authors       = ["Eric Hankinson", "Matthew Szenher"]
  spec.email         = ["eric.hankinson@gmail.com", "mszenher@mdsol.com"]
  spec.description   = "Amazon SQS backend for delayed_job"
  spec.summary       = "Amazon SQS backend for delayed_job"
  spec.homepage      = "https://github.com/kumichou/delayed_job_sqs"
  spec.license       = "MIT"

  # Package every git-tracked file; test/spec/features double as test_files.
  spec.files      = `git ls-files`.split($/)
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})

  spec.add_dependency "aws-sdk", ">= 1.11.1"
  spec.add_dependency "delayed_job", ">= 3.0.0"
  spec.add_development_dependency "rspec", "~> 2.7.0"
  spec.add_development_dependency "debugger", "1.6.0"
  spec.add_development_dependency "simplecov", "0.7.1"
end
use fake_sqs in testing
# coding: utf-8
# Gem manifest for delayed_job_sqs -- an Amazon SQS backend for delayed_job.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'delayed/backend/version'
Gem::Specification.new do |s|
s.require_paths = ["lib"]
s.name = "delayed_job_sqs"
s.version = Delayed::Backend::Sqs.version
s.authors = ["Eric Hankinson", "Matthew Szenher"]
s.email = ["eric.hankinson@gmail.com", "mszenher@mdsol.com"]
s.description = "Amazon SQS backend for delayed_job"
s.summary = "Amazon SQS backend for delayed_job"
s.homepage = "https://github.com/kumichou/delayed_job_sqs"
s.license = "MIT"
s.files = `git ls-files`.split($/)
s.test_files = s.files.grep(%r{^(test|spec|features)/})
s.add_dependency('aws-sdk', '>= 1.11.1')
s.add_dependency('delayed_job', '>= 3.0.0')
s.add_development_dependency('rspec', '~> 2.7.0')
s.add_development_dependency('debugger', '1.6.0')
s.add_development_dependency('simplecov', '0.7.1')
# fake_sqs provides a local in-process SQS double for the test suite.
s.add_development_dependency('fake_sqs', '0.1.0')
end
|
# Gem manifest for delayed_job_web -- a Sinatra-based web UI for delayed_job.
Gem::Specification.new do |gem|
  gem.name = "delayed_job_web"
  gem.version = "1.4.2"
  gem.author = "Erick Schmitt"
  gem.email = "ejschmitt@gmail.com"
  gem.homepage = "https://github.com/ejschmitt/delayed_job_web"
  gem.summary = "Web interface for delayed_job inspired by resque"
  gem.description = gem.summary
  gem.license = "MIT"
  gem.executables = ["delayed_job_web"]
  gem.files = [
    "Gemfile",
    "LICENSE.txt",
    "README.markdown",
    "Rakefile",
    "delayed_job_web.gemspec"
  ] + %x{ git ls-files }.split("\n").select { |d| d =~ %r{^(lib|test|bin)} }
  gem.extra_rdoc_files = [
    "LICENSE.txt",
    "README.markdown"
  ]
  gem.add_runtime_dependency "sinatra", [">= 1.4.4"]
  # FIX: pin rack-protection explicitly so dependency resolution picks up a
  # release containing the fix for CVE-2018-7212, rather than whatever an
  # older sinatra line would otherwise allow.
  gem.add_runtime_dependency "rack-protection", ["~> 1.5"]
  gem.add_runtime_dependency "activerecord", ["> 3.0.0"]
  gem.add_runtime_dependency "delayed_job", ["> 2.0.3"]
  gem.add_development_dependency "minitest", ["~> 4.2"]
  gem.add_development_dependency "rack-test", ["~> 0.6"]
  gem.add_development_dependency "rails", ["~> 4.0"]
end
Explicitly require latest 'rack-protection' gem to address CVE-2018-7212
# Gem manifest for delayed_job_web -- a Sinatra-based web UI for delayed_job.
Gem::Specification.new do |gem|
gem.name = "delayed_job_web"
gem.version = "1.4.2"
gem.author = "Erick Schmitt"
gem.email = "ejschmitt@gmail.com"
gem.homepage = "https://github.com/ejschmitt/delayed_job_web"
gem.summary = "Web interface for delayed_job inspired by resque"
gem.description = gem.summary
gem.license = "MIT"
gem.executables = ["delayed_job_web"]
gem.files = [
"Gemfile",
"LICENSE.txt",
"README.markdown",
"Rakefile",
"delayed_job_web.gemspec"
] + %x{ git ls-files }.split("\n").select { |d| d =~ %r{^(lib|test|bin)} }
gem.extra_rdoc_files = [
"LICENSE.txt",
"README.markdown"
]
gem.add_runtime_dependency "sinatra", [">= 1.4.4"]
# Explicit pin so resolution picks a rack-protection release covering CVE-2018-7212.
gem.add_runtime_dependency "rack-protection", ["~> 1.5"]
gem.add_runtime_dependency "activerecord", ["> 3.0.0"]
gem.add_runtime_dependency "delayed_job", ["> 2.0.3"]
gem.add_development_dependency "minitest", ["~> 4.2"]
gem.add_development_dependency "rack-test", ["~> 0.6"]
gem.add_development_dependency "rails", ["~> 4.0"]
end
|
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "delete_paranoid/version"

# Gem manifest for delete_paranoid (soft-delete for ActiveRecord objects).
Gem::Specification.new do |s|
  s.name        = "delete_paranoid"
  s.version     = DeleteParanoid::VERSION
  s.platform    = Gem::Platform::RUBY
  s.authors     = ["Ryan Sonnek"]
  s.email       = ["ryan@codecrate.com"]
  s.homepage    = "http://github.com/wireframe/delete_paranoid"
  s.summary     = %q{soft delete Rails ActiveRecord objects}
  s.description = %q{flag database records as deleted and hide them from subsequent queries}
  s.rubyforge_project = "delete_paranoid"

  # Allow CI to select the activerecord version via ENV['activerecord']:
  # 'stable'/unset => no constraint, 'x.y.z' => pessimistic pin, else >= 3.0.
  %w[activerecord].each do |lib|
    dep = case ENV[lib]
          when 'stable', nil then nil
          when /(\d+\.)+\d+/ then "~> " + ENV[lib].sub("#{lib}-", '')
          else ">= 3.0"
          end
    puts "#{lib} #{dep}"
    s.add_runtime_dependency(lib, dep)
  end
  # FIX: removed the unconditional add_runtime_dependency('activerecord', '>= 3.0')
  # that followed the loop -- it always added ">= 3.0" on top of the ENV-based
  # pin above, defeating the version selection entirely.

  s.add_development_dependency(%q<shoulda>, [">= 0"])
  s.add_development_dependency(%q<mocha>, [">= 0"])
  s.add_development_dependency(%q<bundler>, [">= 0"])
  s.add_development_dependency(%q<sqlite3-ruby>, ["~> 1.3.2"])
  s.add_development_dependency(%q<timecop>, [">= 0"])
  s.add_development_dependency(%q<rake>, [">= 0.9.2.2"])

  s.files         = `git ls-files`.split("\n")
  s.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables   = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ["lib"]
end
Was accidentally always including activerecord >= 3.0, overriding the ENV-selected version.
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "delete_paranoid/version"
# Gem manifest for delete_paranoid (soft-delete for ActiveRecord objects).
Gem::Specification.new do |s|
s.name = "delete_paranoid"
s.version = DeleteParanoid::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ["Ryan Sonnek"]
s.email = ["ryan@codecrate.com"]
s.homepage = "http://github.com/wireframe/delete_paranoid"
s.summary = %q{soft delete Rails ActiveRecord objects}
s.description = %q{flag database records as deleted and hide them from subsequent queries}
s.rubyforge_project = "delete_paranoid"
# Select the activerecord version via ENV['activerecord']:
# 'stable'/unset => no constraint, 'x.y.z' => pessimistic pin, else >= 3.0.
%w[activerecord].each do |lib|
dep = case ENV[lib]
when 'stable', nil then nil
when /(\d+\.)+\d+/ then "~> " + ENV[lib].sub("#{lib}-", '')
else ">= 3.0"
end
puts "#{lib} #{dep}"
s.add_runtime_dependency(lib, dep)
end
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<mocha>, [">= 0"])
s.add_development_dependency(%q<bundler>, [">= 0"])
s.add_development_dependency(%q<sqlite3-ruby>, ["~> 1.3.2"])
s.add_development_dependency(%q<timecop>, [">= 0"])
s.add_development_dependency(%q<rake>, [">= 0.9.2.2"])
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.require_paths = ["lib"]
end
|
require 'nokogiri' # NOTE(review): nokogiri is not referenced in this script; candidate for removal
require 'csv'
require 'json'
# FIX: dropped `require 'ruby-progressbar'` -- nothing in this script uses it.
# Extract the leading run of digits from the first element of `input`
# (e.g. ["1850-06"] -> 1850) as an Integer; returns 0 when there is no
# leading digit.
def normalizeDate(input)
  raw = input.first
  raw.slice(/(\d*)(\D|$)/, 1).to_i
end
# Collapse a backslash-delimited topic string to at most two levels:
# a single topic is returned unchanged, 2-9 levels become "top - sub",
# and anything deeper yields nil.
def cleanTopic(input)
  topics = input.split("\\")
  if topics.count == 1
    topics[0]
  elsif (2...10).cover?(topics.count)
    "#{topics[0]} - #{topics[1]}"
  end
end
INPUT = "output.json"
OUTPUT_ONE = "1800-1849.csv"
OUTPUT_TWO = "1850-1899.csv"
OUTPUT_THREE = "1900-1935.csv"
OUTPUT_FOUR = "1936-1965.csv"
OUTPUT_FIVE = "1965-2011.csv"

raw_data = JSON.parse(File.read(INPUT))
list_header = ["source", "target", "label", "type", "date"]
first_pass = []

# Create full edge list: one undirected edge per pair of topics that
# co-occur on a record, labelled "Title - id" and stamped with the year.
raw_data.each do |id, data|
  date = normalizeDate(data["Date"]) unless data["Date"].nil?
  label = "#{data["Title"]} - #{id}"
  unless data["Topic"].nil?
    data["Topic"].combination(2).each do |edge|
      first_pass << [edge[0], edge[1], label, "Undirected", date]
    end
  end
end

# Initialize CSV files
one = CSV.open(OUTPUT_ONE, "w")
one << list_header
two = CSV.open(OUTPUT_TWO, "w")
two << list_header
three = CSV.open(OUTPUT_THREE, "w")
three << list_header
four = CSV.open(OUTPUT_FOUR, "w")
four << list_header
five = CSV.open(OUTPUT_FIVE, "w")
five << list_header

# Bucket each edge into its period file by year.
# FIX: use inclusive ranges (..) -- the previous exclusive ranges (...)
# silently dropped the boundary years 1849, 1899, 1935, 1965 and 2011
# (they fell through to the empty else branch).
first_pass.each do |entry|
  case entry[4]
  when 1800..1849 then one << entry
  when 1850..1899 then two << entry
  when 1900..1935 then three << entry
  when 1936..1965 then four << entry
  when 1966..2011 then five << entry
  end
end

# FIX: close the files explicitly so buffered rows are flushed to disk.
[one, two, three, four, five].each(&:close)
remove ruby-progressbar requirement
require 'nokogiri'
require 'csv'
require 'json'
# Extract the leading run of digits from the first element of `input`
# (e.g. ["1850-06"] -> 1850) as an Integer; 0 when there is no leading digit.
def normalizeDate(input)
return input.first.slice(/(\d*)(\D|$)/,1).to_i
end
# Collapse a backslash-delimited topic string to at most two levels:
# one topic is returned unchanged, 2-9 levels become "top - sub",
# anything deeper falls through to the empty else and yields nil.
def cleanTopic(input)
topics = input.split("\\")
case topics.count
when 1
return topics[0]
when 2...10
return "#{topics[0]} - #{topics[1]}"
else
end
end
INPUT = "output.json"
OUTPUT_ONE = "1800-1849.csv"
OUTPUT_TWO = "1850-1899.csv"
OUTPUT_THREE = "1900-1935.csv"
OUTPUT_FOUR = "1936-1965.csv"
OUTPUT_FIVE = "1965-2011.csv"

raw_data = JSON.parse(File.read(INPUT))
list_header = ["source", "target", "label", "type", "date"]
first_pass = []

# Create full edge list: one undirected edge per pair of topics that
# co-occur on a record, labelled "Title - id" and stamped with the year.
raw_data.each do |id, data|
  date = normalizeDate(data["Date"]) unless data["Date"].nil?
  label = "#{data["Title"]} - #{id}"
  unless data["Topic"].nil?
    data["Topic"].combination(2).each do |edge|
      first_pass << [edge[0], edge[1], label, "Undirected", date]
    end
  end
end

# Initialize CSV files
one = CSV.open(OUTPUT_ONE, "w")
one << list_header
two = CSV.open(OUTPUT_TWO, "w")
two << list_header
three = CSV.open(OUTPUT_THREE, "w")
three << list_header
four = CSV.open(OUTPUT_FOUR, "w")
four << list_header
five = CSV.open(OUTPUT_FIVE, "w")
five << list_header

# Bucket each edge into its period file by year.
# FIX: use inclusive ranges (..) -- the previous exclusive ranges (...)
# silently dropped the boundary years 1849, 1899, 1935, 1965 and 2011
# (they fell through to the empty else branch).
first_pass.each do |entry|
  case entry[4]
  when 1800..1849 then one << entry
  when 1850..1899 then two << entry
  when 1900..1935 then three << entry
  when 1936..1965 then four << entry
  when 1966..2011 then five << entry
  end
end

# FIX: close the files explicitly so buffered rows are flushed to disk.
[one, two, three, four, five].each(&:close)
|
# frozen_string_literal: true

# Lens for date ranges e.g. for reports.
class DateRangeLens < Lens::SelectLens
  param_name :dates
  i18n_key "date_range_lens"
  attr_accessor :pairs, :range_builder

  # FIX: build range_builder BEFORE calling super. The parent initializer
  # evaluates the lens options (which invokes possible_options below), so the
  # old order -- super first -- hit a nil range_builder. Taking `options:` as
  # a keyword makes it readable here before super runs; `super` with no
  # arguments forwards options: and the remaining args unchanged.
  def initialize(options:, **args)
    self.range_builder = DateRangeBuilder.new(max_range: [options[:min_date], Time.zone.today])
    super
  end

  # The selected range as a Date Range, or false/nil when nothing is selected.
  def range
    value.present? && Range.new(*value.split("-").map { |s| Date.parse(s) })
  end

  private

  # Past 12 months (default, not in options array)
  # This year
  # 2017
  # 2016
  # 2015
  # 2014
  # This quarter
  # 2018 Q1
  # 2017 Q4
  # 2017 Q3
  # 2017 Q2
  # All time
  def possible_options
    range_builder.add_years
    range_builder.add_quarters(5)
    range_builder.add_all_time
    [:past_12].concat(range_builder.pairs)
  end
end
Fix meal report lens bug
# frozen_string_literal: true
# Lens for date ranges e.g. for reports.
class DateRangeLens < Lens::SelectLens
param_name :dates
i18n_key "date_range_lens"
attr_accessor :pairs, :range_builder
# range_builder must exist before super runs -- NOTE(review): the parent
# initializer appears to evaluate possible_options, which reads it.
def initialize(options:, **args)
self.range_builder = DateRangeBuilder.new(max_range: [options[:min_date], Time.zone.today])
super
end
# The selected range as a Date Range, or false/nil when nothing is selected.
def range
value.present? && Range.new(*value.split("-").map { |s| Date.parse(s) })
end
private
# Past 12 months (default, not in options array)
# This year
# 2017
# 2016
# 2015
# 2014
# This quarter
# 2018 Q1
# 2017 Q4
# 2017 Q3
# 2017 Q2
# All time
def possible_options
range_builder.add_years
range_builder.add_quarters(5)
range_builder.add_all_time
[:past_12].concat(range_builder.pairs)
end
end
|
# Works out how much of a court fee an applicant must pay, either from a
# declared income amount or from pre-computed threshold-exceeded flags.
class IncomeCalculation
  def initialize(application, income = nil)
    @application = application
    @income = income
    set_outcome(nil, nil)
  end

  # Returns a hash with :outcome ('none'/'part'/'full' or nil),
  # :amount_to_pay and the thresholds used, or nil when required
  # inputs are missing.
  def calculate
    return unless calculation_inputs_present?
    if income
      calculate_using_amount
    elsif thresholds_used?
      calculate_using_thresholds
    end
    return_outcome_and_amount_and_thresholds
  end

  private

  # All of: a children count, the fee, a definite married flag, and either an
  # income figure or threshold flags must be present.
  def calculation_inputs_present?
    inputs = [children, @application.detail.fee, !@application.applicant.married.nil?]
    inputs << (income || thresholds_used?)
    inputs.all?
  end

  def calculate_using_amount
    fee = @application.detail.fee
    if income > thresholds.max_threshold
      set_outcome('none', fee)
    elsif applicants_maximum_contribution.zero?
      set_outcome('full', 0)
    elsif applicants_contribution_is_partial
      set_outcome('part', minimum_payable_to_applicant.to_i)
    elsif minimum_payable_to_applicant == fee
      set_outcome('none', fee)
    end
  end

  def calculate_using_thresholds
    if max_threshold_exceeded
      set_outcome('none', @application.detail.fee)
    elsif min_threshold_exceeded == false
      set_outcome('full', 0)
    end
  end

  def set_outcome(outcome, amount)
    @outcome = outcome
    @amount = amount
  end

  def thresholds_used?
    !min_threshold_exceeded.nil? || !max_threshold_exceeded.nil?
  end

  def children
    @application.children || 0
  end

  def return_outcome_and_amount_and_thresholds
    {
      outcome: @outcome,
      amount_to_pay: @amount,
      min_threshold: thresholds.min_threshold,
      max_threshold: thresholds.max_threshold
    }
  end

  def income
    @income ||= @application.income
  end

  def min_threshold_exceeded
    @application.income_min_threshold_exceeded
  end

  def max_threshold_exceeded
    @application.income_max_threshold_exceeded
  end

  # Half of the income above the minimum threshold, rounded down to a 10.
  def applicants_maximum_contribution
    round_down_to_nearest_10(income - thresholds.min_threshold) * 0.5
  end

  def thresholds
    @thresholds ||= IncomeThresholds.new(@application.applicant.married?, children)
  end

  def applicants_contribution_is_partial
    applicants_maximum_contribution.positive? &&
      applicants_maximum_contribution < @application.detail.fee
  end

  def minimum_payable_to_applicant
    [applicants_maximum_contribution, @application.detail.fee].min
  end

  def round_down_to_nearest_10(amount)
    return 0 if amount < 0
    nearest = amount.round(-1)
    nearest <= amount ? nearest : nearest - 10
  end
end
Refactoring.
# Works out how much of a court fee an applicant must pay, either from a
# declared income amount or from pre-computed threshold-exceeded flags.
class IncomeCalculation
def initialize(application, income = nil)
@application = application
@income = income
set_outcome(nil, nil)
end
# Returns a hash with :outcome ('none'/'part'/'full' or nil), :amount_to_pay
# and the thresholds used, or nil when required inputs are missing.
def calculate
if calculation_inputs_present?
if income
calculate_using_amount
elsif thresholds_used?
calculate_using_thresholds
end
return_outcome_and_amount_and_thresholds
end
end
private
# All of: children count, fee, a definite married flag, and either an income
# figure or threshold flags must be present.
def calculation_inputs_present?
[
children,
@application.detail.fee,
!@application.applicant.married.nil?,
income || thresholds_used?
].all?
end
def calculate_using_amount
if can_applicant_pay_full_fee?
set_outcome('none', @application.detail.fee)
elsif applicants_maximum_contribution.zero?
set_outcome('full', 0)
elsif applicants_contribution_is_partial
set_outcome('part', minimum_payable_to_applicant.to_i)
end
end
def can_applicant_pay_full_fee?
income > thresholds.max_threshold ||
minimum_payable_to_applicant == @application.detail.fee
end
def calculate_using_thresholds
if max_threshold_exceeded
set_outcome('none', @application.detail.fee)
elsif min_threshold_exceeded == false
set_outcome('full', 0)
end
end
def set_outcome(outcome, amount)
@outcome = outcome
@amount = amount
end
def thresholds_used?
!min_threshold_exceeded.nil? || !max_threshold_exceeded.nil?
end
def children
@application.children || 0
end
def return_outcome_and_amount_and_thresholds
{
outcome: @outcome,
amount_to_pay: @amount,
min_threshold: thresholds.min_threshold,
max_threshold: thresholds.max_threshold
}
end
def income
@income ||= @application.income
end
def min_threshold_exceeded
@application.income_min_threshold_exceeded
end
def max_threshold_exceeded
@application.income_max_threshold_exceeded
end
# Half of the income above the minimum threshold, rounded down to a 10.
def applicants_maximum_contribution
round_down_to_nearest_10((income - thresholds.min_threshold)) * 0.5
end
def thresholds
@thresholds ||= IncomeThresholds.new(@application.applicant.married?, children)
end
def applicants_contribution_is_partial
applicants_maximum_contribution.positive? &&
applicants_maximum_contribution < @application.detail.fee
end
def minimum_payable_to_applicant
[applicants_maximum_contribution, @application.detail.fee].min
end
def round_down_to_nearest_10(amount)
return 0 if amount.negative?
rounded = amount.round(-1)
return rounded if rounded <= amount
rounded - 10
end
end
|
# frozen_string_literal: true
require "digest"
# An IP-based block on anonymous interactions, optionally scoped to a question.
class AnonymousBlock < ApplicationRecord
  # FIX: user is optional so blocks can exist without an owner.
  belongs_to :user, optional: true
  belongs_to :question, optional: true

  # Stable pseudonymous identifier for an IP: SHA2-512 of app secret + IP,
  # so raw addresses never have to be stored or compared directly.
  def self.get_identifier(ip)
    Digest::SHA2.new(512).hexdigest(Rails.application.secret_key_base + ip)
  end
end
Allow anonymous blocks without an owner
# frozen_string_literal: true
require "digest"
# An IP-based block on anonymous interactions, optionally scoped to a question.
class AnonymousBlock < ApplicationRecord
# Optional owner: blocks may exist without a user.
belongs_to :user, optional: true
belongs_to :question, optional: true
# Stable pseudonymous identifier for an IP: SHA2-512 of app secret + IP.
def self.get_identifier(ip)
Digest::SHA2.new(512).hexdigest(Rails.application.secret_key_base + ip)
end
end
|
require 'active_support/time_with_zone'
class AssignmentForm
# Form object state: the assignment plus its questionnaires, due dates and
# tag-prompt deployments edited together on the create/edit screens.
attr_accessor :assignment, :assignment_questionnaires, :due_dates, :tag_prompt_deployments
# Aggregated error string populated by the update_* methods below.
attr_accessor :errors
# Teams default to a single member unless the form specifies otherwise.
DEFAULT_MAX_TEAM_SIZE = 1
# Builds the form. Without :assignment attributes, sets up a fresh assignment
# tied to the course in :parent_id (instructor inherited from that course).
def initialize(args = {})
@assignment = Assignment.new(args[:assignment])
if args[:assignment].nil?
@assignment.course = Course.find(args[:parent_id]) if args[:parent_id]
@assignment.instructor = @assignment.course.instructor if @assignment.course
@assignment.max_team_size = DEFAULT_MAX_TEAM_SIZE
end
# Keep the legacy num_review_of_reviews column in sync with metareviews.
@assignment.num_review_of_reviews = @assignment.num_metareviews_allowed
@assignment_questionnaires = Array(args[:assignment_questionnaires])
@due_dates = Array(args[:due_dates])
end
# create a form object for this assignment_id
# Loads an existing assignment plus its questionnaires, due dates and tag
# prompt deployments into a ready-to-render form object.
def self.create_form_object(assignment_id)
assignment_form = AssignmentForm.new
assignment_form.assignment = Assignment.find(assignment_id)
assignment_form.assignment_questionnaires = AssignmentQuestionnaire.where(assignment_id: assignment_id)
assignment_form.due_dates = AssignmentDueDate.where(parent_id: assignment_id)
assignment_form.set_up_assignment_review
assignment_form.tag_prompt_deployments = TagPromptDeployment.where(assignment_id: assignment_id)
assignment_form
end
# Applies a nested attributes hash (assignment, questionnaires, due dates,
# badges, simicheck, tag prompts) in order, short-circuiting later steps once
# an error occurs. Returns true when everything saved.
def update(attributes, user)
@has_errors = false
has_late_policy = false
# A positive late_policy_id enables reminder-job scheduling; otherwise the
# column is cleared so no stale policy remains.
if attributes[:assignment][:late_policy_id].to_i > 0
has_late_policy = true
else
attributes[:assignment][:late_policy_id] = nil
end
update_assignment(attributes[:assignment])
update_assignment_questionnaires(attributes[:assignment_questionnaire]) unless @has_errors
update_due_dates(attributes[:due_date], user) unless @has_errors
update_assigned_badges(attributes[:badge], attributes[:assignment]) unless @has_errors
set_badge_threshold_for_assignment(attributes[:assignment][:id], attributes[:badge]) if @assignment.has_badge?
add_simicheck_to_delayed_queue(attributes[:assignment][:simicheck])
# delete the old queued items and recreate new ones if the assignment has late policy.
if attributes[:due_date] and !@has_errors and has_late_policy
delete_from_delayed_queue
add_to_delayed_queue
end
update_tag_prompt_deployments(attributes[:tag_prompt_deployments])
!@has_errors
end
# Legacy alias kept for callers using the ActiveRecord-style name.
alias update_attributes update
# Code to update values of assignment
# Saves the assignment's own attributes; on failure records the error string
# and flips @has_errors so later update steps are skipped.
def update_assignment(attributes)
unless @assignment.update_attributes(attributes)
@errors = @assignment.errors.to_s
@has_errors = true
end
# Mirror the *_allowed columns into the legacy counters after every save.
@assignment.num_review_of_reviews = @assignment.num_metareviews_allowed
@assignment.num_reviews = @assignment.num_reviews_allowed
end
# code to save assignment questionnaires
# Replaces the AssignmentQuestionnaire mappings for this assignment with the
# submitted set. Returns false (no-op) when nothing was submitted.
def update_assignment_questionnaires(attributes)
  return false unless attributes
  # Drop the stale mappings before re-creating/updating the submitted ones.
  existing_aqs = AssignmentQuestionnaire.where(assignment_id: @assignment.id)
  existing_aqs.each(&:delete)
  attributes.each do |assignment_questionnaire|
    if assignment_questionnaire[:id].nil? or assignment_questionnaire[:id].blank?
      aq = AssignmentQuestionnaire.new(assignment_questionnaire)
      unless aq.save
        # FIX: report the failing record's own errors -- @assignment.errors is
        # empty when an AssignmentQuestionnaire save fails, so the old code
        # surfaced a blank message.
        @errors = aq.errors.to_s
        @has_errors = true
      end
    else
      aq = AssignmentQuestionnaire.find(assignment_questionnaire[:id])
      unless aq.update_attributes(assignment_questionnaire)
        @errors = aq.errors.to_s
        @has_errors = true
      end
    end
  end
end
# s required by answer tagging
# Syncs TagPromptDeployment rows from the submitted form data: deletes rows
# listed under 'deleted', then creates/updates one row per submitted tag
# prompt (keyed by questionnaire id).
def update_tag_prompt_deployments(attributes)
unless attributes.nil?
attributes.each do |key, value|
TagPromptDeployment.where(id: value['deleted']).delete_all if value.key?('deleted')
# assume if tag_prompt is there, then id, question_type, answer_length_threshold must also be there since the inputs are coupled
next unless value.key?('tag_prompt')
for i in 0..value['tag_prompt'].count - 1
tag_dep = nil
# Browser-side forms submit "undefined"/"null" strings for new rows.
if !(value['id'][i] == "undefined" or value['id'][i] == "null" or value['id'][i].nil?)
tag_dep = TagPromptDeployment.find(value['id'][i])
if tag_dep
tag_dep.update(assignment_id: @assignment.id,
questionnaire_id: key,
tag_prompt_id: value['tag_prompt'][i],
question_type: value['question_type'][i],
answer_length_threshold: value['answer_length_threshold'][i])
end
else
tag_dep = TagPromptDeployment.new(assignment_id: @assignment.id,
questionnaire_id: key,
tag_prompt_id: value['tag_prompt'][i],
question_type: value['question_type'][i],
answer_length_threshold: value['answer_length_threshold'][i]).save
end
end
end
end
end
# end required by answer tagging
# code to save due dates
# Creates/updates AssignmentDueDate rows. The first 16 chars of each due_at
# ("YYYY-MM-DD HH:MM") are interpreted in the user's preferred timezone and
# converted to UTC before saving.
def update_due_dates(attributes, user)
return false unless attributes
attributes.each do |due_date|
next if due_date[:due_at].blank?
# parse the dd and convert it to utc before saving it to db
# eg. 2015-06-22 12:05:00 -0400
current_local_time = Time.parse(due_date[:due_at][0..15])
tz = ActiveSupport::TimeZone[user.timezonepref].tzinfo
utc_time = tz.local_to_utc(Time.local(current_local_time.year,
current_local_time.month,
current_local_time.day,
current_local_time.strftime('%H').to_i,
current_local_time.strftime('%M').to_i,
current_local_time.strftime('%S').to_i))
due_date[:due_at] = utc_time
if due_date[:id].nil? or due_date[:id].blank?
dd = AssignmentDueDate.new(due_date)
@has_errors = true unless dd.save
else
dd = AssignmentDueDate.find(due_date[:id])
# get deadline for review
@has_errors = true unless dd.update_attributes(due_date)
end
# NOTE(review): @errors can still be nil here when this is the first failing
# step, in which case += would raise NoMethodError -- confirm callers.
# Also, the message comes from @assignment, not the failing due date row.
@errors += @assignment.errors.to_s if @has_errors
end
end
# Adds badges to assignment badges table
# Removes assignment-badge rows not present in the submitted badge[:id] list,
# then (re-)creates a row for every submitted badge id.
def update_assigned_badges(badge, assignment)
assigned_badges = AssignmentBadge.where(assignment_id: assignment[:id]).select(:id).to_a
assigned_badges.each do |assigned_badge|
AssignmentBadge.delete(assigned_badge.id) unless badge[:id].include?(assigned_badge.id)
end
# badge_id arrives as a nested array; badge_id[0] is the actual id.
badge[:id].each do |badge_id|
AssignmentBadge.create_badge_without_threshold(badge_id[0], assignment[:id])
end
end
# Adds items to delayed_jobs queue for this assignment
# Schedules a reminder mail ahead of each due date (skipping deadlines whose
# reminder window has already passed), plus follow-up jobs for review and
# team_formation deadlines.
def add_to_delayed_queue
duedates = AssignmentDueDate.where(parent_id: @assignment.id)
duedates.each do |due_date|
deadline_type = DeadlineType.find(due_date.deadline_type_id).name
diff_btw_time_left_and_threshold, min_left = get_time_diff_btw_due_date_and_now(due_date)
next unless diff_btw_time_left_and_threshold > 0
delayed_job = add_delayed_job(@assignment, deadline_type, due_date, diff_btw_time_left_and_threshold)
# Remember the job id so the reminder can be cancelled if dates change.
due_date.update_attribute(:delayed_job_id, delayed_job.id)
# If the deadline type is review, add a delayed job to drop outstanding review
add_delayed_job(@assignment, "drop_outstanding_reviews", due_date, min_left) if deadline_type == "review"
# If the deadline type is team_formation, add a delayed job to drop one member team
next unless deadline_type == "team_formation" and @assignment.team_assignment?
add_delayed_job(@assignment, "drop_one_member_topics", due_date, min_left)
end
end
# Returns [minutes_until_due - reminder_threshold_in_minutes, minutes_until_due].
# The first value is positive only while the reminder moment is still ahead
# (due_date.threshold is multiplied by 60 -- NOTE(review): presumably hours).
def get_time_diff_btw_due_date_and_now(due_date)
  # FIX: dropped the duplicated, discarded Time.parse call -- the original
  # parsed the same timestamp twice and threw the first result away.
  due_at = Time.parse(due_date.due_at.to_s(:db))
  time_left_in_min = find_min_from_now(due_at)
  diff_btw_time_left_and_threshold = time_left_in_min - due_date.threshold * 60
  [diff_btw_time_left_and_threshold, time_left_in_min]
end
# add DelayedJob into queue and return it
# Enqueues a DelayedMailer to fire min_left minutes from now, relabels its
# audit-log row, and returns the job.
def add_delayed_job(assignment, deadline_type, due_date, min_left)
delayed_job = DelayedJob.enqueue(DelayedMailer.new(assignment.id, deadline_type, due_date.due_at.to_s(:db)),
1, min_left.minutes.from_now)
change_item_type(delayed_job.id)
delayed_job
end
# Deletes the job with id equal to "delayed_job_id" from the delayed_jobs queue
# Removes every queued delayed job whose serialized handler references this
# assignment (reminder mails, simicheck comparisons, ...).
def delete_from_delayed_queue
  djobs = Delayed::Job.where(['handler LIKE "%assignment_id: ?%"', @assignment.id])
  # FIX: replaced the `for` loop (leaks the loop variable) with each, and
  # dropped the redundant nil check -- iteration never yields nil.
  djobs.each do |dj|
    dj.delete unless dj.id.nil?
  end
end
# Change the item_type displayed in the log
# Relabels the version/audit row of a freshly enqueued job so the log shows
# "DelayedMailer" rather than the raw ActiveRecord job class.
def change_item_type(delayed_job_id)
  log = Version.find_by(item_type: "Delayed::Backend::ActiveRecord::Job", item_id: delayed_job_id)
  # FIX: guard against a missing Version row (e.g. versioning disabled)
  # instead of raising NoMethodError on nil.
  log.update_attribute(:item_type, "DelayedMailer") if log # Change the item type in the log
end
# Deletes the assignment (force forwarded to Assignment#delete) after first
# cancelling all of its queued delayed jobs.
def delete(force = nil)
# delete from delayed_jobs queue related to this assignment
delete_from_delayed_queue
@assignment.delete(force)
end
# This functions finds the epoch time in seconds of the due_at parameter and finds the difference of it
# from the current time and returns this difference in minutes
# (negative when due_at is already in the past).
def find_min_from_now(due_at)
  # FIX: removed the `zone = 'UTC'` assignment-inside-argument-list (the local
  # was never used) and the leftover `time_in_min = 1` debug comment.
  curr_time = Time.parse(DateTime.now.in_time_zone('UTC').to_s(:db))
  ((due_at - curr_time).to_i / 60)
end
# Save the assignment
# Persists the wrapped assignment; returns the ActiveRecord save result.
def save
@assignment.save
end
# create a node for the assignment
# Creates the tree node used by the course/assignment navigation hierarchy.
def create_assignment_node
@assignment.create_node unless @assignment.nil?
end
# NOTE: many of these functions actually belongs to other models
#====setup methods for new and edit method=====#
# Normalizes legacy/bad DB data on the assignment before the edit form renders.
def set_up_assignment_review
  set_up_defaults
  # FIX: dropped the unused `submissions`/`reviews` locals -- the
  # find_due_dates results were assigned but never read.
  # Blank directory paths are stored inconsistently; normalize '' to nil.
  # NOTE(review): a nil directory_path would raise here -- confirm the column
  # is non-null in practice.
  @assignment.directory_path = nil if @assignment.directory_path.empty?
end
# Each helper below backfills a nil legacy column with its default value.
def staggered_deadline
@assignment.staggered_deadline = false if @assignment.staggered_deadline.nil?
end
def availability_flag
@assignment.availability_flag = false if @assignment.availability_flag.nil?
end
def micro_task
@assignment.microtask = false if @assignment.microtask.nil?
end
def reviews_visible_to_all
@assignment.reviews_visible_to_all = false if @assignment.reviews_visible_to_all.nil?
end
def review_assignment_strategy
@assignment.review_assignment_strategy = '' if @assignment.review_assignment_strategy.nil?
end
# Quiz support defaults to off with zero questions.
def require_quiz
if @assignment.require_quiz.nil?
@assignment.require_quiz = false
@assignment.num_quiz_questions = 0
end
end
# NOTE: unfortunately this method is needed due to bad data in db @_@
def set_up_defaults
staggered_deadline
availability_flag
micro_task
reviews_visible_to_all
review_assignment_strategy
require_quiz
end
def add_simicheck_to_delayed_queue(simicheck_delay)
delete_from_delayed_queue
if simicheck_delay.to_i >= 0
duedates = AssignmentDueDate.where(parent_id: @assignment.id)
duedates.each do |due_date|
next if DeadlineType.find(due_date.deadline_type_id).name != "submission"
change_item_type(enqueue_simicheck_task(due_date, simicheck_delay).id)
end
end
end
def enqueue_simicheck_task(due_date, simicheck_delay)
DelayedJob.enqueue(DelayedMailer.new(@assignment.id, "compare_files_with_simicheck", due_date.due_at.to_s(:db)),
1, find_min_from_now(Time.parse(due_date.due_at.to_s(:db)) + simicheck_delay.to_i.hours).minutes.from_now)
end
# Copies the inputted assignment into new one and returns the new assignment id
def self.copy(assignment_id, user)
Assignment.record_timestamps = false
old_assign = Assignment.find(assignment_id)
new_assign = old_assign.dup
user.set_instructor(new_assign)
new_assign.update_attribute('name', 'Copy of ' + new_assign.name)
new_assign.update_attribute('created_at', Time.now)
new_assign.update_attribute('updated_at', Time.now)
new_assign.update_attribute('directory_path', new_assign.directory_path + '_copy') if new_assign.directory_path.present?
new_assign.copy_flag = true
if new_assign.save
Assignment.record_timestamps = true
copy_assignment_questionnaire(old_assign, new_assign, user)
AssignmentDueDate.copy(old_assign.id, new_assign.id)
new_assign.create_node
new_assign_id = new_assign.id
# also copy topics from old assignment
topics = SignUpTopic.where(assignment_id: old_assign.id)
topics.each do |topic|
SignUpTopic.create(topic_name: topic.topic_name, assignment_id: new_assign_id, max_choosers: topic.max_choosers, category: topic.category, topic_identifier: topic.topic_identifier, micropayment: topic.micropayment)
end
else
new_assign_id = nil
end
new_assign_id
end
def self.copy_assignment_questionnaire(old_assign, new_assign, user)
old_assign.assignment_questionnaires.each do |aq|
AssignmentQuestionnaire.create(
assignment_id: new_assign.id,
questionnaire_id: aq.questionnaire_id,
user_id: user.id,
notification_limit: aq.notification_limit,
questionnaire_weight: aq.questionnaire_weight
)
end
end
def set_badge_threshold_for_assignment(assignment_id, badges)
badge_threshold_hash = {}
['Good Teammate', 'Good Reviewer'].each do |badge_name|
badge_threshold_hash[badge_name] = badges["badge_#{badge_name}_threshold"].to_i if badges and badges.key?("badge_#{badge_name}_threshold")
badge_threshold_hash[badge_name] = 95 if badge_threshold_hash[badge_name].nil? or badge_threshold_hash[badge_name].zero?
end
AssignmentBadge.save_badge_populate_awarded_badges(badge_threshold_hash, assignment_id)
end
end
Added an additional nil check.
require 'active_support/time_with_zone'
# Form object wrapping an Assignment together with its questionnaires, due
# dates, badges, tag-prompt deployments and the delayed-job bookkeeping that
# sends deadline reminders. Used by the assignment new/edit controllers.
class AssignmentForm
  attr_accessor :assignment, :assignment_questionnaires, :due_dates, :tag_prompt_deployments
  # Error string accumulated across the update_* steps.
  attr_accessor :errors

  DEFAULT_MAX_TEAM_SIZE = 1

  # Build the form either from submitted attributes or, for a brand-new
  # assignment, from the parent course (args[:parent_id]).
  def initialize(args = {})
    @assignment = Assignment.new(args[:assignment])
    if args[:assignment].nil?
      @assignment.course = Course.find(args[:parent_id]) if args[:parent_id]
      @assignment.instructor = @assignment.course.instructor if @assignment.course
      @assignment.max_team_size = DEFAULT_MAX_TEAM_SIZE
    end
    @assignment.num_review_of_reviews = @assignment.num_metareviews_allowed
    @assignment_questionnaires = Array(args[:assignment_questionnaires])
    @due_dates = Array(args[:due_dates])
  end

  # create a form object for this assignment_id
  def self.create_form_object(assignment_id)
    assignment_form = AssignmentForm.new
    assignment_form.assignment = Assignment.find(assignment_id)
    assignment_form.assignment_questionnaires = AssignmentQuestionnaire.where(assignment_id: assignment_id)
    assignment_form.due_dates = AssignmentDueDate.where(parent_id: assignment_id)
    assignment_form.set_up_assignment_review
    assignment_form.tag_prompt_deployments = TagPromptDeployment.where(assignment_id: assignment_id)
    assignment_form
  end

  # Apply a full form submission: assignment attributes, questionnaires,
  # due dates, badges, SimiCheck and deadline-reminder jobs.
  # Returns true when every step succeeded.
  def update(attributes, user)
    @has_errors = false
    has_late_policy = false
    # A positive late_policy_id means a late policy is attached; anything
    # else is normalized to nil.
    if attributes[:assignment][:late_policy_id].to_i > 0
      has_late_policy = true
    else
      attributes[:assignment][:late_policy_id] = nil
    end
    update_assignment(attributes[:assignment])
    # Each later step is skipped as soon as an earlier one failed.
    update_assignment_questionnaires(attributes[:assignment_questionnaire]) unless @has_errors
    update_due_dates(attributes[:due_date], user) unless @has_errors
    update_assigned_badges(attributes[:badge], attributes[:assignment]) unless @has_errors
    set_badge_threshold_for_assignment(attributes[:assignment][:id], attributes[:badge]) if @assignment.has_badge?
    add_simicheck_to_delayed_queue(attributes[:assignment][:simicheck])
    # delete the old queued items and recreate new ones if the assignment has late policy.
    if attributes[:due_date] and !@has_errors and has_late_policy
      delete_from_delayed_queue
      add_to_delayed_queue
    end
    update_tag_prompt_deployments(attributes[:tag_prompt_deployments])
    !@has_errors
  end
  # Keep the ActiveRecord-style name some callers use.
  alias update_attributes update

  # Code to update values of assignment
  def update_assignment(attributes)
    unless @assignment.update_attributes(attributes)
      @errors = @assignment.errors.to_s
      @has_errors = true
    end
    @assignment.num_review_of_reviews = @assignment.num_metareviews_allowed
    @assignment.num_reviews = @assignment.num_reviews_allowed
  end

  # code to save assignment questionnaires
  # NOTE(review): existing rows are deleted up-front, then the submitted
  # rows with ids are looked up again — confirm those ids refer to rows
  # surviving the delete (delete vs destroy semantics).
  def update_assignment_questionnaires(attributes)
    return false unless attributes
    existing_aqs = AssignmentQuestionnaire.where(assignment_id: @assignment.id)
    existing_aqs.each(&:delete)
    attributes.each do |assignment_questionnaire|
      if assignment_questionnaire[:id].nil? or assignment_questionnaire[:id].blank?
        aq = AssignmentQuestionnaire.new(assignment_questionnaire)
        unless aq.save
          @errors = @assignment.errors.to_s
          @has_errors = true
        end
      else
        aq = AssignmentQuestionnaire.find(assignment_questionnaire[:id])
        unless aq.update_attributes(assignment_questionnaire)
          @errors = @assignment.errors.to_s
          @has_errors = true
        end
      end
    end
  end

  # s required by answer tagging
  # Create/update/delete TagPromptDeployment rows from the nested form
  # params (keyed by questionnaire id, with parallel arrays per field).
  def update_tag_prompt_deployments(attributes)
    unless attributes.nil?
      attributes.each do |key, value|
        TagPromptDeployment.where(id: value['deleted']).delete_all if value.key?('deleted')
        # assume if tag_prompt is there, then id, question_type, answer_length_threshold must also be there since the inputs are coupled
        next unless value.key?('tag_prompt')
        for i in 0..value['tag_prompt'].count - 1
          tag_dep = nil
          # "undefined"/"null" are sent by the JS form for new rows.
          if !(value['id'][i] == "undefined" or value['id'][i] == "null" or value['id'][i].nil?)
            tag_dep = TagPromptDeployment.find(value['id'][i])
            if tag_dep
              tag_dep.update(assignment_id: @assignment.id,
                             questionnaire_id: key,
                             tag_prompt_id: value['tag_prompt'][i],
                             question_type: value['question_type'][i],
                             answer_length_threshold: value['answer_length_threshold'][i])
            end
          else
            tag_dep = TagPromptDeployment.new(assignment_id: @assignment.id,
                                              questionnaire_id: key,
                                              tag_prompt_id: value['tag_prompt'][i],
                                              question_type: value['question_type'][i],
                                              answer_length_threshold: value['answer_length_threshold'][i]).save
          end
        end
      end
    end
  end
  # end required by answer tagging

  # code to save due dates
  # Converts each submitted local time into UTC (using the user's timezone
  # preference) before creating/updating the AssignmentDueDate rows.
  def update_due_dates(attributes, user)
    return false unless attributes
    attributes.each do |due_date|
      next if due_date[:due_at].blank?
      # parse the dd and convert it to utc before saving it to db
      # eg. 2015-06-22 12:05:00 -0400
      current_local_time = Time.parse(due_date[:due_at][0..15])
      tz = ActiveSupport::TimeZone[user.timezonepref].tzinfo
      utc_time = tz.local_to_utc(Time.local(current_local_time.year,
                                            current_local_time.month,
                                            current_local_time.day,
                                            current_local_time.strftime('%H').to_i,
                                            current_local_time.strftime('%M').to_i,
                                            current_local_time.strftime('%S').to_i))
      due_date[:due_at] = utc_time
      if due_date[:id].nil? or due_date[:id].blank?
        dd = AssignmentDueDate.new(due_date)
        @has_errors = true unless dd.save
      else
        dd = AssignmentDueDate.find(due_date[:id])
        # get deadline for review
        @has_errors = true unless dd.update_attributes(due_date)
      end
      # NOTE(review): @errors may still be nil here (it is only assigned in
      # update_assignment), in which case `nil + String` raises — confirm.
      @errors += @assignment.errors.to_s if @has_errors
    end
  end

  # Adds badges to assignment badges table
  # Removes badges deselected in the form and creates the newly selected ones.
  def update_assigned_badges(badge, assignment)
    if !assignment.nil? and !badge.nil?
      assigned_badges = AssignmentBadge.where(assignment_id: assignment[:id]).select(:id).to_a
      assigned_badges.each do |assigned_badge|
        AssignmentBadge.delete(assigned_badge.id) unless badge[:id].include?(assigned_badge.id)
      end
      badge[:id].each do |badge_id|
        AssignmentBadge.create_badge_without_threshold(badge_id[0], assignment[:id])
      end
    end
  end

  # Adds items to delayed_jobs queue for this assignment
  # One reminder job per deadline that is still far enough in the future,
  # plus extra jobs for review/team_formation deadlines.
  def add_to_delayed_queue
    duedates = AssignmentDueDate.where(parent_id: @assignment.id)
    duedates.each do |due_date|
      deadline_type = DeadlineType.find(due_date.deadline_type_id).name
      diff_btw_time_left_and_threshold, min_left = get_time_diff_btw_due_date_and_now(due_date)
      # Skip deadlines already inside the reminder threshold window.
      next unless diff_btw_time_left_and_threshold > 0
      delayed_job = add_delayed_job(@assignment, deadline_type, due_date, diff_btw_time_left_and_threshold)
      due_date.update_attribute(:delayed_job_id, delayed_job.id)
      # If the deadline type is review, add a delayed job to drop outstanding review
      add_delayed_job(@assignment, "drop_outstanding_reviews", due_date, min_left) if deadline_type == "review"
      # If the deadline type is team_formation, add a delayed job to drop one member team
      next unless deadline_type == "team_formation" and @assignment.team_assignment?
      add_delayed_job(@assignment, "drop_one_member_topics", due_date, min_left)
    end
  end

  # Returns [minutes-until-deadline minus the reminder threshold (in min),
  # raw minutes-until-deadline]. threshold is stored in hours on the row.
  def get_time_diff_btw_due_date_and_now(due_date)
    due_at = due_date.due_at.to_s(:db)
    # NOTE(review): this first parse result is discarded — dead statement?
    Time.parse(due_at)
    due_at = Time.parse(due_at)
    time_left_in_min = find_min_from_now(due_at)
    diff_btw_time_left_and_threshold = time_left_in_min - due_date.threshold * 60
    [diff_btw_time_left_and_threshold, time_left_in_min]
  end

  # add DelayedJob into queue and return it
  def add_delayed_job(assignment, deadline_type, due_date, min_left)
    delayed_job = DelayedJob.enqueue(DelayedMailer.new(assignment.id, deadline_type, due_date.due_at.to_s(:db)),
                                     1, min_left.minutes.from_now)
    change_item_type(delayed_job.id)
    delayed_job
  end

  # Deletes the job with id equal to "delayed_job_id" from the delayed_jobs queue
  def delete_from_delayed_queue
    djobs = Delayed::Job.where(['handler LIKE "%assignment_id: ?%"', @assignment.id])
    for dj in djobs
      dj.delete if !dj.nil? && !dj.id.nil?
    end
  end

  # Change the item_type displayed in the log
  # NOTE(review): find_by may return nil (no audit row) and the next line
  # would then raise NoMethodError — confirm a Version row always exists.
  def change_item_type(delayed_job_id)
    log = Version.find_by(item_type: "Delayed::Backend::ActiveRecord::Job", item_id: delayed_job_id)
    log.update_attribute(:item_type, "DelayedMailer") # Change the item type in the log
  end

  # Remove the assignment and its queued reminder jobs.
  def delete(force = nil)
    # delete from delayed_jobs queue related to this assignment
    delete_from_delayed_queue
    @assignment.delete(force)
  end

  # This functions finds the epoch time in seconds of the due_at parameter and finds the difference of it
  # from the current time and returns this difference in minutes
  def find_min_from_now(due_at)
    # "now" is rendered to a :db UTC string and re-parsed the same way the
    # due dates are, so the local-zone parsing offsets cancel out.
    curr_time = DateTime.now.in_time_zone(zone = 'UTC').to_s(:db)
    curr_time = Time.parse(curr_time)
    time_in_min = ((due_at - curr_time).to_i / 60)
    # time_in_min = 1
    time_in_min
  end

  # Save the assignment
  def save
    @assignment.save
  end

  # create a node for the assignment
  def create_assignment_node
    @assignment.create_node unless @assignment.nil?
  end

  # NOTE: many of these functions actually belongs to other models
  #====setup methods for new and edit method=====#
  # Backfill nil flags and normalize the directory path before rendering.
  def set_up_assignment_review
    set_up_defaults
    # NOTE(review): both locals are unused; side effects of find_due_dates,
    # if any, are all that is kept here — confirm.
    submissions = @assignment.find_due_dates('submission')
    reviews = @assignment.find_due_dates('review')
    # NOTE(review): raises NoMethodError when directory_path is nil.
    @assignment.directory_path = nil if @assignment.directory_path.empty?
  end

  # Backfill a nil staggered_deadline flag with false.
  def staggered_deadline
    @assignment.staggered_deadline = false if @assignment.staggered_deadline.nil?
  end

  # Backfill a nil availability_flag with false.
  def availability_flag
    @assignment.availability_flag = false if @assignment.availability_flag.nil?
  end

  # Backfill a nil microtask flag with false.
  def micro_task
    @assignment.microtask = false if @assignment.microtask.nil?
  end

  # Backfill a nil reviews_visible_to_all flag with false.
  def reviews_visible_to_all
    @assignment.reviews_visible_to_all = false if @assignment.reviews_visible_to_all.nil?
  end

  # Backfill a nil review_assignment_strategy with an empty string.
  def review_assignment_strategy
    @assignment.review_assignment_strategy = '' if @assignment.review_assignment_strategy.nil?
  end

  # Backfill nil quiz settings: no quiz required, zero questions.
  def require_quiz
    if @assignment.require_quiz.nil?
      @assignment.require_quiz = false
      @assignment.num_quiz_questions = 0
    end
  end

  # NOTE: unfortunately this method is needed due to bad data in db @_@
  def set_up_defaults
    staggered_deadline
    availability_flag
    micro_task
    reviews_visible_to_all
    review_assignment_strategy
    require_quiz
  end

  # Re-queue SimiCheck comparison jobs for each submission deadline; a
  # negative delay disables scheduling (old jobs are still purged).
  def add_simicheck_to_delayed_queue(simicheck_delay)
    delete_from_delayed_queue
    if simicheck_delay.to_i >= 0
      duedates = AssignmentDueDate.where(parent_id: @assignment.id)
      duedates.each do |due_date|
        next if DeadlineType.find(due_date.deadline_type_id).name != "submission"
        change_item_type(enqueue_simicheck_task(due_date, simicheck_delay).id)
      end
    end
  end

  # Queue one SimiCheck mailer to run simicheck_delay hours after due_date.
  def enqueue_simicheck_task(due_date, simicheck_delay)
    DelayedJob.enqueue(DelayedMailer.new(@assignment.id, "compare_files_with_simicheck", due_date.due_at.to_s(:db)),
                       1, find_min_from_now(Time.parse(due_date.due_at.to_s(:db)) + simicheck_delay.to_i.hours).minutes.from_now)
  end

  # Copies the inputted assignment into new one and returns the new assignment id
  # NOTE(review): on save failure Assignment.record_timestamps is left
  # false process-wide — confirm intentional.
  def self.copy(assignment_id, user)
    Assignment.record_timestamps = false
    old_assign = Assignment.find(assignment_id)
    new_assign = old_assign.dup
    user.set_instructor(new_assign)
    new_assign.update_attribute('name', 'Copy of ' + new_assign.name)
    new_assign.update_attribute('created_at', Time.now)
    new_assign.update_attribute('updated_at', Time.now)
    new_assign.update_attribute('directory_path', new_assign.directory_path + '_copy') if new_assign.directory_path.present?
    new_assign.copy_flag = true
    if new_assign.save
      Assignment.record_timestamps = true
      copy_assignment_questionnaire(old_assign, new_assign, user)
      AssignmentDueDate.copy(old_assign.id, new_assign.id)
      new_assign.create_node
      new_assign_id = new_assign.id
      # also copy topics from old assignment
      topics = SignUpTopic.where(assignment_id: old_assign.id)
      topics.each do |topic|
        SignUpTopic.create(topic_name: topic.topic_name, assignment_id: new_assign_id, max_choosers: topic.max_choosers, category: topic.category, topic_identifier: topic.topic_identifier, micropayment: topic.micropayment)
      end
    else
      new_assign_id = nil
    end
    new_assign_id
  end

  # Clone the assignment<->questionnaire links onto the copied assignment.
  def self.copy_assignment_questionnaire(old_assign, new_assign, user)
    old_assign.assignment_questionnaires.each do |aq|
      AssignmentQuestionnaire.create(
        assignment_id: new_assign.id,
        questionnaire_id: aq.questionnaire_id,
        user_id: user.id,
        notification_limit: aq.notification_limit,
        questionnaire_weight: aq.questionnaire_weight
      )
    end
  end

  # Store award thresholds for the standard badges, defaulting to 95.
  def set_badge_threshold_for_assignment(assignment_id, badges)
    badge_threshold_hash = {}
    ['Good Teammate', 'Good Reviewer'].each do |badge_name|
      badge_threshold_hash[badge_name] = badges["badge_#{badge_name}_threshold"].to_i if badges and badges.key?("badge_#{badge_name}_threshold")
      badge_threshold_hash[badge_name] = 95 if badge_threshold_hash[badge_name].nil? or badge_threshold_hash[badge_name].zero?
    end
    AssignmentBadge.save_badge_populate_awarded_badges(badge_threshold_hash, assignment_id)
  end
end
|
# == Schema Information
#
# Table name: users
#
# id :integer not null, primary key
# login :string(255) default(""), not null
# encrypted_password :string(255) default(""), not null
# reset_password_token :string(255)
# reset_password_sent_at :datetime
# remember_created_at :datetime
# sign_in_count :integer default(0)
# current_sign_in_at :datetime
# last_sign_in_at :datetime
# current_sign_in_ip :string(255)
# last_sign_in_ip :string(255)
# authentication_token :string(255)
# email :string(255) default(""), not null
# full_name :string(255)
# roles_mask :integer
# created_at :datetime
# updated_at :datetime
#
module Atmosphere
  # Devise-backed platform user; owns appliance sets, keys, funds and
  # billing logs.
  class User < ActiveRecord::Base
    include Atmosphere::UserExt
    # Include default devise modules. Others available are:
    # :confirmable,
    # :registerable, :lockable, :timeoutable and :omniauthable
    devise :database_authenticatable,
           :rememberable, :trackable, :recoverable,
           :validatable, :omniauthable

    validates :login, uniqueness: { case_sensitive: false }

    include Atmosphere::LoginAndEmail
    include Atmosphere::Nondeletable

    has_many :appliance_sets,
             dependent: :destroy,
             class_name: 'Atmosphere::ApplianceSet'
    has_many :user_keys,
             dependent: :destroy,
             class_name: 'Atmosphere::UserKey'
    has_many :appliance_types,
             class_name: 'Atmosphere::ApplianceType'
    has_many :funds,
             through: :user_funds,
             class_name: 'Atmosphere::Fund'
    has_many :user_funds,
             dependent: :destroy,
             class_name: 'Atmosphere::UserFund'
    has_many :billing_logs,
             dependent: :nullify,
             class_name: 'Atmosphere::BillingLog'

    # BUG FIX: was `before_save`, which re-ran the legacy fund backfill on
    # every update of an existing user; it should fire only once, when the
    # user record is first created.
    after_create :check_fund_assignment

    # Users that own a given virtual machine through their appliance sets.
    scope :with_vm, ->(vm) do
      joins(appliance_sets: { appliances: :virtual_machines })
        .where(atmosphere_virtual_machines: {id: vm.id})
    end

    include Gravtastic
    gravtastic default: 'mm'

    #roles
    include RoleModel
    roles :admin, :developer

    # Return this user's default fund, or nil when none is flagged default.
    def default_fund
      dfs = self.user_funds.where(default: true)
      dfs.blank? ? nil : dfs.first.fund
    end

    # Assign a random 8-character password (e.g. for externally created users).
    def generate_password
      self.password = self.password_confirmation = Devise.friendly_token.first(8)
    end

    def to_s
      "#{login} <#{email}>"
    end

    def admin?
      has_role? :admin
    end

    def developer?
      has_role? :developer
    end

    private

    # Checks whether any fund has been assigned to this user.
    # If not, assign the first available fund (if it exists) and make it this user's default fund
    # This method is provided to ensure compatibility with old versions of Atmosphere which do not supply fund information when creating users.
    # Once the platform APIs are updated, this method will be deprecated and should be removed.
    def check_fund_assignment
      if Atmosphere::UserFund.where(user: self).blank? and Atmosphere::Fund.all.count > 0
        user_funds << Atmosphere::UserFund.new(user: self, fund: Atmosphere::Fund.first, default: true)
      end
    end
  end
end
Changed the default user fund assignment so that it fires only on user creation.
# == Schema Information
#
# Table name: users
#
# id :integer not null, primary key
# login :string(255) default(""), not null
# encrypted_password :string(255) default(""), not null
# reset_password_token :string(255)
# reset_password_sent_at :datetime
# remember_created_at :datetime
# sign_in_count :integer default(0)
# current_sign_in_at :datetime
# last_sign_in_at :datetime
# current_sign_in_ip :string(255)
# last_sign_in_ip :string(255)
# authentication_token :string(255)
# email :string(255) default(""), not null
# full_name :string(255)
# roles_mask :integer
# created_at :datetime
# updated_at :datetime
#
module Atmosphere
  # Devise-backed platform user; owns appliance sets, keys, funds and
  # billing logs.
  class User < ActiveRecord::Base
    include Atmosphere::UserExt
    # Include default devise modules. Others available are:
    # :confirmable,
    # :registerable, :lockable, :timeoutable and :omniauthable
    devise :database_authenticatable,
           :rememberable, :trackable, :recoverable,
           :validatable, :omniauthable

    validates :login, uniqueness: { case_sensitive: false }

    include Atmosphere::LoginAndEmail
    include Atmosphere::Nondeletable

    has_many :appliance_sets,
             dependent: :destroy,
             class_name: 'Atmosphere::ApplianceSet'
    has_many :user_keys,
             dependent: :destroy,
             class_name: 'Atmosphere::UserKey'
    has_many :appliance_types,
             class_name: 'Atmosphere::ApplianceType'
    has_many :funds,
             through: :user_funds,
             class_name: 'Atmosphere::Fund'
    has_many :user_funds,
             dependent: :destroy,
             class_name: 'Atmosphere::UserFund'
    has_many :billing_logs,
             dependent: :nullify,
             class_name: 'Atmosphere::BillingLog'

    # Legacy fund backfill runs once, right after the record is created.
    after_create :check_fund_assignment

    # Users that own a given virtual machine through their appliance sets.
    scope :with_vm, ->(vm) do
      joins(appliance_sets: { appliances: :virtual_machines })
        .where(atmosphere_virtual_machines: {id: vm.id})
    end

    include Gravtastic
    gravtastic default: 'mm'

    #roles
    include RoleModel
    roles :admin, :developer

    # Return this user's default fund, or nil when none is flagged default.
    def default_fund
      dfs = self.user_funds.where(default: true)
      dfs.blank? ? nil : dfs.first.fund
    end

    # Assign a random 8-character password (e.g. for externally created users).
    def generate_password
      self.password = self.password_confirmation = Devise.friendly_token.first(8)
    end

    def to_s
      "#{login} <#{email}>"
    end

    def admin?
      has_role? :admin
    end

    def developer?
      has_role? :developer
    end

    private

    # Checks whether any fund has been assigned to this user.
    # If not, assign the first available fund (if it exists) and make it this user's default fund
    # This method is provided to ensure compatibility with old versions of Atmosphere which do not supply fund information when creating users.
    # Once the platform APIs are updated, this method will be deprecated and should be removed.
    def check_fund_assignment
      if Atmosphere::UserFund.where(user: self).blank? and Atmosphere::Fund.all.count > 0
        user_funds << Atmosphere::UserFund.new(user: self, fund: Atmosphere::Fund.first, default: true)
      end
    end
  end
end
|
# Links a user's blogging schedule to a due date and, optionally, the post
# that fulfilled the assignment.
class BlogAssignment < ActiveRecord::Base
  belongs_to :user
  belongs_to :schedule
  belongs_to :post

  validates_uniqueness_of :due_date, :scope => [:user, :schedule]
  validates_uniqueness_of :post, allow_blank: true

  # Columns normalized from "" to nil before save (see nil_if_blank).
  NULL_ATTRS = %w( post_id )
  before_save :nil_if_blank

  # Assignments whose due date is tomorrow.
  def self.due_tomorrow
    where(due_date: Date.tomorrow)
  end

  # Truthy when a substantial (>140 chars), unclaimed post exists since the
  # previous assignment, or when a post is explicitly attached.
  def completed?
    user.blog_posts_written_since_previous_assignment(due_date, schedule).find { |post| post.content_or_summary.length > 140 && !post.blog_assignment } || post
  end

  # BUG FIX: when a post has been explicitly assigned, show only that post
  # instead of the other posts. Previously the attached post was only used
  # as a fallback when the "since previous" list happened to be empty.
  def posts_since_previous
    if post
      [post]
    else
      user.blog_posts_written_since_previous_assignment(due_date, schedule)
    end
  end

  def most_recent_post_since_previous
    posts_since_previous.first
  end

  private

  # Blank foreign keys become nil so the allow_blank uniqueness validation
  # does not treat "" as a claimed post.
  def nil_if_blank
    NULL_ATTRS.each { |attr| self[attr] = nil if self[attr].blank? }
  end
end
If a post has been assigned to a blog assignment, show that post instead of the other posts.
# Joins a user's blogging schedule to a due date and (optionally) the post
# that satisfied it.
class BlogAssignment < ActiveRecord::Base
  belongs_to :user
  belongs_to :schedule
  belongs_to :post

  validates_uniqueness_of :due_date, :scope => [:user, :schedule]
  validates_uniqueness_of :post, allow_blank: true

  # Columns coerced from "" to nil before save.
  NULL_ATTRS = %w( post_id )
  before_save :nil_if_blank

  # Assignments falling due tomorrow.
  def self.due_tomorrow
    where(due_date: Date.tomorrow)
  end

  # Truthy when a substantial (>140 chars), unclaimed post exists since the
  # previous assignment, or when a post is explicitly attached.
  def completed?
    candidates = user.blog_posts_written_since_previous_assignment(due_date, schedule)
    candidates.find { |entry| entry.content_or_summary.length > 140 && !entry.blog_assignment } || post
  end

  # The explicitly attached post wins; otherwise fall back to everything
  # written since the previous assignment.
  def posts_since_previous
    post ? [post] : user.blog_posts_written_since_previous_assignment(due_date, schedule)
  end

  def most_recent_post_since_previous
    posts_since_previous.first
  end

  private

  # Blank foreign keys become nil so allow_blank uniqueness behaves.
  def nil_if_blank
    NULL_ATTRS.each do |attr|
      self[attr] = nil if self[attr].blank?
    end
  end
end
|
require 'singleton'
# Spawns one background "cruise build" process per project at startup.
class BuilderStarter
  # Global switch so tests/console sessions can suppress builder spawning.
  @@run_builders_at_startup = true

  def self.run_builders_at_startup=(value)
    @@run_builders_at_startup = value
  end

  # Start a builder for every known project, unless disabled.
  def self.start_builders
    if @@run_builders_at_startup
      Projects.load_all.each do |project|
        begin_builder(project.name)
      end
    end
  end

  # Launch the builder for one project in the background.
  def self.begin_builder(project_name)
    if ruby_platform =~ /mswin32/
      Thread.new(project_name) { |my_project_name| system("cruise.cmd build #{project_name}") }
    else
      # BUG FIX: `fork` followed by an unconditional `exec` replaced the
      # PARENT process with the builder as well, killing the app.
      # `fork || exec` execs only in the child (where fork returns nil);
      # the parent gets the pid and keeps running.
      fork || exec("#{RAILS_ROOT}/cruise build #{project_name}")
    end
  end

  private

  # Extracted for stubbing in tests. NOTE(review): `private` has no effect
  # on `def self.` methods — this remains publicly callable.
  def self.ruby_platform
    RUBY_PLATFORM
  end
end
Builders now start again on non-Win32 platforms.
git-svn-id: 50a8ad97cae5d6ab203d2d535a9b32cd3858a8ac@196 c04ce798-636b-4ca8-9149-0f9336831111
require 'singleton'
# Launches one background "cruise build" process per project at boot.
class BuilderStarter
  # Class-wide toggle for spawning builders at startup.
  @@run_builders_at_startup = true

  def self.run_builders_at_startup=(value)
    @@run_builders_at_startup = value
  end

  # Kick off a builder for every project when the toggle is on.
  def self.start_builders
    return unless @@run_builders_at_startup
    Projects.load_all.each { |project| begin_builder(project.name) }
  end

  # Start one project's builder in the background: Windows uses a thread
  # running cruise.cmd, everything else forks and execs the cruise script
  # in the child only (fork returns nil there).
  def self.begin_builder(project_name)
    if ruby_platform =~ /mswin32/
      Thread.new(project_name) { |my_project_name| system("cruise.cmd build #{project_name}") }
    else
      fork || exec("#{RAILS_ROOT}/cruise build #{project_name}")
    end
  end

  private

  # Wrapped for stubbing in tests.
  def self.ruby_platform
    RUBY_PLATFORM
  end
end
# Transforms one imported CSV row (an object exposing a `data` hash of
# header => value) into a nested attributes hash keyed by resource,
# dropping sections that carry no values.
class CsvTransformer
  # The CSV column headers this importer understands.
  def self.allowed_headers
    %w(
      email
      location
      latitude
      longitude
      organization_name
      phone_number
      tag_names
      website
    )
  end

  # One-shot helper: instantiate and transform.
  def self.transform(raw_input)
    new(raw_input).transform
  end

  # raw_input must respond to #data with a Hash of header => value.
  def initialize(raw_input)
    @data = raw_input.data
  end

  # Returns e.g. { email: {content: ...}, tag_names: [...], ... } with
  # empty sections removed.
  def transform
    {
      email: email_attrs,
      location: location_attrs,
      organization_name: organization_name_attrs,
      phone_number: phone_number_attrs,
      tag_names: tag_names,
      website: website_attrs
    }.delete_if { |_, v| v.empty? }
  end

  private

  def email_attrs
    {
      content: @data['email'].presence
    }.compact
  end

  def location_attrs
    {
      content: @data['location'].presence,
      latitude: @data['latitude'].presence,
      longitude: @data['longitude'].presence
    }.compact
  end

  def organization_name_attrs
    {
      content: @data['organization_name'].presence
    }.compact
  end

  def phone_number_attrs
    {
      content: @data['phone_number'].presence
    }.compact
  end

  # BUG FIX (#114): tags arrive as "a, b , c"; strip the surrounding
  # whitespace from each name so "b " and "b" don't become distinct tags.
  def tag_names
    @data['tag_names']&.split(',')&.map(&:strip) || []
  end

  def website_attrs
    {
      content: @data['website'].presence
    }.compact
  end
end
Strip spaces from tag_names
Closes #114
# Converts a raw CSV row (an object exposing a `data` hash) into nested
# attribute hashes per resource; sections with no values are omitted.
class CsvTransformer
  # Whitelisted CSV column headers.
  def self.allowed_headers
    %w(
      email
      location
      latitude
      longitude
      organization_name
      phone_number
      tag_names
      website
    )
  end

  # Convenience wrapper: build and run a transformer in one call.
  def self.transform(raw_input)
    new(raw_input).transform
  end

  def initialize(raw_input)
    @data = raw_input.data
  end

  # Assemble every section, then prune the ones that came back empty.
  def transform
    sections = {
      email: email_attrs,
      location: location_attrs,
      organization_name: organization_name_attrs,
      phone_number: phone_number_attrs,
      tag_names: tag_names,
      website: website_attrs
    }
    sections.delete_if { |_, section| section.empty? }
  end

  private

  # Build a {content: ...} hash for a single-value column, dropping nil.
  def single_content_attrs(key)
    { content: @data[key].presence }.compact
  end

  def email_attrs
    single_content_attrs('email')
  end

  def location_attrs
    {
      content: @data['location'].presence,
      latitude: @data['latitude'].presence,
      longitude: @data['longitude'].presence
    }.compact
  end

  def organization_name_attrs
    single_content_attrs('organization_name')
  end

  def phone_number_attrs
    single_content_attrs('phone_number')
  end

  # Comma-separated tag list -> array of trimmed names ([] when absent).
  def tag_names
    raw = @data['tag_names']
    raw ? raw.split(',').map(&:strip) : []
  end

  def website_attrs
    single_content_attrs('website')
  end
end
|
require 'csv'
require 'active_support/all'
# Dictionary of the AACT schema: one row per documented table/column,
# loaded from the CTTI data-dictionary spreadsheet and enriched with live
# row counts and value enumerations.
class DataDefinition < ActiveRecord::Base
  # Load the spreadsheet and create one row per documented table/column.
  def self.populate_from_file
    data = Roo::Spreadsheet.open(ClinicalTrials::FileManager.data_dictionary)
    header = data.first
    puts "about to populate data definitions table..."
    (2..data.last_row).each do |i|
      row = Hash[[header, data.row(i)].transpose]
      puts row
      # Rows without a table or column name are separators — skip them.
      if !row['table'].nil? and !row['column'].nil?
        new(:db_section=>row['db section'].downcase,
            :table_name=>row['table'].downcase,
            :column_name=>row['column'].downcase,
            :data_type=>row['data type'].downcase,
            :source=>row['source'].downcase,
            :ctti_note=>row['CTTI note'],
            :nlm_link=>row['nlm doc'],
           ).save!
      end
    end
  end

  # Record the current row count for every documented table.
  def self.populate_row_counts
    # save count for each table where the primary key is id
    rows = where("column_name='id'")
    # BUG FIX (aact-280): make sure the dictionary is populated before
    # trying to count rows; previously an empty table made this a no-op.
    populate_from_file if rows.size == 0
    rows.each { |row|
      results = ActiveRecord::Base.connection.execute("select count(*) from #{row.table_name}")
      row.row_count = results.getvalue(0, 0) if results.ntuples == 1
      row.save
    }
    # Studies table is an exception - primary key is nct_id
    row = where("table_name='studies' and column_name='nct_id'").first
    # BUG FIX: guard against a missing studies row (NoMethodError on nil).
    return if row.nil?
    results = ActiveRecord::Base.connection.execute("select count(*) from #{row.table_name}")
    row.row_count = results.getvalue(0, 0) if results.ntuples == 1
    row.save
  end

  # For each enumerable column, store a JSON map of distinct value =>
  # formatted occurrence count (rarest first in the SQL, so the hash ends
  # up ordered most-common first).
  def self.populate_enumerations
    # BUG FIX (aact-280): populate the dictionary first if it is empty.
    rows = where("column_name='id'")
    populate_from_file if rows.size == 0
    enums.each { |array|
      results = ActiveRecord::Base.connection.execute("
        SELECT DISTINCT #{array.last}, COUNT(*) AS cnt
          FROM #{array.first}
          GROUP BY #{array.last}
          ORDER BY cnt ASC")
      hash = {}
      cntr = results.ntuples - 1
      while cntr >= 0 do
        val = results.getvalue(cntr, 0).to_s
        val = '-null-' if val.size == 0
        # Insert thousands separators ("1234567" -> "1,234,567").
        val_count = results.getvalue(cntr, 1).to_s.reverse.gsub(/(\d{3})(?=\d)/, '\\1,').reverse
        hash[val] = val_count
        cntr = cntr - 1
      end
      row = where("table_name=? and column_name=?", array.first, array.last).first
      # Robustness: skip columns missing from the dictionary.
      next if row.nil?
      row.enumerations = hash.to_json
      row.save
    }
  end

  # [table, column] pairs whose distinct values are worth enumerating.
  def self.enums
    [
      ['studies','study_type'],
      ['studies','overall_status'],
      ['studies','last_known_status'],
      ['studies','phase'],
      ['studies','enrollment_type'],
      ['calculated_values','sponsor_type'],
      ['central_contacts','contact_type'],
      ['design_groups','group_type'],
      ['design_outcomes','outcome_type'],
      ['designs','observational_model'],
      ['designs','masking'],
      ['eligibilities','gender'],
      ['facility_contacts','contact_type'],
      ['id_information','id_type'],
      ['interventions','intervention_type'],
      ['responsible_parties','responsible_party_type'],
      ['sponsors','agency_class'],
      ['study_references','reference_type'],
      ['result_groups','result_type'],
      ['baseline_measurements','category'],
      ['baseline_measurements','param_type'],
      ['baseline_measurements','dispersion_type'],
      ['reported_events','event_type'],
    ]
  end
end
aact-280: Make sure DataDefinition table is populated before trying to populate row counts or enumerations.
require 'csv'
require 'active_support/all'
# Dictionary of the AACT schema: one row per documented table/column,
# loaded from the CTTI data-dictionary spreadsheet and enriched with live
# row counts and value enumerations.
class DataDefinition < ActiveRecord::Base
  # Load the spreadsheet and create one row per documented table/column.
  def self.populate_from_file
    data = Roo::Spreadsheet.open(ClinicalTrials::FileManager.data_dictionary)
    header = data.first
    # NOTE(review): dataOut is never used afterwards — dead local.
    dataOut = []
    puts "about to populate data definitions table..."
    (2..data.last_row).each do |i|
      row = Hash[[header, data.row(i)].transpose]
      puts row
      # Rows without a table or column name are separators — skip them.
      if !row['table'].nil? and !row['column'].nil?
        new(:db_section=>row['db section'].downcase,
            :table_name=>row['table'].downcase,
            :column_name=>row['column'].downcase,
            :data_type=>row['data type'].downcase,
            :source=>row['source'].downcase,
            :ctti_note=>row['CTTI note'],
            :nlm_link=>row['nlm doc'],
           ).save!
      end
    end
  end

  # Record the current row count for every documented table.
  def self.populate_row_counts
    # save count for each table where the primary key is id
    rows=where("column_name='id'")
    # aact-280: populate the dictionary first when the table is empty.
    populate_from_file if rows.size==0
    rows.each{|row|
      results=ActiveRecord::Base.connection.execute("select count(*) from #{row.table_name}")
      row.row_count=results.getvalue(0,0) if results.ntuples == 1
      row.save
    }
    # Studies table is an exception - primary key is nct_id
    row=where("table_name='studies' and column_name='nct_id'").first
    # Guard against a missing studies dictionary row.
    return if row.nil?
    results=ActiveRecord::Base.connection.execute("select count(*) from #{row.table_name}")
    row.row_count=results.getvalue(0,0) if results.ntuples == 1
    row.save
  end

  # For each enumerable column, store a JSON map of distinct value =>
  # formatted occurrence count.
  def self.populate_enumerations
    # aact-280: populate the dictionary first when the table is empty.
    rows=where("column_name='id'")
    populate_from_file if rows.size==0
    enums.each{|array|
      results=ActiveRecord::Base.connection.execute("
        SELECT DISTINCT #{array.last}, COUNT(*) AS cnt
        FROM #{array.first}
        GROUP BY #{array.last}
        ORDER BY cnt ASC")
      hash={}
      cntr=results.ntuples - 1
      while cntr >= 0 do
        val=results.getvalue(cntr,0).to_s
        val='-null-' if val.size==0
        # Insert thousands separators ("1234567" -> "1,234,567").
        val_count=results.getvalue(cntr,1).to_s.reverse.gsub(/(\d{3})(?=\d)/, '\\1,').reverse
        hash[val]=val_count
        cntr=cntr-1
      end
      # NOTE(review): `row` may be nil if the column is absent from the
      # dictionary — the next line would raise; confirm enums stay in sync.
      row=where("table_name=? and column_name=?",array.first,array.last).first
      row.enumerations=hash.to_json
      row.save
    }
  end

  # [table, column] pairs whose distinct values are worth enumerating.
  def self.enums
    [
      ['studies','study_type'],
      ['studies','overall_status'],
      ['studies','last_known_status'],
      ['studies','phase'],
      ['studies','enrollment_type'],
      ['calculated_values','sponsor_type'],
      ['central_contacts','contact_type'],
      ['design_groups','group_type'],
      ['design_outcomes','outcome_type'],
      ['designs','observational_model'],
      ['designs','masking'],
      ['eligibilities','gender'],
      ['facility_contacts','contact_type'],
      ['id_information','id_type'],
      ['interventions','intervention_type'],
      ['responsible_parties','responsible_party_type'],
      ['sponsors','agency_class'],
      ['study_references','reference_type'],
      ['result_groups','result_type'],
      ['baseline_measurements','category'],
      ['baseline_measurements','param_type'],
      ['baseline_measurements','dispersion_type'],
      ['reported_events','event_type'],
    ]
  end
end
|
require 'redmine'
# Integration with the library-support Redmine instance: creates issues for
# physical deliveries, failed supplier requests and manual assistance
# requests, and records the corresponding order events.
class LibrarySupport
  include Configured

  # Redmine API client configured from this class's settings.
  def self.redmine
    Redmine.new LibrarySupport.url, LibrarySupport.api_key, :timeout => LibrarySupport.timeout
  end

  # Creates a Redmine issue asking staff to deliver a printed order by DTU
  # internal mail, then records a delivery order event on the order.
  # Raises when Redmine does not return an issue.
  def self.submit_physical_delivery order, order_url, options = {}
    Delayed::Worker.logger.info "submit_physical_delivery called."
    # NOTE(review): assumes 'title_ts'/'author_ts' are always present in the
    # document; a missing key would raise NoMethodError on nil — confirm
    # upstream guarantees.
    title = (order.document['title_ts'].first || "")
    user_name = (order.user || "").to_s
    author = (order.document['author_ts'].first || "")
    length_of_variable_fields = title.length + user_name.length
    Delayed::Worker.logger.info "submit_physical_delivery: generating send_mail_link ..."
    send_mail_link = send_mail_link_for order.user, {
      'subject' => "Regarding your document request",
      'body' => "\"#{title}\" by #{author}",
    }
    Delayed::Worker.logger.info "submit_physical_delivery: send_mail_link generated."
    Delayed::Worker.logger.info "submit_physical_delivery: generating issue_description ..."
    issue_description = []
    issue_description << "View order in Findit: #{options[:findit_url]}\n"
    issue_description << "#{phonebook_link_for(order.user) || order.user}, #{send_mail_link}, requested the following which is now ready for delivery:\n" if order.user
    issue_description << '<pre>'
    issue_description << "#{order.supplier.to_s.upcase} order ID:\n #{order.supplier_order_id}"
    issue_description << order.document.reject {|k,vs| k.to_s == 'open_url' || vs.blank?}
                                       .collect {|k,vs| "#{I18n.t "toshokan.catalog.show_field_labels.#{k}"}:\n #{vs.first}"}
                                       .join("\n")
    issue_description << '</pre>'
    issue_description << 'Send by DTU Internal Mail to'
    issue_description << "<pre>#{order.user.address.reject {|k,v| v.blank?}.collect {|k,v| v}.join("\n")}</pre>"
    Delayed::Worker.logger.info "submit_physical_delivery: issue_description generated."
    Delayed::Worker.logger.info "submit_physical_delivery: generating issue hash ..."
    issue = {
      :project_id => LibrarySupport.project_ids[:tib_orders_delivered_as_print],
      # Trim the title so fixed text + user name stay within Redmine's
      # subject length limit.
      :subject => "Delivery of " + "\"#{(title)[0..(226-([226, length_of_variable_fields].min))]}\"" + " requested by #{user_name}",
      :description => issue_description.join("\n"),
      :custom_fields => [
        LibrarySupport.custom_fields[:dtu_unit].merge({
          :value => dtu_unit_for(order.user),
        }),
        LibrarySupport.custom_fields[:reordered].merge({
          :value => options[:reordered] ? 'Yes' : 'No',
        }),
      ]
    }
    Delayed::Worker.logger.info "submit_physical_delivery: issue hash generated."
    Delayed::Worker.logger.info "submit_physical_delivery: creating redmine issue:\n#{issue} ..."
    response = redmine.create_issue issue
    if response.try :[], "issue"
      Delayed::Worker.logger.info "submit_physical_delivery: redmine issue created."
      Delayed::Worker.logger.info "submit_physical_delivery: adding order event ..."
      is_redelivery = options[:reordered]
      order.order_events << OrderEvent.new(:name => is_redelivery ? 'physical_redelivery_done' : 'physical_delivery_done', :data => response['issue']['id'])
      # FIX: persist the order so the appended order event is actually saved
      # (previously the event was built but never written to the database).
      order.save!
      Delayed::Worker.logger.info "submit_physical_delivery: order event added."
    else
      Delayed::Worker.logger.error "submit_physical_delivery: Error submitting physical delivery to library support Redmine. Redmine response:\n#{response || 'nil'}"
      raise
    end
  end

  # Creates a Redmine issue for an order that was cancelled by the supplier,
  # then records a 'delivery_manual' order event. DTU users only.
  # Raises when Redmine does not return an issue.
  def self.submit_failed_request order, order_url, options = {}
    return unless order.user.dtu?
    send_mail_link = send_mail_link_for order.user, {
      'subject' => "Manual processing of your request: \"#{order.document['title_ts'].first}\"",
      'body' => "Your request of \"#{order.document['title_ts'].first}\" has gone to manual processing",
    }
    issue_description = []
    issue_description << "#{phonebook_link_for(order.user) || order.user}, #{send_mail_link}, requested the following but the request was cancelled from the supplier:\n" if order.user
    issue_description << '<pre>'
    issue_description << order.document.reject {|k,vs| k.to_s == 'open_url' || vs.blank?}
                                       .collect {|k,vs| "#{I18n.t "toshokan.catalog.show_field_labels.#{k}"}:\n #{vs.first}"}
                                       .join("\n")
    issue_description << '</pre>'
    issue_description << "\nCancel reason: #{options[:reason]}\n" if options[:reason]
    issue_description << "\"View order in DTU Findit\":#{order_url}" if order.id
    issue = {
      :project_id => LibrarySupport.project_ids[:failed_requests],
      :subject => "#{order.user} requests \"#{order.document['title_ts'].first}\""[0..254],
      :description => issue_description.join("\n"),
      :custom_fields => [
        LibrarySupport.custom_fields[:failed_from].merge({
          :value => failed_from_for(order.supplier),
        }),
        LibrarySupport.custom_fields[:dtu_unit].merge({
          :value => dtu_unit_for(order.user),
        }),
        LibrarySupport.custom_fields[:reordered].merge({
          :value => options[:reordered] ? 'Yes' : 'No',
        }),
      ]
    }
    Rails.logger.debug "Creating redmine issue:\n#{issue}"
    response = redmine.create_issue issue
    if response.try :[], "issue"
      order.order_events << OrderEvent.new(:name => 'delivery_manual', :data => response['issue']['id'])
      order.save!
    else
      Rails.logger.error "Error submitting failed order to library support Redmine. Redmine response:\n#{response || 'nil'}"
      raise
    end
  end

  # Creates a Redmine issue for a manual assistance request, optionally with
  # a due date derived from the request's auto-cancel setting, and links the
  # created issue back to the request and its order.
  # Raises when issue creation fails.
  def self.submit_assistance_request user, assistance_request, status_url, reordered = false
    genre = assistance_request.genre
    title = assistance_request.title
    author = assistance_request.author
    item_description = description_for assistance_request, genre
    # Append indented user notes to item description
    item_description += "\n\n== Notes from user ==========\n\n #{assistance_request.notes.gsub /\n/, "\n "}\n" unless assistance_request.notes.blank?
    send_mail_link = send_mail_link_for user, {
      'subject' => "Regarding your #{genre.to_s.gsub /_/, ' '} request",
      'body' => "\"#{title}\" by #{author}",
    }
    issue_description = []
    issue_description << "User #{phonebook_link_for user} ( CWIS: #{user.user_data['dtu']['matrikel_id']}; #{send_mail_link} ) requests the following:"
    issue_description << "<pre>\n#{item_description}\n</pre>"
    issue_description << I18n.t("toshokan.assistance_requests.forms.sections.physical_delivery.values.#{assistance_request.physical_delivery}")
    if assistance_request.physical_delivery == 'internal_mail'
      issue_description << "<pre>#{user.address.reject {|k,v| v.blank?}.collect {|k,v| v}.join("\n")}</pre>"
    end
    issue_description << "\"View order in DTU Findit\":#{status_url}" if assistance_request.id
    # Book suggestions and the "misc" genres share dedicated projects.
    redmine_project_id =
      if assistance_request.book_suggest
        :book_suggestions
      elsif [:thesis, :report, :standard, :other].include? genre
        :other
      else
        genre
      end
    issue = {
      :project_id => LibrarySupport.project_ids[redmine_project_id],
      :description => issue_description.join("\n\n"),
      :subject => "#{user} requests \"#{title}\""[0..254],
      :custom_fields => [
        LibrarySupport.custom_fields[:dtu_unit].merge({
          :value => dtu_unit_for(user),
        }),
        LibrarySupport.custom_fields[:reordered].merge({
          :value => reordered ? 'Yes' : 'No',
        }),
      ],
    }
    # Map the auto-cancel horizon (days, as a string) onto a Redmine due date.
    case assistance_request.auto_cancel
    when '30'
      issue[:due_date] = Time.now.to_date + 1.month
    when '90'
      issue[:due_date] = Time.now.to_date + 3.months
    when '180'
      issue[:due_date] = Time.now.to_date + 6.months
    end
    response = redmine.create_issue issue
    if response
      assistance_request.library_support_issue = response['issue']['id']
      assistance_request.save!
      order = Order.where('assistance_request_id = ?', assistance_request.id).first
      if order
        order.supplier_order_id = response['issue']['id']
        order.order_events << OrderEvent.new(:name => 'delivery_manual', :data => response['issue']['id'])
        order.save!
      end
    else
      raise 'Error creating RedMine issue'
    end
  end

  # Textile link to the DTU phonebook entry for a DTU user; nil otherwise.
  def self.phonebook_link_for user
    "\"#{user}\":http://www.dtu.dk/Service/Telefonbog/Person?id=#{user.user_data['dtu']['matrikel_id']}&tab=1" if user.dtu?
  end

  # Textile mailto: link for a DTU user with pre-filled subject/body; nil for
  # non-DTU users.
  def self.send_mail_link_for user, local_params = {}
    return unless user.dtu?
    params = {
      'reply-to' => LibrarySupport.reply_to,
    }.merge local_params
    # NOTE(review): URI.escape is deprecated and removed in Ruby 3 — migrate
    # to ERB::Util.url_encode when upgrading.
    "\"Send email\":mailto:#{user.email}?#{params.collect {|k,v| "#{k}=#{URI.escape v}"}.join '&'}"
  end

  # Plain-text description of the requested item, with one section per
  # bibliographic aspect of the genre. Raises ArgumentError on unknown genre.
  def self.description_for assistance_request, genre
    sections =
      case genre
      when :journal_article
        {
          :article => [:title, :author, :doi],
          :journal => [:title, :issn, :volume, :issue, :year, :pages],
        }
      when :conference_article
        {
          :article => [:title, :author, :doi],
          :conference => [:title, :location, :isxn, :year, :pages],
        }
      when :book
        {
          :book => [:title, :year, :author, :edition, :doi, :isbn, :publisher],
        }
      when :thesis
        {
          :thesis => [:title, :author, :affiliation, :publisher, :type, :year, :pages],
        }
      when :report
        {
          :report => [:title, :author, :publisher, :doi, :number],
          :host => [:title, :isxn, :volume, :issue, :year, :pages, :series],
        }
      when :standard
        {
          :standard => [:title, :subtitle, :publisher, :doi, :number, :isbn, :year, :pages],
        }
      when :other
        {
          :other => [:title, :author, :publisher, :doi],
          :host => [:title, :isxn, :volume, :issue, :year, :pages, :series],
        }
      else
        Rails.logger.error "Unknown assistance request genre: #{genre || 'nil'}"
        raise ArgumentError.new "genre should be one of :journal_article, :conference_article or :book, but was #{genre || 'nil'}"
      end
    result = []
    sections.each do |section, fields|
      section_result = ["== #{section.capitalize} ==========\n"]
      fields.each do |field|
        value = assistance_request.send "#{section}_#{field}"
        # Skip blank values and the '?' placeholder.
        section_result << "#{field.capitalize}:\n #{value.gsub /\n/, "\n "}" unless value.blank? || value == '?'
      end
      # Only emit sections that ended up with at least one field.
      result << section_result.join("\n") if section_result.size > 1
    end
    result.join "\n\n"
  end

  # Human-readable name of the supplier a request failed from.
  def self.failed_from_for supplier
    {
      :rd => 'Reprints Desk',
      :dtu => 'DTU Library - local scan',
    }[supplier]
  end

  # Maps a DTU user's primary org unit code to a department name; nil for
  # non-DTU users or unknown units.
  def self.dtu_unit_for user
    return unless user.dtu?
    {
      'stud' => 'Students',
      '25' => 'DTU Aqua',
      'NNFCB' => 'DTU Biosustain',
      '54' => 'DTU Executive School of Business',
      '59' => 'DTU Cen',
      '28' => 'DTU Chemical Engineering',
      '26' => 'DTU Chemistry',
      '11' => 'DTU Civil Engineering',
      '1' => 'DTU Compute',
      'Danchip' => 'DTU Danchip',
      'IHK' => 'DTU Diplom',
      '31' => 'DTU Electrical Engineering',
      '47' => 'DTU Energy Conversion',
      '12' => 'DTU Environment',
      '23' => 'DTU Food',
      '34' => 'DTU Fotonik',
      '2' => 'DTU Informatics',
      '58' => 'DTU Library',
      '42' => 'DTU Management Engineering',
      '41' => 'DTU Mechanical Engineering',
      '33' => 'DTU Nanotech',
      'NATLAB' => 'DTU Natlab',
      '48' => 'DTU Nutech',
      '10' => 'DTU Physics',
      '30' => 'DTU Space',
      '27' => 'DTU Systems Biology',
      '13' => 'DTU Transport',
      '24' => 'DTU Vet',
      '46' => 'DTU Wind Energi',
      '7' => 'Others',
    }[user.user_data["dtu"]["org_units"].first]
  end
end
submit_physical_delivery: save the order after appending the order event, so the event is actually persisted
require 'redmine'
# Integration with the library-support Redmine instance: creates issues for
# physical deliveries, failed supplier requests and manual assistance
# requests, and records the corresponding order events.
class LibrarySupport
  include Configured

  # Redmine API client configured from this class's settings.
  def self.redmine
    Redmine.new LibrarySupport.url, LibrarySupport.api_key, :timeout => LibrarySupport.timeout
  end

  # Creates a Redmine issue asking staff to deliver a printed order by DTU
  # internal mail, then records a delivery order event on the order.
  # Raises when Redmine does not return an issue.
  def self.submit_physical_delivery order, order_url, options = {}
    Delayed::Worker.logger.info "submit_physical_delivery called."
    # NOTE(review): assumes 'title_ts'/'author_ts' are always present in the
    # document; a missing key would raise NoMethodError on nil — confirm
    # upstream guarantees.
    title = (order.document['title_ts'].first || "")
    user_name = (order.user || "").to_s
    author = (order.document['author_ts'].first || "")
    length_of_variable_fields = title.length + user_name.length
    Delayed::Worker.logger.info "submit_physical_delivery: generating send_mail_link ..."
    send_mail_link = send_mail_link_for order.user, {
      'subject' => "Regarding your document request",
      'body' => "\"#{title}\" by #{author}",
    }
    Delayed::Worker.logger.info "submit_physical_delivery: send_mail_link generated."
    Delayed::Worker.logger.info "submit_physical_delivery: generating issue_description ..."
    issue_description = []
    issue_description << "View order in Findit: #{options[:findit_url]}\n"
    issue_description << "#{phonebook_link_for(order.user) || order.user}, #{send_mail_link}, requested the following which is now ready for delivery:\n" if order.user
    issue_description << '<pre>'
    issue_description << "#{order.supplier.to_s.upcase} order ID:\n #{order.supplier_order_id}"
    issue_description << order.document.reject {|k,vs| k.to_s == 'open_url' || vs.blank?}
                                       .collect {|k,vs| "#{I18n.t "toshokan.catalog.show_field_labels.#{k}"}:\n #{vs.first}"}
                                       .join("\n")
    issue_description << '</pre>'
    issue_description << 'Send by DTU Internal Mail to'
    issue_description << "<pre>#{order.user.address.reject {|k,v| v.blank?}.collect {|k,v| v}.join("\n")}</pre>"
    Delayed::Worker.logger.info "submit_physical_delivery: issue_description generated."
    Delayed::Worker.logger.info "submit_physical_delivery: generating issue hash ..."
    issue = {
      :project_id => LibrarySupport.project_ids[:tib_orders_delivered_as_print],
      # Title is trimmed so the fixed text plus user name stay within
      # Redmine's subject length limit.
      :subject => "Delivery of " + "\"#{(title)[0..(226-([226, length_of_variable_fields].min))]}\"" + " requested by #{user_name}",
      :description => issue_description.join("\n"),
      :custom_fields => [
        LibrarySupport.custom_fields[:dtu_unit].merge({
          :value => dtu_unit_for(order.user),
        }),
        LibrarySupport.custom_fields[:reordered].merge({
          :value => options[:reordered] ? 'Yes' : 'No',
        }),
      ]
    }
    Delayed::Worker.logger.info "submit_physical_delivery: issue hash generated."
    Delayed::Worker.logger.info "submit_physical_delivery: creating redmine issue:\n#{issue} ..."
    response = redmine.create_issue issue
    if response.try :[], "issue"
      Delayed::Worker.logger.info "submit_physical_delivery: redmine issue created."
      Delayed::Worker.logger.info "submit_physical_delivery: adding order event ..."
      is_redelivery = options[:reordered]
      order.order_events << OrderEvent.new(:name => is_redelivery ? 'physical_redelivery_done' : 'physical_delivery_done', :data => response['issue']['id'])
      # Persist the order so the appended order event is saved.
      order.save!
      Delayed::Worker.logger.info "submit_physical_delivery: order event added."
    else
      Delayed::Worker.logger.error "submit_physical_delivery: Error submitting physical delivery to library support Redmine. Redmine response:\n#{response || 'nil'}"
      raise
    end
  end

  # Creates a Redmine issue for an order that was cancelled by the supplier,
  # then records a 'delivery_manual' order event. DTU users only.
  # Raises when Redmine does not return an issue.
  def self.submit_failed_request order, order_url, options = {}
    return unless order.user.dtu?
    send_mail_link = send_mail_link_for order.user, {
      'subject' => "Manual processing of your request: \"#{order.document['title_ts'].first}\"",
      'body' => "Your request of \"#{order.document['title_ts'].first}\" has gone to manual processing",
    }
    issue_description = []
    issue_description << "#{phonebook_link_for(order.user) || order.user}, #{send_mail_link}, requested the following but the request was cancelled from the supplier:\n" if order.user
    issue_description << '<pre>'
    issue_description << order.document.reject {|k,vs| k.to_s == 'open_url' || vs.blank?}
                                       .collect {|k,vs| "#{I18n.t "toshokan.catalog.show_field_labels.#{k}"}:\n #{vs.first}"}
                                       .join("\n")
    issue_description << '</pre>'
    issue_description << "\nCancel reason: #{options[:reason]}\n" if options[:reason]
    issue_description << "\"View order in DTU Findit\":#{order_url}" if order.id
    issue = {
      :project_id => LibrarySupport.project_ids[:failed_requests],
      :subject => "#{order.user} requests \"#{order.document['title_ts'].first}\""[0..254],
      :description => issue_description.join("\n"),
      :custom_fields => [
        LibrarySupport.custom_fields[:failed_from].merge({
          :value => failed_from_for(order.supplier),
        }),
        LibrarySupport.custom_fields[:dtu_unit].merge({
          :value => dtu_unit_for(order.user),
        }),
        LibrarySupport.custom_fields[:reordered].merge({
          :value => options[:reordered] ? 'Yes' : 'No',
        }),
      ]
    }
    Rails.logger.debug "Creating redmine issue:\n#{issue}"
    response = redmine.create_issue issue
    if response.try :[], "issue"
      order.order_events << OrderEvent.new(:name => 'delivery_manual', :data => response['issue']['id'])
      order.save!
    else
      Rails.logger.error "Error submitting failed order to library support Redmine. Redmine response:\n#{response || 'nil'}"
      raise
    end
  end

  # Creates a Redmine issue for a manual assistance request, optionally with
  # a due date derived from the request's auto-cancel setting, and links the
  # created issue back to the request and its order.
  # Raises when issue creation fails.
  def self.submit_assistance_request user, assistance_request, status_url, reordered = false
    genre = assistance_request.genre
    title = assistance_request.title
    author = assistance_request.author
    item_description = description_for assistance_request, genre
    # Append indented user notes to item description
    item_description += "\n\n== Notes from user ==========\n\n #{assistance_request.notes.gsub /\n/, "\n "}\n" unless assistance_request.notes.blank?
    send_mail_link = send_mail_link_for user, {
      'subject' => "Regarding your #{genre.to_s.gsub /_/, ' '} request",
      'body' => "\"#{title}\" by #{author}",
    }
    issue_description = []
    issue_description << "User #{phonebook_link_for user} ( CWIS: #{user.user_data['dtu']['matrikel_id']}; #{send_mail_link} ) requests the following:"
    issue_description << "<pre>\n#{item_description}\n</pre>"
    issue_description << I18n.t("toshokan.assistance_requests.forms.sections.physical_delivery.values.#{assistance_request.physical_delivery}")
    if assistance_request.physical_delivery == 'internal_mail'
      issue_description << "<pre>#{user.address.reject {|k,v| v.blank?}.collect {|k,v| v}.join("\n")}</pre>"
    end
    issue_description << "\"View order in DTU Findit\":#{status_url}" if assistance_request.id
    # Book suggestions and the "misc" genres share dedicated projects.
    redmine_project_id =
      if assistance_request.book_suggest
        :book_suggestions
      elsif [:thesis, :report, :standard, :other].include? genre
        :other
      else
        genre
      end
    issue = {
      :project_id => LibrarySupport.project_ids[redmine_project_id],
      :description => issue_description.join("\n\n"),
      :subject => "#{user} requests \"#{title}\""[0..254],
      :custom_fields => [
        LibrarySupport.custom_fields[:dtu_unit].merge({
          :value => dtu_unit_for(user),
        }),
        LibrarySupport.custom_fields[:reordered].merge({
          :value => reordered ? 'Yes' : 'No',
        }),
      ],
    }
    # Map the auto-cancel horizon (days, as a string) onto a Redmine due date.
    case assistance_request.auto_cancel
    when '30'
      issue[:due_date] = Time.now.to_date + 1.month
    when '90'
      issue[:due_date] = Time.now.to_date + 3.months
    when '180'
      issue[:due_date] = Time.now.to_date + 6.months
    end
    response = redmine.create_issue issue
    if response
      assistance_request.library_support_issue = response['issue']['id']
      assistance_request.save!
      order = Order.where('assistance_request_id = ?', assistance_request.id).first
      if order
        order.supplier_order_id = response['issue']['id']
        order.order_events << OrderEvent.new(:name => 'delivery_manual', :data => response['issue']['id'])
        order.save!
      end
    else
      raise 'Error creating RedMine issue'
    end
  end

  # Textile link to the DTU phonebook entry for a DTU user; nil otherwise.
  def self.phonebook_link_for user
    "\"#{user}\":http://www.dtu.dk/Service/Telefonbog/Person?id=#{user.user_data['dtu']['matrikel_id']}&tab=1" if user.dtu?
  end

  # Textile mailto: link for a DTU user with pre-filled subject/body; nil for
  # non-DTU users.
  def self.send_mail_link_for user, local_params = {}
    return unless user.dtu?
    params = {
      'reply-to' => LibrarySupport.reply_to,
    }.merge local_params
    # NOTE(review): URI.escape is deprecated and removed in Ruby 3 — migrate
    # to ERB::Util.url_encode when upgrading.
    "\"Send email\":mailto:#{user.email}?#{params.collect {|k,v| "#{k}=#{URI.escape v}"}.join '&'}"
  end

  # Plain-text description of the requested item, with one section per
  # bibliographic aspect of the genre. Raises ArgumentError on unknown genre.
  def self.description_for assistance_request, genre
    sections =
      case genre
      when :journal_article
        {
          :article => [:title, :author, :doi],
          :journal => [:title, :issn, :volume, :issue, :year, :pages],
        }
      when :conference_article
        {
          :article => [:title, :author, :doi],
          :conference => [:title, :location, :isxn, :year, :pages],
        }
      when :book
        {
          :book => [:title, :year, :author, :edition, :doi, :isbn, :publisher],
        }
      when :thesis
        {
          :thesis => [:title, :author, :affiliation, :publisher, :type, :year, :pages],
        }
      when :report
        {
          :report => [:title, :author, :publisher, :doi, :number],
          :host => [:title, :isxn, :volume, :issue, :year, :pages, :series],
        }
      when :standard
        {
          :standard => [:title, :subtitle, :publisher, :doi, :number, :isbn, :year, :pages],
        }
      when :other
        {
          :other => [:title, :author, :publisher, :doi],
          :host => [:title, :isxn, :volume, :issue, :year, :pages, :series],
        }
      else
        Rails.logger.error "Unknown assistance request genre: #{genre || 'nil'}"
        raise ArgumentError.new "genre should be one of :journal_article, :conference_article or :book, but was #{genre || 'nil'}"
      end
    result = []
    sections.each do |section, fields|
      section_result = ["== #{section.capitalize} ==========\n"]
      fields.each do |field|
        value = assistance_request.send "#{section}_#{field}"
        # Skip blank values and the '?' placeholder.
        section_result << "#{field.capitalize}:\n #{value.gsub /\n/, "\n "}" unless value.blank? || value == '?'
      end
      # Only emit sections that ended up with at least one field.
      result << section_result.join("\n") if section_result.size > 1
    end
    result.join "\n\n"
  end

  # Human-readable name of the supplier a request failed from.
  def self.failed_from_for supplier
    {
      :rd => 'Reprints Desk',
      :dtu => 'DTU Library - local scan',
    }[supplier]
  end

  # Maps a DTU user's primary org unit code to a department name; nil for
  # non-DTU users or unknown units.
  def self.dtu_unit_for user
    return unless user.dtu?
    {
      'stud' => 'Students',
      '25' => 'DTU Aqua',
      'NNFCB' => 'DTU Biosustain',
      '54' => 'DTU Executive School of Business',
      '59' => 'DTU Cen',
      '28' => 'DTU Chemical Engineering',
      '26' => 'DTU Chemistry',
      '11' => 'DTU Civil Engineering',
      '1' => 'DTU Compute',
      'Danchip' => 'DTU Danchip',
      'IHK' => 'DTU Diplom',
      '31' => 'DTU Electrical Engineering',
      '47' => 'DTU Energy Conversion',
      '12' => 'DTU Environment',
      '23' => 'DTU Food',
      '34' => 'DTU Fotonik',
      '2' => 'DTU Informatics',
      '58' => 'DTU Library',
      '42' => 'DTU Management Engineering',
      '41' => 'DTU Mechanical Engineering',
      '33' => 'DTU Nanotech',
      'NATLAB' => 'DTU Natlab',
      '48' => 'DTU Nutech',
      '10' => 'DTU Physics',
      '30' => 'DTU Space',
      '27' => 'DTU Systems Biology',
      '13' => 'DTU Transport',
      '24' => 'DTU Vet',
      '46' => 'DTU Wind Energi',
      '7' => 'Others',
    }[user.user_data["dtu"]["org_units"].first]
  end
end
|
# Imports ontology XML into the database: ontologies, their entities
# (symbols), sentences (axioms) and links.
module Ontology::Import
  extend ActiveSupport::Concern

  # Parses the XML in +io+ inside a single transaction, creating or updating
  # the described objects. New ontology versions are attributed to +user+.
  def import_xml(io, user)
    now = Time.now
    transaction do
      root = nil
      ontology = nil
      ontologies_count = 0
      versions = []
      OntologyParser.parse io,
        root: Proc.new { |h|
          root = h
        },
        ontology: Proc.new { |h|
          ontologies_count += 1
          if distributed?
            # generate IRI for sub-ontology
            child_name = h['name']
            child_iri = iri_for_child(child_name)
            # find or create sub-ontology by IRI
            ontology = self.children.find_by_iri(child_iri)
            if ontology.nil?
              ontology = SingleOntology.create!({iri: child_iri,
                name: child_name,
                basepath: self.basepath,
                file_extension: self.file_extension,
                repository_id: repository_id},
                without_protection: true)
              self.children << ontology
            end
            version = ontology.versions.build
            version.user = user
            versions << version
          else
            # A non-distributed ontology must contain exactly one ontology.
            raise "more than one ontology found" if ontologies_count > 1
            ontology = self
          end
          if h['language']
            ontology.language = Language.where(:iri => "http://purl.net/dol/language/#{h['language']}")
              .first_or_create(user: user, name: h['language'])
          end
          if h['logic']
            ontology.logic = Logic.where(:iri => "http://purl.net/dol/logics/#{h['logic']}")
              .first_or_create(user: user, name: h['logic'])
          end
          # Counters are rebuilt by the symbol/axiom callbacks below.
          ontology.entities_count = 0
          ontology.sentences_count = 0
        },
        ontology_end: Proc.new {
          # remove outdated sentences and entities
          conditions = ['updated_at < ?', now]
          ontology.entities.where(conditions).destroy_all
          ontology.sentences.where(conditions).delete_all
          # FIX: this save was accidentally commented out, leaving language,
          # logic and the recomputed counters unsaved.
          ontology.save!
        },
        symbol: Proc.new { |h|
          ontology.entities.update_or_create_from_hash(h, now)
          ontology.entities_count += 1
        },
        axiom: Proc.new { |h|
          ontology.sentences.update_or_create_from_hash(h, now)
          ontology.sentences_count += 1
        },
        link: Proc.new { |h|
          self.links.update_or_create_from_hash(h, user, now)
        }
      save!
      versions.each { |version| version.save! }
    end
  end

  # Convenience wrapper: imports from a file on disk.
  def import_xml_from_file(path, user)
    import_xml File.open(path), user
  end

  # Re-imports the XML of the most recent version, if one exists.
  def import_latest_version(user)
    return if versions.last.nil?
    import_xml_from_file versions.last.xml_path, user
  end
end
Fix an error that resulted in unsaved ontologies

When merging the staging and git branches, we accidentally commented out
ontology.save!, which resulted in the logic-specific settings
(and probably more) not being saved.
# Imports ontology XML into the database: ontologies, their entities
# (symbols), sentences (axioms) and links.
module Ontology::Import
  extend ActiveSupport::Concern

  # Parses the XML in +io+ inside a single transaction, creating or updating
  # the described objects. New ontology versions are attributed to +user+.
  def import_xml(io, user)
    now = Time.now
    transaction do
      root = nil
      ontology = nil
      link = nil
      ontologies_count = 0
      versions = []
      OntologyParser.parse io,
        root: Proc.new { |h|
          root = h
        },
        ontology: Proc.new { |h|
          ontologies_count += 1
          if distributed?
            # generate IRI for sub-ontology
            child_name = h['name']
            child_iri = iri_for_child(child_name)
            # find or create sub-ontology by IRI
            ontology = self.children.find_by_iri(child_iri)
            if ontology.nil?
              ontology = SingleOntology.create!({iri: child_iri,
                name: child_name,
                basepath: self.basepath,
                file_extension: self.file_extension,
                repository_id: repository_id},
                without_protection: true)
              self.children << ontology
            end
            version = ontology.versions.build
            version.user = user
            versions << version
          else
            # A non-distributed ontology must contain exactly one ontology.
            raise "more than one ontology found" if ontologies_count > 1
            ontology = self
          end
          if h['language']
            ontology.language = Language.where(:iri => "http://purl.net/dol/language/#{h['language']}")
              .first_or_create(user: user, name: h['language'])
          end
          if h['logic']
            ontology.logic = Logic.where(:iri => "http://purl.net/dol/logics/#{h['logic']}")
              .first_or_create(user: user, name: h['logic'])
          end
          # Counters are rebuilt by the symbol/axiom callbacks below.
          ontology.entities_count = 0
          ontology.sentences_count = 0
        },
        ontology_end: Proc.new {
          # remove outdated sentences and entities
          conditions = ['updated_at < ?', now]
          ontology.entities.where(conditions).destroy_all
          ontology.sentences.where(conditions).delete_all
          # Persist language, logic and the recomputed counters.
          ontology.save!
        },
        symbol: Proc.new { |h|
          ontology.entities.update_or_create_from_hash(h, now)
          ontology.entities_count += 1
        },
        axiom: Proc.new { |h|
          ontology.sentences.update_or_create_from_hash(h, now)
          ontology.sentences_count += 1
        },
        link: Proc.new { |h|
          self.links.update_or_create_from_hash(h, user, now)
        }
      save!
      versions.each { |version| version.save! }
    end
  end

  # Convenience wrapper: imports from a file on disk.
  def import_xml_from_file(path, user)
    import_xml File.open(path), user
  end

  # Re-imports the XML of the most recent version, if one exists.
  def import_latest_version(user)
    return if versions.last.nil?
    import_xml_from_file versions.last.xml_path, user
  end
end
|
module PaidUp
  # CanCan ability definitions driven by the features available on the
  # user's plan.
  module Ability
    include CanCan::Ability

    # Grants or revokes abilities for +user+ based on each PaidUp::Feature's
    # setting type and the user's plan allowances.
    def initialize_paid_up(user)
      features = PaidUp::Feature.all
      # `each` instead of `for`: for-loops leak the loop variable into the
      # enclosing scope and are non-idiomatic Ruby.
      features.each do |feature|
        case feature.setting_type
        when 'table_rows'
          can :read, feature.feature_model
          if user.table_rows_allowed(feature.slug) > 0 || user.table_rows_unlimited?(feature.slug)
            can :manage, feature.feature_model, :user => user
            can :own, feature.feature_model
            # Revoke creation once the row quota is exhausted.
            unless user.table_rows_remaining(feature.slug) > 0
              cannot :create, feature.feature_model
            end
          else
            cannot :delete, feature.feature_model
            cannot :update, feature.feature_model
            cannot :own, feature.feature_model
            cannot :create, feature.feature_model
          end
        when 'rolify_rows'
          can :read, feature.feature_model
          if user.rolify_rows_allowed(feature.slug) > 0 || user.rolify_rows_unlimited?(feature.slug)
            # Management is limited to records the user owns (rolify role).
            can :manage, feature.feature_model do |record|
              user.has_role? :owner, record
            end
            can :own, feature.feature_model
            if user.rolify_rows_remaining(feature.slug) > 0
              can :create, feature.feature_model
            else
              cannot :create, feature.feature_model
            end
          else
            cannot :delete, feature.feature_model
            cannot :update, feature.feature_model
            cannot :own, feature.feature_model
            cannot :create, feature.feature_model
          end
        when 'boolean'
          # Boolean features simply toggle a named ability.
          if user.plan.feature_setting feature.slug
            can :use, feature.slug.to_sym
          end
        else
          raise(:unknown_feature_type.l)
        end
      end
    end
  end
end
Update abilities: grant :index alongside :read, and scope rolify :manage to records owned by the user (matched by Group ids) instead of an ownership block
module PaidUp
  # CanCan ability definitions driven by the features available on the
  # user's plan.
  module Ability
    include CanCan::Ability

    # Grants or revokes abilities for +user+ based on each PaidUp::Feature's
    # setting type and the user's plan allowances.
    def initialize_paid_up(user)
      features = PaidUp::Feature.all
      # `each` instead of `for`: for-loops leak the loop variable into the
      # enclosing scope and are non-idiomatic Ruby.
      features.each do |feature|
        case feature.setting_type
        when 'table_rows'
          can [:index, :read], feature.feature_model
          if user.table_rows_allowed(feature.slug) > 0 || user.table_rows_unlimited?(feature.slug)
            can :manage, feature.feature_model, :user => user
            can :own, feature.feature_model
            # Revoke creation once the row quota is exhausted.
            unless user.table_rows_remaining(feature.slug) > 0
              cannot :create, feature.feature_model
            end
          else
            cannot :delete, feature.feature_model
            cannot :update, feature.feature_model
            cannot :own, feature.feature_model
            cannot :create, feature.feature_model
          end
        when 'rolify_rows'
          can [:index, :read], feature.feature_model
          if user.rolify_rows_allowed(feature.slug) > 0 || user.rolify_rows_unlimited?(feature.slug)
            # Management is scoped to the records the user owns, matched by
            # the ids of Groups where the user holds the :owner role.
            can :manage, feature.feature_model, id: Group.with_role(:owner, user).pluck(:id)
            can :own, feature.feature_model
            # Revoke creation once the row quota is exhausted.
            unless user.rolify_rows_remaining(feature.slug) > 0
              cannot :create, feature.feature_model
            end
          else
            cannot :delete, feature.feature_model
            cannot :update, feature.feature_model
            cannot :own, feature.feature_model
            cannot :create, feature.feature_model
          end
        when 'boolean'
          # Boolean features simply toggle a named ability.
          if user.plan.feature_setting feature.slug
            can :use, feature.slug.to_sym
          end
        else
          raise(:unknown_feature_type.l)
        end
      end
    end
  end
end
|
# Downloads a remote document (over HTTP, with an S3 fallback) into a
# binary-mode Tempfile, rotating landscape images to portrait orientation.
class RemoteDocument
  attr_reader :tempfile

  # url - location of the remote document.
  def initialize(url)
    @url = url
    @tempfile = Tempfile.new.tap(&:binmode)
  end

  # Fetches the document over HTTP; on an HTTP error, falls back to S3.
  # Always rotates landscape images before returning. Returns self.
  def download
    # FIX: use URI.open instead of Kernel#open. Kernel#open interprets
    # strings beginning with "|" as shell commands — a command-injection risk
    # with untrusted URLs — and its use with URLs is deprecated.
    URI.open(url) { |url_file| tempfile.write(url_file.read) }
    self
  rescue OpenURI::HTTPError
    return self if s3_download
  ensure
    rotate if landscape_image?
  end

  # True when the downloaded content is a PDF (by MIME type).
  def pdf?
    file_type.include?("pdf")
  end

  private

  attr_reader :url

  # Rotates the image 90 degrees counter-clockwise, replacing the tempfile
  # with ImageMagick's output file.
  def rotate
    @tempfile = magick_file.combine_options do |actions|
      actions.rotate "-90"
    end.tempfile
  end

  # True when the content is an image wider than it is tall.
  def landscape_image?
    file_type.include?("image") && magick_file.width > magick_file.height
  end

  def magick_file
    MiniMagick::Image.open(tempfile.path)
  end

  # MIME type of the downloaded content, memoized after the first probe.
  def file_type
    @_file_type ||= FileMagic.open(:mime) do |fm|
      fm.file(tempfile.tap(&:rewind).path, true)
    end
  end

  # Downloads the S3 object matching the URL into the tempfile.
  # Returns true on success, false when the key does not exist.
  def s3_download
    s3.get_object(
      {
        bucket: Rails.application.secrets.aws_bucket,
        key: s3_object_key,
      },
      target: tempfile.path,
    )
    true
  rescue Aws::S3::Errors::NoSuchKey
    false
  end

  def s3
    @_s3 ||= Aws::S3::Client.new(
      access_key_id: Rails.application.secrets.aws_key,
      secret_access_key: Rails.application.secrets.aws_secret,
      region: Rails.application.secrets.aws_region,
    )
  end

  # Everything after "amazonaws.com/" in the URL is treated as the S3 key.
  def s3_object_key
    url.split("amazonaws.com/").last
  end
end
Remove usage of `.tap`. Not needed in these contexts.
Signed-off-by: Ben Golder <8e821d337f8aee1d7fe1c7e2d13a89439a0a774b@codeforamerica.org>
# Downloads a remote document (over HTTP, with an S3 fallback) into a
# binary-mode Tempfile, rotating landscape images to portrait orientation.
class RemoteDocument
  attr_reader :tempfile

  # url - location of the remote document.
  def initialize(url)
    @url = url
    @tempfile = Tempfile.new
    # FIX: call binmode as a separate statement. Tempfile delegates #binmode
    # to the underlying File, which returns that File — so assigning the
    # result of `Tempfile.new.binmode` made @tempfile a File, not a Tempfile.
    @tempfile.binmode
  end

  # Fetches the document over HTTP; on an HTTP error, falls back to S3.
  # Always rotates landscape images before returning. Returns self.
  def download
    # FIX: use URI.open instead of Kernel#open. Kernel#open interprets
    # strings beginning with "|" as shell commands — a command-injection risk
    # with untrusted URLs — and its use with URLs is deprecated.
    URI.open(url) { |url_file| tempfile.write(url_file.read) }
    self
  rescue OpenURI::HTTPError
    return self if s3_download
  ensure
    rotate if landscape_image?
  end

  # True when the downloaded content is a PDF (by MIME type).
  def pdf?
    file_type.include?("pdf")
  end

  private

  attr_reader :url

  # Rotates the image 90 degrees counter-clockwise, replacing the tempfile
  # with ImageMagick's output file.
  def rotate
    @tempfile = magick_file.combine_options do |actions|
      actions.rotate "-90"
    end.tempfile
  end

  # True when the content is an image wider than it is tall.
  def landscape_image?
    file_type.include?("image") && magick_file.width > magick_file.height
  end

  def magick_file
    MiniMagick::Image.open(tempfile.path)
  end

  # MIME type of the downloaded content, memoized after the first probe.
  def file_type
    @_file_type ||= FileMagic.open(:mime) do |fm|
      tempfile.rewind
      fm.file(tempfile.path, true)
    end
  end

  # Downloads the S3 object matching the URL into the tempfile.
  # Returns true on success, false when the key does not exist.
  def s3_download
    s3.get_object(
      {
        bucket: Rails.application.secrets.aws_bucket,
        key: s3_object_key,
      },
      target: tempfile.path,
    )
    true
  rescue Aws::S3::Errors::NoSuchKey
    false
  end

  def s3
    @_s3 ||= Aws::S3::Client.new(
      access_key_id: Rails.application.secrets.aws_key,
      secret_access_key: Rails.application.secrets.aws_secret,
      region: Rails.application.secrets.aws_region,
    )
  end

  # Everything after "amazonaws.com/" in the URL is treated as the S3 key.
  def s3_object_key
    url.split("amazonaws.com/").last
  end
end
|
require 'rkelly'
module Setup
class Algorithm
include SnippetCode
include NamespaceNamed
include Taggable
include RailsAdmin::Models::Setup::AlgorithmAdmin
legacy_code_attribute :code
build_in_data_type.referenced_by(:namespace, :name)
field :description, type: String
embeds_many :parameters, class_name: Setup::AlgorithmParameter.to_s, inverse_of: :algorithm
embeds_many :call_links, class_name: Setup::CallLink.to_s, inverse_of: :algorithm
validates_format_of :name, with: /\A[a-z]([a-z]|_|\d)*\Z/
accepts_nested_attributes_for :parameters, allow_destroy: true
accepts_nested_attributes_for :call_links, allow_destroy: true
field :store_output, type: Boolean
belongs_to :output_datatype, class_name: Setup::DataType.to_s, inverse_of: nil
field :validate_output, type: Boolean
before_save :validate_parameters, :validate_code, :validate_output_processing
attr_reader :last_output
field :language, type: Symbol, default: -> { new_record? ? :auto : ruby }
validates_inclusion_of :language, in: ->(alg) { alg.class.language_enum.values }
def code_extension
case language
when :python
'.py'
when :javascript
'.js'
when :php
'.php'
else
'.rb'
end
end
def validate_parameters
not_required = false
parameters.each do |p|
next unless not_required ||= !p.required
p.errors.add(:required, 'marked as "Required" must come before non marked') if p.required
end
errors.add(:parameters, 'contains invalid sequence of required parameters') if (last = parameters.last) && last.errors.present?
errors.blank?
end
def validate_code
if code.blank?
errors.add(:code, "can't be blank")
else
logs = parse_code
if logs[:errors].present?
logs[:errors].each { |msg| errors.add(:code, msg) }
self.call_links = []
else
links = []
(logs[:self_sends] || []).each do |call_name|
if (call_link = call_links.where(name: call_name).first)
links << call_link
else
links << Setup::CallLink.new(name: call_name)
end
end
self.call_links = links
do_link
end
end
errors.blank?
end
def validate_output_processing
if store_output and not output_datatype
rc = Setup::FileDataType.find_or_create_by(namespace: namespace, name: "#{name} output")
if rc.errors.present?
errors.add(:output_datatype, rc.errors.full_messages)
else
self.output_datatype = rc
end
end
errors.blank?
end
def do_link
call_links.each { |call_link| call_link.do_link }
end
attr_accessor :self_linker
def with_linker(linker)
self.self_linker = linker
self
end
# Persists an algorithm's raw output through the configured output data type
# and returns the ids of the created records (flattened across nested arrays).
# Raises when validation against the output data type fails.
def do_store_output(output)
  rc = []
  r = nil
  # Unwrap Capataz sandbox proxies down to the underlying object.
  while output.capataz_proxy?
    output = output.capataz_slave
  end
  if output_datatype.is_a? Setup::FileDataType
    begin
      case output
      when Hash, Array
        r = output_datatype.create_from!(output.to_json, contentType: 'application/json')
      when String
        # Sniff the content type: JSON first, then XML, else plain text.
        ct = 'text/plain'
        begin
          JSON.parse(output)
          ct = 'application/json'
        rescue JSON::ParserError
          unless Nokogiri.XML(output).errors.present?
            ct = 'application/xml'
          end
        end
        r = output_datatype.create_from!(output, contentType: ct)
      else
        r = output_datatype.create_from!(output.to_s)
      end
    rescue Exception
      # NOTE(review): best-effort fallback stores the stringified output;
      # `rescue Exception` also swallows SignalException/SystemExit — consider
      # narrowing to StandardError.
      r = output_datatype.create_from!(output.to_s)
    end
  else
    begin
      case output
      when Hash, String
        begin
          r = output_datatype.create_from_json!(output)
        rescue Exception => e
          # NOTE(review): backtrace goes to stdout and the error is swallowed
          # here (r stays nil) — confirm this is intended.
          puts e.backtrace
        end
      when Array
        # Arrays are stored element by element, accumulating their ids.
        output.each do |item|
          rc += do_store_output(item)
        end
      else
        raise
      end
    rescue Exception
      fail 'Output failed to validate against Output DataType.'
    end
  end
  if r
    if r.errors.present?
      fail 'Output failed to validate against Output DataType.'
    else
      rc << r.id
    end
  end
  rc
end
# Executes the algorithm with +input+ (normalized to an argument array) via
# the bundler interpreter; optionally stores the result through the output
# data type and records it as @last_output. Returns the raw result.
def run(input)
  input = Cenit::Utility.json_value_of(input)
  input = [input] unless input.is_a?(Array)
  rc = Cenit::BundlerInterpreter.run(self, *input)
  if rc.present?
    if store_output
      unless output_datatype
        fail 'Execution failed! Output storage required and no Output DataType defined.'
      end
      begin
        ids = do_store_output rc
        # NOTE(review): `args` is not defined in this method — presumably this
        # should be `input` (or an instance method defined elsewhere); confirm.
        @last_output = AlgorithmOutput.create(algorithm: self, data_type: output_datatype, input_params: args,
                                              output_ids: ids)
      rescue Exception => e
        # Storage failures only abort the run when validate_output is set;
        # NOTE(review): the message lacks a separating space before e.message.
        if validate_output
          fail 'Execution failed!' + e.message
        end
      end
    end
  end
  rc
end
# True when +call_symbol+ resolves to a present linked target.
def link?(call_symbol)
  linked = link(call_symbol)
  linked.present?
end
# Resolves the call link named +call_symbol+ and returns its linked target,
# or nil when no such call link exists.
def link(call_symbol)
  call_link = call_links.where(name: call_symbol).first
  call_link&.do_link
end
# String form of the record id, used as this algorithm's linker key.
def linker_id
  "#{id}"
end
# Depth-first walk over this algorithm and every algorithm reachable through
# its call links, yielding each one exactly once (cycle-safe via +visited+).
def for_each_call(visited = Set.new, &block)
  unless visited.include?(self)
    visited << self
    block.call(self) if block
    # Recurse only into call links that actually resolve to an algorithm.
    call_links.each { |call_link| call_link.link.for_each_call(visited, &block) if call_link.link }
  end
end
# All stored outputs of this algorithm, newest first.
# NOTE(review): +options+ is accepted but currently ignored.
def stored_outputs(options = {})
  AlgorithmOutput.where(algorithm: self).desc(:created_at)
end
# Builds a JSON-schema hash (string keys) for this algorithm's parameters:
# one property per parameter; parameters flagged required are listed as such.
def configuration_schema
  properties = {}
  parameters.each { |p| properties[p.name] = p.schema }
  {
    type: 'object',
    properties: properties,
    required: parameters.select(&:required).collect(&:name)
  }.stringify_keys
end
# Memoized Mongoff model backed by configuration_schema, used to hold a
# concrete configuration of this algorithm's parameters.
def configuration_model
  @mongoff_model ||= Mongoff::Model.for(data_type: self.class.data_type,
                                        schema: configuration_schema,
                                        name: self.class.configuration_model_name,
                                        cache: false)
end
# Human-readable enum label (e.g. :'JavaScript') for the current language value.
def language_name
  self.class.language_enum.key(language)
end
class << self
  # Label => value map of selectable languages. Python and PHP are declared
  # in code_extension/parsers but disabled here (parsing not supported yet).
  def language_enum
    {
      'Auto detect': :auto,
      # 'Python': :python,
      # 'PHP': :php,
      'JavaScript': :javascript,
      'Ruby': :ruby
    }
  end

  # Name under which the per-algorithm configuration Mongoff model is built.
  def configuration_model_name
    "#{Setup::Algorithm}::Config"
  end
end
# Parses +code+ with the parser for the configured language and returns its
# logs hash ({errors: [...], self_sends: ...}). With language == :auto, tries
# each supported language in turn, fixes self.language to the first one that
# parses cleanly, and reports an error when none does.
def parse_code
  if language == :auto
    logs = {}
    # NOTE: the block parameter shadows the outer `lang` assignment target.
    lang = self.class.language_enum.values.detect do |lang|
      next if lang == :auto
      # Reuse one logs hash; clear between attempts so only the winner's
      # (or last loser's) logs survive.
      logs.clear
      parse_method = "parse_#{lang}_code"
      logs.merge!(send(parse_method))
      logs[:errors].blank?
    end
    if lang
      self.language = lang
    else
      logs.clear
      logs[:errors] = ["can't be auto-detected with syntax errors or typed language is not supported"]
    end
    logs
  else
    parse_method = "parse_#{language}_code"
    send(parse_method)
  end
end
protected

# Parses +code+ as sandboxed Ruby via Capataz, with the algorithm's parameter
# names pre-declared as locals. Returns {errors: [...]} plus whatever Capataz
# writes into logs (e.g. detected self sends).
def parse_ruby_code
  # NOTE: the local `errors` intentionally shadows the model's errors object.
  logs = { errors: errors = [] }
  unless Capataz.rewrite(code, halt_on_error: false, logs: logs, locals: parameters.collect { |p| p.name })
    errors << 'with no valid Ruby syntax'
  end
  logs
end
# Parses +code+ as JavaScript via RKelly; collects the names of bare function
# calls (FunctionCallNode on a ResolveNode) as self sends.
# Returns {errors: [...], self_sends: Set} (self_sends only on success).
def parse_javascript_code
  # NOTE: the local `errors` intentionally shadows the model's errors object.
  logs = { errors: errors = [] }
  ast = RKelly.parse(code) rescue nil
  if ast
    logs[:self_sends] = call_names = Set.new
    ast.each do |node|
      if node.is_a?(RKelly::Nodes::FunctionCallNode) && (node = node.value).is_a?(RKelly::Nodes::ResolveNode)
        call_names << node.value
      end
    end
  else
    errors << 'with no valid JavaScript syntax'
  end
  logs
end
# Placeholder parser: PHP source is not parsed yet, so always report an error.
def parse_php_code
  { errors: ['PHP parsing not yet supported'] }
end
# Placeholder parser: Python source is not parsed yet, so always report an error.
def parse_python_code
  { errors: ['Python parsing not yet supported'] }
end
end
end
# Monkey-patch: allow attaching an arbitrary range to any Array instance
# (writer only; the value is stored in @range).
# NOTE(review): core-class monkey-patching — consider a Refinement instead.
class Array
  attr_writer :range
end
Fix the default `language` value for existing algorithms (bare `ruby` method call → the `:ruby` symbol)
require 'rkelly'
module Setup
  # A user-authored script (Ruby or JavaScript; Python/PHP reserved) stored as
  # a Cenit model, with declared parameters, links to the algorithms it calls,
  # and optional persistence of its execution output.
  class Algorithm
    include SnippetCode
    include NamespaceNamed
    include Taggable
    include RailsAdmin::Models::Setup::AlgorithmAdmin

    legacy_code_attribute :code

    build_in_data_type.referenced_by(:namespace, :name)

    field :description, type: String

    embeds_many :parameters, class_name: Setup::AlgorithmParameter.to_s, inverse_of: :algorithm
    embeds_many :call_links, class_name: Setup::CallLink.to_s, inverse_of: :algorithm

    # Algorithm names are lowercase identifiers: a letter, then letters/digits/underscores.
    validates_format_of :name, with: /\A[a-z]([a-z]|_|\d)*\Z/

    accepts_nested_attributes_for :parameters, allow_destroy: true
    accepts_nested_attributes_for :call_links, allow_destroy: true

    field :store_output, type: Boolean
    belongs_to :output_datatype, class_name: Setup::DataType.to_s, inverse_of: nil
    field :validate_output, type: Boolean

    before_save :validate_parameters, :validate_code, :validate_output_processing

    attr_reader :last_output

    # New records default to :auto (detected on save); pre-existing records
    # default to :ruby.
    field :language, type: Symbol, default: -> { new_record? ? :auto : :ruby }
    validates_inclusion_of :language, in: ->(alg) { alg.class.language_enum.values }

    # Source-file extension for the configured language (.rb fallback).
    def code_extension
      case language
      when :python
        '.py'
      when :javascript
        '.js'
      when :php
        '.php'
      else
        '.rb'
      end
    end

    # before_save: required parameters must precede non-required ones.
    # NOTE(review): only the last parameter's errors trigger the model error.
    def validate_parameters
      not_required = false
      parameters.each do |p|
        next unless not_required ||= !p.required
        p.errors.add(:required, 'marked as "Required" must come before non marked') if p.required
      end
      errors.add(:parameters, 'contains invalid sequence of required parameters') if (last = parameters.last) && last.errors.present?
      errors.blank?
    end

    # before_save: parses the code, rebuilds call_links from the detected call
    # names (reusing existing links), and resolves them; clears links on error.
    def validate_code
      if code.blank?
        errors.add(:code, "can't be blank")
      else
        logs = parse_code
        if logs[:errors].present?
          logs[:errors].each { |msg| errors.add(:code, msg) }
          self.call_links = []
        else
          links = []
          (logs[:self_sends] || []).each do |call_name|
            if (call_link = call_links.where(name: call_name).first)
              links << call_link
            else
              links << Setup::CallLink.new(name: call_name)
            end
          end
          self.call_links = links
          do_link
        end
      end
      errors.blank?
    end

    # before_save: auto-provisions a "<name> output" FileDataType when output
    # storage is requested without a configured output data type.
    def validate_output_processing
      if store_output and not output_datatype
        rc = Setup::FileDataType.find_or_create_by(namespace: namespace, name: "#{name} output")
        if rc.errors.present?
          errors.add(:output_datatype, rc.errors.full_messages)
        else
          self.output_datatype = rc
        end
      end
      errors.blank?
    end

    # Resolves every call link (call name -> target algorithm).
    def do_link
      call_links.each { |call_link| call_link.do_link }
    end

    attr_accessor :self_linker

    # Fluent setter: records the linker and returns self for chaining.
    def with_linker(linker)
      self.self_linker = linker
      self
    end

    # Persists raw output through the output data type; returns created ids.
    # Raises when validation against the output data type fails.
    def do_store_output(output)
      rc = []
      r = nil
      # Unwrap Capataz sandbox proxies down to the underlying object.
      while output.capataz_proxy?
        output = output.capataz_slave
      end
      if output_datatype.is_a? Setup::FileDataType
        begin
          case output
          when Hash, Array
            r = output_datatype.create_from!(output.to_json, contentType: 'application/json')
          when String
            # Sniff content type: JSON first, then XML, else plain text.
            ct = 'text/plain'
            begin
              JSON.parse(output)
              ct = 'application/json'
            rescue JSON::ParserError
              unless Nokogiri.XML(output).errors.present?
                ct = 'application/xml'
              end
            end
            r = output_datatype.create_from!(output, contentType: ct)
          else
            r = output_datatype.create_from!(output.to_s)
          end
        rescue Exception
          # NOTE(review): best-effort fallback; `rescue Exception` is overly broad.
          r = output_datatype.create_from!(output.to_s)
        end
      else
        begin
          case output
          when Hash, String
            begin
              r = output_datatype.create_from_json!(output)
            rescue Exception => e
              # NOTE(review): error is swallowed (r stays nil), backtrace to stdout.
              puts e.backtrace
            end
          when Array
            # Arrays are stored element by element, accumulating their ids.
            output.each do |item|
              rc += do_store_output(item)
            end
          else
            raise
          end
        rescue Exception
          fail 'Output failed to validate against Output DataType.'
        end
      end
      if r
        if r.errors.present?
          fail 'Output failed to validate against Output DataType.'
        else
          rc << r.id
        end
      end
      rc
    end

    # Runs the algorithm with +input+ (normalized to an argument array) and,
    # when configured, stores the result as @last_output. Returns the result.
    def run(input)
      input = Cenit::Utility.json_value_of(input)
      input = [input] unless input.is_a?(Array)
      rc = Cenit::BundlerInterpreter.run(self, *input)
      if rc.present?
        if store_output
          unless output_datatype
            fail 'Execution failed! Output storage required and no Output DataType defined.'
          end
          begin
            ids = do_store_output rc
            # NOTE(review): `args` is not defined in this method — presumably
            # `input` was intended; confirm.
            @last_output = AlgorithmOutput.create(algorithm: self, data_type: output_datatype, input_params: args,
                                                  output_ids: ids)
          rescue Exception => e
            if validate_output
              fail 'Execution failed!' + e.message
            end
          end
        end
      end
      rc
    end

    # True when +call_symbol+ resolves to a present linked target.
    def link?(call_symbol)
      link(call_symbol).present?
    end

    # Resolved target of the call link named +call_symbol+, or nil.
    def link(call_symbol)
      if (call_link = call_links.where(name: call_symbol).first)
        call_link.do_link
      else
        nil
      end
    end

    # String form of the record id, used as the linker key.
    def linker_id
      id.to_s
    end

    # Cycle-safe depth-first walk over this algorithm and all algorithms
    # reachable through its call links, yielding each once.
    def for_each_call(visited = Set.new, &block)
      unless visited.include?(self)
        visited << self
        block.call(self) if block
        call_links.each { |call_link| call_link.link.for_each_call(visited, &block) if call_link.link }
      end
    end

    # Stored outputs, newest first. NOTE(review): +options+ is ignored.
    def stored_outputs(options = {})
      AlgorithmOutput.where(algorithm: self).desc(:created_at)
    end

    # JSON-schema (string keys) describing the algorithm's parameters.
    def configuration_schema
      schema =
        {
          type: 'object',
          properties: properties = {},
          required: parameters.select(&:required).collect(&:name)
        }
      parameters.each { |p| properties[p.name] = p.schema }
      schema.stringify_keys
    end

    # Memoized Mongoff model backed by configuration_schema.
    def configuration_model
      @mongoff_model ||= Mongoff::Model.for(data_type: self.class.data_type,
                                            schema: configuration_schema,
                                            name: self.class.configuration_model_name,
                                            cache: false)
    end

    # Human-readable enum label for the current language value.
    def language_name
      self.class.language_enum.keys.detect { |key| self.class.language_enum[key] == language }
    end

    class << self
      # Label => value map of selectable languages (Python/PHP disabled:
      # parsing not supported yet).
      def language_enum
        {
          'Auto detect': :auto,
          # 'Python': :python,
          # 'PHP': :php,
          'JavaScript': :javascript,
          'Ruby': :ruby
        }
      end

      # Name under which the configuration Mongoff model is built.
      def configuration_model_name
        "#{Setup::Algorithm}::Config"
      end
    end

    # Parses +code+ for the configured language; with :auto, tries each
    # supported language and fixes self.language to the first that parses.
    def parse_code
      if language == :auto
        logs = {}
        # NOTE: the block parameter shadows the outer `lang` assignment target.
        lang = self.class.language_enum.values.detect do |lang|
          next if lang == :auto
          logs.clear
          parse_method = "parse_#{lang}_code"
          logs.merge!(send(parse_method))
          logs[:errors].blank?
        end
        if lang
          self.language = lang
        else
          logs.clear
          logs[:errors] = ["can't be auto-detected with syntax errors or typed language is not supported"]
        end
        logs
      else
        parse_method = "parse_#{language}_code"
        send(parse_method)
      end
    end

    protected

    # Sandboxed Ruby parse via Capataz; parameter names act as locals.
    def parse_ruby_code
      # The local `errors` intentionally shadows the model's errors object.
      logs = { errors: errors = [] }
      unless Capataz.rewrite(code, halt_on_error: false, logs: logs, locals: parameters.collect { |p| p.name })
        errors << 'with no valid Ruby syntax'
      end
      logs
    end

    # JavaScript parse via RKelly; bare function-call names become self sends.
    def parse_javascript_code
      logs = { errors: errors = [] }
      ast = RKelly.parse(code) rescue nil
      if ast
        logs[:self_sends] = call_names = Set.new
        ast.each do |node|
          if node.is_a?(RKelly::Nodes::FunctionCallNode) && (node = node.value).is_a?(RKelly::Nodes::ResolveNode)
            call_names << node.value
          end
        end
      else
        errors << 'with no valid JavaScript syntax'
      end
      logs
    end

    # Placeholder: PHP parsing is not supported yet.
    def parse_php_code
      {
        errors: ['PHP parsing not yet supported']
      }
    end

    # Placeholder: Python parsing is not supported yet.
    def parse_python_code
      {
        errors: ['Python parsing not yet supported']
      }
    end
  end
end
# Monkey-patch: allow attaching an arbitrary range to any Array instance
# (writer only; stored in @range).
# NOTE(review): core-class monkey-patching — consider a Refinement instead.
class Array
  attr_writer :range
end
|
module Setup
  # A generic key/value pair (with optional description and JSON metadata)
  # attached to some owner model through whichever belongs_to relation is set.
  class Parameter
    include CenitScoped
    include JsonMetadata
    include ChangedIf

    build_in_data_type
      .with(:key, :value, :description, :metadata)
      .referenced_by(:key)
      .and({ label: '{{key}}' })

    # Parameters cannot be created directly through the API in this version.
    deny :create

    field :key, type: String, as: :name
    field :description, type: String
    field :value

    validates_presence_of :key

    # "key: value" display form.
    def to_s
      "#{key}: #{value}"
    end

    # First belongs_to association that is actually set (memoized).
    # NOTE(review): `||=` re-runs the scan whenever no parent was found.
    def parent_relation
      @parent_relation ||= reflect_on_all_associations(:belongs_to).detect { |r| send(r.name) }
    end

    # Inverse relation name on the owner, or nil when unattached.
    def location
      (r = parent_relation) && r.inverse_name
    end

    # Class of the owning model, or nil when unattached.
    def parent_model
      (r = parent_relation) && r.klass
    end

    # The owning record itself, or nil when unattached.
    def parent
      (r = parent_relation) && send(r.name)
    end
  end
end
Update: allow creation of parameters (drop the `deny :create` restriction)
module Setup
  # A generic key/value pair (with optional description and JSON metadata)
  # attached to some owner model through whichever belongs_to relation is set.
  # Creation is permitted (no `deny :create` in this version).
  class Parameter
    include CenitScoped
    include JsonMetadata
    include ChangedIf

    build_in_data_type
      .with(:key, :value, :description, :metadata)
      .referenced_by(:key)
      .and({ label: '{{key}}' })

    field :key, type: String, as: :name
    field :description, type: String
    field :value

    validates_presence_of :key

    # "key: value" display form.
    def to_s
      "#{key}: #{value}"
    end

    # First belongs_to association that is actually set (memoized).
    # NOTE(review): `||=` re-runs the scan whenever no parent was found.
    def parent_relation
      @parent_relation ||= reflect_on_all_associations(:belongs_to).detect { |r| send(r.name) }
    end

    # Inverse relation name on the owner, or nil when unattached.
    def location
      (r = parent_relation) && r.inverse_name
    end

    # Class of the owning model, or nil when unattached.
    def parent_model
      (r = parent_relation) && r.klass
    end

    # The owning record itself, or nil when unattached.
    def parent
      (r = parent_relation) && send(r.name)
    end
  end
end
|
# = Informations
#
# == License
#
# Ekylibre - Simple agricultural ERP
# Copyright (C) 2008-2009 Brice Texier, Thibaud Merigon
# Copyright (C) 2010-2012 Brice Texier
# Copyright (C) 2012-2017 Brice Texier, David Joulin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# == Table: tax_declarations
#
# accounted_at :datetime
# created_at :datetime not null
# creator_id :integer
# currency :string not null
# description :text
# financial_year_id :integer not null
# id :integer not null, primary key
# invoiced_on :date
# journal_entry_id :integer
# lock_version :integer default(0), not null
# mode :string not null
# number :string
# reference_number :string
# responsible_id :integer
# started_on :date not null
# state :string
# stopped_on :date not null
# updated_at :datetime not null
# updater_id :integer
#
# A tax (VAT) declaration covering a period of a financial year, composed of
# per-tax TaxDeclarationItems and bookkept into a journal entry once
# validated/sent. Lifecycle: draft -> validated -> sent.
class TaxDeclaration < Ekylibre::Record::Base
  include Attachable
  attr_readonly :currency
  refers_to :currency
  # debit: declare on invoicing; payment: declare on payment.
  enumerize :mode, in: %i[debit payment], predicates: true
  belongs_to :financial_year
  belongs_to :journal_entry, dependent: :destroy
  belongs_to :responsible, class_name: 'User'
  # belongs_to :tax_office, class_name: 'Entity'
  has_many :items, class_name: 'TaxDeclarationItem', dependent: :destroy, inverse_of: :tax_declaration
  # [VALIDATORS[ Do not edit these lines directly. Use `rake clean:validations`.
  validates :accounted_at, timeliness: { on_or_after: -> { Time.new(1, 1, 1).in_time_zone }, on_or_before: -> { Time.zone.now + 50.years } }, allow_blank: true
  validates :currency, :financial_year, :mode, presence: true
  validates :description, length: { maximum: 500_000 }, allow_blank: true
  validates :invoiced_on, timeliness: { on_or_after: -> { Time.new(1, 1, 1).in_time_zone }, on_or_before: -> { Time.zone.today + 50.years }, type: :date }, allow_blank: true
  validates :number, :reference_number, :state, length: { maximum: 500 }, allow_blank: true
  validates :started_on, presence: true, timeliness: { on_or_after: -> { Time.new(1, 1, 1).in_time_zone }, on_or_before: -> { Time.zone.today + 50.years }, type: :date }
  validates :stopped_on, presence: true, timeliness: { on_or_after: ->(tax_declaration) { tax_declaration.started_on || Time.new(1, 1, 1).in_time_zone }, on_or_before: -> { Time.zone.today + 50.years }, type: :date }
  # ]VALIDATORS]
  validates :number, uniqueness: true
  validates_associated :items
  acts_as_numbered
  # acts_as_affairable :tax_office
  accepts_nested_attributes_for :items, reject_if: proc { |item| item[:tax_id].blank? && item[:tax].blank? }, allow_destroy: true
  delegate :tax_declaration_mode, :tax_declaration_frequency,
           :tax_declaration_mode_payment?, :tax_declaration_mode_debit?,
           to: :financial_year

  # A declaration already sent to the tax office cannot be destroyed.
  protect on: :destroy do
    sent?
  end

  # Both transitions require at least one item (has_content?).
  state_machine :state, initial: :draft do
    state :draft
    state :validated
    state :sent
    event :propose do
      transition draft: :validated, if: :has_content?
    end
    event :confirm do
      transition validated: :sent, if: :has_content?
    end
  end

  # Derive mode, currency and period bounds from the financial year.
  before_validation(on: :create) do
    self.state ||= :draft
    if financial_year
      self.mode = financial_year.tax_declaration_mode
      self.currency = financial_year.currency
      # if tax_declarations exists for current financial_year, then get the last to compute started_on
      self.started_on = financial_year.next_tax_declaration_on
      # anyway, stopped_on is started_on + tax_declaration_frequency_duration
    end
    if started_on
      self.stopped_on ||= financial_year.tax_declaration_end_date(started_on)
    end
    self.invoiced_on ||= self.stopped_on
  end

  before_validation do
    self.created_at ||= Time.zone.now
  end

  after_create :compute!, if: :draft?

  # True when the declaration has at least one item.
  def has_content?
    items.any?
  end

  # Prints human name of current state
  def state_label
    self.class.state_machine.state(self.state.to_sym).human_name
  end

  # This callback bookkeeps the declaration depending on its state: debits
  # collected tax, credits deductions, then balances against the VAT
  # credit / VAT-to-pay account depending on the sign of the global balance.
  bookkeep do |b|
    journal = unsuppress { Journal.used_for_tax_declarations!(currency: currency) }
    b.journal_entry(journal, printed_on: invoiced_on, if: (has_content? && (validated? || sent?))) do |entry|
      label = tc(:bookkeep, resource: self.class.model_name.human, number: number, started_on: started_on.l, stopped_on: stopped_on.l)
      items.each do |item|
        entry.add_debit(label, item.tax.collect_account.id, item.collected_tax_amount.round(2), tax: item.tax, resource: item, as: :collect) unless item.collected_tax_amount.zero?
        entry.add_credit(label, item.tax.deduction_account.id, item.deductible_tax_amount.round(2), tax: item.tax, resource: item, as: :deduction) unless item.deductible_tax_amount.zero?
        entry.add_credit(label, item.tax.fixed_asset_deduction_account.id, item.fixed_asset_deductible_tax_amount.round(2), tax: item.tax, resource: item, as: :fixed_asset_deduction) unless item.fixed_asset_deductible_tax_amount.zero?
        entry.add_credit(label, item.tax.intracommunity_payable_account.id, item.intracommunity_payable_tax_amount.round(2), tax: item.tax, resource: item, as: :intracommunity_payable) unless item.intracommunity_payable_tax_amount.zero?
      end
      unless global_balance.zero?
        if global_balance < 0
          account = Account.find_or_import_from_nomenclature(:report_vat_credit)
          # account = Account.find_or_create_by!(number: '44567', usages: :deductible_vat)
        elsif global_balance > 0
          account = Account.find_or_import_from_nomenclature(:vat_to_pay)
          # account = Account.find_or_create_by!(number: '44551', usages: :collected_vat)
        end
        entry.add_credit(label, account, global_balance, as: :balance)
      end
    end
  end

  # Reference date of the declaration for reporting purposes.
  # NOTE(review): `stopped_on?` is a presence test on stopped_on, not a state
  # predicate — confirm the intended fallback ordering.
  def dealt_at
    (validated? ? invoiced_on : stopped_on? ? self.created_at : Time.zone.now)
  end

  # Traffic-light status: sent => :go, validated => :caution, else :stop.
  def status
    return :go if sent?
    return :caution if validated?
    :stop
  end

  # FIXME: Too french
  # Tax journal entry items of the financial year not yet attached to any
  # declaration item part (account numbers 445xx).
  def undeclared_tax_journal_entry_items
    JournalEntryItem
      .includes(:entry, account: %i[collected_taxes paid_taxes])
      .order('journal_entries.printed_on, accounts.number')
      .where(printed_on: financial_year.started_on..stopped_on)
      .where.not(id: TaxDeclarationItemPart.select('journal_entry_item_id'))
      .where.not(resource_type: 'TaxDeclarationItem')
      .where('accounts.number LIKE ?', '445%')
  end

  # Entry items used by this declaration but printed before its period started.
  def out_of_range_tax_journal_entry_items
    journal_entry_item_ids = TaxDeclarationItemPart.select('journal_entry_item_id').where(tax_declaration_item_id: items.select('id'))
    JournalEntryItem
      .includes(:entry)
      .order('journal_entries.printed_on')
      .where('journal_entry_items.printed_on < ?', started_on)
      .where(id: journal_entry_item_ids)
  end

  # FIXME: Too french
  # Revenue (7x) entry items in the period with no attached resource.
  def unidentified_revenues_journal_entry_items
    JournalEntryItem.includes(:entry, :account).order('journal_entries.printed_on, accounts.number').where(printed_on: started_on..stopped_on).where('accounts.number LIKE ? AND journal_entry_items.resource_id is null', '7%')
  end

  # FIXME: Too french
  # Expense (6x) entry items in the period with no attached resource.
  def unidentified_expenses_journal_entry_items
    JournalEntryItem.includes(:entry, :account).order('journal_entries.printed_on, accounts.number').where(printed_on: started_on..stopped_on).where('accounts.number LIKE ? AND journal_entry_items.resource_id is null', '6%')
  end

  # Sum of deductible tax across items (in-memory).
  def deductible_tax_amount_balance
    items.map(&:deductible_tax_amount).compact.sum
  end

  # Sum of collected tax across items (in-memory).
  def collected_tax_amount_balance
    items.map(&:collected_tax_amount).compact.sum
  end

  # Net tax balance (SQL sum), rounded to 2 decimals; sign picks the
  # balancing account in bookkeep.
  def global_balance
    items.sum(:balance_tax_amount).round(2)
  end

  # Compute tax declaration with its items
  def compute!
    set_entry_items_tax_modes
    taxes = Tax.order(:name)
    # Removes unwanted tax declaration item
    items.where.not(tax: taxes).find_each(&:destroy)
    # Create or update other items
    taxes.find_each do |tax|
      items.find_or_initialize_by(tax: tax).compute!
    end
  end

  private

  # Stamps a tax_declaration_mode on every taxed entry item (up to stopped_on)
  # that does not have one yet.
  def set_entry_items_tax_modes
    all = JournalEntryItem
          .where.not(tax_id: nil)
          .where('printed_on <= ?', stopped_on)
          .where(tax_declaration_mode: nil)
    set_non_purchase_entry_items_tax_modes all.where.not(resource_type: 'PurchaseItem')
    set_purchase_entry_items_tax_modes all.where(resource_type: 'PurchaseItem')
  end

  # Non-purchase items follow the financial year's declaration mode.
  def set_non_purchase_entry_items_tax_modes(entry_items)
    entry_items.update_all tax_declaration_mode: financial_year.tax_declaration_mode
  end

  # Purchase items follow the purchase's tax payability:
  # at_invoicing => debit, at_paying => payment.
  def set_purchase_entry_items_tax_modes(entry_items)
    { 'at_invoicing' => 'debit', 'at_paying' => 'payment' }.each do |tax_payability, declaration_mode|
      entry_items
        .joins('INNER JOIN purchase_items pi ON pi.id = journal_entry_items.resource_id')
        .joins('INNER JOIN purchases p ON p.id = pi.purchase_id')
        .where('p.tax_payability' => tax_payability)
        .update_all tax_declaration_mode: declaration_mode
    end
  end
end
Improve performance of TaxDeclaration#destroy by bulk-deleting dependent items with raw SQL
# = Informations
#
# == License
#
# Ekylibre - Simple agricultural ERP
# Copyright (C) 2008-2009 Brice Texier, Thibaud Merigon
# Copyright (C) 2010-2012 Brice Texier
# Copyright (C) 2012-2017 Brice Texier, David Joulin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# == Table: tax_declarations
#
# accounted_at :datetime
# created_at :datetime not null
# creator_id :integer
# currency :string not null
# description :text
# financial_year_id :integer not null
# id :integer not null, primary key
# invoiced_on :date
# journal_entry_id :integer
# lock_version :integer default(0), not null
# mode :string not null
# number :string
# reference_number :string
# responsible_id :integer
# started_on :date not null
# state :string
# stopped_on :date not null
# updated_at :datetime not null
# updater_id :integer
#
# A tax (VAT) declaration covering a period of a financial year, composed of
# per-tax TaxDeclarationItems and bookkept into a journal entry once
# validated/sent. This version adds a custom #destroy that bulk-deletes
# dependent items in SQL instead of instantiating them one by one.
class TaxDeclaration < Ekylibre::Record::Base
  include Attachable
  attr_readonly :currency
  refers_to :currency
  # debit: declare on invoicing; payment: declare on payment.
  enumerize :mode, in: %i[debit payment], predicates: true
  belongs_to :financial_year
  belongs_to :journal_entry, dependent: :destroy
  belongs_to :responsible, class_name: 'User'
  # belongs_to :tax_office, class_name: 'Entity'
  has_many :items, class_name: 'TaxDeclarationItem', dependent: :destroy, inverse_of: :tax_declaration
  # [VALIDATORS[ Do not edit these lines directly. Use `rake clean:validations`.
  validates :accounted_at, timeliness: { on_or_after: -> { Time.new(1, 1, 1).in_time_zone }, on_or_before: -> { Time.zone.now + 50.years } }, allow_blank: true
  validates :currency, :financial_year, :mode, presence: true
  validates :description, length: { maximum: 500_000 }, allow_blank: true
  validates :invoiced_on, timeliness: { on_or_after: -> { Time.new(1, 1, 1).in_time_zone }, on_or_before: -> { Time.zone.today + 50.years }, type: :date }, allow_blank: true
  validates :number, :reference_number, :state, length: { maximum: 500 }, allow_blank: true
  validates :started_on, presence: true, timeliness: { on_or_after: -> { Time.new(1, 1, 1).in_time_zone }, on_or_before: -> { Time.zone.today + 50.years }, type: :date }
  validates :stopped_on, presence: true, timeliness: { on_or_after: ->(tax_declaration) { tax_declaration.started_on || Time.new(1, 1, 1).in_time_zone }, on_or_before: -> { Time.zone.today + 50.years }, type: :date }
  # ]VALIDATORS]
  validates :number, uniqueness: true
  validates_associated :items
  acts_as_numbered
  # acts_as_affairable :tax_office
  accepts_nested_attributes_for :items, reject_if: proc { |item| item[:tax_id].blank? && item[:tax].blank? }, allow_destroy: true
  delegate :tax_declaration_mode, :tax_declaration_frequency,
           :tax_declaration_mode_payment?, :tax_declaration_mode_debit?,
           to: :financial_year

  # A declaration already sent to the tax office cannot be destroyed.
  protect on: :destroy do
    sent?
  end

  # Both transitions require at least one item (has_content?).
  state_machine :state, initial: :draft do
    state :draft
    state :validated
    state :sent
    event :propose do
      transition draft: :validated, if: :has_content?
    end
    event :confirm do
      transition validated: :sent, if: :has_content?
    end
  end

  # Derive mode, currency and period bounds from the financial year.
  before_validation(on: :create) do
    self.state ||= :draft
    if financial_year
      self.mode = financial_year.tax_declaration_mode
      self.currency = financial_year.currency
      # if tax_declarations exists for current financial_year, then get the last to compute started_on
      self.started_on = financial_year.next_tax_declaration_on
      # anyway, stopped_on is started_on + tax_declaration_frequency_duration
    end
    if started_on
      self.stopped_on ||= financial_year.tax_declaration_end_date(started_on)
    end
    self.invoiced_on ||= self.stopped_on
  end

  before_validation do
    self.created_at ||= Time.zone.now
  end

  after_create :compute!, if: :draft?

  # Performance override: deletes item parts and items with two bulk SQL
  # statements before running the normal destroy (which would otherwise
  # instantiate and destroy each dependent item).
  # NOTE(review): `#{id}` is interpolated into SQL — safe only because id is
  # the integer primary key of a persisted record; confirm.
  def destroy
    ActiveRecord::Base.transaction do
      ActiveRecord::Base.connection.execute("DELETE FROM tax_declaration_item_parts tdip USING tax_declaration_items tdi WHERE tdip.tax_declaration_item_id = tdi.id AND tdi.tax_declaration_id = #{id}")
      ActiveRecord::Base.connection.execute("DELETE FROM tax_declaration_items WHERE tax_declaration_id = #{id}")
      # Refresh the association so dependent: :destroy has nothing left to do.
      items.reload
      super
    end
  end

  # True when the declaration has at least one item.
  def has_content?
    items.any?
  end

  # Prints human name of current state
  def state_label
    self.class.state_machine.state(self.state.to_sym).human_name
  end

  # This callback bookkeeps the declaration depending on its state: debits
  # collected tax, credits deductions, then balances against the VAT
  # credit / VAT-to-pay account depending on the sign of the global balance.
  bookkeep do |b|
    journal = unsuppress { Journal.used_for_tax_declarations!(currency: currency) }
    b.journal_entry(journal, printed_on: invoiced_on, if: (has_content? && (validated? || sent?))) do |entry|
      label = tc(:bookkeep, resource: self.class.model_name.human, number: number, started_on: started_on.l, stopped_on: stopped_on.l)
      items.each do |item|
        entry.add_debit(label, item.tax.collect_account.id, item.collected_tax_amount.round(2), tax: item.tax, resource: item, as: :collect) unless item.collected_tax_amount.zero?
        entry.add_credit(label, item.tax.deduction_account.id, item.deductible_tax_amount.round(2), tax: item.tax, resource: item, as: :deduction) unless item.deductible_tax_amount.zero?
        entry.add_credit(label, item.tax.fixed_asset_deduction_account.id, item.fixed_asset_deductible_tax_amount.round(2), tax: item.tax, resource: item, as: :fixed_asset_deduction) unless item.fixed_asset_deductible_tax_amount.zero?
        entry.add_credit(label, item.tax.intracommunity_payable_account.id, item.intracommunity_payable_tax_amount.round(2), tax: item.tax, resource: item, as: :intracommunity_payable) unless item.intracommunity_payable_tax_amount.zero?
      end
      unless global_balance.zero?
        if global_balance < 0
          account = Account.find_or_import_from_nomenclature(:report_vat_credit)
          # account = Account.find_or_create_by!(number: '44567', usages: :deductible_vat)
        elsif global_balance > 0
          account = Account.find_or_import_from_nomenclature(:vat_to_pay)
          # account = Account.find_or_create_by!(number: '44551', usages: :collected_vat)
        end
        entry.add_credit(label, account, global_balance, as: :balance)
      end
    end
  end

  # Reference date of the declaration for reporting purposes.
  # NOTE(review): `stopped_on?` is a presence test on stopped_on, not a state
  # predicate — confirm the intended fallback ordering.
  def dealt_at
    (validated? ? invoiced_on : stopped_on? ? self.created_at : Time.zone.now)
  end

  # Traffic-light status: sent => :go, validated => :caution, else :stop.
  def status
    return :go if sent?
    return :caution if validated?
    :stop
  end

  # FIXME: Too french
  # Tax journal entry items of the financial year not yet attached to any
  # declaration item part (account numbers 445xx).
  def undeclared_tax_journal_entry_items
    JournalEntryItem
      .includes(:entry, account: %i[collected_taxes paid_taxes])
      .order('journal_entries.printed_on, accounts.number')
      .where(printed_on: financial_year.started_on..stopped_on)
      .where.not(id: TaxDeclarationItemPart.select('journal_entry_item_id'))
      .where.not(resource_type: 'TaxDeclarationItem')
      .where('accounts.number LIKE ?', '445%')
  end

  # Entry items used by this declaration but printed before its period started.
  def out_of_range_tax_journal_entry_items
    journal_entry_item_ids = TaxDeclarationItemPart.select('journal_entry_item_id').where(tax_declaration_item_id: items.select('id'))
    JournalEntryItem
      .includes(:entry)
      .order('journal_entries.printed_on')
      .where('journal_entry_items.printed_on < ?', started_on)
      .where(id: journal_entry_item_ids)
  end

  # FIXME: Too french
  # Revenue (7x) entry items in the period with no attached resource.
  def unidentified_revenues_journal_entry_items
    JournalEntryItem.includes(:entry, :account).order('journal_entries.printed_on, accounts.number').where(printed_on: started_on..stopped_on).where('accounts.number LIKE ? AND journal_entry_items.resource_id is null', '7%')
  end

  # FIXME: Too french
  # Expense (6x) entry items in the period with no attached resource.
  def unidentified_expenses_journal_entry_items
    JournalEntryItem.includes(:entry, :account).order('journal_entries.printed_on, accounts.number').where(printed_on: started_on..stopped_on).where('accounts.number LIKE ? AND journal_entry_items.resource_id is null', '6%')
  end

  # Sum of deductible tax across items (in-memory).
  def deductible_tax_amount_balance
    items.map(&:deductible_tax_amount).compact.sum
  end

  # Sum of collected tax across items (in-memory).
  def collected_tax_amount_balance
    items.map(&:collected_tax_amount).compact.sum
  end

  # Net tax balance (SQL sum), rounded to 2 decimals; sign picks the
  # balancing account in bookkeep.
  def global_balance
    items.sum(:balance_tax_amount).round(2)
  end

  # Compute tax declaration with its items
  def compute!
    set_entry_items_tax_modes
    taxes = Tax.order(:name)
    # Removes unwanted tax declaration item
    items.where.not(tax: taxes).find_each(&:destroy)
    # Create or update other items
    taxes.find_each do |tax|
      items.find_or_initialize_by(tax: tax).compute!
    end
  end

  private

  # Stamps a tax_declaration_mode on every taxed entry item (up to stopped_on)
  # that does not have one yet.
  def set_entry_items_tax_modes
    all = JournalEntryItem
          .where.not(tax_id: nil)
          .where('printed_on <= ?', stopped_on)
          .where(tax_declaration_mode: nil)
    set_non_purchase_entry_items_tax_modes all.where.not(resource_type: 'PurchaseItem')
    set_purchase_entry_items_tax_modes all.where(resource_type: 'PurchaseItem')
  end

  # Non-purchase items follow the financial year's declaration mode.
  def set_non_purchase_entry_items_tax_modes(entry_items)
    entry_items.update_all tax_declaration_mode: financial_year.tax_declaration_mode
  end

  # Purchase items follow the purchase's tax payability:
  # at_invoicing => debit, at_paying => payment.
  def set_purchase_entry_items_tax_modes(entry_items)
    { 'at_invoicing' => 'debit', 'at_paying' => 'payment' }.each do |tax_payability, declaration_mode|
      entry_items
        .joins('INNER JOIN purchase_items pi ON pi.id = journal_entry_items.resource_id')
        .joins('INNER JOIN purchases p ON p.id = pi.purchase_id')
        .where('p.tax_payability' => tax_payability)
        .update_all tax_declaration_mode: declaration_mode
    end
  end
end
|
require "tyne_core/extensions/issues/workflow"
module TyneCore
  # Represents an issue that affects a particular project.
  # Issues can be part of a sprint and they have an issue type.
  class Issue < ActiveRecord::Base
    include TyneCore::Extensions::Issues::Workflow
    include TyneCore::Extensions::Votable

    belongs_to :reported_by, :class_name => "TyneAuth::User"
    belongs_to :project, :class_name => "TyneCore::Project"
    belongs_to :issue_type, :class_name => "TyneCore::IssueType"
    belongs_to :issue_priority, :class_name => "TyneCore::IssuePriority"
    belongs_to :assigned_to, :class_name => "TyneAuth::User"
    has_many :comments, :class_name => "TyneCore::Comment"

    attr_accessible :project_id, :summary, :description, :issue_type_id, :issue_priority_id, :assigned_to_id

    validates :project_id, :summary, :issue_type_id, :number, :presence => true
    validates :number, :uniqueness => { :scope => :project_id }
    validate :security_assigned_to

    # NOTE(review): this default_scope eager-loads five associations on EVERY
    # Issue query — verify the cost is acceptable for all call sites.
    default_scope includes(:project).includes(:reported_by).includes(:issue_type).includes(:comments).includes(:issue_priority)

    after_initialize :set_defaults
    before_validation :set_number, :on => :create

    # NOTE(review): `sord` is interpolated into SQL ORDER BY — callers must
    # pass only "asc"/"desc", never raw user input.
    scope :sort_by_issue_type, lambda { |sord| joins(:issue_type).order("tyne_core_issue_types.name #{sord}") }
    scope :sort_by_issue_priority, lambda { |sord| joins(:issue_priority).order("tyne_core_issue_priorities.number #{sord}") }

    # Returns the issue number prefixed with the project key
    # to better identify issues.
    #
    # e.g. TYNE-1337
    #
    # @return [String] issue-key
    def key
      "#{project.key}-#{number}"
    end

    # True when the issue sits in a terminal workflow state.
    def closed?
      %w(done invalid).include? self.state
    end

    private

    # Defaults the issue type to the first IssueType when the column exists.
    # NOTE(review): `IssueType.first.id` raises if no IssueType exists —
    # confirm seed data guarantees at least one.
    def set_defaults
      self.issue_type_id ||= TyneCore::IssueType.first.id if attributes.include?("issue_type_id")
    end

    # Next sequential issue number within the project (starts at 1).
    def set_number
      self.number = (project.issues.maximum('number') || 0) + 1
    end

    # Validates that the assignee (when set) is one of the project's workers.
    def security_assigned_to
      return true if self.assigned_to_id.blank?
      users = project.workers.uniq { |x| x.user_id }.map { |x| x.user_id }
      errors.add(:assigned_to_id, :value_not_allowed) unless users.include?(self.assigned_to_id)
    end
  end
end
Changes the custom sort scope to use `includes` instead of `joins`, so that issues without a priority are no longer excluded from the sorted results.
require "tyne_core/extensions/issues/workflow"
module TyneCore
# An issue (ticket) that belongs to a project. Issues can be part of a
# sprint, carry a type and an optional priority, and may be assigned to a
# project worker.
class Issue < ActiveRecord::Base
include TyneCore::Extensions::Issues::Workflow
include TyneCore::Extensions::Votable
belongs_to :reported_by, class_name: "TyneAuth::User"
belongs_to :project, class_name: "TyneCore::Project"
belongs_to :issue_type, class_name: "TyneCore::IssueType"
belongs_to :issue_priority, class_name: "TyneCore::IssuePriority"
belongs_to :assigned_to, class_name: "TyneAuth::User"
has_many :comments, class_name: "TyneCore::Comment"
attr_accessible :project_id, :summary, :description, :issue_type_id, :issue_priority_id, :assigned_to_id
validates :project_id, :summary, :issue_type_id, :number, presence: true
validates :number, uniqueness: { scope: :project_id }
validate :security_assigned_to
# Eager-load every association shown on issue listings.
default_scope includes(:project, :reported_by, :issue_type, :comments, :issue_priority)
after_initialize :set_defaults
before_validation :set_number, on: :create
scope :sort_by_issue_type, ->(sord) { joins(:issue_type).order("tyne_core_issue_types.name #{sord}") }
# `includes` (not `joins`) keeps issues without a priority in the result set.
scope :sort_by_issue_priority, ->(sord) { includes(:issue_priority).order("tyne_core_issue_priorities.number #{sord}") }
# Issue number prefixed with the project key, e.g. "TYNE-1337".
#
# @return [String] issue-key
def key
[project.key, number].join("-")
end
# @return [Boolean] whether the issue reached a terminal workflow state
def closed?
["done", "invalid"].include?(state)
end
private
# New records default to the first available issue type.
def set_defaults
return unless attributes.include?("issue_type_id")
self.issue_type_id ||= TyneCore::IssueType.first.id
end
# Next free sequence number within the owning project.
def set_number
highest = project.issues.maximum('number') || 0
self.number = highest + 1
end
# The assignee, when set, must be one of the project's workers.
def security_assigned_to
return true if assigned_to_id.blank?
worker_ids = project.workers.uniq { |worker| worker.user_id }.map { |worker| worker.user_id }
errors.add(:assigned_to_id, :value_not_allowed) unless worker_ids.include?(assigned_to_id)
end
end
end
|
# encoding: utf-8
require_relative 'db_queries'
require_dependency 'carto/db/database'
require_dependency 'carto/db/user_schema_mover'
require 'cartodb/sequel_connection_helper'
# To avoid collisions with User model
module CartoDB
# To avoid collisions with User class
module UserModule
class DBService
include CartoDB::MiniSequel
extend CartoDB::SequelConnectionHelper
# Also default schema for new users
SCHEMA_PUBLIC = 'public'.freeze
# Schema holding the cartodb PostgreSQL extension objects.
SCHEMA_CARTODB = 'cartodb'.freeze
# Scratch schema used by the importer (see create_importer_schema).
SCHEMA_IMPORTER = 'cdb_importer'.freeze
# Schema used by geocoding jobs (see create_geocoding_schema).
SCHEMA_GEOCODING = 'cdb'.freeze
# Schema exposing the dataservices (geocoder) client extension.
SCHEMA_CDB_DATASERVICES_API = 'cdb_dataservices_client'.freeze
SCHEMA_AGGREGATION_TABLES = 'aggregation'.freeze
# Pinned version of cdb_dataservices_client (see install_geocoder_api_extension).
CDB_DATASERVICES_CLIENT_VERSION = '0.5.0'.freeze
# @param user [::User] owner of the database this service manages
# @raise [RuntimeError] when user is nil
def initialize(user)
raise "User nil" unless user
@user = user
@queries = CartoDB::UserModule::DBQueries.new(@user)
end
def queries
@queries
end
# Idempotent database bootstrap for a user: search_path, grants, statement
# timeouts, org membership, schema setup and the Varnish invalidation function.
# This method is used both upon user creation and by the UserMover
# All methods called inside should allow to be executed multiple times without errors
def configure_database
set_database_search_path
grant_user_in_database
set_statement_timeouts
# INFO: Added to everyone because eases migration of normal users to org owners
# and non-org users just don't use it
set_user_as_organization_member
# Users whose schema is "public" are single-user accounts; everyone else
# lives in a dedicated schema inside a shared organization database.
if @user.database_schema == SCHEMA_PUBLIC
setup_single_user_schema
else
setup_organization_user_schema
end
create_function_invalidate_varnish
grant_publicuser_in_database
end
# Schema setup for a non-organization (single) user.
# All methods called inside should allow to be executed multiple times without errors
def setup_single_user_schema
set_user_privileges_at_db
rebuild_quota_trigger
end
# Schema setup for a user living in a shared organization database.
# All methods called inside should allow to be executed multiple times without errors
def setup_organization_user_schema
# WIP
# reset_user_schema_permissions
reset_schema_owner
set_user_privileges_at_db
set_user_as_organization_member
rebuild_quota_trigger
# INFO: organization privileges are set for org_member_role, which is assigned to each org user
if @user.organization_owner?
setup_organization_owner
end
# Rebuild the geocoder api user config to reflect that is an organization user
install_and_configure_geocoder_api_extension
end
# INFO: main setup for non-org users
# Creates role + database, installs extensions and configures permissions.
# NOTE(review): the Thread.new { ... }.join wrapper presumably isolates
# connection state from the caller's thread — confirm before removing.
def new_non_organization_user_main_db_setup
Thread.new do
create_db_user
create_user_db
grant_owner_in_database
end.join
create_importer_schema
create_geocoding_schema
load_cartodb_functions
install_and_configure_geocoder_api_extension
# We reset the connections to this database to be sure the change in default search_path is effective
reset_pooled_connections
reset_database_permissions # Reset privileges
configure_database
revoke_cdb_conf_access
end
# INFO: main setup for org users
# Only creates the role; the org database already exists — the user gets a
# dedicated schema inside it.
def new_organization_user_main_db_setup
Thread.new do
create_db_user
end.join
create_own_schema
setup_organization_user_schema
install_and_configure_geocoder_api_extension
# We reset the connections to this database to be sure the change in default search_path is effective
reset_pooled_connections
revoke_cdb_conf_access
end
# Grants the user's role its privileges across the relevant schemas.
# For organization users the shared-schema privileges (cartodb/public/
# importer/geocoding, geo columns, raster) are handled via the org member
# role instead, so only the own-schema grants run here.
def set_user_privileges_at_db # MU
# INFO: organization permission on public schema is handled through role assignment
unless @user.organization_user?
set_user_privileges_in_cartodb_schema
set_user_privileges_in_public_schema
end
set_user_privileges_in_own_schema
set_privileges_to_publicuser_in_own_schema
unless @user.organization_user?
set_user_privileges_in_importer_schema
set_user_privileges_in_geocoding_schema
set_geo_columns_privileges
set_raster_privileges
end
end
# Makes the user's database read-only at the database level.
def disable_writes
# NOTE: This will not affect already opened connections. Run `terminate_database_connections` method after this
# to ensure no more writes are possible.
@user.in_database(as: :cluster_admin) do |database|
database.run(%{
ALTER DATABASE "#{@user.database_name}"
SET default_transaction_read_only = 'on'
})
end
end
# Restores the database's default (writable) transaction mode.
def enable_writes
# NOTE: This will not affect already opened connections. Run `terminate_database_connections` method after this
# to ensure the restored setting takes effect for every session.
@user.in_database(as: :cluster_admin) do |database|
database.run(%{
ALTER DATABASE "#{@user.database_name}"
SET default_transaction_read_only = default
})
end
end
# @return [Boolean] true when default_transaction_read_only is 'off'
def writes_enabled?
@user.in_database(as: :superuser) do |database|
database.fetch(%{SHOW default_transaction_read_only}).first[:default_transaction_read_only] == 'off'
end
end
# Cartodb functions
# Installs/upgrades the cartodb extension and its dependencies (plpythonu,
# postgis), then rebuilds the quota trigger.
#
# @param statement_timeout [String, nil] temporary statement_timeout for the install
# @param cdb_extension_target_version [String, nil] target version (default handled downstream)
def load_cartodb_functions(statement_timeout = nil, cdb_extension_target_version = nil)
add_python
# Install dependencies of cartodb extension
@user.in_database(as: :superuser, no_cartodb_in_schema: true) do |db|
db.transaction do
unless statement_timeout.nil?
old_timeout = db.fetch("SHOW statement_timeout;").first[:statement_timeout]
db.run("SET statement_timeout TO '#{statement_timeout}';")
end
# "FROM unpackaged" adopts pre-extension installs of these languages/libs.
db.run('CREATE EXTENSION plpythonu FROM unpackaged') unless db.fetch(%{
SELECT count(*) FROM pg_extension WHERE extname='plpythonu'
}).first[:count] > 0
db.run('CREATE EXTENSION postgis FROM unpackaged') unless db.fetch(%{
SELECT count(*) FROM pg_extension WHERE extname='postgis'
}).first[:count] > 0
unless statement_timeout.nil?
db.run("SET statement_timeout TO '#{old_timeout}';")
end
end
end
upgrade_cartodb_postgres_extension(statement_timeout, cdb_extension_target_version)
rebuild_quota_trigger
end
# Re-installs the user's quota trigger using a superuser connection.
def rebuild_quota_trigger
@user.in_database(as: :superuser) do |db|
rebuild_quota_trigger_with_database(db)
end
end
# Re-installs the quota trigger on an already-open connection.
#
# @param db [Sequel::Database] superuser connection to the user's database
def rebuild_quota_trigger_with_database(db)
# Post-MU extensions define the quota function per-schema; drop the legacy
# public one so it does not shadow the schema-qualified version.
if !cartodb_extension_version_pre_mu? && @user.has_organization?
db.run("DROP FUNCTION IF EXISTS public._CDB_UserQuotaInBytes();")
end
db.transaction do
# NOTE: this has been written to work for both databases that switched to "cartodb" extension
# and those before the switch.
# In the future we should guarantee that exntension lives in cartodb schema so we don't need to set
# a search_path before
search_path = db.fetch("SHOW search_path;").first[:search_path]
db.run("SET search_path TO #{SCHEMA_CARTODB}, #{SCHEMA_PUBLIC};")
if cartodb_extension_version_pre_mu?
db.run("SELECT CDB_SetUserQuotaInBytes(#{@user.quota_in_bytes});")
else
db.run("SELECT CDB_SetUserQuotaInBytes('#{@user.database_schema}', #{@user.quota_in_bytes});")
end
db.run("SET search_path TO #{search_path};")
end
end
# Search path for this user's connections.
#
# @param user_schema [String, nil] schema to place first; defaults to the user's schema
# @param quote_user_schema [Boolean] double-quote the user schema when true
# @return [String] ordered, comma-separated search_path
def build_search_path(user_schema = nil, quote_user_schema = true)
schema = user_schema || @user.database_schema
DBService.build_search_path(schema, quote_user_schema)
end
# Centralized method to provide the (ordered) search_path
#
# @param user_schema [String] schema listed first in the path
# @param quote_user_schema [Boolean] double-quote the user schema when true
# @return [String] comma-separated search_path
def self.build_search_path(user_schema, quote_user_schema = true)
schema_part = quote_user_schema ? %{"#{user_schema}"} : "#{user_schema}"
"#{schema_part}, #{SCHEMA_CARTODB}, #{SCHEMA_CDB_DATASERVICES_API}, #{SCHEMA_PUBLIC}"
end
# Persists the computed search_path as the default for the user's role.
def set_database_search_path
@user.in_database(as: :superuser) do |database|
database.run(%{
ALTER USER "#{@user.database_username}"
SET search_path = #{build_search_path}
})
end
end
# Creates the importer scratch schema (idempotence handled by create_schema).
def create_importer_schema
# Consistency: use the declared constant instead of repeating the literal.
create_schema(SCHEMA_IMPORTER)
end
# Creates the geocoding schema (idempotence handled by create_schema).
def create_geocoding_schema
# Consistency: use the declared constant instead of repeating the literal.
create_schema(SCHEMA_GEOCODING)
end
# Creates the user's own schema, owned by the user's database role.
def create_user_schema
create_schema(@user.database_schema, @user.database_username)
end
# Creates a schema (optionally owned by `role`) in the user's database.
#
# @param schema [String]
# @param role [String, nil] owner role; nil leaves the default owner
def create_schema(schema, role = nil)
@user.in_database(as: :superuser) do |db|
Carto::Db::Database.new(@user.database_name, db).create_schema(schema, role)
end
end
# Registers the user as an organization admin inside the database.
def setup_owner_permissions
@user.in_database(as: :superuser) do |database|
database.run(%{ SELECT cartodb.CDB_Organization_AddAdmin('#{@user.username}') })
end
end
# Revokes access to the CDB_Conf internals from the user's roles (db user,
# org member group role when owner, and publicuser).
#
# @return [Array<String>] accumulated non-fatal error messages (empty on success)
def revoke_cdb_conf_access
errors = []
roles = [@user.database_username]
if @user.organization_owner?
begin
roles << organization_member_group_role_member_name
rescue
# Legacy organizations may lack the member group role; warn and continue.
errors << "WARN: Error fetching org member role (does #{@user.organization.name} has that role?)"
end
end
roles << CartoDB::PUBLIC_DB_USER
queries = []
# `each` (not `map`): these loops are run purely for their side effects.
roles.each do |db_role|
queries.concat(@queries.revoke_permissions_on_cartodb_conf_queries(db_role))
end
queries.each do |query|
@user.in_database(as: :superuser) do |database|
begin
database.run(query)
rescue => e
# We can find organizations not yet upgraded for any reason or missing roles
errors << e.message
end
end
end
errors
rescue => e
# For broken organizations. BUGFIX: `name` was undefined here (it raised
# NoMethodError from inside the rescue); report the user explicitly.
["FATAL ERROR for #{@user.username}: #{e.message}"]
end
# Creates the per-user public role, inheriting from publicuser, with the
# user's search_path as its default.
def create_public_db_user
@user.in_database(as: :superuser) do |database|
database.run(%{ CREATE USER "#{@user.database_public_username}" LOGIN INHERIT })
database.run(%{ GRANT publicuser TO "#{@user.database_public_username}" })
database.run(%{ ALTER USER "#{@user.database_public_username}" SET search_path = #{build_search_path} })
end
end
# Table names actually present in a schema, per information_schema.
# NOTE(review): `schema` is interpolated into SQL unescaped — callers pass
# internal schema names only; confirm before exposing to user input.
#
# @param schema [String]
# @return [Array<String>]
def tables_effective(schema = 'public')
@user.in_database do |user_database|
user_database.synchronize do |conn|
query = "select table_name::text from information_schema.tables where table_schema = '#{schema}'"
tables = user_database[query].all.map { |i| i[:table_name] }
# `return` exits the method from inside both blocks with the result.
return tables
end
end
end
# Drops every object owned by the given role on the given connection.
#
# @param conn [Sequel::Database] connection with enough privileges
# @param role [String] role name (double-quoted in the statement)
def drop_owned_by_user(conn, role)
conn.run(%{DROP OWNED BY "#{role}"})
end
# Drops a database role. If the drop fails because objects in another
# database depend on the role, revokes/drops those dependencies once and
# retries; a second failure for the same database re-raises.
#
# @param conn [Sequel::Database, nil] defaults to a cluster_admin connection
# @param username [String, nil] defaults to the user's role name
def drop_user(conn = nil, username = nil)
conn ||= @user.in_database(as: :cluster_admin)
username ||= @user.database_username
database_with_conflicts = nil
retried = false
begin
conn.run("DROP USER IF EXISTS \"#{username}\"")
rescue => e
if !retried && e.message =~ /cannot be dropped because some objects depend on it/
retried = true
# Extract the conflicting database name from the error message.
e.message =~ /object[s]? in database (.*)$/
if database_with_conflicts == $1
raise e
else
database_with_conflicts = $1
revoke_all_on_database_from(conn, database_with_conflicts, username)
revoke_all_memberships_on_database_to_role(conn, username)
drop_owned_by_user(conn, username)
conflict_database_conn = @user.in_database(
as: :cluster_admin,
'database' => database_with_conflicts
)
drop_owned_by_user(conflict_database_conn, username)
['cdb', 'cdb_importer', 'cartodb', 'public', @user.database_schema].each do |s|
drop_users_privileges_in_schema(s, [username])
end
retry
end
else
raise e
end
end
end
# Org users share the same db, so must only delete the schema unless he's the owner
#
# @param org_id [String] id of the organization the user belongs to
# @param is_owner [Boolean] when true, also drops shared schemas and the database
# @raise [CartoDB::BaseCartoDBError] when org_id is nil
def drop_organization_user(org_id, is_owner = false)
raise CartoDB::BaseCartoDBError.new('Tried to delete an organization user without org id') if org_id.nil?
Thread.new do
@user.in_database(as: :superuser) do |database|
if is_owner
schemas = ['cdb', 'cdb_importer', 'cartodb', 'public', @user.database_schema] +
::User.select(:database_schema).where(organization_id: org_id).all.collect(&:database_schema)
schemas.uniq.each do |s|
drop_users_privileges_in_schema(
s,
[@user.database_username, @user.database_public_username, CartoDB::PUBLIC_DB_USER])
end
# To avoid "cannot drop function" errors
database.run("drop extension if exists plproxy cascade")
end
# If user is in an organization should never have public schema, but to be safe (& tests which stub stuff)
unless @user.database_schema == SCHEMA_PUBLIC
database.run(%{ DROP FUNCTION IF EXISTS "#{@user.database_schema}"._CDB_UserQuotaInBytes()})
drop_all_functions_from_schema(@user.database_schema)
database.run(%{ DROP SCHEMA IF EXISTS "#{@user.database_schema}" })
end
end
conn = @user.in_database(as: :cluster_admin)
CartoDB::UserModule::DBService.terminate_database_connections(@user.database_name, @user.database_host)
# If user is in an organization should never have public schema, but to be safe (& tests which stub stuff)
unless @user.database_schema == SCHEMA_PUBLIC
drop_user(conn, @user.database_public_username)
end
if is_owner
conn.run("DROP DATABASE \"#{@user.database_name}\"")
end
drop_user(conn)
end.join
monitor_user_notification
end
# Stores the groups (org metadata) API endpoint credentials in CDB_Conf so
# the extension can reach the metadata service. Logs a debug notice when the
# configuration is incomplete instead of failing.
def configure_extension_org_metadata_api_endpoint
config = Cartodb.config[:org_metadata_api]
host = config['host']
port = config['port']
username = config['username']
password = config['password']
timeout = config.fetch('timeout', 10)
if host.present? && port.present? && username.present? && password.present?
conf_sql = %{
SELECT cartodb.CDB_Conf_SetConf('groups_api',
'{ \"host\": \"#{host}\", \"port\": #{port}, \"timeout\": #{timeout}, \"username\": \"#{username}\",
\"password\": \"#{password}\"}'::json
)
}
@user.in_database(as: :superuser) do |database|
database.fetch(conf_sql).first
end
else
CartoDB.notify_debug("org_metadata_api configuration missing", user_id: @user.id, config: config)
end
end
# Installs the geocoder client extension (when configured) and refreshes the
# default search_path of the user's roles so the new schema is visible.
#
# @return [Boolean] true on success, false when installation failed (error is notified)
def install_and_configure_geocoder_api_extension
geocoder_api_config = Cartodb.get_config(:geocoder, 'api')
# If there's no config we assume there's no need to install the
# geocoder client as it is an independent API
return if geocoder_api_config.blank?
install_geocoder_api_extension(geocoder_api_config)
@user.in_database(as: :superuser).run("ALTER USER \"#{@user.database_username}\"
SET search_path TO #{build_search_path}")
@user.in_database(as: :superuser).run("ALTER USER \"#{@user.database_public_username}\"
SET search_path TO #{build_search_path}") if @user.organization_user?
return true
rescue => e
CartoDB.notify_error(
'Error installing and configuring geocoder api extension',
error: e.inspect, user: @user
)
return false
end
# Installs/updates cdb_dataservices_client (pinned version) plus plproxy and
# writes the geocoder server + entity configuration.
def install_geocoder_api_extension(geocoder_api_config)
@user.in_database(as: :superuser) do |db|
db.transaction do
db.run('CREATE EXTENSION IF NOT EXISTS plproxy SCHEMA public')
db.run("CREATE EXTENSION IF NOT EXISTS cdb_dataservices_client VERSION '#{CDB_DATASERVICES_CLIENT_VERSION}'")
db.run("ALTER EXTENSION cdb_dataservices_client UPDATE TO '#{CDB_DATASERVICES_CLIENT_VERSION}'")
geocoder_server_sql = build_geocoder_server_config_sql(geocoder_api_config)
db.run(geocoder_server_sql)
db.run(build_entity_config_sql)
end
end
end
# Extra setup for the organization owner: member-role permissions, admin
# registration and the org metadata API endpoint.
def setup_organization_owner
setup_organization_role_permissions
setup_owner_permissions
configure_extension_org_metadata_api_endpoint
end
# Use a direct connection to the db through the direct port specified
# in the database configuration and set up its statement timeout value. This
# allows to overpass the statement_timeout limit if a connection pooler is used.
# This method is supposed to receive a block that will be run with the created
# connection.
#
# @param statement_timeout [Integer, String] value for SET statement_timeout
# @raise [RuntimeError] when no block is given
def in_database_direct_connection(statement_timeout:)
raise 'need block' unless block_given?
configuration = db_configuration_for
# Prefer the pooler-bypassing direct_port (symbol or string key) over the normal port.
configuration[:port] = configuration.fetch(:direct_port, configuration["direct_port"]) || configuration[:port] || configuration["port"]
# Temporary trace to be removed once https://github.com/CartoDB/cartodb/issues/7047 is solved
CartoDB::Logger.warning(message: 'Direct connection not used from queue') unless Socket.gethostname =~ /^que/
connection = @user.get_connection(_opts = {}, configuration)
begin
connection.run("SET statement_timeout TO #{statement_timeout}")
yield(connection)
ensure
# Always restore the session default, even if the block raised.
connection.run("SET statement_timeout TO DEFAULT")
end
end
# Closes pooled connections to this user's database (global $pool).
def reset_pooled_connections
# Only close connections to this users' database
$pool.close_connections!(@user.database_name)
end
# Upgrade the cartodb postgresql extension
# Creates the extension when missing (adopting unpackaged objects when
# possible) or upgrades it to the target version, then verifies the
# installed version matches.
#
# @param statement_timeout [String, nil] temporary timeout for the upgrade
# @param cdb_extension_target_version [String, nil] defaults to 0.16.4
# @raise [RuntimeError] when the resulting version does not match the target
def upgrade_cartodb_postgres_extension(statement_timeout = nil, cdb_extension_target_version = nil)
if cdb_extension_target_version.nil?
cdb_extension_target_version = '0.16.4'
end
@user.in_database(as: :superuser, no_cartodb_in_schema: true) do |db|
db.transaction do
unless statement_timeout.nil?
old_timeout = db.fetch("SHOW statement_timeout;").first[:statement_timeout]
db.run("SET statement_timeout TO '#{statement_timeout}';")
end
# The DO block: try cdb_version(); if the extension is absent, create it
# (FROM unpackaged first, plain CREATE as fallback); otherwise ALTER to
# the target version ("dev" versions go through the "next" step).
db.run(%{
DO LANGUAGE 'plpgsql' $$
DECLARE
ver TEXT;
BEGIN
BEGIN
SELECT cartodb.cdb_version() INTO ver;
EXCEPTION WHEN undefined_function OR invalid_schema_name THEN
RAISE NOTICE 'Got % (%)', SQLERRM, SQLSTATE;
BEGIN
CREATE EXTENSION cartodb VERSION '#{cdb_extension_target_version}' FROM unpackaged;
EXCEPTION WHEN undefined_table THEN
RAISE NOTICE 'Got % (%)', SQLERRM, SQLSTATE;
CREATE EXTENSION cartodb VERSION '#{cdb_extension_target_version}';
RETURN;
END;
RETURN;
END;
ver := '#{cdb_extension_target_version}';
IF position('dev' in ver) > 0 THEN
EXECUTE 'ALTER EXTENSION cartodb UPDATE TO ''' || ver || 'next''';
EXECUTE 'ALTER EXTENSION cartodb UPDATE TO ''' || ver || '''';
ELSE
EXECUTE 'ALTER EXTENSION cartodb UPDATE TO ''' || ver || '''';
END IF;
END;
$$;
})
unless statement_timeout.nil?
db.run("SET statement_timeout TO '#{old_timeout}';")
end
obtained = db.fetch('SELECT cartodb.cdb_version() as v').first[:v]
unless cartodb_extension_semver(cdb_extension_target_version) == cartodb_extension_semver(obtained)
raise("Expected cartodb extension '#{cdb_extension_target_version}' obtained '#{obtained}'")
end
end
end
end
# Whether the installed cartodb extension predates multi-user (MU) support,
# i.e. its version is below 0.3.
#
# @return [Boolean]
# @raise [RuntimeError] when the version does not parse as x.y.z
def cartodb_extension_version_pre_mu?
semver = cartodb_extension_semver(cartodb_extension_version)
raise 'Current cartodb extension version does not match standard x.y.z format' unless semver.size == 3
major, minor, _patch = semver
major.zero? && minor < 3
end
# Returns a tree elements array with [major, minor, patch] as in http://semver.org/
# Segments beyond the third are ignored; missing ones are simply absent.
#
# @param extension_version [String] e.g. "0.16.4"
# @return [Array<Integer>]
def cartodb_extension_semver(extension_version)
segments = extension_version.split('.')
segments.first(3).map { |segment| segment.to_i }
end
# Installed cartodb extension version, memoized per service instance.
#
# @return [String] e.g. "0.16.4"
def cartodb_extension_version
@cartodb_extension_version ||= @user.in_database(as: :superuser)
.fetch('SELECT cartodb.cdb_version() AS v').first[:v]
end
# Revokes PUBLIC's privileges on the user's schema. Yields the connection
# inside the transaction when a block is given.
def reset_user_schema_permissions
@user.in_database(as: :superuser) do |user_database|
user_database.transaction do
schemas = [@user.database_schema].uniq
schemas.each do |schema|
revoke_privileges(user_database, schema, 'PUBLIC')
end
yield(user_database) if block_given?
end
end
end
# Revokes PUBLIC's and publicuser's privileges on the database and the shared
# schemas. Yields the connection inside the transaction when a block is given.
def reset_database_permissions
@user.in_database(as: :superuser) do |user_database|
user_database.transaction do
schemas = %w(public cdb_importer cdb cartodb)
['PUBLIC', CartoDB::PUBLIC_DB_USER].each do |user|
revoke_all_on_database_from(user_database, @user.database_name, user)
schemas.each do |schema|
revoke_privileges(user_database, schema, user)
end
end
yield(user_database) if block_given?
end
end
end
# Applies the user's statement_timeout at role and database level, then
# reconnects the pooled connections so the new settings take effect.
# NOTE(review): connection errors are deliberately swallowed (best effort);
# the `"?"`/.lit combination is a Sequel placeholder quirk — confirm before touching.
def set_statement_timeouts
@user.in_database(as: :superuser) do |user_database|
user_database["ALTER ROLE \"?\" SET statement_timeout to ?", @user.database_username.lit,
@user.user_timeout].all
user_database["ALTER DATABASE \"?\" SET statement_timeout to ?", @user.database_name.lit,
@user.database_timeout].all
end
@user.in_database.disconnect
@user.in_database.connect(db_configuration_for)
@user.in_database(as: :public_user).disconnect
@user.in_database(as: :public_user).connect(db_configuration_for(:public_user))
rescue Sequel::DatabaseConnectionError
end
# Registers the user's role as an organization member inside the database.
def set_user_as_organization_member
@user.in_database(as: :superuser) do |user_database|
user_database.transaction do
user_database.run("SELECT cartodb.CDB_Organization_Create_Member('#{@user.database_username}');")
end
end
end
# Makes the user's role the owner of their schema (idempotent).
def reset_schema_owner
@user.in_database(as: :superuser) do |database|
database.run(%{ ALTER SCHEMA "#{@user.database_schema}" OWNER TO "#{@user.database_username}" })
end
end
# Grants CONNECT on the database to the user's role.
def grant_user_in_database
@queries.run_in_transaction(
@queries.grant_connect_on_database_queries,
true
)
end
# Grants publicuser CONNECT plus read access to the cartodb and public
# schemas, while explicitly revoking the internal metadata tables.
def grant_publicuser_in_database
@queries.run_in_transaction(
@queries.grant_connect_on_database_queries(CartoDB::PUBLIC_DB_USER),
true
)
@queries.run_in_transaction(
@queries.grant_read_on_schema_queries(SCHEMA_CARTODB, CartoDB::PUBLIC_DB_USER),
true
)
# Internal metadata must not be readable by publicuser.
@queries.run_in_transaction(
[
"REVOKE SELECT ON cartodb.cdb_tablemetadata FROM #{CartoDB::PUBLIC_DB_USER} CASCADE",
"REVOKE SELECT ON cartodb.cdb_analysis_catalog FROM #{CartoDB::PUBLIC_DB_USER} CASCADE"
],
true
)
@queries.run_in_transaction(
[
"GRANT USAGE ON SCHEMA #{SCHEMA_PUBLIC} TO #{CartoDB::PUBLIC_DB_USER}",
"GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA #{SCHEMA_PUBLIC} TO #{CartoDB::PUBLIC_DB_USER}",
"GRANT SELECT ON spatial_ref_sys TO #{CartoDB::PUBLIC_DB_USER}"
],
true
)
end
# Grants the user full privileges on their own schema.
def set_user_privileges_in_own_schema # MU
@queries.run_in_transaction(
@queries.grant_all_on_user_schema_queries,
true
)
end
# Grants read access to the cartodb schema plus write access to the metadata
# tables the user must update (tablemetadata, analysis catalog).
#
# @param db_user [String, nil] role to grant; nil targets the user's own role
def set_user_privileges_in_cartodb_schema(db_user = nil)
@queries.run_in_transaction(
(
@queries.grant_read_on_schema_queries(SCHEMA_CARTODB, db_user) +
@queries.grant_write_on_cdb_tablemetadata_queries(db_user) +
@queries.grant_write_on_cdb_analysis_catalog_queries(db_user)
),
true
)
end
# Lets publicuser resolve objects in the user's schema (USAGE only).
def set_privileges_to_publicuser_in_own_schema # MU
# Privileges in user schema for publicuser
@queries.run_in_transaction(
@queries.grant_usage_on_user_schema_to_other(CartoDB::PUBLIC_DB_USER),
true
)
end
# Grants read access to the public schema.
#
# @param db_user [String, nil] role to grant; nil targets the user's own role
def set_user_privileges_in_public_schema(db_user = nil)
@queries.run_in_transaction(
@queries.grant_read_on_schema_queries(SCHEMA_PUBLIC, db_user),
true
)
end
# Grants full access to the importer scratch schema.
#
# @param db_user [String, nil] role to grant; nil targets the user's own role
def set_user_privileges_in_importer_schema(db_user = nil) # MU
@queries.run_in_transaction(
@queries.grant_all_on_schema_queries(SCHEMA_IMPORTER, db_user),
true
)
end
# Revokes every privilege on a database from a role, skipping silently when
# the role does not exist (a failed REVOKE would abort the transaction).
def revoke_all_on_database_from(conn, database, role)
return unless role_exists?(conn, role)
conn.run(%{REVOKE ALL ON DATABASE "#{database}" FROM "#{role}" CASCADE})
end
# Grants the owner role full privileges on the database.
def grant_owner_in_database
@queries.run_in_transaction(
@queries.grant_all_on_database_queries,
true
)
end
# Re-applies per-table permissions: publicuser can SELECT public/link-shared
# tables, and every table is owned by the user's role.
def fix_table_permissions
tables_queries = []
@user.tables.each do |table|
if table.public? || table.public_with_link_only?
tables_queries << %{
GRANT SELECT ON \"#{@user.database_schema}\".\"#{table.name}\" TO #{CartoDB::PUBLIC_DB_USER} }
end
tables_queries << %{
ALTER TABLE \"#{@user.database_schema}\".\"#{table.name}\" OWNER TO \"#{@user.database_username}\" }
end
@queries.run_in_transaction(
tables_queries,
true
)
end
# Grants full access to the geocoding schema.
#
# @param db_user [String, nil] role to grant; nil targets the user's own role
def set_user_privileges_in_geocoding_schema(db_user = nil)
@queries.run_in_transaction(
@queries.grant_all_on_schema_queries(SCHEMA_GEOCODING, db_user),
true
)
end
# Grants SELECT on the PostGIS geometry/geography catalog views.
#
# @param role_name [String, nil] defaults to the user's public role
def set_geo_columns_privileges(role_name = nil)
# Postgis lives at public schema, as do geometry_columns and geography_columns
catalogs_schema = SCHEMA_PUBLIC
target_user = role_name.nil? ? @user.database_public_username : role_name
queries = [
%{ GRANT SELECT ON "#{catalogs_schema}"."geometry_columns" TO "#{target_user}" },
%{ GRANT SELECT ON "#{catalogs_schema}"."geography_columns" TO "#{target_user}" }
]
@queries.run_in_transaction(queries, true)
end
# Grants SELECT on the PostGIS raster catalog views to publicuser and, for
# organization users, to the target role as well.
#
# @param role_name [String, nil] defaults to the user's public role
def set_raster_privileges(role_name = nil)
# Postgis lives at public schema, so raster catalogs too
catalogs_schema = SCHEMA_PUBLIC
queries = [
"GRANT SELECT ON TABLE \"#{catalogs_schema}\".\"raster_overviews\" TO \"#{CartoDB::PUBLIC_DB_USER}\"",
"GRANT SELECT ON TABLE \"#{catalogs_schema}\".\"raster_columns\" TO \"#{CartoDB::PUBLIC_DB_USER}\""
]
target_user = role_name.nil? ? @user.database_public_username : role_name
unless @user.organization.nil?
queries << "GRANT SELECT ON TABLE \"#{catalogs_schema}\".\"raster_overviews\" TO \"#{target_user}\""
queries << "GRANT SELECT ON TABLE \"#{catalogs_schema}\".\"raster_columns\" TO \"#{target_user}\""
end
@queries.run_in_transaction(queries, true)
end
# Grants the shared-schema privileges to the organization member group role,
# which every org user inherits (instead of granting each user separately).
def setup_organization_role_permissions
org_member_role = organization_member_group_role_member_name
set_user_privileges_in_public_schema(org_member_role)
@queries.run_in_transaction(
@queries.grant_connect_on_database_queries(org_member_role), true
)
set_geo_columns_privileges(org_member_role)
set_raster_privileges(org_member_role)
set_user_privileges_in_cartodb_schema(org_member_role)
set_user_privileges_in_importer_schema(org_member_role)
set_user_privileges_in_geocoding_schema(org_member_role)
end
# Whether a schema exists, per information_schema.
#
# @param schema [String]
# @param database [Sequel::Database] defaults to the user's connection
# @return [Boolean]
def schema_exists?(schema, database = @user.in_database)
lookup = "SELECT 1 as schema_exist FROM information_schema.schemata WHERE schema_name = '#{schema}'"
row = database.fetch(lookup).first
!row.nil?
end
# Revokes all privileges the given roles hold on a schema, skipping roles
# (and whole schemas) that do not exist.
#
# @param schema [String]
# @param accounts [Array<String>] role names
def drop_users_privileges_in_schema(schema, accounts)
@user.in_database(as: :superuser, statement_timeout: 600000) do |user_database|
# `return` exits the whole method from inside the connection block.
return unless schema_exists?(schema, user_database)
user_database.transaction do
accounts
.select { |role| role_exists?(user_database, role) }
.each { |role| revoke_privileges(user_database, schema, "\"#{role}\"") }
end
end
end
# Removes the role from every role/group it is a member of in this cluster.
def revoke_all_memberships_on_database_to_role(conn, role)
conn.fetch(%{
select rolname from pg_user join pg_auth_members on (pg_user.usesysid=pg_auth_members.member)
join pg_roles on (pg_roles.oid=pg_auth_members.roleid) where pg_user.usename='#{role}'
}).each do |rolname|
conn.run("REVOKE \"#{rolname[:rolname]}\" FROM \"#{role}\" CASCADE")
end
end
# Drops grants and functions in a given schema, avoiding by all means a CASCADE
# to not affect extensions or other users
#
# Strategy: repeatedly drop plain functions (retrying up to 3 passes for
# dependency errors), then drop aggregates, then retry any plain functions
# that depended on aggregates.
#
# @param schema_name [String] never operates on the public schema
# @raise [CartoDB::BaseCartoDBError] when dependencies still remain
def drop_all_functions_from_schema(schema_name)
recursivity_max_depth = 3
return if schema_name == SCHEMA_PUBLIC
@user.in_database(as: :superuser) do |database|
# Non-aggregate functions
drop_function_sqls = database.fetch(%{
SELECT 'DROP FUNCTION ' || ns.nspname || '.' || proname || '(' || oidvectortypes(proargtypes) || ');'
AS sql
FROM pg_proc INNER JOIN pg_namespace ns ON (pg_proc.pronamespace = ns.oid AND pg_proc.proisagg = FALSE)
WHERE ns.nspname = '#{schema_name}'
})
# Simulate a controlled environment drop cascade contained to only functions
failed_sqls = []
recursivity_level = 0
begin
failed_sqls = []
drop_function_sqls.each do |sql_sentence|
begin
database.run(sql_sentence[:sql])
rescue Sequel::DatabaseError => e
if e.message =~ /depends on function /i
failed_sqls.push(sql_sentence)
else
raise
end
end
end
drop_function_sqls = failed_sqls
recursivity_level += 1
end while failed_sqls.count > 0 && recursivity_level < recursivity_max_depth
# If something remains, reattempt later after dropping aggregates
if drop_function_sqls.count > 0
aggregate_dependant_function_sqls = drop_function_sqls
else
aggregate_dependant_function_sqls = []
end
# And now aggregate functions
failed_sqls = []
drop_function_sqls = database.fetch(%{
SELECT 'DROP AGGREGATE ' || ns.nspname || '.' || proname || '(' || oidvectortypes(proargtypes) || ');'
AS sql
FROM pg_proc INNER JOIN pg_namespace ns ON (pg_proc.pronamespace = ns.oid AND pg_proc.proisagg = TRUE)
WHERE ns.nspname = '#{schema_name}'
})
drop_function_sqls.each do |sql_sentence|
begin
database.run(sql_sentence[:sql])
rescue Sequel::DatabaseError
failed_sqls.push(sql_sentence)
end
end
if failed_sqls.count > 0
raise CartoDB::BaseCartoDBError.new('Cannot drop schema aggregate functions, dependencies remain')
end
# One final pass of normal functions, if left
if aggregate_dependant_function_sqls.count > 0
aggregate_dependant_function_sqls.each do |sql_sentence|
begin
database.run(sql_sentence[:sql])
rescue Sequel::DatabaseError
failed_sqls.push(sql_sentence)
end
end
end
if failed_sqls.count > 0
raise CartoDB::BaseCartoDBError.new('Cannot drop schema functions, dependencies remain')
end
end
end
# Create a "public.cdb_invalidate_varnish()" function to invalidate Varnish
#
# The function can only be used by the superuser, we expect
# security-definer triggers OR triggers on superuser-owned tables
# to call it with controlled set of parameters.
#
# The function is written in python because it needs to reach out
# to a Varnish server.
#
# Being unable to communicate with Varnish may or may not be critical
# depending on CartoDB configuration at time of function definition.
#
# Picks the invalidation transport from configuration: dedicated
# invalidation service > Varnish HTTP port > Varnish telnet.
def create_function_invalidate_varnish
if Cartodb.config[:invalidation_service] && Cartodb.config[:invalidation_service].fetch('enabled', false)
create_function_invalidate_varnish_invalidation_service
elsif Cartodb.config[:varnish_management].fetch('http_port', false)
create_function_invalidate_varnish_http
else
create_function_invalidate_varnish_telnet
end
end
# Add plpythonu pl handler
# Idempotent: CREATE OR REPLACE re-registers the language handler.
def add_python
@user.in_database(
as: :superuser,
no_cartodb_in_schema: true
).run("CREATE OR REPLACE PROCEDURAL LANGUAGE 'plpythonu' HANDLER plpython_call_handler;")
end
# Needed because in some cases it might not exist and failure ends transaction
#
# @param db [Sequel::Database]
# @param role [String]
# @return [Boolean] whether a role named `role` exists in pg_roles
def role_exists?(db, role)
row = db.fetch("SELECT 1 FROM pg_roles WHERE rolname='#{role}'").first
!row.nil?
end
# Revokes every privilege a role holds on a schema and on all of its
# sequences, functions and tables.
#
# @param db [Sequel::Database]
# @param schema [String]
# @param user [String] role reference, already quoted by the caller when needed
def revoke_privileges(db, schema, user)
targets = ['SCHEMA', 'ALL SEQUENCES IN SCHEMA', 'ALL FUNCTIONS IN SCHEMA', 'ALL TABLES IN SCHEMA']
targets.each do |target|
db.run("REVOKE ALL ON #{target} \"#{schema}\" FROM #{user} CASCADE")
end
end
# Name of the organization member group role, as reported by the extension.
# NOTE(review): the double [:org_member_role] relies on Sequel's Dataset#[]
# selecting the row and then reading the column — verify before refactoring.
def organization_member_group_role_member_name
@user.in_database.fetch(
"SELECT cartodb.CDB_Organization_Member_Group_Role_Member_Name() as org_member_role;"
)[:org_member_role][:org_member_role]
end
# Sequel connection settings for one of the user's database roles.
#
# @param user_role [Symbol, nil] :superuser, :cluster_admin, :public_user,
#   :public_db_user, or nil for the user's own credentials
# @return [Hash] environment configuration merged with role-specific
#   overrides; nil override values never clobber the environment defaults
def db_configuration_for(user_role = nil)
logger = (Rails.env.development? || Rails.env.test? ? ::Rails.logger : nil)
overrides =
case user_role
when :superuser
{ 'database' => @user.database_name }
when :cluster_admin
# Administrative operations run against the maintenance database.
{ 'database' => 'postgres' }
when :public_user
{ 'database' => @user.database_name,
'username' => CartoDB::PUBLIC_DB_USER, 'password' => CartoDB::PUBLIC_DB_USER_PASSWORD }
when :public_db_user
{ 'database' => @user.database_name,
'username' => @user.database_public_username, 'password' => CartoDB::PUBLIC_DB_USER_PASSWORD }
else
{ 'database' => @user.database_name,
'username' => @user.database_username,
'password' => @user.database_password }
end
overrides = overrides.merge(:logger => logger, 'host' => @user.database_host)
::Rails::Sequel.configuration.environment_for(Rails.env).merge(overrides) { |_, old_val, new_val| new_val.nil? ? old_val : new_val }
end
# Touches the users_modifications marker file and, when the signup service is
# fully configured, asks it to activate the remote db user.
def monitor_user_notification
FileUtils.touch(Rails.root.join('log', 'users_modifications'))
if !Cartodb.config[:signups].nil? && !Cartodb.config[:signups]["service"].nil? &&
!Cartodb.config[:signups]["service"]["port"].nil?
enable_remote_db_user
end
end
# POSTs to the signups service on the user's database host to activate the
# remote DB user.
# Raises the raw response body on a non-200 HTTP status, or the service's
# stderr output when the service returns a non-zero retcode.
def enable_remote_db_user
  request = http_client.request(
    "#{@user.database_host}:#{Cartodb.config[:signups]['service']['port']}/scripts/activate_db_user",
    method: :post,
    headers: { "Content-Type" => "application/json" }
  )
  response = request.run
  if response.code != 200
    raise(response.body)
  else
    comm_response = JSON.parse(response.body)
    if comm_response['retcode'].to_i != 0
      # Fix: previously raised response['stderr'], but the HTTP response
      # object is not indexable — stderr comes from the parsed JSON payload.
      raise(comm_response['stderr'])
    end
  end
end
# Gives the user a dedicated schema named after their username:
# loads the cartodb extension functions, persists the new schema name on the
# user record, creates the schema, points the role's search_path at it and
# creates the per-user public DB role. Order matters here.
def create_own_schema
  load_cartodb_functions
  @user.database_schema = @user.username
  @user.this.update(database_schema: @user.database_schema)
  create_user_schema
  set_database_search_path
  create_public_db_user
end
# Moves the user's objects from their current schema into a schema named
# after the username (no-op if already there).
# On failure, rolls back the metadata change and — defensively, only when the
# new schema is empty — drops the partially-created schema, then re-raises.
def move_to_own_schema
  new_schema_name = @user.username
  # Captured up-front so the rescue below can restore it
  old_database_schema_name = @user.database_schema
  if @user.database_schema != new_schema_name
    Carto::Db::UserSchemaMover.new(@user).move_objects(new_schema_name)
    create_public_db_user
    set_database_search_path
  end
rescue => e
  # Undo metadata changes if process fails
  begin
    @user.this.update database_schema: old_database_schema_name
    # Defensive measure to avoid undesired table dropping
    if schema_exists?(new_schema_name) && tables_effective(new_schema_name).count == 0
      drop_all_functions_from_schema(new_schema_name)
      @user.in_database.run(%{ DROP SCHEMA "#{new_schema_name}" })
    end
  rescue => ee
    # Avoid shadowing the actual error
    CartoDB.notify_exception(ee, user: @user)
  end
  raise e
end
# Destroys the user's schema, database and DB role.
# Disallows new connections and terminates existing ones before dropping the
# database (PostgreSQL refuses to drop a DB with live connections).
# conn: optional cluster-admin connection (defaults to a fresh one).
def drop_database_and_user(conn = nil)
  conn ||= @user.in_database(as: :cluster_admin)
  if !@user.database_name.nil? && !@user.database_name.empty?
    @user.in_database(as: :superuser).run("DROP SCHEMA \"#{@user.database_schema}\" CASCADE")
    conn.run("UPDATE pg_database SET datallowconn = 'false' WHERE datname = '#{@user.database_name}'")
    CartoDB::UserModule::DBService.terminate_database_connections(@user.database_name, @user.database_host)
    conn.run("DROP DATABASE \"#{@user.database_name}\"")
  end
  if !@user.database_username.nil? && !@user.database_username.empty?
    conn.run("DROP USER \"#{@user.database_username}\"")
  end
end
# Runs +query+ on the user's database via the raw pg connection, timing it.
# Returns a hash: :time (seconds), :total_rows, :rows (hash rows via
# pg_to_hash), :results, :modified, :affected_rows.
# Translates "does not exist" PG errors into CartoDB::ColumnNotExists /
# CartoDB::TableNotExists, other PG errors into CartoDB::ErrorRunningQuery,
# and re-raises anything else untouched.
def run_pg_query(query)
  time = nil
  res = nil
  translation_proc = nil
  @user.in_database do |user_database|
    time = Benchmark.measure do
      # synchronize yields the underlying pg connection object
      user_database.synchronize do |conn|
        res = conn.exec query
      end
      translation_proc = user_database.conversion_procs
    end
  end
  {
    time: time.real,
    total_rows: res.ntuples,
    rows: pg_to_hash(res, translation_proc),
    results: pg_results?(res),
    modified: pg_modified?(res),
    affected_rows: pg_size(res)
  }
rescue => e
  if e.is_a? PGError
    if e.message.include?("does not exist")
      if e.message.include?("column")
        raise CartoDB::ColumnNotExists, e.message
      else
        raise CartoDB::TableNotExists, e.message
      end
    else
      raise CartoDB::ErrorRunningQuery, e.message
    end
  else
    raise e
  end
end
# Creates the user's database role and grants it the shared publicuser role.
# Runs inside a transaction; failures are logged to stdout and re-raised.
def create_db_user
  conn = @user.in_database(as: :cluster_admin)
  conn.transaction do
    begin
      conn.run("CREATE USER \"#{@user.database_username}\" PASSWORD '#{@user.database_password}'")
      conn.run("GRANT publicuser to \"#{@user.database_username}\"")
    rescue => e
      puts "#{Time.now} USER SETUP ERROR (#{@user.database_username}): #{e}"
      raise e
    end
  end
end
# Creates the user's database from the template_postgis template, owned by
# the Rails-configured DB user. Failures are logged to stdout and re-raised.
def create_user_db
  conn = @user.in_database(as: :cluster_admin)
  owner = ::Rails::Sequel.configuration.environment_for(Rails.env)['username']
  begin
    conn.run("CREATE DATABASE \"#{@user.database_name}\"
      WITH TEMPLATE = template_postgis
      OWNER = #{owner}
      ENCODING = 'UTF8'
      CONNECTION LIMIT=-1")
  rescue => e
    puts "#{Time.now} USER SETUP ERROR WHEN CREATING DATABASE #{@user.database_name}: #{e}"
    raise e
  end
end
# Derives and persists the user's database name from the Rails environment
# and the user's partial DB name.
# For organization users the database must already exist; for standalone
# users it must NOT exist yet — otherwise an error is raised.
def set_database_name
  env_tag = case Rails.env
            when 'development' then 'dev_'
            when 'staging' then 'staging_'
            when 'test' then 'test_'
            else ''
            end
  @user.database_name = "cartodb_#{env_tag}user_#{@user.partial_db_name}_db"
  if @user.has_organization_enabled?
    raise "Organization database #{@user.database_name} doesn't exist" unless @user.database_exists?
  elsif @user.database_exists?
    raise "Database #{@user.database_name} already exists"
  end
  @user.this.update database_name: @user.database_name
end
# Roles that grant public (anonymous) access: the global publicuser role,
# plus the per-user public role for organization members.
def public_user_roles
  if @user.organization_user?
    [CartoDB::PUBLIC_DB_USER, @user.database_public_username]
  else
    [CartoDB::PUBLIC_DB_USER]
  end
end
# Instance-level convenience wrapper over the class method of the same name,
# targeting this user's database and host.
def terminate_database_connections
  db_name = @user.database_name
  db_host = @user.database_host
  CartoDB::UserModule::DBService.terminate_database_connections(db_name, db_host)
end
# Kills every backend connected to +database_name+ on +database_host+ by
# connecting to the maintenance 'postgres' DB and running pg_terminate_backend
# over pg_stat_activity. The plpgsql DO block handles the pre-9.2 rename of
# the backend pid column (procpid -> pid).
def self.terminate_database_connections(database_name, database_host)
  connection_params = ::Rails::Sequel.configuration.environment_for(Rails.env).merge(
    'host' => database_host,
    'database' => 'postgres'
  ) { |_, o, n| n.nil? ? o : n }
  conn = ::Sequel.connect(connection_params)
  conn.run("
    DO language plpgsql $$
    DECLARE
      ver INT;
      sql TEXT;
    BEGIN
      SELECT INTO ver setting from pg_settings where name='server_version_num';
      sql := 'SELECT pg_terminate_backend(';
      IF ver > 90199 THEN
        sql := sql || 'pid';
      ELSE
        sql := sql || 'procpid';
      END IF;
      sql := sql || ') FROM pg_stat_activity WHERE datname = '
        || quote_literal('#{database_name}');
      RAISE NOTICE '%', sql;
      EXECUTE sql;
    END
    $$
  ")
  # Helper from CartoDB::SequelConnectionHelper; releases the ad-hoc connection
  close_sequel_connection(conn)
end
# Introspection delegators to Carto::Db::Database, scoped to the given schema
# (the user's own schema by default).
def triggers(schema = @user.database_schema)
  Carto::Db::Database.build_with_user(@user).triggers(schema)
end

# Functions owned by the user's DB role in +schema+.
def functions(schema = @user.database_schema)
  Carto::Db::Database.build_with_user(@user).functions(schema, @user.database_username)
end

# Views owned by the user's DB role in +schema+.
def views(schema = @user.database_schema)
  Carto::Db::Database.build_with_user(@user).views(schema, @user.database_username)
end

# Materialized views owned by the user's DB role in +schema+.
def materialized_views(schema = @user.database_schema)
  Carto::Db::Database.build_with_user(@user).materialized_views(schema, @user.database_username)
end
# Extracts the PostgreSQL version number (e.g. "9.5.2") from SELECT version(),
# or nil when the banner does not match the expected format.
def get_database_version
  banner = @user.in_database.fetch("SELECT version()").first[:version]
  match = banner.match(/(PostgreSQL (([0-9]+\.?){2,3})).*/)
  match && match[2]
end
# Wires the aggregation foreign tables (admin0/admin1) into the user's DB:
# configures the FDW server from app config, creates the foreign tables and
# grants the user's role read access. Everything runs in one transaction.
def connect_to_aggregation_tables
  config = Cartodb.get_config(:aggregation_tables)
  @user.in_database(as: :superuser) do |db|
    db.transaction do
      db.run(build_aggregation_fdw_config_sql(config))
      db.run("SELECT cartodb._CDB_Setup_FDW('aggregation');")
      # admin0 exposes pop_est as "population" via the column_name option
      db.run("CREATE FOREIGN TABLE IF NOT EXISTS #{SCHEMA_AGGREGATION_TABLES}.agg_admin0 " \
        "(cartodb_id integer, the_geom geometry(Geometry,4326), " \
        "the_geom_webmercator geometry(Geometry,3857), " \
        "population double precision OPTIONS (column_name 'pop_est')) SERVER aggregation OPTIONS " \
        "(schema_name 'public', table_name '#{config['tables']['admin0']}', updatable 'false');")
      db.run("CREATE FOREIGN TABLE IF NOT EXISTS #{SCHEMA_AGGREGATION_TABLES}.agg_admin1 " \
        "(cartodb_id integer,the_geom geometry(Geometry,4326), " \
        "the_geom_webmercator geometry(Geometry,3857)) " \
        "SERVER aggregation OPTIONS (schema_name 'public', table_name '#{config['tables']['admin1']}', updatable 'false');")
      db.run("GRANT SELECT ON TABLE #{SCHEMA_AGGREGATION_TABLES}.agg_admin0 TO \"#{@user.database_username}\";")
      db.run("GRANT SELECT ON TABLE #{SCHEMA_AGGREGATION_TABLES}.agg_admin1 TO \"#{@user.database_username}\";")
    end
  end
end
private
# Memoized HTTP client used for the signups-service calls; logs its requests.
def http_client
  return @http_client if @http_client
  @http_client = Carto::Http::Client.get('old_user', log_requests: true)
end
# Telnet invalidation works only for Varnish 2.x.
# (Re)creates public.cdb_invalidate_varnish as a plpythonu function that
# purges cache entries over the Varnish telnet management interface.
# Configuration comes from Cartodb.config[:varnish_management].
def create_function_invalidate_varnish_telnet
  add_python
  varnish_host = Cartodb.config[:varnish_management].try(:[], 'host') || '127.0.0.1'
  varnish_port = Cartodb.config[:varnish_management].try(:[], 'port') || 6082
  varnish_timeout = Cartodb.config[:varnish_management].try(:[], 'timeout') || 5
  varnish_critical = Cartodb.config[:varnish_management].try(:[], 'critical') == true ? 1 : 0
  varnish_retry = Cartodb.config[:varnish_management].try(:[], 'retry') || 5
  # Fix: was `Cartodb::config[...]` — calling a method via the scope operator
  # is deprecated and inconsistent with every other config access here.
  purge_command = Cartodb.config[:varnish_management]["purge_command"]
  varnish_trigger_verbose = Cartodb.config[:varnish_management].fetch('trigger_verbose', true) == true ? 1 : 0
  @user.in_database(as: :superuser).run(
    <<-TRIGGER
    BEGIN;
    CREATE OR REPLACE FUNCTION public.cdb_invalidate_varnish(table_name text) RETURNS void AS
    $$
        critical = #{varnish_critical}
        timeout = #{varnish_timeout}
        retry = #{varnish_retry}
        trigger_verbose = #{varnish_trigger_verbose}

        client = GD.get('varnish', None)

        for i in ('base64', 'hashlib'):
          if not i in GD:
            GD[i] = __import__(i)

        while True:

          if not client:
              try:
                import varnish
                client = GD['varnish'] = varnish.VarnishHandler(('#{varnish_host}', #{varnish_port}, timeout))
              except Exception as err:
                # NOTE: we won't retry on connection error
                if critical:
                  plpy.error('Varnish connection error: ' +  str(err))
                break

          try:
            cache_key = "t:" + GD['base64'].b64encode(GD['hashlib'].sha256('#{@user.database_name}:%s' % table_name).digest())[0:6]
            # We want to say \b here, but the Varnish telnet interface expects \\b, we have to escape that on Python to \\\\b and double that for SQL
            client.fetch('#{purge_command} obj.http.Surrogate-Key ~ "\\\\\\\\b%s\\\\\\\\b"' % cache_key)
            break
          except Exception as err:
            if trigger_verbose:
              plpy.warning('Varnish fetch error: ' + str(err))
            client = GD['varnish'] = None # force reconnect
            if not retry:
              if critical:
                plpy.error('Varnish fetch error: ' +  str(err))
              break
            retry -= 1 # try reconnecting
    $$
    LANGUAGE 'plpythonu' VOLATILE;
    REVOKE ALL ON FUNCTION public.cdb_invalidate_varnish(TEXT) FROM PUBLIC;
    COMMIT;
    TRIGGER
  )
end
# (Re)creates public.cdb_invalidate_varnish as a plpythonu function that
# purges cache entries through Varnish's HTTP PURGE endpoint.
# Configuration comes from Cartodb.config[:varnish_management].
def create_function_invalidate_varnish_http
  add_python
  varnish_host = Cartodb.config[:varnish_management].try(:[], 'host') || '127.0.0.1'
  varnish_port = Cartodb.config[:varnish_management].try(:[], 'http_port') || 6081
  varnish_timeout = Cartodb.config[:varnish_management].try(:[], 'timeout') || 5
  varnish_critical = Cartodb.config[:varnish_management].try(:[], 'critical') == true ? 1 : 0
  varnish_retry = Cartodb.config[:varnish_management].try(:[], 'retry') || 5
  varnish_trigger_verbose = Cartodb.config[:varnish_management].fetch('trigger_verbose', true) == true ? 1 : 0
  @user.in_database(as: :superuser).run(
    <<-TRIGGER
    BEGIN;
    CREATE OR REPLACE FUNCTION public.cdb_invalidate_varnish(table_name text) RETURNS void AS
    $$
        critical = #{varnish_critical}
        timeout = #{varnish_timeout}
        retry = #{varnish_retry}
        trigger_verbose = #{varnish_trigger_verbose}

        for i in ('httplib', 'base64', 'hashlib'):
          if not i in GD:
            GD[i] = __import__(i)

        while True:
          try:
            client = GD['httplib'].HTTPConnection('#{varnish_host}', #{varnish_port}, False, timeout)
            cache_key = "t:" + GD['base64'].b64encode(GD['hashlib'].sha256('#{@user.database_name}:%s' % table_name).digest())[0:6]
            client.request('PURGE', '/key', '', {"Invalidation-Match": ('\\\\b%s\\\\b' % cache_key) })
            response = client.getresponse()
            assert response.status == 204
            break
          except Exception as err:
            if trigger_verbose:
              plpy.warning('Varnish purge error: ' + str(err))
            if not retry:
              if critical:
                plpy.error('Varnish purge error: ' +  str(err))
              break
            retry -= 1 # try reconnecting
    $$
    LANGUAGE 'plpythonu' VOLATILE;
    REVOKE ALL ON FUNCTION public.cdb_invalidate_varnish(TEXT) FROM PUBLIC;
    COMMIT;
    TRIGGER
  )
end
# Invalidate through external service
# (Re)creates public.cdb_invalidate_varnish as a plpythonu function that
# delegates invalidation to an external Redis-protocol service ('TCH'
# command) and logs a JSON result line to syslog when verbose.
# Configuration comes from Cartodb.config[:invalidation_service].
def create_function_invalidate_varnish_invalidation_service
  add_python
  invalidation_host = Cartodb.config[:invalidation_service].try(:[], 'host') || '127.0.0.1'
  invalidation_port = Cartodb.config[:invalidation_service].try(:[], 'port') || 3142
  invalidation_timeout = Cartodb.config[:invalidation_service].try(:[], 'timeout') || 5
  invalidation_critical = Cartodb.config[:invalidation_service].try(:[], 'critical') ? 1 : 0
  invalidation_retry = Cartodb.config[:invalidation_service].try(:[], 'retry') || 5
  invalidation_trigger_verbose =
    Cartodb.config[:invalidation_service].fetch('trigger_verbose', true) == true ? 1 : 0
  @user.in_database(as: :superuser).run(
    <<-TRIGGER
    BEGIN;
    CREATE OR REPLACE FUNCTION public.cdb_invalidate_varnish(table_name text) RETURNS void AS
    $$
        critical = #{invalidation_critical}
        timeout = #{invalidation_timeout}
        retry = #{invalidation_retry}
        trigger_verbose = #{invalidation_trigger_verbose}

        client = GD.get('invalidation', None)

        if 'syslog' not in GD:
          import syslog
          GD['syslog'] = syslog
        else:
          syslog = GD['syslog']

        if 'time' not in GD:
          import time
          GD['time'] = time
        else:
          time = GD['time']

        if 'json' not in GD:
          import json
          GD['json'] = json
        else:
          json = GD['json']

        start = time.time()
        retries = 0
        termination_state = 1
        error = ''

        while True:

          if not client:
              try:
                import redis
                client = GD['invalidation'] = redis.Redis(host='#{invalidation_host}', port=#{invalidation_port}, socket_timeout=timeout)
              except Exception as err:
                error = "client_error - %s" % str(err)
                # NOTE: we won't retry on connection error
                if critical:
                  plpy.error('Invalidation Service connection error: ' +  str(err))
                break

          try:
            client.execute_command('TCH', '#{@user.database_name}', table_name)
            termination_state = 0
            error = ''
            break
          except Exception as err:
            error = "request_error - %s" % str(err)
            if trigger_verbose:
              plpy.warning('Invalidation Service warning: ' + str(err))
            client = GD['invalidation'] = None # force reconnect
            if not retry:
              if critical:
                plpy.error('Invalidation Service error: ' +  str(err))
              break
            retries = retries + 1
            retry -= 1 # try reconnecting

        end = time.time()
        invalidation_duration = (end - start)
        current_time = time.strftime("%Y-%m-%d %H:%M:%S %Z", time.localtime())
        session_user = plpy.execute("SELECT session_user", 1)[0]["session_user"]
        invalidation_result = {"timestamp": current_time, "duration": round(invalidation_duration, 8), "termination_state": termination_state, "retries": retries, "error": error, "database": "#{@user.database_name}", "table_name": table_name, "dbuser": session_user}
        if trigger_verbose:
          syslog.syslog(syslog.LOG_INFO, "invalidation: %s" % json.dumps(invalidation_result))
    $$
    LANGUAGE 'plpythonu' VOLATILE;
    REVOKE ALL ON FUNCTION public.cdb_invalidate_varnish(TEXT) FROM PUBLIC;
    COMMIT;
    TRIGGER
  )
end
# Geocoder api extension related
# Builds the SQL that stores the geocoder server connection string in
# CDB_Conf under the 'geocoder_server_config' key.
# config: hash with 'host', 'port', 'user' and 'dbname' keys.
def build_geocoder_server_config_sql(config)
  connection_str =
    "host=#{config['host']} port=#{config['port']} dbname=#{config['dbname']} user=#{config['user']}"
  %{
    SELECT cartodb.CDB_Conf_SetConf('geocoder_server_config',
        '{ "connection_str": "#{connection_str}"}'::json
    );
  }
end
# Builds the SQL that stores the entity (user/organization) metadata in
# CDB_Conf under the 'user_config' key.
def build_entity_config_sql
  # User configuration: organizations are registered under the org name
  is_organization = @user.organization_user?
  entity_name = is_organization ? @user.organization.name : @user.username
  %{
    SELECT cartodb.CDB_Conf_SetConf('user_config',
        '{"is_organization": #{is_organization}, "entity_name": "#{entity_name}"}'::json
    );
  }
end
# Builds the SQL that registers the aggregation FDW server/user credentials
# in CDB_Conf under the 'fdws' key.
# config: hash with 'dbname', 'host', 'port', 'username' and 'password'.
def build_aggregation_fdw_config_sql(config)
  fdw_json = %[{"aggregation":{"server":{"extensions":"postgis", "dbname":"#{config['dbname']}",
      "host":"#{config['host']}", "port":"#{config['port']}"}, "users":{"public":{"user":"#{config['username']}",
      "password":"#{config['password']}"} } } }]
  %{
    SELECT cartodb.CDB_Conf_SetConf('fdws', '#{fdw_json}'::json);
  }
end
end
end
end
# Better comments related to CartoDB/cartodb-management#4467
# encoding: utf-8
require_relative 'db_queries'
require_dependency 'carto/db/database'
require_dependency 'carto/db/user_schema_mover'
require 'cartodb/sequel_connection_helper'
# To avoid collisions with User model
module CartoDB
# To avoid collisions with User class
module UserModule
class DBService
include CartoDB::MiniSequel
extend CartoDB::SequelConnectionHelper
# Also default schema for new users
SCHEMA_PUBLIC = 'public'.freeze
# Schema holding the cartodb extension objects
SCHEMA_CARTODB = 'cartodb'.freeze
# Scratch schema used by the importer
SCHEMA_IMPORTER = 'cdb_importer'.freeze
# Scratch schema used by geocodings
SCHEMA_GEOCODING = 'cdb'.freeze
# Schema of the dataservices client extension
SCHEMA_CDB_DATASERVICES_API = 'cdb_dataservices_client'.freeze
# Schema where the aggregation foreign tables live
SCHEMA_AGGREGATION_TABLES = 'aggregation'.freeze
# Pinned version of the cdb_dataservices_client extension
CDB_DATASERVICES_CLIENT_VERSION = '0.5.0'.freeze
# Wraps a user model to perform database-level operations on its behalf.
# user: the user model (must not be nil).
def initialize(user)
  raise "User nil" if !user
  @user = user
  @queries = CartoDB::UserModule::DBQueries.new(@user)
end
def queries
@queries
end
# This method is used both upon user creation and by the UserMover
# All methods called inside should allow to be executed multiple times without errors
# Sets search_path, grants, statement timeouts and per-schema privileges,
# branching on whether the user lives in the shared 'public' schema
# (standalone user) or in their own schema (organization user).
def configure_database
  set_database_search_path

  grant_user_in_database

  set_statement_timeouts

  # INFO: Added to everyone because eases migration of normal users to org owners
  # and non-org users just don't use it
  set_user_as_organization_member

  if @user.database_schema == SCHEMA_PUBLIC
    setup_single_user_schema
  else
    setup_organization_user_schema
  end

  create_function_invalidate_varnish

  grant_publicuser_in_database
end
# All methods called inside should allow to be executed multiple times without errors
# Privileges + quota trigger for a standalone (non-organization) user.
def setup_single_user_schema
  set_user_privileges_at_db

  rebuild_quota_trigger
end
# All methods called inside should allow to be executed multiple times without errors
# Privileges, ownership, quota trigger and geocoder config for an
# organization user; org owners additionally get the owner setup.
def setup_organization_user_schema
  # WIP: CartoDB/cartodb-management#4467
  # reset_user_schema_permissions
  reset_schema_owner
  set_user_privileges_at_db
  set_user_as_organization_member
  rebuild_quota_trigger

  # INFO: organization privileges are set for org_member_role, which is assigned to each org user
  if @user.organization_owner?
    setup_organization_owner
  end

  # Rebuild the geocoder api user config to reflect that is an organization user
  install_and_configure_geocoder_api_extension
end
# INFO: main setup for non-org users
# Full provisioning pipeline for a standalone user: role + database creation
# (run on a joined thread), scratch schemas, extension load, permissions
# reset and final configuration.
def new_non_organization_user_main_db_setup
  Thread.new do
    create_db_user
    create_user_db
    grant_owner_in_database
  end.join
  create_importer_schema
  create_geocoding_schema
  load_cartodb_functions
  install_and_configure_geocoder_api_extension

  # We reset the connections to this database to be sure the change in default search_path is effective
  reset_pooled_connections

  reset_database_permissions # Reset privileges

  configure_database

  revoke_cdb_conf_access
end
# INFO: main setup for org users
# Provisioning pipeline for an organization user: the DB already exists, so
# only the role, the user's own schema and its configuration are created.
def new_organization_user_main_db_setup
  Thread.new do
    create_db_user
  end.join
  create_own_schema
  setup_organization_user_schema
  install_and_configure_geocoder_api_extension

  # We reset the connections to this database to be sure the change in default search_path is effective
  reset_pooled_connections

  revoke_cdb_conf_access
end
def set_user_privileges_at_db # MU
  # INFO: organization permission on public schema is handled through role assignment
  org_member = @user.organization_user?
  unless org_member
    set_user_privileges_in_cartodb_schema
    set_user_privileges_in_public_schema
  end

  set_user_privileges_in_own_schema
  set_privileges_to_publicuser_in_own_schema

  # Importer/geocoding/geo-column/raster privileges only apply to standalone users
  return if org_member

  set_user_privileges_in_importer_schema
  set_user_privileges_in_geocoding_schema
  set_geo_columns_privileges
  set_raster_privileges
end
# Makes the user's database read-only for NEW connections.
def disable_writes
  # NOTE: This will not affect already opened connections. Run `terminate_database_conections` method after this
  # to ensure no more writes are possible.
  @user.in_database(as: :cluster_admin) do |database|
    database.run(%{
      ALTER DATABASE "#{@user.database_name}"
      SET default_transaction_read_only = 'on'
    })
  end
end
# Restores the cluster default (writable) transaction mode for NEW connections.
def enable_writes
  # NOTE: This will not affect already opened connections. Run `terminate_database_conections` method after this
  # to ensure no more writes are possible.
  @user.in_database(as: :cluster_admin) do |database|
    database.run(%{
      ALTER DATABASE "#{@user.database_name}"
      SET default_transaction_read_only = default
    })
  end
end
# True when the database accepts writes (default_transaction_read_only is 'off').
def writes_enabled?
  @user.in_database(as: :superuser) do |database|
    database.fetch(%{SHOW default_transaction_read_only}).first[:default_transaction_read_only] == 'off'
  end
end
# Cartodb functions
# Installs the cartodb extension prerequisites (plpythonu, postgis) if
# missing, then upgrades the extension and rebuilds the quota trigger.
# statement_timeout: optional timeout applied (and restored) around the work.
def load_cartodb_functions(statement_timeout = nil, cdb_extension_target_version = nil)
  add_python

  # Install dependencies of cartodb extension
  @user.in_database(as: :superuser, no_cartodb_in_schema: true) do |db|
    db.transaction do
      unless statement_timeout.nil?
        old_timeout = db.fetch("SHOW statement_timeout;").first[:statement_timeout]
        db.run("SET statement_timeout TO '#{statement_timeout}';")
      end

      # FROM unpackaged adopts pre-extension installs of these languages/libs
      db.run('CREATE EXTENSION plpythonu FROM unpackaged') unless db.fetch(%{
          SELECT count(*) FROM pg_extension WHERE extname='plpythonu'
        }).first[:count] > 0
      db.run('CREATE EXTENSION postgis FROM unpackaged') unless db.fetch(%{
          SELECT count(*) FROM pg_extension WHERE extname='postgis'
        }).first[:count] > 0

      unless statement_timeout.nil?
        db.run("SET statement_timeout TO '#{old_timeout}';")
      end
    end
  end

  upgrade_cartodb_postgres_extension(statement_timeout, cdb_extension_target_version)

  rebuild_quota_trigger
end
# Rebuilds the quota trigger using a fresh superuser connection.
def rebuild_quota_trigger
  @user.in_database(as: :superuser) { |database| rebuild_quota_trigger_with_database(database) }
end
# Rebuilds the user's quota trigger on the given (superuser) connection,
# handling both pre-multiuser and multiuser extension APIs.
def rebuild_quota_trigger_with_database(db)
  if !cartodb_extension_version_pre_mu? && @user.has_organization?
    db.run("DROP FUNCTION IF EXISTS public._CDB_UserQuotaInBytes();")
  end

  db.transaction do
    # NOTE: this has been written to work for both databases that switched to "cartodb" extension
    # and those before the switch.
    # In the future we should guarantee that exntension lives in cartodb schema so we don't need to set
    # a search_path before
    search_path = db.fetch("SHOW search_path;").first[:search_path]
    db.run("SET search_path TO #{SCHEMA_CARTODB}, #{SCHEMA_PUBLIC};")
    if cartodb_extension_version_pre_mu?
      db.run("SELECT CDB_SetUserQuotaInBytes(#{@user.quota_in_bytes});")
    else
      db.run("SELECT CDB_SetUserQuotaInBytes('#{@user.database_schema}', #{@user.quota_in_bytes});")
    end
    db.run("SET search_path TO #{search_path};")
  end
end
# Instance wrapper over DBService.build_search_path, defaulting to the
# user's own schema.
def build_search_path(user_schema = nil, quote_user_schema = true)
  schema = user_schema || @user.database_schema
  DBService.build_search_path(schema, quote_user_schema)
end
# Centralized method to provide the (ordered) search_path
def self.build_search_path(user_schema, quote_user_schema = true)
  # The user's schema goes first; quoting protects mixed-case/odd names
  quoted_schema = quote_user_schema ? %("#{user_schema}") : "#{user_schema}"
  [quoted_schema, SCHEMA_CARTODB, SCHEMA_CDB_DATASERVICES_API, SCHEMA_PUBLIC].join(', ')
end
# Points the user's DB role default search_path at their own schema
# (followed by cartodb, dataservices and public).
def set_database_search_path
  @user.in_database(as: :superuser) do |database|
    database.run(%{
      ALTER USER "#{@user.database_username}"
      SET search_path = #{build_search_path}
    })
  end
end
# Creates the importer scratch schema.
# Consistency fix: use the SCHEMA_IMPORTER constant instead of duplicating
# the 'cdb_importer' literal.
def create_importer_schema
  create_schema(SCHEMA_IMPORTER)
end
# Creates the geocoding scratch schema.
# Consistency fix: use the SCHEMA_GEOCODING constant instead of duplicating
# the 'cdb' literal.
def create_geocoding_schema
  create_schema(SCHEMA_GEOCODING)
end
# Creates the user's own schema, owned by the user's DB role.
def create_user_schema
  create_schema(@user.database_schema, @user.database_username)
end
# Creates +schema+ in the user's database, optionally owned by +role+.
def create_schema(schema, role = nil)
  @user.in_database(as: :superuser) do |database|
    Carto::Db::Database.new(@user.database_name, database).create_schema(schema, role)
  end
end
# Registers the user as an organization admin via the cartodb extension.
def setup_owner_permissions
  @user.in_database(as: :superuser) do |database|
    database.run(%{ SELECT cartodb.CDB_Organization_AddAdmin('#{@user.username}') })
  end
end
# Revokes CDB_Conf access from the user's role(s): the user role, the org
# member group role (for org owners) and the public role.
# Returns an array of warning/error strings (empty on full success); never
# raises for per-query failures.
def revoke_cdb_conf_access
  errors = []

  roles = [@user.database_username]
  if @user.organization_owner?
    begin
      roles << organization_member_group_role_member_name
    rescue => e
      errors << "WARN: Error fetching org member role (does #{@user.organization.name} has that role?)"
    end
  end
  roles << CartoDB::PUBLIC_DB_USER

  queries = []
  # Fix: use each — these loops are run for side effects, not for the
  # arrays map would build.
  roles.each do |db_role|
    queries.concat(@queries.revoke_permissions_on_cartodb_conf_queries(db_role))
  end

  queries.each do |query|
    @user.in_database(as: :superuser) do |database|
      begin
        database.run(query)
      rescue => e
        # We can find organizations not yet upgraded for any reason or missing roles
        errors << e.message
      end
    end
  end
  errors
rescue => e
  # For broken organizations.
  # Fix: previously interpolated bare `name`, which is undefined here and
  # raised NameError inside the handler; use the username instead.
  ["FATAL ERROR for #{@user.username}: #{e.message}"]
end
# Creates the per-user public role (used for anonymous access to the user's
# data), grants it publicuser and points its search_path at the user schema.
def create_public_db_user
  @user.in_database(as: :superuser) do |database|
    database.run(%{ CREATE USER "#{@user.database_public_username}" LOGIN INHERIT })
    database.run(%{ GRANT publicuser TO "#{@user.database_public_username}" })
    database.run(%{ ALTER USER "#{@user.database_public_username}" SET search_path = #{build_search_path} })
  end
end
# Lists the table names present in +schema+ according to
# information_schema.tables.
# NOTE(review): +schema+ is interpolated into SQL unescaped — callers only
# pass internal schema names; confirm before exposing to user input.
def tables_effective(schema = 'public')
  @user.in_database do |user_database|
    # synchronize pins a single underlying connection for the query;
    # fix: the yielded connection was previously bound to an unused param.
    user_database.synchronize do |_conn|
      query = "select table_name::text from information_schema.tables where table_schema = '#{schema}'"
      # `return` escapes both blocks with the computed list
      return user_database[query].all.map { |row| row[:table_name] }
    end
  end
end
# Drops every object owned by +role+ on the given connection.
def drop_owned_by_user(conn, role)
  conn.run(%{DROP OWNED BY "#{role}"})
end
# Drops a DB role, retrying once if PostgreSQL reports dependent objects:
# on that error it parses the conflicting database name out of the message
# ($1 from the regexp match), revokes/drops everything the role owns in both
# databases and retries. A second conflict on the same database re-raises.
def drop_user(conn = nil, username = nil)
  conn ||= @user.in_database(as: :cluster_admin)
  username ||= @user.database_username
  database_with_conflicts = nil
  retried = false

  begin
    conn.run("DROP USER IF EXISTS \"#{username}\"")
  rescue => e
    if !retried && e.message =~ /cannot be dropped because some objects depend on it/
      retried = true
      # Side effect: sets $1 to the conflicting database name (if matched)
      e.message =~ /object[s]? in database (.*)$/
      if database_with_conflicts == $1
        raise e
      else
        database_with_conflicts = $1
        revoke_all_on_database_from(conn, database_with_conflicts, username)
        revoke_all_memberships_on_database_to_role(conn, username)
        drop_owned_by_user(conn, username)
        conflict_database_conn = @user.in_database(
          as: :cluster_admin,
          'database' => database_with_conflicts
        )
        drop_owned_by_user(conflict_database_conn, username)
        ['cdb', 'cdb_importer', 'cartodb', 'public', @user.database_schema].each do |s|
          drop_users_privileges_in_schema(s, [username])
        end
        retry
      end
    else
      raise e
    end
  end
end
# Org users share the same db, so must only delete the schema unless he's the owner
# Drops an organization user's schema, roles and — for the owner — the whole
# shared database. Runs in a joined thread, then fires the modification hook.
# org_id: the organization id (required); is_owner: owner-level teardown.
def drop_organization_user(org_id, is_owner = false)
  raise CartoDB::BaseCartoDBError.new('Tried to delete an organization user without org id') if org_id.nil?

  Thread.new do
    @user.in_database(as: :superuser) do |database|
      if is_owner
        # Owner teardown must clear privileges across every member's schema
        schemas = ['cdb', 'cdb_importer', 'cartodb', 'public', @user.database_schema] +
                  ::User.select(:database_schema).where(organization_id: org_id).all.collect(&:database_schema)
        schemas.uniq.each do |s|
          drop_users_privileges_in_schema(
            s,
            [@user.database_username, @user.database_public_username, CartoDB::PUBLIC_DB_USER])
        end

        # To avoid "cannot drop function" errors
        database.run("drop extension if exists plproxy cascade")
      end

      # If user is in an organization should never have public schema, but to be safe (& tests which stub stuff)
      unless @user.database_schema == SCHEMA_PUBLIC
        database.run(%{ DROP FUNCTION IF EXISTS "#{@user.database_schema}"._CDB_UserQuotaInBytes()})
        drop_all_functions_from_schema(@user.database_schema)
        database.run(%{ DROP SCHEMA IF EXISTS "#{@user.database_schema}" })
      end
    end

    conn = @user.in_database(as: :cluster_admin)
    CartoDB::UserModule::DBService.terminate_database_connections(@user.database_name, @user.database_host)

    # If user is in an organization should never have public schema, but to be safe (& tests which stub stuff)
    unless @user.database_schema == SCHEMA_PUBLIC
      drop_user(conn, @user.database_public_username)
    end

    if is_owner
      conn.run("DROP DATABASE \"#{@user.database_name}\"")
    end
    drop_user(conn)
  end.join

  monitor_user_notification
end
# Stores the groups-API endpoint credentials in CDB_Conf so the extension
# can call back into the metadata API; logs a debug notification when the
# configuration is incomplete instead of failing.
def configure_extension_org_metadata_api_endpoint
  config = Cartodb.config[:org_metadata_api]
  host = config['host']
  port = config['port']
  username = config['username']
  password = config['password']
  timeout = config.fetch('timeout', 10)

  if host.present? && port.present? && username.present? && password.present?
    conf_sql = %{
      SELECT cartodb.CDB_Conf_SetConf('groups_api',
        '{ \"host\": \"#{host}\", \"port\": #{port}, \"timeout\": #{timeout}, \"username\": \"#{username}\",
           \"password\": \"#{password}\"}'::json
      )
    }
    @user.in_database(as: :superuser) do |database|
      database.fetch(conf_sql).first
    end
  else
    CartoDB.notify_debug("org_metadata_api configuration missing", user_id: @user.id, config: config)
  end
end
# Installs the dataservices client extension (when configured) and refreshes
# the role search_paths so the extension schema is visible.
# Returns true on success, false on failure (error is reported, not raised).
def install_and_configure_geocoder_api_extension
  geocoder_api_config = Cartodb.get_config(:geocoder, 'api')
  # If there's no config we assume there's no need to install the
  # geocoder client as it is an independent API
  return if geocoder_api_config.blank?
  install_geocoder_api_extension(geocoder_api_config)
  @user.in_database(as: :superuser).run("ALTER USER \"#{@user.database_username}\"
                                        SET search_path TO #{build_search_path}")
  # Org users also have a public role whose search_path must be refreshed
  @user.in_database(as: :superuser).run("ALTER USER \"#{@user.database_public_username}\"
                                        SET search_path TO #{build_search_path}") if @user.organization_user?
  return true
rescue => e
  CartoDB.notify_error(
    'Error installing and configuring geocoder api extension',
    error: e.inspect, user: @user
  )
  return false
end
# Creates/updates the cdb_dataservices_client extension (pinned to
# CDB_DATASERVICES_CLIENT_VERSION) and writes its server + entity config,
# all in one transaction.
def install_geocoder_api_extension(geocoder_api_config)
  @user.in_database(as: :superuser) do |db|
    db.transaction do
      # plproxy is a dependency of the dataservices client
      db.run('CREATE EXTENSION IF NOT EXISTS plproxy SCHEMA public')
      db.run("CREATE EXTENSION IF NOT EXISTS cdb_dataservices_client VERSION '#{CDB_DATASERVICES_CLIENT_VERSION}'")
      db.run("ALTER EXTENSION cdb_dataservices_client UPDATE TO '#{CDB_DATASERVICES_CLIENT_VERSION}'")
      geocoder_server_sql = build_geocoder_server_config_sql(geocoder_api_config)
      db.run(geocoder_server_sql)
      db.run(build_entity_config_sql)
    end
  end
end
# Extra setup for the organization owner: role permissions, admin
# registration and the groups-API endpoint configuration.
def setup_organization_owner
  setup_organization_role_permissions
  setup_owner_permissions
  configure_extension_org_metadata_api_endpoint
end
# Use a direct connection to the db through the direct port specified
# in the database configuration and set up its statement timeout value. This
# allows to overpass the statement_timeout limit if a connection pooler is used.
# This method is supposed to receive a block that will be run with the created
# connection.
def in_database_direct_connection(statement_timeout:)
  raise 'need block' unless block_given?

  configuration = db_configuration_for
  # The config hash may carry either symbol or string keys, so try both
  configuration[:port] = configuration.fetch(:direct_port, configuration["direct_port"]) || configuration[:port] || configuration["port"]

  # Temporary trace to be removed once https://github.com/CartoDB/cartodb/issues/7047 is solved
  CartoDB::Logger.warning(message: 'Direct connection not used from queue') unless Socket.gethostname =~ /^que/

  connection = @user.get_connection(_opts = {}, configuration)

  begin
    connection.run("SET statement_timeout TO #{statement_timeout}")
    yield(connection)
  ensure
    # Always restore the session default, even if the block raised
    connection.run("SET statement_timeout TO DEFAULT")
  end
end
# Drops pooled connections so later queries pick up new role settings
# (e.g. a changed default search_path).
def reset_pooled_connections
  # Only close connections to this users' database
  $pool.close_connections!(@user.database_name)
end
# Upgrade the cartodb postgresql extension
# Installs (FROM unpackaged when possible) or upgrades the cartodb extension
# to the target version ('0.16.4' by default), optionally under a temporary
# statement_timeout, then verifies the installed semver matches the target.
def upgrade_cartodb_postgres_extension(statement_timeout = nil, cdb_extension_target_version = nil)
  if cdb_extension_target_version.nil?
    cdb_extension_target_version = '0.16.4'
  end

  @user.in_database(as: :superuser, no_cartodb_in_schema: true) do |db|
    db.transaction do
      unless statement_timeout.nil?
        old_timeout = db.fetch("SHOW statement_timeout;").first[:statement_timeout]
        db.run("SET statement_timeout TO '#{statement_timeout}';")
      end

      # The DO block: if cdb_version() is missing the extension is not
      # installed yet -> CREATE (adopting unpackaged objects when present);
      # otherwise ALTER ... UPDATE (via the 'next' alias for dev versions).
      db.run(%{
        DO LANGUAGE 'plpgsql' $$
        DECLARE
          ver TEXT;
        BEGIN
          BEGIN
            SELECT cartodb.cdb_version() INTO ver;
          EXCEPTION WHEN undefined_function OR invalid_schema_name THEN
            RAISE NOTICE 'Got % (%)', SQLERRM, SQLSTATE;
            BEGIN
              CREATE EXTENSION cartodb VERSION '#{cdb_extension_target_version}' FROM unpackaged;
            EXCEPTION WHEN undefined_table THEN
              RAISE NOTICE 'Got % (%)', SQLERRM, SQLSTATE;
              CREATE EXTENSION cartodb VERSION '#{cdb_extension_target_version}';
              RETURN;
            END;
            RETURN;
          END;
          ver := '#{cdb_extension_target_version}';
          IF position('dev' in ver) > 0 THEN
            EXECUTE 'ALTER EXTENSION cartodb UPDATE TO ''' || ver || 'next''';
            EXECUTE 'ALTER EXTENSION cartodb UPDATE TO ''' || ver || '''';
          ELSE
            EXECUTE 'ALTER EXTENSION cartodb UPDATE TO ''' || ver || '''';
          END IF;
        END;
        $$;
      })

      unless statement_timeout.nil?
        db.run("SET statement_timeout TO '#{old_timeout}';")
      end

      obtained = db.fetch('SELECT cartodb.cdb_version() as v').first[:v]

      unless cartodb_extension_semver(cdb_extension_target_version) == cartodb_extension_semver(obtained)
        raise("Expected cartodb extension '#{cdb_extension_target_version}' obtained '#{obtained}'")
      end
    end
  end
end
# True when the installed cartodb extension predates multiuser support
# (versions below 0.3.x). Raises when the version is not x.y.z-shaped.
def cartodb_extension_version_pre_mu?
  parts = cartodb_extension_semver(cartodb_extension_version)
  unless parts.size == 3
    raise 'Current cartodb extension version does not match standard x.y.z format'
  end

  major, minor, _patch = parts
  major == 0 && minor < 3
end
# Returns up to three integer components [major, minor, patch] from a
# dotted version string, as in http://semver.org/.
def cartodb_extension_semver(extension_version)
  extension_version.split('.').first(3).map { |component| component.to_i }
end
# Memoized version string of the installed cartodb extension
# (from cartodb.cdb_version()).
def cartodb_extension_version
  @cartodb_extension_version ||= @user.in_database(as: :superuser)
                                      .fetch('SELECT cartodb.cdb_version() AS v').first[:v]
end
# Revokes PUBLIC's privileges on the user's own schema inside a transaction;
# yields the connection (still in the transaction) when a block is given.
def reset_user_schema_permissions
  @user.in_database(as: :superuser) do |user_database|
    user_database.transaction do
      # Simplification: the previous code built a one-element array,
      # uniq'd it and looped — the loop always ran exactly once.
      revoke_privileges(user_database, @user.database_schema, 'PUBLIC')
      yield(user_database) if block_given?
    end
  end
end
# Strips PUBLIC and publicuser of database-level and per-schema privileges
# across the core schemas; yields the connection (in-transaction) if a block
# is given.
def reset_database_permissions
  @user.in_database(as: :superuser) do |user_database|
    user_database.transaction do
      schemas = %w(public cdb_importer cdb cartodb)

      ['PUBLIC', CartoDB::PUBLIC_DB_USER].each do |user|
        revoke_all_on_database_from(user_database, @user.database_name, user)
        schemas.each do |schema|
          revoke_privileges(user_database, schema, user)
        end
      end
      yield(user_database) if block_given?
    end
  end
end
# Applies the user's statement_timeout settings at both the role and the
# database level, then reconnects the cached connections so the new settings
# take effect.
def set_statement_timeouts
# NOTE(review): the "?" placeholders sit inside double quotes and the names
# are passed with .lit — this relies on Sequel literalization; presumably
# intentional, but confirm identifiers cannot contain quotes.
@user.in_database(as: :superuser) do |user_database|
user_database["ALTER ROLE \"?\" SET statement_timeout to ?", @user.database_username.lit,
@user.user_timeout].all
user_database["ALTER DATABASE \"?\" SET statement_timeout to ?", @user.database_name.lit,
@user.database_timeout].all
end
# Force fresh connections so the ALTERed settings apply to subsequent work.
@user.in_database.disconnect
@user.in_database.connect(db_configuration_for)
@user.in_database(as: :public_user).disconnect
@user.in_database(as: :public_user).connect(db_configuration_for(:public_user))
# Reconnection failures are deliberately ignored (best effort).
rescue Sequel::DatabaseConnectionError
end
# Registers the user's database role as a member of the organization group
# role via the cartodb extension helper.
def set_user_as_organization_member
@user.in_database(as: :superuser) do |user_database|
user_database.transaction do
user_database.run("SELECT cartodb.CDB_Organization_Create_Member('#{@user.database_username}');")
end
end
end
# Makes the user's database role the owner of its own schema.
def reset_schema_owner
@user.in_database(as: :superuser) do |database|
database.run(%{ ALTER SCHEMA "#{@user.database_schema}" OWNER TO "#{@user.database_username}" })
end
end
# Grants the user's own role CONNECT on its database.
def grant_user_in_database
  @queries.run_in_transaction(@queries.grant_connect_on_database_queries, true)
end
# Grants the shared publicuser role the minimum it needs: CONNECT on the
# database, read access to the cartodb schema (minus internal catalogs),
# and usage of public-schema functions plus spatial_ref_sys.
def grant_publicuser_in_database
@queries.run_in_transaction(
@queries.grant_connect_on_database_queries(CartoDB::PUBLIC_DB_USER),
true
)
@queries.run_in_transaction(
@queries.grant_read_on_schema_queries(SCHEMA_CARTODB, CartoDB::PUBLIC_DB_USER),
true
)
# Internal metadata catalogs must NOT be readable by publicuser even though
# the rest of the cartodb schema is.
@queries.run_in_transaction(
[
"REVOKE SELECT ON cartodb.cdb_tablemetadata FROM #{CartoDB::PUBLIC_DB_USER} CASCADE",
"REVOKE SELECT ON cartodb.cdb_analysis_catalog FROM #{CartoDB::PUBLIC_DB_USER} CASCADE"
],
true
)
@queries.run_in_transaction(
[
"GRANT USAGE ON SCHEMA #{SCHEMA_PUBLIC} TO #{CartoDB::PUBLIC_DB_USER}",
"GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA #{SCHEMA_PUBLIC} TO #{CartoDB::PUBLIC_DB_USER}",
"GRANT SELECT ON spatial_ref_sys TO #{CartoDB::PUBLIC_DB_USER}"
],
true
)
end
# Multi-user setups: grants the user's role everything on its own schema.
def set_user_privileges_in_own_schema
  @queries.run_in_transaction(@queries.grant_all_on_user_schema_queries, true)
end
# Grants read access on the cartodb schema plus write access to its
# metadata/analysis tables; db_user defaults to the user's own role.
def set_user_privileges_in_cartodb_schema(db_user = nil)
  grant_queries = @queries.grant_read_on_schema_queries(SCHEMA_CARTODB, db_user)
  grant_queries += @queries.grant_write_on_cdb_tablemetadata_queries(db_user)
  grant_queries += @queries.grant_write_on_cdb_analysis_catalog_queries(db_user)
  @queries.run_in_transaction(grant_queries, true)
end
# Multi-user setups: lets the shared publicuser role use (but not modify)
# the user's own schema.
def set_privileges_to_publicuser_in_own_schema
  @queries.run_in_transaction(
    @queries.grant_usage_on_user_schema_to_other(CartoDB::PUBLIC_DB_USER), true
  )
end
# Grants read access on the public schema to db_user (defaults to the
# user's own role).
def set_user_privileges_in_public_schema(db_user = nil)
  @queries.run_in_transaction(
    @queries.grant_read_on_schema_queries(SCHEMA_PUBLIC, db_user), true
  )
end
# Multi-user setups: grants full access on the importer schema to db_user
# (defaults to the user's own role).
def set_user_privileges_in_importer_schema(db_user = nil)
  @queries.run_in_transaction(
    @queries.grant_all_on_schema_queries(SCHEMA_IMPORTER, db_user), true
  )
end
# Revokes every database-level privilege from a role, but only if the role
# exists (revoking from a missing role would abort the enclosing transaction).
def revoke_all_on_database_from(conn, database, role)
  return unless role_exists?(conn, role)
  conn.run("REVOKE ALL ON DATABASE \"#{database}\" FROM \"#{role}\" CASCADE")
end
# Grants the owner role all privileges on the user's database.
def grant_owner_in_database
  @queries.run_in_transaction(@queries.grant_all_on_database_queries, true)
end
# Re-aligns per-table permissions: publicly visible tables become readable
# by the shared publicuser role, and every table is (re)owned by the user's
# database role.
def fix_table_permissions
tables_queries = []
@user.tables.each do |table|
if table.public? || table.public_with_link_only?
tables_queries << %{
GRANT SELECT ON \"#{@user.database_schema}\".\"#{table.name}\" TO #{CartoDB::PUBLIC_DB_USER} }
end
tables_queries << %{
ALTER TABLE \"#{@user.database_schema}\".\"#{table.name}\" OWNER TO \"#{@user.database_username}\" }
end
@queries.run_in_transaction(
tables_queries,
true
)
end
# Grants full access on the geocoding schema to db_user (defaults to the
# user's own role).
def set_user_privileges_in_geocoding_schema(db_user = nil)
  @queries.run_in_transaction(
    @queries.grant_all_on_schema_queries(SCHEMA_GEOCODING, db_user), true
  )
end
# Lets a role read the PostGIS geometry/geography catalog views. These live
# in the public schema alongside PostGIS itself. Defaults to the user's
# public role when no role name is given.
def set_geo_columns_privileges(role_name = nil)
  catalogs_schema = SCHEMA_PUBLIC
  target_user = role_name.nil? ? @user.database_public_username : role_name
  grants = %w(geometry_columns geography_columns).map do |catalog|
    %{ GRANT SELECT ON "#{catalogs_schema}"."#{catalog}" TO "#{target_user}" }
  end
  @queries.run_in_transaction(grants, true)
end
# Lets the shared publicuser role — and, for organization users, the given
# role (defaulting to the user's public role) — read the PostGIS raster
# catalog views, which live in the public schema.
def set_raster_privileges(role_name = nil)
  catalogs_schema = SCHEMA_PUBLIC
  target_user = role_name.nil? ? @user.database_public_username : role_name
  grantees = [CartoDB::PUBLIC_DB_USER]
  grantees << target_user unless @user.organization.nil?
  queries = []
  grantees.each do |grantee|
    queries << "GRANT SELECT ON TABLE \"#{catalogs_schema}\".\"raster_overviews\" TO \"#{grantee}\""
    queries << "GRANT SELECT ON TABLE \"#{catalogs_schema}\".\"raster_columns\" TO \"#{grantee}\""
  end
  @queries.run_in_transaction(queries, true)
end
# Grants the organization-wide member group role the same baseline access a
# regular member has: public schema, database connect, geo/raster catalogs,
# and the cartodb/importer/geocoding schemas.
def setup_organization_role_permissions
org_member_role = organization_member_group_role_member_name
set_user_privileges_in_public_schema(org_member_role)
@queries.run_in_transaction(
@queries.grant_connect_on_database_queries(org_member_role), true
)
set_geo_columns_privileges(org_member_role)
set_raster_privileges(org_member_role)
set_user_privileges_in_cartodb_schema(org_member_role)
set_user_privileges_in_importer_schema(org_member_role)
set_user_privileges_in_geocoding_schema(org_member_role)
end
# True when the named schema exists in the given database (defaults to the
# user's connection). NOTE(review): schema is interpolated into SQL —
# presumably always an internally generated name; confirm callers.
def schema_exists?(schema, database = @user.in_database)
  sql = "SELECT 1 as schema_exist FROM information_schema.schemata WHERE schema_name = '#{schema}'"
  row = database.fetch(sql).first
  !row.nil?
end
# Revokes each listed account's privileges on a schema, skipping roles that
# do not exist. No-op when the schema itself is missing.
def drop_users_privileges_in_schema(schema, accounts)
@user.in_database(as: :superuser, statement_timeout: 600000) do |user_database|
# `return` here exits the whole method, not just the block.
return unless schema_exists?(schema, user_database)
user_database.transaction do
accounts
.select { |role| role_exists?(user_database, role) }
.each { |role| revoke_privileges(user_database, schema, "\"#{role}\"") }
end
end
end
# Removes the given role from every role it is currently a member of,
# by walking pg_auth_members.
def revoke_all_memberships_on_database_to_role(conn, role)
conn.fetch(%{
select rolname from pg_user join pg_auth_members on (pg_user.usesysid=pg_auth_members.member)
join pg_roles on (pg_roles.oid=pg_auth_members.roleid) where pg_user.usename='#{role}'
}).each do |rolname|
conn.run("REVOKE \"#{rolname[:rolname]}\" FROM \"#{role}\" CASCADE")
end
end
# Drops grants and functions in a given schema, avoiding by all means a CASCADE
# to not affect extensions or other users
def drop_all_functions_from_schema(schema_name)
# Functions may depend on each other; retry dropping up to this many passes.
recursivity_max_depth = 3
return if schema_name == SCHEMA_PUBLIC
@user.in_database(as: :superuser) do |database|
# Non-aggregate functions
drop_function_sqls = database.fetch(%{
SELECT 'DROP FUNCTION ' || ns.nspname || '.' || proname || '(' || oidvectortypes(proargtypes) || ');'
AS sql
FROM pg_proc INNER JOIN pg_namespace ns ON (pg_proc.pronamespace = ns.oid AND pg_proc.proisagg = FALSE)
WHERE ns.nspname = '#{schema_name}'
})
# Simulate a controlled environment drop cascade contained to only functions
failed_sqls = []
recursivity_level = 0
begin
failed_sqls = []
drop_function_sqls.each do |sql_sentence|
begin
database.run(sql_sentence[:sql])
rescue Sequel::DatabaseError => e
# Dependency failures are retried on the next pass; anything else is fatal.
if e.message =~ /depends on function /i
failed_sqls.push(sql_sentence)
else
raise
end
end
end
drop_function_sqls = failed_sqls
recursivity_level += 1
end while failed_sqls.count > 0 && recursivity_level < recursivity_max_depth
# If something remains, reattempt later after dropping aggregates
if drop_function_sqls.count > 0
aggregate_dependant_function_sqls = drop_function_sqls
else
aggregate_dependant_function_sqls = []
end
# And now aggregate functions
failed_sqls = []
drop_function_sqls = database.fetch(%{
SELECT 'DROP AGGREGATE ' || ns.nspname || '.' || proname || '(' || oidvectortypes(proargtypes) || ');'
AS sql
FROM pg_proc INNER JOIN pg_namespace ns ON (pg_proc.pronamespace = ns.oid AND pg_proc.proisagg = TRUE)
WHERE ns.nspname = '#{schema_name}'
})
drop_function_sqls.each do |sql_sentence|
begin
database.run(sql_sentence[:sql])
rescue Sequel::DatabaseError
failed_sqls.push(sql_sentence)
end
end
if failed_sqls.count > 0
raise CartoDB::BaseCartoDBError.new('Cannot drop schema aggregate functions, dependencies remain')
end
# One final pass of normal functions, if left
if aggregate_dependant_function_sqls.count > 0
aggregate_dependant_function_sqls.each do |sql_sentence|
begin
database.run(sql_sentence[:sql])
rescue Sequel::DatabaseError
failed_sqls.push(sql_sentence)
end
end
end
if failed_sqls.count > 0
raise CartoDB::BaseCartoDBError.new('Cannot drop schema functions, dependencies remain')
end
end
end
# Creates a "public.cdb_invalidate_varnish()" function to invalidate Varnish.
#
# The function can only be used by the superuser; we expect security-definer
# triggers OR triggers on superuser-owned tables to call it with a controlled
# set of parameters. It is written in python because it needs to reach out to
# a Varnish server. Being unable to communicate with Varnish may or may not
# be critical depending on CartoDB configuration at time of definition.
#
# Picks the transport in priority order: external invalidation service,
# Varnish HTTP interface, then the legacy telnet interface.
def create_function_invalidate_varnish
  invalidation_config = Cartodb.config[:invalidation_service]
  if invalidation_config && invalidation_config.fetch('enabled', false)
    create_function_invalidate_varnish_invalidation_service
  elsif Cartodb.config[:varnish_management].fetch('http_port', false)
    create_function_invalidate_varnish_http
  else
    create_function_invalidate_varnish_telnet
  end
end
# Add plpythonu pl handler so the Varnish-invalidation functions can be
# defined in python.
def add_python
@user.in_database(
as: :superuser,
no_cartodb_in_schema: true
).run("CREATE OR REPLACE PROCEDURAL LANGUAGE 'plpythonu' HANDLER plpython_call_handler;")
end
# True when a role with the given name exists. Needed because revoking from
# a missing role raises and would end the enclosing transaction.
def role_exists?(db, role)
  row = db.fetch("SELECT 1 FROM pg_roles WHERE rolname='#{role}'").first
  !row.nil?
end
# Revokes every privilege the given user holds on a schema and on all of its
# sequences, functions and tables. `user` arrives pre-quoted when needed.
def revoke_privileges(db, schema, user)
  ['SCHEMA',
   'ALL SEQUENCES IN SCHEMA',
   'ALL FUNCTIONS IN SCHEMA',
   'ALL TABLES IN SCHEMA'].each do |target|
    db.run("REVOKE ALL ON #{target} \"#{schema}\" FROM #{user} CASCADE")
  end
end
# Name of the database role shared by all organization members, as reported
# by the cartodb extension.
def organization_member_group_role_member_name
@user.in_database.fetch(
"SELECT cartodb.CDB_Organization_Member_Group_Role_Member_Name() as org_member_role;"
)[:org_member_role][:org_member_role]
end
# Builds a Sequel connection configuration for the requested role:
# :superuser, :cluster_admin, :public_user, :public_db_user, or (default)
# the user's own credentials. Starts from the Rails-environment defaults and
# overlays role-specific values; nil overrides keep the default.
def db_configuration_for(user_role = nil)
  logger = (Rails.env.development? || Rails.env.test? ? ::Rails.logger : nil)
  overrides =
    case user_role
    when :superuser
      { 'database' => @user.database_name, :logger => logger,
        'host' => @user.database_host }
    when :cluster_admin
      # Cluster-wide administration connects to the maintenance database.
      { 'database' => 'postgres', :logger => logger,
        'host' => @user.database_host }
    when :public_user
      { 'database' => @user.database_name, :logger => logger,
        'username' => CartoDB::PUBLIC_DB_USER, 'password' => CartoDB::PUBLIC_DB_USER_PASSWORD,
        'host' => @user.database_host }
    when :public_db_user
      { 'database' => @user.database_name, :logger => logger,
        'username' => @user.database_public_username, 'password' => CartoDB::PUBLIC_DB_USER_PASSWORD,
        'host' => @user.database_host }
    else
      { 'database' => @user.database_name, :logger => logger,
        'username' => @user.database_username, 'password' => @user.database_password,
        'host' => @user.database_host }
    end
  ::Rails::Sequel.configuration.environment_for(Rails.env)
                 .merge(overrides) { |_, old_value, new_value| new_value.nil? ? old_value : new_value }
end
# Touches the users_modifications marker file and, when a signup service
# port is configured, asks the remote service to activate the db user.
def monitor_user_notification
  FileUtils.touch(Rails.root.join('log', 'users_modifications'))
  signups = Cartodb.config[:signups]
  unless signups.nil?
    service = signups["service"]
    unless service.nil?
      enable_remote_db_user unless service["port"].nil?
    end
  end
end
# Asks the remote signup service (running on the user's database host) to
# activate the database user. Raises the raw body on HTTP failure, or the
# service's stderr output when the service reports a non-zero retcode.
def enable_remote_db_user
  request = http_client.request(
    "#{@user.database_host}:#{Cartodb.config[:signups]['service']['port']}/scripts/activate_db_user",
    method: :post,
    headers: { "Content-Type" => "application/json" }
  )
  response = request.run
  if response.code != 200
    raise(response.body)
  else
    comm_response = JSON.parse(response.body)
    if comm_response['retcode'].to_i != 0
      # Bug fix: the error detail lives in the parsed JSON payload
      # (previously indexed the HTTP response object, which has no 'stderr').
      raise(comm_response['stderr'])
    end
  end
end
# Provisions a dedicated schema named after the user: loads the cartodb
# functions, persists the schema name, creates the schema, sets the search
# path and creates the per-user public role.
def create_own_schema
load_cartodb_functions
@user.database_schema = @user.username
@user.this.update(database_schema: @user.database_schema)
create_user_schema
set_database_search_path
create_public_db_user
end
# Moves the user's database objects into a schema named after the user.
# On failure, rolls back the metadata change and removes the new schema if
# (and only if) it ended up empty, then re-raises the original error.
def move_to_own_schema
new_schema_name = @user.username
old_database_schema_name = @user.database_schema
if @user.database_schema != new_schema_name
Carto::Db::UserSchemaMover.new(@user).move_objects(new_schema_name)
create_public_db_user
set_database_search_path
end
rescue => e
# Undo metadata changes if process fails
begin
@user.this.update database_schema: old_database_schema_name
# Defensive measure to avoid undesired table dropping
if schema_exists?(new_schema_name) && tables_effective(new_schema_name).count == 0
drop_all_functions_from_schema(new_schema_name)
@user.in_database.run(%{ DROP SCHEMA "#{new_schema_name}" })
end
rescue => ee
# Avoid shadowing the actual error
CartoDB.notify_exception(ee, user: @user)
end
raise e
end
# Destroys the user's schema, database and database role. The database is
# first made unconnectable and existing sessions are terminated so the DROP
# DATABASE cannot fail on active connections.
def drop_database_and_user(conn = nil)
conn ||= @user.in_database(as: :cluster_admin)
if !@user.database_name.nil? && !@user.database_name.empty?
@user.in_database(as: :superuser).run("DROP SCHEMA \"#{@user.database_schema}\" CASCADE")
conn.run("UPDATE pg_database SET datallowconn = 'false' WHERE datname = '#{@user.database_name}'")
CartoDB::UserModule::DBService.terminate_database_connections(@user.database_name, @user.database_host)
conn.run("DROP DATABASE \"#{@user.database_name}\"")
end
if !@user.database_username.nil? && !@user.database_username.empty?
conn.run("DROP USER \"#{@user.database_username}\"")
end
end
# Executes a raw query on the user's database and returns a summary hash
# (timing, row count, rows, and result/modification flags). PG errors are
# translated into CartoDB-specific exception classes.
def run_pg_query(query)
time = nil
res = nil
translation_proc = nil
@user.in_database do |user_database|
time = Benchmark.measure do
user_database.synchronize do |conn|
res = conn.exec query
end
translation_proc = user_database.conversion_procs
end
end
{
time: time.real,
total_rows: res.ntuples,
rows: pg_to_hash(res, translation_proc),
results: pg_results?(res),
modified: pg_modified?(res),
affected_rows: pg_size(res)
}
rescue => e
# Map missing column/table and other PG errors to typed exceptions based
# on the error message text; anything non-PG is re-raised untouched.
if e.is_a? PGError
if e.message.include?("does not exist")
if e.message.include?("column")
raise CartoDB::ColumnNotExists, e.message
else
raise CartoDB::TableNotExists, e.message
end
else
raise CartoDB::ErrorRunningQuery, e.message
end
else
raise e
end
end
# Creates the user's database role and adds it to the shared publicuser
# group, inside a single transaction. Logs and re-raises on failure.
def create_db_user
  conn = @user.in_database(as: :cluster_admin)
  conn.transaction do
    begin
      conn.run("CREATE USER \"#{@user.database_username}\" PASSWORD '#{@user.database_password}'")
      conn.run("GRANT publicuser to \"#{@user.database_username}\"")
    rescue => e
      puts "#{Time.now} USER SETUP ERROR (#{@user.database_username}): #{e}"
      raise e
    end
  end
end
# Creates the user's database from the template_postgis template, owned by
# the application's configured database user. Logs and re-raises on failure.
def create_user_db
conn = @user.in_database(as: :cluster_admin)
begin
conn.run("CREATE DATABASE \"#{@user.database_name}\"
WITH TEMPLATE = template_postgis
OWNER = #{::Rails::Sequel.configuration.environment_for(Rails.env)['username']}
ENCODING = 'UTF8'
CONNECTION LIMIT=-1")
rescue => e
puts "#{Time.now} USER SETUP ERROR WHEN CREATING DATABASE #{@user.database_name}: #{$!}"
raise e
end
end
# Derives the user's database name from the Rails environment and the user's
# partial db name, validates existence expectations (organization databases
# must pre-exist; standalone ones must not), then persists the name.
def set_database_name
  env_prefix =
    case Rails.env
    when 'development' then 'cartodb_dev_user_'
    when 'staging' then 'cartodb_staging_user_'
    when 'test' then 'cartodb_test_user_'
    else 'cartodb_user_'
    end
  @user.database_name = "#{env_prefix}#{@user.partial_db_name}_db"
  if @user.has_organization_enabled?
    unless @user.database_exists?
      raise "Organization database #{@user.database_name} doesn't exist"
    end
  elsif @user.database_exists?
    raise "Database #{@user.database_name} already exists"
  end
  @user.this.update database_name: @user.database_name
end
# Roles granted public access: always the shared publicuser, plus the user's
# own public role for organization members.
def public_user_roles
  roles = [CartoDB::PUBLIC_DB_USER]
  roles << @user.database_public_username if @user.organization_user?
  roles
end
# Instance-level convenience wrapper over the class method, using the
# current user's database and host.
def terminate_database_connections
CartoDB::UserModule::DBService.terminate_database_connections(@user.database_name, @user.database_host)
end
# Kills every backend connected to the given database. Uses a plpgsql DO
# block because the backend-pid column is named 'procpid' before PG 9.2
# (server_version_num 90200) and 'pid' afterwards.
def self.terminate_database_connections(database_name, database_host)
connection_params = ::Rails::Sequel.configuration.environment_for(Rails.env).merge(
'host' => database_host,
'database' => 'postgres'
) { |_, o, n| n.nil? ? o : n }
conn = ::Sequel.connect(connection_params)
conn.run("
DO language plpgsql $$
DECLARE
ver INT;
sql TEXT;
BEGIN
SELECT INTO ver setting from pg_settings where name='server_version_num';
sql := 'SELECT pg_terminate_backend(';
IF ver > 90199 THEN
sql := sql || 'pid';
ELSE
sql := sql || 'procpid';
END IF;
sql := sql || ') FROM pg_stat_activity WHERE datname = '
|| quote_literal('#{database_name}');
RAISE NOTICE '%', sql;
EXECUTE sql;
END
$$
")
close_sequel_connection(conn)
end
# Triggers defined in the given schema (defaults to the user's schema).
def triggers(schema = @user.database_schema)
Carto::Db::Database.build_with_user(@user).triggers(schema)
end
# Functions owned by the user in the given schema (defaults to the user's schema).
def functions(schema = @user.database_schema)
Carto::Db::Database.build_with_user(@user).functions(schema, @user.database_username)
end
# Views owned by the user in the given schema (defaults to the user's schema).
def views(schema = @user.database_schema)
Carto::Db::Database.build_with_user(@user).views(schema, @user.database_username)
end
# Materialized views owned by the user in the given schema (defaults to the
# user's schema).
def materialized_views(schema = @user.database_schema)
Carto::Db::Database.build_with_user(@user).materialized_views(schema, @user.database_username)
end
# Returns the PostgreSQL server version (e.g. "9.5" or "10.3.1") parsed from
# SELECT version(), or nil when the banner does not match.
def get_database_version
  banner = @user.in_database.fetch("SELECT version()").first[:version]
  match = banner.match(/(PostgreSQL (([0-9]+\.?){2,3})).*/)
  match.nil? ? nil : match[2]
end
# Wires the configured aggregation tables into the user's database via a
# foreign data wrapper: stores the FDW config, sets up the 'aggregation'
# server, creates the foreign admin0/admin1 tables and grants the user read
# access to them.
def connect_to_aggregation_tables
config = Cartodb.get_config(:aggregation_tables)
@user.in_database(as: :superuser) do |db|
db.transaction do
db.run(build_aggregation_fdw_config_sql(config))
db.run("SELECT cartodb._CDB_Setup_FDW('aggregation');")
db.run("CREATE FOREIGN TABLE IF NOT EXISTS #{SCHEMA_AGGREGATION_TABLES}.agg_admin0 " \
"(cartodb_id integer, the_geom geometry(Geometry,4326), " \
"the_geom_webmercator geometry(Geometry,3857), " \
"population double precision OPTIONS (column_name 'pop_est')) SERVER aggregation OPTIONS " \
"(schema_name 'public', table_name '#{config['tables']['admin0']}', updatable 'false');")
db.run("CREATE FOREIGN TABLE IF NOT EXISTS #{SCHEMA_AGGREGATION_TABLES}.agg_admin1 " \
"(cartodb_id integer,the_geom geometry(Geometry,4326), " \
"the_geom_webmercator geometry(Geometry,3857)) " \
"SERVER aggregation OPTIONS (schema_name 'public', table_name '#{config['tables']['admin1']}', updatable 'false');")
db.run("GRANT SELECT ON TABLE #{SCHEMA_AGGREGATION_TABLES}.agg_admin0 TO \"#{@user.database_username}\";")
db.run("GRANT SELECT ON TABLE #{SCHEMA_AGGREGATION_TABLES}.agg_admin1 TO \"#{@user.database_username}\";")
end
end
end
private
# Memoized HTTP client used to talk to the signup service.
def http_client
@http_client ||= Carto::Http::Client.get('old_user', log_requests: true)
end
# Telnet invalidation works only for Varnish 2.x.
# Defines public.cdb_invalidate_varnish() in plpythonu; the function purges
# the table's Surrogate-Key from Varnish over its telnet admin interface,
# reconnecting up to `retry` times and failing hard only when configured
# as critical.
def create_function_invalidate_varnish_telnet
add_python
varnish_host = Cartodb.config[:varnish_management].try(:[], 'host') || '127.0.0.1'
varnish_port = Cartodb.config[:varnish_management].try(:[], 'port') || 6082
varnish_timeout = Cartodb.config[:varnish_management].try(:[], 'timeout') || 5
varnish_critical = Cartodb.config[:varnish_management].try(:[], 'critical') == true ? 1 : 0
varnish_retry = Cartodb.config[:varnish_management].try(:[], 'retry') || 5
# Consistency fix: use Cartodb.config like every other line here (was
# Cartodb::config, a scope-resolution method call).
purge_command = Cartodb.config[:varnish_management]["purge_command"]
varnish_trigger_verbose = Cartodb.config[:varnish_management].fetch('trigger_verbose', true) == true ? 1 : 0
@user.in_database(as: :superuser).run(
<<-TRIGGER
BEGIN;
CREATE OR REPLACE FUNCTION public.cdb_invalidate_varnish(table_name text) RETURNS void AS
$$
critical = #{varnish_critical}
timeout = #{varnish_timeout}
retry = #{varnish_retry}
trigger_verbose = #{varnish_trigger_verbose}
client = GD.get('varnish', None)
for i in ('base64', 'hashlib'):
if not i in GD:
GD[i] = __import__(i)
while True:
if not client:
try:
import varnish
client = GD['varnish'] = varnish.VarnishHandler(('#{varnish_host}', #{varnish_port}, timeout))
except Exception as err:
# NOTE: we won't retry on connection error
if critical:
plpy.error('Varnish connection error: ' + str(err))
break
try:
cache_key = "t:" + GD['base64'].b64encode(GD['hashlib'].sha256('#{@user.database_name}:%s' % table_name).digest())[0:6]
# We want to say \b here, but the Varnish telnet interface expects \\b, we have to escape that on Python to \\\\b and double that for SQL
client.fetch('#{purge_command} obj.http.Surrogate-Key ~ "\\\\\\\\b%s\\\\\\\\b"' % cache_key)
break
except Exception as err:
if trigger_verbose:
plpy.warning('Varnish fetch error: ' + str(err))
client = GD['varnish'] = None # force reconnect
if not retry:
if critical:
plpy.error('Varnish fetch error: ' + str(err))
break
retry -= 1 # try reconnecting
$$
LANGUAGE 'plpythonu' VOLATILE;
REVOKE ALL ON FUNCTION public.cdb_invalidate_varnish(TEXT) FROM PUBLIC;
COMMIT;
TRIGGER
)
end
# Defines public.cdb_invalidate_varnish() in plpythonu using Varnish's HTTP
# interface: issues a PURGE request keyed on the table's Surrogate-Key,
# retrying up to `retry` times and failing hard only when configured as
# critical.
def create_function_invalidate_varnish_http
add_python
varnish_host = Cartodb.config[:varnish_management].try(:[], 'host') || '127.0.0.1'
varnish_port = Cartodb.config[:varnish_management].try(:[], 'http_port') || 6081
varnish_timeout = Cartodb.config[:varnish_management].try(:[], 'timeout') || 5
varnish_critical = Cartodb.config[:varnish_management].try(:[], 'critical') == true ? 1 : 0
varnish_retry = Cartodb.config[:varnish_management].try(:[], 'retry') || 5
varnish_trigger_verbose = Cartodb.config[:varnish_management].fetch('trigger_verbose', true) == true ? 1 : 0
@user.in_database(as: :superuser).run(
<<-TRIGGER
BEGIN;
CREATE OR REPLACE FUNCTION public.cdb_invalidate_varnish(table_name text) RETURNS void AS
$$
critical = #{varnish_critical}
timeout = #{varnish_timeout}
retry = #{varnish_retry}
trigger_verbose = #{varnish_trigger_verbose}
for i in ('httplib', 'base64', 'hashlib'):
if not i in GD:
GD[i] = __import__(i)
while True:
try:
client = GD['httplib'].HTTPConnection('#{varnish_host}', #{varnish_port}, False, timeout)
cache_key = "t:" + GD['base64'].b64encode(GD['hashlib'].sha256('#{@user.database_name}:%s' % table_name).digest())[0:6]
client.request('PURGE', '/key', '', {"Invalidation-Match": ('\\\\b%s\\\\b' % cache_key) })
response = client.getresponse()
assert response.status == 204
break
except Exception as err:
if trigger_verbose:
plpy.warning('Varnish purge error: ' + str(err))
if not retry:
if critical:
plpy.error('Varnish purge error: ' + str(err))
break
retry -= 1 # try reconnecting
$$
LANGUAGE 'plpythonu' VOLATILE;
REVOKE ALL ON FUNCTION public.cdb_invalidate_varnish(TEXT) FROM PUBLIC;
COMMIT;
TRIGGER
)
end
# Invalidate through external service
# Defines public.cdb_invalidate_varnish() in plpythonu; the function sends a
# 'TCH' command to the Redis-based invalidation service, retrying with
# reconnects, and logs a JSON summary of each invalidation to syslog when
# verbose triggering is enabled.
def create_function_invalidate_varnish_invalidation_service
add_python
invalidation_host = Cartodb.config[:invalidation_service].try(:[], 'host') || '127.0.0.1'
invalidation_port = Cartodb.config[:invalidation_service].try(:[], 'port') || 3142
invalidation_timeout = Cartodb.config[:invalidation_service].try(:[], 'timeout') || 5
invalidation_critical = Cartodb.config[:invalidation_service].try(:[], 'critical') ? 1 : 0
invalidation_retry = Cartodb.config[:invalidation_service].try(:[], 'retry') || 5
invalidation_trigger_verbose =
Cartodb.config[:invalidation_service].fetch('trigger_verbose', true) == true ? 1 : 0
@user.in_database(as: :superuser).run(
<<-TRIGGER
BEGIN;
CREATE OR REPLACE FUNCTION public.cdb_invalidate_varnish(table_name text) RETURNS void AS
$$
critical = #{invalidation_critical}
timeout = #{invalidation_timeout}
retry = #{invalidation_retry}
trigger_verbose = #{invalidation_trigger_verbose}
client = GD.get('invalidation', None)
if 'syslog' not in GD:
import syslog
GD['syslog'] = syslog
else:
syslog = GD['syslog']
if 'time' not in GD:
import time
GD['time'] = time
else:
time = GD['time']
if 'json' not in GD:
import json
GD['json'] = json
else:
json = GD['json']
start = time.time()
retries = 0
termination_state = 1
error = ''
while True:
if not client:
try:
import redis
client = GD['invalidation'] = redis.Redis(host='#{invalidation_host}', port=#{invalidation_port}, socket_timeout=timeout)
except Exception as err:
error = "client_error - %s" % str(err)
# NOTE: we won't retry on connection error
if critical:
plpy.error('Invalidation Service connection error: ' + str(err))
break
try:
client.execute_command('TCH', '#{@user.database_name}', table_name)
termination_state = 0
error = ''
break
except Exception as err:
error = "request_error - %s" % str(err)
if trigger_verbose:
plpy.warning('Invalidation Service warning: ' + str(err))
client = GD['invalidation'] = None # force reconnect
if not retry:
if critical:
plpy.error('Invalidation Service error: ' + str(err))
break
retries = retries + 1
retry -= 1 # try reconnecting
end = time.time()
invalidation_duration = (end - start)
current_time = time.strftime("%Y-%m-%d %H:%M:%S %Z", time.localtime())
session_user = plpy.execute("SELECT session_user", 1)[0]["session_user"]
invalidation_result = {"timestamp": current_time, "duration": round(invalidation_duration, 8), "termination_state": termination_state, "retries": retries, "error": error, "database": "#{@user.database_name}", "table_name": table_name, "dbuser": session_user}
if trigger_verbose:
syslog.syslog(syslog.LOG_INFO, "invalidation: %s" % json.dumps(invalidation_result))
$$
LANGUAGE 'plpythonu' VOLATILE;
REVOKE ALL ON FUNCTION public.cdb_invalidate_varnish(TEXT) FROM PUBLIC;
COMMIT;
TRIGGER
)
end
# Geocoder api extension related.
# Builds the SQL that stores the geocoder server connection string via
# cartodb.CDB_Conf_SetConf.
def build_geocoder_server_config_sql(config)
  host, port, user, dbname = config.values_at('host', 'port', 'user', 'dbname')
  %{
    SELECT cartodb.CDB_Conf_SetConf('geocoder_server_config',
      '{ \"connection_str\": \"host=#{host} port=#{port} dbname=#{dbname} user=#{user}\"}'::json
    );
  }
end
# Builds the SQL that stores whether this database belongs to an
# organization and under which entity name, via cartodb.CDB_Conf_SetConf.
def build_entity_config_sql
# User configuration
entity_name = @user.organization_user? ? @user.organization.name : @user.username
%{
SELECT cartodb.CDB_Conf_SetConf('user_config',
'{"is_organization": #{@user.organization_user?}, "entity_name": "#{entity_name}"}'::json
);
}
end
# Builds the SQL that stores the aggregation-tables FDW server and
# public-user credentials via cartodb.CDB_Conf_SetConf.
def build_aggregation_fdw_config_sql(config)
  dbname, host, port = config.values_at('dbname', 'host', 'port')
  fdw_user, fdw_password = config.values_at('username', 'password')
  %{
    SELECT cartodb.CDB_Conf_SetConf('fdws',
      '{"aggregation":{"server":{"extensions":"postgis", "dbname":"#{dbname}",
      "host":"#{host}", "port":"#{port}"}, "users":{"public":{"user":"#{fdw_user}",
      "password":"#{fdw_password}"} } } }'::json
    );
  }
end
end
end
end
|
require 'open3'
module Util
  # Orchestrates the AACT dump/restore cycle: dumps the background database,
  # refreshes the staging database, then restores into the public databases,
  # terminating sessions and toggling public grants around each restore.
  class DbManager
    attr_accessor :con, :stage_con, :pub_con, :load_event

    # Reuses the caller's LoadEvent when given; otherwise records this run as
    # an ad hoc event.
    def initialize(params={})
      if params[:load_event]
        @load_event = params[:load_event]
      else
        @load_event = Admin::LoadEvent.create({:event_type=>'ad hoc',:status=>'running',:description=>'',:problems=>''})
      end
    end

    def dump_database
      fm=Util::FileManager.new
      # First populate db named 'aact' from background db so the dump file will be configured to restore db named aact
      psql_file="#{fm.dump_directory}/aact.psql"
      File.delete(psql_file) if File.exist?(psql_file)
      cmd="pg_dump --no-owner --no-acl --host=localhost --username=#{ENV['DB_SUPER_USERNAME']} --dbname=aact_back --schema=ctgov > #{psql_file}"
      run_command_line(cmd)
      # clear out previous ctgov content from staging db
      puts "Recreating ctgov schema in aact staging database..."
      terminate_stage_db_sessions
      stage_con.execute('DROP SCHEMA IF EXISTS ctgov CASCADE;')
      stage_con.execute('CREATE SCHEMA ctgov;')
      # refresh staging db
      puts "Refreshing aact staging database..."
      cmd="psql -h localhost aact < #{psql_file} > /dev/null"
      run_command_line(cmd)
      File.delete(fm.pg_dump_file) if File.exist?(fm.pg_dump_file)
      # Custom-format dump used later by refresh_public_db's pg_restore.
      cmd="pg_dump aact -v -h localhost -p 5432 -U #{ENV['DB_SUPER_USERNAME']} --no-password --clean --exclude-table schema_migrations --schema=ctgov -c -C -Fc -f #{fm.pg_dump_file}"
      run_command_line(cmd)
      # Point ActiveRecord back at the background database.
      ActiveRecord::Base.establish_connection(ENV["AACT_BACK_DATABASE_URL"]).connection
    end

    # Restores the latest dump into the public and alternate databases.
    # Returns true on success, nil when there is no dump file, false on error.
    # Public grants are re-applied even on failure.
    def refresh_public_db
      success_code=true
      revoke_db_privs
      terminate_db_sessions
      dump_file_name=Util::FileManager.new.pg_dump_file
      return nil if dump_file_name.nil?
      cmd="pg_restore -c -j 5 -v -h #{public_host_name} -p 5432 -U #{ENV['DB_SUPER_USERNAME']} -d #{public_db_name} #{dump_file_name}"
      run_command_line(cmd)
      terminate_alt_db_sessions
      cmd="pg_restore -c -j 5 -v -h #{public_host_name} -p 5432 -U #{ENV['DB_SUPER_USERNAME']} -d aact_alt #{dump_file_name}"
      run_command_line(cmd)
      grant_db_privs
      return success_code
    rescue => error
      # Bug fix: the closing paren was missing from the logged message.
      load_event.add_problem("#{error.message} (#{error.class} #{error.backtrace})")
      grant_db_privs
      return false
    end

    def grant_db_privs
      revoke_db_privs # to avoid errors, ensure privs revoked first
      pub_con.execute("grant connect on database #{public_db_name} to public;")
      pub_con.execute("grant usage on schema ctgov TO public;")
      pub_con.execute('grant select on all tables in schema ctgov to public;')
    end

    def revoke_db_privs
      pub_con.execute("revoke connect on database #{public_db_name} from public;")
      pub_con.execute("revoke select on all tables in schema ctgov from public;")
      pub_con.execute("revoke all on schema ctgov from public;")
    rescue => error
      # error raised if schema missing. Ignore. Will be created in a pg_restore.
      puts "DbManager.revoke_db_privs: #{error}"
    end

    # Runs a shell command; records stderr on the load event when the command
    # exits non-zero. Returns true on success, false on failure (previously
    # the failure flag was assigned to a dead local and discarded).
    def run_command_line(cmd)
      puts cmd
      _stdout, stderr, status = Open3.capture3(cmd)
      success = status.exitstatus == 0
      load_event.add_problem("#{stderr}") unless success
      success
    end

    def terminate_stage_db_sessions
      stage_con.execute("SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE pid <> pg_backend_pid() AND datname ='aact'")
    end

    def terminate_db_sessions
      pub_con.execute("SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE pid <> pg_backend_pid() AND datname = '#{public_db_name}'")
    end

    def terminate_alt_db_sessions
      pub_con.execute("SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE pid <> pg_backend_pid() AND datname = 'aact_alt'")
    end

    def public_study_count
      pub_con.execute("select count(*) from studies").values.flatten.first.to_i
    end

    def background_study_count
      con.execute("select count(*) from studies").values.flatten.first.to_i
    end

    # Lazily connected background-database connection.
    def con
      @con ||= ActiveRecord::Base.establish_connection(ENV["AACT_BACK_DATABASE_URL"]).connection
    end

    # Lazily connected staging-database connection.
    def stage_con
      @stage_con ||= ActiveRecord::Base.establish_connection(ENV["AACT_STAGE_DATABASE_URL"]).connection
    end

    # Lazily connected public-database connection.
    def pub_con
      @pub_con ||= PublicBase.establish_connection(ENV["AACT_PUBLIC_DATABASE_URL"]).connection
    end

    def public_host_name
      ENV['AACT_PUBLIC_HOSTNAME']
    end

    def public_db_name
      ENV['AACT_PUBLIC_DATABASE_NAME']
    end
  end
end
# aact-611: use pg_dump command that works on postgres 9.2.23
require 'open3'
module Util
class DbManager
attr_accessor :con, :stage_con, :pub_con, :load_event
# Reuses the caller's LoadEvent when given; otherwise records this run as an
# ad hoc event.
def initialize(params={})
  @load_event = params[:load_event] ||
                Admin::LoadEvent.create({:event_type=>'ad hoc',:status=>'running',:description=>'',:problems=>''})
end
# Dumps the ctgov schema from the background database, rebuilds the staging
# database from it, then produces the custom-format dump file consumed by
# refresh_public_db.
def dump_database
fm=Util::FileManager.new
# First populate db named 'aact' from background db so the dump file will be configured to restore db named aact
psql_file="#{fm.dump_directory}/aact.psql"
File.delete(psql_file) if File.exist?(psql_file)
# pg_dump that works on postgres 10.3
#cmd="pg_dump --no-owner --no-acl --host=localhost --username=#{ENV['DB_SUPER_USERNAME']} --dbname=aact_back --schema=ctgov > #{psql_file}"
# pg_dump that works on postgres 9.2.23 - which is what's running on servers as of 4/20/18
cmd="pg_dump --no-owner --no-acl --host=localhost --username=#{ENV['DB_SUPER_USERNAME']} --schema=ctgov aact_back > #{psql_file}"
#pg_dump --no-owner --no-acl --host=localhost --username=ctti --schema=ctgov aact_back > lookat.sql
run_command_line(cmd)
# clear out previous ctgov content from staging db
puts "Recreating ctgov schema in aact staging database..."
terminate_stage_db_sessions
stage_con.execute('DROP SCHEMA IF EXISTS ctgov CASCADE;')
stage_con.execute('CREATE SCHEMA ctgov;')
# refresh staging db
puts "Refreshing aact staging database..."
cmd="psql -h localhost aact < #{psql_file} > /dev/null"
run_command_line(cmd)
File.delete(fm.pg_dump_file) if File.exist?(fm.pg_dump_file)
# Custom-format dump used later by refresh_public_db's pg_restore.
cmd="pg_dump aact -v -h localhost -p 5432 -U #{ENV['DB_SUPER_USERNAME']} --no-password --clean --exclude-table schema_migrations --schema=ctgov -c -C -Fc -f #{fm.pg_dump_file}"
run_command_line(cmd)
# Point ActiveRecord back at the background database.
ActiveRecord::Base.establish_connection(ENV["AACT_BACK_DATABASE_URL"]).connection
end
def refresh_public_db
begin
success_code=true
revoke_db_privs
terminate_db_sessions
dump_file_name=Util::FileManager.new.pg_dump_file
return nil if dump_file_name.nil?
cmd="pg_restore -c -j 5 -v -h #{public_host_name} -p 5432 -U #{ENV['DB_SUPER_USERNAME']} -d #{public_db_name} #{dump_file_name}"
run_command_line(cmd)
terminate_alt_db_sessions
cmd="pg_restore -c -j 5 -v -h #{public_host_name} -p 5432 -U #{ENV['DB_SUPER_USERNAME']} -d aact_alt #{dump_file_name}"
run_command_line(cmd)
grant_db_privs
return success_code
rescue => error
load_event.add_problem("#{error.message} (#{error.class} #{error.backtrace}")
grant_db_privs
return false
end
end
def grant_db_privs
revoke_db_privs # to avoid errors, ensure privs revoked first
pub_con.execute("grant connect on database #{public_db_name} to public;")
pub_con.execute("grant usage on schema ctgov TO public;")
pub_con.execute('grant select on all tables in schema ctgov to public;')
end
def revoke_db_privs
begin
pub_con.execute("revoke connect on database #{public_db_name} from public;")
pub_con.execute("revoke select on all tables in schema ctgov from public;")
pub_con.execute("revoke all on schema ctgov from public;")
rescue => error
# error raised if schema missing. Ignore. Will be created in a pg_restore.
puts "DbManager.revoke_db_privs: #{error}"
end
end
def run_command_line(cmd)
puts cmd
stdout, stderr, status = Open3.capture3(cmd)
if status.exitstatus != 0
load_event.add_problem("#{stderr}")
success_code=false
end
end
def terminate_stage_db_sessions
stage_con.execute("SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE pid <> pg_backend_pid() AND datname ='aact'")
end
def terminate_db_sessions
pub_con.execute("SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE pid <> pg_backend_pid() AND datname = '#{public_db_name}'")
end
def terminate_alt_db_sessions
pub_con.execute("SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE pid <> pg_backend_pid() AND datname = 'aact_alt'")
end
def public_study_count
pub_con.execute("select count(*) from studies").values.flatten.first.to_i
end
def background_study_count
con.execute("select count(*) from studies").values.flatten.first.to_i
end
def con
@con ||= ActiveRecord::Base.establish_connection(ENV["AACT_BACK_DATABASE_URL"]).connection
end
def stage_con
@stage_con ||= ActiveRecord::Base.establish_connection(ENV["AACT_STAGE_DATABASE_URL"]).connection
end
def pub_con
@pub_con ||= PublicBase.establish_connection(ENV["AACT_PUBLIC_DATABASE_URL"]).connection
end
def public_host_name
ENV['AACT_PUBLIC_HOSTNAME']
end
def public_db_name
ENV['AACT_PUBLIC_DATABASE_NAME']
end
end
end
|
module Util
class DbManager
  def self.public_db_name
    'aact'
  end

  # Change a db user's password; on failure the error lands in user.errors.
  # NOTE(review): pwd is interpolated directly into SQL - assumes it is
  # validated/escaped upstream; confirm before exposing to untrusted input.
  def self.change_password(user,pwd)
    begin
      con=ActiveRecord::Base.establish_connection(:public).connection
      con.execute("alter user #{user.username} password '#{pwd}'")
      con.disconnect!
    rescue => e
      user.errors.add(:base, e.message)
    end
  end

  # Create a db role for the user with read-only access to the public schema.
  # NOTE(review): username/password are interpolated into SQL - assumes they
  # are validated upstream; confirm, or quote the identifiers.
  def self.add_user(user)
    begin
      con=ActiveRecord::Base.establish_connection(:public).connection
      con.execute("create user #{user.username} password '#{user.unencrypted_password}'")
      con.execute("grant connect on database aact to #{user.username}")
      con.execute("grant usage on schema public TO #{user.username}")
      con.execute("grant select on all tables in schema public to #{user.username};")
      con.disconnect!
    rescue => e
      user.errors.add(:base, e.message)
    end
  end

  # Drop the user's db role (after killing their sessions). A missing role is
  # tolerated; any other error is re-raised.
  def self.remove_user(user)
    self.terminate_sessions_for(user)
    con=ActiveRecord::Base.establish_connection(:public).connection
    begin
      con.execute("drop owned by #{user.username};")
      con.execute("revoke all on schema public from #{user.username};")
      con.execute("drop user #{user.username};")
    rescue => e
      con.disconnect!
      raise e unless e.message == "role \"#{user.username}\" does not exist"
    end
    con.disconnect!
  end

  def self.grant_db_privs
    self.revoke_db_privs # to avoid errors, ensure privs revoked first
    con=ActiveRecord::Base.establish_connection(:public).connection
    con.execute("grant connect on database #{public_db_name} to public;")
    con.execute("grant usage on schema public TO public;")
    con.execute('grant select on all tables in schema public to public;')
    con.disconnect!
  end

  # BUGFIX: no longer terminates active sessions first - doing so also killed
  # the updater's own connection mid-refresh, cutting it off at the knees.
  def self.revoke_db_privs
    con=ActiveRecord::Base.establish_connection(:public).connection
    con.execute("revoke connect on database #{public_db_name} from public;")
    con.execute("revoke select on all tables in schema public from public;")
    con.execute("revoke all on schema public from public;")
    con.disconnect!
  end

  # Terminate every session belonging to the given user.
  def self.terminate_sessions_for(user)
    con=ActiveRecord::Base.establish_connection(:public).connection
    con.select_all("select * from pg_stat_activity order by pid;").each { |session|
      if session['usename']=="#{user.username}"
        con.execute("select pg_terminate_backend(#{session['pid']})")
      end
    }
    # BUGFIX: connection was previously leaked (never disconnected).
    con.disconnect!
  end

  # Terminate every session connected to the public database.
  def self.terminate_active_sessions
    con=ActiveRecord::Base.establish_connection(:public).connection
    con.select_all("select * from pg_stat_activity order by pid;").each { |session|
      if session['datname']=="#{public_db_name}"
        con.execute("select pg_terminate_backend(#{session['pid']})")
      end
    }
    con.disconnect!
  end
end
end
Cannot terminate active sessions when revoking db privs, or the updater cuts itself off at the knees
module Util
class DbManager
  def self.public_db_name
    'aact'
  end

  # Change a db user's password; on failure the error lands in user.errors.
  # NOTE(review): pwd is interpolated directly into SQL - assumes it is
  # validated/escaped upstream; confirm before exposing to untrusted input.
  def self.change_password(user,pwd)
    begin
      con=ActiveRecord::Base.establish_connection(:public).connection
      con.execute("alter user #{user.username} password '#{pwd}'")
      con.disconnect!
    rescue => e
      user.errors.add(:base, e.message)
    end
  end

  # Create a db role for the user with read-only access to the public schema.
  # NOTE(review): username/password are interpolated into SQL - assumes they
  # are validated upstream; confirm, or quote the identifiers.
  def self.add_user(user)
    begin
      con=ActiveRecord::Base.establish_connection(:public).connection
      con.execute("create user #{user.username} password '#{user.unencrypted_password}'")
      con.execute("grant connect on database aact to #{user.username}")
      con.execute("grant usage on schema public TO #{user.username}")
      con.execute("grant select on all tables in schema public to #{user.username};")
      con.disconnect!
    rescue => e
      user.errors.add(:base, e.message)
    end
  end

  # Drop the user's db role (after killing their sessions). A missing role is
  # tolerated; any other error is re-raised.
  def self.remove_user(user)
    self.terminate_sessions_for(user)
    con=ActiveRecord::Base.establish_connection(:public).connection
    begin
      con.execute("drop owned by #{user.username};")
      con.execute("revoke all on schema public from #{user.username};")
      con.execute("drop user #{user.username};")
    rescue => e
      con.disconnect!
      raise e unless e.message == "role \"#{user.username}\" does not exist"
    end
    con.disconnect!
  end

  def self.grant_db_privs
    self.revoke_db_privs # to avoid errors, ensure privs revoked first
    con=ActiveRecord::Base.establish_connection(:public).connection
    con.execute("grant connect on database #{public_db_name} to public;")
    con.execute("grant usage on schema public TO public;")
    con.execute('grant select on all tables in schema public to public;')
    con.disconnect!
  end

  # Deliberately does NOT terminate active sessions: doing so killed the
  # updater's own connection mid-refresh.
  def self.revoke_db_privs
    con=ActiveRecord::Base.establish_connection(:public).connection
    con.execute("revoke connect on database #{public_db_name} from public;")
    con.execute("revoke select on all tables in schema public from public;")
    con.execute("revoke all on schema public from public;")
    con.disconnect!
  end

  # Terminate every session belonging to the given user.
  def self.terminate_sessions_for(user)
    con=ActiveRecord::Base.establish_connection(:public).connection
    con.select_all("select * from pg_stat_activity order by pid;").each { |session|
      if session['usename']=="#{user.username}"
        con.execute("select pg_terminate_backend(#{session['pid']})")
      end
    }
    # BUGFIX: connection was previously leaked (every sibling method
    # disconnects; this one did not).
    con.disconnect!
  end

  # Terminate every session connected to the public database.
  def self.terminate_active_sessions
    con=ActiveRecord::Base.establish_connection(:public).connection
    con.select_all("select * from pg_stat_activity order by pid;").each { |session|
      if session['datname']=="#{public_db_name}"
        con.execute("select pg_terminate_backend(#{session['pid']})")
      end
    }
    con.disconnect!
  end
end
|
require_relative 'base_builder'
require 'tmpdir'
class IsoBuilder < BaseBuilder
  # rootfs_path: path to a gzipped rootfs tarball; output_path: target .iso.
  def initialize(rootfs_path, output_path)
    # BUGFIX: File.exists? was deprecated and removed in Ruby 3.2; use exist?.
    raise ArgumentError, 'Missing rootfs' unless File.exist?(rootfs_path)
    raise ArgumentError, 'Bad output file' unless output_path
    @rootfs_path = rootfs_path
    @output_path = output_path
    @bootloader = "grub" # TODO: make this configurable
    @bootmode = "bios" # TODO: make this configurable
  end

  # Dispatch to the bootloader-specific build. "grub" and any unrecognized
  # value both build with grub, so the two branches are merged into else.
  def build
    case @bootloader
    when "isolinux"
      notice "Building ISO using isolinux as bootloader"
      self.build_isolinux()
    else
      notice("Building ISO using grub as bootloader")
      self.build_grub()
    end
  end

  ##
  # Build the iso with isolinux as the bootloader
  #
  def build_isolinux()
    info("Ensure (temporary) workspace dirs exist")
    work_dirs = [ 'unpacked', 'iso', 'iso/isolinux', 'iso/live', 'tools' ]
    work_dirs.each { |dir| execute!("mkdir #{dir}", false) }
    info("Unpacking the rootfs to prepare it for live booting")
    execute!("tar -xzf #{@rootfs_path} -C unpacked")
    info("Installing live-boot pkgs")
    execute!("chroot unpacked apt-get --yes install live-boot")
    execute!("chroot unpacked apt-get --yes install live-boot-initramfs-tools")
    execute!("chroot unpacked update-initramfs -u")
    info("Squashing the modified rootfs")
    execute!("mksquashfs unpacked iso/live/root.squashfs -no-progress")
    info("Copying kernel and initrd into iso dir")
    execute!("cp unpacked/vmlinuz iso/live/vmlinuz")
    execute!("cp unpacked/initrd.img iso/live/initrd.img")
    download_unpack_isolinux_tools('tools')
    execute!("cp tools/usr/lib/syslinux/modules/bios/* iso/isolinux/", false)
    execute!("cp tools/usr/lib/ISOLINUX/isolinux.bin iso/isolinux/", false)
    info("Writing out isolinux config file")
    File.open("iso/isolinux/isolinux.cfg", 'w') { |f|
      f.write(self.isolinux_cfg_contents())
    }
    info("Creating ISO (using xorriso)")
    execute!("xorriso "\
             "-as mkisofs "\
             "-r -J "\
             "-joliet-long "\
             "-l -cache-inodes "\
             "-isohybrid-mbr tools/usr/lib/ISOLINUX/isohdpfx.bin "\
             "-partition_offset 16 "\
             "-A 'LiveISO' "\
             "-b isolinux/isolinux.bin "\
             "-c isolinux/boot.cat "\
             "-no-emul-boot "\
             "-boot-load-size 4 "\
             "-boot-info-table "\
             "-o #{@output_path} "\
             "iso")
  ensure
    info("deleting (temporary) work dirs")
    work_dirs.each { |d| execute!("rm -rf #{d}") }
    nil
  end

  ##
  # Build the iso with grub as the bootloader.
  #
  def build_grub
    info("Ensure (temporary) workspace dirs exist")
    work_dirs = [ 'unpacked', 'iso', 'iso/boot/grub', 'iso/live', 'tools' ]
    work_dirs.each { |dir| execute!("mkdir -p #{dir}", false) }
    info("Unpacking the rootfs to prepare it for live booting")
    execute!("tar -xzf #{@rootfs_path} -C unpacked")
    info("Installing live-boot pkgs")
    execute!("chroot unpacked apt-get update")
    execute!("chroot unpacked apt-get --yes install live-boot")
    execute!("chroot unpacked apt-get --yes install live-boot-initramfs-tools")
    execute!("chroot unpacked update-initramfs -u")
    info("Squashing the modified rootfs")
    execute!("mksquashfs unpacked iso/live/root.squashfs -no-progress")
    info("Copying kernel and initrd into iso dir")
    execute!("cp unpacked/vmlinuz iso/live/vmlinuz")
    execute!("cp unpacked/initrd.img iso/live/initrd.img")
    download_unpack_grub_tools('tools')
    info("Writing out isolinux config file")
    File.open("iso/boot/grub/grub.cfg", 'w') { |f|
      f.write(self.grub_cfg_contents())
    }
    grub_arch = (@bootmode == "bios") ? "i386-pc" : "x86_64-efi"
    info("Using grub arch: #{grub_arch}")
    info("Creating ISO (using grub-mkrescue)")
    execute!(["grub-mkrescue",
              "-d tools/usr/lib/grub/#{grub_arch}",
              "-o #{@output_path}",
              "./iso",
              "-- -iso-level 3", # in case the squashfs file is >4GiB
             ].join(" "))
  ensure
    info("deleting (temporary) work dirs")
    work_dirs.each { |d| execute!("rm -rf #{d}") }
    nil
  end

  ##
  # Download and unpack the isolinux tools into the specified dir.
  #
  def download_unpack_isolinux_tools(dir)
    execute!("cd #{dir} && apt-get download isolinux", false)
    execute!("cd #{dir} && apt-get download syslinux-common", false)
    Dir.glob("#{dir}/*.deb") { |pkg| execute!("dpkg-deb --extract #{pkg} #{dir}") }
  end

  # Download and unpack the grub tools (arch depends on @bootmode).
  def download_unpack_grub_tools(dir)
    grubpkgname = (@bootmode == "bios") ? "grub-pc-bin" : "grub-efi-amd64-bin"
    execute!("mkdir -p #{dir}", false)
    execute!("cd #{dir} && apt-get download grub-common", false)
    execute!("cd #{dir} && apt-get download #{grubpkgname}", false)
    Dir.glob("#{dir}/*.deb") { |pkg| execute!("dpkg-deb --extract #{pkg} #{dir}") }
  end

  ##
  # File contents for the isolinux config file
  #
  def isolinux_cfg_contents
    return [
      "UI menu.c32",
      "PROMPT LiveCD",
      "DEFAULT 1",
      "TIMEOUT 15",
      "MENU RESOLUTION 1024 768",
      "",
      "LABEL 1",
      " MENU DEFAULT",
      " MENU LABEL ^LiveCD",
      " KERNEL /live/vmlinuz",
      " APPEND initrd=/live/initrd.img boot=live quiet splash",
      "",
      "LABEL 2",
      " MENU LABEL ^LiveCD (verbose)",
      " KERNEL /live/vmlinuz",
      " APPEND initrd=/live/initrd.img boot=live",
      "",
    ].join("\n")
  end

  # File contents for the grub config file.
  def grub_cfg_contents
    [
      "set default=0",
      "set timeout=10",
      "set gfxpayload=1024x768x24",
      "",
      "menuentry \"Live CD\" {",
      " linux /live/vmlinuz boot=live console=tty0 quiet splash",
      " initrd /live/initrd.img",
      "}",
      "",
      "menuentry \"Live CD (debug)\" {",
      " linux /live/vmlinuz boot=live console=tty0 debug",
      " initrd /live/initrd.img",
      "}",
    ].join("\n")
  end
end
Make ISO builder detect rootfs compression
require_relative 'base_builder'
require 'tmpdir'
class IsoBuilder < BaseBuilder
  # rootfs_path: rootfs tarball (.gz / .bz2 / uncompressed); output_path: .iso.
  def initialize(rootfs_path, output_path)
    # BUGFIX: File.exists? was deprecated and removed in Ruby 3.2; use exist?.
    raise ArgumentError, 'Missing rootfs' unless File.exist?(rootfs_path)
    raise ArgumentError, 'Bad output file' unless output_path
    @rootfs_path = rootfs_path
    @output_path = output_path
    @bootloader = "grub" # TODO: make this configurable
    @bootmode = "bios" # TODO: make this configurable
    # Pick the tar decompression switch from the rootfs file extension.
    # BUGFIX: the original used Python-style "if cond:" / "elsif cond:",
    # which is a syntax error in Ruby >= 1.9.
    @decompress_switch = ''
    if rootfs_path.end_with?('gz')
      @decompress_switch = '-z'
    elsif rootfs_path.end_with?('bz2')
      @decompress_switch = '-j'
    end
  end

  # Dispatch to the bootloader-specific build. "grub" and any unrecognized
  # value both build with grub, so the two branches are merged into else.
  def build
    case @bootloader
    when "isolinux"
      notice "Building ISO using isolinux as bootloader"
      self.build_isolinux()
    else
      notice("Building ISO using grub as bootloader")
      self.build_grub()
    end
  end

  ##
  # Build the iso with isolinux as the bootloader
  #
  def build_isolinux()
    info("Ensure (temporary) workspace dirs exist")
    work_dirs = [ 'unpacked', 'iso', 'iso/isolinux', 'iso/live', 'tools' ]
    work_dirs.each { |dir| execute!("mkdir #{dir}", false) }
    info("Unpacking the rootfs to prepare it for live booting")
    execute!("tar #{@decompress_switch} -xf #{@rootfs_path} -C unpacked")
    info("Installing live-boot pkgs")
    execute!("chroot unpacked apt-get --yes install live-boot")
    execute!("chroot unpacked apt-get --yes install live-boot-initramfs-tools")
    execute!("chroot unpacked update-initramfs -u")
    info("Squashing the modified rootfs")
    execute!("mksquashfs unpacked iso/live/root.squashfs -no-progress")
    info("Copying kernel and initrd into iso dir")
    execute!("cp unpacked/vmlinuz iso/live/vmlinuz")
    execute!("cp unpacked/initrd.img iso/live/initrd.img")
    download_unpack_isolinux_tools('tools')
    execute!("cp tools/usr/lib/syslinux/modules/bios/* iso/isolinux/", false)
    execute!("cp tools/usr/lib/ISOLINUX/isolinux.bin iso/isolinux/", false)
    info("Writing out isolinux config file")
    File.open("iso/isolinux/isolinux.cfg", 'w') { |f|
      f.write(self.isolinux_cfg_contents())
    }
    info("Creating ISO (using xorriso)")
    execute!("xorriso "\
             "-as mkisofs "\
             "-r -J "\
             "-joliet-long "\
             "-l -cache-inodes "\
             "-isohybrid-mbr tools/usr/lib/ISOLINUX/isohdpfx.bin "\
             "-partition_offset 16 "\
             "-A 'LiveISO' "\
             "-b isolinux/isolinux.bin "\
             "-c isolinux/boot.cat "\
             "-no-emul-boot "\
             "-boot-load-size 4 "\
             "-boot-info-table "\
             "-o #{@output_path} "\
             "iso")
  ensure
    info("deleting (temporary) work dirs")
    work_dirs.each { |d| execute!("rm -rf #{d}") }
    nil
  end

  ##
  # Build the iso with grub as the bootloader.
  #
  def build_grub
    info("Ensure (temporary) workspace dirs exist")
    work_dirs = [ 'unpacked', 'iso', 'iso/boot/grub', 'iso/live', 'tools' ]
    work_dirs.each { |dir| execute!("mkdir -p #{dir}", false) }
    info("Unpacking the rootfs to prepare it for live booting")
    # BUGFIX: grub is the default bootloader, but this path still hard-coded
    # "tar -xzf", so compression detection never applied; honor the switch.
    execute!("tar #{@decompress_switch} -xf #{@rootfs_path} -C unpacked")
    info("Installing live-boot pkgs")
    execute!("chroot unpacked apt-get update")
    execute!("chroot unpacked apt-get --yes install live-boot")
    execute!("chroot unpacked apt-get --yes install live-boot-initramfs-tools")
    execute!("chroot unpacked update-initramfs -u")
    info("Squashing the modified rootfs")
    execute!("mksquashfs unpacked iso/live/root.squashfs -no-progress")
    info("Copying kernel and initrd into iso dir")
    execute!("cp unpacked/vmlinuz iso/live/vmlinuz")
    execute!("cp unpacked/initrd.img iso/live/initrd.img")
    download_unpack_grub_tools('tools')
    info("Writing out isolinux config file")
    File.open("iso/boot/grub/grub.cfg", 'w') { |f|
      f.write(self.grub_cfg_contents())
    }
    grub_arch = (@bootmode == "bios") ? "i386-pc" : "x86_64-efi"
    info("Using grub arch: #{grub_arch}")
    info("Creating ISO (using grub-mkrescue)")
    execute!(["grub-mkrescue",
              "-d tools/usr/lib/grub/#{grub_arch}",
              "-o #{@output_path}",
              "./iso",
              "-- -iso-level 3", # in case the squashfs file is >4GiB
             ].join(" "))
  ensure
    info("deleting (temporary) work dirs")
    work_dirs.each { |d| execute!("rm -rf #{d}") }
    nil
  end

  ##
  # Download and unpack the isolinux tools into the specified dir.
  #
  def download_unpack_isolinux_tools(dir)
    execute!("cd #{dir} && apt-get download isolinux", false)
    execute!("cd #{dir} && apt-get download syslinux-common", false)
    Dir.glob("#{dir}/*.deb") { |pkg| execute!("dpkg-deb --extract #{pkg} #{dir}") }
  end

  # Download and unpack the grub tools (arch depends on @bootmode).
  def download_unpack_grub_tools(dir)
    grubpkgname = (@bootmode == "bios") ? "grub-pc-bin" : "grub-efi-amd64-bin"
    execute!("mkdir -p #{dir}", false)
    execute!("cd #{dir} && apt-get download grub-common", false)
    execute!("cd #{dir} && apt-get download #{grubpkgname}", false)
    Dir.glob("#{dir}/*.deb") { |pkg| execute!("dpkg-deb --extract #{pkg} #{dir}") }
  end

  ##
  # File contents for the isolinux config file
  #
  def isolinux_cfg_contents
    return [
      "UI menu.c32",
      "PROMPT LiveCD",
      "DEFAULT 1",
      "TIMEOUT 15",
      "MENU RESOLUTION 1024 768",
      "",
      "LABEL 1",
      " MENU DEFAULT",
      " MENU LABEL ^LiveCD",
      " KERNEL /live/vmlinuz",
      " APPEND initrd=/live/initrd.img boot=live quiet splash",
      "",
      "LABEL 2",
      " MENU LABEL ^LiveCD (verbose)",
      " KERNEL /live/vmlinuz",
      " APPEND initrd=/live/initrd.img boot=live",
      "",
    ].join("\n")
  end

  # File contents for the grub config file.
  def grub_cfg_contents
    [
      "set default=0",
      "set timeout=10",
      "set gfxpayload=1024x768x24",
      "",
      "menuentry \"Live CD\" {",
      " linux /live/vmlinuz boot=live console=tty0 quiet splash",
      " initrd /live/initrd.img",
      "}",
      "",
      "menuentry \"Live CD (debug)\" {",
      " linux /live/vmlinuz boot=live console=tty0 debug",
      " initrd /live/initrd.img",
      "}",
    ].join("\n")
  end
end
|
require './move.rb'
require './square.rb'
class State
  attr_accessor :board

  # Maximum number of moves before the game is declared a draw.
  MAXTURNS = 80
  $moveCounter = 1
  $sideOnMove = 'W'

  # BUGFIX: was misspelled "initializer", so Ruby never invoked it as the
  # constructor and @board stayed nil until initBoard was called.
  def initialize()
    @board = []
  end

  # Flip the side to move between white and black.
  def nextTurn
    if $sideOnMove == 'W'
      $sideOnMove = 'B'
    elsif $sideOnMove == 'B'
      $sideOnMove = 'W'
    end
  end

  def printBoard
    puts "#{$moveCounter} #{$sideOnMove}"
    @board.each do |x|
      puts x.join("")
    end
  end

  # 5x6 minichess starting position: black (lowercase) on top.
  def initBoard
    @board = [
      ['k','q','b','n','r'],
      ['p','p','p','p','p'],
      ['.','.','.','.','.'],
      ['.','.','.','.','.'],
      ['P','P','P','P','P'],
      ['R','N','B','Q','K'],
    ]
  end

  # True when a king is captured or the move limit is reached.
  def gameOver?
    if not @board.flatten.include?('k')
      puts "white wins"
      return true
    elsif not @board.flatten.include?('K')
      puts "black wins"
      return true
    # BUGFIX: compared the never-assigned @movesCounter (always nil), so the
    # draw condition could never trigger; use the real $moveCounter.
    elsif $moveCounter >= MAXTURNS
      puts "draw"
      return true
    else
      return false
    end
  end

  # Play a uniformly random legal move for the side on move.
  def randomMove
    color = $sideOnMove
    # ask each piece for valid moves
    allValidMoves = []
    getPiecesForSide(color).each do |piece|
      allValidMoves << moveList(piece.x, piece.y)
    end
    allValidMoves.flatten!
    random = rand(allValidMoves.size)
    randomMove = allValidMoves[random]
    move(randomMove)
    self.nextTurn
    return randomMove
  end

  # Greedy one-ply material evaluation; white maximizes, black minimizes.
  def evalMove
    allValidMoves = []
    getPiecesForSide($sideOnMove).each do |piece|
      allValidMoves << moveList(piece.x, piece.y).flatten
    end
    m = {}
    allValidMoves.flatten.each do |move|
      piece = @board[move.toSquare.y][move.toSquare.x]
      if piece.upcase == 'K'
        self.move(move)
        return move
      end
      copyOfBoard = Marshal.load( Marshal.dump(@board) )
      score = scoreGen(move, copyOfBoard)
      m[move] = score
    end
    # BUGFIX: "$sideOMove" typo referenced an undefined (nil) global, so the
    # min branch was always taken - even when white was on move.
    if $sideOnMove == 'W'
      bestMove = m.max_by { |move,val| val }.first
    else
      bestMove = m.min_by { |move,val| val }.first
    end
    move(bestMove)
    self.nextTurn
    return bestMove.to_s
  end

  # Material balance of the board after applying move to copyOfBoard
  # (positive favors white, negative favors black).
  def scoreGen(move, copyOfBoard)
    score = 0
    updateBoard(move, copyOfBoard)
    getPiecesForSide('W').each do |piece|
      piece = copyOfBoard[piece.y][piece.x]
      score += getPieceValue(piece)
    end
    getPiecesForSide('B').each do |piece|
      piece = copyOfBoard[piece.y][piece.x]
      score += getPieceValue(piece)
    end
    return score
  end

  # Not part of homework just strategy for comparision
  def chooseBestKill
    allValidMoves = []
    getPiecesForSide($sideOnMove).each do |piece|
      allValidMoves << moveList(piece.x, piece.y).flatten
    end
    m = {}
    allValidMoves.flatten.each do |move|
      piece = @board[move.toSquare.y][move.toSquare.x]
      if piece.upcase == 'K'
        self.move(move)
        return move
      end
      m[move] = getPieceValue(piece.upcase)
    end
    bestMove = m.max_by { |move,val| val }.first
    move(bestMove)
    self.nextTurn
    return bestMove
  end

  # Standard material values; positive for white (uppercase), negative for
  # black (lowercase); '.' and kings score 0.
  def getPieceValue(p)
    case p.upcase
    when 'P'
      score = 100
    when 'B', 'N'
      score = 300
    when 'R'
      score = 500
    when 'Q'
      score = 900
    when 'K', '.'
      score = 0
    end
    return p.upcase == p ? score : - score
  end

  # All squares occupied by pieces of the given color ('W' or 'B').
  def getPiecesForSide(color)
    pieces = []
    for y in 0..5 do
      for x in 0..4 do
        piece = Square.new(x,y)
        if isPiece?(piece) and color == getColor(x,y)
          pieces << piece
        end
      end
    end
    return pieces
  end

  # Prompt for a move on stdin; retries until a legal move is entered.
  def humanTurn
    puts "Enter Move:"
    input = gets.chomp
    begin
      humanMove(input)
    rescue
      humanTurn
    end
  end

  # Parse a move of the form "a2-a3" and play it if legal.
  def humanMove(hmove)
    m = hmove.split("-")
    fs = getChessSquare(m.first)
    ts = getChessSquare(m.last)
    hmove = Move.new(Square.new(fs.x,fs.y), Square.new(ts.x,ts.y))
    moves = []
    moveList(fs.x, fs.y).flatten.each do |m|
      moves << m.to_s
    end
    if moves.include?(hmove.to_s)
      move(hmove)
    else
      puts "Invalid chess move #{hmove.to_s} please try again"
      throw "Invalid Human move"
    end
    self.nextTurn
  end

  # Validate and apply move m for the side on move; bumps $moveCounter.
  def move(m)
    if isPiece?(m.fromSquare) and (getColor(m.fromSquare.x, m.fromSquare.y) == $sideOnMove)
      puts "Move is #{m.to_s}"
      moves = []
      moveList(m.fromSquare.x, m.fromSquare.y).flatten.each do |m|
        moves << m.to_s
      end
      if not moves.include?(m.to_s)
        throw "Error: Not a valid move x, y is fromSquare: #{m.fromSquare.x} #{m.fromSquare.y}, toSquare is #{m.toSquare.x} #{m.toSquare.y}"
      else
        updateBoard(m, @board)
      end
      $moveCounter += 1
    else
      throw "move error"
    end
  end

  # Apply move m to the given board (also handles pawn promotion).
  def updateBoard(m, board)
    # move piece to toSquare
    board[m.toSquare.y][m.toSquare.x] = board[m.fromSquare.y][m.fromSquare.x]
    # set from square to empty
    board[m.fromSquare.y][m.fromSquare.x] = '.'
    # if piece is pawn and reaches the end of the board, then its becomes a queen
    if board[m.toSquare.y][m.toSquare.x].upcase == 'P'
      if m.toSquare.y == 5 and getColor(m.toSquare.x, m.toSquare.y) == 'B'
        board[m.toSquare.y][m.toSquare.x] = 'q'
      end
      if m.toSquare.y == 0 and getColor(m.toSquare.x, m.toSquare.y) == 'W'
        board[m.toSquare.y][m.toSquare.x] = 'Q'
      end
    end
  end

  # Scan from (x0,y0) in direction (dx,dy), collecting legal destination
  # squares; capture allows taking enemy pieces, stop_short limits to one step.
  def moveScan(x0, y0, dx, dy, capture, stop_short)
    x = x0
    y = y0
    c = getColor(x0,y0)
    moves = []
    loop do
      x += dx
      y += dy
      break if not inBounds?(x,y)
      if isOccupied?(@board[y][x])
        break if getColor(x,y) == c
        break if not capture
        stop_short = true
      end
      validMove = Move.new(Square.new(x0,y0), Square.new(x,y))
      moves << validMove
      break if stop_short == true
    end
    return moves
  end

  # 'W' for uppercase pieces, 'B' otherwise (note: '.' reports 'W').
  def getColor(x, y)
    if @board[y][x].to_s.upcase == @board[y][x]
      return "W"
    else
      return "B"
    end
  end

  def isPiece?(square)
    return ['Q','K','R','N','B','P'].include?(@board[square.y][square.x].upcase)
  end

  # True when the destination holds an opposing piece.
  def isCapture?(fs, ts)
    if @board[ts.y][ts.x] == '.'
      return false
    else
      return @board[fs.y][fs.x].to_s != @board[ts.y][ts.x].to_s
    end
  end

  def isOccupied?(value)
    return value != '.'
  end

  # True when (x,y) lies on the 5-wide, 6-tall board.
  def inBounds?(x,y)
    return (x < 5 and x > -1 and y < 6 and y > -1)
  end

  # All legal moves for the piece at (x,y), per minichess movement rules.
  def moveList(x,y)
    # To list the moves of a piece at x, y:
    p = @board[y][x]
    moves = []
    case p
    when 'Q', 'K', 'q', 'k'
      (-1..1).each do |dx|
        (-1..1).each do |dy|
          if dx == 0 and dy == 0
            next
          end
          stop_short = (p == 'K' or p == 'k')
          moves << moveScan(x, y, dx, dy, capture=true, stop_short)
        end
      end
      return moves
    when 'R', 'r'
      dx = 1
      dy = 0
      stop_short = false
      capture = true
      for i in 1..4
        moves << moveScan(x, y, dx, dy, capture, stop_short)
        dx,dy = -dy,dx
      end
      return moves
    when 'B', 'b'
      # one-step non-capturing orthogonal moves...
      dx = 1
      dy = 0
      stop_short = true
      capture = false
      for i in 1..4
        moves << moveScan(x, y, dx, dy, capture, stop_short)
        dx,dy = -dy,dx
      end
      # ...plus full diagonal sliding moves
      dx = 1
      dy = 1
      stop_short = false
      capture = true
      for i in 1..4
        moves << moveScan(x, y, dx, dy, capture, stop_short)
        dx,dy = -dy,dx
      end
      return moves
    when 'N', 'n'
      dx = 1
      dy = 2
      stop_short = true
      capture = true
      for i in 1..4
        moves << moveScan(x, y, dx, dy, capture, stop_short)
        dx,dy = -dy,dx
      end
      dx = -1
      dy = 2
      for i in 1..4
        moves << moveScan(x, y, dx, dy, capture, stop_short)
        dx,dy = -dy,dx
      end
      return moves
    when 'P', 'p'
      # inverse the direction of black pawns
      dir = (getColor(x,y) == 'B') ? 1 : -1
      stop_short = true
      # West
      m = moveScan(x, y, -1, dir, true, stop_short)
      # check if I can capture diag (NW or SW)
      if m.size == 1 and isCapture?(m[0].fromSquare, m[0].toSquare)
        moves << m
      end
      # East
      m = moveScan(x, y, 1, dir, true, stop_short)
      # check if I can capture diag ( NE or SE)
      if m.size == 1 and isCapture?(m[0].fromSquare, m[0].toSquare)
        moves << m
      end
      moves << moveScan(x, y, 0, dir, false, stop_short)
      return moves
    else
      throw "Error: moveList called on invalid piece '#{p}' with coordinates x: #{x} y: #{y}"
    end
  end

  # Convert algebraic notation like "a2" into board coordinates.
  def getChessSquare(square)
    row = ['a','b','c','d','e']
    x = row.index(square[0])
    y = [ 6, 5, 4, 3, 2, 1][square[1].to_i]
    return Square.new(x,y)
  end
end
Fix duplicated code in state class
require './move.rb'
require './square.rb'
class State
attr_accessor :board
MAXTURNS = 80
$moveCounter = 1
$sideOnMove = 'W'
def initializer()
@board = []
end
def nextTurn
if $sideOnMove == 'W'
$sideOnMove = 'B'
elsif $sideOnMove == 'B'
$sideOnMove = 'W'
end
end
def printBoard
puts "#{$moveCounter} #{$sideOnMove}"
@board.each do |x|
puts x.join("")
end
end
def initBoard
@board = [
['k','q','b','n','r'],
['p','p','p','p','p'],
['.','.','.','.','.'],
['.','.','.','.','.'],
['P','P','P','P','P'],
['R','N','B','Q','K'],
]
end
def gameOver?
if not @board.flatten.include?('k')
puts "white wins"
return true
elsif not @board.flatten.include?('K')
puts "black wins"
return true
elsif @movesCounter == MAXTURNS
puts "draw"
return true
else
return false
end
end
def randomMove
color = $sideOnMove
# ask each piece for valid moves
allValidMoves = []
getPiecesForSide(color).each do |piece|
allValidMoves << moveList(piece.x, piece.y)
end
allValidMoves.flatten!
random = rand(allValidMoves.size)
randomMove = allValidMoves[random]
move(randomMove)
self.nextTurn
return randomMove
end
def evalMove
allValidMoves = []
getPiecesForSide($sideOnMove).each do |piece|
allValidMoves << moveList(piece.x, piece.y).flatten
end
m = {}
allValidMoves.flatten.each do |move|
piece = @board[move.toSquare.y][move.toSquare.x]
if piece.upcase == 'K'
self.move(move)
return move
end
copyOfBoard = Marshal.load( Marshal.dump(@board) )
score = scoreGen(move, copyOfBoard)
m[move] = score
end
if $sideOMove == 'W'
bestMove = m.max_by { |move,val| val }.first
else
bestMove = m.min_by { |move,val| val }.first
end
move(bestMove)
self.nextTurn
return bestMove.to_s
end
def scoreGen(move, copyOfBoard)
score = 0
updateBoard(move, copyOfBoard)
getPiecesForSide('W').each do |piece|
piece = copyOfBoard[piece.y][piece.x]
score += getPieceValue(piece)
end
getPiecesForSide('B').each do |piece|
piece = copyOfBoard[piece.y][piece.x]
score += getPieceValue(piece)
end
return score
end
# Not part of homework just strategy for comparision
def chooseBestKill
allValidMoves = []
getPiecesForSide($sideOnMove).each do |piece|
allValidMoves << moveList(piece.x, piece.y).flatten
end
m = {}
allValidMoves.flatten.each do |move|
piece = @board[move.toSquare.y][move.toSquare.x]
if piece.upcase == 'K'
self.move(move)
return move
end
m[move] = getPieceValue(piece.upcase)
end
bestMove = m.max_by { |move,val| val }.first
move(bestMove)
self.nextTurn
return bestMove
end
def getPieceValue(p)
case p.upcase
when 'P'
score = 100
when 'B', 'N'
score = 300
when 'R'
score = 500
when 'Q'
score = 900
when 'K', '.'
score = 0
end
return p.upcase == p ? score : - score
end
def getPiecesForSide(color)
pieces = []
for y in 0..5 do
for x in 0..4 do
piece = Square.new(x,y)
if isPiece?(piece) and color == getColor(x,y)
pieces << piece
end
end
end
return pieces
end
def humanTurn
puts "Enter Move:"
input = gets.chomp
begin
humanMove(input)
rescue
humanTurn
end
end
def humanMove(hmove)
m = hmove.split("-")
fs = getChessSquare(m.first)
ts = getChessSquare(m.last)
hmove = Move.new(fs, ts)
moves = []
moveList(fs.x, fs.y).flatten.each do |m|
moves << m.to_s
end
if moves.include?(hmove.to_s)
move(hmove)
else
puts "Invalid chess move #{hmove.to_s} please try again"
throw "Invalid Human move"
end
self.nextTurn
end
def move(m)
if isPiece?(m.fromSquare) and (getColor(m.fromSquare.x, m.fromSquare.y) == $sideOnMove)
puts "Move is #{m.to_s}"
moves = []
moveList(m.fromSquare.x, m.fromSquare.y).flatten.each do |m|
moves << m.to_s
end
if not moves.include?(m.to_s)
throw "Error: Not a valid move x, y is fromSquare: #{m.fromSquare.x} #{m.fromSquare.y}, toSquare is #{m.toSquare.x} #{m.toSquare.y}"
else
updateBoard(m, @board)
end
$moveCounter += 1
else
throw "move error"
end
end
def updateBoard(m, board)
# move piece to toSquare
board[m.toSquare.y][m.toSquare.x] = board[m.fromSquare.y][m.fromSquare.x]
# set from square to empty
board[m.fromSquare.y][m.fromSquare.x] = '.'
# if piece is pawn and reaches the end of the board, then its becomes a queen
if board[m.toSquare.y][m.toSquare.x].upcase == 'P'
if m.toSquare.y == 5 and getColor(m.toSquare.x, m.toSquare.y) == 'B'
board[m.toSquare.y][m.toSquare.x] = 'q'
end
if m.toSquare.y == 0 and getColor(m.toSquare.x, m.toSquare.y) == 'W'
board[m.toSquare.y][m.toSquare.x] = 'Q'
end
end
end
def moveScan(x0, y0, dx, dy, capture, stop_short)
x = x0
y = y0
c = getColor(x0,y0)
moves = []
loop do
x += dx
y += dy
break if not inBounds?(x,y)
if isOccupied?(@board[y][x])
break if getColor(x,y) == c
break if not capture
stop_short = true
end
validMove = Move.new(Square.new(x0,y0), Square.new(x,y))
moves << validMove
break if stop_short == true
end
return moves
end
def getColor(x, y)
if @board[y][x].to_s.upcase == @board[y][x]
return "W"
else
return "B"
end
end
def isPiece?(square)
return ['Q','K','R','N','B','P'].include?(@board[square.y][square.x].upcase)
end
def isCapture?(fs, ts)
if @board[ts.y][ts.x] == '.'
return false
else
return @board[fs.y][fs.x].to_s != @board[ts.y][ts.x].to_s
end
end
def isOccupied?(value)
return value != '.'
end
def inBounds?(x,y)
return (x < 5 and x > -1 and y < 6 and y > -1)
end
def moveList(x,y)
# To list the moves of a piece at x, y:
p = @board[y][x]
moves = []
case p
when 'Q', 'K', 'q', 'k'
(-1..1).each do |dx|
(-1..1).each do |dy|
if dx == 0 and dy == 0
next
end
stop_short = (p == 'K' or p == 'k')
moves << moveScan(x, y, dx, dy, capture=true, stop_short)
end
end
return moves
when 'R', 'r'
dx = 1
dy = 0
stop_short = false
capture = true
for i in 1..4
moves << moveScan(x, y, dx, dy, capture, stop_short)
dx,dy = -dy,dx
end
return moves
when 'B', 'b'
dx = 1
dy = 0
stop_short = true
capture = false
for i in 1..4
moves << moveScan(x, y, dx, dy, capture, stop_short)
dx,dy = -dy,dx
end
dx = 1
dy = 1
stop_short = false
capture = true
for i in 1..4
moves << moveScan(x, y, dx, dy, capture, stop_short)
dx,dy = -dy,dx
end
return moves
when 'N', 'n'
dx = 1
dy = 2
stop_short = true
capture = true
for i in 1..4
moves << moveScan(x, y, dx, dy, capture, stop_short)
dx,dy = -dy,dx
end
dx = -1
dy = 2
for i in 1..4
moves << moveScan(x, y, dx, dy, capture, stop_short)
dx,dy = -dy,dx
end
return moves
when 'P', 'p'
# inverse the direction of black pawns
dir = (getColor(x,y) == 'B') ? 1 : -1
stop_short = true
# West
m = moveScan(x, y, -1, dir, true, stop_short)
# check if I can capture diag (NW or SW)
if m.size == 1 and isCapture?(m[0].fromSquare, m[0].toSquare)
moves << m
end
# East
m = moveScan(x, y, 1, dir, true, stop_short)
# check if I can capture diag ( NE or SE)
if m.size == 1 and isCapture?(m[0].fromSquare, m[0].toSquare)
moves << m
end
moves << moveScan(x, y, 0, dir, false, stop_short)
return moves
else
throw "Error: moveList called on invalid piece '#{p}' with coordinates x: #{x} y: #{y}"
end
end
# Converts algebraic notation (e.g. "a1") into a 0-based Square.
# Files a-e map to x 0-4; ranks 1-6 map to rows 5-0 (rank 1 is the bottom
# row of the 5x6 board).
def getChessSquare(square)
  row = ['a','b','c','d','e']
  x = row.index(square[0])
  # 6 - rank flips the rank into a row index. The previous lookup table
  # [6, 5, 4, 3, 2, 1] had no entry for rank 6 and silently returned nil.
  y = 6 - square[1].to_i
  return Square.new(x,y)
end
end
|
# -*- encoding: utf-8 -*-
# Gemspec for subdomain_routes 0.2.3.
# NOTE(review): the layout matches Jeweler-era generated output — prefer
# regenerating over hand-editing if the gem contents change.
Gem::Specification.new do |s|
s.name = %q{subdomain_routes}
s.version = "0.2.3"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Matthew Hollingworth"]
s.date = %q{2009-08-14}
s.description = %q{SubdomainRoutes add subdomain conditions to the Rails routing system. Routes may be restricted to one or many specified subdomains. An URL will be recognised only if the host subdomain matches the subdomain specified in the route. Route generation is also enhanced, so that the subdomain of a generated URL (or path) will be changed if the requested route has a different subdomain to that of the current request. Model-based subdomain routes can also be defined.}
s.email = %q{mdholling@gmail.com}
s.extra_rdoc_files = [
"LICENSE",
"README.textile"
]
# Everything shipped in the gem: library code, docs, and the spec suite.
s.files = [
"LICENSE",
"README.textile",
"Rakefile",
"VERSION.yml",
"lib/subdomain_routes.rb",
"lib/subdomain_routes/assertions.rb",
"lib/subdomain_routes/config.rb",
"lib/subdomain_routes/mapper.rb",
"lib/subdomain_routes/request.rb",
"lib/subdomain_routes/resources.rb",
"lib/subdomain_routes/routes.rb",
"lib/subdomain_routes/split_host.rb",
"lib/subdomain_routes/url_writer.rb",
"lib/subdomain_routes/validations.rb",
"rails/init.rb",
"spec/assertions_spec.rb",
"spec/extraction_spec.rb",
"spec/mapping_spec.rb",
"spec/recognition_spec.rb",
"spec/resources_spec.rb",
"spec/routes_spec.rb",
"spec/spec_helper.rb",
"spec/test_unit_matcher.rb",
"spec/url_writing_spec.rb",
"spec/validations_spec.rb"
]
s.homepage = %q{http://github.com/mholling/subdomain_routes}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.1}
s.summary = %q{A Rails library for incorporating subdomains into route generation and recognition.}
s.test_files = [
"spec/assertions_spec.rb",
"spec/extraction_spec.rb",
"spec/mapping_spec.rb",
"spec/recognition_spec.rb",
"spec/resources_spec.rb",
"spec/routes_spec.rb",
"spec/spec_helper.rb",
"spec/test_unit_matcher.rb",
"spec/url_writing_spec.rb",
"spec/validations_spec.rb"
]
# Compatibility shim for old RubyGems: declare the actionpack dependency
# with whichever dependency API this RubyGems version supports.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 2
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<actionpack>, [">= 2.2.1"])
else
s.add_dependency(%q<actionpack>, [">= 2.2.1"])
end
else
s.add_dependency(%q<actionpack>, [">= 2.2.1"])
end
end
Regenerated gemspec for version 0.3.0
# -*- encoding: utf-8 -*-
# Gemspec for subdomain_routes 0.3.0 (regenerated from 0.2.3).
# NOTE(review): the layout matches Jeweler-era generated output — prefer
# regenerating over hand-editing if the gem contents change.
Gem::Specification.new do |s|
s.name = %q{subdomain_routes}
s.version = "0.3.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Matthew Hollingworth"]
s.date = %q{2009-08-25}
s.description = %q{SubdomainRoutes add subdomain conditions to the Rails routing system. Routes may be restricted to one or many specified subdomains. An URL will be recognised only if the host subdomain matches the subdomain specified in the route. Route generation is also enhanced, so that the subdomain of a generated URL (or path) will be changed if the requested route has a different subdomain to that of the current request. Model-based subdomain routes can also be defined.}
s.email = %q{mdholling@gmail.com}
s.extra_rdoc_files = [
"LICENSE",
"README.textile"
]
# Everything shipped in the gem: library code, docs, and the spec suite.
s.files = [
"LICENSE",
"README.textile",
"Rakefile",
"VERSION.yml",
"lib/subdomain_routes.rb",
"lib/subdomain_routes/assertions.rb",
"lib/subdomain_routes/config.rb",
"lib/subdomain_routes/mapper.rb",
"lib/subdomain_routes/request.rb",
"lib/subdomain_routes/resources.rb",
"lib/subdomain_routes/routes.rb",
"lib/subdomain_routes/split_host.rb",
"lib/subdomain_routes/url_writer.rb",
"lib/subdomain_routes/validations.rb",
"rails/init.rb",
"spec/assertions_spec.rb",
"spec/extraction_spec.rb",
"spec/mapping_spec.rb",
"spec/recognition_spec.rb",
"spec/resources_spec.rb",
"spec/routes_spec.rb",
"spec/spec_helper.rb",
"spec/test_unit_matcher.rb",
"spec/url_writing_spec.rb",
"spec/validations_spec.rb"
]
s.homepage = %q{http://github.com/mholling/subdomain_routes}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.1}
s.summary = %q{A Rails library for incorporating subdomains into route generation and recognition.}
s.test_files = [
"spec/assertions_spec.rb",
"spec/extraction_spec.rb",
"spec/mapping_spec.rb",
"spec/recognition_spec.rb",
"spec/resources_spec.rb",
"spec/routes_spec.rb",
"spec/spec_helper.rb",
"spec/test_unit_matcher.rb",
"spec/url_writing_spec.rb",
"spec/validations_spec.rb"
]
# Compatibility shim for old RubyGems: declare the actionpack dependency
# with whichever dependency API this RubyGems version supports.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 2
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<actionpack>, [">= 2.2.1"])
else
s.add_dependency(%q<actionpack>, [">= 2.2.1"])
end
else
s.add_dependency(%q<actionpack>, [">= 2.2.1"])
end
end
|
# -*- coding: utf-8 -*-
require File.expand_path('utils')
# ブロックを、後で時間があいたときに順次実行する。
# 名前deferのほうがよかったんじゃね
# Queues blocks to be executed later, when the main loop has idle time.
# (In hindsight "defer" might have been a better name.)
class Delayer
  # Priorities: lower runs first. CRITICAL/FASTER and LATER/LAST are aliases.
  CRITICAL = 0
  FASTER = 0
  NORMAL = 1
  LATER = 2
  LAST = 2

  extend MonitorMixin

  # One FIFO queue per priority (indexes 0..2).
  @@routines = [[],[],[]]
  @frozen = false

  attr_reader :backtrace, :status

  # Registers +block+ to run later with +args+ at priority +prio+.
  # The caller's backtrace is captured so a failure inside the block can be
  # traced back to the registration site.
  def initialize(prio = NORMAL, *args, &block)
    @routine = block
    @args = args
    @backtrace = caller
    @status = :wait
    regist(prio)
  end

  # Cancels this Delayer; has no effect once it has started running.
  def reject
    @status = nil
  end

  # Runs the block. Internal — called from Delayer.run; do not call directly.
  def run
    return if @status != :wait
    @status = :run
    begin
      @routine.call(*@args)
    rescue Exception => e
      # Replace the frames below this method with the registration-site
      # backtrace. caller.size must be measured here, inside the rescue,
      # and +1 accounts for the @routine.call frame itself; measuring it
      # before the begin block (as this code previously did) sliced the
      # wrong end of the backtrace and produced corrupted stack traces.
      now = caller.size + 1
      $@ = e.backtrace[0, e.backtrace.size - now] + @backtrace
      raise e
    end
    @routine = nil
    @status = nil
  end

  # Executes some queued Delayers, highest priority first. If more than
  # 0.1 s of user CPU time elapses, the remainder is left for the next call.
  def self.run
    return if @frozen
    debugging_wait
    begin
      @busy = true
      st = Process.times.utime
      3.times{ |cnt|
        procs = []
        if not @@routines[cnt].empty? then
          procs = @@routines[cnt].clone
          procs.each{ |routine|
            @@routines[cnt].delete(routine)
            routine.run
            return if ((Process.times.utime - st) > 0.1) } end }
    ensure
      @busy = false end end

  # True while Delayer.run is consuming tasks.
  def self.busy?
    @busy end

  # True when no work is queued.
  def self.empty?
    @@routines.all?{|r| r.empty? } end

  # Number of Delayers still queued.
  def self.size
    @@routines.map{|r| r.size }.sum end

  # After this call, Delayer.run returns immediately without executing
  # anything (until melt is called).
  def self.freeze
    @frozen = true
  end

  # The opposite of freeze.
  def self.melt
    @frozen = false
  end

  private

  # Appends self to the queue for +prio+ under the class monitor.
  def regist(prio)
    self.class.synchronize{
      @@routines[prio] << self
    }
  end
end
Delayerで発生したバックトレースのスタックがおかしかった refs #416
git-svn-id: 2d5b2631527c13f1be6d28af81e06d7b0891ab74@651 03aab468-d3d2-4883-8b12-f661bbf03fa8
# -*- coding: utf-8 -*-
require File.expand_path('utils')
# ブロックを、後で時間があいたときに順次実行する。
# 名前deferのほうがよかったんじゃね
# Queues blocks to be executed later, when the main loop has idle time.
# (In hindsight "defer" might have been a better name.)
class Delayer
# Priorities: lower runs first. CRITICAL/FASTER and LATER/LAST are aliases.
CRITICAL = 0
FASTER = 0
NORMAL = 1
LATER = 2
LAST = 2
extend MonitorMixin
# One FIFO queue per priority (indexes 0..2).
@@routines = [[],[],[]]
@frozen = false
attr_reader :backtrace, :status
# Registers a block to run later with +args+ at priority +prio+. The
# caller's backtrace is captured so failures inside the block can be
# traced back to the registration site.
def initialize(prio = NORMAL, *args, &block)
@routine = block
@args = args
@backtrace = caller
@status = :wait
regist(prio)
end
# Cancels this Delayer; has no effect once it has started running.
def reject
@status = nil
end
# Runs the block. Internal — called from Delayer.run; do not call directly.
def run
return if @status != :wait
@status = :run
begin
@routine.call(*@args)
rescue Exception => e
# Splice the registration-site backtrace onto the error, dropping the
# frames internal to this method.
now = caller.size + 1 # the stack is one frame deeper because of @routine.call
$@ = e.backtrace[0, e.backtrace.size - now] + @backtrace
raise e
end
@routine = nil
@status = nil
end
# Executes some queued Delayers, highest priority first.
# If more than 0.1 s of user CPU time elapses, the remainder is kept
# for the next call and control returns to the caller.
def self.run
return if @frozen
debugging_wait
begin
@busy = true
st = Process.times.utime
3.times{ |cnt|
procs = []
if not @@routines[cnt].empty? then
procs = @@routines[cnt].clone
procs.each{ |routine|
@@routines[cnt].delete(routine)
routine.run
return if ((Process.times.utime - st) > 0.1) } end }
ensure
@busy = false end end
# True while Delayer.run is consuming tasks.
def self.busy?
@busy end
# True when no work is queued.
def self.empty?
@@routines.all?{|r| r.empty? } end
# Number of Delayers still queued.
def self.size
@@routines.map{|r| r.size }.sum end
# After this call, Delayer.run returns immediately without executing any
# Delayer objects (until melt is called).
def self.freeze
@frozen = true
end
# The opposite of freeze.
def self.melt
@frozen = false
end
private
# Appends self to the queue for +prio+ under the class monitor.
def regist(prio)
self.class.synchronize{
@@routines[prio] << self
}
end
end
|
require 'etc'
require 'fileutils'
# Helpers for running the Subversion Ruby-binding test suite on Windows:
# managing a svnserve Windows service and generating DLL wrapper stubs.
module SvnTestUtil
  module Windows
    module Svnserve
      SERVICE_NAME = 'test-svn-server'

      class << self
        # Wraps +value+ in double quotes, escaping embedded quotes, so it
        # survives the `sc`/`cacls` command lines built below.
        def escape_value(value)
          escaped_value = value.gsub(/"/, '\\"') # "
          "\"#{escaped_value}\""
        end
      end

      # Drives the Windows service controller (`sc`). +args+ is a list of
      # [key, value] pairs; `sc` requires the "key= value" spacing.
      def service_control(command, args={})
        args = args.collect do |key, value|
          "#{key}= #{Svnserve.escape_value(value)}"
        end.join(" ")
        if `sc #{command} #{SERVICE_NAME} #{args}`.match(/FAILED/)
          raise "Failed to #{command} #{SERVICE_NAME}: #{args}"
        end
      end

      # Grants Everyone full access to +dir+ so the service account can use it.
      def grant_everyone_full_access(dir)
        dir = dir.tr(File::SEPARATOR, File::ALT_SEPARATOR)
        `cacls #{Svnserve.escape_value(dir)} /T /E /P Everyone:F`
      end

      # True when the test service is already registered with Windows.
      def service_exists?
        begin
          service_control("query")
          true
        rescue
          false
        end
      end

      # Registers (if needed) and starts a svnserve Windows service rooted at
      # the test repository, first copying svnserve.exe and the DLLs it needs
      # from PATH into a scratch directory.
      def setup_svnserve
        @svnserve_port = @svnserve_ports.first
        @repos_svnserve_uri = "svn://#{@svnserve_host}:#{@svnserve_port}"
        grant_everyone_full_access(@full_repos_path)
        unless service_exists?
          svnserve_dir = File.expand_path(File.join("test", "svnserve"))
          FileUtils.mkdir_p(svnserve_dir)
          at_exit do
            service_control('delete') if service_exists?
            FileUtils.rm_rf(svnserve_dir)
          end
          targets = %w(svnserve.exe libsvn_subr-1.dll libsvn_repos-1.dll
                       libsvn_fs-1.dll libsvn_delta-1.dll
                       libaprutil.dll libapr.dll sqlite3.dll)
          ENV["PATH"].split(";").each do |path|
            found_targets = []
            targets.each do |target|
              target_path = "#{path}\\#{target}"
              # File.exist?: the exists? alias is deprecated (removed in 3.2).
              if File.exist?(target_path)
                found_targets << target
                FileUtils.cp(target_path, svnserve_dir)
              end
            end
            targets -= found_targets
            break if targets.empty?
          end
          unless targets.empty?
            raise "can't find libraries to work svnserve: #{targets.join(' ')}"
          end
          grant_everyone_full_access(svnserve_dir)
          svnserve_path = File.join(svnserve_dir, "svnserve.exe")
          svnserve_path = svnserve_path.tr(File::SEPARATOR,
                                           File::ALT_SEPARATOR)
          svnserve_path = Svnserve.escape_value(svnserve_path)
          root = @full_repos_path.tr(File::SEPARATOR, File::ALT_SEPARATOR)
          args = ["--service", "--root", Svnserve.escape_value(root),
                  "--listen-host", @svnserve_host,
                  "--listen-port", @svnserve_port]
          user = ENV["USERNAME"] || Etc.getlogin
          service_control('create',
                          [["binPath", "#{svnserve_path} #{args.join(' ')}"],
                           ["DisplayName", SERVICE_NAME],
                           ["type", "own"]])
        end
        service_control('start')
      end

      def teardown_svnserve
        service_control('stop') if service_exists?
      end

      # Installs a pre-revprop-change hook that only allows @author to edit
      # the svn:log revision property.
      def add_pre_revprop_change_hook
        File.open("#{@repos.pre_revprop_change_hook}.cmd", "w") do |hook|
          hook.print <<-HOOK
set REPOS=%1
set REV=%2
set USER=%3
set PROPNAME=%4
if "%PROPNAME%" == "#{Svn::Core::PROP_REVISION_LOG}" if "%USER%" == "#{@author}" exit 0
exit 1
          HOOK
        end
      end
    end

    module SetupEnvironment
      # Writes util.rb plus per-library wrapper stubs into +ext_dir+ so the
      # SWIG bindings built under <top_dir>/Release can be required directly.
      def setup_test_environment(top_dir, base_dir, ext_dir)
        build_type = "Release"
        FileUtils.mkdir_p(ext_dir)
        relative_base_dir =
          base_dir.sub(/^#{Regexp.escape(top_dir + File::SEPARATOR)}/, '')
        build_base_dir = File.join(top_dir, build_type, relative_base_dir)
        dll_dir = File.expand_path(build_base_dir)
        subversion_dir = File.join(build_base_dir, "..", "..", "..")
        subversion_dir = File.expand_path(subversion_dir)
        util_name = "util"
        build_conf = File.join(top_dir, "build.conf")
        File.open(File.join(ext_dir, "#{util_name}.rb" ), 'w') do |util|
          setup_dll_wrapper_util(dll_dir, util)
          add_apr_dll_path_to_dll_wrapper_util(top_dir, build_type, util)
          add_svn_dll_path_to_dll_wrapper_util(build_conf, subversion_dir, util)
          setup_dll_wrappers(build_conf, ext_dir, dll_dir, util_name) do |lib|
            svn_lib_dir = File.join(subversion_dir, "libsvn_#{lib}")
            util.puts("add_path.call(#{svn_lib_dir.dump})")
          end
          svnserve_dir = File.join(subversion_dir, "svnserve")
          util.puts("add_path.call(#{svnserve_dir.dump})")
        end
      end

      private

      # Emits the PATH-prepending helper used by all generated stubs.
      def setup_dll_wrapper_util(dll_dir, util)
        libsvn_swig_ruby_dll_dir = File.join(dll_dir, "libsvn_swig_ruby")
        util.puts(<<-EOC)
paths = ENV["PATH"].split(';')
add_path = Proc.new do |path|
win_path = path.tr(File::SEPARATOR, File::ALT_SEPARATOR)
unless paths.include?(win_path)
ENV["PATH"] = "\#{win_path};\#{ENV['PATH']}"
end
end
add_path.call(#{dll_dir.dump})
add_path.call(#{libsvn_swig_ruby_dll_dir.dump})
        EOC
      end

      # Adds the apr/apr-util/apr-iconv DLL directories, honouring any
      # --with-* overrides found in gen-make.opts.
      def add_apr_dll_path_to_dll_wrapper_util(top_dir, build_type, util)
        lines = []
        gen_make_opts = File.join(top_dir, "gen-make.opts")
        # readlines instead of File.read(...).to_a: String#to_a was removed
        # in Ruby 1.9.
        lines = File.readlines(gen_make_opts) if File.exist?(gen_make_opts)
        config = {}
        lines.each do |line|
          # chomp, or the trailing "\n" ends up inside the configured path
          name, value = line.chomp.split(/\s*=\s*/, 2)
          config[name] = value if value
        end
        ["apr", "apr-util", "apr-iconv"].each do |lib|
          lib_dir = config["--with-#{lib}"] || lib
          dll_dir = File.expand_path(File.join(top_dir, lib_dir, build_type))
          util.puts("add_path.call(#{dll_dir.dump})")
        end
      end

      # Adds each libsvn_* build directory declared in build.conf.
      def add_svn_dll_path_to_dll_wrapper_util(build_conf, subversion_dir, util)
        File.open(build_conf) do |f|
          f.each do |line|
            if /^\[(libsvn_.+)\]\s*$/ =~ line
              lib_name = $1
              lib_dir = File.join(subversion_dir, lib_name)
              util.puts("add_path.call(#{lib_dir.dump})")
            end
          end
        end
      end

      # Writes a <lib>.rb stub per [swig_*] section of build.conf that loads
      # util.rb and the built .so, then yields the library name.
      def setup_dll_wrappers(build_conf, ext_dir, dll_dir, util_name)
        File.open(build_conf) do |f|
          f.each do |line|
            if /^\[swig_(.+)\]\s*$/ =~ line
              lib_name = $1
              File.open(File.join(ext_dir, "#{lib_name}.rb" ), 'w') do |rb|
                rb.puts(<<-EOC)
require File.join(File.dirname(__FILE__), #{util_name.dump})
require File.join(#{dll_dir.dump}, File.basename(__FILE__, '.rb')) + '.so'
                EOC
              end
              yield(lib_name)
            end
          end
        end
      end
    end
  end
end
SWIG: Assure config paths don't include any "\n" characters.
* subversion/bindings/swig/ruby/test/windows_util.rb
(SvnTestUtil::Windows::SetupEnvironment#add_apr_dll_path_to_dll_wrapper_util):
Trim any trailing newlines from configuration read from gen-make.opts.
Patch by: Joe Swatosh <joe.swatosh@gmail.com>
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@864133 13f79535-47bb-0310-9956-ffa450edef68
require 'etc'
require 'fileutils'
# Helpers for running the Subversion Ruby-binding test suite on Windows:
# managing a svnserve Windows service and generating DLL wrapper stubs.
module SvnTestUtil
module Windows
module Svnserve
SERVICE_NAME = 'test-svn-server'
class << self
# Wraps +value+ in double quotes, escaping embedded quotes, so it
# survives the `sc`/`cacls` command lines built below.
def escape_value(value)
escaped_value = value.gsub(/"/, '\\"') # "
"\"#{escaped_value}\""
end
end
# Drives the Windows service controller (`sc`). +args+ is a list of
# [key, value] pairs; `sc` requires the "key= value" spacing.
def service_control(command, args={})
args = args.collect do |key, value|
"#{key}= #{Svnserve.escape_value(value)}"
end.join(" ")
if `sc #{command} #{SERVICE_NAME} #{args}`.match(/FAILED/)
raise "Failed to #{command} #{SERVICE_NAME}: #{args}"
end
end
# Grants Everyone full access to +dir+ so the service account can use it.
def grant_everyone_full_access(dir)
dir = dir.tr(File::SEPARATOR, File::ALT_SEPARATOR)
`cacls #{Svnserve.escape_value(dir)} /T /E /P Everyone:F`
end
# True when the test service is already registered with Windows.
def service_exists?
begin
service_control("query")
true
rescue
false
end
end
# Registers (if needed) and starts a svnserve Windows service rooted at
# the test repository, first copying svnserve.exe and the DLLs it needs
# from PATH into a scratch directory.
def setup_svnserve
@svnserve_port = @svnserve_ports.first
@repos_svnserve_uri = "svn://#{@svnserve_host}:#{@svnserve_port}"
grant_everyone_full_access(@full_repos_path)
unless service_exists?
svnserve_dir = File.expand_path(File.join("test", "svnserve"))
FileUtils.mkdir_p(svnserve_dir)
at_exit do
service_control('delete') if service_exists?
FileUtils.rm_rf(svnserve_dir)
end
targets = %w(svnserve.exe libsvn_subr-1.dll libsvn_repos-1.dll
libsvn_fs-1.dll libsvn_delta-1.dll
libaprutil.dll libapr.dll sqlite3.dll)
ENV["PATH"].split(";").each do |path|
found_targets = []
targets.each do |target|
target_path = "#{path}\\#{target}"
# NOTE(review): File.exists? is deprecated and removed in Ruby 3.2;
# File.exist? is the surviving spelling.
if File.exists?(target_path)
found_targets << target
FileUtils.cp(target_path, svnserve_dir)
end
end
targets -= found_targets
break if targets.empty?
end
unless targets.empty?
raise "can't find libraries to work svnserve: #{targets.join(' ')}"
end
grant_everyone_full_access(svnserve_dir)
svnserve_path = File.join(svnserve_dir, "svnserve.exe")
svnserve_path = svnserve_path.tr(File::SEPARATOR,
File::ALT_SEPARATOR)
svnserve_path = Svnserve.escape_value(svnserve_path)
root = @full_repos_path.tr(File::SEPARATOR, File::ALT_SEPARATOR)
args = ["--service", "--root", Svnserve.escape_value(root),
"--listen-host", @svnserve_host,
"--listen-port", @svnserve_port]
user = ENV["USERNAME"] || Etc.getlogin
service_control('create',
[["binPath", "#{svnserve_path} #{args.join(' ')}"],
["DisplayName", SERVICE_NAME],
["type", "own"]])
end
service_control('start')
end
def teardown_svnserve
service_control('stop') if service_exists?
end
# Installs a pre-revprop-change hook that only allows @author to edit
# the svn:log revision property.
def add_pre_revprop_change_hook
File.open("#{@repos.pre_revprop_change_hook}.cmd", "w") do |hook|
hook.print <<-HOOK
set REPOS=%1
set REV=%2
set USER=%3
set PROPNAME=%4
if "%PROPNAME%" == "#{Svn::Core::PROP_REVISION_LOG}" if "%USER%" == "#{@author}" exit 0
exit 1
HOOK
end
end
end
module SetupEnvironment
# Writes util.rb plus per-library wrapper stubs into +ext_dir+ so the
# SWIG bindings built under <top_dir>/Release can be required directly.
def setup_test_environment(top_dir, base_dir, ext_dir)
build_type = "Release"
FileUtils.mkdir_p(ext_dir)
relative_base_dir =
base_dir.sub(/^#{Regexp.escape(top_dir + File::SEPARATOR)}/, '')
build_base_dir = File.join(top_dir, build_type, relative_base_dir)
dll_dir = File.expand_path(build_base_dir)
subversion_dir = File.join(build_base_dir, "..", "..", "..")
subversion_dir = File.expand_path(subversion_dir)
util_name = "util"
build_conf = File.join(top_dir, "build.conf")
File.open(File.join(ext_dir, "#{util_name}.rb" ), 'w') do |util|
setup_dll_wrapper_util(dll_dir, util)
add_apr_dll_path_to_dll_wrapper_util(top_dir, build_type, util)
add_svn_dll_path_to_dll_wrapper_util(build_conf, subversion_dir, util)
setup_dll_wrappers(build_conf, ext_dir, dll_dir, util_name) do |lib|
svn_lib_dir = File.join(subversion_dir, "libsvn_#{lib}")
util.puts("add_path.call(#{svn_lib_dir.dump})")
end
svnserve_dir = File.join(subversion_dir, "svnserve")
util.puts("add_path.call(#{svnserve_dir.dump})")
end
end
private
# Emits the PATH-prepending helper used by all generated stubs.
def setup_dll_wrapper_util(dll_dir, util)
libsvn_swig_ruby_dll_dir = File.join(dll_dir, "libsvn_swig_ruby")
util.puts(<<-EOC)
paths = ENV["PATH"].split(';')
add_path = Proc.new do |path|
win_path = path.tr(File::SEPARATOR, File::ALT_SEPARATOR)
unless paths.include?(win_path)
ENV["PATH"] = "\#{win_path};\#{ENV['PATH']}"
end
end
add_path.call(#{dll_dir.dump})
add_path.call(#{libsvn_swig_ruby_dll_dir.dump})
EOC
end
# Adds the apr/apr-util/apr-iconv DLL directories, honouring any
# --with-* overrides found in gen-make.opts. Each line is chomped so
# the trailing newline never ends up inside a configured path.
# NOTE(review): File.read(...).to_a relies on String#to_a, removed in
# Ruby 1.9 — File.readlines is the modern equivalent.
def add_apr_dll_path_to_dll_wrapper_util(top_dir, build_type, util)
lines = []
gen_make_opts = File.join(top_dir, "gen-make.opts")
lines = File.read(gen_make_opts).to_a if File.exists?(gen_make_opts)
config = {}
lines.each do |line|
name, value = line.chomp.split(/\s*=\s*/, 2)
config[name] = value if value
end
["apr", "apr-util", "apr-iconv"].each do |lib|
lib_dir = config["--with-#{lib}"] || lib
dll_dir = File.expand_path(File.join(top_dir, lib_dir, build_type))
util.puts("add_path.call(#{dll_dir.dump})")
end
end
# Adds each libsvn_* build directory declared in build.conf.
def add_svn_dll_path_to_dll_wrapper_util(build_conf, subversion_dir, util)
File.open(build_conf) do |f|
f.each do |line|
if /^\[(libsvn_.+)\]\s*$/ =~ line
lib_name = $1
lib_dir = File.join(subversion_dir, lib_name)
util.puts("add_path.call(#{lib_dir.dump})")
end
end
end
end
# Writes a <lib>.rb stub per [swig_*] section of build.conf that loads
# util.rb and the built .so, then yields the library name.
def setup_dll_wrappers(build_conf, ext_dir, dll_dir, util_name)
File.open(build_conf) do |f|
f.each do |line|
if /^\[swig_(.+)\]\s*$/ =~ line
lib_name = $1
File.open(File.join(ext_dir, "#{lib_name}.rb" ), 'w') do |rb|
rb.puts(<<-EOC)
require File.join(File.dirname(__FILE__), #{util_name.dump})
require File.join(#{dll_dir.dump}, File.basename(__FILE__, '.rb')) + '.so'
EOC
end
yield(lib_name)
end
end
end
end
end
end
end
|
class Square
  # Immutable 0-based (column x, row y) coordinate on an 8x8 board.
  attr_reader :x, :y

  def initialize(x, y)
    @x, @y = x, y
  end

  # Two squares are equal when both coordinates match.
  def ==(other)
    x == other.x && y == other.y
  end

  # True when either coordinate leaves the 0..7 board range.
  def out_of_the_board
    !((0..7).cover?(x) && (0..7).cover?(y))
  end

  def to_a
    [x, y]
  end

  # Absolute horizontal distance to +other+.
  def delta_x(other)
    (x - other.x).abs
  end

  # Absolute vertical distance to +other+.
  def delta_y(other)
    (y - other.y).abs
  end
end
# Base class for all chess pieces. Holds the colour and a reference to the
# owning board, which answers occupancy and king-safety queries.
class Piece
  WHITE = "white".freeze
  BLACK = "black".freeze

  attr_reader :color

  def initialize(color, board)
    @color = color
    @board = board
  end

  # True when any square strictly between +position+ and the destination
  # (stepping (dx, dy) for +steps+ squares) is occupied. The start square
  # itself is never checked.
  def obstructions?(dx, dy, steps, position)
    (1...steps).each do |step|
      x = position.x + step * dx
      y = position.y + step * dy
      return true unless @board.empty?(Square.new(x, y))
    end
    false
  end

  # Performs the move on the board when legal; returns the board's (truthy)
  # result on success, nil otherwise.
  def move(from, to)
    @board.move(from, to) if valid_move?(from, to)
  end

  # Generic reachability scan used by the subclasses' any_moves?. Walks each
  # (dx, dy) ray from +from+ for up to +max_steps+ squares and returns true
  # as soon as a reachable square leaves the mover's king safe.
  def any_moves?(from, in_directions, max_steps=8)
    in_directions.each do |dx, dy|
      to, steps = Square.new(from.x, from.y), 0
      while true
        to, steps = Square.new(to.x + dx, to.y + dy), steps.succ
        break if to.out_of_the_board
        if @board.empty?(to) or @board.color_of_piece_on(to) != color
          return true if @board.king_remains_safe_after_move?(from, to)
        end
        # Any occupied square ends the ray. (Previously the scan asked for
        # the colour of empty squares — NoMethodError on nil whenever the
        # move left the king unsafe — and kept walking through enemy pieces.)
        break unless @board.empty?(to)
        break if steps == max_steps
      end
    end
    false
  end
end
class Queen < Piece
  attr_reader :symbol

  def initialize(color, board)
    super
    @symbol = color == WHITE ? '♕' : '♛'
  end

  # A queen's move is valid exactly when a rook's or a bishop's would be.
  def valid_move?(from, to)
    [Rook, Bishop].any? do |kind|
      kind.new(color, @board).valid_move?(from, to)
    end
  end

  # All eight ray directions, unlimited range.
  def any_moves?(from)
    super(from, [[1, 0], [-1, 0], [0, 1], [0, -1],
                 [1, 1], [-1, 1], [1, -1], [-1, -1]])
  end
end
class Bishop < Piece
  attr_reader :symbol

  def initialize(color, board)
    super
    @symbol = color == WHITE ? '♗' : '♝'
  end

  # Diagonal movement: equal horizontal and vertical distance, a clear path
  # in between, and the mover's king left safe afterwards.
  def valid_move?(from, to)
    distance = from.delta_x(to)
    return false unless distance == from.delta_y(to)
    step_x = to.x <=> from.x
    step_y = to.y <=> from.y
    return false if obstructions?(step_x, step_y, distance, from)
    @board.king_remains_safe_after_move?(from, to)
  end

  # The four diagonal directions, unlimited range.
  def any_moves?(from)
    super(from, [[1, 1], [-1, 1], [1, -1], [-1, -1]])
  end
end
class Knight < Piece
  attr_reader :symbol

  def initialize(color, board)
    super
    @symbol = color == WHITE ? '♘' : '♞'
  end

  # An L-shaped jump — (2,1) or (1,2) squares — provided the mover's king
  # stays safe.
  def valid_move?(from, to)
    dx = from.delta_x(to)
    dy = from.delta_y(to)
    # && instead of `and`: `horizontal = dx == 2 and dy == 1` parsed as
    # `(horizontal = dx == 2) and dy == 1`, silently discarding dy.
    return false unless (dx == 2 && dy == 1) || (dx == 1 && dy == 2)
    @board.king_remains_safe_after_move?(from, to)
  end

  # True when the knight has at least one legal move from +from+.
  def any_moves?(from)
    positions = [[from.x + 1, from.y + 2], [from.x + 2, from.y + 1],
                 [from.x + 2, from.y - 1], [from.x + 1, from.y - 2],
                 [from.x - 1, from.y + 2], [from.x - 2, from.y + 1],
                 [from.x - 1, from.y - 2], [from.x - 2, from.y - 1]]
    positions.each do |position|
      next unless position.all? { |coordinate| coordinate.between?(0, 7) }
      return true if valid_move?(from, Square.new(*position))
    end
    # Previously fell through returning the (truthy) positions array, so the
    # board believed a knight always had a move.
    false
  end
end
class Pawn < Piece
  attr_reader :moved, :symbol

  def initialize(color, board)
    super
    @moved = false
    @symbol = color == WHITE ? '♙' : '♟'
  end

  # A pawn may advance one square (two from its start square) onto an empty
  # square, or capture one square diagonally forward; the king must remain
  # safe afterwards.
  def valid_move?(from, to)
    return false unless valid_direction?(from, to)
    if from.delta_y(to) == 1 and from.delta_x(to) <= 1
      # straight push needs an empty target; a diagonal step needs a capture
      return false if from.x == to.x and not @board.empty?(to)
      return false if from.delta_x(to) == 1 and @board.empty?(to)
    elsif from.delta_y(to) == 2
      # double push: only before the first move, same file, and both the
      # crossed square and the destination clear (steps=3 checks both)
      return false if moved or from.x != to.x or obstructions?(0, to.y <=> from.y , 3, from)
    else
      return false
    end
    @board.king_remains_safe_after_move?(from, to)
  end

  # Pawns only ever move toward the opponent's side.
  def valid_direction?(from, to)
    color == WHITE ? to.y < from.y : to.y > from.y
  end

  def empty_or_opponent_on(position)
    @board.empty?(position) or @board.color_of_piece_on(position) != color
  end

  # True when the pawn has at least one legal move from +from+.
  def any_moves?(from)
    positions = [[from.x + 1, from.y - 1], [from.x, from.y - 1],
                 [from.x - 1, from.y - 1], [from.x, from.y + 1],
                 [from.x + 1, from.y + 1], [from.x - 1, from.y + 1]]
    positions.each do |position|
      next unless position.all? { |coordinate| coordinate.between?(0, 7) }
      position = Square.new(*position)
      return true if empty_or_opponent_on(position) and valid_move?(from, position)
    end
    # Previously fell through returning the (truthy) positions array, so
    # stalemate/checkmate detection believed a pawn always had a move.
    false
  end

  # Performs the move, records a promotion square on reaching the last rank,
  # and marks the pawn as moved (disabling the double push).
  def move(from, to)
    if super
      @board.pawn_promotion_position = to if to.y == 0 or to.y == 7
      @moved = true
    end
  end
end
class King < Piece
  attr_reader :moved, :symbol

  def initialize(color, board)
    super
    @moved = false
    @symbol = color == WHITE ? '♔' : '♚'
  end

  # True when castling with the rook on +rook_position+ is currently legal:
  # neither piece has moved, the path is clear, and no square the king
  # stands on or crosses is attacked.
  def castle?(king_position, rook_position)
    return false if moved or not @board.piece_on(rook_position).is_a? Rook
    return false if @board.piece_on(rook_position).moved
    dx, dy, steps = rook_position.x > king_position.x ? [1, 0, 3] : [-1, 0, 4]
    return false if obstructions?(dx, dy, steps, king_position)
    # Square exposes only readers, so build a fresh Square per crossed
    # square (the old `square.x += dx` raised NoMethodError), and use
    # safe_from? — the old code called an undefined king_safe?.
    square = Square.new(king_position.x, king_position.y)
    3.times do
      return false unless safe_from?(square)
      square = Square.new(square.x + dx, square.y + dy)
    end
    true
  end

  # One square in any direction, or a two-square castling move.
  def valid_move?(from, to)
    return false if from.delta_y(to) > 1
    if from.delta_x(to) > 1
      if to.x == from.x + 2 and from.y == to.y
        rook_position = Square.new(7, from.y)
        return false unless castle?(from, rook_position)
      elsif to.x == from.x - 2 and from.y == to.y
        rook_position = Square.new(0, from.y)
        return false unless castle?(from, rook_position)
      else
        return false
      end
    end
    @board.king_remains_safe_after_move?(from, to)
  end

  # True when no enemy piece attacks +position+.
  def safe_from?(position)
    not (attacked_by_a_pawn?(position) or attacked_by_a_knight?(position) or attacked_by_other?(position))
  end

  # True when an enemy pawn attacks +from+ (pawns attack diagonally toward
  # their direction of travel).
  def attacked_by_a_pawn?(from)
    if color == WHITE
      positions = [[from.x + 1, from.y - 1], [from.x - 1, from.y - 1]]
    else
      # was [x+1, y+1] listed twice, missing the attack from the other side
      positions = [[from.x + 1, from.y + 1], [from.x - 1, from.y + 1]]
    end
    positions.any? do |position|
      @board.piece_on(position).is_a? Pawn and @board.piece_on(position).color != color
    end
  end

  # True when an enemy knight attacks +from+.
  def attacked_by_a_knight?(from)
    positions = [[from.x + 2, from.y + 1], [from.x + 2, from.y - 1],
                 [from.x - 2, from.y + 1], [from.x - 2, from.y - 1],
                 [from.x + 1, from.y + 2], [from.x + 1, from.y - 2],
                 [from.x - 1, from.y + 2], [from.x - 1, from.y - 2]]
    positions.any? do |position|
      @board.piece_on(position).is_a? Knight and @board.piece_on(position).color != color
    end
  end

  # True when a sliding piece (or the enemy king, one step away) attacks
  # +position+. Walks all eight rays until a piece or the edge is reached.
  def attacked_by_other?(position)
    directions = [[1, 0], [-1, 0], [0, 1], [0, -1],
                  [1, 1], [-1, 1], [1, -1], [-1, -1]]
    directions.each do |dx, dy|
      to, steps = Square.new(position.x, position.y), 0
      while true
        to, steps = Square.new(to.x + dx, to.y + dy), steps.succ
        break if to.out_of_the_board
        next if @board.empty?(to)
        break if @board.color_of_piece_on(to) == color
        case @board.piece_on(to)
        when King then return true if steps == 1
        when Queen then return true
        when Rook then return true if dx.abs != dy.abs
        when Bishop then return true if dx.abs == dy.abs
        end
        break
      end
    end
    false
  end

  # True when the king has any legal step or castling move from +from+.
  def any_moves?(from)
    in_directions = [[1, 0], [-1, 0], [0, 1], [0, -1],
                     [1, 1], [-1, 1], [1, -1], [-1, -1]]
    return true if super(from, in_directions)
    right_rook_position = Square.new(from.x + 3, from.y)
    left_rook_position = Square.new(from.x - 4, from.y)
    castle?(from, right_rook_position) or castle?(from, left_rook_position)
  end

  # Performs the move; on a castling move the matching rook is moved too.
  def move(from, to)
    if valid_move?(from, to)
      if to.x == from.x + 2
        @board.move(Square.new(7, to.y), Square.new(5, to.y))
      elsif to.x == from.x - 2
        @board.move(Square.new(0, to.y), Square.new(3, to.y))
      end
      @board.move(from, to)
      @moved = true
    end
  end
end
class Rook < Piece
  attr_reader :moved, :symbol

  def initialize(color, board)
    super
    @moved = false
    @symbol = color == WHITE ? '♖' : '♜'
  end

  # Straight-line movement along a rank or file with a clear path and the
  # mover's king left safe afterwards.
  def valid_move?(from, to)
    return false unless from.x == to.x || from.y == to.y
    step_x = to.x <=> from.x
    step_y = to.y <=> from.y
    distance = [from.delta_x(to), from.delta_y(to)].max
    return false if obstructions?(step_x, step_y, distance, from)
    @board.king_remains_safe_after_move?(from, to)
  end

  # The four orthogonal directions, unlimited range.
  def any_moves?(from)
    super(from, [[1, 0], [-1, 0], [0, 1], [0, -1]])
  end

  # Records that the rook has moved (which disables castling with it).
  def move(from, to)
    @moved = true if super
  end
end
# The board and game state: piece placement, turn tracking, end-of-game
# detection, and pawn promotion bookkeeping.
class ChessBoard
  attr_reader :game_status, :turn
  attr_writer :pawn_promotion_position

  WHITE = "white".freeze
  BLACK = "black".freeze

  GAME_IN_PROGRESS = "Game in progress.".freeze
  BLACK_WIN = "Black win!".freeze
  WHITE_WIN = "White win!".freeze
  STALEMATE = "Stalemate!".freeze

  # Sets up the standard starting position. The board is a Hash keyed by
  # [column, row] arrays; nil marks an empty square. White moves first.
  def initialize
    @board = {
      [0, 0] => Rook.new(BLACK, self), [1, 0] => Knight.new(BLACK, self),
      [2, 0] => Bishop.new(BLACK, self), [3, 0] => Queen.new(BLACK, self),
      [4, 0] => King.new(BLACK, self), [5, 0] => Bishop.new(BLACK, self),
      [6, 0] => Knight.new(BLACK, self), [7, 0] => Rook.new(BLACK, self),
      [0, 7] => Rook.new(WHITE, self), [1, 7] => Knight.new(WHITE, self),
      [2, 7] => Bishop.new(WHITE, self), [3, 7] => Queen.new(WHITE, self),
      [4, 7] => King.new(WHITE, self), [5, 7] => Bishop.new(WHITE, self),
      [6, 7] => Knight.new(WHITE, self), [7, 7] => Rook.new(WHITE, self),
    }
    0.upto(7).each do |column|
      2.upto(5).each do |row|
        @board[[column, 1]] = Pawn.new(BLACK, self)
        @board[[column, 6]] = Pawn.new(WHITE, self)
        @board[[column, row]] = nil
      end
    end
    @turn = WHITE
    @game_status = GAME_IN_PROGRESS
    @pawn_promotion_position = nil
  end

  # Moves whatever sits on +from+ to +to+ unconditionally; legality checks
  # are the pieces' responsibility. Returns true so Piece#move can report
  # success.
  def move(from, to)
    @board[to.to_a] = @board[from.to_a]
    @board[from.to_a] = nil
    true
  end

  # Plays from->to on the live position, checks whether the current player's
  # king would be attacked, then restores the saved position.
  def king_remains_safe_after_move?(from, to)
    board = @board.dup
    move(from, to)
    x, y, king = king_of(turn)
    king_position = Square.new(x, y)
    result = king.safe_from?(king_position)
    @board = board
    result
  end

  def out_of_the_board?(from, to)
    from.out_of_the_board or to.out_of_the_board
  end

  # Colour of the piece on +position+; raises on an empty square.
  def color_of_piece_on(position)
    @board[position.to_a].color
  end

  # Returns [x, y, king] for the king of +color+ (hash entry flattened).
  def king_of(color)
    @board.select { |_, piece| piece.is_a? King and piece.color == color }.to_a.flatten
  end

  def empty?(position)
    @board[position.to_a].nil?
  end

  def piece_on(position)
    @board[position.to_a]
  end

  def pieces_of_the_same_color?(from, to)
    not empty?(to) and color_of_piece_on(from) == color_of_piece_on(to)
  end

  # True when the player to move has at least one legal move anywhere.
  def any_valid_moves_for_player_on_turn?
    @board.each do |from, piece|
      return true if piece.color == turn and piece.any_moves?(Square.new(*from))
    end
    false
  end

  # Returns true when in check, nil otherwise (truthiness is what matters).
  def king_of_current_player_is_in_check?
    x, y, king = king_of(turn)
    true unless king.safe_from?(Square.new(x, y))
  end

  def switch_players
    @turn = turn == WHITE ? BLACK : WHITE
  end

  def player_owns_piece_on?(position)
    turn == color_of_piece_on(position)
  end

  # Delegates to the piece; truthy when the move was performed.
  def allowed_to_move_piece_on?(from, to)
    piece_on(from).move(from, to)
  end

  # Updates game_status to checkmate or stalemate when the player to move
  # has no legal moves.
  def game_over?
    unless any_valid_moves_for_player_on_turn?
      if king_of_current_player_is_in_check?
        @game_status = turn == WHITE ? BLACK_WIN : WHITE_WIN
      else
        @game_status = STALEMATE
      end
    end
  end

  # Validates and plays one move for the player on turn, then passes the
  # turn and re-evaluates the game status. Silently ignores illegal input.
  def make_a_move(from, to)
    return if empty?(from)
    return if out_of_the_board?(from, to)
    return if pieces_of_the_same_color?(from, to)
    return if from == to
    return unless player_owns_piece_on?(from)
    return unless allowed_to_move_piece_on?(from, to)
    switch_players
    game_over?
  end

  def white_win?
    @game_status == WHITE_WIN
  end

  def black_win?
    @game_status == BLACK_WIN
  end

  def stalemate?
    @game_status == STALEMATE
  end

  # Replaces the promoted pawn with +piece+ and clears the promotion flag.
  # Uses the ivar directly: only attr_writer is declared, so the previous
  # bare pawn_promotion_position call raised NameError.
  def promote_pawn_to(piece)
    @board[@pawn_promotion_position.to_a] = piece
    @pawn_promotion_position = nil
    game_over?
  end

  # Truthy while a pawn is awaiting promotion.
  def promotion?
    @pawn_promotion_position
  end

  # Renders the position as 8 lines of symbols ('-' for empty squares).
  # NOTE: shadows Kernel#print for ChessBoard instances.
  def print
    result = ""
    0.upto(7).each do |row|
      0.upto(7).each do |column|
        square = Square.new(column, row)
        result << (empty?(square) ? '-' : piece_on(square).symbol)
      end
      result << "\n"
    end
    result.chomp
  end
end
Fix King#attacked_by_a_pawn? and King#attacked_by_a_knight?
class Square
  # Immutable 0-based (column x, row y) coordinate on an 8x8 board.
  attr_reader :x, :y

  def initialize(x, y)
    @x = x
    @y = y
  end

  # Coordinate-wise equality.
  def ==(other)
    x == other.x && y == other.y
  end

  # True when the coordinate falls outside columns/rows 0..7.
  def out_of_the_board
    not (x.between?(0, 7) and y.between?(0, 7))
  end

  def to_a
    [x, y]
  end

  # Non-negative horizontal distance to +other+.
  def delta_x(other)
    (other.x - x).abs
  end

  # Non-negative vertical distance to +other+.
  def delta_y(other)
    (other.y - y).abs
  end
end
# Base class for all chess pieces. Holds the colour and a reference to the
# owning board, which answers occupancy and king-safety queries.
class Piece
  WHITE = "white".freeze
  BLACK = "black".freeze

  attr_reader :color

  def initialize(color, board)
    @color = color
    @board = board
  end

  # True when any square strictly between +position+ and the destination
  # (stepping (dx, dy) for +steps+ squares) is occupied. The start square
  # itself is never checked.
  def obstructions?(dx, dy, steps, position)
    (1...steps).each do |step|
      x = position.x + step * dx
      y = position.y + step * dy
      return true unless @board.empty?(Square.new(x, y))
    end
    false
  end

  # Performs the move on the board when legal; returns the board's (truthy)
  # result on success, nil otherwise.
  def move(from, to)
    @board.move(from, to) if valid_move?(from, to)
  end

  # Generic reachability scan used by the subclasses' any_moves?. Walks each
  # (dx, dy) ray from +from+ for up to +max_steps+ squares and returns true
  # as soon as a reachable square leaves the mover's king safe.
  def any_moves?(from, in_directions, max_steps=8)
    in_directions.each do |dx, dy|
      to, steps = Square.new(from.x, from.y), 0
      while true
        to, steps = Square.new(to.x + dx, to.y + dy), steps.succ
        break if to.out_of_the_board
        if @board.empty?(to) or @board.color_of_piece_on(to) != color
          return true if @board.king_remains_safe_after_move?(from, to)
        end
        # Any occupied square ends the ray. (Previously the scan asked for
        # the colour of empty squares — NoMethodError on nil whenever the
        # move left the king unsafe — and kept walking through enemy pieces.)
        break unless @board.empty?(to)
        break if steps == max_steps
      end
    end
    false
  end
end
class Queen < Piece
  attr_reader :symbol

  def initialize(color, board)
    super
    @symbol = color == WHITE ? '♕' : '♛'
  end

  # A queen's move is valid exactly when a rook's or a bishop's would be.
  def valid_move?(from, to)
    [Rook, Bishop].any? do |kind|
      kind.new(color, @board).valid_move?(from, to)
    end
  end

  # All eight ray directions, unlimited range.
  def any_moves?(from)
    super(from, [[1, 0], [-1, 0], [0, 1], [0, -1],
                 [1, 1], [-1, 1], [1, -1], [-1, -1]])
  end
end
class Bishop < Piece
  attr_reader :symbol

  def initialize(color, board)
    super
    @symbol = color == WHITE ? '♗' : '♝'
  end

  # Diagonal movement: equal horizontal and vertical distance, a clear path
  # in between, and the mover's king left safe afterwards.
  def valid_move?(from, to)
    distance = from.delta_x(to)
    return false unless distance == from.delta_y(to)
    step_x = to.x <=> from.x
    step_y = to.y <=> from.y
    return false if obstructions?(step_x, step_y, distance, from)
    @board.king_remains_safe_after_move?(from, to)
  end

  # The four diagonal directions, unlimited range.
  def any_moves?(from)
    super(from, [[1, 1], [-1, 1], [1, -1], [-1, -1]])
  end
end
# The knight jumps in an L shape: (±1, ±2) or (±2, ±1).
class Knight < Piece
  attr_reader :symbol

  def initialize(color, board)
    super
    @symbol = color == WHITE ? '♘' : '♞'
  end

  # Legal when the move is an L-shaped jump that leaves the moving
  # side's king safe.
  #
  # Bug fix: the previous version used `and` inside assignments
  # (`horizontal = a == 2 and b == 1`); `and` binds looser than `=`,
  # so only the delta_x half was ever assigned and moves like (2, 0)
  # were accepted. `&&` restores the intended conjunction.
  def valid_move?(from, to)
    horizontal = from.delta_x(to) == 2 && from.delta_y(to) == 1
    vertical = from.delta_x(to) == 1 && from.delta_y(to) == 2
    return false unless vertical || horizontal
    @board.king_remains_safe_after_move?(from, to)
  end

  # True when at least one of the eight candidate jumps is legal.
  #
  # Bug fix: the previous version fell off the `each` loop and
  # returned the (truthy) candidate array when no jump was legal;
  # it now returns false.
  def any_moves?(from)
    positions = [[from.x + 1, from.y + 2], [from.x + 2, from.y + 1],
                 [from.x + 2, from.y - 1], [from.x + 1, from.y - 2],
                 [from.x - 1, from.y + 2], [from.x - 2, from.y + 1],
                 [from.x - 1, from.y - 2], [from.x - 2, from.y - 1]]
    positions.each do |position|
      next unless position.all? { |coordinate| coordinate.between?(0, 7) }
      return true if valid_move?(from, Square.new(*position))
    end
    false
  end
end
# Pawns advance one square (two from their starting rank) and capture
# one square diagonally forward. White moves toward y = 0, black
# toward y = 7.
class Pawn < Piece
  attr_reader :moved, :symbol

  def initialize(color, board)
    super
    @moved = false
    @symbol = color == WHITE ? '♙' : '♟'
  end

  # Legal when the move is a single forward step onto an empty square,
  # a diagonal capture onto an occupied square, or an unobstructed
  # initial double step — and the moving side's king stays safe.
  # NOTE(review): en passant is not implemented here.
  def valid_move?(from, to)
    return false unless valid_direction?(from, to)
    if from.delta_y(to) == 1 and from.delta_x(to) <= 1
      return false if from.x == to.x and not @board.empty?(to)
      return false if from.delta_x(to) == 1 and @board.empty?(to)
    elsif from.delta_y(to) == 2
      return false if moved or from.x != to.x or obstructions?(0, to.y <=> from.y , 3, from)
    else
      return false
    end
    @board.king_remains_safe_after_move?(from, to)
  end

  # White pawns advance toward smaller y values, black toward larger.
  def valid_direction?(from, to)
    color == WHITE ? to.y < from.y : to.y > from.y
  end

  def empty_or_opponent_on(position)
    @board.empty?(position) or @board.color_of_piece_on(position) != color
  end

  # True when at least one candidate destination is legal. Candidates
  # for both colors are listed; valid_direction? filters the wrong ones.
  #
  # Bug fix: the previous version fell off the `each` loop and
  # returned the (truthy) candidate array when no move was legal;
  # it now returns false.
  def any_moves?(from)
    positions = [[from.x + 1, from.y - 1], [from.x, from.y - 1],
                 [from.x - 1, from.y - 1], [from.x, from.y + 1],
                 [from.x + 1, from.y + 1], [from.x - 1, from.y + 1]]
    positions.each do |position|
      next unless position.all? { |coordinate| coordinate.between?(0, 7) }
      position = Square.new(*position)
      return true if empty_or_opponent_on(position) and valid_move?(from, position)
    end
    false
  end

  # Performs the move and, on success, records that the pawn has moved
  # and flags a pending promotion on reaching the last rank.
  def move(from, to)
    if super
      @board.pawn_promotion_position = to if to.y == 0 or to.y == 7
      @moved = true
    end
  end
end
# The king moves one square in any direction and may castle with an
# unmoved rook. Also hosts the attack detection used for check tests.
class King < Piece
  attr_reader :moved, :symbol

  def initialize(color, board)
    super
    @moved = false
    @symbol = color == WHITE ? '♔' : '♚'
  end

  # True when castling with the rook on +rook_position+ is allowed:
  # neither piece has moved, the squares between them are empty, and
  # the king never stands on an attacked square along its path.
  #
  # Bug fixes:
  # * Square declares readers only, so the old `square.x += dx` raised
  #   NoMethodError; a fresh Square is built for each probed square.
  # * `king_safe?` is not defined anywhere in this file; the safety
  #   probe here is `safe_from?`. (NOTE(review): confirm nothing
  #   external supplied a `king_safe?` method.)
  def castle?(king_position, rook_position)
    return false if moved or not @board.piece_on(rook_position).is_a? Rook
    return false if @board.piece_on(rook_position).moved
    dx, _dy, steps = rook_position.x > king_position.x ? [1, 0, 3] : [-1, 0, 4]
    return false if obstructions?(dx, 0, steps, king_position)
    square = Square.new(king_position.x, king_position.y)
    3.times do
      return false unless safe_from?(square)
      square = Square.new(square.x + dx, square.y)
    end
    true
  end

  # Legal when the destination is one square away, or a two-square
  # castling move along the rank, and the king ends up safe.
  def valid_move?(from, to)
    return false if from.delta_y(to) > 1
    if from.delta_x(to) > 1
      if to.x == from.x + 2 and from.y == to.y
        rook_position = Square.new(7, from.y)
        return false unless castle?(from, rook_position)
      elsif to.x == from.x - 2 and from.y == to.y
        rook_position = Square.new(0, from.y)
        return false unless castle?(from, rook_position)
      else
        return false
      end
    end
    @board.king_remains_safe_after_move?(from, to)
  end

  # True when no enemy pawn, knight, sliding piece or adjacent king
  # attacks +position+.
  def safe_from?(position)
    not (attacked_by_a_pawn?(position) or attacked_by_a_knight?(position) or attacked_by_other?(position))
  end

  # True when an enemy pawn attacks +from+ diagonally.
  #
  # Bug fix: the black branch previously listed [x + 1, y + 1] twice,
  # so attacks from the [x - 1, y + 1] diagonal went undetected.
  def attacked_by_a_pawn?(from)
    if color == WHITE
      positions = [[from.x + 1, from.y - 1], [from.x - 1, from.y - 1]]
    else
      positions = [[from.x + 1, from.y + 1], [from.x - 1, from.y + 1]]
    end
    positions.any? do |position|
      square = Square.new(*position)
      @board.piece_on(square).is_a? Pawn and @board.piece_on(square).color != color
    end
  end

  # True when an enemy knight attacks +from+ with an L-shaped jump.
  def attacked_by_a_knight?(from)
    positions = [[from.x + 2, from.y + 1], [from.x + 2, from.y - 1],
                 [from.x - 2, from.y + 1], [from.x - 2, from.y - 1],
                 [from.x + 1, from.y + 2], [from.x + 1, from.y - 2],
                 [from.x - 1, from.y + 2], [from.x - 1, from.y - 2]]
    positions.any? do |position|
      square = Square.new(*position)
      @board.piece_on(square).is_a? Knight and @board.piece_on(square).color != color
    end
  end

  # True when an enemy queen/rook/bishop attacks +position+ along a
  # rank, file or diagonal, or an enemy king stands adjacent.
  def attacked_by_other?(position)
    directions = [[1, 0], [-1, 0], [0, 1], [0, -1],
                  [1, 1], [-1, 1], [1, -1], [-1, -1]]
    directions.each do |dx, dy|
      to, steps = Square.new(position.x, position.y), 0
      while true
        to, steps = Square.new(to.x + dx, to.y + dy), steps.succ
        break if to.out_of_the_board
        next if @board.empty?(to)
        break if @board.color_of_piece_on(to) == color
        case @board.piece_on(to)
        when King then return true if steps == 1
        when Queen then return true
        when Rook then return true if dx.abs != dy.abs
        when Bishop then return true if dx.abs == dy.abs
        end
        break # a non-attacking enemy piece still blocks the line
      end
    end
    false
  end

  # True when the king can step to some adjacent square or castle.
  #
  # Bug fix: the king moves a single square, so the direction scan is
  # capped at one step; the previous call used the default of eight,
  # reporting moves to unreachable squares.
  def any_moves?(from)
    in_directions = [[1, 0], [-1, 0], [0, 1], [0, -1],
                     [1, 1], [-1, 1], [1, -1], [-1, -1]]
    return true if super(from, in_directions, 1)
    right_rook_position = Square.new(from.x + 3, from.y)
    left_rook_position = Square.new(from.x - 4, from.y)
    castle?(from, right_rook_position) or castle?(from, left_rook_position)
  end

  # Performs the move; for a castling move the rook is relocated first.
  def move(from, to)
    if valid_move?(from, to)
      if to.x == from.x + 2
        @board.move(Square.new(7, to.y), Square.new(5, to.y))
      elsif to.x == from.x - 2
        @board.move(Square.new(0, to.y), Square.new(3, to.y))
      end
      @board.move(from, to)
      @moved = true
    end
  end
end
# The rook slides along a rank or file and remembers whether it has
# moved (needed for castling eligibility).
class Rook < Piece
  attr_reader :moved, :symbol

  def initialize(color, board)
    super
    @moved = false
    @symbol = color == WHITE ? '♖' : '♜'
  end

  # Legal when the move is an unobstructed horizontal or vertical
  # slide that leaves the moving side's king safe.
  def valid_move?(from, to)
    return false if from.x != to.x and from.y != to.y
    direction_x = to.x <=> from.x
    direction_y = to.y <=> from.y
    distance = [from.delta_x(to), from.delta_y(to)].max
    return false if obstructions?(direction_x, direction_y, distance, from)
    @board.king_remains_safe_after_move?(from, to)
  end

  def any_moves?(from)
    super(from, [[1, 0], [-1, 0], [0, 1], [0, -1]])
  end

  # Performs the move and records it for castling checks.
  def move(from, to)
    @moved = true if super
  end
end
# Holds the 8x8 position as a hash keyed by [column, row] (row 0 is
# black's back rank), tracks whose turn it is, and maintains the game
# status and pawn-promotion bookkeeping.
class ChessBoard
  attr_reader :game_status, :turn
  attr_writer :pawn_promotion_position

  WHITE = "white".freeze
  BLACK = "black".freeze
  GAME_IN_PROGRESS = "Game in progress.".freeze
  BLACK_WIN = "Black win!".freeze
  WHITE_WIN = "White win!".freeze
  STALEMATE = "Stalemate!".freeze

  # Sets up the standard initial position: black on rows 0-1, white on
  # rows 6-7, rows 2-5 empty. (The previous version re-assigned the
  # pawns once per middle row; harmless, but redundant.)
  def initialize
    @board = {
      [0, 0] => Rook.new(BLACK, self), [1, 0] => Knight.new(BLACK, self),
      [2, 0] => Bishop.new(BLACK, self), [3, 0] => Queen.new(BLACK, self),
      [4, 0] => King.new(BLACK, self), [5, 0] => Bishop.new(BLACK, self),
      [6, 0] => Knight.new(BLACK, self), [7, 0] => Rook.new(BLACK, self),
      [0, 7] => Rook.new(WHITE, self), [1, 7] => Knight.new(WHITE, self),
      [2, 7] => Bishop.new(WHITE, self), [3, 7] => Queen.new(WHITE, self),
      [4, 7] => King.new(WHITE, self), [5, 7] => Bishop.new(WHITE, self),
      [6, 7] => Knight.new(WHITE, self), [7, 7] => Rook.new(WHITE, self),
    }
    0.upto(7).each do |column|
      @board[[column, 1]] = Pawn.new(BLACK, self)
      @board[[column, 6]] = Pawn.new(WHITE, self)
      2.upto(5).each { |row| @board[[column, row]] = nil }
    end
    @turn = WHITE
    @game_status = GAME_IN_PROGRESS
    @pawn_promotion_position = nil
  end

  # Moves whatever occupies +from+ onto +to+, unconditionally, and
  # returns true so piece-level `move` implementations can chain on it.
  def move(from, to)
    @board[to.to_a] = @board[from.to_a]
    @board[from.to_a] = nil
    true
  end

  # Applies the move on the live board (after a shallow dup), asks the
  # current player's king whether it is safe, then restores the saved
  # position. A shallow dup suffices because `move` replaces hash
  # values and never mutates the piece objects themselves.
  def king_remains_safe_after_move?(from, to)
    board = @board.dup
    move(from, to)
    x, y, king = king_of(turn)
    result = king.safe_from?(Square.new(x, y))
    @board = board
    result
  end

  def out_of_the_board?(from, to)
    from.out_of_the_board or to.out_of_the_board
  end

  def color_of_piece_on(position)
    @board[position.to_a].color
  end

  # Returns a flattened [x, y, king] triple for the given color.
  def king_of(color)
    @board.select { |_, piece| piece.is_a? King and piece.color == color }.to_a.flatten
  end

  def empty?(position)
    @board[position.to_a].nil?
  end

  def piece_on(position)
    @board[position.to_a]
  end

  def pieces_of_the_same_color?(from, to)
    not empty?(to) and color_of_piece_on(from) == color_of_piece_on(to)
  end

  # True when the player on turn has at least one legal move.
  #
  # Bug fix: empty squares are stored as nil values, so the previous
  # version raised NoMethodError on `nil.color`; nil entries are now
  # skipped.
  def any_valid_moves_for_player_on_turn?
    @board.each do |from, piece|
      next if piece.nil?
      return true if piece.color == turn and piece.any_moves?(Square.new(*from))
    end
    false
  end

  # True when the king of the player on turn is attacked. (Returns an
  # explicit false instead of nil when the king is safe.)
  def king_of_current_player_is_in_check?
    x, y, king = king_of(turn)
    not king.safe_from?(Square.new(x, y))
  end

  def switch_players
    @turn = turn == WHITE ? BLACK : WHITE
  end

  def player_owns_piece_on?(position)
    turn == color_of_piece_on(position)
  end

  def allowed_to_move_piece_on?(from, to)
    piece_on(from).move(from, to)
  end

  # Refreshes @game_status when the player on turn has no legal moves:
  # checkmate when in check, stalemate otherwise.
  def game_over?
    unless any_valid_moves_for_player_on_turn?
      if king_of_current_player_is_in_check?
        @game_status = turn == WHITE ? BLACK_WIN : WHITE_WIN
      else
        @game_status = STALEMATE
      end
    end
  end

  # Validates and performs a move for the player on turn, passes the
  # turn, and refreshes the game status. Illegal requests are silently
  # ignored (nil return).
  def make_a_move(from, to)
    return if empty?(from)
    return if out_of_the_board?(from, to)
    return if pieces_of_the_same_color?(from, to)
    return if from == to
    return unless player_owns_piece_on?(from)
    return unless allowed_to_move_piece_on?(from, to)
    switch_players
    game_over?
  end

  def white_win?
    @game_status == WHITE_WIN
  end

  def black_win?
    @game_status == BLACK_WIN
  end

  def stalemate?
    @game_status == STALEMATE
  end

  # Replaces the promoted pawn with +piece+ and clears the marker.
  #
  # Bug fix: only attr_writer is declared for @pawn_promotion_position,
  # so the previous bare `pawn_promotion_position` call raised
  # NoMethodError; the instance variable is read directly instead.
  def promote_pawn_to(piece)
    @board[@pawn_promotion_position.to_a] = piece
    @pawn_promotion_position = nil
    game_over?
  end

  # Truthy (the destination Square) while a promotion is pending.
  def promotion?
    @pawn_promotion_position
  end

  # Renders the board as eight lines of eight symbols, '-' for empty
  # squares. (NOTE: shadows Kernel#print for this object.)
  def print
    result = ""
    0.upto(7).each do |row|
      0.upto(7).each do |column|
        square = Square.new(column, row)
        result << (empty?(square) ? '-' : piece_on(square).symbol)
      end
      result << "\n"
    end
    result.chomp
  end
end
|
# Documentation: https://github.com/Homebrew/homebrew/blob/master/share/doc/homebrew/Formula-Cookbook.md
# /usr/local/Library/Contributions/example-formula.rb
# PLEASE REMOVE ALL GENERATED COMMENTS BEFORE SUBMITTING YOUR PULL REQUEST!
# Homebrew formula for the cisst libraries (JHU).
class Cisst < Formula
  desc "cisst combines robotics, stereo vision, intraoperative imaging, and related infrastructure"
  homepage "https://github.com/jhu-cisst/cisst"
  url "https://github.com/jhu-cisst/cisst.git", :tag => "1.0.5"
  head "https://github.com/jhu-cisst/cisst.git", :branch => "devel"
  # Bug fix: the version previously said "1.0.4" although the url pins
  # tag 1.0.5; the two must agree.
  version "1.0.5"
  #sha256 ""

  option "with-debug", "build library with debug symbols enabled"

  depends_on "cmake" => :build
  depends_on "cisstnetlib" => :recommended
  depends_on "libxml2" => :recommended
  depends_on "opencv" => :optional
  depends_on "qt" => :optional
  depends_on "qt5" => :optional
  depends_on "fltk" => :optional

  def install
    # Component/feature flags passed straight to the upstream CMake
    # build; ICE, SWIG-free GUI and vision components are disabled.
    cmake_args = std_cmake_args + %W[
      -DCISST_BUILD_EXAMPLES=OFF
      -DCISST_BUILD_SHARED_LIBS=OFF
      -DCISST_BUILD_TESTS=OFF
      -DCISST_HAS_CISSTNETLIB=ON
      -DCISST_HAS_IOS=OFF
      -DCISST_HAS_JSON=OFF
      -DCISST_HAS_QT4=OFF
      -DCISST_MTS_HAS_ICE=OFF
      -DCISST_USE_EXTERNAL=OFF
      -DCISST_XML_LIB=LibXml2
      -DCISST_cisst3DUserInterface=OFF
      -DCISST_cisstCommon=ON
      -DCISST_cisstCommonXML=ON
      -DCISST_cisstInteractive=OFF
      -DCISST_cisstMultiTask=ON
      -DCISST_cisstNumerical=ON
      -DCISST_cisstOSAbstraction=ON
      -DCISST_cisstParameterTypes=ON
      -DCISST_cisstRobot=ON
      -DCISST_cisstStereoVision=OFF
      -DCISST_cisstVector=ON
      -DFORCE_CISSTNETLIB_CONFIG=ON
      -DCisstNetlib_DIR=#{Formula["cisstnetlib"].opt_prefix}/cmake
    ]
    if build.with? "debug"
      cmake_args << "-DCMAKE_BUILD_TYPE=Debug"
    else
      cmake_args << "-DCMAKE_BUILD_TYPE=Release"
    end
    system "cmake", ".", *cmake_args
    system "make", "install"
  end

  test do
    # NOTE(review): `system "false"` always fails, so `brew test cisst`
    # cannot pass as written; replace with a real smoke test.
    system "false"
  end
end
cisst.rb: version number corrected to 1.0.5 to match the tagged release.
# Documentation: https://github.com/Homebrew/homebrew/blob/master/share/doc/homebrew/Formula-Cookbook.md
# /usr/local/Library/Contributions/example-formula.rb
# PLEASE REMOVE ALL GENERATED COMMENTS BEFORE SUBMITTING YOUR PULL REQUEST!
# Homebrew formula for the cisst libraries (JHU): robotics, stereo
# vision, intraoperative imaging and related infrastructure.
class Cisst < Formula
  desc "cisst combines robotics, stereo vision, intraoperative imaging, and related infrastructure"
  homepage "https://github.com/jhu-cisst/cisst"
  url "https://github.com/jhu-cisst/cisst.git", tag: "1.0.5"
  head "https://github.com/jhu-cisst/cisst.git", branch: "devel"
  version "1.0.5"
  #sha256 ""

  option "with-debug", "build library with debug symbols enabled"

  depends_on "cmake" => :build
  depends_on "cisstnetlib" => :recommended
  depends_on "libxml2" => :recommended
  depends_on "opencv" => :optional
  depends_on "qt" => :optional
  depends_on "qt5" => :optional
  depends_on "fltk" => :optional

  def install
    # Component/feature flags forwarded to the upstream CMake build.
    cmake_args = std_cmake_args + %W[
      -DCISST_BUILD_EXAMPLES=OFF
      -DCISST_BUILD_SHARED_LIBS=OFF
      -DCISST_BUILD_TESTS=OFF
      -DCISST_HAS_CISSTNETLIB=ON
      -DCISST_HAS_IOS=OFF
      -DCISST_HAS_JSON=OFF
      -DCISST_HAS_QT4=OFF
      -DCISST_MTS_HAS_ICE=OFF
      -DCISST_USE_EXTERNAL=OFF
      -DCISST_XML_LIB=LibXml2
      -DCISST_cisst3DUserInterface=OFF
      -DCISST_cisstCommon=ON
      -DCISST_cisstCommonXML=ON
      -DCISST_cisstInteractive=OFF
      -DCISST_cisstMultiTask=ON
      -DCISST_cisstNumerical=ON
      -DCISST_cisstOSAbstraction=ON
      -DCISST_cisstParameterTypes=ON
      -DCISST_cisstRobot=ON
      -DCISST_cisstStereoVision=OFF
      -DCISST_cisstVector=ON
      -DFORCE_CISSTNETLIB_CONFIG=ON
      -DCisstNetlib_DIR=#{Formula["cisstnetlib"].opt_prefix}/cmake
    ]
    build_type = build.with?("debug") ? "Debug" : "Release"
    cmake_args << "-DCMAKE_BUILD_TYPE=#{build_type}"
    system "cmake", ".", *cmake_args
    system "make", "install"
  end

  test do
    # NOTE(review): `system "false"` always fails; replace with a real
    # smoke test before submitting.
    system "false"
  end
end
|
First version. Simple count function to use within Puppet DSL.
Signed-off-by: Krzysztof Wilczynski <9bf091559fc98493329f7d619638c79e91ccf029@linux.com>
#
# count.rb
#
# TODO(Krzysztof Wilczynski): We need to add support for regular expression ...
module Puppet::Parser::Functions
  # Bug fix / docs: the :doc heredoc was empty, so `puppet doc` showed
  # nothing for this function; the contract is now documented.
  newfunction(:count, :type => :rvalue, :doc => <<-EOS
Counts elements in an array. With a single argument, returns the number
of elements in the array. With a second argument, returns the number of
elements equal to that value.

For example:

  count(['a', 'b', 'a'])       # => 3
  count(['a', 'b', 'a'], 'a')  # => 2
    EOS
  ) do |arguments|
    # Technically we support two arguments but only first is mandatory ...
    raise(Puppet::ParseError, "count(): Wrong number of arguments " +
      "given (#{arguments.size} for 1)") if arguments.size < 1

    array = arguments[0]

    unless array.is_a?(Array)
      raise(Puppet::ParseError, 'count(): Requires an array to work with')
    end

    # Optional second argument: the value whose occurrences to count.
    item = arguments[1] if arguments[1]

    result = item ? array.count(item) : array.count
    return result
  end
end
# vim: set ts=2 sw=2 et :
|
require 'colored'
require 'ostruct'
describe Pilot::TesterManager do
  describe "Manages adding/removing/displaying testers" do
    let(:tester_manager) { Pilot::TesterManager.new }
    # Testers as returned by the global (account-wide) listing, which
    # carries device/version/install metadata.
    let(:global_testers) do
      [
        OpenStruct.new(
          first_name: 'First',
          last_name: 'Last',
          email: 'my@email.addr',
          groups: ['testers'],
          devices: ["d"],
          full_version: '1.0 (21)',
          pretty_install_date: '2016-01-01',
          something_else: 'blah'
        ),
        OpenStruct.new(
          first_name: 'Fabricio',
          last_name: 'Devtoolio',
          email: 'fabric-devtools@gmail.com',
          groups: ['testers'],
          devices: ["d", "d2"],
          full_version: '1.1 (22)',
          pretty_install_date: '2016-02-02',
          something_else: 'blah'
        )
      ]
    end
    # Testers as returned in an app context (no device/version info).
    let(:app_context_testers) do
      [
        OpenStruct.new(
          first_name: 'First',
          last_name: 'Last',
          email: 'my@email.addr',
          something_else: 'blah'
        ),
        OpenStruct.new(
          first_name: 'Fabricio',
          last_name: 'Devtoolio',
          email: 'fabric-devtools@gmail.com',
          something_else: 'blah'
        )
      ]
    end
    let(:custom_tester_group) do
      OpenStruct.new(
        id: "CustomID",
        name: "Test Group",
        is_internal_group: false,
        app_id: "com.whatever",
        is_default_external_group: false
      )
    end
    let(:current_user) do
      Spaceship::Tunes::Member.new({ "firstname" => "Josh",
                                     "lastname" => "Liebowitz",
                                     "email_address" => "taquitos+nospam@gmail.com" })
    end
    let(:fake_tester) do
      OpenStruct.new(
        first_name: 'fake',
        last_name: 'tester',
        email: 'fabric-devtools@gmail.com+fake@gmail.com'
      )
    end
    let(:default_add_tester_options) do
      FastlaneCore::Configuration.create(Pilot::Options.available_options, {
        apple_id: 'com.whatever',
        email: fake_tester.email,
        first_name: fake_tester.first_name,
        last_name: fake_tester.last_name
      })
    end
    let(:remove_tester_options) do
      FastlaneCore::Configuration.create(Pilot::Options.available_options, {
        email: fake_tester.email,
        first_name: fake_tester.first_name,
        last_name: fake_tester.last_name
      })
    end
    let(:default_add_tester_options_with_group) do
      FastlaneCore::Configuration.create(Pilot::Options.available_options, {
        apple_id: 'com.whatever',
        email: fake_tester.email,
        first_name: fake_tester.first_name,
        last_name: fake_tester.last_name,
        groups: ["Test Group"]
      })
    end
    let(:fake_app) { "fake_app_object" }
    let(:fake_client) { "fake client" }
    before(:each) do
      allow(fake_app).to receive(:apple_id).and_return("com.whatever")
      allow(fake_app).to receive(:name).and_return("My Fake App")
      allow(Spaceship::Application).to receive(:find).and_return(fake_app)
      allow(Spaceship::Tunes).to receive(:client).and_return(fake_client)
      allow(Spaceship::Members).to receive(:find).and_return(current_user)
      allow(tester_manager).to receive(:login) # prevent attempting to log in with iTC
      allow(fake_client).to receive(:user).and_return(current_user)
    end
    describe "when invoked from a global context" do
      it "prints a table with columns including device and version info" do
        allow(Spaceship::Tunes::Tester::Internal).to receive(:all).and_return(global_testers)
        allow(Spaceship::Tunes::Tester::External).to receive(:all).and_return(global_testers)
        headings = ["First", "Last", "Email", "Groups", "Devices", "Latest Version", "Latest Install Date"]
        rows = global_testers.map do |tester|
          [
            tester.first_name,
            tester.last_name,
            tester.email,
            tester.group_names,
            tester.devices.count,
            tester.full_version,
            tester.pretty_install_date
          ]
        end
        expect(Terminal::Table).to receive(:new).with(title: "Internal Testers".green,
                                                      headings: headings,
                                                      rows: rows)
        expect(Terminal::Table).to receive(:new).with(title: "External Testers".green,
                                                      headings: headings,
                                                      rows: rows)
        tester_manager.list_testers({})
      end
    end
    describe "when invoked from the context of an app" do
      it "prints a table without columns showing device and version info" do
        allow(Spaceship::Tunes::Tester::Internal).to receive(:all_by_app).and_return(app_context_testers)
        allow(Spaceship::Tunes::Tester::External).to receive(:all_by_app).and_return(app_context_testers)
        headings = ["First", "Last", "Email", "Groups"]
        rows = app_context_testers.map do |tester|
          [tester.first_name, tester.last_name, tester.email, tester.group_names]
        end
        expect(Terminal::Table).to receive(:new).with(title: "Internal Testers".green,
                                                      headings: headings,
                                                      rows: rows)
        expect(Terminal::Table).to receive(:new).with(title: "External Testers".green,
                                                      headings: headings,
                                                      rows: rows)
        tester_manager.list_testers(app_identifier: 'com.whatever')
      end
    end
    describe "when app manager asks to create new tester in the default group" do
      it "creates a new tester and adds it to the default group" do
        allow(current_user).to receive(:roles).and_return(["appmanager"])
        allow(fake_client).to receive(:testers_by_app).and_return([])
        expect(Spaceship::TestFlight::Tester).to receive(:create_app_level_tester).with(
          app_id: 'com.whatever',
          first_name: 'fake',
          last_name: 'tester',
          email: 'fabric-devtools@gmail.com+fake@gmail.com'
        )
        expect(Spaceship::Tunes::Tester::External).to receive(:find_by_app).and_return(nil) # before creating, no testers
        expect(Spaceship::TestFlight::Group).to receive(:add_tester_to_groups!)
        expect(Spaceship::Tunes::Tester::External).to receive(:find_by_app).and_return(fake_tester) # after creating a tester, we return one
        expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester: fabric-devtools@gmail.com+fake@gmail.com to app: My Fake App')
        expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester to the default tester group in app: My Fake App')
        tester_manager.add_tester(default_add_tester_options)
      end
    end
    describe "when admin asks to create new tester in the default group" do
      it "creates a new tester and adds it to the default group" do
        allow(current_user).to receive(:roles).and_return(["admin"])
        allow(tester_manager).to receive(:find_app_tester).and_return(nil)
        expect(Spaceship::Tunes::Tester::External).to receive(:create!).with(
          email: 'fabric-devtools@gmail.com+fake@gmail.com',
          first_name: 'fake',
          last_name: 'tester'
        )
        expect(Spaceship::TestFlight::Group).to receive(:add_tester_to_groups!)
        expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester: fabric-devtools@gmail.com+fake@gmail.com to your account')
        expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester to the default tester group in app: My Fake App')
        tester_manager.add_tester(default_add_tester_options)
      end
    end
    describe "when admin asks to create new tester to a specific existing custom group" do
      it "creates a new tester and adds it to the default group" do
        allow(current_user).to receive(:roles).and_return(["admin"])
        allow(tester_manager).to receive(:find_app_tester).and_return(fake_tester)
        expect(Spaceship::Tunes::Tester::External).to_not receive(:create!)
        expect(Spaceship::TestFlight::Group).to receive(:add_tester_to_groups!).and_return([custom_tester_group])
        expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester to group(s): Test Group in app: My Fake App')
        tester_manager.add_tester(default_add_tester_options_with_group)
      end
    end
    describe "when asked to add an existing external tester to a specific existing custom group" do
      it "adds the tester to the custom group" do
        allow(current_user).to receive(:roles).and_return(["appmanager"])
        allow(tester_manager).to receive(:find_app_tester).and_return(fake_tester)
        expect(Spaceship::TestFlight::Tester).to_not receive(:create_app_level_tester)
        expect(Spaceship::TestFlight::Group).to receive(:add_tester_to_groups!).and_return([custom_tester_group])
        expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester to group(s): Test Group in app: My Fake App')
        tester_manager.add_tester(default_add_tester_options_with_group)
      end
    end
    describe "when asked to add an existing internal tester to a specific existing custom group" do
      it "adds the tester to the custom group" do
        allow(current_user).to receive(:roles).and_return(["appmanager"])
        allow(fake_tester).to receive(:kind_of?).with(Spaceship::Tunes::Tester::Internal).and_return(true)
        expect(Spaceship::Tunes::Tester::Internal).to receive(:find_by_app).and_return(fake_tester)
        expect(Spaceship::TestFlight::Tester).to_not receive(:create_app_level_tester)
        expect(Spaceship::TestFlight::Group).to receive(:add_tester_to_groups!).and_return([custom_tester_group])
        expect(FastlaneCore::UI).to receive(:success).with('Found existing tester fabric-devtools@gmail.com+fake@gmail.com')
        expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester to app My Fake App')
        tester_manager.add_tester(default_add_tester_options_with_group)
      end
    end
    describe "when app manager asks to add an existing external tester to a specific existing custom group" do
      it "adds the tester without calling create" do
        allow(current_user).to receive(:roles).and_return(["appmanager"])
        allow(tester_manager).to receive(:find_app_tester).and_return(fake_tester)
        expect(Spaceship::TestFlight::Tester).to_not receive(:create_app_level_tester)
        expect(Spaceship::TestFlight::Group).to receive(:add_tester_to_groups!).and_return([custom_tester_group])
        expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester to group(s): Test Group in app: My Fake App')
        tester_manager.add_tester(default_add_tester_options_with_group)
      end
    end
    describe "when external tester is removed without providing app" do
      it "removes the tester without error" do
        allow(current_user).to receive(:roles).and_return(["admin"])
        # Bug fix: the global `before` stubs Spaceship::Application.find to
        # return a fake app, so this "no app provided" scenario actually ran
        # with an app context and failed in CI. Override the stub with nil.
        allow(Spaceship::Application).to receive(:find).and_return(nil)
        expect(Spaceship::Tunes::Tester::External).to receive(:find).and_return(fake_tester) # before creating, no testers
        expect(Spaceship::TestFlight::Group).to_not receive(:remove_tester_from_groups!)
        expect(FastlaneCore::UI).to receive(:success).with('Successfully removed tester fabric-devtools@gmail.com+fake@gmail.com from Users and Roles')
        tester_manager.remove_tester(remove_tester_options)
      end
    end
  end
end
Fix test failing in CI by stubbing Spaceship::Application.find to return nil in the no-app removal spec.
require 'colored'
require 'ostruct'
describe Pilot::TesterManager do
describe "Manages adding/removing/displaying testers" do
let(:tester_manager) { Pilot::TesterManager.new }
let(:global_testers) do
[
OpenStruct.new(
first_name: 'First',
last_name: 'Last',
email: 'my@email.addr',
groups: ['testers'],
devices: ["d"],
full_version: '1.0 (21)',
pretty_install_date: '2016-01-01',
something_else: 'blah'
),
OpenStruct.new(
first_name: 'Fabricio',
last_name: 'Devtoolio',
email: 'fabric-devtools@gmail.com',
groups: ['testers'],
devices: ["d", "d2"],
full_version: '1.1 (22)',
pretty_install_date: '2016-02-02',
something_else: 'blah'
)
]
end
let(:app_context_testers) do
[
OpenStruct.new(
first_name: 'First',
last_name: 'Last',
email: 'my@email.addr',
something_else: 'blah'
),
OpenStruct.new(
first_name: 'Fabricio',
last_name: 'Devtoolio',
email: 'fabric-devtools@gmail.com',
something_else: 'blah'
)
]
end
let(:custom_tester_group) do
OpenStruct.new(
id: "CustomID",
name: "Test Group",
is_internal_group: false,
app_id: "com.whatever",
is_default_external_group: false
)
end
let(:current_user) do
Spaceship::Tunes::Member.new({ "firstname" => "Josh",
"lastname" => "Liebowitz",
"email_address" => "taquitos+nospam@gmail.com" })
end
let(:fake_tester) do
OpenStruct.new(
first_name: 'fake',
last_name: 'tester',
email: 'fabric-devtools@gmail.com+fake@gmail.com'
)
end
let(:default_add_tester_options) do
FastlaneCore::Configuration.create(Pilot::Options.available_options, {
apple_id: 'com.whatever',
email: fake_tester.email,
first_name: fake_tester.first_name,
last_name: fake_tester.last_name
})
end
let(:remove_tester_options) do
FastlaneCore::Configuration.create(Pilot::Options.available_options, {
email: fake_tester.email,
first_name: fake_tester.first_name,
last_name: fake_tester.last_name
})
end
let(:default_add_tester_options_with_group) do
FastlaneCore::Configuration.create(Pilot::Options.available_options, {
apple_id: 'com.whatever',
email: fake_tester.email,
first_name: fake_tester.first_name,
last_name: fake_tester.last_name,
groups: ["Test Group"]
})
end
let(:fake_app) { "fake_app_object" }
let(:fake_client) { "fake client" }
before(:each) do
allow(fake_app).to receive(:apple_id).and_return("com.whatever")
allow(fake_app).to receive(:name).and_return("My Fake App")
allow(Spaceship::Application).to receive(:find).and_return(fake_app)
allow(Spaceship::Tunes).to receive(:client).and_return(fake_client)
allow(Spaceship::Members).to receive(:find).and_return(current_user)
allow(tester_manager).to receive(:login) # prevent attempting to log in with iTC
allow(fake_client).to receive(:user).and_return(current_user)
end
describe "when invoked from a global context" do
it "prints a table with columns including device and version info" do
allow(Spaceship::Tunes::Tester::Internal).to receive(:all).and_return(global_testers)
allow(Spaceship::Tunes::Tester::External).to receive(:all).and_return(global_testers)
headings = ["First", "Last", "Email", "Groups", "Devices", "Latest Version", "Latest Install Date"]
rows = global_testers.map do |tester|
[
tester.first_name,
tester.last_name,
tester.email,
tester.group_names,
tester.devices.count,
tester.full_version,
tester.pretty_install_date
]
end
expect(Terminal::Table).to receive(:new).with(title: "Internal Testers".green,
headings: headings,
rows: rows)
expect(Terminal::Table).to receive(:new).with(title: "External Testers".green,
headings: headings,
rows: rows)
tester_manager.list_testers({})
end
end
describe "when invoked from the context of an app" do
it "prints a table without columns showing device and version info" do
allow(Spaceship::Tunes::Tester::Internal).to receive(:all_by_app).and_return(app_context_testers)
allow(Spaceship::Tunes::Tester::External).to receive(:all_by_app).and_return(app_context_testers)
headings = ["First", "Last", "Email", "Groups"]
rows = app_context_testers.map do |tester|
[tester.first_name, tester.last_name, tester.email, tester.group_names]
end
expect(Terminal::Table).to receive(:new).with(title: "Internal Testers".green,
headings: headings,
rows: rows)
expect(Terminal::Table).to receive(:new).with(title: "External Testers".green,
headings: headings,
rows: rows)
tester_manager.list_testers(app_identifier: 'com.whatever')
end
end
describe "when app manager asks to create new tester in the default group" do
it "creates a new tester and adds it to the default group" do
allow(current_user).to receive(:roles).and_return(["appmanager"])
allow(fake_client).to receive(:testers_by_app).and_return([])
expect(Spaceship::TestFlight::Tester).to receive(:create_app_level_tester).with(
app_id: 'com.whatever',
first_name: 'fake',
last_name: 'tester',
email: 'fabric-devtools@gmail.com+fake@gmail.com'
)
expect(Spaceship::Tunes::Tester::External).to receive(:find_by_app).and_return(nil) # before creating, no testers
expect(Spaceship::TestFlight::Group).to receive(:add_tester_to_groups!)
expect(Spaceship::Tunes::Tester::External).to receive(:find_by_app).and_return(fake_tester) # after creating a tester, we return one
expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester: fabric-devtools@gmail.com+fake@gmail.com to app: My Fake App')
expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester to the default tester group in app: My Fake App')
tester_manager.add_tester(default_add_tester_options)
end
end
describe "when admin asks to create new tester in the default group" do
it "creates a new tester and adds it to the default group" do
allow(current_user).to receive(:roles).and_return(["admin"])
allow(tester_manager).to receive(:find_app_tester).and_return(nil)
expect(Spaceship::Tunes::Tester::External).to receive(:create!).with(
email: 'fabric-devtools@gmail.com+fake@gmail.com',
first_name: 'fake',
last_name: 'tester'
)
expect(Spaceship::TestFlight::Group).to receive(:add_tester_to_groups!)
expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester: fabric-devtools@gmail.com+fake@gmail.com to your account')
expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester to the default tester group in app: My Fake App')
tester_manager.add_tester(default_add_tester_options)
end
end
describe "when admin asks to create new tester to a specific existing custom group" do
it "creates a new tester and adds it to the default group" do
allow(current_user).to receive(:roles).and_return(["admin"])
allow(tester_manager).to receive(:find_app_tester).and_return(fake_tester)
expect(Spaceship::Tunes::Tester::External).to_not receive(:create!)
expect(Spaceship::TestFlight::Group).to receive(:add_tester_to_groups!).and_return([custom_tester_group])
expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester to group(s): Test Group in app: My Fake App')
tester_manager.add_tester(default_add_tester_options_with_group)
end
end
# App-manager role + existing external tester: no app-level create, just
# group membership for the requested custom group.
describe "when asked to add an existing external tester to a specific existing custom group" do
it "adds the tester to the custom group" do
allow(current_user).to receive(:roles).and_return(["appmanager"])
allow(tester_manager).to receive(:find_app_tester).and_return(fake_tester)
expect(Spaceship::TestFlight::Tester).to_not receive(:create_app_level_tester)
expect(Spaceship::TestFlight::Group).to receive(:add_tester_to_groups!).and_return([custom_tester_group])
expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester to group(s): Test Group in app: My Fake App')
tester_manager.add_tester(default_add_tester_options_with_group)
end
end
# Internal (iTC team) testers take a different path: looked up via
# Internal.find_by_app instead of the external-tester flow.
describe "when asked to add an existing internal tester to a specific existing custom group" do
it "adds the tester to the custom group" do
allow(current_user).to receive(:roles).and_return(["appmanager"])
# Make the fake tester report itself as an Internal tester.
allow(fake_tester).to receive(:kind_of?).with(Spaceship::Tunes::Tester::Internal).and_return(true)
expect(Spaceship::Tunes::Tester::Internal).to receive(:find_by_app).and_return(fake_tester)
expect(Spaceship::TestFlight::Tester).to_not receive(:create_app_level_tester)
expect(Spaceship::TestFlight::Group).to receive(:add_tester_to_groups!).and_return([custom_tester_group])
expect(FastlaneCore::UI).to receive(:success).with('Found existing tester fabric-devtools@gmail.com+fake@gmail.com')
expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester to app My Fake App')
tester_manager.add_tester(default_add_tester_options_with_group)
end
end
# Same as the admin case above, but with the app-manager role: an
# existing tester must never trigger create_app_level_tester.
describe "when app manager asks to add an existing external tester to a specific existing custom group" do
it "adds the tester without calling create" do
allow(current_user).to receive(:roles).and_return(["appmanager"])
allow(tester_manager).to receive(:find_app_tester).and_return(fake_tester)
expect(Spaceship::TestFlight::Tester).to_not receive(:create_app_level_tester)
expect(Spaceship::TestFlight::Group).to receive(:add_tester_to_groups!).and_return([custom_tester_group])
expect(FastlaneCore::UI).to receive(:success).with('Successfully added tester to group(s): Test Group in app: My Fake App')
tester_manager.add_tester(default_add_tester_options_with_group)
end
end
# Removal without an app: the tester is deleted account-wide (Users and
# Roles) and no per-group removal should be attempted.
describe "when external tester is removed without providing app" do
it "removes the tester without error" do
allow(current_user).to receive(:roles).and_return(["admin"])
# No app can be resolved, so group-level removal must be skipped.
allow(Spaceship::Application).to receive(:find).and_return(nil)
expect(Spaceship::Tunes::Tester::External).to receive(:find).and_return(fake_tester) # before creating, no testers
expect(Spaceship::TestFlight::Group).to_not receive(:remove_tester_from_groups!)
expect(FastlaneCore::UI).to receive(:success).with('Successfully removed tester fabric-devtools@gmail.com+fake@gmail.com from Users and Roles')
tester_manager.remove_tester(remove_tester_options)
end
end
end
end
|
# Gemspec for synapse_pay_rest — native Ruby client for the SynapsePay v3 REST API.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'synapse_pay_rest/version'
Gem::Specification.new do |s|
s.name = 'synapse_pay_rest'
s.version = SynapsePayRest::VERSION
# NOTE(review): a dynamic date makes builds non-reproducible, and Date is
# assumed to already be loaded (no `require 'date'` here) — confirm.
s.date = Date.today.to_s
s.authors = ['Steven Broderick', 'Thomas Hipps']
s.email = 'help@synapsepay.com'
s.summary = 'SynapsePay v3 Rest Native API Library'
s.homepage = 'https://rubygems.org/gems/synapse_pay_rest'
s.license = 'MIT'
# Package all git-tracked files except tests/specs/features.
s.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
s.bindir = 'exe'
s.executables = s.files.grep(%r{^exe/}) { |f| File.basename(f) }
s.require_paths = ['lib']
s.add_dependency 'rest-client', '~> 2.0'
s.add_development_dependency 'bundler', '~> 1.10'
s.add_development_dependency 'rake', '~> 10.0'
s.add_development_dependency 'minitest', '~> 5.8.2'
s.add_development_dependency 'minitest-reporters', '~> 1.1.5'
s.add_development_dependency 'dotenv', '~> 2.1.1'
s.add_development_dependency 'faker', '~> 1.6.6'
s.add_development_dependency 'simplecov', '~> 0.12.0'
end
Add `m` as a development dependency
No need to have others install this separately. It's absolutely vital to
have this available by default, with no extra setup, given the time it
takes to run tests. This speeds up setup and should make it easier for
others to contribute.
# Gemspec for synapse_pay_rest — native Ruby client for the SynapsePay v3 REST API.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'synapse_pay_rest/version'
Gem::Specification.new do |s|
s.name = 'synapse_pay_rest'
s.version = SynapsePayRest::VERSION
# NOTE(review): a dynamic date makes builds non-reproducible, and Date is
# assumed to already be loaded (no `require 'date'` here) — confirm.
s.date = Date.today.to_s
s.authors = ['Steven Broderick', 'Thomas Hipps']
s.email = 'help@synapsepay.com'
s.summary = 'SynapsePay v3 Rest Native API Library'
s.homepage = 'https://rubygems.org/gems/synapse_pay_rest'
s.license = 'MIT'
# Package all git-tracked files except tests/specs/features.
s.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
s.bindir = 'exe'
s.executables = s.files.grep(%r{^exe/}) { |f| File.basename(f) }
s.require_paths = ['lib']
s.add_dependency 'rest-client', '~> 2.0'
s.add_development_dependency 'bundler', '~> 1.10'
s.add_development_dependency 'rake', '~> 10.0'
s.add_development_dependency 'minitest', '~> 5.8.2'
s.add_development_dependency 'minitest-reporters', '~> 1.1.5'
s.add_development_dependency 'dotenv', '~> 2.1.1'
s.add_development_dependency 'faker', '~> 1.6.6'
s.add_development_dependency 'simplecov', '~> 0.12.0'
# `m` lets contributors run a single minitest test by line number.
s.add_development_dependency 'm', '~> 1.5.0'
end
|
Added a debugging proxy server program.
#!/usr/bin/env ruby
# Local debugging proxy: listens on 127.0.0.1:8080 and forwards HTTP
# traffic through the upstream proxy at api.treasure-data.com:80,
# writing verbose debug logs to log.txt.
require 'webrick'
require 'webrick/httpproxy'
require 'uri'
s = WEBrick::HTTPProxyServer.new({
:BindAddress => '127.0.0.1',
:Port => 8080,
:Logger => WEBrick::Log::new("log.txt", WEBrick::Log::DEBUG),
# Suppress the Via response header added by the proxy.
:ProxyVia => false,
# Upstream proxy to chain requests through.
:ProxyURI => URI.parse('http://api.treasure-data.com:80/')
})
# Shut down cleanly on Ctrl-C; s.start blocks until then.
Signal.trap('INT') do
s.shutdown
end
s.start
|
require 'openssl'
require 'securerandom'

# Symmetric AES-256-CBC encryption helper with PBKDF2-HMAC-SHA512
# key derivation. All entry points take a single options hash.
class Crypt
  # Encrypts opts[:value]; see .crypt for the required options.
  def self.encrypt(opts = {})
    crypt(:encrypt, opts)
  end

  # Decrypts opts[:value]; see .crypt for the required options.
  def self.decrypt(opts = {})
    crypt(:decrypt, opts)
  end

  # Returns a fresh random IV sized for AES-256-CBC (16 bytes).
  def self.new_iv
    new_cipher.random_iv
  end

  # Returns a random UUID string to be used as a PBKDF2 salt.
  def self.new_salt
    SecureRandom.uuid
  end

  private

  # NOTE(review): `private` has no effect on `def self.` singleton
  # methods, so the helpers below are technically still public.

  # Builds a new AES-256-CBC cipher instance.
  def self.new_cipher
    OpenSSL::Cipher.new('aes-256-cbc')
  end

  # Runs the cipher in the given direction (:encrypt or :decrypt).
  # Required opts: :value, :password, :salt, :iv, :stretch.
  # Raises ArgumentError when opts is nil/empty or any key is missing.
  def self.crypt(direction, opts)
    required = [:value, :password, :salt, :iv, :stretch]
    invalid = opts.nil? || opts.empty? || required.any? { |key| opts[key].nil? }
    raise ArgumentError if invalid

    cipher = new_cipher
    cipher.send(direction)
    cipher.key = crypt_key(opts)
    cipher.iv = opts[:iv]
    output = cipher.update(opts[:value])
    output << cipher.final
  end

  # Derives a 256-bit key from the password via PBKDF2-HMAC-SHA512.
  def self.crypt_key(opts)
    OpenSSL::PKCS5.pbkdf2_hmac(
      opts[:password],
      opts[:salt],
      opts[:stretch],                # iteration count
      32,                            # key length in bytes (256 bits)
      OpenSSL::Digest::SHA512.new
    )
  end
end
Adding more parameter validation for Crypt.
require 'openssl'
require 'securerandom'

# Symmetric AES-256-CBC encryption helper with PBKDF2-HMAC-SHA512
# key derivation. All entry points take a single options hash.
class Crypt
  # Encrypts opts[:value]. See .crypt for required options.
  def self.encrypt(opts = {})
    crypt :encrypt, opts
  end

  # Decrypts opts[:value]. See .crypt for required options.
  def self.decrypt(opts = {})
    crypt :decrypt, opts
  end

  # Returns a fresh random IV sized for AES-256-CBC (16 bytes).
  def self.new_iv
    new_cipher.random_iv
  end

  # Returns a random UUID string suitable as a PBKDF2 salt.
  def self.new_salt
    SecureRandom.uuid
  end

  private

  # NOTE(review): `private` does not affect `def self.` singleton methods;
  # the helpers below remain callable from outside. Use
  # `private_class_method` if true privacy is wanted.

  # Builds a new AES-256-CBC cipher (the deprecated OpenSSL::Cipher::AES
  # constant has been replaced with the equivalent name-based lookup).
  def self.new_cipher
    OpenSSL::Cipher.new('aes-256-cbc')
  end

  # Validates options and runs the cipher in the given direction.
  #
  # Required opts:
  #   :value    - data to encrypt/decrypt (non-nil)
  #   :password - non-empty String
  #   :salt     - non-empty String
  #   :iv       - non-empty String (use Crypt.new_iv)
  #   :stretch  - positive Integer PBKDF2 iteration count
  #
  # Raises ArgumentError when any option is missing or invalid.
  def self.crypt(method, opts)
    # Bug fix: the last clause previously read
    # `opts[:stretch].nil? || opts[:stretch].nil?` (a duplicated-condition
    # typo); stretch is now validated as a positive Integer, which PBKDF2
    # requires.
    raise ArgumentError if
      opts.nil? || opts.empty? || opts[:value].nil? ||
      opts[:password].nil? || opts[:password].empty? ||
      opts[:salt].nil? || opts[:salt].empty? ||
      opts[:iv].nil? || opts[:iv].empty? ||
      !opts[:stretch].is_a?(Integer) || opts[:stretch] < 1
    cipher = new_cipher
    cipher.send(method)
    cipher.key = crypt_key(opts)
    cipher.iv = opts[:iv]
    result = cipher.update(opts[:value])
    result << cipher.final
    result
  end

  # Derives a 256-bit key from the password via PBKDF2-HMAC-SHA512.
  def self.crypt_key(opts)
    password = opts[:password]
    salt = opts[:salt]
    iterations = opts[:stretch]
    key_length = 32 # 256 bits
    hash = OpenSSL::Digest::SHA512.new
    OpenSSL::PKCS5.pbkdf2_hmac(password, salt, iterations, key_length, hash)
  end
end
|
# Gemspec for leveldb-native — a Ruby binding to Google's LevelDB,
# built as a C extension (see ext/**/extconf.rb).
require 'leveldb-native/version'
Gem::Specification.new do |s|
s.name = "leveldb-native"
s.version = LevelDBNative::VERSION
s.required_rubygems_version = Gem::Requirement.new(">= 0")
s.authors = ["Joel VanderWerf"]
# NOTE(review): dynamic dates make gem builds non-reproducible — confirm intended.
s.date = Time.now.strftime "%Y-%m-%d"
s.description = "Ruby binding to LevelDB."
s.email = "vjoel@users.sourceforge.net"
s.extra_rdoc_files = ["README.md", "LICENSE"]
s.files = Dir[
"README.md", "LICENSE", "Rakefile",
"lib/**/*.rb",
"ext/**/*.{rb,c,cc,h}",
"bin/**/*.rb",
"bench/**/*.rb",
"example/**/*.rb",
"test/**/*.rb"
]
s.bindir = 'bin'
s.extensions = Dir["ext/**/extconf.rb"]
s.test_files = Dir["test/*.rb"]
s.homepage = "https://github.com/vjoel/ruby-leveldb-native"
s.license = "MIT"
s.rdoc_options = [
"--quiet", "--line-numbers", "--inline-source",
"--title", "LevelDB Native", "--main", "README.md"]
s.require_paths = ["lib"]
s.summary = "Ruby binding to LevelDB."
end
force `gem rdoc` to see the ext dir
# Gemspec for leveldb-native — a Ruby binding to Google's LevelDB,
# built as a C extension (see ext/**/extconf.rb).
require 'leveldb-native/version'
Gem::Specification.new do |s|
s.name = "leveldb-native"
s.version = LevelDBNative::VERSION
s.required_rubygems_version = Gem::Requirement.new(">= 0")
s.authors = ["Joel VanderWerf"]
# NOTE(review): dynamic dates make gem builds non-reproducible — confirm intended.
s.date = Time.now.strftime "%Y-%m-%d"
s.description = "Ruby binding to LevelDB."
s.email = "vjoel@users.sourceforge.net"
# ext/**/*.cc included so `gem rdoc` documents the C++ extension sources.
s.extra_rdoc_files = Dir["README.md", "LICENSE", "ext/**/*.cc"]
s.files = Dir[
"README.md", "LICENSE", "Rakefile",
"lib/**/*.rb",
"ext/**/*.{rb,c,cc,h}",
"bin/**/*.rb",
"bench/**/*.rb",
"example/**/*.rb",
"test/**/*.rb"
]
s.bindir = 'bin'
s.extensions = Dir["ext/**/extconf.rb"]
s.test_files = Dir["test/*.rb"]
s.homepage = "https://github.com/vjoel/ruby-leveldb-native"
s.license = "MIT"
s.rdoc_options = [
"--quiet", "--line-numbers", "--inline-source",
"--title", "LevelDB Native", "--main", "README.md"]
s.require_paths = ["lib"]
s.summary = "Ruby binding to LevelDB."
end
|
require_relative 'unit'

module DataAbstraction
  # A geographic position (latitude, longitude, elevation) expressed
  # against a geodetic datum, with conversion helpers between datums.
  class Location
    include ::DataAbstraction::Unit

    # Canonical datum used for inter-datum conversions.
    STANDARD_DATUM = "WGS84"

    # [canonical-key, display-alias] pairs; aliases index the canonical key.
    DATUMS = [
      [ "wgs84", "WGS84" ],
      [ "jgd2000", "JGD2000" ],
      [ "jgd2011", "JGD2011" ],
      [ "tokyo", "Tokyo Datum" ]
    ]

    # Builds a lookup table mapping every alias to its canonical key.
    def self.datum_table(a)
      h = Hash.new
      a.each do |ent|
        val = ent[0]
        ent[1..-1].each { |e| h[e] = val }
      end
      h
    end
    @@datum_table = datum_table(DATUMS)

    # values: Hash accepting one of three encodings:
    #   'values'   => [lat, lon(, elev)]
    #   'location' => [lat, lon(, elev)]
    #   'latitude' / 'longitude' / 'elevation' scalar keys
    # Optional: 'unit' (default "degree"), 'dimension_unit' (default "m"),
    # 'datum' (default "WGS84").
    def initialize(values)
      @dimension_unit = values['dimension_unit'] ? values['dimension_unit'] : "m"
      @unit = values['unit'] ? values['unit'] : "degree"
      # Bug fix: a stray trailing comma after this assignment previously
      # turned it into a multiple assignment, leaving @datum as an Array.
      @datum = values['datum'] ? values['datum'] : 'WGS84'
      @values = Array.new
      if values['values']
        assign_from_array(values['values'], values['elevation'])
      elsif values['location']
        assign_from_array(values['location'], values['elevation'])
      else
        @values[0] = LocationValue.new(values['latitude'].to_f, @unit)
        @values[1] = LocationValue.new(values['longitude'].to_f, @unit)
        @values[2] = DimensionValue.new(values['elevation'].to_f, @dimension_unit)
      end
    end

    # Fills @values from a [lat, lon(, elev)] array; an explicit elevation
    # argument wins over a third array element, defaulting to 0.0.
    # Bug fix: the original read @dimansion_unit (a typo, always nil)
    # everywhere a DimensionValue was built.
    def assign_from_array(coords, elevation)
      @values[0] = LocationValue.new(coords[0].to_f, @unit)
      @values[1] = LocationValue.new(coords[1].to_f, @unit)
      elev =
        if elevation
          elevation.to_f
        elsif coords.size == 3
          coords[2].to_f
        else
          0.0
        end
      @values[2] = DimensionValue.new(elev, @dimension_unit)
    end
    private :assign_from_array

    # Returns [lat, lon] for dim == 2, otherwise [lat, lon, elevation].
    def location(dim = 2)
      dim == 2 ? [ @values[0], @values[1] ] : [ @values[0], @values[1], @values[2] ]
    end

    attr_reader :values, :datum, :unit

    # Alias kept for interface compatibility with other Unit classes.
    def value
      @values
    end

    def elevation
      @values[2]
    end

    def longitude
      @values[1]
    end

    def latitude
      @values[0]
    end

    def standard_datum
      STANDARD_DATUM
    end

    # Bug fix: the class-level accessor previously delegated to the
    # *instance* method standard_datum, raising NoMethodError when called.
    def self.standard_unit
      STANDARD_DATUM
    end

    def standard_unit
      standard_datum
    end

    def self.datums
      DATUMS
    end

    def self.units
      datums
    end

    # Converts this location to the standard datum (WGS84).
    #
    # NOTE(review): @latitude/@longitude/@elevation are never assigned
    # (coordinates live in @values); as written this branch raises on nil.
    # Presumably the latitude/longitude/elevation readers were intended —
    # confirm against LocationValue arithmetic before changing.
    def to_standard
      case @@datum_table[@datum]
      when "tokyo"
        Location.new('latitude' => @latitude - 0.00010695 * @latitude + 0.000017464 * @longitude + 0.0046017,
                     'longitude' => @longitude - 0.000046038 * @longitude - 0.000083043 * @latitude + 0.010040,
                     'elevation' => @elevation,
                     'datum' => STANDARD_DATUM)
      else
        self
      end
    end

    # Converts to the requested datum, going through the standard datum.
    # Bug fixes: the default referenced the undefined STANDARD_UNIT
    # constant (NameError), and the result hash used the misspelled key
    # 'elevatuon', silently dropping the elevation.
    def to_requested(datum = STANDARD_DATUM)
      if datum != @datum
        standard = self.to_standard
        case @@datum_table[datum]
        when "tokyo"
          Location.new('latitude' => @latitude + @latitude * 0.00010696 - @longitude * 0.000017467 - 0.0046020,
                       'longitude' => @longitude + @longitude * 0.000046047 + @latitude * 0.000083049 - 0.010041,
                       'elevation' => @elevation,
                       'datum' => datum)
        when "wgs84"
          standard
        else
          nil
        end
      else
        self
      end
    end
  end
end
Bug fix: remove a stray trailing comma in Location#initialize that turned the @datum assignment into a multiple assignment, leaving @datum set to an Array.
require_relative 'unit'

module DataAbstraction
  # A geographic position (latitude, longitude, elevation) expressed
  # against a geodetic datum, with conversion helpers between datums.
  class Location
    include ::DataAbstraction::Unit

    # Canonical datum used for inter-datum conversions.
    STANDARD_DATUM = "WGS84"

    # [canonical-key, display-alias] pairs; aliases index the canonical key.
    DATUMS = [
      [ "wgs84", "WGS84" ],
      [ "jgd2000", "JGD2000" ],
      [ "jgd2011", "JGD2011" ],
      [ "tokyo", "Tokyo Datum" ]
    ]

    # Builds a lookup table mapping every alias to its canonical key.
    def self.datum_table(a)
      h = Hash.new
      a.each do |ent|
        val = ent[0]
        ent[1..-1].each { |e| h[e] = val }
      end
      h
    end
    @@datum_table = datum_table(DATUMS)

    # values: Hash accepting one of three encodings:
    #   'values'   => [lat, lon(, elev)]
    #   'location' => [lat, lon(, elev)]
    #   'latitude' / 'longitude' / 'elevation' scalar keys
    # Optional: 'unit' (default "degree"), 'dimension_unit' (default "m"),
    # 'datum' (default "WGS84").
    def initialize(values)
      @dimension_unit = values['dimension_unit'] ? values['dimension_unit'] : "m"
      @unit = values['unit'] ? values['unit'] : "degree"
      @datum = values['datum'] ? values['datum'] : 'WGS84'
      @values = Array.new
      if values['values']
        assign_from_array(values['values'], values['elevation'])
      elsif values['location']
        assign_from_array(values['location'], values['elevation'])
      else
        @values[0] = LocationValue.new(values['latitude'].to_f, @unit)
        @values[1] = LocationValue.new(values['longitude'].to_f, @unit)
        @values[2] = DimensionValue.new(values['elevation'].to_f, @dimension_unit)
      end
    end

    # Fills @values from a [lat, lon(, elev)] array; an explicit elevation
    # argument wins over a third array element, defaulting to 0.0.
    # Bug fix: the original read @dimansion_unit (a typo, always nil)
    # everywhere a DimensionValue was built.
    def assign_from_array(coords, elevation)
      @values[0] = LocationValue.new(coords[0].to_f, @unit)
      @values[1] = LocationValue.new(coords[1].to_f, @unit)
      elev =
        if elevation
          elevation.to_f
        elsif coords.size == 3
          coords[2].to_f
        else
          0.0
        end
      @values[2] = DimensionValue.new(elev, @dimension_unit)
    end
    private :assign_from_array

    # Returns [lat, lon] for dim == 2, otherwise [lat, lon, elevation].
    def location(dim = 2)
      dim == 2 ? [ @values[0], @values[1] ] : [ @values[0], @values[1], @values[2] ]
    end

    attr_reader :values, :datum, :unit

    # Alias kept for interface compatibility with other Unit classes.
    def value
      @values
    end

    def elevation
      @values[2]
    end

    def longitude
      @values[1]
    end

    def latitude
      @values[0]
    end

    def standard_datum
      STANDARD_DATUM
    end

    # Bug fix: the class-level accessor previously delegated to the
    # *instance* method standard_datum, raising NoMethodError when called.
    def self.standard_unit
      STANDARD_DATUM
    end

    def standard_unit
      standard_datum
    end

    def self.datums
      DATUMS
    end

    def self.units
      datums
    end

    # Converts this location to the standard datum (WGS84).
    #
    # NOTE(review): @latitude/@longitude/@elevation are never assigned
    # (coordinates live in @values); as written this branch raises on nil.
    # Presumably the latitude/longitude/elevation readers were intended —
    # confirm against LocationValue arithmetic before changing.
    def to_standard
      case @@datum_table[@datum]
      when "tokyo"
        Location.new('latitude' => @latitude - 0.00010695 * @latitude + 0.000017464 * @longitude + 0.0046017,
                     'longitude' => @longitude - 0.000046038 * @longitude - 0.000083043 * @latitude + 0.010040,
                     'elevation' => @elevation,
                     'datum' => STANDARD_DATUM)
      else
        self
      end
    end

    # Converts to the requested datum, going through the standard datum.
    # Bug fixes: the default referenced the undefined STANDARD_UNIT
    # constant (NameError), and the result hash used the misspelled key
    # 'elevatuon', silently dropping the elevation.
    def to_requested(datum = STANDARD_DATUM)
      if datum != @datum
        standard = self.to_standard
        case @@datum_table[datum]
        when "tokyo"
          Location.new('latitude' => @latitude + @latitude * 0.00010696 - @longitude * 0.000017467 - 0.0046020,
                       'longitude' => @longitude + @longitude * 0.000046047 + @latitude * 0.000083049 - 0.010041,
                       'elevation' => @elevation,
                       'datum' => datum)
        when "wgs84"
          standard
        else
          nil
        end
      else
        self
      end
    end
  end
end
|
# Route table for the Apps engine (app install/uninstall dashboard).
Apps::Engine.routes.draw do
# root of the plugin
root :to => 'apps#index'
# examples of controllers built in this generator. delete at will
match 'installed' => 'apps#installed', via: [:get,:post]
# NOTE(review): 'webapps' maps to apps#advanced — the path/action mismatch
# is confusing, and this route may conflict with other engines; verify.
match 'webapps' => 'apps#advanced', via: [:get,:post]
post 'install/:id' => 'apps#install', as: 'install'
# Progress endpoints accept GET (polling) as well as POST.
match 'install_progress/:id' => 'apps#install_progress', as: 'install_progress', via: [:get,:post]
post 'uninstall/:id' => 'apps#uninstall', as: 'uninstall'
match 'uninstall_progress/:id' => 'apps#uninstall_progress', as: 'uninstall_progress', via: [:get,:post]
put 'toggle_in_dashboard/:id' => 'apps#toggle_in_dashboard', as: 'toggle_in_dashboard'
end
Remove the 'webapps' route, which conflicted with existing webapps routes.
Signed-off-by: Arpit Goyal <72df09b02047b5b6857897f0b5d0495073ea24a3@gmail.com>
# Route table for the Apps engine (app install/uninstall dashboard).
Apps::Engine.routes.draw do
# root of the plugin
root :to => 'apps#index'
# examples of controllers built in this generator. delete at will
match 'installed' => 'apps#installed', via: [:get,:post]
post 'install/:id' => 'apps#install', as: 'install'
# Progress endpoints accept GET (polling) as well as POST.
match 'install_progress/:id' => 'apps#install_progress', as: 'install_progress', via: [:get,:post]
post 'uninstall/:id' => 'apps#uninstall', as: 'uninstall'
match 'uninstall_progress/:id' => 'apps#uninstall_progress', as: 'uninstall_progress', via: [:get,:post]
put 'toggle_in_dashboard/:id' => 'apps#toggle_in_dashboard', as: 'toggle_in_dashboard'
end
|
require_relative '../textmate_tools.rb'
require_relative './tokens.rb'
require_relative '../shared/numeric.rb'
# todo
# fix initializer list "functions" e.g. `int a{5};`
# fix the ... inside of macros
# have all patterns with keywords be dynamically generated
# Root grammar object for the generated C++ TextMate grammar.
# Fix: typos in the emitted metadata strings ("much-more-readble",
# "This file essentially" missing "is").
cpp_grammar = Grammar.new(
    name:"C++",
    scope_name: "source.cpp",
    version: "https://github.com/jeff-hykin/cpp-textmate-grammar/blob/master/syntaxes/cpp.tmLanguage.json",
    information_for_contributors: [
        "This code was auto generated by a much-more-readable ruby file: https://github.com/jeff-hykin/cpp-textmate-grammar/blob/master/generate.rb",
        "This file is essentially an updated/improved fork of the atom syntax https://github.com/atom/language-c/blob/master/grammars/c%2B%2B.cson",
    ],
)
#
# Utils
#
# Statement terminator; stored in @semicolon so other patterns can reuse it.
cpp_grammar[:semicolon] = @semicolon = newPattern(
match: /;/,
tag_as: "punctuation.terminator.statement",
)
# Generic delimiter comma (template/argument commas have their own patterns).
cpp_grammar[:comma] = newPattern(
match: /,/,
tag_as: "comma punctuation.separator.delimiter"
)
# Builds a Range pattern matching a `{ ... }` construct (switch, class,
# etc.) and splits it into head / body / tail sub-ranges so each part can
# carry its own meta scope and include list.
#
# name            - suffix used in the generated scope names
# tag_as          - scope applied to the whole construct
# start_pattern   - pattern that begins the construct (e.g. the keyword)
# needs_semicolon - true for class-like constructs that must end with ';'
# *_includes      - pattern lists for the respective sub-ranges
def blockFinderFor( name:"", tag_as:"", start_pattern:nil, needs_semicolon: true, primary_includes: [], head_includes:[], body_includes: [ :$initial_context ], tail_includes: [ :$initial_context ], secondary_includes:[])
# Characters that can legally follow the construct without a semicolon.
lookahead_endings = /[;>\[\]=]/
if needs_semicolon
end_pattern = newPattern(
match: newPattern(
lookBehindFor(/}/).maybe(@spaces).then(@semicolon)
).or(
@semicolon
).or(
lookAheadFor(lookahead_endings)
)
)
else
end_pattern = lookBehindFor(/\}/).or(lookAheadFor(lookahead_endings))
end
return Range.new(
tag_as: tag_as,
start_pattern: newPattern(
match: start_pattern,
tag_as: "meta.head."+name,
),
end_pattern: end_pattern,
includes: [
*primary_includes,
# Head
Range.new(
tag_as: "meta.head."+name,
start_pattern: /\G| /,
end_pattern: newPattern(
match: /\{/.or(lookAheadFor(/;/)),
tag_as: "punctuation.section.block.begin.bracket.curly."+name
),
includes: head_includes
),
# Body
Range.new(
tag_as: "meta.body."+name, # body is everything in the {}'s
start_pattern: lookBehindFor(/\{/),
end_pattern: newPattern(
match: /\}/,
tag_as: "punctuation.section.block.end.bracket.curly."+name
),
includes: body_includes
),
# Tail
Range.new(
tag_as: "meta.tail."+name,
start_pattern: lookBehindFor(/}/).then(/[\s\n]*/),
end_pattern: newPattern(/[\s\n]*/).lookAheadFor(/;/),
includes: tail_includes
),
*secondary_includes
]
)
end
#
#
# Contexts
#
#
# Top-level include list; order matters — earlier patterns win when two
# patterns match at the same position.
cpp_grammar[:$initial_context] = [
:parameter_struct, # TODO this is here because it needs to activate inside of function-pointer parameters. Once function-pointer syntax is implemented, remove it from here
:struct_declare,
:special_block_context,
:macro_argument,
:string_context,
:functional_specifiers_pre_parameters,
:qualifiers_and_specifiers_post_parameters,
:storage_specifiers,
:access_control_keywords,
:exception_keywords,
:other_keywords,
:memory_operators,
:the_this_keyword,
:language_constants,
:template_isolated_definition,
:template_definition,
:scope_resolution,
:misc_storage_modifiers_1,
:destructor,
:destructor_prototype,
:lambdas,
:preprocessor_context,
:comments_context,
:switch_statement,
:control_flow_keywords,
:storage_types,
:assembly,
:misc_storage_modifiers_2,
:operator_overload,
:number_literal,
:string_context_c,
:meta_preprocessor_macro,
:meta_preprocessor_diagnostic,
:meta_preprocessor_include,
:pragma_mark,
:meta_preprocessor_line,
:meta_preprocessor_undef,
:meta_preprocessor_pragma,
:operators,
:block,
:parentheses,
:function_definition,
:line_continuation_character,
:square_brackets,
:empty_square_brackets,
:semicolon,
:comma,
]
# Preprocessor conditional handling (#if 0 / #if 1 / general #if).
cpp_grammar[:preprocessor_context] = [
:preprocessor_rule_enabled,
:preprocessor_rule_disabled,
:preprocessor_rule_conditional,
:hacky_fix_for_stray_directive,
]
cpp_grammar[:storage_types] = [
:primitive_types,
:non_primitive_types,
:pthread_types,
:posix_reserved_types,
]
# eventually this context will be more exclusive (can't have class definitons inside of an evaluation)
# but for now it just includes everything
cpp_grammar[:evaluation_context] = [
:$initial_context
# function call
# number literal
# lambdas
]
# eventually this context will be more exclusive (can't have class definitons inside of an if statement)
# but for now it just includes everything
cpp_grammar[:conditional_context] = [
:$initial_context
]
# Patterns allowed between the <> of a template *definition*.
cpp_grammar[:template_definition_context] = [
:scope_resolution,
:template_definition_argument,
:template_argument_defaulted,
:template_call_innards,
:evaluation_context
]
# Patterns allowed between the <> of a template *call* (instantiation).
cpp_grammar[:template_call_context] = [
:storage_types,
:language_constants,
:scope_resolution,
:user_defined_template_type,
:operators,
:number_literal,
:string_context,
:comma_in_template_argument
]
#
#
# Numbers
#
#
#
# Number Literal
#
# Numeric literals come from the shared numeric.rb helper; C++ allows
# user-defined literal suffixes (e.g. 10ms).
cpp_grammar[:number_literal] = numeric_constant(allow_user_defined_literals: true)
#
# Variable
#
# C++ identifiers may contain universal character names (\uXXXX / \UXXXXXXXX).
universal_character = /\\u[0-9a-fA-F]{4}/.or(/\\U000[0-9a-fA-F]/)
first_character = /[a-zA-Z_]/.or(universal_character)
subsequent_character = /[a-zA-Z0-9_]/.or(universal_character)
identifier = first_character.then(zeroOrMoreOf(subsequent_character))
# todo: make a better name for this function
# Wraps a pattern with word boundaries so it only matches whole identifiers.
variableBounds = ->(regex_pattern) do
lookBehindToAvoid(@standard_character).then(regex_pattern).lookAheadToAvoid(@standard_character)
end
variable_name_without_bounds = identifier
# word bounds are inefficient, but they are accurate
variable_name = variableBounds[variable_name_without_bounds]
#
# Constants
#
cpp_grammar[:language_constants] = newPattern(
match: variableBounds[@cpp_tokens.that(:isLiteral)],
tag_as: "constant.language.$match"
)
#
# Built-In Types
#
look_behind_for_type = lookBehindFor(/\w |\*\/|[&*>\]\)]|\.\.\./).maybe(@spaces)
cpp_grammar[:primitive_types] = newPattern(
match: variableBounds[ @cpp_tokens.that(:isPrimitive) ],
tag_as: "storage.type.primitive"
)
cpp_grammar[:non_primitive_types] = newPattern(
match: variableBounds[@cpp_tokens.that(not(:isPrimitive), :isType)],
tag_as: "storage.type"
)
#
# Keywords and Keyword-ish things
#
# Function specifiers (e.g. inline, virtual, explicit) appearing before
# the parameter list.
# Fix: scope segment "specificer" was a typo — the sibling
# qualifiers_and_specifiers_post_parameters pattern (and themes) use
# "specifier".
cpp_grammar[:functional_specifiers_pre_parameters] = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isFunctionSpecifier) ],
    tag_as: "storage.modifier.specifier.functional.pre-parameters.$match"
)
cpp_grammar[:qualifiers_and_specifiers_post_parameters] = newPattern(
match: variableBounds[ @cpp_tokens.that(:canAppearAfterParametersBeforeBody) ].lookAheadFor(/\s*/.then(/\{/.or(/;/).or(/[\n\r]/))),
tag_as: "storage.modifier.specifier.functional.post-parameters.$match"
)
cpp_grammar[:storage_specifiers] = newPattern(
match: variableBounds[ @cpp_tokens.that(:isStorageSpecifier) ],
tag_as: "storage.modifier.specifier.$match"
)
cpp_grammar[:access_control_keywords] = newPattern(
match: lookBehindToAvoid(@standard_character).then(@cpp_tokens.that(:isAccessSpecifier)).maybe(@spaces).then(/:/),
tag_as: "storage.type.modifier.access.control.$match"
)
cpp_grammar[:exception_keywords] = newPattern(
match: variableBounds[ @cpp_tokens.that(:isExceptionRelated) ],
tag_as: "keyword.control.exception.$match"
)
cpp_grammar[:other_keywords] = newPattern(
match: variableBounds[ /(using|typedef)/ ],
tag_as: "keyword.other.$match"
)
cpp_grammar[:the_this_keyword] = the_this_keyword = newPattern(
match: variableBounds[ /this/ ],
tag_as: "variable.language.this"
)
# TODO: enhance casting operators to include <>'s
type_casting_operators = newPattern(
match: variableBounds[ @cpp_tokens.that(:isTypeCastingOperator) ],
tag_as: "keyword.operator.wordlike keyword.operator.cast.$match"
)
cpp_grammar[:memory_operators] = newPattern(
tag_as: "keyword.operator.wordlike memory",
match: lookBehindToAvoid(
@standard_character
).then(
newPattern(
newPattern(
match: /delete/,
tag_as: "keyword.operator.delete.array"
).maybe(@spaces).then(
match: /\[\]/,
tag_as: "keyword.operator.delete.array.bracket"
)
).or(
match: /delete/,
tag_as: "keyword.operator.delete"
).or(
match: /new/,
tag_as: "keyword.operator.new"
)
).lookAheadToAvoid(@standard_character)
)
cpp_grammar[:control_flow_keywords] = control_flow_keywords = newPattern(
match: variableBounds[ @cpp_tokens.that(:isControlFlow) ],
tag_as: "keyword.control.$match"
)
#
# Control flow
#
cpp_grammar[:default_statement] = Range.new(
tag_as: "meta.conditional.case",
start_pattern: newPattern(
match: variableBounds[ /default/ ],
tag_as: "keyword.control.default"
),
end_pattern: newPattern(
match: /:/,
tag_as: "colon punctuation.separator.case.default"
),
includes: [:conditional_context]
)
cpp_grammar[:case_statement] = Range.new(
tag_as: "meta.conditional.case",
start_pattern: newPattern(
match: variableBounds[ /case/ ],
tag_as: "keyword.control.case"
),
end_pattern: newPattern(
match: /:/,
tag_as: "colon punctuation.separator.case"
),
includes: [:conditional_context]
)
cpp_grammar[:switch_conditional_parentheses] = Range.new(
tag_as: "meta.conditional.switch",
start_pattern: newPattern(
match: /\(/,
tag_as: 'punctuation.section.parens.begin.bracket.round.conditional.switch'
),
end_pattern: newPattern(
match: /\)/,
tag_as: 'punctuation.section.parens.end.bracket.round.conditional.switch'
),
includes: [ :conditional_context ]
)
cpp_grammar[:switch_statement] = blockFinderFor(
name: "switch",
tag_as: "meta.block.switch",
start_pattern: newPattern(
match: variableBounds[/switch/],
tag_as: "keyword.control.switch"
),
primary_includes: [
:switch_conditional_parentheses
],
head_includes: [
:switch_conditional_parentheses,
:$initial_context
],
body_includes: [
:default_statement,
:case_statement,
:$initial_context,
],
needs_semicolon: false,
)
#
# C++ Attributes
#
cpp_grammar[:attributes] = Range.new(
tag_as: "support.other.attribute",
start_pattern: newPattern(
match: @cpp_tokens.that(:isAttributeStart),
tag_as: "punctuation.section.attribute.begin",
),
end_pattern: newPattern(
match: @cpp_tokens.that(:isAttributeEnd),
tag_as: "punctuation.section.attribute.end",
),
includes: [
# allow nested attributes
:attributes,
Range.new(
start_pattern: newPattern(/\(/),
end_pattern: newPattern(/\)/),
includes: [
:attributes,
:string_context_c,
],
),
newPattern(match: /using/, tag_as: "keyword.other.using.directive")
.then(@spaces).then(
match: variable_name,
tag_as: "entity.name.type.namespace",
),
newPattern(match: /,/, tag_as: "punctuation.separator.attribute"),
newPattern(match: /:/, tag_as: "punctuation.accessor.attribute"),
newPattern(
match: variable_name.lookAheadFor(/::/),
tag_as: "entity.name.type.namespace"
),
newPattern(match: variable_name, tag_as: "entity.other.attribute.$match"),
],
)
inline_attribute = newPattern(
should_fully_match:["[[nodiscard]]","__attribute((packed))","__declspec(fastcall)"],
should_partial_match: ["struct [[deprecated]] st"],
# match one of the three attribute styles
match: newPattern(
@cpp_tokens.that(:isAttributeStart, :isCppAttribute).then(/.*?/).then(@cpp_tokens.that(:isAttributeEnd, :isCppAttribute))
).or(
@cpp_tokens.that(:isAttributeStart, :isGccAttribute).then(/.*?/).then(@cpp_tokens.that(:isAttributeEnd, :isGccAttribute))
).or(
@cpp_tokens.that(:isAttributeStart, :isMsAttribute).then(/.*?/).then(@cpp_tokens.that(:isAttributeEnd, :isMsAttribute))
).lookAheadToAvoid(/\)/),
includes: [
:attributes,
],
)
#
# Templates
#
characters_in_template_call = /[\s<>:,\w]/
cpp_grammar[:user_defined_template_type] = newPattern(
match: variable_name,
tag_as: 'storage.type.user-defined'
)
cpp_grammar[:comma_in_template_argument] = newPattern(
match: /,/,
tag_as: "comma punctuation.separator.template.argument"
)
# note: template_call should indeally be a Range(), the reason its not is
# because it's embedded inside of other patterns
cpp_grammar[:template_call_innards] = template_call = newPattern(
tag_as: 'meta.template.call',
match: /</.zeroOrMoreOf(characters_in_template_call).then(/>/).maybe(@spaces),
includes: [:template_call_context]
)
cpp_grammar[:template_call_range] = Range.new(
tag_as: 'meta.template.call',
start_pattern: newPattern(
match: /</,
tag_as: "punctuation.section.angle-brackets.begin.template.call"
),
end_pattern: newPattern(
match: />/,
tag_as: "punctuation.section.angle-brackets.end.template.call"
),
includes: [:template_call_context]
)
template_start = lookBehindToAvoid(@standard_character).then(
match: /template/,
tag_as: "storage.type.template"
).maybe(@spaces).then(
match: /</,
tag_as: "punctuation.section.angle-brackets.start.template.definition"
)
# a template definition that is by itself on a line (this is ideal)
cpp_grammar[:template_isolated_definition] = newPattern(
match: template_start.then(
match: zeroOrMoreOf(/./),
tag_as: "meta.template.definition",
includes: [:template_definition_context],
).then(
match: />/.maybe(@spaces).then(/$/),
tag_as: "punctuation.section.angle-brackets.end.template.definition"
),
)
# Multi-line template definition: template < ... > spanning lines.
cpp_grammar[:template_definition] = Range.new(
    tag_as: 'meta.template.definition',
    start_pattern: template_start,
    end_pattern: newPattern(
        match: />/,
        tag_as: "punctuation.section.angle-brackets.end.template.definition"
    ),
    includes: [
        # a template call inside of a non-isolated template definition
        # however this is rolling the dice: because if there is a less-than operator in a defaulted argument, then this pattern will screw everything up
        # a better solution would be nice, but its going to be difficult/impossible
        Range.new(
            start_pattern: newPattern(
                match: lookBehindFor(/\w/).maybe(@spaces).then(/</),
                tag_as: "punctuation.section.angle-brackets.begin.template.call"
            ),
            end_pattern: newPattern(
                match: />/,
                # Fix: the closing bracket was tagged
                # "...begin.template.call" (copy-paste); it is the end
                # punctuation, matching template_call_range above.
                tag_as: "punctuation.section.angle-brackets.end.template.call"
            ),
            includes: [:template_call_context]
        ),
        :template_definition_context,
    ]
)
cpp_grammar[:template_argument_defaulted] = newPattern(
match: lookBehindFor(/<|,/).maybe(@spaces).then(
match: zeroOrMoreOf(variable_name_without_bounds.then(@spaces)),
tag_as: "storage.type.template",
).then(
match: variable_name_without_bounds,
tag_as: "entity.name.type.template"
).maybe(@spaces).then(
match: /[=]/,
tag_as: "keyword.operator.assignment"
)
)
cpp_grammar[:template_definition_argument] = newPattern(
match: maybe(
@spaces
# case 1: only one word
).then(
match: variable_name_without_bounds,
tag_as: "storage.type.template.argument.$match",
# case 2: normal situation (ex: "typename T")
).or(
newPattern(
match: oneOrMoreOf(variable_name_without_bounds.then(@spaces)),
tag_as: "storage.type.template.argument.$match",
).then(
match: variable_name_without_bounds,
tag_as: "entity.name.type.template",
)
# case 3: ellipses (ex: "typename... Args")
).or(
newPattern(
match: variable_name_without_bounds,
tag_as: "storage.type.template",
).maybe(@spaces).then(
match: /\.\.\./,
tag_as: "ellipses punctuation.vararg-ellipses.template.definition",
).maybe(@spaces).then(
match: variable_name_without_bounds,
tag_as: "entity.name.type.template"
)
).maybe(@spaces).then(
newPattern(
match: /,/,
tag_as: "comma punctuation.separator.template.argument",
).or(
lookAheadFor(/>|$/)
)
)
)
#
# Scope resolution
#
one_scope_resolution = variable_name_without_bounds.maybe(@spaces).maybe(template_call.without_numbered_capture_groups).then(/::/)
preceding_scopes = newPattern(
match: zeroOrMoreOf(one_scope_resolution).maybe(@spaces),
includes: [ :scope_resolution ]
)
cpp_grammar[:scope_resolution] = scope_resolution = newPattern(
tag_as: "meta.scope-resolution",
match: preceding_scopes.then(
match: variable_name_without_bounds,
tag_as: "entity.name.type.namespace.scope-resolution"
).maybe(@spaces).maybe(
template_call
).then(
match: /::/,
tag_as: "punctuation.separator.namespace.access"
)
)
#
# Functions
#
functionTemplate = ->(repository_name:nil, match_name: nil, tag_name_as: nil, tag_content_as: nil, tag_parenthese_as: nil) do
new_range = Range.new(
tag_content_as: "meta.#{tag_content_as}",
start_pattern: newPattern(
match: match_name,
tag_as: tag_name_as,
).then(
match: /\(/,
tag_as: "punctuation.section.arguments.begin.bracket.round.#{tag_parenthese_as}"
),
end_pattern: newPattern(
match: /\)/,
tag_as: "punctuation.section.arguments.end.bracket.round.#{tag_parenthese_as}"
),
includes: [
:evaluation_context
]
)
if repository_name
cpp_grammar[repository_name] = new_range
end
return new_range
end
cant_be_a_function_name = @cpp_tokens.that(:isWord, not(:isPreprocessorDirective), not(:isValidFunctionName))
avoid_invalid_function_names = lookBehindToAvoid(@standard_character).lookAheadToAvoid(maybe(@spaces).then(cant_be_a_function_name).maybe(@spaces).then(/\(/))
look_ahead_for_function_name = lookAheadFor(variable_name_without_bounds.maybe(@spaces).maybe(inline_attribute).maybe(@spaces).then(/\(/))
cpp_grammar[:struct_declare] = struct_declare = newPattern(
should_partial_match: [ "struct crypto_aead *tfm = crypto_aead_reqtfm(req);", "struct aegis_block blocks[AEGIS128L_STATE_BLOCKS];" ],
match: newPattern(
match: /struct/,
tag_as: "storage.type.struct.declare",
).then(@spaces).then(
match: variable_name,
tag_as: "entity.name.type.struct",
).then(@spaces).zeroOrMoreOf(
match: /\*|&/.maybe(@spaces),
includes: [
newPattern(
match: /\*/,
tag_as: "keyword.operator.dereference"
),
newPattern(
match: /&/,
tag_as: "keyword.operator.reference"
),
]
).then(
match: variable_name,
tag_as: "variable.other.object.declare",
)
)
cpp_grammar[:parameter_struct] = newPattern(
should_partial_match: [ "struct skcipher_walk *walk," ],
match: newPattern(
match: /struct/,
tag_as: "storage.type.struct.parameter",
).then(@spaces).then(
match: variable_name,
tag_as: "entity.name.type.struct.parameter",
).then(@spaces).zeroOrMoreOf(
match: /\*|&/.maybe(@spaces),
includes: [
newPattern(
match: /\*/,
tag_as: "keyword.operator.dereference"
),
newPattern(
match: /&/,
tag_as: "keyword.operator.reference"
),
]
# this is a maybe because its possible to have a type declare without an actual parameter
).maybe(
match: variable_name,
tag_as: "variable.other.object.declare",
).maybe(@spaces).maybe(
/\[/.maybe(@spaces).then(/\]/).maybe(@spaces),
).lookAheadFor(/,|\)|\n/)
)
cpp_grammar[:function_definition] = Range.new(
tag_as: "meta.function.definition.parameters",
start_pattern: avoid_invalid_function_names.then(look_ahead_for_function_name),
end_pattern: lookBehindFor(/\)/),
includes: [ :parameter_struct, :function_context_c ]
)
# a full match example of function call would be: aNameSpace::subClass<TemplateArg>FunctionName<5>(
cpp_grammar[:function_call] = Range.new(
start_pattern: avoid_invalid_function_names.then(
preceding_scopes
).then(
match: variable_name_without_bounds,
tag_as: "entity.name.function.call"
).maybe(@spaces).maybe(
template_call
).then(
match: /\(/,
tag_as: "punctuation.section.arguments.begin.bracket.round"
),
end_pattern: newPattern(
match: /\)/,
tag_as: "punctuation.section.arguments.end.bracket.round"
),
includes: [ :function_call_context_c ]
)
#
# Operators
#
cpp_grammar[:operators] = []
normal_word_operators = newPattern(
match: variableBounds[ @cpp_tokens.that(:isOperator, :isWord, not(:isTypeCastingOperator), not(:isControlFlow), not(:isFunctionLike)) ],
tag_as: "keyword.operator.wordlike alias keyword.operator.$match",
)
array_of_function_like_operators = @cpp_tokens.tokens.select { |each| each[:isFunctionLike] && !each[:isSpecifier] }
for each in array_of_function_like_operators
name = each[:name]
cpp_grammar[:operators].push(functionTemplate[
repository_name: "#{name}_operator",
match_name: variableBounds[/#{name}/],
tag_name_as: "keyword.operator.functionlike keyword.operator.#{name}",
tag_content_as: "arguments.operator.#{name}",
tag_parenthese_as: "operator.#{name}"
])
end
cpp_grammar[:operators] += [
functionTemplate[
repository_name: "decltype_specifier",
match_name: variableBounds[/decltype/],
tag_name_as: "keyword.operator.functionlike keyword.other.decltype storage.type.decltype",
tag_content_as: "arguments.decltype",
tag_parenthese_as: "decltype"
],
type_casting_operators,
:method_access,
:member_access,
normal_word_operators,
:vararg_ellipses,
{
match: "--",
name: "keyword.operator.decrement"
},
{
match: "\\+\\+",
name: "keyword.operator.increment"
},
{
match: "%=|\\+=|-=|\\*=|(?<!\\()/=",
name: "keyword.operator.assignment.compound"
},
{
match: "&=|\\^=|<<=|>>=|\\|=",
name: "keyword.operator.assignment.compound.bitwise"
},
{
match: "<<|>>",
name: "keyword.operator.bitwise.shift"
},
{
match: "!=|<=|>=|==|<|>",
name: "keyword.operator.comparison"
},
{
match: "&&|!|\\|\\|",
name: "keyword.operator.logical"
},
{
match: "&|\\||\\^|~",
name: "keyword.operator"
},
{
match: "=",
name: "keyword.operator.assignment"
},
{
match: "%|\\*|/|-|\\+",
name: "keyword.operator"
},
{
begin: "\\?",
beginCaptures: {
"0" => {
name: "keyword.operator.ternary"
}
},
end: ":",
applyEndPatternLast: true,
endCaptures: {
"0" => {
name: "keyword.operator.ternary"
}
},
patterns: [
{
include: "#method_access"
},
{
include: "#member_access"
},
{
include: "#function_call_c"
},
{
include: "$initial_context"
}
]
}
]
#
# Probably a parameter
#
array_brackets = /\[\]/.maybe(@spaces)
comma_or_closing_paraenthese = /,/.or(/\)/)
stuff_after_a_parameter = maybe(@spaces).lookAheadFor(maybe(array_brackets).then(comma_or_closing_paraenthese))
cpp_grammar[:probably_a_parameter] = newPattern(
match: newPattern(
match: variable_name_without_bounds.maybe(@spaces).lookAheadFor("="),
tag_as: "variable.parameter.defaulted"
).or(
match: look_behind_for_type.then(variable_name_without_bounds).then(stuff_after_a_parameter),
tag_as: "variable.parameter"
)
)
#
# Operator overload
#
# symbols can have spaces
operator_symbols = maybe(@spaces).then(@cpp_tokens.that(:canAppearAfterOperatorKeyword, :isSymbol))
# words must have spaces, the variable_name_without_bounds is for implicit overloads
operator_wordish = @spaces.then(@cpp_tokens.that(:canAppearAfterOperatorKeyword, :isWordish).or(zeroOrMoreOf(one_scope_resolution).then(variable_name_without_bounds).maybe(@spaces).maybe(/&/)))
after_operator_keyword = operator_symbols.or(operator_wordish)
cpp_grammar[:operator_overload] = operator_overload = Range.new(
tag_as: "meta.function.definition.parameters.operator-overload",
start_pattern: newPattern(
match: /operator/,
tag_as: "keyword.other.operator.overload",
).then(
match: after_operator_keyword,
tag_as: "entity.name.operator.overloadee",
includes: [:scope_resolution]
).maybe(@spaces).then(
match: /\(/,
tag_as: "punctuation.section.parameters.begin.bracket.round.operator-overload"
),
end_pattern: newPattern(
match: /\)/,
tag_as: "punctuation.section.parameters.end.bracket.round.operator-overload"
),
includes: [:probably_a_parameter, :function_context_c ]
)
#
# Access . .* -> ->*
#
dot_operator = /\.\*/.or(/\./)
arrow_operator = /->\*/.or(/->/)
dot_or_arrow_operator = /(?:\.\*|\.|->|->\*)/
member_operator = newPattern(
match: dot_operator,
tag_as: "punctuation.separator.dot-access"
).or(
match: arrow_operator,
tag_as: "punctuation.separator.pointer-access"
)
subsequent_object_with_operator = variable_name_without_bounds.maybe(@spaces).then(member_operator.without_numbered_capture_groups).maybe(@spaces)
# TODO: the member_access and method_access can probably be simplified considerably
# TODO: member_access and method_access might also need additional matching to handle scope resolutions
partial_member = the_this_keyword.or(
newPattern(
match: variable_name_without_bounds.or(lookBehindFor(/\]|\)/)).maybe(@spaces),
tag_as: "variable.other.object.access",
)
).then(
member_operator
)
member_context = [
mid_member = newPattern(
tag_as: "variable.other.object.property",
match: lookBehindFor(dot_or_arrow_operator).maybe(
@spaces
).then(
partial_member.without_numbered_capture_groups
)
),
partial_member,
:member_access,
:method_access,
]
member_start = partial_member.then(
match: zeroOrMoreOf(subsequent_object_with_operator),
includes: member_context
).maybe(@spaces)
# access to attribute
type_represenetations = @cpp_tokens.representationsThat(:isType)
lookahead_friedly_types_pattern = /#{type_represenetations.map { |each| each+"[^#{@standard_character}]" } .join('|')}/
cpp_grammar[:member_access] = member_access = newPattern(
match: member_start.then(
match: @word_boundary.lookAheadToAvoid(lookahead_friedly_types_pattern).then(variable_name_without_bounds).then(@word_boundary).lookAheadToAvoid(/\(/),
tag_as: "variable.other.property"
)
)
# access to method
cpp_grammar[:method_access] = method_access = Range.new(
tag_content_as: "meta.function-call.member",
start_pattern: member_start.then(
match: variable_name_without_bounds,
tag_as: "entity.name.function.member"
).then(
match: /\(/,
tag_as: "punctuation.section.arguments.begin.bracket.round.function.member"
),
end_pattern: newPattern(
match: /\)/,
tag_as: "punctuation.section.arguments.end.bracket.round.function.member"
),
includes: [:function_call_context_c],
)
#
# Namespace
#
# see https://en.cppreference.com/w/cpp/language/namespace
cpp_grammar[:using_namespace] = Range.new(
tag_as: "meta.using-namespace",
start_pattern: lookBehindToAvoid(@standard_character).then(
match: /using/,
tag_as: "keyword.other.using.directive",
).then(@spaces).then(
match: /namespace/,
tag_as: "keyword.other.namespace.directive storage.type.namespace.directive"
).then(@spaces).maybe(
preceding_scopes
).then(
match: variable_name,
tag_as: "entity.name.type.namespace"
).lookAheadFor(
/;|\n/
),
end_pattern: @semicolon,
)
# TODO: add support for namespace name = qualified-namespace ;
cpp_grammar[:namespace_block] = blockFinderFor(
name: "namespace",
tag_as: "meta.block.namespace",
needs_semicolon: false,
start_pattern: lookBehindToAvoid(@standard_character).then(
match: /namespace/,
tag_as: "keyword.other.namespace.definition storage.type.namespace.definition"
).then(@spaces).maybe(inline_attribute).maybe(@spaces).then(
# Named namespace (with possible scope )
preceding_scopes
).maybe(@spaces).then(
newPattern(
match: variable_name,
tag_as: "entity.name.type.namespace",
# anonymous namespaces
).or(
lookAheadFor(/\{/)
)
),
)
#
# Preprocessor
#
# not sure if this pattern is actually accurate (it was the one provided by atom/c.tmLanguage)
preprocessor_name_no_bounds = /[a-zA-Z_$][\w$]*/
preprocessor_function_name = preprocessor_name_no_bounds.lookAheadFor(maybe(@spaces).then(/\(/))
cpp_grammar[:macro_argument] = newPattern(
match: /##/.then(variable_name_without_bounds).lookAheadToAvoid(@standard_character),
tag_as: "variable.other.macro.argument"
)
#
# Lambdas
#
array_of_invalid_function_names = @cpp_tokens.representationsThat(:canAppearBeforeLambdaCapture)
non_variable_name = /#{array_of_invalid_function_names.map { |each| '\W'+each+'|^'+each } .join('|')}/
cpp_grammar[:lambdas] = lambdas = Range.new(
start_pattern: newPattern(
should_fully_match: [ "[]", "[=]", "[&]", "[x,y,x]", "[x, y, &z, w = 1 + 1]", "[ a = blah[1324], b, c ]" ],
should_partial_match: [ "[]", "[=](", "[&]{", "[x,y,x]", "[x, y, &z, w = 1 + 1] (", "[ a = blah[1324], b, c ] {" ],
should_not_partial_match: [ "delete[]", "thing[]", "thing []", "thing []", "thing[0][0] = 0" ],
match: lookBehindFor(/[^\s]|^/).lookBehindToAvoid(/[\w\]\)\[]/).or(lookBehindFor(non_variable_name)).maybe(@spaces).then(
match: /\[/.lookAheadToAvoid(/\[/),
tag_as: "punctuation.definition.capture.begin.lambda",
).then(
match: /(?:.*\[.*?\].*?)*.*?/,
tag_as: "meta.lambda.capture",
# the zeroOrMoreOf() is for other []'s that are inside of the lambda capture
# this pattern is still imperfect: if someone had a string literal with ['s in it, it could fail
includes: [ :probably_a_parameter, :function_context_c ],
).then(
match: /\]/,
tag_as: "punctuation.definition.capture.end.lambda",
)
),
end_pattern: newPattern(
match: lookBehindFor(/}/),
),
includes: [
# check for parameters first
Range.new(
tag_as: 'meta.function.definition.parameters.lambda',
start_pattern: newPattern(
match: /\(/,
tag_as: "punctuation.definition.parameters.begin.lambda",
),
end_pattern: newPattern(
match: /\)/,
tag_as: "punctuation.definition.parameters.end.lambda",
),
includes: [ :probably_a_parameter, :function_context_c ]
),
# specificers
newPattern(
match: variableBounds[ @cpp_tokens.that(:isLambdaSpecifier) ],
tag_as: "storage.modifier.lambda.$match"
),
# check for the -> syntax
newPattern(
match: /->/,
tag_as: "punctuation.definition.lambda.return-type"
).maybe(
match: /.+?/.lookAheadFor(/\{|$/),
tag_as: "storage.type.return-type.lambda"
),
# then find the body
Range.new(
tag_as: "meta.function.definition.body.lambda",
start_pattern: newPattern(
match: /\{/,
tag_as: "punctuation.section.block.begin.bracket.curly.lambda",
),
end_pattern: newPattern(
match: /\}/,
tag_as: "punctuation.section.block.end.bracket.curly.lambda",
),
includes: [ :$initial_context ]
),
]
)
#
# Support
#
# generally this section is for things that need a #include, (the support category)
# it will be for things such as cout, cin, vector, string, map, etc
cpp_grammar[:pthread_types] = pthread_types = newPattern(
tag_as: "support.type.posix-reserved.pthread",
match: variableBounds[ /pthread_attr_t|pthread_cond_t|pthread_condattr_t|pthread_mutex_t|pthread_mutexattr_t|pthread_once_t|pthread_rwlock_t|pthread_rwlockattr_t|pthread_t|pthread_key_t/ ],
)
cpp_grammar[:posix_reserved_types] = posix_reserved_types = newPattern(
match: variableBounds[ /[a-zA-Z_]/.zeroOrMoreOf(@standard_character).then(/_t/) ],
tag_as: "support.type.posix-reserved"
)
#
# Classes, structs, unions, enums
#
# see https://en.cppreference.com/w/cpp/language/enum
# this range matches both the case with brackets and the case without brackets
cpp_grammar[:enum_block] = blockFinderFor(
name: "enum",
tag_as: "meta.block.enum",
start_pattern: newPattern(
match: /enum/,
tag_as: "storage.type.enum"
).then(@spaces).maybe(
# see "Scoped enumerations" on https://en.cppreference.com/w/cpp/language/enum
newPattern(
match: /class|struct/,
tag_as: "storage.type.enum.enum-key.$match",
).then(@spaces.or(inline_attribute).or(lookAheadFor(/{/)))
).maybe(inline_attribute).maybe(@spaces).maybe(
match: variable_name,
tag_as: "entity.name.type.enum",
).maybe(
maybe(@spaces).then(
match: /:/,
tag_as: "colon punctuation.separator.type-specifier",
).maybe(@spaces).maybe(
scope_resolution
).maybe(@spaces).then(
match: variable_name,
tag_as: "storage.type.integral.$match",
)
),
head_includes: [ :$initial_context ]
)
# the following are basically the equivlent of:
# @cpp_tokens.that(:isAccessSpecifier).or(/,/).or(/:/)
# that ^ causes an error in the lookBehindFor() so it has to be manually spread
can_come_before_a_inherited_class = @cpp_tokens.representationsThat(:isAccessSpecifier) + [ ',', ':' ]
can_come_before_a_inherited_class_regex = /#{can_come_before_a_inherited_class.join('|')}/
cpp_grammar[:inhertance_context] = [
newPattern(
match: /,/,
tag_as: "comma punctuation.separator.delimiter.inhertance"
),
newPattern(
match: variableBounds[ @cpp_tokens.that(:isAccessSpecifier) ],
tag_as: "storage.type.modifier.access.$match",
),
lookBehindFor(can_come_before_a_inherited_class_regex).maybe(@spaces).lookAheadToAvoid(@cpp_tokens.that(:isAccessSpecifier)).then(
match: variable_name,
tag_as: "entity.name.type.inherited"
)
]
final_modifier = newPattern(
match: /final/,
tag_as: "storage.type.modifier.final",
)
generateClassOrStructBlockFinder = ->(name) do
return blockFinderFor(
tag_as: "meta.block.#{name}",
name: name,
start_pattern: newPattern(
should_fully_match: ["#{name} foo: bar", "#{name} foo: public baz"],
should_not_fully_match: ["#{name} foo {","#{name} foo{"],
should_partial_match: ["#{name} foo f;", "#{name} st s;"],
match: newPattern(
reference: "storage_type",
match: variableBounds[ /#{name}/ ],
tag_as: "storage.type.$match",
).then(
@spaces.or(
inline_attribute
).or(
lookAheadFor(/{/)
)
).maybe(inline_attribute).maybe(@spaces).maybe(
match: variable_name,
tag_as: "entity.name.type.$reference(storage_type)",
).maybe(
@spaces.then(final_modifier).maybe(@spaces)
).maybe(
#
# inheritance
#
maybe(@spaces).then(
match: /:/,
tag_as: "colon punctuation.separator.inhertance"
# the following may seem redundant (removing it shouldn't change anything)
# this is because the follow are matched by what is inside of this Range
# However its preferable to match things here, in the Start (using a pattern), over matching it inside of the range
# this is because the start pattern typically fails safely (is limited to 1 line), while typically Ranges fail dangerously (can match the whole document)
).zeroOrMoreOf(
match: maybe(@spaces).maybe(/,/).maybe(
@spaces
).maybe(
@cpp_tokens.that(:isAccessSpecifier)
).maybe(@spaces).oneOrMoreOf(
maybe(@spaces).maybe(/,/).maybe(
@spaces
).lookAheadToAvoid(
@cpp_tokens.that(:isAccessSpecifier)
).then(variable_name)
),
includes: [ :inhertance_context ]
)
),
),
head_includes: [
:preprocessor_context,
:inhertance_context,
:template_call_range,
:comments_context,
],
body_includes: [ :constructor_context, :$initial_context ],
)
end
cpp_grammar[:class_block] = generateClassOrStructBlockFinder["class"]
cpp_grammar[:struct_block] = generateClassOrStructBlockFinder["struct"]
cpp_grammar[:union_block] = generateClassOrStructBlockFinder["union"]
# the following is a legacy pattern, I'm not sure if it is still accurate
# I have no idea why it matches a double quote
cpp_grammar[:extern_block] = blockFinderFor(
name: 'extern',
tag_as: "meta.block.extern",
start_pattern: newPattern(
match: /\bextern/,
tag_as: "storage.type.extern"
).lookAheadFor(/\s*\"/),
head_includes: [ :$initial_context ],
secondary_includes: [ :$initial_context ]
)
#
# preprocessor directives
#
# TODO, change all blocks/paraentheses so that they end and the end of a macro
# TODO, find a good solution to dealing with if statments that cross in to/out of blocks
cpp_grammar[:hacky_fix_for_stray_directive] = hacky_fix_for_stray_directive = newPattern(
match: variableBounds[/#(?:endif|else|elif)/],
tag_as: "keyword.control.directive.$match"
)
#
# Misc Legacy
#
cpp_grammar[:square_brackets] = {
name: "meta.bracket.square.access",
begin: "([a-zA-Z_][a-zA-Z_0-9]*|(?<=[\\]\\)]))?(\\[)(?!\\])",
beginCaptures: {
"1" => {
name: "variable.other.object"
},
"2" => {
name: "punctuation.definition.begin.bracket.square"
}
},
end: "\\]",
endCaptures: {
"0" => {
name: "punctuation.definition.end.bracket.square"
}
},
patterns: [
{
include: "#function_call_context_c"
}
]
}
cpp_grammar[:empty_square_brackets] = {
name: "storage.modifier.array.bracket.square",
match: /#{lookBehindToAvoid(/delete/)}\\[\\s*\\]/
}
cpp_grammar[:assembly] = newPattern(
match: variableBounds[ /(asm|__asm__)/ ],
tag_as: "storage.type.$match"
)
cpp_grammar[:misc_storage_modifiers_1] = {
match: /\b(constexpr|export|mutable|typename|thread_local)\b/,
name: "storage.modifier"
}
cpp_grammar[:misc_storage_modifiers_2] = {
match: /\b(const|extern|register|restrict|static|volatile|inline)\b/,
name: "storage.modifier"
}
cpp_grammar[:destructor] = {
name: "meta.function.destructor",
begin: "(?x)\n(?:\n ^ | # beginning of line\n (?:(?<!else|new|=)) # or word + space before name\n)\n((?:[A-Za-z_][A-Za-z0-9_]*::)*+~[A-Za-z_][A-Za-z0-9_]*) # actual name\n\\s*(\\() # opening bracket",
beginCaptures: {
"1" => {
name: "entity.name.function.destructor"
},
"2" => {
name: "punctuation.definition.parameters.begin.destructor"
}
},
end: /\)/,
endCaptures: {
"0" => {
name: "punctuation.definition.parameters.end.destructor"
}
},
patterns: [
{
include: "$initial_context"
}
]
}
cpp_grammar[:destructor_prototype] = {
name: "meta.function.destructor.prototype",
begin: "(?x)\n(?:\n ^ | # beginning of line\n (?:(?<!else|new|=)) # or word + space before name\n)\n((?:[A-Za-z_][A-Za-z0-9_]*::)*+~[A-Za-z_][A-Za-z0-9_]*) # actual name\n\\s*(\\() # opening bracket",
beginCaptures: {
"1" => {
name: "entity.name.function"
},
"2" => {
name: "punctuation.definition.parameters.begin"
}
},
end: /\)/,
endCaptures: {
"0" => {
name: "punctuation.definition.parameters.end"
}
},
patterns: [
{
include: "$initial_context"
}
]
}
cpp_grammar[:meta_preprocessor_macro] = {
name: "meta.preprocessor.macro",
begin: "(?x)\n^\\s* ((\\#)\\s*define) \\s+\t# define\n((?<id>#{preprocessor_name_no_bounds}))\t # macro name\n(?:\n (\\()\n\t(\n\t \\s* \\g<id> \\s*\t\t # first argument\n\t ((,) \\s* \\g<id> \\s*)* # additional arguments\n\t (?:\\.\\.\\.)?\t\t\t# varargs ellipsis?\n\t)\n (\\))\n)?",
beginCaptures: {
"1" => {
name: "keyword.control.directive.define"
},
"2" => {
name: "punctuation.definition.directive"
},
"3" => {
name: "entity.name.function.preprocessor"
},
"5" => {
name: "punctuation.definition.parameters.begin"
},
"6" => {
name: "variable.parameter.preprocessor"
},
"8" => {
name: "punctuation.separator.parameters"
},
"9" => {
name: "punctuation.definition.parameters.end"
}
},
end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
patterns: [
{
include: "#preprocessor_rule_define_line_context"
}
]
}
cpp_grammar[:meta_preprocessor_diagnostic] = {
name: "meta.preprocessor.diagnostic",
begin: "^\\s*((#)\\s*(error|warning))\\b\\s*",
beginCaptures: {
"1" => {
name: "keyword.control.directive.diagnostic.$3"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?<!\\\\)(?=\\n)",
patterns: [
{
begin: "\"",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "\"|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.double",
patterns: [
{
include: "#line_continuation_character"
}
]
},
{
begin: "'",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "'|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.single",
patterns: [
{
include: "#line_continuation_character"
}
]
},
{
begin: "[^'\"]",
end: "(?<!\\\\)(?=\\s*\\n)",
name: "string.unquoted.single",
patterns: [
{
include: "#line_continuation_character"
},
{
include: "#comments_context"
}
]
}
]
}
cpp_grammar[:meta_preprocessor_include] = {
name: "meta.preprocessor.include",
begin: "^\\s*((#)\\s*(include(?:_next)?|import))\\b\\s*",
beginCaptures: {
"1" => {
name: "keyword.control.directive.$3"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
patterns: [
{
include: "#line_continuation_character"
},
{
begin: "\"",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "\"",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.double.include"
},
{
begin: "<",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: ">",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.other.lt-gt.include"
}
]
}
cpp_grammar[:meta_preprocessor_line] = {
name: "meta.preprocessor",
begin: "^\\s*((#)\\s*line)\\b",
beginCaptures: {
"1" => {
name: "keyword.control.directive.line"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
patterns: [
{
include: "#string_context_c"
},
{
include: "#number_literal"
},
{
include: "#line_continuation_character"
}
]
}
cpp_grammar[:meta_preprocessor_undef] = {
name: "meta.preprocessor",
begin: "^\\s*(?:((#)\\s*undef))\\b",
beginCaptures: {
"1" => {
name: "keyword.control.directive.undef"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
patterns: [
{
match: preprocessor_name_no_bounds,
name: "entity.name.function.preprocessor"
},
{
include: "#line_continuation_character"
}
]
}
cpp_grammar[:meta_preprocessor_pragma] = {
name: "meta.preprocessor.pragma",
begin: "^\\s*(?:((#)\\s*pragma))\\b",
beginCaptures: {
"1" => {
name: "keyword.control.directive.pragma"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
patterns: [
{
include: "#string_context_c"
},
{
match: "[a-zA-Z_$][\\w\\-$]*",
name: "entity.other.attribute-name.pragma.preprocessor"
},
{
include: "#number_literal"
},
{
include: "#line_continuation_character"
}
]
}
cpp_grammar[:constructor_context] = [
{
begin: "(?x)\n(?:^\\s*) # beginning of line\n((?!while|for|do|if|else|switch|catch)[A-Za-z_][A-Za-z0-9_:]*) # actual name\n\\s*(\\() # opening bracket",
beginCaptures: {
"1" => {
name: "entity.name.function.constructor"
},
"2" => {
name: "punctuation.definition.parameters.begin.constructor"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.definition.parameters.end.constructor"
}
},
name: "meta.function.constructor",
patterns: [
{
include: "#probably_a_parameter"
},
{
include: "#function_context_c"
}
]
},
{
begin: "(?x)\n(:)\n(\n (?=\n \\s*[A-Za-z_][A-Za-z0-9_:]* # actual name\n \\s* (\\() # opening bracket\n )\n)",
beginCaptures: {
"1" => {
name: "punctuation.definition.initializer-list.parameters"
}
},
end: "(?=\\{)",
name: "meta.function.constructor.initializer-list",
patterns: [
{
include: "$initial_context"
}
]
}
]
cpp_grammar[:special_block_context] = [
:attributes,
:using_namespace,
:namespace_block,
:class_block,
:struct_block,
:union_block,
:enum_block,
:extern_block,
]
cpp_grammar[:string_context] = [
{
begin: "(u|u8|U|L)?\"",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
},
"1" => {
name: "meta.encoding"
}
},
end: "\"",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.double",
patterns: [
{
match: "\\\\u\\h{4}|\\\\U\\h{8}",
name: "constant.character.escape"
},
{
match: "\\\\['\"?\\\\abfnrtv]",
name: "constant.character.escape"
},
{
match: "\\\\[0-7]{1,3}",
name: "constant.character.escape"
},
{
match: "\\\\x\\h+",
name: "constant.character.escape"
},
{
include: "#string_escapes_context_c"
}
]
},
{
begin: "(u|u8|U|L)?R\"(?:([^ ()\\\\\\t]{0,16})|([^ ()\\\\\\t]*))\\(",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
},
"1" => {
name: "meta.encoding"
},
"3" => {
name: "invalid.illegal.delimiter-too-long"
}
},
end: "\\)\\2(\\3)\"",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
},
"1" => {
name: "invalid.illegal.delimiter-too-long"
}
},
name: "string.quoted.double.raw"
}
]
cpp_grammar[:block] = {
begin: "{",
beginCaptures: {
"0" => {
name: "punctuation.section.block.begin.bracket.curly"
}
},
end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)",
endCaptures: {
"0" => {
name: "punctuation.section.block.end.bracket.curly"
}
},
name: "meta.block",
patterns: [
{
include: "#block_context"
}
]
}
cpp_grammar[:block_context] = [
:preprocessor_rule_enabled_block,
:preprocessor_rule_disabled_block,
:preprocessor_rule_conditional_block,
:method_access,
:member_access,
:function_call_c,
{
name: "meta.initialization",
begin: "(?x)\n(?:\n (?:\n\t(?=\\s)(?<!else|new|return)\n\t(?<=\\w) \\s+(and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas) # or word + space before name\n )\n)\n(\n (?:[A-Za-z_][A-Za-z0-9_]*+ | :: )++ # actual name\n |\n (?:(?<=operator) (?:[-*&<>=+!]+ | \\(\\) | \\[\\]))\n)\n\\s*(\\() # opening bracket",
beginCaptures: {
"1" => {
name: "variable.other"
},
"2" => {
name: "punctuation.section.parens.begin.bracket.round.initialization"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.section.parens.end.bracket.round.initialization"
}
},
patterns: [
{
include: "#function_call_context_c"
}
]
},
{
begin: "{",
beginCaptures: {
"0" => {
name: "punctuation.section.block.begin.bracket.curly"
}
},
end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)",
endCaptures: {
"0" => {
name: "punctuation.section.block.end.bracket.curly"
}
},
patterns: [
{
include: "#block_context"
}
]
},
:parentheses_block,
:$initial_context
]
cpp_grammar[:function_call_c] = {
begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas|constexpr|volatile|operator|(?:::)?new|(?:::)?delete)\\s*\\()\n(?=\n(?:[A-Za-z_][A-Za-z0-9_]*+|::)++\\s*#{maybe(template_call.without_numbered_capture_groups)}\\( # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\\s*\\(\n)",
end: "(?<=\\))(?!\\w)",
name: "meta.function-call",
patterns: [
{
include: "#function_call_context_c"
}
]
}
cpp_grammar[:comments_context] = {
patterns: [
{
captures: {
"1" => {
name: "meta.toc-list.banner.block"
}
},
match: "^/\\* =(\\s*.*?)\\s*= \\*/$\\n?",
name: "comment.block"
},
{
begin: "/\\*",
beginCaptures: {
"0" => {
name: "punctuation.definition.comment.begin"
}
},
end: "\\*/",
endCaptures: {
"0" => {
name: "punctuation.definition.comment.end"
}
},
name: "comment.block"
},
{
captures: {
"1" => {
name: "meta.toc-list.banner.line"
}
},
match: "^// =(\\s*.*?)\\s*=\\s*$\\n?",
name: "comment.line.banner"
},
{
begin: "(^[ \\t]+)?(?=//)",
beginCaptures: {
"1" => {
name: "punctuation.whitespace.comment.leading"
}
},
end: "(?!\\G)",
patterns: [
{
begin: "//",
beginCaptures: {
"0" => {
name: "punctuation.definition.comment"
}
},
end: "(?=\\n)",
name: "comment.line.double-slash",
patterns: [
{
include: "#line_continuation_character"
}
]
}
]
}
]
}
cpp_grammar[:disabled] = {
begin: "^\\s*#\\s*if(n?def)?\\b.*$",
end: "^\\s*#\\s*endif\\b",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma_mark"
}
]
}
cpp_grammar[:line_continuation_character] = {
match: "(\\\\)\\n",
captures: {
"1" => {
name: "constant.character.escape.line-continuation"
}
}
}
cpp_grammar[:parentheses] = {
name: "meta.parens",
begin: "\\(",
beginCaptures: {
"0" => {
name: "punctuation.section.parens.begin.bracket.round"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.section.parens.end.bracket.round"
}
},
patterns: [
{
include: "$initial_context"
}
]
}
cpp_grammar[:parentheses_block] = {
name: "meta.parens.block",
begin: "\\(",
beginCaptures: {
"0" => {
name: "punctuation.section.parens.begin.bracket.round"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.section.parens.end.bracket.round"
}
},
patterns: [
{
include: "#block_context"
},
{
match: lookBehindToAvoid(/:/).then(/:/).lookAheadToAvoid(/:/),
name: "colon punctuation.separator.range-based"
}
]
}
cpp_grammar[:pragma_mark] = {
captures: {
"1" => {
name: "meta.preprocessor.pragma"
},
"2" => {
name: "keyword.control.directive.pragma.pragma-mark"
},
"3" => {
name: "punctuation.definition.directive"
},
"4" => {
name: "entity.name.tag.pragma-mark"
}
},
match: "^\\s*(((#)\\s*pragma\\s+mark)\\s+(.*))",
name: "meta.section"
}
cpp_grammar[:string_context_c] = [
{
begin: "\"",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "\"",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.double",
patterns: [
{
include: "#string_escapes_context_c"
},
{
include: "#line_continuation_character"
}
]
},
{
begin: lookBehindToAvoid(/[\da-fA-F]/).then(/'/),
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "'",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.single",
patterns: [
{
include: "#string_escapes_context_c"
},
{
include: "#line_continuation_character"
}
]
}
]
# Escape sequences inside C strings/char literals: known escapes (simple,
# octal, hex, \u/\U), any other backslash escape marked illegal, and
# printf-style %-placeholders.
cpp_grammar[:string_escapes_context_c] = [
    {
        match: "(?x)\\\\ (\n\\\\\t\t\t |\n[abefnprtv'\"?] |\n[0-3]\\d{,2}\t |\n[4-7]\\d?\t\t|\nx[a-fA-F0-9]{,2} |\nu[a-fA-F0-9]{,4} |\nU[a-fA-F0-9]{,8} )",
        name: "constant.character.escape"
    },
    {
        # any backslash escape not matched above is flagged as invalid
        match: "\\\\.",
        name: "invalid.illegal.unknown-escape"
    },
    {
        match: "(?x) %\n(\\d+\\$)?\t\t\t\t\t\t # field (argument #)\n[#0\\- +']*\t\t\t\t\t\t # flags\n[,;:_]?\t\t\t\t\t\t\t # separator character (AltiVec)\n((-?\\d+)|\\*(-?\\d+\\$)?)?\t\t # minimum field width\n(\\.((-?\\d+)|\\*(-?\\d+\\$)?)?)?\t# precision\n(hh|h|ll|l|j|t|z|q|L|vh|vl|v|hv|hl)? # length modifier\n[diouxXDOUeEfFgGaACcSspn%]\t\t # conversion type",
        name: "constant.other.placeholder"
    },
    # I don't think these are actual escapes, and they incorrectly mark valid strings
    # It might be related to printf and format from C (which is low priority for C++)
    # {
    #     match: "(%)(?!\"\\s*(PRI|SCN))",
    #     captures: {
    #         "1" => {
    #             name: "constant.other.placeholder"
    #         }
    #     }
    # }
]
# Exactly three dots (not part of a longer dot run) — the variadic "..." marker.
cpp_grammar[:vararg_ellipses] = {
    match: "(?<!\\.)\\.\\.\\.(?!\\.)",
    name: "punctuation.vararg-ellipses"
}
# Generic #if/#ifdef/#ifndef ... #endif region (statement-level variant — the
# region body falls through to $initial_context). The first inner rule scopes
# the condition expression on the directive line itself, stopping at a comment
# or an unescaped newline.
cpp_grammar[:preprocessor_rule_conditional] = {
    begin: "^\\s*((#)\\s*if(?:n?def)?\\b)",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "^\\s*((#)\\s*endif\\b)",
    endCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    patterns: [
        {
            # remainder of the directive line (the condition expression)
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#preprocessor_rule_enabled_elif"
        },
        {
            include: "#preprocessor_rule_enabled_else"
        },
        {
            include: "#preprocessor_rule_disabled_elif"
        },
        {
            # any other #elif: just scope its condition line
            begin: "^\\s*((#)\\s*elif\\b)",
            beginCaptures: {
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "$initial_context"
        }
    ]
}
# Same as :preprocessor_rule_conditional but for use inside a `{}` block:
# the region body and branch handlers use the *_block variants and
# #block_context instead of $initial_context.
cpp_grammar[:preprocessor_rule_conditional_block] = {
    begin: "^\\s*((#)\\s*if(?:n?def)?\\b)",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "^\\s*((#)\\s*endif\\b)",
    endCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    patterns: [
        {
            # remainder of the directive line (the condition expression)
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#preprocessor_rule_enabled_elif_block"
        },
        {
            include: "#preprocessor_rule_enabled_else_block"
        },
        {
            include: "#preprocessor_rule_disabled_elif"
        },
        {
            begin: "^\\s*((#)\\s*elif\\b)",
            beginCaptures: {
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#block_context"
        }
    ]
}
# Patterns valid inside a #if/#elif condition expression: `defined`, literals,
# the ?: ternary, operators, macro names, and nested parentheses (which recurse
# into this same context).
cpp_grammar[:preprocessor_rule_conditional_line_context] = [
    {
        # `defined` used correctly: at end of line, or followed by an
        # (optionally parenthesized) identifier and then an expression boundary
        match: "(?:\\bdefined\\b\\s*$)|(?:\\bdefined\\b(?=\\s*\\(*\\s*(?:(?!defined\\b)[a-zA-Z_$][\\w$]*\\b)\\s*\\)*\\s*(?:\\n|//|/\\*|\\?|\\:|&&|\\|\\||\\\\\\s*\\n)))",
        name: "keyword.control.directive.conditional"
    },
    {
        # any other `defined` (e.g. `defined defined`) is an illegal macro name
        match: "\\bdefined\\b",
        name: "invalid.illegal.macro-name"
    },
    :comments_context,
    :string_context_c,
    :number_literal,
    {
        begin: "\\?",
        beginCaptures: {
            "0" => {
                name: "keyword.operator.ternary"
            }
        },
        end: ":",
        endCaptures: {
            "0" => {
                name: "keyword.operator.ternary"
            }
        },
        patterns: [
            {
                include: "#preprocessor_rule_conditional_line_context"
            }
        ]
    },
    :operators,
    :language_constants,
    {
        match: preprocessor_name_no_bounds,
        name: "entity.name.function.preprocessor"
    },
    :line_continuation_character,
    {
        # parenthesized sub-expression; also ends at a comment or unescaped newline
        begin: "\\(",
        beginCaptures: {
            "0" => {
                name: "punctuation.section.parens.begin.bracket.round"
            }
        },
        end: "\\)|(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
        endCaptures: {
            "0" => {
                name: "punctuation.section.parens.end.bracket.round"
            }
        },
        patterns: [
            {
                include: "#preprocessor_rule_conditional_line_context"
            }
        ]
    }
]
# `#if 0 ... #endif` (statement-level): the if-branch content is scoped as a
# block comment; #elif/#else branches are handled by the enabled/disabled
# sub-rules so a later live branch is still highlighted normally.
cpp_grammar[:preprocessor_rule_disabled] = {
    begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0+\\b\\)*\\s*(?:$|//|/\\*))",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "^\\s*((#)\\s*endif\\b)",
    endCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    patterns: [
        {
            # remainder of the `#if 0` line itself
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#comments_context"
        },
        {
            include: "#preprocessor_rule_enabled_elif"
        },
        {
            include: "#preprocessor_rule_enabled_else"
        },
        {
            include: "#preprocessor_rule_disabled_elif"
        },
        {
            # non-constant #elif: scope its condition, then normal highlighting
            # until the next branch directive
            begin: "^\\s*((#)\\s*elif\\b)",
            beginCaptures: {
                "0" => {
                    name: "meta.preprocessor"
                },
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=^\\s*((#)\\s*(?:elif|else|endif)\\b))",
            patterns: [
                {
                    begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
                    end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
                    name: "meta.preprocessor",
                    patterns: [
                        {
                            include: "#preprocessor_rule_conditional_line_context"
                        }
                    ]
                },
                {
                    include: "$initial_context"
                }
            ]
        },
        {
            # the dead if-branch body: render as a comment (pragma marks still show)
            begin: "\\n",
            end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
            "contentName" => "comment.block.preprocessor.if-branch",
            patterns: [
                {
                    include: "#disabled"
                },
                {
                    include: "#pragma_mark"
                }
            ]
        }
    ]
}
# In-block variant of :preprocessor_rule_disabled — same structure, but live
# branches use #block_context and the dead-branch comment scope carries
# `.in-block`.
cpp_grammar[:preprocessor_rule_disabled_block] = {
    begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0+\\b\\)*\\s*(?:$|//|/\\*))",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "^\\s*((#)\\s*endif\\b)",
    endCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    patterns: [
        {
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#comments_context"
        },
        {
            include: "#preprocessor_rule_enabled_elif_block"
        },
        {
            include: "#preprocessor_rule_enabled_else_block"
        },
        {
            include: "#preprocessor_rule_disabled_elif"
        },
        {
            begin: "^\\s*((#)\\s*elif\\b)",
            beginCaptures: {
                "0" => {
                    name: "meta.preprocessor"
                },
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=^\\s*((#)\\s*(?:elif|else|endif)\\b))",
            patterns: [
                {
                    begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
                    end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
                    name: "meta.preprocessor",
                    patterns: [
                        {
                            include: "#preprocessor_rule_conditional_line_context"
                        }
                    ]
                },
                {
                    include: "#block_context"
                }
            ]
        },
        {
            begin: "\\n",
            end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
            "contentName" => "comment.block.preprocessor.if-branch.in-block",
            patterns: [
                {
                    include: "#disabled"
                },
                {
                    include: "#pragma_mark"
                }
            ]
        }
    ]
}
# `#elif 0`: scope the condition line, then render the branch body as a comment
# until the next branch directive.
cpp_grammar[:preprocessor_rule_disabled_elif] = {
    begin: "^\\s*((#)\\s*elif\\b)(?=\\s*\\(*\\b0+\\b\\)*\\s*(?:$|//|/\\*))",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "(?=^\\s*((#)\\s*(?:elif|else|endif)\\b))",
    patterns: [
        {
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#comments_context"
        },
        {
            begin: "\\n",
            end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
            "contentName" => "comment.block.preprocessor.elif-branch",
            patterns: [
                {
                    include: "#disabled"
                },
                {
                    include: "#pragma_mark"
                }
            ]
        }
    ]
}
# `#if 1 ... #endif` (statement-level): the if-branch is highlighted normally;
# any #else/#elif branch is necessarily dead and is scoped as a comment.
cpp_grammar[:preprocessor_rule_enabled] = {
    begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        },
        # NOTE(review): the begin regex has only two capture groups, so this
        # "3" capture can never apply — looks vestigial; confirm before removing
        "3" => {
            name: "constant.numeric.preprocessor"
        }
    },
    end: "^\\s*((#)\\s*endif\\b)",
    endCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    patterns: [
        {
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#comments_context"
        },
        {
            # dead #else branch → comment scope
            begin: "^\\s*((#)\\s*else\\b)",
            beginCaptures: {
                "0" => {
                    name: "meta.preprocessor"
                },
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=^\\s*((#)\\s*endif\\b))",
            "contentName" => "comment.block.preprocessor.else-branch",
            patterns: [
                {
                    include: "#disabled"
                },
                {
                    include: "#pragma_mark"
                }
            ]
        },
        {
            # dead #elif branch → comment scope
            begin: "^\\s*((#)\\s*elif\\b)",
            beginCaptures: {
                "0" => {
                    name: "meta.preprocessor"
                },
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
            "contentName" => "comment.block.preprocessor.if-branch",
            patterns: [
                {
                    include: "#disabled"
                },
                {
                    include: "#pragma_mark"
                }
            ]
        },
        {
            # live if-branch body: normal highlighting
            begin: "\\n",
            end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
            patterns: [
                {
                    include: "$initial_context"
                }
            ]
        }
    ]
}
# In-block variant of :preprocessor_rule_enabled — live branch uses
# #block_context; dead-branch comment scopes carry `.in-block`.
cpp_grammar[:preprocessor_rule_enabled_block] = {
    begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "^\\s*((#)\\s*endif\\b)",
    endCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    patterns: [
        {
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#comments_context"
        },
        {
            begin: "^\\s*((#)\\s*else\\b)",
            beginCaptures: {
                "0" => {
                    name: "meta.preprocessor"
                },
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=^\\s*((#)\\s*endif\\b))",
            "contentName" => "comment.block.preprocessor.else-branch.in-block",
            patterns: [
                {
                    include: "#disabled"
                },
                {
                    include: "#pragma_mark"
                }
            ]
        },
        {
            begin: "^\\s*((#)\\s*elif\\b)",
            beginCaptures: {
                "0" => {
                    name: "meta.preprocessor"
                },
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
            "contentName" => "comment.block.preprocessor.if-branch.in-block",
            patterns: [
                {
                    include: "#disabled"
                },
                {
                    include: "#pragma_mark"
                }
            ]
        },
        {
            begin: "\\n",
            end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
            patterns: [
                {
                    include: "#block_context"
                }
            ]
        }
    ]
}
# `#elif 1` (statement-level): this branch is live, so highlight it normally;
# any following #else/#elif before the #endif is dead and scoped as a comment.
cpp_grammar[:preprocessor_rule_enabled_elif] = {
    begin: "^\\s*((#)\\s*elif\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "(?=^\\s*((#)\\s*endif\\b))",
    patterns: [
        {
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#comments_context"
        },
        {
            begin: "\\n",
            end: "(?=^\\s*((#)\\s*(?:endif)\\b))",
            patterns: [
                {
                    # dead #else branch → comment scope
                    begin: "^\\s*((#)\\s*(else)\\b)",
                    beginCaptures: {
                        "0" => {
                            name: "meta.preprocessor"
                        },
                        "1" => {
                            name: "keyword.control.directive.conditional"
                        },
                        "2" => {
                            name: "punctuation.definition.directive"
                        }
                    },
                    end: "(?=^\\s*((#)\\s*endif\\b))",
                    "contentName" => "comment.block.preprocessor.elif-branch",
                    patterns: [
                        {
                            include: "#disabled"
                        },
                        {
                            include: "#pragma_mark"
                        }
                    ]
                },
                {
                    # dead later #elif branch → comment scope
                    begin: "^\\s*((#)\\s*(elif)\\b)",
                    beginCaptures: {
                        "0" => {
                            name: "meta.preprocessor"
                        },
                        "1" => {
                            name: "keyword.control.directive.conditional"
                        },
                        "2" => {
                            name: "punctuation.definition.directive"
                        }
                    },
                    end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
                    "contentName" => "comment.block.preprocessor.elif-branch",
                    patterns: [
                        {
                            include: "#disabled"
                        },
                        {
                            include: "#pragma_mark"
                        }
                    ]
                },
                {
                    include: "$initial_context"
                }
            ]
        }
    ]
}
# In-block variant of :preprocessor_rule_enabled_elif — live content uses
# #block_context; the dead #else branch comment scope carries `.in-block`.
cpp_grammar[:preprocessor_rule_enabled_elif_block] = {
    begin: "^\\s*((#)\\s*elif\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "(?=^\\s*((#)\\s*endif\\b))",
    patterns: [
        {
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#comments_context"
        },
        {
            begin: "\\n",
            end: "(?=^\\s*((#)\\s*(?:endif)\\b))",
            patterns: [
                {
                    begin: "^\\s*((#)\\s*(else)\\b)",
                    beginCaptures: {
                        "0" => {
                            name: "meta.preprocessor"
                        },
                        "1" => {
                            name: "keyword.control.directive.conditional"
                        },
                        "2" => {
                            name: "punctuation.definition.directive"
                        }
                    },
                    end: "(?=^\\s*((#)\\s*endif\\b))",
                    "contentName" => "comment.block.preprocessor.elif-branch.in-block",
                    patterns: [
                        {
                            include: "#disabled"
                        },
                        {
                            include: "#pragma_mark"
                        }
                    ]
                },
                {
                    begin: "^\\s*((#)\\s*(elif)\\b)",
                    beginCaptures: {
                        "0" => {
                            name: "meta.preprocessor"
                        },
                        "1" => {
                            name: "keyword.control.directive.conditional"
                        },
                        "2" => {
                            name: "punctuation.definition.directive"
                        }
                    },
                    end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
                    "contentName" => "comment.block.preprocessor.elif-branch",
                    patterns: [
                        {
                            include: "#disabled"
                        },
                        {
                            include: "#pragma_mark"
                        }
                    ]
                },
                {
                    include: "#block_context"
                }
            ]
        }
    ]
}
# A live #else branch (statement-level): highlight normally until #endif.
cpp_grammar[:preprocessor_rule_enabled_else] = {
    begin: "^\\s*((#)\\s*else\\b)",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "(?=^\\s*((#)\\s*endif\\b))",
    patterns: [
        {
            include: "$initial_context"
        }
    ]
}
# A live #else branch inside a `{}` block: same as above but with #block_context.
cpp_grammar[:preprocessor_rule_enabled_else_block] = {
    begin: "^\\s*((#)\\s*else\\b)",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "(?=^\\s*((#)\\s*endif\\b))",
    patterns: [
        {
            include: "#block_context"
        }
    ]
}
# Patterns for the replacement text of a #define. Every begin/end rule here
# also terminates on an unescaped end-of-line, since a macro body ends there
# unless continued with a backslash.
cpp_grammar[:preprocessor_rule_define_line_context] = [
    :vararg_ellipses,
    {
        # #param / ##param stringize/paste operators on macro arguments
        match: /##?/.then(variable_name_without_bounds).lookAheadToAvoid(@standard_character),
        name: "variable.other.macro.argument"
    },
    {
        begin: "{",
        beginCaptures: {
            "0" => {
                name: "punctuation.section.block.begin.bracket.curly"
            }
        },
        end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)|(?<!\\\\)(?=\\s*\\n)",
        endCaptures: {
            "0" => {
                name: "punctuation.section.block.end.bracket.curly"
            }
        },
        name: "meta.block",
        patterns: [
            {
                include: "#preprocessor_rule_define_line_blocks_context"
            }
        ]
    },
    {
        match: "\\(",
        name: "punctuation.section.parens.begin.bracket.round"
    },
    {
        match: "\\)",
        name: "punctuation.section.parens.end.bracket.round"
    },
    {
        # a function definition/call shape inside the macro body (name followed
        # by parens, excluding keywords that look like calls)
        begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas|asm|__asm__|auto|bool|_Bool|char|_Complex|double|enum|float|_Imaginary|int|long|short|signed|struct|typedef|union|unsigned|void)\\s*\\()\n(?=\n  (?:[A-Za-z_][A-Za-z0-9_]*+|::)++\\s*\\(  # actual name\n  |\n  (?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\\s*\\(\n)",
        end: "(?<=\\))(?!\\w)|(?<!\\\\)(?=\\s*\\n)",
        name: "meta.function",
        patterns: [
            {
                include: "#preprocessor_rule_define_line_functions_context"
            }
        ]
    },
    {
        begin: "\"",
        beginCaptures: {
            "0" => {
                name: "punctuation.definition.string.begin"
            }
        },
        end: "\"|(?<!\\\\)(?=\\s*\\n)",
        endCaptures: {
            "0" => {
                name: "punctuation.definition.string.end"
            }
        },
        name: "string.quoted.double",
        patterns: [
            {
                include: "#string_escapes_context_c"
            },
            {
                include: "#line_continuation_character"
            }
        ]
    },
    {
        begin: "'",
        beginCaptures: {
            "0" => {
                name: "punctuation.definition.string.begin"
            }
        },
        end: "'|(?<!\\\\)(?=\\s*\\n)",
        endCaptures: {
            "0" => {
                name: "punctuation.definition.string.end"
            }
        },
        name: "string.quoted.single",
        patterns: [
            {
                include: "#string_escapes_context_c"
            },
            {
                include: "#line_continuation_character"
            }
        ]
    },
    :method_access,
    :member_access,
    :$initial_context
]
# Nested `{}` blocks inside a #define body: recurses into itself for deeper
# nesting and otherwise defers to the define-line context.
cpp_grammar[:preprocessor_rule_define_line_blocks_context] = [
    {
        begin: "{",
        beginCaptures: {
            "0" => {
                name: "punctuation.section.block.begin.bracket.curly"
            }
        },
        end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)|(?<!\\\\)(?=\\s*\\n)",
        endCaptures: {
            "0" => {
                name: "punctuation.section.block.end.bracket.curly"
            }
        },
        patterns: [
            {
                include: "#preprocessor_rule_define_line_blocks_context"
            },
            {
                include: "#preprocessor_rule_define_line_context"
            }
        ]
    },
    {
        include: "#preprocessor_rule_define_line_context"
    }
]
# Function-call shapes inside a #define body: tags the callee name and its
# argument parens; bare parens recurse; ends on an unescaped end-of-line.
cpp_grammar[:preprocessor_rule_define_line_functions_context] = [
    :comments_context,
    :storage_types,
    :vararg_ellipses,
    :method_access,
    :member_access,
    :operators,
    {
        begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\\s*\\()\n(\n(?:[A-Za-z_][A-Za-z0-9_]*+|::)++  # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\n)\n\\s*(\\()",
        beginCaptures: {
            "1" => {
                name: "entity.name.function"
            },
            "2" => {
                name: "punctuation.section.arguments.begin.bracket.round"
            }
        },
        end: "(\\))|(?<!\\\\)(?=\\s*\\n)",
        endCaptures: {
            "1" => {
                name: "punctuation.section.arguments.end.bracket.round"
            }
        },
        patterns: [
            {
                include: "#preprocessor_rule_define_line_functions_context"
            }
        ]
    },
    {
        begin: "\\(",
        beginCaptures: {
            "0" => {
                name: "punctuation.section.parens.begin.bracket.round"
            }
        },
        end: "(\\))|(?<!\\\\)(?=\\s*\\n)",
        endCaptures: {
            "1" => {
                name: "punctuation.section.parens.end.bracket.round"
            }
        },
        patterns: [
            {
                include: "#preprocessor_rule_define_line_functions_context"
            }
        ]
    },
    :preprocessor_rule_define_line_context
]
# C-style function *definition* context: tags the function name and its
# parameter-list parens (parameters handled by #probably_a_parameter),
# recursing for nested parens.
cpp_grammar[:function_context_c] = [
    :attributes,
    :comments_context,
    :storage_types,
    :operators,
    :vararg_ellipses,
    {
        name: "meta.function.definition.parameters",
        begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\\s*\\()\n(\n(?:[A-Za-z_][A-Za-z0-9_]*+|::)++  # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\n)\n\\s*(\\()",
        beginCaptures: {
            "1" => {
                name: "entity.name.function"
            },
            "2" => {
                name: "punctuation.section.parameters.begin.bracket.round"
            },
        },
        # `:` also ends the range (e.g. a constructor's member-initializer list)
        end: /\)|:/,
        endCaptures: {
            "0" => {
                name: "punctuation.section.parameters.end.bracket.round"
            }
        },
        patterns: [
            {
                include: "#probably_a_parameter"
            },
            {
                include: "#function_context_c"
            }
        ]
    },
    {
        begin: "\\(",
        beginCaptures: {
            "0" => {
                name: "punctuation.section.parens.begin.bracket.round"
            }
        },
        end: "\\)",
        endCaptures: {
            "0" => {
                name: "punctuation.section.parens.end.bracket.round"
            }
        },
        patterns: [
            {
                include: "#function_context_c"
            }
        ]
    },
    :$initial_context
]
# C-style function *call* context: special-cases `new` (with an optional
# template argument list) before falling back to #function_call, plus bare
# parens that recurse.
cpp_grammar[:function_call_context_c] = [
    :attributes,
    :comments_context,
    :storage_types,
    :method_access,
    :member_access,
    :operators,
    {
        begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\\s*\\()\n(\n(?:new)\\s*(#{maybe(template_call.without_numbered_capture_groups)})  # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\n)\n\\s*(\\()",
        beginCaptures: {
            "1" => {
                name: "keyword.operator.wordlike memory keyword.operator.new"
            },
            "2" => {
                patterns: [
                    {
                        include: "#template_call_innards"
                    }
                ]
            },
            "3" => {
                name: "punctuation.section.arguments.begin.bracket.round"
            },
        },
        end: "\\)",
        endCaptures: {
            "0" => {
                name: "punctuation.section.arguments.end.bracket.round"
            }
        },
        patterns: [
            {
                include: "#function_call_context_c"
            }
        ]
    },
    :function_call,
    {
        begin: "\\(",
        beginCaptures: {
            "0" => {
                name: "punctuation.section.parens.begin.bracket.round"
            }
        },
        end: "\\)",
        endCaptures: {
            "0" => {
                name: "punctuation.section.parens.end.bracket.round"
            }
        },
        patterns: [
            {
                include: "#function_call_context_c"
            }
        ]
    },
    :block_context
]
# Change to this script's directory so the relative output paths below resolve.
Dir.chdir __dir__
# Save
# Writes the generated grammar as YAML and JSON next to the syntaxes/ folder,
# and dumps the scope tags for inspection.
@syntax_location = "../syntaxes/cpp.tmLanguage"
cpp_grammar.saveAsYamlTo(@syntax_location)
cpp_grammar.saveAsJsonTo(@syntax_location)
cpp_grammar.saveTagsTo("tags.txt")
# TODO, upgrade the code so this is not necessary
# for exporting to C
@cpp_grammar = cpp_grammar
# fix for array of pointers
require_relative '../textmate_tools.rb'
require_relative './tokens.rb'
require_relative '../shared/numeric.rb'
# todo
# fix initializer list "functions" e.g. `int a{5};`
# fix the ... inside of macros
# have all patterns with keywords be dynamically generated
# Top-level grammar object for C++ (scope source.cpp). The version field points
# at the repository revision this generated grammar corresponds to; the
# information_for_contributors strings are emitted verbatim into the generated
# grammar file.
cpp_grammar = Grammar.new(
    name:"C++",
    scope_name: "source.cpp",
    version: "https://github.com/jeff-hykin/cpp-textmate-grammar/blob/master/syntaxes/cpp.tmLanguage.json",
    information_for_contributors: [
        # typo fixes: "readble" -> "readable", "file essentially" -> "file is essentially"
        "This code was auto generated by a much-more-readable ruby file: https://github.com/jeff-hykin/cpp-textmate-grammar/blob/master/generate.rb",
        "This file is essentially an updated/improved fork of the atom syntax https://github.com/atom/language-c/blob/master/grammars/c%2B%2B.cson",
    ],
)
#
# Utils
#
# Statement terminator; also stored in @semicolon so blockFinderFor (below)
# can reuse it when building end patterns.
cpp_grammar[:semicolon] = @semicolon = newPattern(
    match: /;/,
    tag_as: "punctuation.terminator.statement",
)
# Generic comma separator.
cpp_grammar[:comma] = newPattern(
    match: /,/,
    tag_as: "comma punctuation.separator.delimiter"
)
# Builds a Range for a `{ ... }`-delimited construct (class, struct, switch, …)
# and splits it into three tagged sub-ranges:
#   head — from start_pattern up to the opening `{` (tag meta.head.<name>)
#   body — the contents of the braces          (tag meta.body.<name>)
#   tail — from the closing `}` to the `;`     (tag meta.tail.<name>)
#
# name:            suffix appended to the meta.* tags above
# tag_as:          tag applied to the whole range
# start_pattern:   pattern that opens the construct (e.g. its keyword)
# needs_semicolon: when true the construct must be terminated by `;`
#                  (class/struct); when false the closing `}` alone ends it
# *_includes:      sub-patterns active in the corresponding sub-range
def blockFinderFor( name:"", tag_as:"", start_pattern:nil, needs_semicolon: true, primary_includes: [], head_includes:[], body_includes: [ :$initial_context ], tail_includes: [ :$initial_context ], secondary_includes:[])
    # characters that, when seen ahead, mean the construct has ended
    lookahead_endings = /[;>\[\]=]/
    if needs_semicolon
        end_pattern = newPattern(
            match: newPattern(
                # `};`, possibly with spaces between the brace and semicolon
                lookBehindFor(/}/).maybe(@spaces).then(@semicolon)
            ).or(
                @semicolon
            ).or(
                lookAheadFor(lookahead_endings)
            )
        )
    else
        end_pattern = lookBehindFor(/\}/).or(lookAheadFor(lookahead_endings))
    end
    return Range.new(
        tag_as: tag_as,
        start_pattern: newPattern(
            match: start_pattern,
            tag_as: "meta.head."+name,
        ),
        end_pattern: end_pattern,
        includes: [
            *primary_includes,
            # Head
            Range.new(
                tag_as: "meta.head."+name,
                start_pattern: /\G| /,
                end_pattern: newPattern(
                    match: /\{/.or(lookAheadFor(/;/)),
                    tag_as: "punctuation.section.block.begin.bracket.curly."+name
                ),
                includes: head_includes
            ),
            # Body
            Range.new(
                tag_as: "meta.body."+name, # body is everything in the {}'s
                start_pattern: lookBehindFor(/\{/),
                end_pattern: newPattern(
                    match: /\}/,
                    tag_as: "punctuation.section.block.end.bracket.curly."+name
                ),
                includes: body_includes
            ),
            # Tail
            Range.new(
                tag_as: "meta.tail."+name,
                start_pattern: lookBehindFor(/}/).then(/[\s\n]*/),
                end_pattern: newPattern(/[\s\n]*/).lookAheadFor(/;/),
                includes: tail_includes
            ),
            *secondary_includes
        ]
    )
end
#
#
# Contexts
#
#
# The root context: every pattern active at top level. Order matters — earlier
# entries take precedence when more than one could match at the same position.
cpp_grammar[:$initial_context] = [
    :parameter_struct, # TODO this is here because it needs to activate inside of function-pointer parameters. Once function-pointer syntax is implemented, remove it from here
    :struct_declare,
    :special_block_context,
    :macro_argument,
    :string_context,
    :functional_specifiers_pre_parameters,
    :qualifiers_and_specifiers_post_parameters,
    :storage_specifiers,
    :access_control_keywords,
    :exception_keywords,
    :other_keywords,
    :memory_operators,
    :the_this_keyword,
    :language_constants,
    :template_isolated_definition,
    :template_definition,
    :scope_resolution,
    :misc_storage_modifiers_1,
    :destructor,
    :destructor_prototype,
    :lambdas,
    :preprocessor_context,
    :comments_context,
    :switch_statement,
    :control_flow_keywords,
    :storage_types,
    :assembly,
    :misc_storage_modifiers_2,
    :operator_overload,
    :number_literal,
    :string_context_c,
    :meta_preprocessor_macro,
    :meta_preprocessor_diagnostic,
    :meta_preprocessor_include,
    :pragma_mark,
    :meta_preprocessor_line,
    :meta_preprocessor_undef,
    :meta_preprocessor_pragma,
    :operators,
    :block,
    :parentheses,
    :function_definition,
    :line_continuation_character,
    :square_brackets,
    :empty_square_brackets,
    :semicolon,
    :comma,
]
# Preprocessor handling: the enabled/disabled #if rules run before the generic
# conditional rule; stray directives are caught last.
cpp_grammar[:preprocessor_context] = [
    :preprocessor_rule_enabled,
    :preprocessor_rule_disabled,
    :preprocessor_rule_conditional,
    :hacky_fix_for_stray_directive,
]
# All type-name patterns grouped for reuse.
cpp_grammar[:storage_types] = [
    :primitive_types,
    :non_primitive_types,
    :pthread_types,
    :posix_reserved_types,
]
# eventually this context will be more exclusive (can't have class definitons inside of an evaluation)
# but for now it just includes everything
cpp_grammar[:evaluation_context] = [
    :$initial_context
    # function call
    # number literal
    # lambdas
]
# eventually this context will be more exclusive (can't have class definitons inside of an if statement)
# but for now it just includes everything
cpp_grammar[:conditional_context] = [
    :$initial_context
]
# Patterns valid between the <> of a template *definition*.
cpp_grammar[:template_definition_context] = [
    :scope_resolution,
    :template_definition_argument,
    :template_argument_defaulted,
    :template_call_innards,
    :evaluation_context
]
# Patterns valid between the <> of a template *call* (argument list).
cpp_grammar[:template_call_context] = [
    :storage_types,
    :language_constants,
    :scope_resolution,
    :user_defined_template_type,
    :operators,
    :number_literal,
    :string_context,
    :comma_in_template_argument
]
#
#
# Numbers
#
#
#
# Number Literal
#
# Numeric literals come from the shared numeric.rb helper; C++ allows
# user-defined literal suffixes (e.g. 10_km), hence the flag.
cpp_grammar[:number_literal] = numeric_constant(allow_user_defined_literals: true)
#
# Variable
#
# Universal-character-names usable in identifiers: \uXXXX (4 hex digits) or
# \UXXXXXXXX (8 hex digits) per the C++ standard [lex.charset].
# FIX: the second alternative was /\\U000[0-9a-fA-F]/, which matched only
# `\U` + "000" + one hex digit (4 characters) and therefore never matched any
# valid 8-digit form such as \U0001F34C.
universal_character = /\\u[0-9a-fA-F]{4}/.or(/\\U[0-9a-fA-F]{8}/)
first_character = /[a-zA-Z_]/.or(universal_character)
subsequent_character = /[a-zA-Z0-9_]/.or(universal_character)
identifier = first_character.then(zeroOrMoreOf(subsequent_character))
# todo: make a better name for this function
# Wraps a pattern with lookarounds so it only matches on identifier boundaries.
variableBounds = ->(regex_pattern) do
    lookBehindToAvoid(@standard_character).then(regex_pattern).lookAheadToAvoid(@standard_character)
end
variable_name_without_bounds = identifier
# word bounds are inefficient, but they are accurate
variable_name = variableBounds[variable_name_without_bounds]
#
# Constants
#
# Literal keywords (per the token list's :isLiteral flag); $match puts the
# matched word itself into the scope name.
cpp_grammar[:language_constants] = newPattern(
    match: variableBounds[@cpp_tokens.that(:isLiteral)],
    tag_as: "constant.language.$match"
)
#
# Built-In Types
#
# NOTE(review): look_behind_for_type is defined here but not referenced in this
# part of the file — confirm it is used further down before removing.
look_behind_for_type = lookBehindFor(/\w |\*\/|[&*>\]\)]|\.\.\./).maybe(@spaces)
# Built-in primitive type keywords (int, char, …).
cpp_grammar[:primitive_types] = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isPrimitive) ],
    tag_as: "storage.type.primitive"
)
# Known non-primitive standard types.
cpp_grammar[:non_primitive_types] = newPattern(
    match: variableBounds[@cpp_tokens.that(not(:isPrimitive), :isType)],
    tag_as: "storage.type"
)
#
# Keywords and Keyword-ish things
#
# Specifiers that appear before a function's parameter list (e.g. explicit).
cpp_grammar[:functional_specifiers_pre_parameters] = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isFunctionSpecifier) ],
    tag_as: "storage.modifier.specificer.functional.pre-parameters.$match"
)
# Qualifiers after the parameter list, only when followed by `{`, `;` or EOL.
cpp_grammar[:qualifiers_and_specifiers_post_parameters] = newPattern(
    match: variableBounds[ @cpp_tokens.that(:canAppearAfterParametersBeforeBody) ].lookAheadFor(/\s*/.then(/\{/.or(/;/).or(/[\n\r]/))),
    tag_as: "storage.modifier.specifier.functional.post-parameters.$match"
)
# Storage specifiers (static, extern, …).
cpp_grammar[:storage_specifiers] = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isStorageSpecifier) ],
    tag_as: "storage.modifier.specifier.$match"
)
# Access specifiers including the trailing colon (public:, private:, …).
cpp_grammar[:access_control_keywords] = newPattern(
    match: lookBehindToAvoid(@standard_character).then(@cpp_tokens.that(:isAccessSpecifier)).maybe(@spaces).then(/:/),
    tag_as: "storage.type.modifier.access.control.$match"
)
# Exception-handling keywords (try, catch, throw, …).
cpp_grammar[:exception_keywords] = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isExceptionRelated) ],
    tag_as: "keyword.control.exception.$match"
)
cpp_grammar[:other_keywords] = newPattern(
    match: variableBounds[ /(using|typedef)/ ],
    tag_as: "keyword.other.$match"
)
cpp_grammar[:the_this_keyword] = the_this_keyword = newPattern(
    match: variableBounds[ /this/ ],
    tag_as: "variable.language.this"
)
# TODO: enhance casting operators to include <>'s
# NOTE(review): assigned to a local only — confirm it is registered into the
# grammar further down the file.
type_casting_operators = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isTypeCastingOperator) ],
    tag_as: "keyword.operator.wordlike keyword.operator.cast.$match"
)
# delete / delete[] / new, with the [] of delete[] tagged separately.
cpp_grammar[:memory_operators] = newPattern(
    tag_as: "keyword.operator.wordlike memory",
    match: lookBehindToAvoid(
        @standard_character
    ).then(
        newPattern(
            newPattern(
                match: /delete/,
                tag_as: "keyword.operator.delete.array"
            ).maybe(@spaces).then(
                match: /\[\]/,
                tag_as: "keyword.operator.delete.array.bracket"
            )
        ).or(
            match: /delete/,
            tag_as: "keyword.operator.delete"
        ).or(
            match: /new/,
            tag_as: "keyword.operator.new"
        )
    ).lookAheadToAvoid(@standard_character)
)
# Control-flow keywords (if, while, return, …).
cpp_grammar[:control_flow_keywords] = control_flow_keywords = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isControlFlow) ],
    tag_as: "keyword.control.$match"
)
#
# Control flow
#
# `default:` label inside a switch body.
cpp_grammar[:default_statement] = Range.new(
    tag_as: "meta.conditional.case",
    start_pattern: newPattern(
        match: variableBounds[ /default/ ],
        tag_as: "keyword.control.default"
    ),
    end_pattern: newPattern(
        match: /:/,
        tag_as: "colon punctuation.separator.case.default"
    ),
    includes: [:conditional_context]
)
# `case <expr>:` label inside a switch body.
cpp_grammar[:case_statement] = Range.new(
    tag_as: "meta.conditional.case",
    start_pattern: newPattern(
        match: variableBounds[ /case/ ],
        tag_as: "keyword.control.case"
    ),
    end_pattern: newPattern(
        match: /:/,
        tag_as: "colon punctuation.separator.case"
    ),
    includes: [:conditional_context]
)
# The `( ... )` condition following the switch keyword.
cpp_grammar[:switch_conditional_parentheses] = Range.new(
    tag_as: "meta.conditional.switch",
    start_pattern: newPattern(
        match: /\(/,
        tag_as: 'punctuation.section.parens.begin.bracket.round.conditional.switch'
    ),
    end_pattern: newPattern(
        match: /\)/,
        tag_as: 'punctuation.section.parens.end.bracket.round.conditional.switch'
    ),
    includes: [ :conditional_context ]
)
# A full switch statement, built with blockFinderFor: the head holds the
# condition; the body additionally understands case/default labels.
cpp_grammar[:switch_statement] = blockFinderFor(
    name: "switch",
    tag_as: "meta.block.switch",
    start_pattern: newPattern(
        match: variableBounds[/switch/],
        tag_as: "keyword.control.switch"
    ),
    primary_includes: [
        :switch_conditional_parentheses
    ],
    head_includes: [
        :switch_conditional_parentheses,
        :$initial_context
    ],
    body_includes: [
        :default_statement,
        :case_statement,
        :$initial_context,
    ],
    needs_semicolon: false,
)
#
# C++ Attributes
#
# C++/GCC/MSVC attribute blocks, e.g. [[nodiscard]], __attribute((...)),
# __declspec(...). Supports nesting, `using ns:` prefixes, and tags each
# attribute word with its own name via $match.
cpp_grammar[:attributes] = Range.new(
    tag_as: "support.other.attribute",
    start_pattern: newPattern(
        match: @cpp_tokens.that(:isAttributeStart),
        tag_as: "punctuation.section.attribute.begin",
    ),
    end_pattern: newPattern(
        match: @cpp_tokens.that(:isAttributeEnd),
        tag_as: "punctuation.section.attribute.end",
    ),
    includes: [
        # allow nested attributes
        :attributes,
        Range.new(
            start_pattern: newPattern(/\(/),
            end_pattern: newPattern(/\)/),
            includes: [
                :attributes,
                :string_context_c,
            ],
        ),
        newPattern(match: /using/, tag_as: "keyword.other.using.directive")
            .then(@spaces).then(
                match: variable_name,
                tag_as: "entity.name.type.namespace",
            ),
        newPattern(match: /,/, tag_as: "punctuation.separator.attribute"),
        newPattern(match: /:/, tag_as: "punctuation.accessor.attribute"),
        newPattern(
            match: variable_name.lookAheadFor(/::/),
            tag_as: "entity.name.type.namespace"
        ),
        newPattern(match: variable_name, tag_as: "entity.other.attribute.$match"),
    ],
)
# Single-pattern (non-Range) form of an attribute, for embedding inside other
# patterns; matches any of the three attribute styles non-greedily.
inline_attribute = newPattern(
    should_fully_match:["[[nodiscard]]","__attribute((packed))","__declspec(fastcall)"],
    should_partial_match: ["struct [[deprecated]] st"],
    # match one of the three attribute styles
    match: newPattern(
        @cpp_tokens.that(:isAttributeStart, :isCppAttribute).then(/.*?/).then(@cpp_tokens.that(:isAttributeEnd, :isCppAttribute))
    ).or(
        @cpp_tokens.that(:isAttributeStart, :isGccAttribute).then(/.*?/).then(@cpp_tokens.that(:isAttributeEnd, :isGccAttribute))
    ).or(
        @cpp_tokens.that(:isAttributeStart, :isMsAttribute).then(/.*?/).then(@cpp_tokens.that(:isAttributeEnd, :isMsAttribute))
    ).lookAheadToAvoid(/\)/),
    includes: [
        :attributes,
    ],
)
#
# Templates
#
characters_in_template_call = /[\s<>:,\w]/
cpp_grammar[:user_defined_template_type] = newPattern(
match: variable_name,
tag_as: 'storage.type.user-defined'
)
cpp_grammar[:comma_in_template_argument] = newPattern(
match: /,/,
tag_as: "comma punctuation.separator.template.argument"
)
# note: template_call should indeally be a Range(), the reason its not is
# because it's embedded inside of other patterns
cpp_grammar[:template_call_innards] = template_call = newPattern(
tag_as: 'meta.template.call',
match: /</.zeroOrMoreOf(characters_in_template_call).then(/>/).maybe(@spaces),
includes: [:template_call_context]
)
cpp_grammar[:template_call_range] = Range.new(
tag_as: 'meta.template.call',
start_pattern: newPattern(
match: /</,
tag_as: "punctuation.section.angle-brackets.begin.template.call"
),
end_pattern: newPattern(
match: />/,
tag_as: "punctuation.section.angle-brackets.end.template.call"
),
includes: [:template_call_context]
)
template_start = lookBehindToAvoid(@standard_character).then(
match: /template/,
tag_as: "storage.type.template"
).maybe(@spaces).then(
match: /</,
tag_as: "punctuation.section.angle-brackets.start.template.definition"
)
# a template definition that is by itself on a line (this is ideal)
cpp_grammar[:template_isolated_definition] = newPattern(
match: template_start.then(
match: zeroOrMoreOf(/./),
tag_as: "meta.template.definition",
includes: [:template_definition_context],
).then(
match: />/.maybe(@spaces).then(/$/),
tag_as: "punctuation.section.angle-brackets.end.template.definition"
),
)
cpp_grammar[:template_definition] = Range.new(
tag_as: 'meta.template.definition',
start_pattern: template_start,
end_pattern: newPattern(
match: />/,
tag_as: "punctuation.section.angle-brackets.end.template.definition"
),
includes: [
# a template call inside of a non-isolated template definition
# however this is rolling the dice: because if there is a less-than operator in a defaulted argument, then this pattern will screw everything up
# a better solution would be nice, but its going to be difficult/impossible
Range.new(
start_pattern: newPattern(
match: lookBehindFor(/\w/).maybe(@spaces).then(/</),
tag_as: "punctuation.section.angle-brackets.begin.template.call"
),
end_pattern: newPattern(
match: />/,
tag_as: "punctuation.section.angle-brackets.begin.template.call"
),
includes: [:template_call_context]
),
:template_definition_context,
]
)
cpp_grammar[:template_argument_defaulted] = newPattern(
match: lookBehindFor(/<|,/).maybe(@spaces).then(
match: zeroOrMoreOf(variable_name_without_bounds.then(@spaces)),
tag_as: "storage.type.template",
).then(
match: variable_name_without_bounds,
tag_as: "entity.name.type.template"
).maybe(@spaces).then(
match: /[=]/,
tag_as: "keyword.operator.assignment"
)
)
cpp_grammar[:template_definition_argument] = newPattern(
match: maybe(
@spaces
# case 1: only one word
).then(
match: variable_name_without_bounds,
tag_as: "storage.type.template.argument.$match",
# case 2: normal situation (ex: "typename T")
).or(
newPattern(
match: oneOrMoreOf(variable_name_without_bounds.then(@spaces)),
tag_as: "storage.type.template.argument.$match",
).then(
match: variable_name_without_bounds,
tag_as: "entity.name.type.template",
)
# case 3: ellipses (ex: "typename... Args")
).or(
newPattern(
match: variable_name_without_bounds,
tag_as: "storage.type.template",
).maybe(@spaces).then(
match: /\.\.\./,
tag_as: "ellipses punctuation.vararg-ellipses.template.definition",
).maybe(@spaces).then(
match: variable_name_without_bounds,
tag_as: "entity.name.type.template"
)
).maybe(@spaces).then(
newPattern(
match: /,/,
tag_as: "comma punctuation.separator.template.argument",
).or(
lookAheadFor(/>|$/)
)
)
)
#
# Scope resolution
#
one_scope_resolution = variable_name_without_bounds.maybe(@spaces).maybe(template_call.without_numbered_capture_groups).then(/::/)
preceding_scopes = newPattern(
match: zeroOrMoreOf(one_scope_resolution).maybe(@spaces),
includes: [ :scope_resolution ]
)
cpp_grammar[:scope_resolution] = scope_resolution = newPattern(
tag_as: "meta.scope-resolution",
match: preceding_scopes.then(
match: variable_name_without_bounds,
tag_as: "entity.name.type.namespace.scope-resolution"
).maybe(@spaces).maybe(
template_call
).then(
match: /::/,
tag_as: "punctuation.separator.namespace.access"
)
)
#
# Functions
#
functionTemplate = ->(repository_name:nil, match_name: nil, tag_name_as: nil, tag_content_as: nil, tag_parenthese_as: nil) do
new_range = Range.new(
tag_content_as: "meta.#{tag_content_as}",
start_pattern: newPattern(
match: match_name,
tag_as: tag_name_as,
).then(
match: /\(/,
tag_as: "punctuation.section.arguments.begin.bracket.round.#{tag_parenthese_as}"
),
end_pattern: newPattern(
match: /\)/,
tag_as: "punctuation.section.arguments.end.bracket.round.#{tag_parenthese_as}"
),
includes: [
:evaluation_context
]
)
if repository_name
cpp_grammar[repository_name] = new_range
end
return new_range
end
cant_be_a_function_name = @cpp_tokens.that(:isWord, not(:isPreprocessorDirective), not(:isValidFunctionName))
avoid_invalid_function_names = lookBehindToAvoid(@standard_character).lookAheadToAvoid(maybe(@spaces).then(cant_be_a_function_name).maybe(@spaces).then(/\(/))
look_ahead_for_function_name = lookAheadFor(variable_name_without_bounds.maybe(@spaces).maybe(inline_attribute).maybe(@spaces).then(/\(/))
cpp_grammar[:struct_declare] = struct_declare = newPattern(
should_partial_match: [ "struct crypto_aead *tfm = crypto_aead_reqtfm(req);", "struct aegis_block blocks[AEGIS128L_STATE_BLOCKS];" ],
match: newPattern(
match: /struct/,
tag_as: "storage.type.struct.declare",
).then(@spaces).then(
match: variable_name,
tag_as: "entity.name.type.struct",
).then(@spaces).zeroOrMoreOf(
match: /\*|&/.maybe(@spaces),
includes: [
newPattern(
match: /\*/,
tag_as: "keyword.operator.dereference"
),
newPattern(
match: /&/,
tag_as: "keyword.operator.reference"
),
]
).then(
match: variable_name,
tag_as: "variable.other.object.declare",
)
)
cpp_grammar[:parameter_struct] = newPattern(
should_partial_match: [ "struct skcipher_walk *walk," ],
match: newPattern(
match: /struct/,
tag_as: "storage.type.struct.parameter",
).then(@spaces).then(
match: variable_name,
tag_as: "entity.name.type.struct.parameter",
).then(@spaces).zeroOrMoreOf(
match: /\*|&/.maybe(@spaces),
includes: [
newPattern(
match: /\*/,
tag_as: "keyword.operator.dereference"
),
newPattern(
match: /&/,
tag_as: "keyword.operator.reference"
),
]
# this is a maybe because its possible to have a type declare without an actual parameter
).maybe(
match: variable_name,
tag_as: "variable.other.object.declare",
).maybe(@spaces).maybe(
/\[/.maybe(@spaces).then(/\]/).maybe(@spaces),
).lookAheadFor(/,|\)|\n/)
)
cpp_grammar[:function_definition] = Range.new(
tag_as: "meta.function.definition.parameters",
start_pattern: avoid_invalid_function_names.then(look_ahead_for_function_name),
end_pattern: lookBehindFor(/\)/),
includes: [ :parameter_struct, :function_context_c ]
)
# a full match example of function call would be: aNameSpace::subClass<TemplateArg>FunctionName<5>(
cpp_grammar[:function_call] = Range.new(
start_pattern: avoid_invalid_function_names.then(
preceding_scopes
).then(
match: variable_name_without_bounds,
tag_as: "entity.name.function.call"
).maybe(@spaces).maybe(
template_call
).then(
match: /\(/,
tag_as: "punctuation.section.arguments.begin.bracket.round"
),
end_pattern: newPattern(
match: /\)/,
tag_as: "punctuation.section.arguments.end.bracket.round"
),
includes: [ :function_call_context_c ]
)
#
# Operators
#
cpp_grammar[:operators] = []
normal_word_operators = newPattern(
match: variableBounds[ @cpp_tokens.that(:isOperator, :isWord, not(:isTypeCastingOperator), not(:isControlFlow), not(:isFunctionLike)) ],
tag_as: "keyword.operator.wordlike alias keyword.operator.$match",
)
array_of_function_like_operators = @cpp_tokens.tokens.select { |each| each[:isFunctionLike] && !each[:isSpecifier] }
for each in array_of_function_like_operators
name = each[:name]
cpp_grammar[:operators].push(functionTemplate[
repository_name: "#{name}_operator",
match_name: variableBounds[/#{name}/],
tag_name_as: "keyword.operator.functionlike keyword.operator.#{name}",
tag_content_as: "arguments.operator.#{name}",
tag_parenthese_as: "operator.#{name}"
])
end
cpp_grammar[:operators] += [
functionTemplate[
repository_name: "decltype_specifier",
match_name: variableBounds[/decltype/],
tag_name_as: "keyword.operator.functionlike keyword.other.decltype storage.type.decltype",
tag_content_as: "arguments.decltype",
tag_parenthese_as: "decltype"
],
type_casting_operators,
:method_access,
:member_access,
normal_word_operators,
:vararg_ellipses,
{
match: "--",
name: "keyword.operator.decrement"
},
{
match: "\\+\\+",
name: "keyword.operator.increment"
},
{
match: "%=|\\+=|-=|\\*=|(?<!\\()/=",
name: "keyword.operator.assignment.compound"
},
{
match: "&=|\\^=|<<=|>>=|\\|=",
name: "keyword.operator.assignment.compound.bitwise"
},
{
match: "<<|>>",
name: "keyword.operator.bitwise.shift"
},
{
match: "!=|<=|>=|==|<|>",
name: "keyword.operator.comparison"
},
{
match: "&&|!|\\|\\|",
name: "keyword.operator.logical"
},
{
match: "&|\\||\\^|~",
name: "keyword.operator"
},
{
match: "=",
name: "keyword.operator.assignment"
},
{
match: "%|\\*|/|-|\\+",
name: "keyword.operator"
},
{
begin: "\\?",
beginCaptures: {
"0" => {
name: "keyword.operator.ternary"
}
},
end: ":",
applyEndPatternLast: true,
endCaptures: {
"0" => {
name: "keyword.operator.ternary"
}
},
patterns: [
{
include: "#method_access"
},
{
include: "#member_access"
},
{
include: "#function_call_c"
},
{
include: "$initial_context"
}
]
}
]
#
# Probably a parameter
#
array_brackets = /\[\]/.maybe(@spaces)
comma_or_closing_paraenthese = /,/.or(/\)/)
stuff_after_a_parameter = maybe(@spaces).lookAheadFor(maybe(array_brackets).then(comma_or_closing_paraenthese))
cpp_grammar[:probably_a_parameter] = newPattern(
match: newPattern(
match: variable_name_without_bounds.maybe(@spaces).lookAheadFor("="),
tag_as: "variable.parameter.defaulted"
).or(
match: look_behind_for_type.then(variable_name_without_bounds).then(stuff_after_a_parameter),
tag_as: "variable.parameter"
)
)
#
# Operator overload
#
# symbols can have spaces
operator_symbols = maybe(@spaces).then(@cpp_tokens.that(:canAppearAfterOperatorKeyword, :isSymbol))
# words must have spaces, the variable_name_without_bounds is for implicit overloads
operator_wordish = @spaces.then(@cpp_tokens.that(:canAppearAfterOperatorKeyword, :isWordish).or(zeroOrMoreOf(one_scope_resolution).then(variable_name_without_bounds).maybe(@spaces).maybe(/&/)))
after_operator_keyword = operator_symbols.or(operator_wordish)
cpp_grammar[:operator_overload] = operator_overload = Range.new(
tag_as: "meta.function.definition.parameters.operator-overload",
start_pattern: newPattern(
match: /operator/,
tag_as: "keyword.other.operator.overload",
).then(
match: after_operator_keyword,
tag_as: "entity.name.operator.overloadee",
includes: [:scope_resolution]
).maybe(@spaces).then(
match: /\(/,
tag_as: "punctuation.section.parameters.begin.bracket.round.operator-overload"
),
end_pattern: newPattern(
match: /\)/,
tag_as: "punctuation.section.parameters.end.bracket.round.operator-overload"
),
includes: [:probably_a_parameter, :function_context_c ]
)
#
# Access . .* -> ->*
#
dot_operator = /\.\*/.or(/\./)
arrow_operator = /->\*/.or(/->/)
dot_or_arrow_operator = /(?:\.\*|\.|->|->\*)/
member_operator = newPattern(
match: dot_operator,
tag_as: "punctuation.separator.dot-access"
).or(
match: arrow_operator,
tag_as: "punctuation.separator.pointer-access"
)
subsequent_object_with_operator = variable_name_without_bounds.maybe(@spaces).then(member_operator.without_numbered_capture_groups).maybe(@spaces)
# TODO: the member_access and method_access can probably be simplified considerably
# TODO: member_access and method_access might also need additional matching to handle scope resolutions
partial_member = the_this_keyword.or(
newPattern(
match: variable_name_without_bounds.or(lookBehindFor(/\]|\)/)).maybe(@spaces),
tag_as: "variable.other.object.access",
)
).then(
member_operator
)
member_context = [
mid_member = newPattern(
tag_as: "variable.other.object.property",
match: lookBehindFor(dot_or_arrow_operator).maybe(
@spaces
).then(
partial_member.without_numbered_capture_groups
)
),
partial_member,
:member_access,
:method_access,
]
member_start = partial_member.then(
match: zeroOrMoreOf(subsequent_object_with_operator),
includes: member_context
).maybe(@spaces)
# access to attribute
type_represenetations = @cpp_tokens.representationsThat(:isType)
lookahead_friedly_types_pattern = /#{type_represenetations.map { |each| each+"[^#{@standard_character}]" } .join('|')}/
cpp_grammar[:member_access] = member_access = newPattern(
match: member_start.then(
match: @word_boundary.lookAheadToAvoid(lookahead_friedly_types_pattern).then(variable_name_without_bounds).then(@word_boundary).lookAheadToAvoid(/\(/),
tag_as: "variable.other.property"
)
)
# access to method
cpp_grammar[:method_access] = method_access = Range.new(
tag_content_as: "meta.function-call.member",
start_pattern: member_start.then(
match: variable_name_without_bounds,
tag_as: "entity.name.function.member"
).then(
match: /\(/,
tag_as: "punctuation.section.arguments.begin.bracket.round.function.member"
),
end_pattern: newPattern(
match: /\)/,
tag_as: "punctuation.section.arguments.end.bracket.round.function.member"
),
includes: [:function_call_context_c],
)
#
# Namespace
#
# see https://en.cppreference.com/w/cpp/language/namespace
cpp_grammar[:using_namespace] = Range.new(
tag_as: "meta.using-namespace",
start_pattern: lookBehindToAvoid(@standard_character).then(
match: /using/,
tag_as: "keyword.other.using.directive",
).then(@spaces).then(
match: /namespace/,
tag_as: "keyword.other.namespace.directive storage.type.namespace.directive"
).then(@spaces).maybe(
preceding_scopes
).then(
match: variable_name,
tag_as: "entity.name.type.namespace"
).lookAheadFor(
/;|\n/
),
end_pattern: @semicolon,
)
# TODO: add support for namespace name = qualified-namespace ;
cpp_grammar[:namespace_block] = blockFinderFor(
name: "namespace",
tag_as: "meta.block.namespace",
needs_semicolon: false,
start_pattern: lookBehindToAvoid(@standard_character).then(
match: /namespace/,
tag_as: "keyword.other.namespace.definition storage.type.namespace.definition"
).then(@spaces).maybe(inline_attribute).maybe(@spaces).then(
# Named namespace (with possible scope )
preceding_scopes
).maybe(@spaces).then(
newPattern(
match: variable_name,
tag_as: "entity.name.type.namespace",
# anonymous namespaces
).or(
lookAheadFor(/\{/)
)
),
)
#
# Preprocessor
#
# not sure if this pattern is actually accurate (it was the one provided by atom/c.tmLanguage)
preprocessor_name_no_bounds = /[a-zA-Z_$][\w$]*/
preprocessor_function_name = preprocessor_name_no_bounds.lookAheadFor(maybe(@spaces).then(/\(/))
cpp_grammar[:macro_argument] = newPattern(
match: /##/.then(variable_name_without_bounds).lookAheadToAvoid(@standard_character),
tag_as: "variable.other.macro.argument"
)
#
# Lambdas
#
array_of_invalid_function_names = @cpp_tokens.representationsThat(:canAppearBeforeLambdaCapture)
non_variable_name = /#{array_of_invalid_function_names.map { |each| '\W'+each+'|^'+each } .join('|')}/
cpp_grammar[:lambdas] = lambdas = Range.new(
start_pattern: newPattern(
should_fully_match: [ "[]", "[=]", "[&]", "[x,y,x]", "[x, y, &z, w = 1 + 1]", "[ a = blah[1324], b, c ]" ],
should_partial_match: [ "[]", "[=](", "[&]{", "[x,y,x]", "[x, y, &z, w = 1 + 1] (", "[ a = blah[1324], b, c ] {" ],
should_not_partial_match: [ "delete[]", "thing[]", "thing []", "thing []", "thing[0][0] = 0" ],
match: lookBehindFor(/[^\s]|^/).lookBehindToAvoid(/[\w\]\)\[\*]/).or(lookBehindFor(non_variable_name)).maybe(@spaces).then(
match: /\[/.lookAheadToAvoid(/\[/),
tag_as: "punctuation.definition.capture.begin.lambda",
).then(
match: /(?:.*\[.*?\].*?)*.*?/,
tag_as: "meta.lambda.capture",
# the zeroOrMoreOf() is for other []'s that are inside of the lambda capture
# this pattern is still imperfect: if someone had a string literal with ['s in it, it could fail
includes: [ :probably_a_parameter, :function_context_c ],
).then(
match: /\]/,
tag_as: "punctuation.definition.capture.end.lambda",
)
),
end_pattern: newPattern(
match: lookBehindFor(/}/),
),
includes: [
# check for parameters first
Range.new(
tag_as: 'meta.function.definition.parameters.lambda',
start_pattern: newPattern(
match: /\(/,
tag_as: "punctuation.definition.parameters.begin.lambda",
),
end_pattern: newPattern(
match: /\)/,
tag_as: "punctuation.definition.parameters.end.lambda",
),
includes: [ :probably_a_parameter, :function_context_c ]
),
# specificers
newPattern(
match: variableBounds[ @cpp_tokens.that(:isLambdaSpecifier) ],
tag_as: "storage.modifier.lambda.$match"
),
# check for the -> syntax
newPattern(
match: /->/,
tag_as: "punctuation.definition.lambda.return-type"
).maybe(
match: /.+?/.lookAheadFor(/\{|$/),
tag_as: "storage.type.return-type.lambda"
),
# then find the body
Range.new(
tag_as: "meta.function.definition.body.lambda",
start_pattern: newPattern(
match: /\{/,
tag_as: "punctuation.section.block.begin.bracket.curly.lambda",
),
end_pattern: newPattern(
match: /\}/,
tag_as: "punctuation.section.block.end.bracket.curly.lambda",
),
includes: [ :$initial_context ]
),
]
)
#
# Support
#
# generally this section is for things that need a #include, (the support category)
# it will be for things such as cout, cin, vector, string, map, etc
cpp_grammar[:pthread_types] = pthread_types = newPattern(
tag_as: "support.type.posix-reserved.pthread",
match: variableBounds[ /pthread_attr_t|pthread_cond_t|pthread_condattr_t|pthread_mutex_t|pthread_mutexattr_t|pthread_once_t|pthread_rwlock_t|pthread_rwlockattr_t|pthread_t|pthread_key_t/ ],
)
cpp_grammar[:posix_reserved_types] = posix_reserved_types = newPattern(
match: variableBounds[ /[a-zA-Z_]/.zeroOrMoreOf(@standard_character).then(/_t/) ],
tag_as: "support.type.posix-reserved"
)
#
# Classes, structs, unions, enums
#
# see https://en.cppreference.com/w/cpp/language/enum
# this range matches both the case with brackets and the case without brackets
cpp_grammar[:enum_block] = blockFinderFor(
name: "enum",
tag_as: "meta.block.enum",
start_pattern: newPattern(
match: /enum/,
tag_as: "storage.type.enum"
).then(@spaces).maybe(
# see "Scoped enumerations" on https://en.cppreference.com/w/cpp/language/enum
newPattern(
match: /class|struct/,
tag_as: "storage.type.enum.enum-key.$match",
).then(@spaces.or(inline_attribute).or(lookAheadFor(/{/)))
).maybe(inline_attribute).maybe(@spaces).maybe(
match: variable_name,
tag_as: "entity.name.type.enum",
).maybe(
maybe(@spaces).then(
match: /:/,
tag_as: "colon punctuation.separator.type-specifier",
).maybe(@spaces).maybe(
scope_resolution
).maybe(@spaces).then(
match: variable_name,
tag_as: "storage.type.integral.$match",
)
),
head_includes: [ :$initial_context ]
)
# the following are basically the equivlent of:
# @cpp_tokens.that(:isAccessSpecifier).or(/,/).or(/:/)
# that ^ causes an error in the lookBehindFor() so it has to be manually spread
can_come_before_a_inherited_class = @cpp_tokens.representationsThat(:isAccessSpecifier) + [ ',', ':' ]
can_come_before_a_inherited_class_regex = /#{can_come_before_a_inherited_class.join('|')}/
cpp_grammar[:inhertance_context] = [
newPattern(
match: /,/,
tag_as: "comma punctuation.separator.delimiter.inhertance"
),
newPattern(
match: variableBounds[ @cpp_tokens.that(:isAccessSpecifier) ],
tag_as: "storage.type.modifier.access.$match",
),
lookBehindFor(can_come_before_a_inherited_class_regex).maybe(@spaces).lookAheadToAvoid(@cpp_tokens.that(:isAccessSpecifier)).then(
match: variable_name,
tag_as: "entity.name.type.inherited"
)
]
final_modifier = newPattern(
match: /final/,
tag_as: "storage.type.modifier.final",
)
generateClassOrStructBlockFinder = ->(name) do
return blockFinderFor(
tag_as: "meta.block.#{name}",
name: name,
start_pattern: newPattern(
should_fully_match: ["#{name} foo: bar", "#{name} foo: public baz"],
should_not_fully_match: ["#{name} foo {","#{name} foo{"],
should_partial_match: ["#{name} foo f;", "#{name} st s;"],
match: newPattern(
reference: "storage_type",
match: variableBounds[ /#{name}/ ],
tag_as: "storage.type.$match",
).then(
@spaces.or(
inline_attribute
).or(
lookAheadFor(/{/)
)
).maybe(inline_attribute).maybe(@spaces).maybe(
match: variable_name,
tag_as: "entity.name.type.$reference(storage_type)",
).maybe(
@spaces.then(final_modifier).maybe(@spaces)
).maybe(
#
# inheritance
#
maybe(@spaces).then(
match: /:/,
tag_as: "colon punctuation.separator.inhertance"
# the following may seem redundant (removing it shouldn't change anything)
# this is because the follow are matched by what is inside of this Range
# However its preferable to match things here, in the Start (using a pattern), over matching it inside of the range
# this is because the start pattern typically fails safely (is limited to 1 line), while typically Ranges fail dangerously (can match the whole document)
).zeroOrMoreOf(
match: maybe(@spaces).maybe(/,/).maybe(
@spaces
).maybe(
@cpp_tokens.that(:isAccessSpecifier)
).maybe(@spaces).oneOrMoreOf(
maybe(@spaces).maybe(/,/).maybe(
@spaces
).lookAheadToAvoid(
@cpp_tokens.that(:isAccessSpecifier)
).then(variable_name)
),
includes: [ :inhertance_context ]
)
),
),
head_includes: [
:preprocessor_context,
:inhertance_context,
:template_call_range,
:comments_context,
],
body_includes: [ :constructor_context, :$initial_context ],
)
end
cpp_grammar[:class_block] = generateClassOrStructBlockFinder["class"]
cpp_grammar[:struct_block] = generateClassOrStructBlockFinder["struct"]
cpp_grammar[:union_block] = generateClassOrStructBlockFinder["union"]
# the following is a legacy pattern, I'm not sure if it is still accurate
# I have no idea why it matches a double quote
cpp_grammar[:extern_block] = blockFinderFor(
name: 'extern',
tag_as: "meta.block.extern",
start_pattern: newPattern(
match: /\bextern/,
tag_as: "storage.type.extern"
).lookAheadFor(/\s*\"/),
head_includes: [ :$initial_context ],
secondary_includes: [ :$initial_context ]
)
#
# preprocessor directives
#
# TODO, change all blocks/paraentheses so that they end and the end of a macro
# TODO, find a good solution to dealing with if statments that cross in to/out of blocks
cpp_grammar[:hacky_fix_for_stray_directive] = hacky_fix_for_stray_directive = newPattern(
match: variableBounds[/#(?:endif|else|elif)/],
tag_as: "keyword.control.directive.$match"
)
#
# Misc Legacy
#
cpp_grammar[:square_brackets] = {
name: "meta.bracket.square.access",
begin: "([a-zA-Z_][a-zA-Z_0-9]*|(?<=[\\]\\)]))?(\\[)(?!\\])",
beginCaptures: {
"1" => {
name: "variable.other.object"
},
"2" => {
name: "punctuation.definition.begin.bracket.square"
}
},
end: "\\]",
endCaptures: {
"0" => {
name: "punctuation.definition.end.bracket.square"
}
},
patterns: [
{
include: "#function_call_context_c"
}
]
}
cpp_grammar[:empty_square_brackets] = {
name: "storage.modifier.array.bracket.square",
match: /#{lookBehindToAvoid(/delete/)}\\[\\s*\\]/
}
cpp_grammar[:assembly] = newPattern(
match: variableBounds[ /(asm|__asm__)/ ],
tag_as: "storage.type.$match"
)
cpp_grammar[:misc_storage_modifiers_1] = {
match: /\b(constexpr|export|mutable|typename|thread_local)\b/,
name: "storage.modifier"
}
cpp_grammar[:misc_storage_modifiers_2] = {
match: /\b(const|extern|register|restrict|static|volatile|inline)\b/,
name: "storage.modifier"
}
cpp_grammar[:destructor] = {
name: "meta.function.destructor",
begin: "(?x)\n(?:\n ^ | # beginning of line\n (?:(?<!else|new|=)) # or word + space before name\n)\n((?:[A-Za-z_][A-Za-z0-9_]*::)*+~[A-Za-z_][A-Za-z0-9_]*) # actual name\n\\s*(\\() # opening bracket",
beginCaptures: {
"1" => {
name: "entity.name.function.destructor"
},
"2" => {
name: "punctuation.definition.parameters.begin.destructor"
}
},
end: /\)/,
endCaptures: {
"0" => {
name: "punctuation.definition.parameters.end.destructor"
}
},
patterns: [
{
include: "$initial_context"
}
]
}
cpp_grammar[:destructor_prototype] = {
name: "meta.function.destructor.prototype",
begin: "(?x)\n(?:\n ^ | # beginning of line\n (?:(?<!else|new|=)) # or word + space before name\n)\n((?:[A-Za-z_][A-Za-z0-9_]*::)*+~[A-Za-z_][A-Za-z0-9_]*) # actual name\n\\s*(\\() # opening bracket",
beginCaptures: {
"1" => {
name: "entity.name.function"
},
"2" => {
name: "punctuation.definition.parameters.begin"
}
},
end: /\)/,
endCaptures: {
"0" => {
name: "punctuation.definition.parameters.end"
}
},
patterns: [
{
include: "$initial_context"
}
]
}
cpp_grammar[:meta_preprocessor_macro] = {
name: "meta.preprocessor.macro",
begin: "(?x)\n^\\s* ((\\#)\\s*define) \\s+\t# define\n((?<id>#{preprocessor_name_no_bounds}))\t # macro name\n(?:\n (\\()\n\t(\n\t \\s* \\g<id> \\s*\t\t # first argument\n\t ((,) \\s* \\g<id> \\s*)* # additional arguments\n\t (?:\\.\\.\\.)?\t\t\t# varargs ellipsis?\n\t)\n (\\))\n)?",
beginCaptures: {
"1" => {
name: "keyword.control.directive.define"
},
"2" => {
name: "punctuation.definition.directive"
},
"3" => {
name: "entity.name.function.preprocessor"
},
"5" => {
name: "punctuation.definition.parameters.begin"
},
"6" => {
name: "variable.parameter.preprocessor"
},
"8" => {
name: "punctuation.separator.parameters"
},
"9" => {
name: "punctuation.definition.parameters.end"
}
},
end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
patterns: [
{
include: "#preprocessor_rule_define_line_context"
}
]
}
cpp_grammar[:meta_preprocessor_diagnostic] = {
name: "meta.preprocessor.diagnostic",
begin: "^\\s*((#)\\s*(error|warning))\\b\\s*",
beginCaptures: {
"1" => {
name: "keyword.control.directive.diagnostic.$3"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?<!\\\\)(?=\\n)",
patterns: [
{
begin: "\"",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "\"|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.double",
patterns: [
{
include: "#line_continuation_character"
}
]
},
{
begin: "'",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "'|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.single",
patterns: [
{
include: "#line_continuation_character"
}
]
},
{
begin: "[^'\"]",
end: "(?<!\\\\)(?=\\s*\\n)",
name: "string.unquoted.single",
patterns: [
{
include: "#line_continuation_character"
},
{
include: "#comments_context"
}
]
}
]
}
cpp_grammar[:meta_preprocessor_include] = {
name: "meta.preprocessor.include",
begin: "^\\s*((#)\\s*(include(?:_next)?|import))\\b\\s*",
beginCaptures: {
"1" => {
name: "keyword.control.directive.$3"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
patterns: [
{
include: "#line_continuation_character"
},
{
begin: "\"",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "\"",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.double.include"
},
{
begin: "<",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: ">",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.other.lt-gt.include"
}
]
}
cpp_grammar[:meta_preprocessor_line] = {
name: "meta.preprocessor",
begin: "^\\s*((#)\\s*line)\\b",
beginCaptures: {
"1" => {
name: "keyword.control.directive.line"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
patterns: [
{
include: "#string_context_c"
},
{
include: "#number_literal"
},
{
include: "#line_continuation_character"
}
]
}
cpp_grammar[:meta_preprocessor_undef] = {
name: "meta.preprocessor",
begin: "^\\s*(?:((#)\\s*undef))\\b",
beginCaptures: {
"1" => {
name: "keyword.control.directive.undef"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
patterns: [
{
match: preprocessor_name_no_bounds,
name: "entity.name.function.preprocessor"
},
{
include: "#line_continuation_character"
}
]
}
cpp_grammar[:meta_preprocessor_pragma] = {
name: "meta.preprocessor.pragma",
begin: "^\\s*(?:((#)\\s*pragma))\\b",
beginCaptures: {
"1" => {
name: "keyword.control.directive.pragma"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
patterns: [
{
include: "#string_context_c"
},
{
match: "[a-zA-Z_$][\\w\\-$]*",
name: "entity.other.attribute-name.pragma.preprocessor"
},
{
include: "#number_literal"
},
{
include: "#line_continuation_character"
}
]
}
cpp_grammar[:constructor_context] = [
{
begin: "(?x)\n(?:^\\s*) # beginning of line\n((?!while|for|do|if|else|switch|catch)[A-Za-z_][A-Za-z0-9_:]*) # actual name\n\\s*(\\() # opening bracket",
beginCaptures: {
"1" => {
name: "entity.name.function.constructor"
},
"2" => {
name: "punctuation.definition.parameters.begin.constructor"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.definition.parameters.end.constructor"
}
},
name: "meta.function.constructor",
patterns: [
{
include: "#probably_a_parameter"
},
{
include: "#function_context_c"
}
]
},
{
begin: "(?x)\n(:)\n(\n (?=\n \\s*[A-Za-z_][A-Za-z0-9_:]* # actual name\n \\s* (\\() # opening bracket\n )\n)",
beginCaptures: {
"1" => {
name: "punctuation.definition.initializer-list.parameters"
}
},
end: "(?=\\{)",
name: "meta.function.constructor.initializer-list",
patterns: [
{
include: "$initial_context"
}
]
}
]
cpp_grammar[:special_block_context] = [
:attributes,
:using_namespace,
:namespace_block,
:class_block,
:struct_block,
:union_block,
:enum_block,
:extern_block,
]
cpp_grammar[:string_context] = [
{
begin: "(u|u8|U|L)?\"",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
},
"1" => {
name: "meta.encoding"
}
},
end: "\"",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.double",
patterns: [
{
match: "\\\\u\\h{4}|\\\\U\\h{8}",
name: "constant.character.escape"
},
{
match: "\\\\['\"?\\\\abfnrtv]",
name: "constant.character.escape"
},
{
match: "\\\\[0-7]{1,3}",
name: "constant.character.escape"
},
{
match: "\\\\x\\h+",
name: "constant.character.escape"
},
{
include: "#string_escapes_context_c"
}
]
},
{
begin: "(u|u8|U|L)?R\"(?:([^ ()\\\\\\t]{0,16})|([^ ()\\\\\\t]*))\\(",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
},
"1" => {
name: "meta.encoding"
},
"3" => {
name: "invalid.illegal.delimiter-too-long"
}
},
end: "\\)\\2(\\3)\"",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
},
"1" => {
name: "invalid.illegal.delimiter-too-long"
}
},
name: "string.quoted.double.raw"
}
]
cpp_grammar[:block] = {
begin: "{",
beginCaptures: {
"0" => {
name: "punctuation.section.block.begin.bracket.curly"
}
},
end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)",
endCaptures: {
"0" => {
name: "punctuation.section.block.end.bracket.curly"
}
},
name: "meta.block",
patterns: [
{
include: "#block_context"
}
]
}
# Everything that can appear inside a { } block: preprocessor branches,
# member/method access, function calls, initializer-style calls, nested
# blocks and parenthesized groups, then the full top-level context.
cpp_grammar[:block_context] = [
    :preprocessor_rule_enabled_block,
    :preprocessor_rule_disabled_block,
    :preprocessor_rule_conditional_block,
    :method_access,
    :member_access,
    :function_call_c,
    {
        # Matches `name(` constructor/initialization calls; the (?x) regex
        # excludes operator keywords so e.g. `and (...)` is not treated as a call.
        name: "meta.initialization",
        begin: "(?x)\n(?:\n (?:\n\t(?=\\s)(?<!else|new|return)\n\t(?<=\\w) \\s+(and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas) # or word + space before name\n )\n)\n(\n (?:[A-Za-z_][A-Za-z0-9_]*+ | :: )++ # actual name\n |\n (?:(?<=operator) (?:[-*&<>=+!]+ | \\(\\) | \\[\\]))\n)\n\\s*(\\() # opening bracket",
        beginCaptures: {
            "1" => {
                name: "variable.other"
            },
            "2" => {
                name: "punctuation.section.parens.begin.bracket.round.initialization"
            }
        },
        end: "\\)",
        endCaptures: {
            "0" => {
                name: "punctuation.section.parens.end.bracket.round.initialization"
            }
        },
        patterns: [
            {
                include: "#function_call_context_c"
            }
        ]
    },
    {
        # Anonymous nested block (same shape as cpp_grammar[:block] but unnamed).
        begin: "{",
        beginCaptures: {
            "0" => {
                name: "punctuation.section.block.begin.bracket.curly"
            }
        },
        end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)",
        endCaptures: {
            "0" => {
                name: "punctuation.section.block.end.bracket.curly"
            }
        },
        patterns: [
            {
                include: "#block_context"
            }
        ]
    },
    :parentheses_block,
    :$initial_context
]
# C-style function call: `name(` (optionally with a template call in between),
# with a long negative lookahead excluding control-flow and operator keywords
# that are also followed by `(`. Interpolates the template_call pattern
# defined earlier in this file.
cpp_grammar[:function_call_c] = {
    begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas|constexpr|volatile|operator|(?:::)?new|(?:::)?delete)\\s*\\()\n(?=\n(?:[A-Za-z_][A-Za-z0-9_]*+|::)++\\s*#{maybe(template_call.without_numbered_capture_groups)}\\( # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\\s*\\(\n)",
    end: "(?<=\\))(?!\\w)",
    name: "meta.function-call",
    patterns: [
        {
            include: "#function_call_context_c"
        }
    ]
}
# Comment handling: banner comments (/* = ... = */ and // = ... =),
# plain block comments, and line comments with line-continuation support.
cpp_grammar[:comments_context] = {
    patterns: [
        {
            captures: {
                "1" => {
                    name: "meta.toc-list.banner.block"
                }
            },
            match: "^/\\* =(\\s*.*?)\\s*= \\*/$\\n?",
            name: "comment.block"
        },
        {
            begin: "/\\*",
            beginCaptures: {
                "0" => {
                    name: "punctuation.definition.comment.begin"
                }
            },
            end: "\\*/",
            endCaptures: {
                "0" => {
                    name: "punctuation.definition.comment.end"
                }
            },
            name: "comment.block"
        },
        {
            captures: {
                "1" => {
                    name: "meta.toc-list.banner.line"
                }
            },
            match: "^// =(\\s*.*?)\\s*=\\s*$\\n?",
            name: "comment.line.banner"
        },
        {
            # Leading whitespace before // is tagged separately; the outer
            # range ends as soon as the inner one stops advancing ((?!\G)).
            begin: "(^[ \\t]+)?(?=//)",
            beginCaptures: {
                "1" => {
                    name: "punctuation.whitespace.comment.leading"
                }
            },
            end: "(?!\\G)",
            patterns: [
                {
                    begin: "//",
                    beginCaptures: {
                        "0" => {
                            name: "punctuation.definition.comment"
                        }
                    },
                    end: "(?=\\n)",
                    name: "comment.line.double-slash",
                    patterns: [
                        {
                            include: "#line_continuation_character"
                        }
                    ]
                }
            ]
        }
    ]
}
# Region between any #if(def/ndef) and its #endif when the branch is known
# to be disabled; recurses into itself so nested conditionals are consumed.
cpp_grammar[:disabled] = {
    begin: "^\\s*#\\s*if(n?def)?\\b.*$",
    end: "^\\s*#\\s*endif\\b",
    patterns: [
        {
            include: "#disabled"
        },
        {
            include: "#pragma_mark"
        }
    ]
}
# Backslash-newline line continuation, used inside strings and line comments.
cpp_grammar[:line_continuation_character] = {
    match: "(\\\\)\\n",
    captures: {
        "1" => {
            name: "constant.character.escape.line-continuation"
        }
    }
}
# Generic parenthesized group at top level.
cpp_grammar[:parentheses] = {
    name: "meta.parens",
    begin: "\\(",
    beginCaptures: {
        "0" => {
            name: "punctuation.section.parens.begin.bracket.round"
        }
    },
    end: "\\)",
    endCaptures: {
        "0" => {
            name: "punctuation.section.parens.end.bracket.round"
        }
    },
    patterns: [
        {
            include: "$initial_context"
        }
    ]
}
# Parenthesized group inside a block; additionally tags a lone `:` (as in a
# range-based for) while avoiding `::` scope resolution.
cpp_grammar[:parentheses_block] = {
    name: "meta.parens.block",
    begin: "\\(",
    beginCaptures: {
        "0" => {
            name: "punctuation.section.parens.begin.bracket.round"
        }
    },
    end: "\\)",
    endCaptures: {
        "0" => {
            name: "punctuation.section.parens.end.bracket.round"
        }
    },
    patterns: [
        {
            include: "#block_context"
        },
        {
            match: lookBehindToAvoid(/:/).then(/:/).lookAheadToAvoid(/:/),
            name: "colon punctuation.separator.range-based"
        }
    ]
}
# `#pragma mark <label>` section markers (Xcode-style navigation headings).
cpp_grammar[:pragma_mark] = {
    captures: {
        "1" => {
            name: "meta.preprocessor.pragma"
        },
        "2" => {
            name: "keyword.control.directive.pragma.pragma-mark"
        },
        "3" => {
            name: "punctuation.definition.directive"
        },
        "4" => {
            name: "entity.name.tag.pragma-mark"
        }
    },
    match: "^\\s*(((#)\\s*pragma\\s+mark)\\s+(.*))",
    name: "meta.section"
}
# C-style strings: double-quoted and single-quoted (character) literals,
# both supporting escapes and line continuations.
cpp_grammar[:string_context_c] = [
    {
        begin: "\"",
        beginCaptures: {
            "0" => {
                name: "punctuation.definition.string.begin"
            }
        },
        end: "\"",
        endCaptures: {
            "0" => {
                name: "punctuation.definition.string.end"
            }
        },
        name: "string.quoted.double",
        patterns: [
            {
                include: "#string_escapes_context_c"
            },
            {
                include: "#line_continuation_character"
            }
        ]
    },
    {
        # lookBehindToAvoid(hex digit) keeps the digit-separator apostrophe in
        # numeric literals like 1'000 from opening a char literal.
        begin: lookBehindToAvoid(/[\da-fA-F]/).then(/'/),
        beginCaptures: {
            "0" => {
                name: "punctuation.definition.string.begin"
            }
        },
        end: "'",
        endCaptures: {
            "0" => {
                name: "punctuation.definition.string.end"
            }
        },
        name: "string.quoted.single",
        patterns: [
            {
                include: "#string_escapes_context_c"
            },
            {
                include: "#line_continuation_character"
            }
        ]
    }
]
# Escape sequences inside C strings: recognized escapes, anything else after a
# backslash is flagged illegal, plus printf-style %-placeholders.
cpp_grammar[:string_escapes_context_c] = [
    {
        match: "(?x)\\\\ (\n\\\\\t\t\t |\n[abefnprtv'\"?] |\n[0-3]\\d{,2}\t |\n[4-7]\\d?\t\t|\nx[a-fA-F0-9]{,2} |\nu[a-fA-F0-9]{,4} |\nU[a-fA-F0-9]{,8} )",
        name: "constant.character.escape"
    },
    {
        match: "\\\\.",
        name: "invalid.illegal.unknown-escape"
    },
    {
        match: "(?x) %\n(\\d+\\$)?\t\t\t\t\t\t # field (argument #)\n[#0\\- +']*\t\t\t\t\t\t # flags\n[,;:_]?\t\t\t\t\t\t\t # separator character (AltiVec)\n((-?\\d+)|\\*(-?\\d+\\$)?)?\t\t # minimum field width\n(\\.((-?\\d+)|\\*(-?\\d+\\$)?)?)?\t# precision\n(hh|h|ll|l|j|t|z|q|L|vh|vl|v|hv|hl)? # length modifier\n[diouxXDOUeEfFgGaACcSspn%]\t\t # conversion type",
        name: "constant.other.placeholder"
    },
    # I don't think these are actual escapes, and they incorrectly mark valid strings
    # It might be related to printf and format from C (which is low priority for C++)
    # {
    #     match: "(%)(?!\"\\s*(PRI|SCN))",
    #     captures: {
    #         "1" => {
    #             name: "constant.other.placeholder"
    #         }
    #     }
    # }
]
# Variadic `...` (but not part of a longer run of dots).
cpp_grammar[:vararg_ellipses] = {
    match: "(?<!\\.)\\.\\.\\.(?!\\.)",
    name: "punctuation.vararg-ellipses"
}
# Generic #if/#ifdef/#ifndef ... #endif conditional (top-level variant).
# The first inner pattern (\G anchored) highlights the condition on the #if
# line itself; the others handle the enabled/disabled elif/else branches.
cpp_grammar[:preprocessor_rule_conditional] = {
    begin: "^\\s*((#)\\s*if(?:n?def)?\\b)",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "^\\s*((#)\\s*endif\\b)",
    endCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    patterns: [
        {
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#preprocessor_rule_enabled_elif"
        },
        {
            include: "#preprocessor_rule_enabled_else"
        },
        {
            include: "#preprocessor_rule_disabled_elif"
        },
        {
            begin: "^\\s*((#)\\s*elif\\b)",
            beginCaptures: {
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "$initial_context"
        }
    ]
}
# Same as above but used inside a { } block: branch bodies include
# #block_context instead of $initial_context.
cpp_grammar[:preprocessor_rule_conditional_block] = {
    begin: "^\\s*((#)\\s*if(?:n?def)?\\b)",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "^\\s*((#)\\s*endif\\b)",
    endCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    patterns: [
        {
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#preprocessor_rule_enabled_elif_block"
        },
        {
            include: "#preprocessor_rule_enabled_else_block"
        },
        {
            include: "#preprocessor_rule_disabled_elif"
        },
        {
            begin: "^\\s*((#)\\s*elif\\b)",
            beginCaptures: {
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#block_context"
        }
    ]
}
# What may appear in the condition of a #if/#elif line: defined(...), macro
# names, numbers, strings, operators, a ternary, and nested parentheses —
# all terminated at a comment or an unescaped newline.
cpp_grammar[:preprocessor_rule_conditional_line_context] = [
    {
        # `defined` used correctly (followed by a single macro name / end of line).
        match: "(?:\\bdefined\\b\\s*$)|(?:\\bdefined\\b(?=\\s*\\(*\\s*(?:(?!defined\\b)[a-zA-Z_$][\\w$]*\\b)\\s*\\)*\\s*(?:\\n|//|/\\*|\\?|\\:|&&|\\|\\||\\\\\\s*\\n)))",
        name: "keyword.control.directive.conditional"
    },
    {
        # Any other use of `defined` (e.g. defined defined) is illegal.
        match: "\\bdefined\\b",
        name: "invalid.illegal.macro-name"
    },
    :comments_context,
    :string_context_c,
    :number_literal,
    {
        begin: "\\?",
        beginCaptures: {
            "0" => {
                name: "keyword.operator.ternary"
            }
        },
        end: ":",
        endCaptures: {
            "0" => {
                name: "keyword.operator.ternary"
            }
        },
        patterns: [
            {
                include: "#preprocessor_rule_conditional_line_context"
            }
        ]
    },
    :operators,
    :language_constants,
    {
        match: preprocessor_name_no_bounds,
        name: "entity.name.function.preprocessor"
    },
    :line_continuation_character,
    {
        begin: "\\(",
        beginCaptures: {
            "0" => {
                name: "punctuation.section.parens.begin.bracket.round"
            }
        },
        end: "\\)|(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
        endCaptures: {
            "0" => {
                name: "punctuation.section.parens.end.bracket.round"
            }
        },
        patterns: [
            {
                include: "#preprocessor_rule_conditional_line_context"
            }
        ]
    }
]
# `#if 0` ... #endif: the first branch is known-dead, so its body is tagged
# as a preprocessor comment; later elif/else branches are handled normally.
cpp_grammar[:preprocessor_rule_disabled] = {
    begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0+\\b\\)*\\s*(?:$|//|/\\*))",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "^\\s*((#)\\s*endif\\b)",
    endCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    patterns: [
        {
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#comments_context"
        },
        {
            include: "#preprocessor_rule_enabled_elif"
        },
        {
            include: "#preprocessor_rule_enabled_else"
        },
        {
            include: "#preprocessor_rule_disabled_elif"
        },
        {
            begin: "^\\s*((#)\\s*elif\\b)",
            beginCaptures: {
                "0" => {
                    name: "meta.preprocessor"
                },
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=^\\s*((#)\\s*(?:elif|else|endif)\\b))",
            patterns: [
                {
                    begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
                    end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
                    name: "meta.preprocessor",
                    patterns: [
                        {
                            include: "#preprocessor_rule_conditional_line_context"
                        }
                    ]
                },
                {
                    include: "$initial_context"
                }
            ]
        },
        {
            # The dead if-branch body itself.
            begin: "\\n",
            end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
            "contentName" => "comment.block.preprocessor.if-branch",
            patterns: [
                {
                    include: "#disabled"
                },
                {
                    include: "#pragma_mark"
                }
            ]
        }
    ]
}
# `#if 0` inside a { } block — identical except branches use #block_context
# and the dead-body contentName gets the .in-block suffix.
cpp_grammar[:preprocessor_rule_disabled_block] = {
    begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0+\\b\\)*\\s*(?:$|//|/\\*))",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "^\\s*((#)\\s*endif\\b)",
    endCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    patterns: [
        {
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#comments_context"
        },
        {
            include: "#preprocessor_rule_enabled_elif_block"
        },
        {
            include: "#preprocessor_rule_enabled_else_block"
        },
        {
            include: "#preprocessor_rule_disabled_elif"
        },
        {
            begin: "^\\s*((#)\\s*elif\\b)",
            beginCaptures: {
                "0" => {
                    name: "meta.preprocessor"
                },
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=^\\s*((#)\\s*(?:elif|else|endif)\\b))",
            patterns: [
                {
                    begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
                    end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
                    name: "meta.preprocessor",
                    patterns: [
                        {
                            include: "#preprocessor_rule_conditional_line_context"
                        }
                    ]
                },
                {
                    include: "#block_context"
                }
            ]
        },
        {
            begin: "\\n",
            end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
            "contentName" => "comment.block.preprocessor.if-branch.in-block",
            patterns: [
                {
                    include: "#disabled"
                },
                {
                    include: "#pragma_mark"
                }
            ]
        }
    ]
}
# `#elif 0`: the branch body is dead and tagged as a preprocessor comment.
cpp_grammar[:preprocessor_rule_disabled_elif] = {
    begin: "^\\s*((#)\\s*elif\\b)(?=\\s*\\(*\\b0+\\b\\)*\\s*(?:$|//|/\\*))",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "(?=^\\s*((#)\\s*(?:elif|else|endif)\\b))",
    patterns: [
        {
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#comments_context"
        },
        {
            begin: "\\n",
            end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
            "contentName" => "comment.block.preprocessor.elif-branch",
            patterns: [
                {
                    include: "#disabled"
                },
                {
                    include: "#pragma_mark"
                }
            ]
        }
    ]
}
# `#if 1` (or 0*1): the if-branch is live (highlighted normally) and any
# #else/#elif branches are dead, tagged as preprocessor comments.
cpp_grammar[:preprocessor_rule_enabled] = {
    begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        },
        "3" => {
            name: "constant.numeric.preprocessor"
        }
    },
    end: "^\\s*((#)\\s*endif\\b)",
    endCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    patterns: [
        {
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#comments_context"
        },
        {
            # Dead #else branch.
            begin: "^\\s*((#)\\s*else\\b)",
            beginCaptures: {
                "0" => {
                    name: "meta.preprocessor"
                },
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=^\\s*((#)\\s*endif\\b))",
            "contentName" => "comment.block.preprocessor.else-branch",
            patterns: [
                {
                    include: "#disabled"
                },
                {
                    include: "#pragma_mark"
                }
            ]
        },
        {
            # Dead #elif branch.
            begin: "^\\s*((#)\\s*elif\\b)",
            beginCaptures: {
                "0" => {
                    name: "meta.preprocessor"
                },
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
            "contentName" => "comment.block.preprocessor.if-branch",
            patterns: [
                {
                    include: "#disabled"
                },
                {
                    include: "#pragma_mark"
                }
            ]
        },
        {
            # Live if-branch body.
            begin: "\\n",
            end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
            patterns: [
                {
                    include: "$initial_context"
                }
            ]
        }
    ]
}
# Same but inside a { } block: live body uses #block_context.
cpp_grammar[:preprocessor_rule_enabled_block] = {
    begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "^\\s*((#)\\s*endif\\b)",
    endCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    patterns: [
        {
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#comments_context"
        },
        {
            begin: "^\\s*((#)\\s*else\\b)",
            beginCaptures: {
                "0" => {
                    name: "meta.preprocessor"
                },
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=^\\s*((#)\\s*endif\\b))",
            "contentName" => "comment.block.preprocessor.else-branch.in-block",
            patterns: [
                {
                    include: "#disabled"
                },
                {
                    include: "#pragma_mark"
                }
            ]
        },
        {
            begin: "^\\s*((#)\\s*elif\\b)",
            beginCaptures: {
                "0" => {
                    name: "meta.preprocessor"
                },
                "1" => {
                    name: "keyword.control.directive.conditional"
                },
                "2" => {
                    name: "punctuation.definition.directive"
                }
            },
            end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
            "contentName" => "comment.block.preprocessor.if-branch.in-block",
            patterns: [
                {
                    include: "#disabled"
                },
                {
                    include: "#pragma_mark"
                }
            ]
        },
        {
            begin: "\\n",
            end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
            patterns: [
                {
                    include: "#block_context"
                }
            ]
        }
    ]
}
# `#elif 1`: this branch is live; any following #else/#elif up to #endif is
# dead and tagged as a preprocessor comment.
cpp_grammar[:preprocessor_rule_enabled_elif] = {
    begin: "^\\s*((#)\\s*elif\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "(?=^\\s*((#)\\s*endif\\b))",
    patterns: [
        {
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#comments_context"
        },
        {
            begin: "\\n",
            end: "(?=^\\s*((#)\\s*(?:endif)\\b))",
            patterns: [
                {
                    begin: "^\\s*((#)\\s*(else)\\b)",
                    beginCaptures: {
                        "0" => {
                            name: "meta.preprocessor"
                        },
                        "1" => {
                            name: "keyword.control.directive.conditional"
                        },
                        "2" => {
                            name: "punctuation.definition.directive"
                        }
                    },
                    end: "(?=^\\s*((#)\\s*endif\\b))",
                    "contentName" => "comment.block.preprocessor.elif-branch",
                    patterns: [
                        {
                            include: "#disabled"
                        },
                        {
                            include: "#pragma_mark"
                        }
                    ]
                },
                {
                    begin: "^\\s*((#)\\s*(elif)\\b)",
                    beginCaptures: {
                        "0" => {
                            name: "meta.preprocessor"
                        },
                        "1" => {
                            name: "keyword.control.directive.conditional"
                        },
                        "2" => {
                            name: "punctuation.definition.directive"
                        }
                    },
                    end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
                    "contentName" => "comment.block.preprocessor.elif-branch",
                    patterns: [
                        {
                            include: "#disabled"
                        },
                        {
                            include: "#pragma_mark"
                        }
                    ]
                },
                {
                    include: "$initial_context"
                }
            ]
        }
    ]
}
# `#elif 1` inside a { } block: live branch body uses #block_context.
cpp_grammar[:preprocessor_rule_enabled_elif_block] = {
    begin: "^\\s*((#)\\s*elif\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "(?=^\\s*((#)\\s*endif\\b))",
    patterns: [
        {
            begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
            end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
            name: "meta.preprocessor",
            patterns: [
                {
                    include: "#preprocessor_rule_conditional_line_context"
                }
            ]
        },
        {
            include: "#comments_context"
        },
        {
            begin: "\\n",
            end: "(?=^\\s*((#)\\s*(?:endif)\\b))",
            patterns: [
                {
                    begin: "^\\s*((#)\\s*(else)\\b)",
                    beginCaptures: {
                        "0" => {
                            name: "meta.preprocessor"
                        },
                        "1" => {
                            name: "keyword.control.directive.conditional"
                        },
                        "2" => {
                            name: "punctuation.definition.directive"
                        }
                    },
                    end: "(?=^\\s*((#)\\s*endif\\b))",
                    "contentName" => "comment.block.preprocessor.elif-branch.in-block",
                    patterns: [
                        {
                            include: "#disabled"
                        },
                        {
                            include: "#pragma_mark"
                        }
                    ]
                },
                {
                    begin: "^\\s*((#)\\s*(elif)\\b)",
                    beginCaptures: {
                        "0" => {
                            name: "meta.preprocessor"
                        },
                        "1" => {
                            name: "keyword.control.directive.conditional"
                        },
                        "2" => {
                            name: "punctuation.definition.directive"
                        }
                    },
                    end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
                    "contentName" => "comment.block.preprocessor.elif-branch",
                    patterns: [
                        {
                            include: "#disabled"
                        },
                        {
                            include: "#pragma_mark"
                        }
                    ]
                },
                {
                    include: "#block_context"
                }
            ]
        }
    ]
}
# #else branch that is live (used when the preceding branch was `#if 0`):
# highlight its body normally.
cpp_grammar[:preprocessor_rule_enabled_else] = {
    begin: "^\\s*((#)\\s*else\\b)",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "(?=^\\s*((#)\\s*endif\\b))",
    patterns: [
        {
            include: "$initial_context"
        }
    ]
}
# Same, inside a { } block.
cpp_grammar[:preprocessor_rule_enabled_else_block] = {
    begin: "^\\s*((#)\\s*else\\b)",
    beginCaptures: {
        "0" => {
            name: "meta.preprocessor"
        },
        "1" => {
            name: "keyword.control.directive.conditional"
        },
        "2" => {
            name: "punctuation.definition.directive"
        }
    },
    end: "(?=^\\s*((#)\\s*endif\\b))",
    patterns: [
        {
            include: "#block_context"
        }
    ]
}
# Body of a #define: everything ends at an unescaped newline ((?<!\\)(?=\s*\n))
# since a macro body only continues across lines via backslash continuations.
cpp_grammar[:preprocessor_rule_define_line_context] = [
    :vararg_ellipses,
    {
        # `#arg` stringize / `##arg` paste of a macro parameter.
        match: /##?/.then(variable_name_without_bounds).lookAheadToAvoid(@standard_character),
        name: "variable.other.macro.argument"
    },
    {
        begin: "{",
        beginCaptures: {
            "0" => {
                name: "punctuation.section.block.begin.bracket.curly"
            }
        },
        end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)|(?<!\\\\)(?=\\s*\\n)",
        endCaptures: {
            "0" => {
                name: "punctuation.section.block.end.bracket.curly"
            }
        },
        name: "meta.block",
        patterns: [
            {
                include: "#preprocessor_rule_define_line_blocks_context"
            }
        ]
    },
    {
        match: "\\(",
        name: "punctuation.section.parens.begin.bracket.round"
    },
    {
        match: "\\)",
        name: "punctuation.section.parens.end.bracket.round"
    },
    {
        begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas|asm|__asm__|auto|bool|_Bool|char|_Complex|double|enum|float|_Imaginary|int|long|short|signed|struct|typedef|union|unsigned|void)\\s*\\()\n(?=\n  (?:[A-Za-z_][A-Za-z0-9_]*+|::)++\\s*\\( # actual name\n  |\n  (?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\\s*\\(\n)",
        end: "(?<=\\))(?!\\w)|(?<!\\\\)(?=\\s*\\n)",
        name: "meta.function",
        patterns: [
            {
                include: "#preprocessor_rule_define_line_functions_context"
            }
        ]
    },
    {
        # Strings inside a macro body also terminate at an unescaped newline.
        begin: "\"",
        beginCaptures: {
            "0" => {
                name: "punctuation.definition.string.begin"
            }
        },
        end: "\"|(?<!\\\\)(?=\\s*\\n)",
        endCaptures: {
            "0" => {
                name: "punctuation.definition.string.end"
            }
        },
        name: "string.quoted.double",
        patterns: [
            {
                include: "#string_escapes_context_c"
            },
            {
                include: "#line_continuation_character"
            }
        ]
    },
    {
        begin: "'",
        beginCaptures: {
            "0" => {
                name: "punctuation.definition.string.begin"
            }
        },
        end: "'|(?<!\\\\)(?=\\s*\\n)",
        endCaptures: {
            "0" => {
                name: "punctuation.definition.string.end"
            }
        },
        name: "string.quoted.single",
        patterns: [
            {
                include: "#string_escapes_context_c"
            },
            {
                include: "#line_continuation_character"
            }
        ]
    },
    :method_access,
    :member_access,
    :$initial_context
]
# Nested { } blocks inside a #define body.
cpp_grammar[:preprocessor_rule_define_line_blocks_context] = [
    {
        begin: "{",
        beginCaptures: {
            "0" => {
                name: "punctuation.section.block.begin.bracket.curly"
            }
        },
        end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)|(?<!\\\\)(?=\\s*\\n)",
        endCaptures: {
            "0" => {
                name: "punctuation.section.block.end.bracket.curly"
            }
        },
        patterns: [
            {
                include: "#preprocessor_rule_define_line_blocks_context"
            },
            {
                include: "#preprocessor_rule_define_line_context"
            }
        ]
    },
    {
        include: "#preprocessor_rule_define_line_context"
    }
]
# Function calls inside a #define body; like the normal call context but
# every range also ends at an unescaped newline.
cpp_grammar[:preprocessor_rule_define_line_functions_context] = [
    :comments_context,
    :storage_types,
    :vararg_ellipses,
    :method_access,
    :member_access,
    :operators,
    {
        begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\\s*\\()\n(\n(?:[A-Za-z_][A-Za-z0-9_]*+|::)++ # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\n)\n\\s*(\\()",
        beginCaptures: {
            "1" => {
                name: "entity.name.function"
            },
            "2" => {
                name: "punctuation.section.arguments.begin.bracket.round"
            }
        },
        end: "(\\))|(?<!\\\\)(?=\\s*\\n)",
        endCaptures: {
            "1" => {
                name: "punctuation.section.arguments.end.bracket.round"
            }
        },
        patterns: [
            {
                include: "#preprocessor_rule_define_line_functions_context"
            }
        ]
    },
    {
        begin: "\\(",
        beginCaptures: {
            "0" => {
                name: "punctuation.section.parens.begin.bracket.round"
            }
        },
        end: "(\\))|(?<!\\\\)(?=\\s*\\n)",
        endCaptures: {
            "1" => {
                name: "punctuation.section.parens.end.bracket.round"
            }
        },
        patterns: [
            {
                include: "#preprocessor_rule_define_line_functions_context"
            }
        ]
    },
    :preprocessor_rule_define_line_context
]
# Interior of a C-style function definition: parameter lists (with
# #probably_a_parameter), nested parens, types, operators, attributes.
cpp_grammar[:function_context_c] = [
    :attributes,
    :comments_context,
    :storage_types,
    :operators,
    :vararg_ellipses,
    {
        name: "meta.function.definition.parameters",
        begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\\s*\\()\n(\n(?:[A-Za-z_][A-Za-z0-9_]*+|::)++ # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\n)\n\\s*(\\()",
        beginCaptures: {
            "1" => {
                name: "entity.name.function"
            },
            "2" => {
                name: "punctuation.section.parameters.begin.bracket.round"
            },
        },
        # `:` also ends the range so constructor initializer lists terminate it.
        end: /\)|:/,
        endCaptures: {
            "0" => {
                name: "punctuation.section.parameters.end.bracket.round"
            }
        },
        patterns: [
            {
                include: "#probably_a_parameter"
            },
            {
                include: "#function_context_c"
            }
        ]
    },
    {
        begin: "\\(",
        beginCaptures: {
            "0" => {
                name: "punctuation.section.parens.begin.bracket.round"
            }
        },
        end: "\\)",
        endCaptures: {
            "0" => {
                name: "punctuation.section.parens.end.bracket.round"
            }
        },
        patterns: [
            {
                include: "#function_context_c"
            }
        ]
    },
    :$initial_context
]
# Interior of a function call: `new Type(...)` gets special handling, then
# ordinary nested calls, parens, and block content.
cpp_grammar[:function_call_context_c] = [
    :attributes,
    :comments_context,
    :storage_types,
    :method_access,
    :member_access,
    :operators,
    {
        begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\\s*\\()\n(\n(?:new)\\s*(#{maybe(template_call.without_numbered_capture_groups)}) # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\n)\n\\s*(\\()",
        beginCaptures: {
            "1" => {
                name: "keyword.operator.wordlike memory keyword.operator.new"
            },
            "2" => {
                patterns: [
                    {
                        include: "#template_call_innards"
                    }
                ]
            },
            "3" => {
                name: "punctuation.section.arguments.begin.bracket.round"
            },
        },
        end: "\\)",
        endCaptures: {
            "0" => {
                name: "punctuation.section.arguments.end.bracket.round"
            }
        },
        patterns: [
            {
                include: "#function_call_context_c"
            }
        ]
    },
    :function_call,
    {
        begin: "\\(",
        beginCaptures: {
            "0" => {
                name: "punctuation.section.parens.begin.bracket.round"
            }
        },
        end: "\\)",
        endCaptures: {
            "0" => {
                name: "punctuation.section.parens.end.bracket.round"
            }
        },
        patterns: [
            {
                include: "#function_call_context_c"
            }
        ]
    },
    :block_context
]
# Export: write the assembled grammar next to this script, in both YAML and
# JSON forms (the DSL derives the actual file extensions), plus a tags list.
Dir.chdir __dir__
# Save
@syntax_location = "../syntaxes/cpp.tmLanguage"
cpp_grammar.saveAsYamlTo(@syntax_location)
cpp_grammar.saveAsJsonTo(@syntax_location)
cpp_grammar.saveTagsTo("tags.txt")
# TODO, upgrade the code so this is not necessary
# for exporting to C
@cpp_grammar = cpp_grammar
|
require_relative '../textmate_tools.rb'
require_relative './cpp_tokens.rb'
# todo
# fix initializer list "functions" e.g. `int a{5};`
# fix the ... inside of macros
# have all patterns with keywords be dynamically generated
# Root grammar object; these strings are emitted verbatim into the generated
# grammar file's metadata. (Fixed: "readble" -> "readable", missing "is".)
cpp_grammar = Grammar.new(
    name:"C++",
    scope_name: "source.cpp",
    version: "https://github.com/jeff-hykin/cpp-textmate-grammar/blob/master/generate.rb",
    information_for_contributors: [
        "This code was auto generated by a much-more-readable ruby file: https://github.com/jeff-hykin/cpp-textmate-grammar/blob/master/generate.rb",
        "This file is essentially an updated/improved fork of the atom syntax https://github.com/atom/language-c/blob/master/grammars/c%2B%2B.cson",
    ],
)
#
#
# Numbers
#
#
#
# misc
#
# C++14 digit separator `'` — must have a digit (not another `'`) on each side.
# NOTE(review): repository_name keeps the historical misspelling "seperator";
# it is the generated repository key, so renaming it would change the output
# grammar — do not "fix" it casually.
number_seperator_pattern = newPattern(
    should_fully_match: [ "'" ],
    should_partial_match: [ "1'1", "1'", "'1" ],
    should_not_partial_match: [ "1''1", "1''" ],
    repository_name: 'literal_numeric_seperator',
    match: lookBehindToAvoid(/'/).then(/'/).lookAheadToAvoid(/'/),
    tag_as:"punctuation.separator.constant.numeric",
)
# A run of hex digits, optionally interleaved with digit separators.
hex_digits = newPattern(
    should_fully_match: [ "1", "123456", "DeAdBeeF", "49'30'94", "DeA'dBe'eF", "dea234f4930" ],
    should_not_fully_match: [ "'3902" , "de2300p1000", "0x000" ],
    should_not_partial_match: [ "p", "x", "." ],
    match: /[0-9a-fA-F]/.zeroOrMoreOf(/[0-9a-fA-F]/.or(number_seperator_pattern)),
    tag_as: "constant.numeric.hexadecimal",
    includes: [ number_seperator_pattern ],
)
# A run of decimal digits, optionally interleaved with digit separators.
decimal_digits = newPattern(
    should_fully_match: [ "1", "123456", "49'30'94" , "1'2" ],
    should_not_fully_match: [ "'3902" , "1.2", "0x000" ],
    match: /[0-9]/.zeroOrMoreOf(/[0-9]/.or(number_seperator_pattern)),
    tag_as: "constant.numeric.decimal",
    includes: [ number_seperator_pattern ],
)
# see https://en.cppreference.com/w/cpp/language/floating_literal
# Binary exponent of a hex float: p/P, optional sign, decimal digits.
hex_exponent = newPattern(
    should_fully_match: [ "p100", "p-100", "p+100", "P100" ],
    should_not_fully_match: [ "p0x0", "p-+100" ],
    match: newPattern(
        match: /[pP]/,
        tag_as: "keyword.other.unit.exponent.hexadecimal",
    ).maybe(
        match: /\+/,
        tag_as: "keyword.operator.plus.exponent.hexadecimal",
    ).maybe(
        match: /\-/,
        tag_as: "keyword.operator.minus.exponent.hexadecimal",
    ).then(
        match: decimal_digits.without_numbered_capture_groups,
        tag_as: "constant.numeric.exponent.hexadecimal",
        includes: [ number_seperator_pattern ]
    ),
)
# Decimal exponent: e/E, optional sign, decimal digits.
decimal_exponent = newPattern(
    should_fully_match: [ "e100", "e-100", "e+100", "E100", ],
    should_not_fully_match: [ "e0x0", "e-+100" ],
    match: newPattern(
        match: /[eE]/,
        tag_as: "keyword.other.unit.exponent.decimal",
    ).maybe(
        match: /\+/,
        tag_as: "keyword.operator.plus.exponent.decimal",
    ).maybe(
        match: /\-/,
        tag_as: "keyword.operator.minus.exponent.decimal",
    ).then(
        match: decimal_digits.without_numbered_capture_groups,
        tag_as: "constant.numeric.exponent.decimal",
        includes: [ number_seperator_pattern ]
    ),
)
#
# Number Literal
#
# Unified C++ numeric literal: (hex|decimal) float with optional suffix, OR
# (binary|octal|hex|decimal) integer with optional suffix, OR a user-defined
# literal ending. Order matters: floats are tried before integers.
number_literal = newPattern(
    repository_name: 'number_literal',
    match: lookBehindToAvoid(/\w/).then(
        # Floating point
        # see https://en.cppreference.com/w/cpp/language/floating_literal
        newPattern(
            floating_literal = newPattern(
                # Hex
                newPattern(
                    hex_literal_float = newPattern(
                        match: /0[xX]/,
                        tag_as: "keyword.other.unit.hexadecimal",
                    ).maybe(
                        hex_digits
                    ).then(
                        # lookBehind/Ahead because there needs to be a hex digit on at least one side
                        match: lookBehindFor(/[0-9a-fA-F]/).then(/\./).or(/\./.lookAheadFor(/[0-9a-fA-F]/)),
                        tag_as: "constant.numeric.hexadecimal",
                    ).maybe(
                        hex_digits
                    ).maybe(
                        hex_exponent
                    )
                # Decimal
                ).or(
                    decimal_literal_float = maybe(
                        decimal_digits
                    ).then(
                        # lookBehind/Ahead because there needs to be a decimal digit on at least one side
                        match: lookBehindFor(/[0-9]/).then(/\./).or(/\./.lookAheadFor(/[0-9]/)),
                        tag_as: "constant.numeric.decimal.point",
                    ).maybe(
                        decimal_digits
                    ).maybe(
                        decimal_exponent
                    )
                )
            # Floating point suffix
            ).maybe(
                literal_float_suffix = newPattern(
                    match: /[lLfF]/.lookAheadToAvoid(/\w/),
                    tag_as: "keyword.other.unit.suffix.floating-point"
                )
            )
        # Integer
        # see https://en.cppreference.com/w/cpp/language/integer_literal
        ).or(
            integer_literal = newPattern(
                # Binary
                newPattern(
                    binary_literal_integer = newPattern(
                        match: /0[bB]/,
                        tag_as: "keyword.other.unit.binary"
                    ).then(
                        match: oneOrMoreOf(/[01]/.or(number_seperator_pattern)),
                        tag_as: "constant.numeric.binary",
                        includes: [ number_seperator_pattern ]
                    )
                # Octal
                ).or(
                    octal_literal_integer = newPattern(
                        match: /0/,
                        tag_as: "keyword.other.unit.octal"
                    ).then(
                        match: oneOrMoreOf(/[0-7]/.or(number_seperator_pattern)),
                        tag_as: "constant.numeric.octal",
                        includes: [ number_seperator_pattern ]
                    )
                # Hex
                ).or(
                    hex_literal_integer = newPattern(
                        match: /0[xX]/,
                        tag_as: "keyword.other.unit.hexadecimal",
                    ).then(
                        hex_digits
                    ).maybe(
                        hex_exponent
                    )
                # Decimal
                ).or(
                    decimal_literal_integer = newPattern(
                        decimal_digits
                    ).maybe(
                        decimal_exponent
                    )
                )
            # integer suffix
            ).maybe(
                literal_integer_suffix = newPattern(
                    match: /[uU]/.or(/[uU]ll?/).or(/[uU]LL?/).or(/ll?[uU]?/).or(/LL?[uU]?/).lookAheadToAvoid(/\w/),
                    tag_as: "keyword.other.unit.suffix.integer"
                )
            )
        )
    # user defined endings
    ).then(
        match: /\w*/,
        tag_as: "keyword.other.unit.user-defined"
    )
)
#
# Contexts
#
# eventually this context will be more exclusive (can't have class definitions inside of an evaluation)
# but for now it just includes everything
evaluation_context = [
    '$base'
    # function call
    # number literal
    # lambdas
]
#
# Variable
#
# todo: make a better name for this function
# Wraps a regex with word-boundary-equivalent lookarounds (more accurate than \b
# for the grammar's notion of an identifier character).
variableBounds = ->(regex_pattern) do
    lookBehindToAvoid(@standard_character).then(regex_pattern).lookAheadToAvoid(@standard_character)
end
variable_name_without_bounds = /[a-zA-Z_]#{@standard_character.without_default_mode_modifiers}*/
# word bounds are inefficient, but they are accurate
variable_name = variableBounds[variable_name_without_bounds]
#
# Constants
#
# Language literals: true/false/nullptr etc. (driven by the cpp_tokens table).
language_constants = newPattern(
    repository_name: 'constants',
    match: variableBounds[@cpp_tokens.that(:isLiteral)],
    tag_as: "constant.language"
)
#
# Types
#
look_behind_for_type = lookBehindFor(/\w |\*\/|[&*>\]\)]|\.\.\./).maybe(@spaces)
# why is posix reserved types not in "storage_types"? I don't know, if you can get it in there and everything still works it would be appreciated
posix_reserved_types = newPattern(
    match: variableBounds[ /[a-zA-Z_]/.zeroOrMoreOf(@standard_character).then(/_t/) ],
    tag_as: "support.type.posix-reserved"
)
# Primitive types, table-driven non-primitive types, and a few legacy
# type-introducers (asm/enum/union/struct).
storage_types = newPattern(
    repository_name: 'storage_types',
    includes: [
        primitive_types = newPattern(
            match: variableBounds[ @cpp_tokens.that(:isPrimitive) ],
            tag_as: "storage.type.primitive"
        ),
        non_primitive_types = newPattern(
            match: variableBounds[@cpp_tokens.that(not(:isPrimitive), :isType)],
            tag_as: "storage.type"
        ),
        # FIXME, these should be changed to each have their own matcher, and struct should be handled the similar to 'class'
        other_types = newPattern(
            match: variableBounds[ /(asm|__asm__|enum|union|struct)/ ],
            tag_as: "storage.type.$match"
        )
    ]
)
#
# Keywords and Keyword-ish things
#
functional_specifiers_pre_parameters = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isFunctionSpecifier) ],
    tag_as: "storage.modifier.specificer.functional.pre-parameters.$match"
)
# Specifiers like const/noexcept/override that follow the parameter list and
# precede the body (`{`), `;`, or a newline.
qualifiers_and_specifiers_post_parameters = newPattern(
    match: variableBounds[ @cpp_tokens.that(:canAppearAfterParametersBeforeBody) ].lookAheadFor(/\s*/.then(/\{/.or(/;/).or(/[\n\r]/))),
    tag_as: "storage.modifier.specifier.functional.post-parameters.$match"
)
storage_specifiers = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isStorageSpecifier) ],
    tag_as: "storage.modifier.specifier.$match"
)
# public:/private:/protected: — the trailing colon is part of the match.
access_control_keywords = newPattern(
    match: lookBehindToAvoid(@standard_character).then(@cpp_tokens.that(:isAccessSpecifier)).maybe(@spaces).then(/:/),
    tag_as: "storage.type.modifier.access.control.$match"
)
exception_keywords = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isExceptionRelated) ],
    tag_as: "keyword.control.exception.$match"
)
other_keywords = newPattern(
    match: variableBounds[ /(using|typedef)/ ],
    tag_as: "keyword.other.$match"
)
the_this_keyword = newPattern(
    match: variableBounds[ /this/ ],
    tag_as: "variable.language.this"
)
# TODO: enhance casting operators to include <>'s
type_casting_operators = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isTypeCastingOperator) ],
    tag_as: "keyword.operator.cast.$match"
)
# new / delete / delete[] — `delete []` (with optional spaces) is tried first
# so the array form wins over plain `delete`.
memory_operators = newPattern(
    repository_name: 'memory_operators',
    tag_as: "keyword.operator.memory",
    match: lookBehindToAvoid(
        @standard_character
    ).then(
        newPattern(
            newPattern(
                match: /delete/,
                tag_as: "keyword.operator.memory.delete.array"
            ).maybe(@spaces).then(
                match: /\[\]/,
                tag_as: "keyword.operator.memory.delete.array.bracket"
            )
        ).or(
            match: /delete/,
            tag_as: "keyword.operator.memory.delete"
        ).or(
            match: /new/,
            tag_as: "keyword.operator.memory.new"
        )
    ).lookAheadToAvoid(@standard_character)
)
control_flow_keywords = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isControlFlow) ],
    tag_as: "keyword.control.$match"
)
#
# Templates
#
# characters permitted inside a template argument list (no nesting-aware parse)
characters_in_template_call = /[\s<>,\w]/
# patterns used to highlight the contents of a template argument list
template_call_context = [
    :storage_types,
    :constants,
    :scope_resolution,
    newPattern(
        match: variable_name,
        tag_as: 'storage.type.user-defined'
    ),
    :operators,
    :number_literal,
    :strings,
    newPattern(
        match: /,/,
        tag_as: "punctuation.separator.comma.template.argument"
    )
]
# note: template_call should ideally be a Range(), the reason it's not is
# because it's embedded inside of other patterns
template_call = newPattern(
    repository_name: 'template_call_innards',
    tag_as: 'meta.template.call',
    match: /</.zeroOrMoreOf(characters_in_template_call).then(/>/).maybe(@spaces),
    includes: template_call_context
)
# patterns used inside a 'template<...>' definition
template_definition_context = [
    :scope_resolution,
    :template_definition_argument,
    :template_argument_defaulted,
    :template_call_innards,
    *evaluation_context
]
# matches 'template' followed by the opening '<' of the parameter list
template_start = lookBehindToAvoid(@standard_character).then(
    match: /template/,
    tag_as: "storage.type.template"
).maybe(@spaces).then(
    match: /</,
    tag_as: "punctuation.section.angle-brackets.start.template.definition"
)
# a template definition that is by itself on a line (this is ideal)
template_isolated_definition = newPattern(
    repository_name: 'template_isolated_definition',
    match: template_start.then(
        match: zeroOrMoreOf(/./),
        tag_as: "meta.template.definition",
        includes: template_definition_context,
    ).then(
        match: />/.maybe(@spaces).then(/$/),
        tag_as: "punctuation.section.angle-brackets.end.template.definition"
    ),
)
# multi-line template definition, parsed as a Range from 'template<' to '>'
template_definition = Range.new(
    repository_name: 'template_definition',
    tag_as: 'meta.template.definition',
    start_pattern: template_start,
    end_pattern: newPattern(
        match: />/,
        tag_as: "punctuation.section.angle-brackets.end.template.definition"
    ),
    includes: [
        # a template call inside of a non-isolated template definition
        # however this is rolling the dice: because if there is a less-than operator in a defaulted argument, then this pattern will screw everything up
        # a better solution would be nice, but its going to be difficult/impossible
        Range.new(
            start_pattern: newPattern(
                match: lookBehindFor(/\w/).maybe(@spaces).then(/</),
                tag_as: "punctuation.section.angle-brackets.begin.template.call"
            ),
            end_pattern: newPattern(
                match: />/,
                # BUGFIX: the closing '>' was tagged with the *begin* scope
                # (copy-paste of the start_pattern tag); it is the end delimiter
                tag_as: "punctuation.section.angle-brackets.end.template.call"
            ),
            includes: template_call_context
        ),
        *template_definition_context,
    ]
)
# a template parameter with a default value, e.g. 'typename T = int'
# (matches up to and including the '=')
template_argument_defaulted = newPattern(
    repository_name: 'template_argument_defaulted',
    match: lookBehindFor(/<|,/).maybe(@spaces).then(
        match: zeroOrMoreOf(variable_name_without_bounds.then(@spaces)),
        tag_as: "storage.type.template",
    ).then(
        match: variable_name_without_bounds,
        tag_as: "entity.name.type.template"
    ).maybe(@spaces).then(
        match: /[=]/,
        tag_as: "keyword.operator.assignment"
    )
)
# a single (non-defaulted) template parameter; three alternatives, see cases below
template_definition_argument = newPattern(
    repository_name: 'template_definition_argument',
    match: maybe(
        @spaces
    # case 1: only one word
    ).then(
        match: variable_name_without_bounds,
        tag_as: "storage.type.template.argument.$match",
    # case 2: normal situation (ex: "typename T")
    ).or(
        newPattern(
            match: oneOrMoreOf(variable_name_without_bounds.then(@spaces)),
            tag_as: "storage.type.template.argument.$match",
        ).then(
            match: variable_name_without_bounds,
            tag_as: "entity.name.type.template",
        )
    # case 3: ellipses (ex: "typename... Args")
    ).or(
        newPattern(
            match: variable_name_without_bounds,
            tag_as: "storage.type.template",
        ).maybe(@spaces).then(
            match: /\.\.\./,
            tag_as: "punctuation.vararg-ellipses.template.definition",
        ).maybe(@spaces).then(
            match: variable_name_without_bounds,
            tag_as: "entity.name.type.template"
        )
    ).maybe(@spaces).then(
        # each argument ends with a comma, or with the closing '>' / end of line
        newPattern(
            match: /,/,
            tag_as: "punctuation.separator.comma.template.argument",
        ).or(
            lookAheadFor(/>|$/)
        )
    )
)
#
# Scope resolution
#
# one 'Name::' segment, optionally with a template call (e.g. 'Name<T>::')
one_scope_resolution = variable_name_without_bounds.maybe(@spaces).maybe(template_call.without_numbered_capture_groups).then(/::/)
# zero or more leading 'A::B::' segments, re-highlighted via :scope_resolution
preceding_scopes = newPattern(
    match: zeroOrMoreOf(one_scope_resolution).maybe(@spaces),
    includes: [ :scope_resolution ]
)
# the full 'A::B::' prefix, tagging the final namespace name and the '::'
scope_resolution = newPattern(
    repository_name: 'scope_resolution',
    tag_as: "meta.scope-resolution",
    match: preceding_scopes.then(
        match: variable_name_without_bounds,
        tag_as: "entity.name.namespace.scope-resolution"
    ).maybe(@spaces).maybe(
        template_call
    ).then(
        match: /::/,
        tag_as: "punctuation.separator.namespace.access"
    )
)
#
# Functions
#
# factory for "keyword-that-looks-like-a-function-call" Ranges, e.g. sizeof(...)
# tags the name, the parentheses, and highlights the arguments as expressions
functionTemplate = ->(repository_name:nil, match_name: nil, tag_name_as: nil, tag_content_as: nil, tag_parenthese_as: nil) do
    return Range.new(
        repository_name: repository_name,
        tag_content_as: "meta.#{tag_content_as}",
        start_pattern: newPattern(
            match: match_name,
            tag_as: tag_name_as,
        ).then(
            match: /\(/,
            tag_as: "punctuation.section.arguments.begin.bracket.round.#{tag_parenthese_as}"
        ),
        end_pattern: newPattern(
            match: /\)/,
            tag_as: "punctuation.section.arguments.end.bracket.round.#{tag_parenthese_as}"
        ),
        includes: evaluation_context
    )
end
# words that can never be a function name (keywords etc., minus preprocessor directives)
cant_be_a_function_name = @cpp_tokens.that(:isWord, not(:isPreprocessorDirective), not(:isValidFunctionName))
avoid_invalid_function_names = lookBehindToAvoid(@standard_character).lookAheadToAvoid(maybe(@spaces).then(cant_be_a_function_name).maybe(@spaces).then(/\(/))
look_ahead_for_function_name = lookAheadFor(variable_name_without_bounds.maybe(@spaces).then(/\(/))
# a function definition's parameter section: from the name up to the closing ')'
function_definition = Range.new(
    tag_as: "meta.function.definition.parameters",
    start_pattern: avoid_invalid_function_names.then(look_ahead_for_function_name),
    end_pattern: lookBehindFor(/\)/),
    includes: [ "#function-innards-c" ]
)
# a full match example of function call would be: aNameSpace::subClass<TemplateArg>FunctionName<5>(
function_call = Range.new(
    start_pattern: avoid_invalid_function_names.then(
        preceding_scopes
    ).then(
        match: variable_name_without_bounds,
        tag_as: "entity.name.function.call"
    ).maybe(@spaces).maybe(
        template_call
    ).then(
        match: /\(/,
        tag_as: "punctuation.section.arguments.begin.bracket.round"
    ),
    end_pattern: newPattern(
        match: /\)/,
        tag_as: "punctuation.section.arguments.end.bracket.round"
    ),
    includes: [ "#function-call-innards-c" ]
)
#
# Operators
#
# accumulates all operator patterns; order matters (longer operators first)
operator_context = []
# word operators like and/or/not that are not casts, control flow, or function-like
normal_word_operators = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isOperator, :isWord, not(:isTypeCastingOperator), not(:isControlFlow), not(:isFunctionLike)) ],
    tag_as: "keyword.operator.$match",
)
# generate a Range for each function-like operator (e.g. sizeof, typeid, alignof)
array_of_function_like_operators = @cpp_tokens.tokens.select { |each| each[:isFunctionLike] && !each[:isSpecifier] }
for each in array_of_function_like_operators
    name = each[:name]
    operator_context.push(functionTemplate[
        repository_name: "#{name}_operator",
        match_name: variableBounds[/#{name}/],
        tag_name_as: "keyword.operator.#{name}",
        tag_content_as: "arguments.operator.#{name}",
        tag_parenthese_as: "operator.#{name} keyword.operator.#{name}"
    ])
end
# the remaining operators: decltype, casts, member access, then symbolic
# operators (raw tmLanguage hashes) ordered so compound forms match first
operator_context += [
    functionTemplate[
        repository_name: "decltype_specifier",
        match_name: variableBounds[/decltype/],
        tag_name_as: "keyword.other.decltype storage.type.decltype",
        tag_content_as: "arguments.decltype",
        tag_parenthese_as: "decltype storage.type.decltype"
    ],
    type_casting_operators,
    :method_access,
    :member_access,
    normal_word_operators,
    :vararg_ellipses,
    {
        match: "--",
        name: "keyword.operator.decrement"
    },
    {
        match: "\\+\\+",
        name: "keyword.operator.increment"
    },
    {
        # the (?<!\() avoids matching '/=' after an opening parenthesis
        match: "%=|\\+=|-=|\\*=|(?<!\\()/=",
        name: "keyword.operator.assignment.compound"
    },
    {
        match: "&=|\\^=|<<=|>>=|\\|=",
        name: "keyword.operator.assignment.compound.bitwise"
    },
    {
        match: "<<|>>",
        name: "keyword.operator.bitwise.shift"
    },
    {
        match: "!=|<=|>=|==|<|>",
        name: "keyword.operator.comparison"
    },
    {
        match: "&&|!|\\|\\|",
        name: "keyword.operator.logical"
    },
    {
        match: "&|\\||\\^|~",
        name: "keyword.operator"
    },
    {
        match: "=",
        name: "keyword.operator.assignment"
    },
    {
        match: "%|\\*|/|-|\\+",
        name: "keyword.operator"
    },
    {
        # ternary '?:' as a begin/end rule so its middle can contain anything
        begin: "\\?",
        beginCaptures: {
            "0" => {
                name: "keyword.operator.ternary"
            }
        },
        end: ":",
        applyEndPatternLast: true,
        endCaptures: {
            "0" => {
                name: "keyword.operator.ternary"
            }
        },
        patterns: [
            {
                include: "#method_access"
            },
            {
                include: "#member_access"
            },
            {
                include: "#c_function_call"
            },
            {
                include: "$base"
            }
        ]
    }
]
operators = newPattern(
    repository_name: 'operators',
    includes: operator_context,
)
#
# Probably a parameter
#
array_brackets = /\[\]/.maybe(@spaces)
comma_or_closing_paraenthese = /,/.or(/\)/)
# a parameter is followed by optional '[]' and then ',' or ')'
stuff_after_a_parameter = maybe(@spaces).lookAheadFor(maybe(array_brackets).then(comma_or_closing_paraenthese))
# heuristic: a name before '=' is a defaulted parameter; a name after a type
# and before ','/')' is a plain parameter
probably_a_parameter = newPattern(
    repository_name: 'probably_a_parameter',
    match: newPattern(
        match: variable_name_without_bounds.maybe(@spaces).lookAheadFor("="),
        tag_as: "variable.parameter.defaulted"
    ).or(
        match: look_behind_for_type.then(variable_name_without_bounds).then(stuff_after_a_parameter),
        tag_as: "variable.parameter"
    )
)
#
# Operator overload
#
# symbols can have spaces
operator_symbols = maybe(@spaces).then(@cpp_tokens.that(:canAppearAfterOperatorKeyword, :isSymbol))
# words must have spaces, the variable_name_without_bounds is for implicit overloads
operator_wordish = @spaces.then(@cpp_tokens.that(:canAppearAfterOperatorKeyword, :isWordish).or(zeroOrMoreOf(one_scope_resolution).then(variable_name_without_bounds).maybe(@spaces).maybe(/&/)))
after_operator_keyword = operator_symbols.or(operator_wordish)
# 'operator<symbol-or-type>(' ... ')' — the overloaded-operator definition form
operator_overload = Range.new(
    repository_name: 'operator_overload',
    tag_as: "meta.function.definition.parameters.operator-overload",
    start_pattern: newPattern(
        match: /operator/,
        tag_as: "keyword.other.operator.overload",
    ).then(
        match: after_operator_keyword,
        tag_as: "entity.name.operator.overloadee",
        includes: [:scope_resolution]
    ).maybe(@spaces).then(
        match: /\(/,
        tag_as: "punctuation.section.parameters.begin.bracket.round"
    ),
    end_pattern: newPattern(
        match: /\)/,
        tag_as: "punctuation.section.parameters.end.bracket.round"
    ),
    includes: [:probably_a_parameter, :'function-innards-c' ]
)
#
# Access . .* -> ->*
#
dot_operator = /\.\*/.or(/\./)
arrow_operator = /->\*/.or(/->/)
# '.'/'.*' is dot-access, '->'/'->*' is pointer-access
member_operator = newPattern(
    match: dot_operator,
    tag_as: "punctuation.separator.dot-access"
).or(
    match: arrow_operator,
    tag_as: "punctuation.separator.pointer-access"
)
# 'name.' / 'name->' links in a chained access (a.b->c...)
subsequent_object_with_operator = variable_name_without_bounds.maybe(@spaces).then(member_operator.without_numbered_capture_groups).maybe(@spaces)
# TODO: the member_access and method_access can probably be simplified considerably
# TODO: member_access and method_access might also need additional matching to handle scope resolutions
# the leading object (or a ']' / ')' it was produced by) plus the access operator
partial_member = newPattern(
    match: variable_name_without_bounds.or(lookBehindFor(/\]|\)/)).maybe(@spaces),
    tag_as: "variable.other.object.access",
).then(
    member_operator
)
member_context = [
    :member_access,
    :method_access,
    partial_member
]
# the whole chain up to (but excluding) the final member/method name
member_start = partial_member.then(
    match: zeroOrMoreOf(subsequent_object_with_operator),
    includes: member_context
).maybe(@spaces)
# access to attribute
member_access = newPattern(
    repository_name: 'member_access',
    match: member_start.then(
        # final name must not be a type and must not be followed by '(' (that's a method)
        match: @word_boundary.lookAheadToAvoid(@cpp_tokens.that(:isType)).then(variable_name_without_bounds).then(@word_boundary).lookAheadToAvoid(/\(/),
        tag_as: "variable.other.member"
    )
)
# access to method
method_access = Range.new(
    repository_name: 'method_access',
    tag_content_as: "meta.function-call.member",
    start_pattern: member_start.then(
        match: variable_name_without_bounds,
        tag_as: "entity.name.function.member"
    ).then(
        match: /\(/,
        tag_as: "punctuation.section.arguments.begin.bracket.round.function.member"
    ),
    end_pattern: newPattern(
        match: /\)/,
        tag_as: "punctuation.section.arguments.end.bracket.round.function.member"
    ),
    includes: ["#function-call-innards-c"],
)
#
# Namespace
#
# see https://en.cppreference.com/w/cpp/language/namespace
# 'using namespace A::B;' — start requires the full statement via lookahead,
# then the Range consumes up to the terminating ';'
using_namespace = Range.new(
    tag_as: "meta.using-namespace-declaration",
    start_pattern: lookBehindToAvoid(@standard_character).then(
        match: /using/,
        tag_as: "keyword.other.using.directive",
    ).then(@spaces).then(
        match: /namespace/,
        tag_as: "keyword.other.namespace.directive storage.type.namespace.directive"
    ).then(@spaces).maybe(
        preceding_scopes
    ).then(
        match: variable_name,
        tag_as: "entity.name.namespace"
    ).lookAheadFor(
        /;|\n/
    ),
    end_pattern: newPattern(
        match: /;/,
        tag_as: "punctuation.terminator.statement"
    ),
)
# TODO: add support for namespace name = qualified-namespace ;
# 'namespace X { ... }' (named or anonymous); the inner Range handles the braces
namespace_definition = Range.new(
    tag_as: "meta.namespace-block",
    start_pattern: lookBehindToAvoid(@standard_character).then(
        match: /namespace/,
        tag_as: "keyword.other.namespace.definition storage.type.namespace.definition"
    ).then(@spaces).then(
        # Named namespace (with possible scope )
        preceding_scopes
    ).maybe(@spaces).then(
        newPattern(
            match: variable_name,
            tag_as: "entity.name.namespace",
        # anonymous namespaces
        ).or(
            lookAheadFor(/\{/)
        )
    ),
    # ends after the closing '}' or bails on tokens that prove this isn't a namespace
    end_pattern: lookBehindFor(/\}/).or(lookAheadFor(/;|,|\(|\)|>|\[|\]|=/)),
    includes: [
        Range.new(
            start_pattern: newPattern(
                match: /\{/,
                tag_as: "punctuation.definition.scope"
            ),
            end_pattern: newPattern(
                match: /\}/,
                tag_as: "punctuation.definition.scope"
            ),
            includes: [:special_block, :constructor, "$base" ]
        ),
        "$base"
    ]
)
#
# Preprocessor
#
# not sure if this pattern is actually accurate (it was the one provided by atom/c.tmLanguage)
preprocessor_name_no_bounds = /[a-zA-Z_$][\w$]*/
# a macro name immediately followed by '(' — i.e. a function-like macro
preprocessor_function_name = preprocessor_name_no_bounds.lookAheadFor(maybe(@spaces).then(/\(/))
# '##name' token-pasting of a macro argument
macro_argument = newPattern(
    match: /##/.then(variable_name_without_bounds).lookAheadToAvoid(@standard_character),
    tag_as: "variable.other.macro.argument"
)
#
# Lambdas
#
# words after which a '[' must be a lambda capture (not an array subscript)
array_of_invalid_function_names = @cpp_tokens.representationsThat(:canAppearBeforeLambdaCapture)
non_variable_name = /#{array_of_invalid_function_names.map { |each| '\W'+each+'|^'+each } .join('|')}/
# a lambda: capture list, optional parameters, specifiers, return type, body
lambdas = Range.new(
    repository_name: 'lambdas',
    start_pattern: newPattern(
        should_fully_match: [ "[]", "[=]", "[&]", "[x,y,x]", "[x, y, &z, w = 1 + 1]", "[ a = blah[1324], b, c ]" ],
        should_partial_match: [ "[]", "[=](", "[&]{", "[x,y,x]", "[x, y, &z, w = 1 + 1] (", "[ a = blah[1324], b, c ] {" ],
        should_not_partial_match: [ "delete[]", "thing[]", "thing []", "thing []", "thing[0][0] = 0" ],
        # the look-behinds reject '[' preceded by an identifier/']'/')' (array access)
        match: lookBehindFor(/[^\s]|^/).lookBehindToAvoid(/[\w\]\)]/).or(lookBehindFor(non_variable_name)).maybe(@spaces).then(
            match: /\[/,
            tag_as: "punctuation.definition.capture.begin.lambda",
        ).then(
            match: /(?:.*\[.*?\].*?)*.*?/,
            tag_as: "meta.lambda.capture",
            # the zeroOrMoreOf() is for other []'s that are inside of the lambda capture
            # this pattern is still imperfect: if someone had a string literal with ['s in it, it could fail
            includes: [ probably_a_parameter, "#function-innards-c" ],
        ).then(
            match: /\]/,
            tag_as: "punctuation.definition.capture.end.lambda",
        )
    ),
    end_pattern: newPattern(
        match: lookBehindFor(/}/),
    ),
    includes: [
        # check for parameters first
        Range.new(
            tag_as: 'meta.function.definition.parameters.lambda',
            start_pattern: newPattern(
                match: /\(/,
                tag_as: "punctuation.definition.parameters.begin.lambda",
            ),
            end_pattern: newPattern(
                match: /\)/,
                tag_as: "punctuation.definition.parameters.end.lambda",
            ),
            includes: [ probably_a_parameter, "#function-innards-c" ]
        ),
        # specificers
        newPattern(
            match: variableBounds[ @cpp_tokens.that(:isLambdaSpecifier) ],
            tag_as: "storage.modifier.lambda.$match"
        ),
        # check for the -> syntax
        newPattern(
            match: /->/,
            tag_as: "punctuation.definition.lambda.return-type"
        ).maybe(
            match: /.+?/.lookAheadFor(/\{|$/),
            tag_as: "storage.type.return-type.lambda"
        ),
        # then find the body
        Range.new(
            tag_as: "meta.function.definition.body.lambda",
            start_pattern: newPattern(
                match: /\{/,
                tag_as: "punctuation.section.block.begin.bracket.curly.lambda",
            ),
            end_pattern: newPattern(
                match: /\}/,
                tag_as: "punctuation.section.block.end.bracket.curly.lambda",
            ),
            includes: [ "$base" ]
        ),
    ]
)
#
# Support
#
# TODO: currently this is not used, ideally it will be built up over time and then be included
# it will be for things such as cout, cin, vector, string, map, etc
#
# Enums
#
# 'enum [class|struct] Name [: underlying_type] { ... };'
enum_block = Range.new(
    start_pattern: newPattern(
        match: /enum/,
        tag_as: "storage.type.enum"
    ).then(@spaces).maybe(newPattern(
        match: /class|struct/,
        tag_as: "storage.type.enum",
    ).then(@spaces)).then(
        match: variable_name,
        tag_as: "entity.name.type.enum",
    ).maybe(maybe(@spaces).then(
        match: /:/,
        tag_as: "punctuation.type-specifier.colon",
    ).maybe(@spaces).then(
        match: variable_name,
        tag_as: "entity.name.type.underlying.enum",
    )),
    # ends after '}' or ';', or bails on tokens that prove this isn't an enum block
    end_pattern: newPattern(
        lookBehindFor(/\}/)
        .or(
            match: /;/,
            tag_as: "punctuation.terminator.statement",
        ).or(
            lookAheadFor(newPattern(
                match: /[()>\[\]=]/,
                tag_as: "punctuation.terminator.statement",
            ))
        )),
    tag_as: "meta.enum-block",
    includes: [
        # the '{...}' body; a '}' followed only by whitespace+newline means a missing ';'
        {
            begin: "\\{",
            beginCaptures: {
                "0" => {
                    name: "punctuation.section.block.begin.bracket.curly"
                }
            },
            end: "(\\})(\\s*\\n)?",
            endCaptures: {
                "1" => {
                    name: "punctuation.section.block.end.bracket.curly"
                },
                "2" => {
                    name: "invalid.illegal.you-forgot-semicolon"
                }
            },
            patterns: [
                {
                    include: "#special_block"
                },
                {
                    include: "#constructor"
                },
                {
                    include: "$base"
                }
            ]
        },
        "$base",
    ],
)
#
# Classes and structs
#
# the following are basically the equivalent of:
#     @cpp_tokens.that(:isAccessSpecifier).or(/,/).or(/:/)
# that ^ causes an error in the lookBehindFor() so it has to be manually spread
can_come_before_a_inherited_class = @cpp_tokens.representationsThat(:isAccessSpecifier) + [ ',', ':' ]
can_come_before_a_inherited_class_regex = /#{can_come_before_a_inherited_class.join('|')}/
# patterns for the base-class list after ':' (commas, access specifiers, names)
inhertance_context = [
    newPattern(
        match: /,/,
        tag_as: "punctuation.separator.delimiter.inhertance"
    ),
    newPattern(
        match: @cpp_tokens.that(:isAccessSpecifier),
        tag_as: "storage.type.modifier.access.$match",
    ),
    lookBehindFor(can_come_before_a_inherited_class_regex).maybe(@spaces).lookAheadToAvoid(@cpp_tokens.that(:isAccessSpecifier)).then(
        match: variable_name,
        tag_as: "entity.name.type.inherited"
    )
]
# 'class/struct/union Name [: bases] { ... };'
class_struct_block = Range.new(
    start_pattern: newPattern(
        should_fully_match: ["class foo: bar", "class foo: public baz"],
        should_not_fully_match: ["class foo {"],
        should_partial_match: ["class foo f;", "struct st s;"],
        match: newPattern(
            reference: "storage_type",
            match: variableBounds[ /struct|class|union/ ],
            tag_as: "storage.type.$match",
        ).then(@spaces).then(
            match: variable_name,
            tag_as: "entity.name.type.$reference(storage_type)",
        ).maybe(maybe(@spaces).then(
            match: /:/,
            tag_as: "punctuation.inhertance.colon"
        # the following may seem redundant (removing it shouldn't change anything)
        # this is because the follow are matched by what is inside of this Range
        # However its preferable to match things here, in the Start (using a pattern), over matching it inside of the range
        # this is because the start pattern typically fails safely (is limited to 1 line), while typically Ranges fail dangerously (can match the whole document)
        ).maybe(@spaces)
        .zeroOrMoreOf(
            match: maybe(/,/)
            .maybe(@spaces)
            .maybe(@cpp_tokens.that(:isAccessSpecifier))
            .maybe(@spaces).oneOrMoreOf(
                maybe(@spaces).maybe(/,/).maybe(@spaces)
                .lookAheadToAvoid(@cpp_tokens.that(:isAccessSpecifier))
                .then(variable_name)
            ),
            includes: inhertance_context
        )
        ),
    ),
    # ends after '}' or ';', or bails on tokens that prove this isn't a class block
    end_pattern: newPattern(
        lookBehindFor(/\}/)
        .or(
            match: /;/,
            tag_as: "punctuation.terminator.statement",
        ).or(
            lookAheadFor(newPattern(
                match: /[()>\[\]=]/,
                tag_as: "punctuation.terminator.statement",
            ))
        )
    ),
    tag_as: "meta.class-struct-block",
    includes: [
        #
        # This part is only for what is before the {}'s (aka inhertance)
        #
        "#angle_brackets",
        *inhertance_context,
        #
        # This Range is for everything in the {}'s
        #
        {
            begin: "\\{",
            beginCaptures: {
                "0" => {
                    name: "punctuation.section.block.begin.bracket.curly"
                }
            },
            end: "(\\})(\\s*\\n)?",
            endCaptures: {
                "1" => {
                    name: "punctuation.section.block.end.bracket.curly"
                },
                "2" => {
                    name: "invalid.illegal.you-forgot-semicolon"
                }
            },
            patterns: [
                {
                    include: "#special_block"
                },
                {
                    include: "#constructor"
                },
                {
                    include: "$base"
                }
            ]
        },
        "$base",
    ],
)
# Assemble the grammar's top-level context; ORDER IS SIGNIFICANT — earlier
# entries win when two patterns can match at the same position.
cpp_grammar.initalContextIncludes(
    :special_block,
    macro_argument,
    :strings,
    functional_specifiers_pre_parameters,
    qualifiers_and_specifiers_post_parameters,
    storage_specifiers,
    access_control_keywords,
    exception_keywords,
    other_keywords,
    :memory_operators,
    the_this_keyword,
    language_constants,
    template_isolated_definition,
    template_definition,
    scope_resolution,
    {
        match: /\b(constexpr|export|mutable|typename|thread_local)\b/,
        name: "storage.modifier"
    },
    # destructor definition: '~Name(' with optional scope qualifiers
    {
        name: "meta.function.destructor",
        begin: "(?x)\n(?:\n  ^ |                  # beginning of line\n  (?:(?<!else|new|=))  # or word + space before name\n)\n((?:[A-Za-z_][A-Za-z0-9_]*::)*+~[A-Za-z_][A-Za-z0-9_]*) # actual name\n\\s*(\\()              # opening bracket",
        beginCaptures: {
            "1" => {
                name: "entity.name.function.destructor"
            },
            "2" => {
                name: "punctuation.definition.parameters.begin.destructor"
            }
        },
        end: /\)/,
        endCaptures: {
            "0" => {
                name: "punctuation.definition.parameters.end.destructor"
            }
        },
        patterns: [
            {
                include: "$base"
            }
        ]
    },
    # destructor prototype (same begin regex, different scope names)
    {
        name: "meta.function.destructor.prototype",
        begin: "(?x)\n(?:\n  ^ |                  # beginning of line\n  (?:(?<!else|new|=))  # or word + space before name\n)\n((?:[A-Za-z_][A-Za-z0-9_]*::)*+~[A-Za-z_][A-Za-z0-9_]*) # actual name\n\\s*(\\()              # opening bracket",
        beginCaptures: {
            "1" => {
                name: "entity.name.function"
            },
            "2" => {
                name: "punctuation.definition.parameters.begin"
            }
        },
        end: /\)/,
        endCaptures: {
            "0" => {
                name: "punctuation.definition.parameters.end"
            }
        },
        patterns: [
            {
                include: "$base"
            }
        ]
    },
    lambdas,
    #
    # C patterns
    #
    "#preprocessor-rule-enabled",
    "#preprocessor-rule-disabled",
    "#preprocessor-rule-conditional",
    "#comments-c",
    control_flow_keywords,
    storage_types,
    {
        match: "\\b(const|extern|register|restrict|static|volatile|inline)\\b",
        name: "storage.modifier"
    },
    operator_overload,
    number_literal,
    :strings_c,
    # '#define NAME(args...)' — captures the macro name and its parameter list
    {
        name: "meta.preprocessor.macro",
        begin: "(?x)\n^\\s* ((\\#)\\s*define) \\s+\t# define\n((?<id>#{preprocessor_name_no_bounds}))\t  # macro name\n(?:\n  (\\()\n\t(\n\t  \\s* \\g<id> \\s*\t\t # first argument\n\t  ((,) \\s* \\g<id> \\s*)*  # additional arguments\n\t  (?:\\.\\.\\.)?\t\t\t# varargs ellipsis?\n\t)\n  (\\))\n)?",
        beginCaptures: {
            "1" => {
                name: "keyword.control.directive.define"
            },
            "2" => {
                name: "punctuation.definition.directive"
            },
            "3" => {
                name: "entity.name.function.preprocessor"
            },
            "5" => {
                name: "punctuation.definition.parameters.begin"
            },
            "6" => {
                name: "variable.parameter.preprocessor"
            },
            "8" => {
                name: "punctuation.separator.parameters"
            },
            "9" => {
                name: "punctuation.definition.parameters.end"
            }
        },
        end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
        patterns: [
            {
                include: "#preprocessor-rule-define-line-contents"
            }
        ]
    },
    # '#error' / '#warning' with quoted or unquoted message text
    {
        name: "meta.preprocessor.diagnostic",
        begin: "^\\s*((#)\\s*(error|warning))\\b\\s*",
        beginCaptures: {
            "1" => {
                name: "keyword.control.directive.diagnostic.$3"
            },
            "2" => {
                name: "punctuation.definition.directive"
            }
        },
        end: "(?<!\\\\)(?=\\n)",
        patterns: [
            {
                begin: "\"",
                beginCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.begin"
                    }
                },
                end: "\"|(?<!\\\\)(?=\\s*\\n)",
                endCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.end"
                    }
                },
                name: "string.quoted.double",
                patterns: [
                    {
                        include: "#line_continuation_character"
                    }
                ]
            },
            {
                begin: "'",
                beginCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.begin"
                    }
                },
                end: "'|(?<!\\\\)(?=\\s*\\n)",
                endCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.end"
                    }
                },
                name: "string.quoted.single",
                patterns: [
                    {
                        include: "#line_continuation_character"
                    }
                ]
            },
            {
                begin: "[^'\"]",
                end: "(?<!\\\\)(?=\\s*\\n)",
                name: "string.unquoted.single",
                patterns: [
                    {
                        include: "#line_continuation_character"
                    },
                    {
                        include: "#comments-c"
                    }
                ]
            }
        ]
    },
    # '#include <...>' / '#include "..."' / '#import'
    {
        name: "meta.preprocessor.include",
        begin: "^\\s*((#)\\s*(include(?:_next)?|import))\\b\\s*",
        beginCaptures: {
            "1" => {
                name: "keyword.control.directive.$3"
            },
            "2" => {
                name: "punctuation.definition.directive"
            }
        },
        end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
        patterns: [
            {
                include: "#line_continuation_character"
            },
            {
                begin: "\"",
                beginCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.begin"
                    }
                },
                end: "\"",
                endCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.end"
                    }
                },
                name: "string.quoted.double.include"
            },
            {
                begin: "<",
                beginCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.begin"
                    }
                },
                end: ">",
                endCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.end"
                    }
                },
                name: "string.quoted.other.lt-gt.include"
            }
        ]
    },
    "#pragma-mark",
    # '#line' directive
    {
        name: "meta.preprocessor",
        begin: "^\\s*((#)\\s*line)\\b",
        beginCaptures: {
            "1" => {
                name: "keyword.control.directive.line"
            },
            "2" => {
                name: "punctuation.definition.directive"
            }
        },
        end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
        patterns: [
            {
                include: "#strings_c"
            },
            {
                include: "#number_literal"
            },
            {
                include: "#line_continuation_character"
            }
        ]
    },
    # '#undef' directive
    {
        name: "meta.preprocessor",
        begin: "^\\s*(?:((#)\\s*undef))\\b",
        beginCaptures: {
            "1" => {
                name: "keyword.control.directive.undef"
            },
            "2" => {
                name: "punctuation.definition.directive"
            }
        },
        end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
        patterns: [
            {
                match: preprocessor_name_no_bounds,
                name: "entity.name.function.preprocessor"
            },
            {
                include: "#line_continuation_character"
            }
        ]
    },
    # '#pragma' directive
    {
        name: "meta.preprocessor.pragma",
        begin: "^\\s*(?:((#)\\s*pragma))\\b",
        beginCaptures: {
            "1" => {
                name: "keyword.control.directive.pragma"
            },
            "2" => {
                name: "punctuation.definition.directive"
            }
        },
        end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
        patterns: [
            {
                include: "#strings_c"
            },
            {
                match: "[a-zA-Z_$][\\w\\-$]*",
                name: "entity.other.attribute-name.pragma.preprocessor"
            },
            {
                include: "#number_literal"
            },
            {
                include: "#line_continuation_character"
            }
        ]
    },
    :operators,
    # well-known POSIX / sys typedefs
    {
        match: "\\b(u_char|u_short|u_int|u_long|ushort|uint|u_quad_t|quad_t|qaddr_t|caddr_t|daddr_t|div_t|dev_t|fixpt_t|blkcnt_t|blksize_t|gid_t|in_addr_t|in_port_t|ino_t|key_t|mode_t|nlink_t|id_t|pid_t|off_t|segsz_t|swblk_t|uid_t|id_t|clock_t|size_t|ssize_t|time_t|useconds_t|suseconds_t)\\b",
        name: "support.type.sys-types"
    },
    # pthread types
    {
        match: "\\b(pthread_attr_t|pthread_cond_t|pthread_condattr_t|pthread_mutex_t|pthread_mutexattr_t|pthread_once_t|pthread_rwlock_t|pthread_rwlockattr_t|pthread_t|pthread_key_t)\\b",
        name: "support.type.pthread"
    },
    # <stdint.h> fixed-width integer types
    {
        match: "(?x) \\b\n(int8_t|int16_t|int32_t|int64_t|uint8_t|uint16_t|uint32_t|uint64_t|int_least8_t\n|int_least16_t|int_least32_t|int_least64_t|uint_least8_t|uint_least16_t|uint_least32_t\n|uint_least64_t|int_fast8_t|int_fast16_t|int_fast32_t|int_fast64_t|uint_fast8_t\n|uint_fast16_t|uint_fast32_t|uint_fast64_t|intptr_t|uintptr_t|intmax_t|intmax_t\n|uintmax_t|uintmax_t)\n\\b",
        name: "support.type.stdint"
    },
    posix_reserved_types,
    "#block-c",
    "#parens-c",
    function_definition,
    "#line_continuation_character",
    # 'name[index]' square-bracket access
    {
        name: "meta.bracket.square.access",
        begin: "([a-zA-Z_][a-zA-Z_0-9]*|(?<=[\\]\\)]))?(\\[)(?!\\])",
        beginCaptures: {
            "1" => {
                name: "variable.other.object"
            },
            "2" => {
                name: "punctuation.definition.begin.bracket.square"
            }
        },
        end: "\\]",
        endCaptures: {
            "0" => {
                name: "punctuation.definition.end.bracket.square"
            }
        },
        patterns: [
            {
                include: "#function-call-innards-c"
            }
        ]
    },
    # '[]' in a declaration (but not after 'delete')
    {
        name: "storage.modifier.array.bracket.square",
        match: /#{lookBehindToAvoid(/delete/)}\\[\\s*\\]/
    },
    {
        match: ";",
        name: "punctuation.terminator.statement"
    },
    {
        match: ",",
        name: "punctuation.separator.delimiter"
    }
)
cpp_grammar.addToRepository({
"angle_brackets" => {
begin: "<",
end: ">",
name: "meta.angle-brackets",
patterns: [
{
include: "#angle_brackets"
},
{
include: "$base"
}
]
},
"block" => {
begin: "\\{",
beginCaptures: {
"0" => {
name: "punctuation.section.block.begin.bracket.curly"
}
},
end: "\\}",
endCaptures: {
"0" => {
name: "punctuation.section.block.end.bracket.curly"
}
},
name: "meta.block",
patterns: [
{
captures: {
"1" => {
name: "support.function.any-method"
},
"2" => {
name: "punctuation.definition.parameters"
}
},
match: "(?x)\n(\n (?!while|for|do|if|else|switch|catch|return)\n (?:\\b[A-Za-z_][A-Za-z0-9_]*+\\b|::)*+ # actual name\n)\n\\s*(\\() # opening bracket",
name: "meta.function-call"
},
{
include: "$base"
}
]
},
"constructor" => {
patterns: [
{
begin: "(?x)\n(?:^\\s*) # beginning of line\n((?!while|for|do|if|else|switch|catch)[A-Za-z_][A-Za-z0-9_:]*) # actual name\n\\s*(\\() # opening bracket",
beginCaptures: {
"1" => {
name: "entity.name.function.constructor"
},
"2" => {
name: "punctuation.definition.parameters.begin.constructor"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.definition.parameters.end.constructor"
}
},
name: "meta.function.constructor",
patterns: [
{
include: "#probably_a_parameter"
},
{
include: "#function-innards-c"
}
]
},
{
begin: "(?x)\n(:)\n(\n (?=\n \\s*[A-Za-z_][A-Za-z0-9_:]* # actual name\n \\s* (\\() # opening bracket\n )\n)",
beginCaptures: {
"1" => {
name: "punctuation.definition.initializer-list.parameters"
}
},
end: "(?=\\{)",
name: "meta.function.constructor.initializer-list",
patterns: [
{
include: "$base"
}
]
}
]
},
"special_block" => {
patterns: [
using_namespace.to_tag,
namespace_definition.to_tag,
enum_block.to_tag,
class_struct_block.to_tag,
{
begin: "\\b(extern)(?=\\s*\")",
beginCaptures: {
"1" => {
name: "storage.modifier"
}
},
end: "(?<=\\})|(?=\\w)|(?=\\s*#\\s*endif\\b)",
name: "meta.extern-block",
patterns: [
{
begin: "\\{",
beginCaptures: {
"0" => {
name: "punctuation.section.block.begin.bracket.curly"
}
},
end: "\\}|(?=\\s*#\\s*endif\\b)",
endCaptures: {
"0" => {
name: "punctuation.section.block.end.bracket.curly"
}
},
patterns: [
{
include: "#special_block"
},
{
include: "$base"
}
]
},
{
include: "$base"
}
]
}
]
},
# TODO: "strings" is included and it is different from "strings_c", but its not used anywhere. Figure out whats going on here
"strings" => {
patterns: [
{
begin: "(u|u8|U|L)?\"",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
},
"1" => {
name: "meta.encoding"
}
},
end: "\"",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.double",
patterns: [
{
match: "\\\\u\\h{4}|\\\\U\\h{8}",
name: "constant.character.escape"
},
{
match: "\\\\['\"?\\\\abfnrtv]",
name: "constant.character.escape"
},
{
match: "\\\\[0-7]{1,3}",
name: "constant.character.escape"
},
{
match: "\\\\x\\h+",
name: "constant.character.escape"
},
{
include: "#string_placeholder-c"
}
]
},
{
begin: "(u|u8|U|L)?R\"(?:([^ ()\\\\\\t]{0,16})|([^ ()\\\\\\t]*))\\(",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
},
"1" => {
name: "meta.encoding"
},
"3" => {
name: "invalid.illegal.delimiter-too-long"
}
},
end: "\\)\\2(\\3)\"",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
},
"1" => {
name: "invalid.illegal.delimiter-too-long"
}
},
name: "string.quoted.double.raw"
}
]
},
"block-c" => {
patterns: [
{
begin: "{",
beginCaptures: {
"0" => {
name: "punctuation.section.block.begin.bracket.curly"
}
},
end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)",
endCaptures: {
"0" => {
name: "punctuation.section.block.end.bracket.curly"
}
},
name: "meta.block",
patterns: [
{
include: "#block_innards-c"
}
]
}
]
},
"block_innards-c" => {
patterns: [
{
include: "#preprocessor-rule-enabled-block"
},
{
include: "#preprocessor-rule-disabled-block"
},
{
include: "#preprocessor-rule-conditional-block"
},
{
include: "#method_access"
},
{
include: "#member_access"
},
{
include: "#c_function_call"
},
{
name: "meta.initialization",
begin: "(?x)\n(?:\n (?:\n\t(?=\\s)(?<!else|new|return)\n\t(?<=\\w) \\s+(and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas) # or word + space before name\n )\n)\n(\n (?:[A-Za-z_][A-Za-z0-9_]*+ | :: )++ # actual name\n |\n (?:(?<=operator) (?:[-*&<>=+!]+ | \\(\\) | \\[\\]))\n)\n\\s*(\\() # opening bracket",
beginCaptures: {
"1" => {
name: "variable.other"
},
"2" => {
name: "punctuation.section.parens.begin.bracket.round.initialization"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.section.parens.end.bracket.round.initialization"
}
},
patterns: [
{
include: "#function-call-innards-c"
}
]
},
{
begin: "{",
beginCaptures: {
"0" => {
name: "punctuation.section.block.begin.bracket.curly"
}
},
end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)",
endCaptures: {
"0" => {
name: "punctuation.section.block.end.bracket.curly"
}
},
patterns: [
{
include: "#block_innards-c"
}
]
},
{
include: "#parens-block-c"
},
{
include: "$base"
}
]
},
"c_function_call" => {
begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas|constexpr|volatile|operator|(?:::)?new|(?:::)?delete)\\s*\\()\n(?=\n(?:[A-Za-z_][A-Za-z0-9_]*+|::)++\\s*#{maybe(template_call.without_numbered_capture_groups)}\\( # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\\s*\\(\n)",
end: "(?<=\\))(?!\\w)",
name: "meta.function-call",
patterns: [
{
include: "#function-call-innards-c"
}
]
},
"comments-c" => {
patterns: [
{
captures: {
"1" => {
name: "meta.toc-list.banner.block"
}
},
match: "^/\\* =(\\s*.*?)\\s*= \\*/$\\n?",
name: "comment.block"
},
{
begin: "/\\*",
beginCaptures: {
"0" => {
name: "punctuation.definition.comment.begin"
}
},
end: "\\*/",
endCaptures: {
"0" => {
name: "punctuation.definition.comment.end"
}
},
name: "comment.block"
},
{
captures: {
"1" => {
name: "meta.toc-list.banner.line"
}
},
match: "^// =(\\s*.*?)\\s*=\\s*$\\n?",
name: "comment.line.banner"
},
{
begin: "(^[ \\t]+)?(?=//)",
beginCaptures: {
"1" => {
name: "punctuation.whitespace.comment.leading"
}
},
end: "(?!\\G)",
patterns: [
{
begin: "//",
beginCaptures: {
"0" => {
name: "punctuation.definition.comment"
}
},
end: "(?=\\n)",
name: "comment.line.double-slash",
patterns: [
{
include: "#line_continuation_character"
}
]
}
]
}
]
},
"disabled" => {
begin: "^\\s*#\\s*if(n?def)?\\b.*$",
end: "^\\s*#\\s*endif\\b",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
"line_continuation_character" => {
patterns: [
{
match: "(\\\\)\\n",
captures: {
"1" => {
name: "constant.character.escape.line-continuation"
}
}
}
]
},
"parens-c" => {
name: "punctuation.section.parens-c",
begin: "\\(",
beginCaptures: {
"0" => {
name: "punctuation.section.parens.begin.bracket.round"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.section.parens.end.bracket.round"
}
},
patterns: [
{
include: "$base"
}
]
},
"parens-block-c" => {
name: "meta.block.parens",
begin: "\\(",
beginCaptures: {
"0" => {
name: "punctuation.section.parens.begin.bracket.round"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.section.parens.end.bracket.round"
}
},
patterns: [
{
include: "#block_innards-c"
},
{
match: lookBehindToAvoid(/:/).then(/:/).lookAheadToAvoid(/:/),
name: "punctuation.range-based"
}
]
},
"pragma-mark" => {
captures: {
"1" => {
name: "meta.preprocessor.pragma"
},
"2" => {
name: "keyword.control.directive.pragma.pragma-mark"
},
"3" => {
name: "punctuation.definition.directive"
},
"4" => {
name: "entity.name.tag.pragma-mark"
}
},
match: "^\\s*(((#)\\s*pragma\\s+mark)\\s+(.*))",
name: "meta.section"
},
"strings_c" => {
patterns: [
{
begin: "\"",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "\"",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.double",
patterns: [
{
include: "#string_escaped_char-c"
},
{
include: "#string_placeholder-c"
},
{
include: "#line_continuation_character"
}
]
},
{
begin: lookBehindToAvoid(/[\da-fA-F]/).then(/'/),
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "'",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.single",
patterns: [
{
include: "#string_escaped_char-c"
},
{
include: "#line_continuation_character"
}
]
}
]
},
"string_escaped_char-c" => {
patterns: [
{
match: "(?x)\\\\ (\n\\\\\t\t\t |\n[abefnprtv'\"?] |\n[0-3]\\d{,2}\t |\n[4-7]\\d?\t\t|\nx[a-fA-F0-9]{,2} |\nu[a-fA-F0-9]{,4} |\nU[a-fA-F0-9]{,8} )",
name: "constant.character.escape"
},
{
match: "\\\\.",
name: "invalid.illegal.unknown-escape"
}
]
},
"string_placeholder-c" => {
patterns: [
{
match: "(?x) %\n(\\d+\\$)?\t\t\t\t\t\t # field (argument #)\n[#0\\- +']*\t\t\t\t\t\t # flags\n[,;:_]?\t\t\t\t\t\t\t # separator character (AltiVec)\n((-?\\d+)|\\*(-?\\d+\\$)?)?\t\t # minimum field width\n(\\.((-?\\d+)|\\*(-?\\d+\\$)?)?)?\t# precision\n(hh|h|ll|l|j|t|z|q|L|vh|vl|v|hv|hl)? # length modifier\n[diouxXDOUeEfFgGaACcSspn%]\t\t # conversion type",
name: "constant.other.placeholder"
},
# I don't think these are actual escapes, and they incorrectly mark valid strings
# It might be related to printf and format from C (which is low priority for C++)
# {
# match: "(%)(?!\"\\s*(PRI|SCN))",
# captures: {
# "1" => {
# name: "constant.other.placeholder"
# }
# }
# }
]
},
"vararg_ellipses" => {
match: "(?<!\\.)\\.\\.\\.(?!\\.)",
name: "punctuation.vararg-ellipses"
},
"preprocessor-rule-conditional" => {
patterns: [
{
begin: "^\\s*((#)\\s*if(?:n?def)?\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "^\\s*((#)\\s*endif\\b)",
endCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#preprocessor-rule-enabled-elif"
},
{
include: "#preprocessor-rule-enabled-else"
},
{
include: "#preprocessor-rule-disabled-elif"
},
{
begin: "^\\s*((#)\\s*elif\\b)",
beginCaptures: {
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "$base"
}
]
},
{
match: "^\\s*#\\s*(else|elif|endif)\\b",
captures: {
"0" => {
name: "invalid.illegal.stray-$1"
}
}
}
]
},
"preprocessor-rule-conditional-block" => {
patterns: [
{
begin: "^\\s*((#)\\s*if(?:n?def)?\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "^\\s*((#)\\s*endif\\b)",
endCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#preprocessor-rule-enabled-elif-block"
},
{
include: "#preprocessor-rule-enabled-else-block"
},
{
include: "#preprocessor-rule-disabled-elif"
},
{
begin: "^\\s*((#)\\s*elif\\b)",
beginCaptures: {
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#block_innards-c"
}
]
},
{
match: "^\\s*#\\s*(else|elif|endif)\\b",
captures: {
"0" => {
name: "invalid.illegal.stray-$1"
}
}
}
]
},
"preprocessor-rule-conditional-line" => {
patterns: [
{
match: "(?:\\bdefined\\b\\s*$)|(?:\\bdefined\\b(?=\\s*\\(*\\s*(?:(?!defined\\b)[a-zA-Z_$][\\w$]*\\b)\\s*\\)*\\s*(?:\\n|//|/\\*|\\?|\\:|&&|\\|\\||\\\\\\s*\\n)))",
name: "keyword.control.directive.conditional"
},
{
match: "\\bdefined\\b",
name: "invalid.illegal.macro-name"
},
{
include: "#comments-c"
},
{
include: "#strings_c"
},
{
include: "#number_literal"
},
{
begin: "\\?",
beginCaptures: {
"0" => {
name: "keyword.operator.ternary"
}
},
end: ":",
endCaptures: {
"0" => {
name: "keyword.operator.ternary"
}
},
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#operators"
},
{
include: "#constants"
},
{
match: preprocessor_name_no_bounds,
name: "entity.name.function.preprocessor"
},
{
include: "#line_continuation_character"
},
{
begin: "\\(",
beginCaptures: {
"0" => {
name: "punctuation.section.parens.begin.bracket.round"
}
},
end: "\\)|(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
endCaptures: {
"0" => {
name: "punctuation.section.parens.end.bracket.round"
}
},
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
}
]
},
"preprocessor-rule-disabled" => {
patterns: [
{
begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0+\\b\\)*\\s*(?:$|//|/\\*))",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "^\\s*((#)\\s*endif\\b)",
endCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#comments-c"
},
{
include: "#preprocessor-rule-enabled-elif"
},
{
include: "#preprocessor-rule-enabled-else"
},
{
include: "#preprocessor-rule-disabled-elif"
},
{
begin: "^\\s*((#)\\s*elif\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*(?:elif|else|endif)\\b))",
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "$base"
}
]
},
{
begin: "\\n",
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
"contentName" => "comment.block.preprocessor.if-branch",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
}
]
}
]
},
"preprocessor-rule-disabled-block" => {
patterns: [
{
begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0+\\b\\)*\\s*(?:$|//|/\\*))",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "^\\s*((#)\\s*endif\\b)",
endCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#comments-c"
},
{
include: "#preprocessor-rule-enabled-elif-block"
},
{
include: "#preprocessor-rule-enabled-else-block"
},
{
include: "#preprocessor-rule-disabled-elif"
},
{
begin: "^\\s*((#)\\s*elif\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*(?:elif|else|endif)\\b))",
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#block_innards-c"
}
]
},
{
begin: "\\n",
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
"contentName" => "comment.block.preprocessor.if-branch.in-block",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
}
]
}
]
},
"preprocessor-rule-disabled-elif" => {
begin: "^\\s*((#)\\s*elif\\b)(?=\\s*\\(*\\b0+\\b\\)*\\s*(?:$|//|/\\*))",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*(?:elif|else|endif)\\b))",
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#comments-c"
},
{
begin: "\\n",
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
"contentName" => "comment.block.preprocessor.elif-branch",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
}
]
},
"preprocessor-rule-enabled" => {
patterns: [
{
begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
},
"3" => {
name: "constant.numeric.preprocessor"
}
},
end: "^\\s*((#)\\s*endif\\b)",
endCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#comments-c"
},
{
begin: "^\\s*((#)\\s*else\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
"contentName" => "comment.block.preprocessor.else-branch",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
begin: "^\\s*((#)\\s*elif\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
"contentName" => "comment.block.preprocessor.if-branch",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
begin: "\\n",
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
patterns: [
{
include: "$base"
}
]
}
]
}
]
},
"preprocessor-rule-enabled-block" => {
patterns: [
{
begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "^\\s*((#)\\s*endif\\b)",
endCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#comments-c"
},
{
begin: "^\\s*((#)\\s*else\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
"contentName" => "comment.block.preprocessor.else-branch.in-block",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
begin: "^\\s*((#)\\s*elif\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
"contentName" => "comment.block.preprocessor.if-branch.in-block",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
begin: "\\n",
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
patterns: [
{
include: "#block_innards-c"
}
]
}
]
}
]
},
"preprocessor-rule-enabled-elif" => {
begin: "^\\s*((#)\\s*elif\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#comments-c"
},
{
begin: "\\n",
end: "(?=^\\s*((#)\\s*(?:endif)\\b))",
patterns: [
{
begin: "^\\s*((#)\\s*(else)\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
"contentName" => "comment.block.preprocessor.elif-branch",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
begin: "^\\s*((#)\\s*(elif)\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
"contentName" => "comment.block.preprocessor.elif-branch",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
include: "$base"
}
]
}
]
},
"preprocessor-rule-enabled-elif-block" => {
begin: "^\\s*((#)\\s*elif\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#comments-c"
},
{
begin: "\\n",
end: "(?=^\\s*((#)\\s*(?:endif)\\b))",
patterns: [
{
begin: "^\\s*((#)\\s*(else)\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
"contentName" => "comment.block.preprocessor.elif-branch.in-block",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
begin: "^\\s*((#)\\s*(elif)\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
"contentName" => "comment.block.preprocessor.elif-branch",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
include: "#block_innards-c"
}
]
}
]
},
"preprocessor-rule-enabled-else" => {
begin: "^\\s*((#)\\s*else\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
patterns: [
{
include: "$base"
}
]
},
"preprocessor-rule-enabled-else-block" => {
begin: "^\\s*((#)\\s*else\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
patterns: [
{
include: "#block_innards-c"
}
]
},
"preprocessor-rule-define-line-contents" => {
patterns: [
{
include: "#vararg_ellipses"
},
{
match: /##?/.then(variable_name_without_bounds).lookAheadToAvoid(@standard_character),
name: "variable.other.macro.argument"
},
{
begin: "{",
beginCaptures: {
"0" => {
name: "punctuation.section.block.begin.bracket.curly"
}
},
end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"0" => {
name: "punctuation.section.block.end.bracket.curly"
}
},
name: "meta.block",
patterns: [
{
include: "#preprocessor-rule-define-line-blocks"
}
]
},
{
match: "\\(",
name: "punctuation.section.parens.begin.bracket.round"
},
{
match: "\\)",
name: "punctuation.section.parens.end.bracket.round"
},
{
begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas|asm|__asm__|auto|bool|_Bool|char|_Complex|double|enum|float|_Imaginary|int|long|short|signed|struct|typedef|union|unsigned|void)\\s*\\()\n(?=\n (?:[A-Za-z_][A-Za-z0-9_]*+|::)++\\s*\\( # actual name\n |\n (?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\\s*\\(\n)",
end: "(?<=\\))(?!\\w)|(?<!\\\\)(?=\\s*\\n)",
name: "meta.function",
patterns: [
{
include: "#preprocessor-rule-define-line-functions"
}
]
},
{
begin: "\"",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "\"|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.double",
patterns: [
{
include: "#string_escaped_char-c"
},
{
include: "#string_placeholder-c"
},
{
include: "#line_continuation_character"
}
]
},
{
begin: "'",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "'|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.single",
patterns: [
{
include: "#string_escaped_char-c"
},
{
include: "#line_continuation_character"
}
]
},
{
include: "#method_access"
},
{
include: "#member_access"
},
{
include: "$base"
}
]
},
"preprocessor-rule-define-line-blocks" => {
patterns: [
{
begin: "{",
beginCaptures: {
"0" => {
name: "punctuation.section.block.begin.bracket.curly"
}
},
end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"0" => {
name: "punctuation.section.block.end.bracket.curly"
}
},
patterns: [
{
include: "#preprocessor-rule-define-line-blocks"
},
{
include: "#preprocessor-rule-define-line-contents"
}
]
},
{
include: "#preprocessor-rule-define-line-contents"
}
]
},
"preprocessor-rule-define-line-functions" => {
patterns: [
{
include: "#comments-c"
},
{
include: "#storage_types"
},
{
include: "#vararg_ellipses"
},
{
include: "#method_access"
},
{
include: "#member_access"
},
{
include: "#operators"
},
{
begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\\s*\\()\n(\n(?:[A-Za-z_][A-Za-z0-9_]*+|::)++ # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\n)\n\\s*(\\()",
beginCaptures: {
"1" => {
name: "entity.name.function"
},
"2" => {
name: "punctuation.section.arguments.begin.bracket.round"
}
},
end: "(\\))|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"1" => {
name: "punctuation.section.arguments.end.bracket.round"
}
},
patterns: [
{
include: "#preprocessor-rule-define-line-functions"
}
]
},
{
begin: "\\(",
beginCaptures: {
"0" => {
name: "punctuation.section.parens.begin.bracket.round"
}
},
end: "(\\))|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"1" => {
name: "punctuation.section.parens.end.bracket.round"
}
},
patterns: [
{
include: "#preprocessor-rule-define-line-functions"
}
]
},
{
include: "#preprocessor-rule-define-line-contents"
}
]
},
"function-innards-c" => {
patterns: [
{
include: "#comments-c"
},
{
include: "#storage_types"
},
{
include: "#operators"
},
{
include: "#vararg_ellipses"
},
{
name: "meta.function.definition.parameters",
begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\\s*\\()\n(\n(?:[A-Za-z_][A-Za-z0-9_]*+|::)++ # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\n)\n\\s*(\\()",
beginCaptures: {
"1" => {
name: "entity.name.function"
},
"2" => {
name: "punctuation.section.parameters.begin.bracket.round"
},
},
end: /\)|:/,
endCaptures: {
"0" => {
name: "punctuation.section.parameters.end.bracket.round"
}
},
patterns: [
{
include: "#probably_a_parameter"
},
{
include: "#function-innards-c"
}
]
},
{
begin: "\\(",
beginCaptures: {
"0" => {
name: "punctuation.section.parens.begin.bracket.round"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.section.parens.end.bracket.round"
}
},
patterns: [
{
include: "#function-innards-c"
}
]
},
{
include: "$base"
}
]
},
"function-call-innards-c" => {
patterns: [
{
include: "#comments-c"
},
{
include: "#storage_types"
},
{
include: "#method_access"
},
{
include: "#member_access"
},
{
include: "#operators"
},
{
begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\\s*\\()\n(\n(?:new)\\s*(#{maybe(template_call.without_numbered_capture_groups)}) # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\n)\n\\s*(\\()",
beginCaptures: {
"1" => {
name: "keyword.operator.memory.new"
},
"2" => {
patterns: [
{
include: "#template_call_innards"
}
]
},
"3" => {
name: "punctuation.section.arguments.begin.bracket.round"
},
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.section.arguments.end.bracket.round"
}
},
patterns: [
{
include: "#function-call-innards-c"
}
]
},
function_call.to_tag,
{
begin: "\\(",
beginCaptures: {
"0" => {
name: "punctuation.section.parens.begin.bracket.round"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.section.parens.end.bracket.round"
}
},
patterns: [
{
include: "#function-call-innards-c"
}
]
},
{
include: "#block_innards-c"
}
]
}
})
# Emit the generated grammar files relative to this script's own directory.
Dir.chdir __dir__
# Save — both calls receive the same extensionless base path; presumably each
# save helper appends its own ".yaml"/".json" extension — TODO: confirm in textmate_tools.rb
cpp_grammar.saveAsYamlTo("../syntaxes/cpp.tmLanguage")
cpp_grammar.saveAsJsonTo("../syntaxes/cpp.tmLanguage")
# TODO: standardize the way block patterns are generated
require_relative '../textmate_tools.rb'
require_relative './cpp_tokens.rb'
# todo
# fix initializer list "functions" e.g. `int a{5};`
# fix the ... inside of macros
# have all patterns with keywords be dynamically generated
# Top-level grammar object for C++. Every pattern defined below is attached to
# this object and later serialized to the tmLanguage YAML/JSON files.
# Fix: corrected typos in the contributor-facing metadata strings
# ("readble" -> "readable"; added the missing "is").
cpp_grammar = Grammar.new(
    name:"C++",
    scope_name: "source.cpp",
    version: "https://github.com/jeff-hykin/cpp-textmate-grammar/blob/master/generate.rb",
    information_for_contributors: [
        "This code was auto generated by a much-more-readable ruby file: https://github.com/jeff-hykin/cpp-textmate-grammar/blob/master/generate.rb",
        "This file is essentially an updated/improved fork of the atom syntax https://github.com/atom/language-c/blob/master/grammars/c%2B%2B.cson",
    ],
)
#
# Utils
#
# The statement-terminating semicolon, tagged so themes can style terminators.
@semicolon = newPattern(
    match: /;/,
    tag_as: "punctuation.terminator.statement",
)
# Builds a Range that covers a `head { body } tail;`-shaped construct and tags
# the head, body, and tail regions separately (scopes "meta.head.<name>",
# "meta.body.<name>", "meta.tail.<name>").
#
# name:               suffix appended to each generated scope name
# tag_as:             scope for the overall range
# start_pattern:      pattern that opens the construct
# needs_semicolon:    whether the construct must be terminated by a `;`
# *_includes:         sub-patterns for the respective regions
def blockFinderFor( name:"", tag_as:"", start_pattern:nil, needs_semicolon: true, primary_includes: [], head_includes:[], body_includes: [ "$base" ], tail_includes: [], secondary_includes:[])
    # characters that can legally appear right after the construct when no
    # explicit semicolon shows up
    terminator_lookahead = /[;()>\[\]=]/
    end_pattern =
        if needs_semicolon
            # ends at `};`, at a bare `;`, or (lookahead only) at a terminator char
            newPattern(
                match: newPattern(
                    lookBehindFor(/}/).maybe(@spaces).then(@semicolon)
                ).or(
                    @semicolon
                ).or(
                    lookAheadFor(terminator_lookahead)
                )
            )
        else
            # ends right after the closing `}` or before a terminator char
            lookBehindFor(/\}/).or(lookAheadFor(terminator_lookahead))
        end
    Range.new(
        tag_as: tag_as,
        start_pattern: newPattern(
            match: start_pattern,
            tag_as: "meta.head.#{name}",
        ),
        end_pattern: end_pattern,
        includes: [
            *primary_includes,
            # Head: from the start of the line up to (not including) the `{`
            Range.new(
                tag_as: "meta.head.#{name}",
                start_pattern: lookBehindFor(/^/).then(/[\s\n]?/),
                end_pattern: newPattern(/[\s\n]?/).lookAheadFor(/{/),
                includes: head_includes
            ),
            # Body: everything between the curly brackets
            Range.new(
                tag_as: "meta.body.#{name}",
                start_pattern: newPattern(
                    match: /\{/,
                    tag_as: "punctuation.section.block.begin.bracket.curly.#{name}"
                ),
                end_pattern: newPattern(
                    match: /\}/,
                    tag_as: "punctuation.section.block.end.bracket.curly.#{name}"
                ),
                includes: body_includes
            ),
            # Tail: from the closing `}` up to the trailing `;`
            Range.new(
                tag_as: "meta.tail.#{name}",
                start_pattern: lookBehindFor(/}/).then(/[\s\n]*/),
                end_pattern: newPattern(/[\s\n]*/).lookAheadFor(/;/),
                includes: tail_includes
            ),
            *secondary_includes
        ]
    )
end
#
#
# Numbers
#
#
#
# misc
#
# Matches one digit-group separator apostrophe (C++14, e.g. 1'000'000).
# The look-behind/look-ahead reject doubled separators such as 1''0.
# NOTE(review): "seperator" is a misspelling, but the variable and the
# repository_name are referenced throughout this file (and potentially by
# consumers of the repository), so it is intentionally left as-is.
number_seperator_pattern = newPattern(
    should_fully_match: [ "'" ],
    should_partial_match: [ "1'1", "1'", "'1" ],
    should_not_partial_match: [ "1''1", "1''" ],
    repository_name: 'literal_numeric_seperator',
    match: lookBehindToAvoid(/'/).then(/'/).lookAheadToAvoid(/'/),
    tag_as:"punctuation.separator.constant.numeric",
)
# One or more hexadecimal digits, optionally interleaved with ' separators.
# The should_* lists are self-test fixtures consumed by the pattern DSL.
hex_digits = newPattern(
    should_fully_match: [ "1", "123456", "DeAdBeeF", "49'30'94", "DeA'dBe'eF", "dea234f4930" ],
    should_not_fully_match: [ "'3902" , "de2300p1000", "0x000" ],
    should_not_partial_match: [ "p", "x", "." ],
    match: /[0-9a-fA-F]/.zeroOrMoreOf(/[0-9a-fA-F]/.or(number_seperator_pattern)),
    tag_as: "constant.numeric.hexadecimal",
    includes: [ number_seperator_pattern ],
)
# One or more decimal digits, optionally interleaved with ' separators.
decimal_digits = newPattern(
    should_fully_match: [ "1", "123456", "49'30'94" , "1'2" ],
    should_not_fully_match: [ "'3902" , "1.2", "0x000" ],
    match: /[0-9]/.zeroOrMoreOf(/[0-9]/.or(number_seperator_pattern)),
    tag_as: "constant.numeric.decimal",
    includes: [ number_seperator_pattern ],
)
# see https://en.cppreference.com/w/cpp/language/floating_literal
# Binary exponent of a hexadecimal float: p/P, optional sign, decimal digits.
hex_exponent = newPattern(
    should_fully_match: [ "p100", "p-100", "p+100", "P100" ],
    should_not_fully_match: [ "p0x0", "p-+100" ],
    match: newPattern(
        match: /[pP]/,
        tag_as: "keyword.other.unit.exponent.hexadecimal",
    ).maybe(
        match: /\+/,
        tag_as: "keyword.operator.plus.exponent.hexadecimal",
    ).maybe(
        match: /\-/,
        tag_as: "keyword.operator.minus.exponent.hexadecimal",
    ).then(
        match: decimal_digits.without_numbered_capture_groups,
        tag_as: "constant.numeric.exponent.hexadecimal",
        includes: [ number_seperator_pattern ]
    ),
)
# Exponent of a decimal float: e/E, optional sign, decimal digits.
decimal_exponent = newPattern(
    should_fully_match: [ "e100", "e-100", "e+100", "E100", ],
    should_not_fully_match: [ "e0x0", "e-+100" ],
    match: newPattern(
        match: /[eE]/,
        tag_as: "keyword.other.unit.exponent.decimal",
    ).maybe(
        match: /\+/,
        tag_as: "keyword.operator.plus.exponent.decimal",
    ).maybe(
        match: /\-/,
        tag_as: "keyword.operator.minus.exponent.decimal",
    ).then(
        match: decimal_digits.without_numbered_capture_groups,
        tag_as: "constant.numeric.exponent.decimal",
        includes: [ number_seperator_pattern ]
    ),
)
#
# Number Literal
#
# One pattern for every C++ numeric literal: floats (hex and decimal, with
# optional exponent and l/L/f/F suffix) are tried first, then integers
# (binary, octal, hex, decimal, with optional u/l suffix combinations), and
# finally any trailing \w* is tagged as a user-defined-literal suffix.
# The leading lookBehindToAvoid(/\w/) keeps it from matching inside identifiers.
number_literal = newPattern(
    repository_name: 'number_literal',
    match: lookBehindToAvoid(/\w/).then(
        # Floating point
        # see https://en.cppreference.com/w/cpp/language/floating_literal
        newPattern(
            floating_literal = newPattern(
                # Hex
                newPattern(
                    hex_literal_float = newPattern(
                        match: /0[xX]/,
                        tag_as: "keyword.other.unit.hexadecimal",
                    ).maybe(
                        hex_digits
                    ).then(
                        # lookBehind/Ahead because there needs to be a hex digit on at least one side
                        match: lookBehindFor(/[0-9a-fA-F]/).then(/\./).or(/\./.lookAheadFor(/[0-9a-fA-F]/)),
                        tag_as: "constant.numeric.hexadecimal",
                    ).maybe(
                        hex_digits
                    ).maybe(
                        hex_exponent
                    )
                # Decimal
                ).or(
                    decimal_literal_float = maybe(
                        decimal_digits
                    ).then(
                        # lookBehind/Ahead because there needs to be a decimal digit on at least one side
                        match: lookBehindFor(/[0-9]/).then(/\./).or(/\./.lookAheadFor(/[0-9]/)),
                        tag_as: "constant.numeric.decimal.point",
                    ).maybe(
                        decimal_digits
                    ).maybe(
                        decimal_exponent
                    )
                )
            # Floating point suffix
            ).maybe(
                literal_float_suffix = newPattern(
                    match: /[lLfF]/.lookAheadToAvoid(/\w/),
                    tag_as: "keyword.other.unit.suffix.floating-point"
                )
            )
        # Integer
        # see https://en.cppreference.com/w/cpp/language/integer_literal
        ).or(
            integer_literal = newPattern(
                # Binary
                newPattern(
                    binary_literal_integer = newPattern(
                        match: /0[bB]/,
                        tag_as: "keyword.other.unit.binary"
                    ).then(
                        match: oneOrMoreOf(/[01]/.or(number_seperator_pattern)),
                        tag_as: "constant.numeric.binary",
                        includes: [ number_seperator_pattern ]
                    )
                # Octal
                ).or(
                    octal_literal_integer = newPattern(
                        match: /0/,
                        tag_as: "keyword.other.unit.octal"
                    ).then(
                        match: oneOrMoreOf(/[0-7]/.or(number_seperator_pattern)),
                        tag_as: "constant.numeric.octal",
                        includes: [ number_seperator_pattern ]
                    )
                # Hex
                ).or(
                    hex_literal_integer = newPattern(
                        match: /0[xX]/,
                        tag_as: "keyword.other.unit.hexadecimal",
                    ).then(
                        hex_digits
                    ).maybe(
                        hex_exponent
                    )
                # Decimal
                ).or(
                    decimal_literal_integer = newPattern(
                        decimal_digits
                    ).maybe(
                        decimal_exponent
                    )
                )
            # integer suffix
            # NOTE(review): /[uU]/ appears first in the alternation, so for a
            # suffix like "ull" only the "u" may be consumed here, leaving "ll"
            # to the user-defined-suffix tag below — verify the DSL's
            # alternation/backtracking order before relying on these scopes.
            ).maybe(
                literal_integer_suffix = newPattern(
                    match: /[uU]/.or(/[uU]ll?/).or(/[uU]LL?/).or(/ll?[uU]?/).or(/LL?[uU]?/).lookAheadToAvoid(/\w/),
                    tag_as: "keyword.other.unit.suffix.integer"
                )
            )
        )
    # user defined endings
    ).then(
        match: /\w*/,
        tag_as: "keyword.other.unit.user-defined"
    )
)
#
# Contexts
#
# eventually this context will be more exclusive (can't have class definitions inside of an evaluation)
# but for now it just includes everything
evaluation_context = [
    '$base'
    # function call
    # number literal
    # lambdas
]
#
# Variable
#
# todo: make a better name for this function
# Wraps a pattern so it only matches as a complete identifier-like word:
# no identifier character may directly precede or follow the match.
variableBounds = ->(pattern) {
    lookBehindToAvoid(@standard_character).then(pattern).lookAheadToAvoid(@standard_character)
}
# an identifier: a letter or underscore followed by identifier characters
variable_name_without_bounds = /[a-zA-Z_]#{@standard_character.without_default_mode_modifiers}*/
# word bounds are inefficient, but they are accurate
variable_name = variableBounds[variable_name_without_bounds]
#
# Constants
#
# Built-in literal keywords (per @cpp_tokens' :isLiteral classification),
# matched only as whole words.
language_constants = newPattern(
    repository_name: 'constants',
    match: variableBounds[@cpp_tokens.that(:isLiteral)],
    tag_as: "constant.language"
)
#
# Types
#
# Look-behind for contexts where a type can appear (after a word+space, a
# closing */ comment, &, *, >, ], ), or ...), with optional trailing spaces.
look_behind_for_type = lookBehindFor(/\w |\*\/|[&*>\]\)]|\.\.\./).maybe(@spaces)
# why is posix reserved types not in "storage_types"? I don't know, if you can get it in there and everything still works it would be appreciated
# Identifiers ending in "_t" (reserved by POSIX for type names).
posix_reserved_types = newPattern(
    match: variableBounds[ /[a-zA-Z_]/.zeroOrMoreOf(@standard_character).then(/_t/) ],
    tag_as: "support.type.posix-reserved"
)
# Built-in type keywords, split into primitives, non-primitive type keywords,
# and the asm storage forms.
storage_types = newPattern(
    repository_name: 'storage_types',
    includes: [
        primitive_types = newPattern(
            match: variableBounds[ @cpp_tokens.that(:isPrimitive) ],
            tag_as: "storage.type.primitive"
        ),
        non_primitive_types = newPattern(
            match: variableBounds[@cpp_tokens.that(not(:isPrimitive), :isType)],
            tag_as: "storage.type"
        ),
        other_types = newPattern(
            match: variableBounds[ /(asm|__asm__)/ ],
            tag_as: "storage.type.$match"
        )
    ]
)
#
# Keywords and Keyword-ish things
#
# Function specifiers that appear before the parameter list.
# Fix: the scope tag previously read "specificer" (typo); corrected to
# "specifier" for consistency with the post-parameters pattern below.
functional_specifiers_pre_parameters = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isFunctionSpecifier) ],
    tag_as: "storage.modifier.specifier.functional.pre-parameters.$match"
)
# Qualifiers/specifiers that may appear between a parameter list and the
# function body; the lookahead requires a following `{`, `;`, or newline.
qualifiers_and_specifiers_post_parameters = newPattern(
    match: variableBounds[ @cpp_tokens.that(:canAppearAfterParametersBeforeBody) ].lookAheadFor(/\s*/.then(/\{/.or(/;/).or(/[\n\r]/))),
    tag_as: "storage.modifier.specifier.functional.post-parameters.$match"
)
# Storage-class specifiers (per @cpp_tokens' :isStorageSpecifier).
storage_specifiers = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isStorageSpecifier) ],
    tag_as: "storage.modifier.specifier.$match"
)
# Access-control labels: an access specifier followed by `:` (e.g. public:).
access_control_keywords = newPattern(
    match: lookBehindToAvoid(@standard_character).then(@cpp_tokens.that(:isAccessSpecifier)).maybe(@spaces).then(/:/),
    tag_as: "storage.type.modifier.access.control.$match"
)
# Exception-handling keywords (per @cpp_tokens' :isExceptionRelated).
exception_keywords = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isExceptionRelated) ],
    tag_as: "keyword.control.exception.$match"
)
# `using` and `typedef` declarations.
other_keywords = newPattern(
    match: variableBounds[ /(using|typedef)/ ],
    tag_as: "keyword.other.$match"
)
# The `this` pointer, tagged as a language variable.
the_this_keyword = newPattern(
    match: variableBounds[ /this/ ],
    tag_as: "variable.language.this"
)
# TODO: enhance casting operators to include <>'s
# static_cast / dynamic_cast / etc. (per @cpp_tokens' :isTypeCastingOperator).
type_casting_operators = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isTypeCastingOperator) ],
    tag_as: "keyword.operator.cast.$match"
)
# `new`, `delete`, and `delete[]` — `delete []` (with optional spaces before
# the brackets) is tried first so the array form wins over plain `delete`.
memory_operators = newPattern(
    repository_name: 'memory_operators',
    tag_as: "keyword.operator.memory",
    match: lookBehindToAvoid(
        @standard_character
    ).then(
        newPattern(
            newPattern(
                match: /delete/,
                tag_as: "keyword.operator.memory.delete.array"
            ).maybe(@spaces).then(
                match: /\[\]/,
                tag_as: "keyword.operator.memory.delete.array.bracket"
            )
        ).or(
            match: /delete/,
            tag_as: "keyword.operator.memory.delete"
        ).or(
            match: /new/,
            tag_as: "keyword.operator.memory.new"
        )
    ).lookAheadToAvoid(@standard_character)
)
# Control-flow keywords such as if/else/for/while (per :isControlFlow).
control_flow_keywords = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isControlFlow) ],
    tag_as: "keyword.control.$match"
)
#
# Templates
#
# Characters that can legally appear inside a <...> template call.
characters_in_template_call = /[\s<>,\w]/
# Patterns allowed inside the angle brackets of a template *call*.
template_call_context = [
    :storage_types,
    :constants,
    :scope_resolution,
    newPattern(
        match: variable_name,
        tag_as: 'storage.type.user-defined'
    ),
    :operators,
    :number_literal,
    :strings,
    newPattern(
        match: /,/,
        tag_as: "punctuation.separator.comma.template.argument"
    )
]
# note: template_call should ideally be a Range(); the reason it's not is
# because it's embedded inside of other patterns
template_call = newPattern(
    repository_name: 'template_call_innards',
    tag_as: 'meta.template.call',
    match: /</.zeroOrMoreOf(characters_in_template_call).then(/>/).maybe(@spaces),
    includes: template_call_context
    )
# Patterns allowed inside the angle brackets of a template *definition*.
template_definition_context = [
    :scope_resolution,
    :template_definition_argument,
    :template_argument_defaulted,
    :template_call_innards,
    *evaluation_context
]
# The "template" keyword followed by the opening "<".
template_start = lookBehindToAvoid(@standard_character).then(
    match: /template/,
    tag_as: "storage.type.template"
    ).maybe(@spaces).then(
    match: /</,
    tag_as: "punctuation.section.angle-brackets.start.template.definition"
    )
# a template definition that is by itself on a line (this is ideal)
template_isolated_definition = newPattern(
    repository_name: 'template_isolated_definition',
    match: template_start.then(
        match: zeroOrMoreOf(/./),
        tag_as: "meta.template.definition",
        includes: template_definition_context,
        ).then(
        match: />/.maybe(@spaces).then(/$/),
        tag_as: "punctuation.section.angle-brackets.end.template.definition"
        ),
    )
# Multi-line template definition: from `template <` up to the matching `>`.
# (template_isolated_definition above handles the single-line case.)
template_definition = Range.new(
    repository_name: 'template_definition',
    tag_as: 'meta.template.definition',
    start_pattern: template_start,
    end_pattern: newPattern(
        match: />/,
        tag_as: "punctuation.section.angle-brackets.end.template.definition"
        ),
    includes: [
        # a template call inside of a non-isolated template definition
        # however this is rolling the dice: because if there is a less-than operator in a defaulted argument, then this pattern will screw everything up
        # a better solution would be nice, but its going to be difficult/impossible
        Range.new(
            start_pattern: newPattern(
                match: lookBehindFor(/\w/).maybe(@spaces).then(/</),
                tag_as: "punctuation.section.angle-brackets.begin.template.call"
                ),
            end_pattern: newPattern(
                match: />/,
                # FIX: this closing ">" was tagged "...begin.template.call"
                # (copy-paste from the start pattern); it is the end delimiter.
                tag_as: "punctuation.section.angle-brackets.end.template.call"
                ),
            includes: template_call_context
            ),
        *template_definition_context,
    ]
)
# A defaulted template argument, e.g. "typename T = int": the name(s) before
# the final identifier are tagged as storage.type.template, the identifier as
# the template type name, followed by "=".
template_argument_defaulted = newPattern(
    repository_name: 'template_argument_defaulted',
    match: lookBehindFor(/<|,/).maybe(@spaces).then(
        match: zeroOrMoreOf(variable_name_without_bounds.then(@spaces)),
        tag_as: "storage.type.template",
        ).then(
        match: variable_name_without_bounds,
        tag_as: "entity.name.type.template"
        ).maybe(@spaces).then(
        match: /[=]/,
        tag_as: "keyword.operator.assignment"
        )
    )
# A (non-defaulted) template parameter; three alternatives are tried in order
# (see the case comments inline), then an optional trailing comma or a
# lookahead for ">" / end-of-line.
template_definition_argument = newPattern(
    repository_name: 'template_definition_argument',
    match: maybe(
        @spaces
    # case 1: only one word
        ).then(
        match: variable_name_without_bounds,
        tag_as: "storage.type.template.argument.$match",
    # case 2: normal situation (ex: "typename T")
        ).or(
            newPattern(
                match: oneOrMoreOf(variable_name_without_bounds.then(@spaces)),
                tag_as: "storage.type.template.argument.$match",
            ).then(
                match: variable_name_without_bounds,
                tag_as: "entity.name.type.template",
            )
    # case 3: ellipses (ex: "typename... Args")
        ).or(
            newPattern(
                match: variable_name_without_bounds,
                tag_as: "storage.type.template",
            ).maybe(@spaces).then(
                match: /\.\.\./,
                tag_as: "punctuation.vararg-ellipses.template.definition",
            ).maybe(@spaces).then(
                match: variable_name_without_bounds,
                tag_as: "entity.name.type.template"
            )
        ).maybe(@spaces).then(
            newPattern(
                match: /,/,
                tag_as: "punctuation.separator.comma.template.argument",
            ).or(
                lookAheadFor(/>|$/)
            )
        )
    )
#
# Scope resolution
#
# One "Name<...>::" step of a scope-resolution chain (template call optional).
one_scope_resolution = variable_name_without_bounds.maybe(@spaces).maybe(template_call.without_numbered_capture_groups).then(/::/)
# Zero or more leading scope steps before the final resolved name.
preceding_scopes = newPattern(
    match: zeroOrMoreOf(one_scope_resolution).maybe(@spaces),
    includes: [ :scope_resolution ]
    )
# A full scope resolution: preceding scopes, a namespace/class name, an
# optional template call, and the trailing "::" accessor.
scope_resolution = newPattern(
    repository_name: 'scope_resolution',
    tag_as: "meta.scope-resolution",
    match: preceding_scopes.then(
        match: variable_name_without_bounds,
        tag_as: "entity.name.namespace.scope-resolution"
        ).maybe(@spaces).maybe(
            template_call
        ).then(
            match: /::/,
            tag_as: "punctuation.separator.namespace.access"
        )
    )
#
# Functions
#
# Factory for "name(...)" Ranges: tags the name, the parentheses, and the
# argument region; used below for function-like operators and decltype.
functionTemplate = ->(repository_name:nil, match_name: nil, tag_name_as: nil, tag_content_as: nil, tag_parenthese_as: nil) do
    return Range.new(
        repository_name: repository_name,
        tag_content_as: "meta.#{tag_content_as}",
        start_pattern: newPattern(
            match: match_name,
            tag_as: tag_name_as,
            ).then(
            match: /\(/,
            tag_as: "punctuation.section.arguments.begin.bracket.round.#{tag_parenthese_as}"
            ),
        end_pattern: newPattern(
            match: /\)/,
            tag_as: "punctuation.section.arguments.end.bracket.round.#{tag_parenthese_as}"
            ),
        includes: evaluation_context
        )
end
# Words that look like calls but are not function names (keywords etc.);
# used to veto matches of the form "<word> (".
cant_be_a_function_name = @cpp_tokens.that(:isWord, not(:isPreprocessorDirective), not(:isValidFunctionName))
avoid_invalid_function_names = lookBehindToAvoid(@standard_character).lookAheadToAvoid(maybe(@spaces).then(cant_be_a_function_name).maybe(@spaces).then(/\(/))
look_ahead_for_function_name = lookAheadFor(variable_name_without_bounds.maybe(@spaces).then(/\(/))
# Parameter-list region of a function definition; ends just after ")".
function_definition = Range.new(
    tag_as: "meta.function.definition.parameters",
    start_pattern: avoid_invalid_function_names.then(look_ahead_for_function_name),
    end_pattern: lookBehindFor(/\)/),
    includes: [ "#function-innards-c" ]
    )
# a full match example of function call would be: aNameSpace::subClass<TemplateArg>FunctionName<5>(
function_call = Range.new(
    start_pattern: avoid_invalid_function_names.then(
        preceding_scopes
        ).then(
        match: variable_name_without_bounds,
        tag_as: "entity.name.function.call"
        ).maybe(@spaces).maybe(
            template_call
        ).then(
            match: /\(/,
            tag_as: "punctuation.section.arguments.begin.bracket.round"
        ),
    end_pattern: newPattern(
        match: /\)/,
        tag_as: "punctuation.section.arguments.end.bracket.round"
        ),
    includes: [ "#function-call-innards-c" ]
    )
#
# Operators
#
# Accumulates all operator patterns; filled below, then wrapped in `operators`.
operator_context = []
# Word-form operators (and/or/not/...) that are not casts, control flow,
# or function-like operators (those are handled separately).
normal_word_operators = newPattern(
    match: variableBounds[ @cpp_tokens.that(:isOperator, :isWord, not(:isTypeCastingOperator), not(:isControlFlow), not(:isFunctionLike)) ],
    tag_as: "keyword.operator.$match",
    )
# Function-like operators (e.g. sizeof-style tokens, minus specifiers) each
# get a "name(...)" Range built by functionTemplate.
array_of_function_like_operators = @cpp_tokens.tokens.select { |each| each[:isFunctionLike] && !each[:isSpecifier] }
for each in array_of_function_like_operators
    name = each[:name]
    operator_context.push(functionTemplate[
        repository_name: "#{name}_operator",
        match_name: variableBounds[/#{name}/],
        tag_name_as: "keyword.operator.#{name}",
        tag_content_as: "arguments.operator.#{name}",
        tag_parenthese_as: "operator.#{name} keyword.operator.#{name}"
    ])
end
# Remaining operators: decltype, casts, member/method access, symbolic
# operators (ordered so longer matches like "<<=" win over "<<" and "<"),
# and the ternary ?: as a raw begin/end rule.
operator_context += [
    functionTemplate[
        repository_name: "decltype_specifier",
        match_name: variableBounds[/decltype/],
        tag_name_as: "keyword.other.decltype storage.type.decltype",
        tag_content_as: "arguments.decltype",
        tag_parenthese_as: "decltype storage.type.decltype"
    ],
    type_casting_operators,
    :method_access,
    :member_access,
    normal_word_operators,
    :vararg_ellipses,
    {
        match: "--",
        name: "keyword.operator.decrement"
    },
    {
        match: "\\+\\+",
        name: "keyword.operator.increment"
    },
    {
        # the lookbehind avoids matching the "/=" in "(/=" (regex-ish contexts)
        match: "%=|\\+=|-=|\\*=|(?<!\\()/=",
        name: "keyword.operator.assignment.compound"
    },
    {
        match: "&=|\\^=|<<=|>>=|\\|=",
        name: "keyword.operator.assignment.compound.bitwise"
    },
    {
        match: "<<|>>",
        name: "keyword.operator.bitwise.shift"
    },
    {
        match: "!=|<=|>=|==|<|>",
        name: "keyword.operator.comparison"
    },
    {
        match: "&&|!|\\|\\|",
        name: "keyword.operator.logical"
    },
    {
        match: "&|\\||\\^|~",
        name: "keyword.operator"
    },
    {
        match: "=",
        name: "keyword.operator.assignment"
    },
    {
        match: "%|\\*|/|-|\\+",
        name: "keyword.operator"
    },
    {
        # ternary: "?" opens, ":" closes (applyEndPatternLast lets other
        # patterns claim ":" first, e.g. scope resolution)
        begin: "\\?",
        beginCaptures: {
            "0" => {
                name: "keyword.operator.ternary"
            }
        },
        end: ":",
        applyEndPatternLast: true,
        endCaptures: {
            "0" => {
                name: "keyword.operator.ternary"
            }
        },
        patterns: [
            {
                include: "#method_access"
            },
            {
                include: "#member_access"
            },
            {
                include: "#c_function_call"
            },
            {
                include: "$base"
            }
        ]
    }
]
# All operator patterns bundled under one repository entry.
operators = newPattern(
    repository_name: 'operators',
    includes: operator_context,
    )
#
# Probably a parameter
#
array_brackets = /\[\]/.maybe(@spaces)
comma_or_closing_paraenthese = /,/.or(/\)/)
# A parameter is followed by optional "[]" then "," or ")".
stuff_after_a_parameter = maybe(@spaces).lookAheadFor(maybe(array_brackets).then(comma_or_closing_paraenthese))
# Heuristic match for a parameter name: either a defaulted one (name before
# "=") or a name preceded by a type and followed by parameter-ending tokens.
probably_a_parameter = newPattern(
    repository_name: 'probably_a_parameter',
    match: newPattern(
        match: variable_name_without_bounds.maybe(@spaces).lookAheadFor("="),
        tag_as: "variable.parameter.defaulted"
        ).or(
        match: look_behind_for_type.then(variable_name_without_bounds).then(stuff_after_a_parameter),
        tag_as: "variable.parameter"
        )
    )
#
# Operator overload
#
# symbols can have spaces
operator_symbols = maybe(@spaces).then(@cpp_tokens.that(:canAppearAfterOperatorKeyword, :isSymbol))
# words must have spaces, the variable_name_without_bounds is for implicit overloads
operator_wordish = @spaces.then(@cpp_tokens.that(:canAppearAfterOperatorKeyword, :isWordish).or(zeroOrMoreOf(one_scope_resolution).then(variable_name_without_bounds).maybe(@spaces).maybe(/&/)))
after_operator_keyword = operator_symbols.or(operator_wordish)
# "operator<symbol-or-type>(" ... ")" — the overloaded entity plus its
# parameter list.
operator_overload = Range.new(
    repository_name: 'operator_overload',
    tag_as: "meta.function.definition.parameters.operator-overload",
    start_pattern: newPattern(
        match: /operator/,
        tag_as: "keyword.other.operator.overload",
        ).then(
        match: after_operator_keyword,
        tag_as: "entity.name.operator.overloadee",
        includes: [:scope_resolution]
        ).maybe(@spaces).then(
        match: /\(/,
        tag_as: "punctuation.section.parameters.begin.bracket.round"
        ),
    end_pattern: newPattern(
        match: /\)/,
        tag_as: "punctuation.section.parameters.end.bracket.round"
        ),
    includes: [:probably_a_parameter, :'function-innards-c' ]
    )
#
# Access . .* -> ->*
#
dot_operator = /\.\*/.or(/\./)
arrow_operator = /->\*/.or(/->/)
# "." / ".*" vs "->" / "->*" get different punctuation scopes.
member_operator = newPattern(
    match: dot_operator,
    tag_as: "punctuation.separator.dot-access"
    ).or(
    match: arrow_operator,
    tag_as: "punctuation.separator.pointer-access"
    )
# "name." / "name->" links for chained accesses like a.b->c.
subsequent_object_with_operator = variable_name_without_bounds.maybe(@spaces).then(member_operator.without_numbered_capture_groups).maybe(@spaces)
# TODO: the member_access and method_access can probably be simplified considerably
# TODO: member_access and method_access might also need additional matching to handle scope resolutions
# The leading object of an access chain; the lookbehind alternative lets
# "arr[0].x" / "f().x" match too.
partial_member = newPattern(
    match: variable_name_without_bounds.or(lookBehindFor(/\]|\)/)).maybe(@spaces),
    tag_as: "variable.other.object.access",
    ).then(
        member_operator
    )
member_context = [
    :member_access,
    :method_access,
    partial_member
]
# Object + zero or more chained accesses; shared start for both rules below.
member_start = partial_member.then(
    match: zeroOrMoreOf(subsequent_object_with_operator),
    includes: member_context
    ).maybe(@spaces)
# access to attribute
member_access = newPattern(
    repository_name: 'member_access',
    match: member_start.then(
        # final name must not be a type and must not be followed by "(" (that
        # would make it a method call — see method_access)
        match: @word_boundary.lookAheadToAvoid(@cpp_tokens.that(:isType)).then(variable_name_without_bounds).then(@word_boundary).lookAheadToAvoid(/\(/),
        tag_as: "variable.other.member"
        )
    )
# access to method
method_access = Range.new(
    repository_name: 'method_access',
    tag_content_as: "meta.function-call.member",
    start_pattern: member_start.then(
        match: variable_name_without_bounds,
        tag_as: "entity.name.function.member"
        ).then(
        match: /\(/,
        tag_as: "punctuation.section.arguments.begin.bracket.round.function.member"
        ),
    end_pattern: newPattern(
        match: /\)/,
        tag_as: "punctuation.section.arguments.end.bracket.round.function.member"
        ),
    includes: ["#function-call-innards-c"],
    )
#
# Namespace
#
# see https://en.cppreference.com/w/cpp/language/namespace
# "using namespace foo::bar;" — the lookahead for ";" or newline keeps the
# start pattern from running away on malformed input.
using_namespace = Range.new(
    tag_as: "meta.using-namespace-declaration",
    start_pattern: lookBehindToAvoid(@standard_character).then(
        match: /using/,
        tag_as: "keyword.other.using.directive",
        ).then(@spaces).then(
        match: /namespace/,
        tag_as: "keyword.other.namespace.directive storage.type.namespace.directive"
        ).then(@spaces).maybe(
            preceding_scopes
        ).then(
            match: variable_name,
            tag_as: "entity.name.namespace"
        ).lookAheadFor(
            /;|\n/
        ),
    end_pattern: newPattern(
        match: /;/,
        tag_as: "punctuation.terminator.statement"
        ),
    )
# TODO: add support for namespace name = qualified-namespace ;
# "namespace [name] { ... }" — supports scoped names and anonymous namespaces.
namespace_block = blockFinderFor(
    name: "namespace",
    tag_as: "meta.namespace-block",
    needs_semicolon: false,
    start_pattern: lookBehindToAvoid(@standard_character).then(
        match: /namespace/,
        tag_as: "keyword.other.namespace.definition storage.type.namespace.definition"
        ).then(@spaces).then(
            # Named namespace (with possible scope )
            preceding_scopes
        ).maybe(@spaces).then(
            newPattern(
                match: variable_name,
                tag_as: "entity.name.namespace",
            # anonymous namespaces
            ).or(
                lookAheadFor(/\{/)
            )
        ),
    )
#
# Preprocessor
#
# not sure if this pattern is actually accurate (it was the one provided by atom/c.tmLanguage)
preprocessor_name_no_bounds = /[a-zA-Z_$][\w$]*/
# A macro name immediately (modulo spaces) followed by "(" — function-like macro.
preprocessor_function_name = preprocessor_name_no_bounds.lookAheadFor(maybe(@spaces).then(/\(/))
# "##arg" token-pasted macro argument references.
macro_argument = newPattern(
    match: /##/.then(variable_name_without_bounds).lookAheadToAvoid(@standard_character),
    tag_as: "variable.other.macro.argument"
    )
#
# Lambdas
#
# Keywords that may legitimately precede "[" without it being array indexing.
array_of_invalid_function_names = @cpp_tokens.representationsThat(:canAppearBeforeLambdaCapture)
non_variable_name = /#{array_of_invalid_function_names.map { |each| '\W'+each+'|^'+each } .join('|')}/
# Whole lambda: capture list, optional parameter list, specifiers, optional
# trailing return type, and the body; ends after the closing "}".
lambdas = Range.new(
    repository_name: 'lambdas',
    start_pattern: newPattern(
        should_fully_match: [ "[]", "[=]", "[&]", "[x,y,x]", "[x, y, &z, w = 1 + 1]", "[ a = blah[1324], b, c ]" ],
        should_partial_match: [ "[]", "[=](", "[&]{", "[x,y,x]", "[x, y, &z, w = 1 + 1] (", "[ a = blah[1324], b, c ] {" ],
        should_not_partial_match: [ "delete[]", "thing[]", "thing []", "thing []", "thing[0][0] = 0" ],
        # lookbehinds reject array indexing ("thing[", ")[", "][") while still
        # allowing a lambda at line start or after an allowed keyword
        match: lookBehindFor(/[^\s]|^/).lookBehindToAvoid(/[\w\]\)]/).or(lookBehindFor(non_variable_name)).maybe(@spaces).then(
            match: /\[/,
            tag_as: "punctuation.definition.capture.begin.lambda",
            ).then(
            match: /(?:.*\[.*?\].*?)*.*?/,
            tag_as: "meta.lambda.capture",
            # the zeroOrMoreOf() is for other []'s that are inside of the lambda capture
            # this pattern is still imperfect: if someone had a string literal with ['s in it, it could fail
            includes: [ probably_a_parameter, "#function-innards-c" ],
            ).then(
            match: /\]/,
            tag_as: "punctuation.definition.capture.end.lambda",
            )
        ),
    end_pattern: newPattern(
        match: lookBehindFor(/}/),
        ),
    includes: [
        # check for parameters first
        Range.new(
            tag_as: 'meta.function.definition.parameters.lambda',
            start_pattern: newPattern(
                match: /\(/,
                tag_as: "punctuation.definition.parameters.begin.lambda",
                ),
            end_pattern: newPattern(
                match: /\)/,
                tag_as: "punctuation.definition.parameters.end.lambda",
                ),
            includes: [ probably_a_parameter, "#function-innards-c" ]
            ),
        # specificers
        newPattern(
            match: variableBounds[ @cpp_tokens.that(:isLambdaSpecifier) ],
            tag_as: "storage.modifier.lambda.$match"
            ),
        # check for the -> syntax
        newPattern(
            match: /->/,
            tag_as: "punctuation.definition.lambda.return-type"
            ).maybe(
            match: /.+?/.lookAheadFor(/\{|$/),
            tag_as: "storage.type.return-type.lambda"
            ),
        # then find the body
        Range.new(
            tag_as: "meta.function.definition.body.lambda",
            start_pattern: newPattern(
                match: /\{/,
                tag_as: "punctuation.section.block.begin.bracket.curly.lambda",
                ),
            end_pattern: newPattern(
                match: /\}/,
                tag_as: "punctuation.section.block.end.bracket.curly.lambda",
                ),
            includes: [ "$base" ]
            ),
    ]
    )
#
# Support
#
# TODO: currently this is not used, ideally it will be built up over time and then be included
# it will be for things such as cout, cin, vector, string, map, etc
#
# Classes, structs, unions, enums
#
# see https://en.cppreference.com/w/cpp/language/enum
# this range matches both the case with brackets and the case without brackets
enum_block = blockFinderFor(
    name: "enum",
    tag_as: "meta.enum-block",
    start_pattern: newPattern(
        match: /enum/,
        tag_as: "storage.type.enum"
        ).then(@spaces).maybe(
            # see "Scoped enumerations" on https://en.cppreference.com/w/cpp/language/enum
            newPattern(
                match: /class|struct/,
                tag_as: "storage.type.enum.enum-key.$match",
            ).then(@spaces)
        ).then(
            match: variable_name,
            tag_as: "entity.name.type.enum",
        ).maybe(
            # optional ": <underlying type>" specifier, possibly scope-qualified
            maybe(@spaces).then(
                match: /:/,
                tag_as: "punctuation.type-specifier.colon",
            ).maybe(@spaces).maybe(
                scope_resolution
            ).maybe(@spaces).then(
                match: variable_name,
                tag_as: "storage.type.integral.$match",
            )
        ),
    )
# the following are basically the equivalent of:
#     @cpp_tokens.that(:isAccessSpecifier).or(/,/).or(/:/)
# that ^ causes an error in the lookBehindFor() so it has to be manually spread
can_come_before_a_inherited_class = @cpp_tokens.representationsThat(:isAccessSpecifier) + [ ',', ':' ]
can_come_before_a_inherited_class_regex = /#{can_come_before_a_inherited_class.join('|')}/
# Patterns for a class's base-clause: separators, access specifiers, and the
# inherited class names themselves.
inhertance_context = [
    newPattern(
        match: /,/,
        tag_as: "punctuation.separator.delimiter.inhertance"
    ),
    newPattern(
        match: @cpp_tokens.that(:isAccessSpecifier),
        tag_as: "storage.type.modifier.access.$match",
    ),
    # a base-class name: must follow ":", "," or an access specifier, and
    # must not itself be an access specifier
    lookBehindFor(can_come_before_a_inherited_class_regex).maybe(@spaces).lookAheadToAvoid(@cpp_tokens.that(:isAccessSpecifier)).then(
        match: variable_name,
        tag_as: "entity.name.type.inherited"
    )
]
# Factory producing a blockFinder for "class"/"struct"/"union" definitions:
# keyword, type name, and an optional inheritance list (matched eagerly in
# the start pattern — see the inline rationale).
generateClassOrStructBlockFinder = ->(name) do
    return blockFinderFor(
        tag_as: "",
        name: name,
        start_pattern: newPattern(
            should_fully_match: ["#{name} foo: bar", "#{name} foo: public baz"],
            should_not_fully_match: ["#{name} foo {"],
            should_partial_match: ["#{name} foo f;", "#{name} st s;"],
            match: newPattern(
                reference: "storage_type",
                match: variableBounds[ /#{name}/ ],
                tag_as: "storage.type.$match",
                ).then(@spaces).then(
                match: variable_name,
                tag_as: "entity.name.type.$reference(storage_type)",
                ).maybe(maybe(@spaces).then(
                    match: /:/,
                    tag_as: "punctuation.inhertance.colon"
                # the following may seem redundant (removing it shouldn't change anything)
                # this is because the follow are matched by what is inside of this Range
                # However its preferable to match things here, in the Start (using a pattern), over matching it inside of the range
                # this is because the start pattern typically fails safely (is limited to 1 line), while typically Ranges fail dangerously (can match the whole document)
                ).maybe(@spaces).zeroOrMoreOf(
                    match: maybe(/,/).maybe(
                        @spaces
                    ).maybe(
                        @cpp_tokens.that(:isAccessSpecifier)
                    ).maybe(@spaces).oneOrMoreOf(
                        maybe(@spaces).maybe(/,/).maybe(
                            @spaces
                        ).lookAheadToAvoid(
                            @cpp_tokens.that(:isAccessSpecifier)
                        ).then(variable_name)
                    ),
                    includes: inhertance_context
                )
            ),
        ),
        head_includes: [
            "#angle_brackets",
            *inhertance_context,
        ],
        body_includes: [ "#special_block", "#constructor", "$base" ],
    )
end
# Concrete block finders for each class-like keyword.
class_block = generateClassOrStructBlockFinder["class"]
struct_block = generateClassOrStructBlockFinder["struct"]
union_block = generateClassOrStructBlockFinder["union"]
# the following is a legacy pattern, I'm not sure if it is still accurate
# I have no idea why it matches a double quote
# NOTE(review): the quote lookahead presumably targets linkage specifications
# like `extern "C" { ... }` — confirm before changing.
extern_block = blockFinderFor(
    name: 'extern',
    tag_as: "meta.extern-block",
    start_pattern: newPattern(
        match: /\bextern/,
        tag_as: "storage.type.extern"
        ).lookAheadFor(/\s*\"/),
    secondary_includes: [ "$base" ]
    )
# Registers the grammar's top-level ("initial context") patterns, in priority
# order.  Mixes the DSL patterns built above with legacy raw-hash rules
# inherited from atom's c/c++.tmLanguage.
cpp_grammar.initalContextIncludes(
    :special_block,
    macro_argument,
    :strings,
    functional_specifiers_pre_parameters,
    qualifiers_and_specifiers_post_parameters,
    storage_specifiers,
    access_control_keywords,
    exception_keywords,
    other_keywords,
    :memory_operators,
    the_this_keyword,
    language_constants,
    template_isolated_definition,
    template_definition,
    scope_resolution,
    {
        match: /\b(constexpr|export|mutable|typename|thread_local)\b/,
        name: "storage.modifier"
    },
    # legacy raw rule: destructor definition ("~Name(") possibly scope-qualified
    {
        name: "meta.function.destructor",
        begin: "(?x)\n(?:\n  ^ |                  # beginning of line\n  (?:(?<!else|new|=))  # or word + space before name\n)\n((?:[A-Za-z_][A-Za-z0-9_]*::)*+~[A-Za-z_][A-Za-z0-9_]*) # actual name\n\\s*(\\()               # opening bracket",
        beginCaptures: {
            "1" => {
                name: "entity.name.function.destructor"
            },
            "2" => {
                name: "punctuation.definition.parameters.begin.destructor"
            }
        },
        end: /\)/,
        endCaptures: {
            "0" => {
                name: "punctuation.definition.parameters.end.destructor"
            }
        },
        patterns: [
            {
                include: "$base"
            }
        ]
    },
    # legacy raw rule: destructor prototype (same begin regex, different scopes)
    {
        name: "meta.function.destructor.prototype",
        begin: "(?x)\n(?:\n  ^ |                  # beginning of line\n  (?:(?<!else|new|=))  # or word + space before name\n)\n((?:[A-Za-z_][A-Za-z0-9_]*::)*+~[A-Za-z_][A-Za-z0-9_]*) # actual name\n\\s*(\\()               # opening bracket",
        beginCaptures: {
            "1" => {
                name: "entity.name.function"
            },
            "2" => {
                name: "punctuation.definition.parameters.begin"
            }
        },
        end: /\)/,
        endCaptures: {
            "0" => {
                name: "punctuation.definition.parameters.end"
            }
        },
        patterns: [
            {
                include: "$base"
            }
        ]
    },
    lambdas,
    #
    # C patterns
    #
    "#preprocessor-rule-enabled",
    "#preprocessor-rule-disabled",
    "#preprocessor-rule-conditional",
    "#comments-c",
    control_flow_keywords,
    storage_types,
    {
        match: "\\b(const|extern|register|restrict|static|volatile|inline)\\b",
        name: "storage.modifier"
    },
    operator_overload,
    number_literal,
    :strings_c,
    # legacy raw rule: "#define" macro definitions (with optional parameter list)
    {
        name: "meta.preprocessor.macro",
        begin: "(?x)\n^\\s* ((\\#)\\s*define) \\s+\t# define\n((?<id>#{preprocessor_name_no_bounds}))\t # macro name\n(?:\n  (\\()\n\t(\n\t  \\s* \\g<id> \\s*\t\t # first argument\n\t  ((,) \\s* \\g<id> \\s*)*  # additional arguments\n\t  (?:\\.\\.\\.)?\t\t\t# varargs ellipsis?\n\t)\n  (\\))\n)?",
        beginCaptures: {
            "1" => {
                name: "keyword.control.directive.define"
            },
            "2" => {
                name: "punctuation.definition.directive"
            },
            "3" => {
                name: "entity.name.function.preprocessor"
            },
            "5" => {
                name: "punctuation.definition.parameters.begin"
            },
            "6" => {
                name: "variable.parameter.preprocessor"
            },
            "8" => {
                name: "punctuation.separator.parameters"
            },
            "9" => {
                name: "punctuation.definition.parameters.end"
            }
        },
        end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
        patterns: [
            {
                include: "#preprocessor-rule-define-line-contents"
            }
        ]
    },
    # legacy raw rule: "#error" / "#warning" diagnostics, with string handling
    {
        name: "meta.preprocessor.diagnostic",
        begin: "^\\s*((#)\\s*(error|warning))\\b\\s*",
        beginCaptures: {
            "1" => {
                name: "keyword.control.directive.diagnostic.$3"
            },
            "2" => {
                name: "punctuation.definition.directive"
            }
        },
        end: "(?<!\\\\)(?=\\n)",
        patterns: [
            {
                begin: "\"",
                beginCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.begin"
                    }
                },
                end: "\"|(?<!\\\\)(?=\\s*\\n)",
                endCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.end"
                    }
                },
                name: "string.quoted.double",
                patterns: [
                    {
                        include: "#line_continuation_character"
                    }
                ]
            },
            {
                begin: "'",
                beginCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.begin"
                    }
                },
                end: "'|(?<!\\\\)(?=\\s*\\n)",
                endCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.end"
                    }
                },
                name: "string.quoted.single",
                patterns: [
                    {
                        include: "#line_continuation_character"
                    }
                ]
            },
            {
                begin: "[^'\"]",
                end: "(?<!\\\\)(?=\\s*\\n)",
                name: "string.unquoted.single",
                patterns: [
                    {
                        include: "#line_continuation_character"
                    },
                    {
                        include: "#comments-c"
                    }
                ]
            }
        ]
    },
    # legacy raw rule: "#include" / "#include_next" / "#import" directives
    {
        name: "meta.preprocessor.include",
        begin: "^\\s*((#)\\s*(include(?:_next)?|import))\\b\\s*",
        beginCaptures: {
            "1" => {
                name: "keyword.control.directive.$3"
            },
            "2" => {
                name: "punctuation.definition.directive"
            }
        },
        end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
        patterns: [
            {
                include: "#line_continuation_character"
            },
            {
                begin: "\"",
                beginCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.begin"
                    }
                },
                end: "\"",
                endCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.end"
                    }
                },
                name: "string.quoted.double.include"
            },
            {
                begin: "<",
                beginCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.begin"
                    }
                },
                end: ">",
                endCaptures: {
                    "0" => {
                        name: "punctuation.definition.string.end"
                    }
                },
                name: "string.quoted.other.lt-gt.include"
            }
        ]
    },
    "#pragma-mark",
    # legacy raw rule: "#line" directive
    {
        name: "meta.preprocessor",
        begin: "^\\s*((#)\\s*line)\\b",
        beginCaptures: {
            "1" => {
                name: "keyword.control.directive.line"
            },
            "2" => {
                name: "punctuation.definition.directive"
            }
        },
        end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
        patterns: [
            {
                include: "#strings_c"
            },
            {
                include: "#number_literal"
            },
            {
                include: "#line_continuation_character"
            }
        ]
    },
    # legacy raw rule: "#undef" directive
    {
        name: "meta.preprocessor",
        begin: "^\\s*(?:((#)\\s*undef))\\b",
        beginCaptures: {
            "1" => {
                name: "keyword.control.directive.undef"
            },
            "2" => {
                name: "punctuation.definition.directive"
            }
        },
        end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
        patterns: [
            {
                match: preprocessor_name_no_bounds,
                name: "entity.name.function.preprocessor"
            },
            {
                include: "#line_continuation_character"
            }
        ]
    },
    # legacy raw rule: "#pragma" directive
    {
        name: "meta.preprocessor.pragma",
        begin: "^\\s*(?:((#)\\s*pragma))\\b",
        beginCaptures: {
            "1" => {
                name: "keyword.control.directive.pragma"
            },
            "2" => {
                name: "punctuation.definition.directive"
            }
        },
        end: "(?=(?://|/\\*))|(?<!\\\\)(?=\\n)",
        patterns: [
            {
                include: "#strings_c"
            },
            {
                match: "[a-zA-Z_$][\\w\\-$]*",
                name: "entity.other.attribute-name.pragma.preprocessor"
            },
            {
                include: "#number_literal"
            },
            {
                include: "#line_continuation_character"
            }
        ]
    },
    :operators,
    # well-known typedef names from system headers
    {
        match: "\\b(u_char|u_short|u_int|u_long|ushort|uint|u_quad_t|quad_t|qaddr_t|caddr_t|daddr_t|div_t|dev_t|fixpt_t|blkcnt_t|blksize_t|gid_t|in_addr_t|in_port_t|ino_t|key_t|mode_t|nlink_t|id_t|pid_t|off_t|segsz_t|swblk_t|uid_t|id_t|clock_t|size_t|ssize_t|time_t|useconds_t|suseconds_t)\\b",
        name: "support.type.sys-types"
    },
    {
        match: "\\b(pthread_attr_t|pthread_cond_t|pthread_condattr_t|pthread_mutex_t|pthread_mutexattr_t|pthread_once_t|pthread_rwlock_t|pthread_rwlockattr_t|pthread_t|pthread_key_t)\\b",
        name: "support.type.pthread"
    },
    {
        match: "(?x) \\b\n(int8_t|int16_t|int32_t|int64_t|uint8_t|uint16_t|uint32_t|uint64_t|int_least8_t\n|int_least16_t|int_least32_t|int_least64_t|uint_least8_t|uint_least16_t|uint_least32_t\n|uint_least64_t|int_fast8_t|int_fast16_t|int_fast32_t|int_fast64_t|uint_fast8_t\n|uint_fast16_t|uint_fast32_t|uint_fast64_t|intptr_t|uintptr_t|intmax_t|intmax_t\n|uintmax_t|uintmax_t)\n\\b",
        name: "support.type.stdint"
    },
    posix_reserved_types,
    "#block-c",
    "#parens-c",
    function_definition,
    "#line_continuation_character",
    # legacy raw rule: square-bracket element access "name[...]"
    {
        name: "meta.bracket.square.access",
        begin: "([a-zA-Z_][a-zA-Z_0-9]*|(?<=[\\]\\)]))?(\\[)(?!\\])",
        beginCaptures: {
            "1" => {
                name: "variable.other.object"
            },
            "2" => {
                name: "punctuation.definition.begin.bracket.square"
            }
        },
        end: "\\]",
        endCaptures: {
            "0" => {
                name: "punctuation.definition.end.bracket.square"
            }
        },
        patterns: [
            {
                include: "#function-call-innards-c"
            }
        ]
    },
    # "[]" (not preceded by delete) as an array storage modifier
    # NOTE(review): the doubled backslashes inside this regex literal look
    # suspicious — inside /.../, `\\[` matches a literal backslash then opens a
    # character class. Verify the generated grammar actually matches "[ ]" here.
    {
        name: "storage.modifier.array.bracket.square",
        match: /#{lookBehindToAvoid(/delete/)}\\[\\s*\\]/
    },
    {
        match: ";",
        name: "punctuation.terminator.statement"
    },
    {
        match: ",",
        name: "punctuation.separator.delimiter"
    }
)
cpp_grammar.addToRepository({
"angle_brackets" => {
begin: "<",
end: ">",
name: "meta.angle-brackets",
patterns: [
{
include: "#angle_brackets"
},
{
include: "$base"
}
]
},
"block" => {
begin: "\\{",
beginCaptures: {
"0" => {
name: "punctuation.section.block.begin.bracket.curly"
}
},
end: "\\}",
endCaptures: {
"0" => {
name: "punctuation.section.block.end.bracket.curly"
}
},
name: "meta.block",
patterns: [
{
captures: {
"1" => {
name: "support.function.any-method"
},
"2" => {
name: "punctuation.definition.parameters"
}
},
match: "(?x)\n(\n (?!while|for|do|if|else|switch|catch|return)\n (?:\\b[A-Za-z_][A-Za-z0-9_]*+\\b|::)*+ # actual name\n)\n\\s*(\\() # opening bracket",
name: "meta.function-call"
},
{
include: "$base"
}
]
},
"constructor" => {
patterns: [
{
begin: "(?x)\n(?:^\\s*) # beginning of line\n((?!while|for|do|if|else|switch|catch)[A-Za-z_][A-Za-z0-9_:]*) # actual name\n\\s*(\\() # opening bracket",
beginCaptures: {
"1" => {
name: "entity.name.function.constructor"
},
"2" => {
name: "punctuation.definition.parameters.begin.constructor"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.definition.parameters.end.constructor"
}
},
name: "meta.function.constructor",
patterns: [
{
include: "#probably_a_parameter"
},
{
include: "#function-innards-c"
}
]
},
{
begin: "(?x)\n(:)\n(\n (?=\n \\s*[A-Za-z_][A-Za-z0-9_:]* # actual name\n \\s* (\\() # opening bracket\n )\n)",
beginCaptures: {
"1" => {
name: "punctuation.definition.initializer-list.parameters"
}
},
end: "(?=\\{)",
name: "meta.function.constructor.initializer-list",
patterns: [
{
include: "$base"
}
]
}
]
},
"special_block" => {
patterns: [
using_namespace.to_tag,
namespace_block.to_tag,
class_block.to_tag,
struct_block.to_tag,
union_block.to_tag,
enum_block.to_tag,
extern_block.to_tag,
]
},
# TODO: "strings" is included and it is different from "strings_c", but its not used anywhere. Figure out whats going on here
"strings" => {
patterns: [
{
begin: "(u|u8|U|L)?\"",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
},
"1" => {
name: "meta.encoding"
}
},
end: "\"",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.double",
patterns: [
{
match: "\\\\u\\h{4}|\\\\U\\h{8}",
name: "constant.character.escape"
},
{
match: "\\\\['\"?\\\\abfnrtv]",
name: "constant.character.escape"
},
{
match: "\\\\[0-7]{1,3}",
name: "constant.character.escape"
},
{
match: "\\\\x\\h+",
name: "constant.character.escape"
},
{
include: "#string_placeholder-c"
}
]
},
{
begin: "(u|u8|U|L)?R\"(?:([^ ()\\\\\\t]{0,16})|([^ ()\\\\\\t]*))\\(",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
},
"1" => {
name: "meta.encoding"
},
"3" => {
name: "invalid.illegal.delimiter-too-long"
}
},
end: "\\)\\2(\\3)\"",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
},
"1" => {
name: "invalid.illegal.delimiter-too-long"
}
},
name: "string.quoted.double.raw"
}
]
},
"block-c" => {
patterns: [
{
begin: "{",
beginCaptures: {
"0" => {
name: "punctuation.section.block.begin.bracket.curly"
}
},
end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)",
endCaptures: {
"0" => {
name: "punctuation.section.block.end.bracket.curly"
}
},
name: "meta.block",
patterns: [
{
include: "#block_innards-c"
}
]
}
]
},
"block_innards-c" => {
patterns: [
{
include: "#preprocessor-rule-enabled-block"
},
{
include: "#preprocessor-rule-disabled-block"
},
{
include: "#preprocessor-rule-conditional-block"
},
{
include: "#method_access"
},
{
include: "#member_access"
},
{
include: "#c_function_call"
},
{
name: "meta.initialization",
begin: "(?x)\n(?:\n (?:\n\t(?=\\s)(?<!else|new|return)\n\t(?<=\\w) \\s+(and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas) # or word + space before name\n )\n)\n(\n (?:[A-Za-z_][A-Za-z0-9_]*+ | :: )++ # actual name\n |\n (?:(?<=operator) (?:[-*&<>=+!]+ | \\(\\) | \\[\\]))\n)\n\\s*(\\() # opening bracket",
beginCaptures: {
"1" => {
name: "variable.other"
},
"2" => {
name: "punctuation.section.parens.begin.bracket.round.initialization"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.section.parens.end.bracket.round.initialization"
}
},
patterns: [
{
include: "#function-call-innards-c"
}
]
},
{
begin: "{",
beginCaptures: {
"0" => {
name: "punctuation.section.block.begin.bracket.curly"
}
},
end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)",
endCaptures: {
"0" => {
name: "punctuation.section.block.end.bracket.curly"
}
},
patterns: [
{
include: "#block_innards-c"
}
]
},
{
include: "#parens-block-c"
},
{
include: "$base"
}
]
},
"c_function_call" => {
begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas|constexpr|volatile|operator|(?:::)?new|(?:::)?delete)\\s*\\()\n(?=\n(?:[A-Za-z_][A-Za-z0-9_]*+|::)++\\s*#{maybe(template_call.without_numbered_capture_groups)}\\( # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\\s*\\(\n)",
end: "(?<=\\))(?!\\w)",
name: "meta.function-call",
patterns: [
{
include: "#function-call-innards-c"
}
]
},
"comments-c" => {
patterns: [
{
captures: {
"1" => {
name: "meta.toc-list.banner.block"
}
},
match: "^/\\* =(\\s*.*?)\\s*= \\*/$\\n?",
name: "comment.block"
},
{
begin: "/\\*",
beginCaptures: {
"0" => {
name: "punctuation.definition.comment.begin"
}
},
end: "\\*/",
endCaptures: {
"0" => {
name: "punctuation.definition.comment.end"
}
},
name: "comment.block"
},
{
captures: {
"1" => {
name: "meta.toc-list.banner.line"
}
},
match: "^// =(\\s*.*?)\\s*=\\s*$\\n?",
name: "comment.line.banner"
},
{
begin: "(^[ \\t]+)?(?=//)",
beginCaptures: {
"1" => {
name: "punctuation.whitespace.comment.leading"
}
},
end: "(?!\\G)",
patterns: [
{
begin: "//",
beginCaptures: {
"0" => {
name: "punctuation.definition.comment"
}
},
end: "(?=\\n)",
name: "comment.line.double-slash",
patterns: [
{
include: "#line_continuation_character"
}
]
}
]
}
]
},
"disabled" => {
begin: "^\\s*#\\s*if(n?def)?\\b.*$",
end: "^\\s*#\\s*endif\\b",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
"line_continuation_character" => {
patterns: [
{
match: "(\\\\)\\n",
captures: {
"1" => {
name: "constant.character.escape.line-continuation"
}
}
}
]
},
"parens-c" => {
name: "punctuation.section.parens-c",
begin: "\\(",
beginCaptures: {
"0" => {
name: "punctuation.section.parens.begin.bracket.round"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.section.parens.end.bracket.round"
}
},
patterns: [
{
include: "$base"
}
]
},
"parens-block-c" => {
name: "meta.block.parens",
begin: "\\(",
beginCaptures: {
"0" => {
name: "punctuation.section.parens.begin.bracket.round"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.section.parens.end.bracket.round"
}
},
patterns: [
{
include: "#block_innards-c"
},
{
match: lookBehindToAvoid(/:/).then(/:/).lookAheadToAvoid(/:/),
name: "punctuation.range-based"
}
]
},
"pragma-mark" => {
captures: {
"1" => {
name: "meta.preprocessor.pragma"
},
"2" => {
name: "keyword.control.directive.pragma.pragma-mark"
},
"3" => {
name: "punctuation.definition.directive"
},
"4" => {
name: "entity.name.tag.pragma-mark"
}
},
match: "^\\s*(((#)\\s*pragma\\s+mark)\\s+(.*))",
name: "meta.section"
},
"strings_c" => {
patterns: [
{
begin: "\"",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "\"",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.double",
patterns: [
{
include: "#string_escaped_char-c"
},
{
include: "#string_placeholder-c"
},
{
include: "#line_continuation_character"
}
]
},
{
begin: lookBehindToAvoid(/[\da-fA-F]/).then(/'/),
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "'",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.single",
patterns: [
{
include: "#string_escaped_char-c"
},
{
include: "#line_continuation_character"
}
]
}
]
},
"string_escaped_char-c" => {
patterns: [
{
match: "(?x)\\\\ (\n\\\\\t\t\t |\n[abefnprtv'\"?] |\n[0-3]\\d{,2}\t |\n[4-7]\\d?\t\t|\nx[a-fA-F0-9]{,2} |\nu[a-fA-F0-9]{,4} |\nU[a-fA-F0-9]{,8} )",
name: "constant.character.escape"
},
{
match: "\\\\.",
name: "invalid.illegal.unknown-escape"
}
]
},
"string_placeholder-c" => {
patterns: [
{
match: "(?x) %\n(\\d+\\$)?\t\t\t\t\t\t # field (argument #)\n[#0\\- +']*\t\t\t\t\t\t # flags\n[,;:_]?\t\t\t\t\t\t\t # separator character (AltiVec)\n((-?\\d+)|\\*(-?\\d+\\$)?)?\t\t # minimum field width\n(\\.((-?\\d+)|\\*(-?\\d+\\$)?)?)?\t# precision\n(hh|h|ll|l|j|t|z|q|L|vh|vl|v|hv|hl)? # length modifier\n[diouxXDOUeEfFgGaACcSspn%]\t\t # conversion type",
name: "constant.other.placeholder"
},
# I don't think these are actual escapes, and they incorrectly mark valid strings
# It might be related to printf and format from C (which is low priority for C++)
# {
# match: "(%)(?!\"\\s*(PRI|SCN))",
# captures: {
# "1" => {
# name: "constant.other.placeholder"
# }
# }
# }
]
},
"vararg_ellipses" => {
match: "(?<!\\.)\\.\\.\\.(?!\\.)",
name: "punctuation.vararg-ellipses"
},
"preprocessor-rule-conditional" => {
patterns: [
{
begin: "^\\s*((#)\\s*if(?:n?def)?\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "^\\s*((#)\\s*endif\\b)",
endCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#preprocessor-rule-enabled-elif"
},
{
include: "#preprocessor-rule-enabled-else"
},
{
include: "#preprocessor-rule-disabled-elif"
},
{
begin: "^\\s*((#)\\s*elif\\b)",
beginCaptures: {
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "$base"
}
]
},
{
match: "^\\s*#\\s*(else|elif|endif)\\b",
captures: {
"0" => {
name: "invalid.illegal.stray-$1"
}
}
}
]
},
"preprocessor-rule-conditional-block" => {
patterns: [
{
begin: "^\\s*((#)\\s*if(?:n?def)?\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "^\\s*((#)\\s*endif\\b)",
endCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#preprocessor-rule-enabled-elif-block"
},
{
include: "#preprocessor-rule-enabled-else-block"
},
{
include: "#preprocessor-rule-disabled-elif"
},
{
begin: "^\\s*((#)\\s*elif\\b)",
beginCaptures: {
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#block_innards-c"
}
]
},
{
match: "^\\s*#\\s*(else|elif|endif)\\b",
captures: {
"0" => {
name: "invalid.illegal.stray-$1"
}
}
}
]
},
"preprocessor-rule-conditional-line" => {
patterns: [
{
match: "(?:\\bdefined\\b\\s*$)|(?:\\bdefined\\b(?=\\s*\\(*\\s*(?:(?!defined\\b)[a-zA-Z_$][\\w$]*\\b)\\s*\\)*\\s*(?:\\n|//|/\\*|\\?|\\:|&&|\\|\\||\\\\\\s*\\n)))",
name: "keyword.control.directive.conditional"
},
{
match: "\\bdefined\\b",
name: "invalid.illegal.macro-name"
},
{
include: "#comments-c"
},
{
include: "#strings_c"
},
{
include: "#number_literal"
},
{
begin: "\\?",
beginCaptures: {
"0" => {
name: "keyword.operator.ternary"
}
},
end: ":",
endCaptures: {
"0" => {
name: "keyword.operator.ternary"
}
},
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#operators"
},
{
include: "#constants"
},
{
match: preprocessor_name_no_bounds,
name: "entity.name.function.preprocessor"
},
{
include: "#line_continuation_character"
},
{
begin: "\\(",
beginCaptures: {
"0" => {
name: "punctuation.section.parens.begin.bracket.round"
}
},
end: "\\)|(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
endCaptures: {
"0" => {
name: "punctuation.section.parens.end.bracket.round"
}
},
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
}
]
},
"preprocessor-rule-disabled" => {
patterns: [
{
begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0+\\b\\)*\\s*(?:$|//|/\\*))",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "^\\s*((#)\\s*endif\\b)",
endCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#comments-c"
},
{
include: "#preprocessor-rule-enabled-elif"
},
{
include: "#preprocessor-rule-enabled-else"
},
{
include: "#preprocessor-rule-disabled-elif"
},
{
begin: "^\\s*((#)\\s*elif\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*(?:elif|else|endif)\\b))",
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "$base"
}
]
},
{
begin: "\\n",
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
"contentName" => "comment.block.preprocessor.if-branch",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
}
]
}
]
},
"preprocessor-rule-disabled-block" => {
patterns: [
{
begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0+\\b\\)*\\s*(?:$|//|/\\*))",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "^\\s*((#)\\s*endif\\b)",
endCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#comments-c"
},
{
include: "#preprocessor-rule-enabled-elif-block"
},
{
include: "#preprocessor-rule-enabled-else-block"
},
{
include: "#preprocessor-rule-disabled-elif"
},
{
begin: "^\\s*((#)\\s*elif\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*(?:elif|else|endif)\\b))",
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#block_innards-c"
}
]
},
{
begin: "\\n",
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
"contentName" => "comment.block.preprocessor.if-branch.in-block",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
}
]
}
]
},
"preprocessor-rule-disabled-elif" => {
begin: "^\\s*((#)\\s*elif\\b)(?=\\s*\\(*\\b0+\\b\\)*\\s*(?:$|//|/\\*))",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*(?:elif|else|endif)\\b))",
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#comments-c"
},
{
begin: "\\n",
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
"contentName" => "comment.block.preprocessor.elif-branch",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
}
]
},
"preprocessor-rule-enabled" => {
patterns: [
{
begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
},
"3" => {
name: "constant.numeric.preprocessor"
}
},
end: "^\\s*((#)\\s*endif\\b)",
endCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#comments-c"
},
{
begin: "^\\s*((#)\\s*else\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
"contentName" => "comment.block.preprocessor.else-branch",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
begin: "^\\s*((#)\\s*elif\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
"contentName" => "comment.block.preprocessor.if-branch",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
begin: "\\n",
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
patterns: [
{
include: "$base"
}
]
}
]
}
]
},
"preprocessor-rule-enabled-block" => {
patterns: [
{
begin: "^\\s*((#)\\s*if\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "^\\s*((#)\\s*endif\\b)",
endCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#comments-c"
},
{
begin: "^\\s*((#)\\s*else\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
"contentName" => "comment.block.preprocessor.else-branch.in-block",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
begin: "^\\s*((#)\\s*elif\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
"contentName" => "comment.block.preprocessor.if-branch.in-block",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
begin: "\\n",
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
patterns: [
{
include: "#block_innards-c"
}
]
}
]
}
]
},
"preprocessor-rule-enabled-elif" => {
begin: "^\\s*((#)\\s*elif\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#comments-c"
},
{
begin: "\\n",
end: "(?=^\\s*((#)\\s*(?:endif)\\b))",
patterns: [
{
begin: "^\\s*((#)\\s*(else)\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
"contentName" => "comment.block.preprocessor.elif-branch",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
begin: "^\\s*((#)\\s*(elif)\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
"contentName" => "comment.block.preprocessor.elif-branch",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
include: "$base"
}
]
}
]
},
"preprocessor-rule-enabled-elif-block" => {
begin: "^\\s*((#)\\s*elif\\b)(?=\\s*\\(*\\b0*1\\b\\)*\\s*(?:$|//|/\\*))",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
patterns: [
{
begin: "\\G(?=.)(?!//|/\\*(?!.*\\\\\\s*\\n))",
end: "(?=//)|(?=/\\*(?!.*\\\\\\s*\\n))|(?<!\\\\)(?=\\n)",
name: "meta.preprocessor",
patterns: [
{
include: "#preprocessor-rule-conditional-line"
}
]
},
{
include: "#comments-c"
},
{
begin: "\\n",
end: "(?=^\\s*((#)\\s*(?:endif)\\b))",
patterns: [
{
begin: "^\\s*((#)\\s*(else)\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
"contentName" => "comment.block.preprocessor.elif-branch.in-block",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
begin: "^\\s*((#)\\s*(elif)\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*(?:else|elif|endif)\\b))",
"contentName" => "comment.block.preprocessor.elif-branch",
patterns: [
{
include: "#disabled"
},
{
include: "#pragma-mark"
}
]
},
{
include: "#block_innards-c"
}
]
}
]
},
"preprocessor-rule-enabled-else" => {
begin: "^\\s*((#)\\s*else\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
patterns: [
{
include: "$base"
}
]
},
"preprocessor-rule-enabled-else-block" => {
begin: "^\\s*((#)\\s*else\\b)",
beginCaptures: {
"0" => {
name: "meta.preprocessor"
},
"1" => {
name: "keyword.control.directive.conditional"
},
"2" => {
name: "punctuation.definition.directive"
}
},
end: "(?=^\\s*((#)\\s*endif\\b))",
patterns: [
{
include: "#block_innards-c"
}
]
},
"preprocessor-rule-define-line-contents" => {
patterns: [
{
include: "#vararg_ellipses"
},
{
match: /##?/.then(variable_name_without_bounds).lookAheadToAvoid(@standard_character),
name: "variable.other.macro.argument"
},
{
begin: "{",
beginCaptures: {
"0" => {
name: "punctuation.section.block.begin.bracket.curly"
}
},
end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"0" => {
name: "punctuation.section.block.end.bracket.curly"
}
},
name: "meta.block",
patterns: [
{
include: "#preprocessor-rule-define-line-blocks"
}
]
},
{
match: "\\(",
name: "punctuation.section.parens.begin.bracket.round"
},
{
match: "\\)",
name: "punctuation.section.parens.end.bracket.round"
},
{
begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas|asm|__asm__|auto|bool|_Bool|char|_Complex|double|enum|float|_Imaginary|int|long|short|signed|struct|typedef|union|unsigned|void)\\s*\\()\n(?=\n (?:[A-Za-z_][A-Za-z0-9_]*+|::)++\\s*\\( # actual name\n |\n (?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\\s*\\(\n)",
end: "(?<=\\))(?!\\w)|(?<!\\\\)(?=\\s*\\n)",
name: "meta.function",
patterns: [
{
include: "#preprocessor-rule-define-line-functions"
}
]
},
{
begin: "\"",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "\"|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.double",
patterns: [
{
include: "#string_escaped_char-c"
},
{
include: "#string_placeholder-c"
},
{
include: "#line_continuation_character"
}
]
},
{
begin: "'",
beginCaptures: {
"0" => {
name: "punctuation.definition.string.begin"
}
},
end: "'|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"0" => {
name: "punctuation.definition.string.end"
}
},
name: "string.quoted.single",
patterns: [
{
include: "#string_escaped_char-c"
},
{
include: "#line_continuation_character"
}
]
},
{
include: "#method_access"
},
{
include: "#member_access"
},
{
include: "$base"
}
]
},
"preprocessor-rule-define-line-blocks" => {
patterns: [
{
begin: "{",
beginCaptures: {
"0" => {
name: "punctuation.section.block.begin.bracket.curly"
}
},
end: "}|(?=\\s*#\\s*(?:elif|else|endif)\\b)|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"0" => {
name: "punctuation.section.block.end.bracket.curly"
}
},
patterns: [
{
include: "#preprocessor-rule-define-line-blocks"
},
{
include: "#preprocessor-rule-define-line-contents"
}
]
},
{
include: "#preprocessor-rule-define-line-contents"
}
]
},
"preprocessor-rule-define-line-functions" => {
patterns: [
{
include: "#comments-c"
},
{
include: "#storage_types"
},
{
include: "#vararg_ellipses"
},
{
include: "#method_access"
},
{
include: "#member_access"
},
{
include: "#operators"
},
{
begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\\s*\\()\n(\n(?:[A-Za-z_][A-Za-z0-9_]*+|::)++ # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\n)\n\\s*(\\()",
beginCaptures: {
"1" => {
name: "entity.name.function"
},
"2" => {
name: "punctuation.section.arguments.begin.bracket.round"
}
},
end: "(\\))|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"1" => {
name: "punctuation.section.arguments.end.bracket.round"
}
},
patterns: [
{
include: "#preprocessor-rule-define-line-functions"
}
]
},
{
begin: "\\(",
beginCaptures: {
"0" => {
name: "punctuation.section.parens.begin.bracket.round"
}
},
end: "(\\))|(?<!\\\\)(?=\\s*\\n)",
endCaptures: {
"1" => {
name: "punctuation.section.parens.end.bracket.round"
}
},
patterns: [
{
include: "#preprocessor-rule-define-line-functions"
}
]
},
{
include: "#preprocessor-rule-define-line-contents"
}
]
},
"function-innards-c" => {
patterns: [
{
include: "#comments-c"
},
{
include: "#storage_types"
},
{
include: "#operators"
},
{
include: "#vararg_ellipses"
},
{
name: "meta.function.definition.parameters",
begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\\s*\\()\n(\n(?:[A-Za-z_][A-Za-z0-9_]*+|::)++ # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\n)\n\\s*(\\()",
beginCaptures: {
"1" => {
name: "entity.name.function"
},
"2" => {
name: "punctuation.section.parameters.begin.bracket.round"
},
},
end: /\)|:/,
endCaptures: {
"0" => {
name: "punctuation.section.parameters.end.bracket.round"
}
},
patterns: [
{
include: "#probably_a_parameter"
},
{
include: "#function-innards-c"
}
]
},
{
begin: "\\(",
beginCaptures: {
"0" => {
name: "punctuation.section.parens.begin.bracket.round"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.section.parens.end.bracket.round"
}
},
patterns: [
{
include: "#function-innards-c"
}
]
},
{
include: "$base"
}
]
},
"function-call-innards-c" => {
patterns: [
{
include: "#comments-c"
},
{
include: "#storage_types"
},
{
include: "#method_access"
},
{
include: "#member_access"
},
{
include: "#operators"
},
{
begin: "(?x)\n(?!(?:while|for|do|if|else|switch|catch|return|typeid|alignof|alignas|sizeof|and|and_eq|bitand|bitor|compl|not|not_eq|or|or_eq|typeid|xor|xor_eq|alignof|alignas)\\s*\\()\n(\n(?:new)\\s*(#{maybe(template_call.without_numbered_capture_groups)}) # actual name\n|\n(?:(?<=operator)(?:[-*&<>=+!]+|\\(\\)|\\[\\]))\n)\n\\s*(\\()",
beginCaptures: {
"1" => {
name: "keyword.operator.memory.new"
},
"2" => {
patterns: [
{
include: "#template_call_innards"
}
]
},
"3" => {
name: "punctuation.section.arguments.begin.bracket.round"
},
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.section.arguments.end.bracket.round"
}
},
patterns: [
{
include: "#function-call-innards-c"
}
]
},
function_call.to_tag,
{
begin: "\\(",
beginCaptures: {
"0" => {
name: "punctuation.section.parens.begin.bracket.round"
}
},
end: "\\)",
endCaptures: {
"0" => {
name: "punctuation.section.parens.end.bracket.round"
}
},
patterns: [
{
include: "#function-call-innards-c"
}
]
},
{
include: "#block_innards-c"
}
]
}
})
Dir.chdir __dir__
# Write the generated grammar into the extension's syntaxes directory.
# NOTE(review): both calls receive the same base path; presumably the
# saveAsYamlTo/saveAsJsonTo helpers append their own .yaml/.json extensions —
# confirm, otherwise the JSON write clobbers the YAML file.
cpp_grammar.saveAsYamlTo("../syntaxes/cpp.tmLanguage")
cpp_grammar.saveAsJsonTo("../syntaxes/cpp.tmLanguage")
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem packaging metadata for dm-ar-finders (DataMapper plugin providing
# ActiveRecord-style finders). Generated by jeweler: prefer editing
# Jeweler::Tasks in the Rakefile and running `rake gemspec` over hand edits.
Gem::Specification.new do |s|
  s.name = %q{dm-ar-finders}
  s.version = "1.1.0"

  # FIX: was Gem::Requirement.new("> 1.3.1"), which excluded RubyGems 1.3.1
  # itself and everything older. Jeweler-generated specs (including the
  # regenerated version of this file) use the unconstrained ">= 0".
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["John W Higgins"]
  s.date = %q{2011-03-10}
  s.description = %q{DataMapper plugin providing ActiveRecord-style finders}
  s.email = %q{john [a] wishVPS [d] com}
  s.extra_rdoc_files = [
    "LICENSE",
    "README.rdoc"
  ]
  s.files = [
    "Gemfile",
    "LICENSE",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "dm-ar-finders.gemspec",
    "lib/dm-ar-finders.rb",
    "spec/integration/ar-finders_spec.rb",
    "spec/rcov.opts",
    "spec/spec.opts",
    "spec/spec_helper.rb",
    "tasks/spec.rake",
    "tasks/yard.rake",
    "tasks/yardstick.rake"
  ]
  s.homepage = %q{http://github.com/datamapper/dm-ar-finders}
  s.require_paths = ["lib"]
  # rubyforge_project= was removed from modern RubyGems; guard so the spec
  # still loads there.
  s.rubyforge_project = %q{datamapper} if s.respond_to? :rubyforge_project=
  s.rubygems_version = %q{1.6.2}
  s.summary = %q{DataMapper plugin providing ActiveRecord-style finders}
  s.test_files = [
    "spec/integration/ar-finders_spec.rb",
    "spec/spec_helper.rb"
  ]

  if s.respond_to? :specification_version then
    s.specification_version = 3

    # RubyGems >= 1.2 distinguishes runtime from development dependencies;
    # older versions only know add_dependency.
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<dm-core>, ["~> 1.1.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.5.2"])
      s.add_development_dependency(%q<rake>, ["~> 0.8.7"])
      s.add_development_dependency(%q<rspec>, ["~> 1.3.1"])
    else
      s.add_dependency(%q<dm-core>, ["~> 1.1.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.5.2"])
      s.add_dependency(%q<rake>, ["~> 0.8.7"])
      s.add_dependency(%q<rspec>, ["~> 1.3.1"])
    end
  else
    s.add_dependency(%q<dm-core>, ["~> 1.1.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.5.2"])
    s.add_dependency(%q<rake>, ["~> 0.8.7"])
    s.add_dependency(%q<rspec>, ["~> 1.3.1"])
  end
end
Updated gemspec: regenerated with rubygems requirement ">= 0" and a new date.
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem packaging metadata for dm-ar-finders (DataMapper plugin providing
# ActiveRecord-style finders). Generated by jeweler — regenerate with
# `rake gemspec` rather than editing by hand.
Gem::Specification.new do |s|
  s.name    = 'dm-ar-finders'
  s.version = '1.1.0'

  s.required_rubygems_version = Gem::Requirement.new('>= 0') if s.respond_to? :required_rubygems_version=
  s.authors     = ['John W Higgins']
  s.date        = '2011-03-16'
  s.description = 'DataMapper plugin providing ActiveRecord-style finders'
  s.email       = 'john [a] wishVPS [d] com'

  s.extra_rdoc_files = %w[LICENSE README.rdoc]
  s.files = %w[
    Gemfile
    LICENSE
    README.rdoc
    Rakefile
    VERSION
    dm-ar-finders.gemspec
    lib/dm-ar-finders.rb
    spec/integration/ar-finders_spec.rb
    spec/rcov.opts
    spec/spec.opts
    spec/spec_helper.rb
    tasks/spec.rake
    tasks/yard.rake
    tasks/yardstick.rake
  ]

  s.homepage          = 'http://github.com/datamapper/dm-ar-finders'
  s.require_paths     = ['lib']
  s.rubyforge_project = 'datamapper'
  s.rubygems_version  = '1.6.2'
  s.summary           = 'DataMapper plugin providing ActiveRecord-style finders'
  s.test_files        = %w[spec/integration/ar-finders_spec.rb spec/spec_helper.rb]

  # Dependency sets, registered below according to RubyGems capabilities.
  runtime_deps     = { 'dm-core' => '~> 1.1.0' }
  development_deps = { 'jeweler' => '~> 1.5.2', 'rake' => '~> 0.8.7', 'rspec' => '~> 1.3.1' }

  if s.respond_to?(:specification_version)
    s.specification_version = 3

    # RubyGems >= 1.2 distinguishes runtime from development dependencies.
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0')
      runtime_deps.each { |gem_name, req| s.add_runtime_dependency(gem_name, [req]) }
      development_deps.each { |gem_name, req| s.add_development_dependency(gem_name, [req]) }
    else
      runtime_deps.merge(development_deps).each { |gem_name, req| s.add_dependency(gem_name, [req]) }
    end
  else
    runtime_deps.merge(development_deps).each { |gem_name, req| s.add_dependency(gem_name, [req]) }
  end
end
|
# Builds and submits an OnApp virtual-machine creation request.
class CreateServer
  # data: server wizard data (name, location, template, sizing, ...)
  # user: owner record supplying the OnApp credentials
  def initialize(data, user)
    @data = data
    @user = user
  end

  # Assemble the VM parameters from @data/@user and submit them via Squall.
  # Returns whatever Squall::VirtualMachine#create returns.
  def process
    location = @data.location
    template = @data.template
    squall = Squall::VirtualMachine.new(uri: ONAPP_CP[:uri], user: @user.onapp_user, pass: @user.onapp_password)
    params = {
      label: @data.name,
      hypervisor_group_id: location.hv_group_id,
      hostname: @data.hostname,
      memory: @data.memory,
      cpus: @data.cpus,
      cpu_shares: 100,
      primary_disk_size: @data.disk_size.to_i - template.required_swap,
      template_id: template.identifier,
      required_virtual_machine_build: 1,
      required_virtual_machine_startup: 1,
      required_ip_address_assignment: 1
    }
    # BUG FIX: this previously tested scan(/vmware/).length > 1, which requires
    # "vmware" to appear TWICE in the provider string, so a plain "vmware"
    # provider was treated as non-VMware. One occurrence is enough to skip the
    # swap disk (include? is equivalent to scan(/vmware/).length > 0).
    params[:swap_disk_size] = template.required_swap unless location.provider.include?('vmware')
    params[:rate_limit] = location.network_limit if location.network_limit.present? && location.network_limit > 0
    # Use MAK licensing when the template is a Windows one.
    params[:licensing_type] = 'mak' if template.os_type.include?('windows') || template.os_distro.include?('windows')
    params[:note] = 'Created with Cloud.net'
    squall.create params
  end

  # First assigned IP address from an OnApp server hash, or '' if none.
  # (The old unused `ip = '0.0.0.0'` local was dead code and is removed.)
  def self.extract_ip(server)
    addresses = server['ip_addresses']
    return addresses.first['ip_address']['address'] if addresses && !addresses.empty?
    ''
  end
end
The VMware check used scan(/vmware/).length > 1, which required two matches; fixed it to length > 0 so a single occurrence of "vmware" in the provider string counts.
# Builds and submits an OnApp virtual-machine creation request.
class CreateServer
  # data: server wizard data (name, location, template, sizing, ...)
  # user: owner record supplying the OnApp credentials
  def initialize(data, user)
    @data = data
    @user = user
  end

  # Assemble the VM parameters from @data/@user and submit them via Squall.
  # Returns whatever Squall::VirtualMachine#create returns.
  def process
    location = @data.location
    template = @data.template
    squall = Squall::VirtualMachine.new(uri: ONAPP_CP[:uri], user: @user.onapp_user, pass: @user.onapp_password)
    params = {
      label: @data.name,
      hypervisor_group_id: location.hv_group_id,
      hostname: @data.hostname,
      memory: @data.memory,
      cpus: @data.cpus,
      cpu_shares: 100,
      primary_disk_size: @data.disk_size.to_i - template.required_swap,
      template_id: template.identifier,
      required_virtual_machine_build: 1,
      required_virtual_machine_startup: 1,
      required_ip_address_assignment: 1
    }
    # Skip the swap disk when the provider string mentions vmware.
    # (include? is the idiomatic form of scan(/vmware/).length > 0.)
    params[:swap_disk_size] = template.required_swap unless location.provider.include?('vmware')
    params[:rate_limit] = location.network_limit if location.network_limit.present? && location.network_limit > 0
    # Use MAK licensing when the template is a Windows one.
    params[:licensing_type] = 'mak' if template.os_type.include?('windows') || template.os_distro.include?('windows')
    params[:note] = 'Created with Cloud.net'
    squall.create params
  end

  # First assigned IP address from an OnApp server hash, or '' if none.
  # FIX: dropped the unused `ip = '0.0.0.0'` local, which was never returned.
  def self.extract_ip(server)
    addresses = server['ip_addresses']
    return addresses.first['ip_address']['address'] if addresses && !addresses.empty?
    ''
  end
end
|
# Searches for people by name with exact, plain and fuzzy matching,
# returning up to @max unique results ordered by match quality.
class PersonSearch
  def initialize query
    @query = clean_query query
    @max = 100
  end

  # Run all searches; exact name matches come first, then name-field,
  # free-text and fuzzy matches, de-duplicated and capped at @max.
  def perform_search
    return [] if @query.blank?
    name_matches, query_matches, fuzzy_matches = perform_searches
    exact_matches = name_matches.select { |p| p.name == @query }
    # PERF: push in place instead of Array#+ so we don't allocate three
    # throwaway intermediate arrays while concatenating the result sets.
    exact_matches.
      push(*name_matches).
      push(*query_matches).
      push(*fuzzy_matches).
      uniq[0..@max - 1]
  end

  private

  # Run the three underlying searches and return their result sets in order.
  def perform_searches
    name_matches = search "name:#{@query}"
    query_matches = search @query
    fuzzy_matches = fuzzy_search
    [name_matches, query_matches, fuzzy_matches]
  end

  # Fuzzy (fuzzy_like_this) query across the person profile fields.
  def fuzzy_search
    search(
      size: @max,
      query: {
        fuzzy_like_this: {
          fields: [
            :name, :tags, :description, :location_in_building, :building,
            :city, :role_and_group, :community_name
          ],
          like_text: @query, prefix_length: 3, ignore_tf: true
        }
      }
    )
  end

  # Normalise the raw query: commas become spaces, runs of spaces collapse,
  # surrounding whitespace is stripped.
  def clean_query query
    query.
      tr(',', ' ').
      squeeze(' ').
      strip
  end

  # Delegate to the Person model's search, limited to @max results.
  def search query
    Person.search_results(query, limit: @max)
  end
end
Join search results with push() instead of Array#+ to avoid allocating intermediate arrays.
# Person search combining exact-name, name-field, free-text and fuzzy
# matching, capped at @max unique results.
class PersonSearch
  def initialize query
    @query = clean_query query
    @max = 100
  end

  # Exact name matches lead, followed by the name-field, free-text and fuzzy
  # result sets; duplicates removed and the list truncated to @max entries.
  def perform_search
    return [] if @query.blank?
    name_matches, query_matches, fuzzy_matches = perform_searches
    results = name_matches.select { |person| person.name == @query }
    [name_matches, query_matches, fuzzy_matches].each do |matches|
      results.push(*matches)
    end
    results.uniq[0..@max - 1]
  end

  private

  # Run the three underlying searches and return their result sets in order.
  def perform_searches
    [search("name:#{@query}"), search(@query), fuzzy_search]
  end

  # Fuzzy (fuzzy_like_this) query across the person profile fields.
  def fuzzy_search
    profile_fields = [
      :name, :tags, :description, :location_in_building, :building,
      :city, :role_and_group, :community_name
    ]
    search(
      size: @max,
      query: {
        fuzzy_like_this: {
          fields: profile_fields,
          like_text: @query, prefix_length: 3, ignore_tf: true
        }
      }
    )
  end

  # Normalise the raw query: commas become spaces, runs of spaces collapse,
  # surrounding whitespace is stripped.
  def clean_query query
    query.tr(',', ' ').squeeze(' ').strip
  end

  # Delegate to the Person model's search, limited to @max results.
  def search query
    Person.search_results(query, limit: @max)
  end
end
|
Carve out the skeleton for the query service (method stubs only, no behavior yet)
# Service-layer entry points for the decision/query workflow.
# NOTE(review): every method below is an unimplemented stub — this class is
# structure only; the descriptions are inferred from the method names and
# should be confirmed as the implementations land.
class QueryService
  WAIT_TIME = 15 * 60 # 15 minutes, expressed in seconds

  # Start a new decision for the given context.
  def self.create_decision(context)
  end

  # Add a user to an existing decision.
  def self.add_user(decision, user)
  end

  # Create a proposal (idea) on a decision for the current participation.
  def self.create_proposal(decision, current_participation, idea)
  end

  # Update a query with a yes
  def self.approve(query, current_participation)
  end

  # Update a query with a no
  def self.reject(query)
  end

  # Close a decision
  def self.close(query)
  end
end
|
# Builds Units and their ClassroomActivities for a teacher.
module Units::Creator
  # Entry point used when the caller already has all the data assembled.
  def self.run(teacher, name, activities_data, classrooms_data)
    create_helper(teacher, name, activities_data, classrooms_data)
  end

  # Assigns a unit template to every classroom the teacher teaches.
  def self.fast_assign_unit_template(teacher_id, unit_template_id)
    # unit fix: pass whole teacher object
    teacher = User.find(teacher_id)
    # this call is unnecessary as we can do sql without it
    unit_template = UnitTemplate.find(unit_template_id)
    activity_entries = unit_template.activities.map { |a| { id: a.id, due_date: nil } }
    # unit fix: may be able to better optimize this one, but possibly not
    classroom_entries = teacher.classrooms_i_teach.map { |c| { id: c.id, student_ids: [], assign_on_join: true } }
    create_helper(teacher, unit_template.name, activity_entries, classroom_entries)
  end

  # Assigns a unit template to a single classroom.
  def self.assign_unit_template_to_one_class(teacher_id, unit_template_id, classroom)
    # unit fix: pass whole teacher object
    teacher = User.find(teacher_id)
    # this call is unnecessary as we can do sql without it
    unit_template = UnitTemplate.find(unit_template_id)
    activity_entries = unit_template.activities.map { |a| { id: a.id, due_date: nil } }
    # wrapped in a one-element array so create_helper can iterate it like the other callers
    create_helper(teacher, unit_template.name, activity_entries, [classroom])
  end

  private

  # Creates the Unit, then one ClassroomActivity per (activity, classroom) pair.
  def self.create_helper(teacher, name, activities_data, classrooms)
    unit = Unit.create!(name: name, user: teacher)
    # pair every activity with the classroom id to create all necessary
    # classroom activities for that classroom
    classrooms.each do |classroom|
      activities_data.product([classroom[:id].to_i]).each do |pair|
        activity_data, classroom_id = pair
        unit.classroom_activities.create!(
          activity_id: activity_data[:id],
          due_date: activity_data[:due_date],
          classroom_id: classroom_id,
          assigned_student_ids: classroom[:student_ids],
          assign_on_join: classroom[:assign_on_join]
        )
      end
    end
    unit.hide_if_no_visible_classroom_activities
    # activity_sessions in the state of 'unstarted' are created automatically
    # by an after_create callback on the classroom_activity model
    AssignActivityWorker.perform_async(teacher.id)
  end
end
Uniq the activity/classroom product in units creator so duplicate classroom activities are not created
# Creates Units and the ClassroomActivities that hang off them.
module Units::Creator
  # Caller supplies fully-assembled activity and classroom data.
  def self.run(teacher, name, activities_data, classrooms_data)
    create_helper(teacher, name, activities_data, classrooms_data)
  end

  # Assigns a unit template across all of a teacher's classrooms.
  def self.fast_assign_unit_template(teacher_id, unit_template_id)
    # unit fix: pass whole teacher object
    teacher = User.find(teacher_id)
    # this call is unnecessary as we can do sql without it
    template = UnitTemplate.find(unit_template_id)
    activities = template.activities.map { |activity| { id: activity.id, due_date: nil } }
    # unit fix: may be able to better optimize this one, but possibly not
    classrooms = teacher.classrooms_i_teach.map do |classroom|
      { id: classroom.id, student_ids: [], assign_on_join: true }
    end
    create_helper(teacher, template.name, activities, classrooms)
  end

  # Assigns a unit template to exactly one classroom.
  def self.assign_unit_template_to_one_class(teacher_id, unit_template_id, classroom)
    # unit fix: pass whole teacher object
    teacher = User.find(teacher_id)
    # this call is unnecessary as we can do sql without it
    template = UnitTemplate.find(unit_template_id)
    activities = template.activities.map { |activity| { id: activity.id, due_date: nil } }
    # one-element array so create_helper can treat it like any classroom list
    create_helper(teacher, template.name, activities, [classroom])
  end

  private

  # Creates the Unit, then one ClassroomActivity per unique (activity, classroom) pair.
  def self.create_helper(teacher, name, activities_data, classrooms)
    unit = Unit.create!(name: name, user: teacher)
    classrooms.each do |classroom|
      # uniq guards against duplicate activity entries producing
      # duplicate classroom activities
      pairs = activities_data.product([classroom[:id].to_i]).uniq
      pairs.each do |activity_data, classroom_id|
        unit.classroom_activities.create!(
          activity_id: activity_data[:id],
          due_date: activity_data[:due_date],
          classroom_id: classroom_id,
          assigned_student_ids: classroom[:student_ids],
          assign_on_join: classroom[:assign_on_join]
        )
      end
    end
    unit.hide_if_no_visible_classroom_activities
    # activity_sessions in the state of 'unstarted' are created automatically
    # by an after_create callback on the classroom_activity model
    AssignActivityWorker.perform_async(teacher.id)
  end
end
|
# We're using https://github.com/ruby-git/ruby-git
# for all git interactions
require "git"
require "tty-command"
require "securerandom"
require "digest"
require "task_queue"
require "faraday-http-cache"
require_relative "../logging_module"
require_relative "../git_monkey_patches"
module FastlaneCI
# Encapsulates all the data that is needed by GitRepo
# We can have various provider_credentials, but they all need to be turned into `GitRepoAuth`s
# This is because different git providers can have different needs for data
# What github needs is an `api_token`, but a local git repo might only need a `password`.
# We'll call both of these "auth_tokens" here, this way we can use GitRepoAuth
# as a way to unify those, and prevent overloading names at the data source.
# Otherwise, in the JSON we'd see "password" but for some repos that might be an auth_token, or an api_token, or password
# Normalized auth data for talking to a git remote.
# Different providers need different secrets (GitHub wants an `api_token`,
# a plain git host may only want a `password`); we flatten them all into a
# single `auth_token` so callers never care which provider they came from.
class GitRepoAuth
  attr_accessor :remote_host # e.g. `github.com` for GitHub-hosted repos
  attr_accessor :username # whatever the remote accepts as a username, usually an email
  attr_accessor :full_name # display name used for commit authorship, usually fastlane.CI
  attr_accessor :auth_token # API key or password, unified under one name

  def initialize(remote_host: nil, username: nil, full_name: nil, auth_token: nil)
    @remote_host, @username, @full_name, @auth_token =
      remote_host, username, full_name, auth_token
  end
end
# Responsible for managing git repos
# This includes the configs repo, but also the actual source code repos
# This class makes sure to use the right credentials, does proper cloning,
# pulling, pushing, git commit messages, etc.
# It is **important** that from the outside you don't access `GitRepoObject.git.something` directly
# as the auth won't be setup. This system is designed to authenticate the user per action, meaning
# that each pull, push, fetch etc. is performed using a specific user
class GitRepo
  include FastlaneCI::Logging

  # @return [GitRepoConfig]
  attr_accessor :git_config

  # @return [GitRepoAuth]
  attr_accessor :repo_auth # whatever pieces of information that can change between git users

  # Path of the temporary git credential-store file written by `setup_auth`
  attr_accessor :temporary_storage_path

  attr_reader :local_folder # where we are keeping the local repo checkout

  # This callback is used when the instance is initialized in async mode, so you can define a proc
  # with the final GitRepo configured.
  # @example
  #   GitRepo.new(..., async_start: true, callback: proc { |repo| puts "This is my final repo #{repo}"; })
  #
  # @return [proc(GitRepo)]
  attr_accessor :callback

  class << self
    attr_accessor :git_action_queue

    # Loads the octokit cache stack for speed-up calls to github service.
    # As explained in: https://github.com/octokit/octokit.rb#caching
    def load_octokit_cache_stack
      @stack ||= Faraday::RackBuilder.new do |builder|
        builder.use(Faraday::HttpCache, serializer: Marshal, shared_cache: false)
        builder.use(Octokit::Response::RaiseError)
        builder.adapter(Faraday.default_adapter)
      end
      return if Octokit.middleware.handlers.include?(Faraday::HttpCache)
      Octokit.middleware = @stack
    end
  end

  GitRepo.git_action_queue = TaskQueue::TaskQueue.new(name: "GitRepo task queue")

  # Initializer for GitRepo class
  # @param git_config [GitConfig]
  # @param provider_credential [ProviderCredential]
  # @param async_start [Bool] Whether the repo should be setup async or not. (Defaults to `false`)
  # @param sync_setup_timeout_seconds [Integer] When in sync setup mode, how many seconds to wait until raise an exception. (Defaults to 300)
  # @param callback [proc(GitRepo)] When in async setup mode, the proc to be called with the final GitRepo setup.
  def initialize(git_config: nil, local_folder: nil, provider_credential: nil, async_start: false, sync_setup_timeout_seconds: 300, callback: nil)
    GitRepo.load_octokit_cache_stack
    logger.debug("Creating repo in #{local_folder} for a copy of #{git_config.git_url}")
    self.validate_initialization_params!(git_config: git_config, local_folder: local_folder, provider_credential: provider_credential, async_start: async_start, callback: callback)
    @git_config = git_config
    @local_folder = local_folder
    @callback = callback

    # Ok, so now we need to pull the bit of information from the credentials that we know we need for git repos
    case provider_credential.type
    when FastlaneCI::ProviderCredential::PROVIDER_CREDENTIAL_TYPES[:github]
      # Package up the authentication parts that are required
      @repo_auth = GitRepoAuth.new(
        remote_host: provider_credential.remote_host,
        username: provider_credential.email,
        full_name: provider_credential.full_name,
        auth_token: provider_credential.api_token
      )
    else
      # if we add another ProviderCredential type, we'll need to figure out what parts of the credential go where
      raise "unsupported credential type: #{provider_credential.type}"
    end

    logger.debug("Adding task to setup repo #{self.git_config.git_url} at: #{local_folder}")
    setup_task = git_action_with_queue(ensure_block: proc { callback_block(async_start) }) do
      logger.debug("Starting setup_repo #{self.git_config.git_url}".freeze)
      self.setup_repo
      logger.debug("Done setup_repo #{self.git_config.git_url}".freeze)
    end

    # if we're starting asynchronously, we can return now.
    if async_start
      logger.debug("Asynchronously starting up repo: #{self.git_config.git_url}")
      return
    end

    logger.debug("Synchronously starting up repo: #{self.git_config.git_url} at: #{local_folder}")
    now = Time.now.utc
    sleep_timeout = now + sync_setup_timeout_seconds # deadline after which we give up waiting
    while !setup_task.completed && now < sleep_timeout
      time_left = sleep_timeout - now
      logger.debug("Not setup yet, sleeping (time before timeout: #{time_left}) #{self.git_config.git_url}")
      sleep(2)
      now = Time.now.utc
    end
    # BUGFIX: raise based on whether setup actually finished, not on the clock.
    # The previous `if now > sleep_timeout` raised even when the task completed
    # during the final sleep, and missed the `now == sleep_timeout` edge.
    raise "Unable to start git repo #{git_config.git_url} in #{sync_setup_timeout_seconds} seconds" unless setup_task.completed
    logger.debug("Done starting up repo: #{self.git_config.git_url}")
  end

  # Opens (or clones) the repo at `local_folder` and brings it into a clean,
  # usable state: clones fresh when missing, re-clones when unreadable or when
  # the origin URL changed, commits stray ci-config changes, otherwise hard
  # resets to `master` and pulls.
  def setup_repo
    retry_count ||= 0
    if File.directory?(self.local_folder)
      # TODO: test if this crashes if it's not a git directory
      begin
        @_git = Git.open(self.local_folder)
      rescue ArgumentError => aex
        logger.debug("Path #{self.local_folder} is not a git directory, deleting and trying again")
        self.clear_directory
        self.clone
        # re-runs the Git.open above; give up after 5 attempts
        retry if (retry_count += 1) < 5
        raise "Exceeded retry count for #{__method__}. Exception: #{aex}"
      end
      repo = self.git
      if repo.index.writable?
        # Things are looking legit so far
        # Now we have to check if the repo is actually from the
        # same repo URL
        if repo.remote("origin").url.casecmp(self.git_config.git_url.downcase).zero?
          # If our current repo is the ci-config repo and has changes on it, we should commit them before
          # other actions, to prevent local changes to be lost.
          # This is a common issue, ci_config repo gets recreated several times through the Services.configuration_git_repo
          # and if some changes in the local repo (added projects, etc.) have been added, they're destroyed.
          # rubocop:disable Metrics/BlockNesting
          if self.local_folder == File.expand_path("~/.fastlane/ci/fastlane-ci-config")
            # TODO: move this stuff out of here
            # TODO: In case there are conflicts with remote, we want to decide which way we take.
            # For now, we merge using the 'recursive' strategy.
            # NOTE(review): ruby-git's status.changed/added/deleted return hashes;
            # `> 0` relies on gem-version-specific behavior — confirm against the
            # pinned ruby-git version (commit_changes! uses `.count == 0` instead).
            if repo.status.changed > 0 || repo.status.added > 0 || repo.status.deleted > 0 || repo.status.untracked > 0
              begin
                repo.add(all: true)
                repo.commit("Sync changes")
                git.push("origin", branch: "master", force: true)
              rescue StandardError => ex
                logger.error("Error commiting changes to ci-config repo")
                logger.error(ex)
              end
            end
          else
            logger.debug("Resetting #{self.git_config.git_url} in setup_repo")
            self.git.reset_hard
            logger.debug("Ensuring we're on `master` for #{self.git_config.git_url} in setup_repo")
            git.branch("master").checkout
            logger.debug("Resetting `master` #{self.git_config.git_url} in setup_repo")
            self.git.reset_hard
            logger.debug("Pulling `master` #{self.git_config.git_url} in setup_repo")
            self.pull
          end
        else
          logger.debug("[#{self.git_config.id}] Repo URL seems to have changed... deleting the old directory and cloning again")
          self.clear_directory
          self.clone
        end
      else
        self.clear_directory
        logger.debug("Cloning #{self.git_config.git_url} into #{self.local_folder} after clearing directory")
        self.clone
      end
    else
      logger.debug("Cloning #{self.git_config.git_url} into #{self.local_folder}")
      self.clone
      # now that we've cloned, we can setup the @_git variable
      @_git = Git.open(self.local_folder)
    end
    logger.debug("Done, now using #{self.local_folder} for #{self.git_config.git_url}")
    # rubocop:enable Metrics/BlockNesting
  end

  # Fails fast when the constructor arguments are missing or inconsistent.
  def validate_initialization_params!(git_config: nil, local_folder: nil, provider_credential: nil, async_start: nil, callback: nil)
    raise "No git config provided" if git_config.nil?
    raise "No local_folder provided" if local_folder.nil?
    raise "No provider_credential provided" if provider_credential.nil?
    raise "Callback provided but not initialized in async mode" if !callback.nil? && !async_start

    credential_type = provider_credential.type
    git_config_credential_type = git_config.provider_credential_type_needed

    credential_mismatch = credential_type != git_config_credential_type
    raise "provider_credential.type and git_config.provider_credential_type_needed mismatch: #{credential_type} vs #{git_config_credential_type}" if credential_mismatch
  end

  # Deletes the entire local checkout. Destructive — callers re-clone afterwards.
  def clear_directory
    logger.debug("Deleting #{self.local_folder}")
    FileUtils.rm_rf(self.local_folder)
  end

  # Returns the absolute path to a file from inside the git repo
  def file_path(file_path)
    File.join(self.local_folder, file_path)
  end

  # Accessor for the underlying ruby-git object; populated by setup_repo.
  # Avoid calling this from the outside — auth is configured per action.
  def git
    return @_git
  end

  # call like you would, but you also get the git repo involved, so it's .each { |git, branch| branch.yolo; git.yolo }
  def git_and_remote_branches_each_async(&each_block)
    git_action_with_queue do
      # BUGFIX(cleanup): removed the `branch_count` accumulator — it was
      # incremented but never read.
      self.git.branches.remote.each do |branch|
        each_block.call(self.git, branch)
      end
    end
  end

  # Return the last commit, that isn't a merge commit
  # Make sure to have checked out the right branch for which
  # you want to get the last commit of
  # NOTE(review): `git.log.first` returns the newest commit regardless of
  # merge status — the "isn't a merge commit" claim is not enforced here.
  def most_recent_commit
    return self.git.log.first
  end

  # Responsible for setting the author information when committing a change
  # NOT PROTECTED BY QUEUE, ONLY CALL WHEN INSIDE A git_action_queue BLOCK
  def setup_author(full_name: self.repo_auth.full_name, username: self.repo_auth.username)
    # TODO: performance implications of settings this every time?
    # TODO: Set actual name + email here
    # TODO: see if we can set credentials here also
    if full_name.nil? || full_name.length == 0
      full_name = "Unknown user"
    end
    logger.debug("Using #{full_name} with #{username} as author information on #{self.git_config.git_url}")
    git.config("user.name", full_name)
    git.config("user.email", username)
  end

  # Lazily creates and returns the directory where credential files are kept.
  def temporary_git_storage
    @temporary_git_storage ||= File.expand_path("~/.fastlane/.tmp")
    FileUtils.mkdir_p(@temporary_git_storage)
    return @temporary_git_storage
  end

  # Responsible for using the auth token to be able to push/pull changes
  # from git remote
  def setup_auth(repo_auth: self.repo_auth)
    # generate a unique file name for this specific user, host, and git url
    git_auth_key = Digest::SHA2.hexdigest(repo_auth.remote_host + repo_auth.username + self.git_config.git_url)
    temporary_storage_path = File.join(self.temporary_git_storage, "git-auth-#{git_auth_key}")
    self.temporary_storage_path = temporary_storage_path
    # More details: https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage

    # Creates the `local_folder` directory if it does not exist
    FileUtils.mkdir_p(self.local_folder) unless File.directory?(self.local_folder)

    store_credentials_command = "git credential-store --file #{temporary_storage_path.shellescape} store"
    content = [
      "protocol=https",
      "host=#{repo_auth.remote_host}",
      "username=#{repo_auth.username}",
      "password=#{repo_auth.auth_token}",
      ""
    ].join("\n")

    scope = "local"
    unless File.directory?(File.join(self.local_folder, ".git"))
      # we don't have a git repo yet, we have no choice
      # TODO: check if we find a better way for the initial clone to work without setting system global state
      scope = "global"
    end
    use_credentials_command = "git config --#{scope} credential.helper 'store --file #{temporary_storage_path.shellescape}' #{self.local_folder}"

    # Uncomment if you want to debug git credential stuff, keeping it commented out because it's very noisy
    # logger.debug("Setting credentials for #{self.git_config.git_url} with command: #{use_credentials_command}")
    cmd = TTY::Command.new(printer: :quiet)
    cmd.run(store_credentials_command, input: content)
    cmd.run(use_credentials_command)
    return temporary_storage_path
  end

  # Removes the credential file written by `setup_auth`, if any.
  def unset_auth
    return unless self.temporary_storage_path.kind_of?(String)
    # TODO: Also auto-clean those files from time to time, on server re-launch maybe, or background worker
    FileUtils.rm(self.temporary_storage_path) if File.exist?(self.temporary_storage_path)
  end

  # Runs the block either on the shared git action queue (with auth cleanup
  # ensured) or inline when the caller already holds the queue.
  def perform_block(use_global_git_mutex: true, &block)
    if use_global_git_mutex
      git_action_with_queue(ensure_block: proc { unset_auth }) { block.call }
    else
      block.call # Assuming all things in the block are synchronous
      self.unset_auth
    end
  end

  def pull(repo_auth: self.repo_auth, use_global_git_mutex: true)
    logger.debug("Enqueuing a pull on `master` (with mutex?: #{use_global_git_mutex}) for #{self.git_config.git_url}")
    self.perform_block(use_global_git_mutex: use_global_git_mutex) do
      logger.info("Starting pull #{self.git_config.git_url}")
      self.setup_auth(repo_auth: repo_auth)
      git.pull
      logger.debug("Done pulling #{self.git_config.git_url}")
    end
  end

  def checkout_branch(branch: nil, repo_auth: self.repo_auth, use_global_git_mutex: true)
    self.perform_block(use_global_git_mutex: use_global_git_mutex) do
      logger.info("Checking out branch: #{branch} from #{self.git_config.git_url}")
      self.setup_auth(repo_auth: repo_auth)
      git.branch(branch).checkout
      logger.debug("Done checking out branch: #{branch} from #{self.git_config.git_url}")
    end
  end

  def checkout_commit(sha: nil, repo_auth: self.repo_auth, use_global_git_mutex: true)
    self.perform_block(use_global_git_mutex: use_global_git_mutex) do
      logger.info("Checking out sha: #{sha} from #{self.git_config.git_url}")
      self.setup_auth(repo_auth: repo_auth)
      git.reset_hard(git.gcommit(sha))
      logger.debug("Done checking out sha: #{sha} from #{self.git_config.git_url}")
    end
  end

  # Discard any changes
  def reset_hard!(use_global_git_mutex: true)
    self.perform_block(use_global_git_mutex: use_global_git_mutex) do
      logger.debug("Starting reset_hard! #{self.git.branch.name} in #{self.git_config.git_url}".freeze)
      self.git.reset_hard
      self.git.clean(force: true, d: true)
      logger.debug("Done reset_hard! #{self.git.branch.name} in #{self.git_config.git_url}".freeze)
    end
  end

  # This method commits all changes (it does NOT push — see `push`)
  # if `file_to_commit` is `nil`, all files will be added
  # TODO: this method isn't actually tested yet
  def commit_changes!(commit_message: nil, file_to_commit: nil, repo_auth: self.repo_auth)
    git_action_with_queue do
      logger.debug("Starting commit_changes! #{self.git_config.git_url} for #{repo_auth.username}")
      raise "file_to_commit not yet implemented" if file_to_commit
      commit_message ||= "Automatic commit by fastlane.ci"

      self.setup_author(full_name: repo_auth.full_name, username: repo_auth.username)

      git.add(all: true) # TODO: for now we only add all files
      changed = git.status.changed
      added = git.status.added
      deleted = git.status.deleted

      if changed.count == 0 && added.count == 0 && deleted.count == 0
        logger.debug("No changes in repo #{self.git_config.full_name}, skipping commit #{commit_message}")
      else
        git.commit(commit_message)
        logger.debug("Done commit_changes! #{self.git_config.full_name} for #{repo_auth.username}")
      end
    end
  end

  def push(repo_auth: self.repo_auth)
    git_action_with_queue(ensure_block: proc { unset_auth }) do
      logger.debug("Pushing to #{self.git_config.git_url}")
      self.setup_author(full_name: repo_auth.full_name, username: repo_auth.username)
      self.temporary_storage_path = self.setup_auth(repo_auth: repo_auth)
      # TODO: how do we handle branches
      self.git.push
      logger.debug("Done pushing to #{self.git_config.git_url}")
    end
  end

  def status
    self.git.status
  end

  # `ensure_block`: block that you want executed after the `&block` finishes executed, even on error
  def git_action_with_queue(ensure_block: nil, &block)
    git_task = TaskQueue::Task.new(work_block: block, ensure_block: ensure_block)
    GitRepo.git_action_queue.add_task_async(task: git_task)
    return git_task
  end

  def fetch(use_global_git_mutex: true)
    logger.debug("Enqueuing a fetch on (with mutex?: #{use_global_git_mutex}) for #{self.git_config.git_url}")
    self.perform_block(use_global_git_mutex: use_global_git_mutex) do
      logger.debug("Starting fetch #{self.git_config.git_url}".freeze)
      self.temporary_storage_path = self.setup_auth(repo_auth: repo_auth)
      self.git.remotes.each { |remote| self.git.fetch(remote) }
      logger.debug("Done fetching #{self.git_config.git_url}".freeze)
    end
  end

  def switch_to_fork(clone_url:, branch:, sha: nil, local_branch_name:, use_global_git_mutex: false)
    self.perform_block(use_global_git_mutex: use_global_git_mutex) do
      logger.debug("Switching to branch #{branch} from forked repo: #{clone_url} (pulling into #{local_branch_name})")
      reset_hard!(use_global_git_mutex: false)
      # TODO: make sure it doesn't exist yet
      git.branch(local_branch_name)
      reset_hard!(use_global_git_mutex: false)
      git.pull(clone_url, branch)
    end
  end

  def clone(repo_auth: self.repo_auth, async: false)
    if async
      logger.debug("Asynchronously cloning #{self.git_config.git_url}".freeze)
      # If we're async, just push it on the queue
      git_action_with_queue(ensure_block: proc { unset_auth }) do
        clone_synchronously(repo_auth: repo_auth)
        logger.debug("Done asynchronously cloning of #{self.git_config.git_url}".freeze)
      end
    else
      logger.debug("Synchronously cloning #{self.git_config.git_url}".freeze)
      clone_synchronously(repo_auth: repo_auth)
      logger.debug("Done synchronously cloning of #{self.git_config.git_url}".freeze)
      unset_auth
    end
  end

  # Invoked by the setup task's ensure block; forwards self to the user
  # callback only when running in async mode.
  def callback_block(async_start)
    # How do we know that the task was successfully finished?
    return if self.callback.nil?
    return unless async_start
    self.callback.call(self)
  end

  private

  def clone_synchronously(repo_auth: self.repo_auth)
    # `@local_folder` is where we store the local git repo
    # fastlane.ci will also delete this directory if it breaks
    # and just re-clones. So make sure it's fine if it gets deleted
    raise "No local folder path available" unless self.local_folder
    logger.debug("Cloning git repo #{self.git_config.git_url}....")

    existing_repo_for_project = File.join(self.local_folder, self.git_config.id)
    # self.git_config.id.length > 1 to ensure we're not empty or a space
    if self.git_config.id.length > 1 && Dir.exist?(existing_repo_for_project)
      logger.debug("Removing existing repo at: #{existing_repo_for_project}")
      require "fileutils"
      # Danger zone
      FileUtils.rm_r(existing_repo_for_project)
    end

    self.temporary_storage_path = self.setup_auth(repo_auth: repo_auth)
    logger.debug("[#{self.git_config.id}]: Cloning git repo #{self.git_config.git_url} to #{@local_folder}")
    Git.clone(self.git_config.git_url,
              "", # checkout into the self.local_folder
              path: self.local_folder,
              recursive: true)
  end
end
end
Add auto-push after we commit
- Controlled via the push_after_commit parameter
# We're using https://github.com/ruby-git/ruby-git
# for all git interactions
require "git"
require "tty-command"
require "securerandom"
require "digest"
require "task_queue"
require "faraday-http-cache"
require_relative "../logging_module"
require_relative "../git_monkey_patches"
module FastlaneCI
# Encapsulates all the data that is needed by GitRepo
# We can have various provider_credentials, but they all need to be turned into `GitRepoAuth`s
# This is because different git providers can have different needs for data
# What github needs is an `api_token`, but a local git repo might only need a `password`.
# We'll call both of these "auth_tokens" here, this way we can use GitRepoAuth
# as a way to unify those, and prevent overloading names at the data source.
# Otherwise, in the JSON we'd see "password" but for some repos that might be an auth_token, or an api_token, or password
# Carries the credentials a GitRepo needs, provider-agnostic.
# GitHub supplies an `api_token`, other hosts may supply a `password`;
# both are stored here as `auth_token` so the rest of the code base never
# has to branch on the provider, and the JSON never shows mixed key names.
class GitRepoAuth
  attr_accessor :remote_host # usually `github.com` when the provider is GitHub
  attr_accessor :username # what the git host expects as a user name (often an email)
  attr_accessor :full_name # human-readable name, e.g. fastlane.CI
  attr_accessor :auth_token # API token or password — one field for either

  def initialize(remote_host: nil, username: nil, full_name: nil, auth_token: nil)
    @auth_token = auth_token
    @full_name = full_name
    @username = username
    @remote_host = remote_host
  end
end
# Responsible for managing git repos
# This includes the configs repo, but also the actual source code repos
# This class makes sure to use the right credentials, does proper cloning,
# pulling, pushing, git commit messages, etc.
# It is **important** that from the outside you don't access `GitRepoObject.git.something` directly
# as the auth won't be setup. This system is designed to authenticate the user per action, meaning
# that each pull, push, fetch etc. is performed using a specific user
class GitRepo
include FastlaneCI::Logging
# @return [GitRepoConfig]
attr_accessor :git_config
# @return [GitRepoAuth]
attr_accessor :repo_auth # whatever pieces of information that can change between git users
attr_accessor :temporary_storage_path
attr_reader :local_folder # where we are keeping the local repo checkout
# This callback is used when the instance is initialized in async mode, so you can define a proc
# with the final GitRepo configured.
# @example
# GitRepo.new(..., async_start: true, callback: proc { |repo| puts "This is my final repo #{repo}"; })
#
# @return [proc(GitRepo)]
attr_accessor :callback
class << self
attr_accessor :git_action_queue
# Loads the octokit cache stack for speed-up calls to github service.
# As explained in: https://github.com/octokit/octokit.rb#caching
def load_octokit_cache_stack
@stack ||= Faraday::RackBuilder.new do |builder|
builder.use(Faraday::HttpCache, serializer: Marshal, shared_cache: false)
builder.use(Octokit::Response::RaiseError)
builder.adapter(Faraday.default_adapter)
end
return if Octokit.middleware.handlers.include?(Faraday::HttpCache)
Octokit.middleware = @stack
end
end
GitRepo.git_action_queue = TaskQueue::TaskQueue.new(name: "GitRepo task queue")
# Initializer for GitRepo class
# @param git_config [GitConfig]
# @param provider_credential [ProviderCredential]
# @param async_start [Bool] Whether the repo should be setup async or not. (Defaults to `false`)
# @param sync_setup_timeout_seconds [Integer] When in sync setup mode, how many seconds to wait until raise an exception. (Defaults to 300)
# @param callback [proc(GitRepo)] When in async setup mode, the proc to be called with the final GitRepo setup.
def initialize(git_config: nil, local_folder: nil, provider_credential: nil, async_start: false, sync_setup_timeout_seconds: 300, callback: nil)
  GitRepo.load_octokit_cache_stack
  logger.debug("Creating repo in #{local_folder} for a copy of #{git_config.git_url}")
  self.validate_initialization_params!(git_config: git_config, local_folder: local_folder, provider_credential: provider_credential, async_start: async_start, callback: callback)
  @git_config = git_config
  @local_folder = local_folder
  @callback = callback

  # Pull out the credential fields the git operations need, normalized into a GitRepoAuth
  case provider_credential.type
  when FastlaneCI::ProviderCredential::PROVIDER_CREDENTIAL_TYPES[:github]
    # Package up the authentication parts that are required
    @repo_auth = GitRepoAuth.new(
      remote_host: provider_credential.remote_host,
      username: provider_credential.email,
      full_name: provider_credential.full_name,
      auth_token: provider_credential.api_token
    )
  else
    # if we add another ProviderCredential type, we'll need to figure out what parts of the credential go where
    raise "unsupported credential type: #{provider_credential.type}"
  end

  logger.debug("Adding task to setup repo #{self.git_config.git_url} at: #{local_folder}")
  setup_task = git_action_with_queue(ensure_block: proc { callback_block(async_start) }) do
    logger.debug("Starting setup_repo #{self.git_config.git_url}".freeze)
    self.setup_repo
    logger.debug("Done setup_repo #{self.git_config.git_url}".freeze)
  end

  # if we're starting asynchronously, we can return now.
  if async_start
    logger.debug("Asynchronously starting up repo: #{self.git_config.git_url}")
    return
  end

  logger.debug("Synchronously starting up repo: #{self.git_config.git_url} at: #{local_folder}")
  now = Time.now.utc
  sleep_timeout = now + sync_setup_timeout_seconds # deadline after which we give up waiting
  while !setup_task.completed && now < sleep_timeout
    time_left = sleep_timeout - now
    logger.debug("Not setup yet, sleeping (time before timeout: #{time_left}) #{self.git_config.git_url}")
    sleep(2)
    now = Time.now.utc
  end
  # BUGFIX: raise based on whether setup actually finished, not on the clock.
  # The previous `if now > sleep_timeout` raised even when the task completed
  # during the final sleep, and missed the `now == sleep_timeout` edge.
  raise "Unable to start git repo #{git_config.git_url} in #{sync_setup_timeout_seconds} seconds" unless setup_task.completed
  logger.debug("Done starting up repo: #{self.git_config.git_url}")
end
# Opens (or clones) the repo at `local_folder` and brings it into a clean,
# usable state:
#   - folder missing              -> clone fresh, then open
#   - not a git repo / unreadable -> delete, re-clone (up to 5 attempts)
#   - origin URL changed          -> delete, re-clone
#   - ci-config repo with changes -> commit and force-push them first
#   - anything else               -> hard reset to `master` and pull
def setup_repo
  retry_count ||= 0
  if File.directory?(self.local_folder)
    # TODO: test if this crashes if it's not a git directory
    begin
      @_git = Git.open(self.local_folder)
    rescue ArgumentError => aex
      logger.debug("Path #{self.local_folder} is not a git directory, deleting and trying again")
      self.clear_directory
      self.clone
      # re-runs the Git.open above; give up after 5 attempts
      retry if (retry_count += 1) < 5
      raise "Exceeded retry count for #{__method__}. Exception: #{aex}"
    end
    repo = self.git
    if repo.index.writable?
      # Things are looking legit so far
      # Now we have to check if the repo is actually from the
      # same repo URL
      if repo.remote("origin").url.casecmp(self.git_config.git_url.downcase).zero?
        # If our current repo is the ci-config repo and has changes on it, we should commit them before
        # other actions, to prevent local changes to be lost.
        # This is a common issue, ci_config repo gets recreated several times through the Services.configuration_git_repo
        # and if some changes in the local repo (added projects, etc.) have been added, they're destroyed.
        # rubocop:disable Metrics/BlockNesting
        if self.local_folder == File.expand_path("~/.fastlane/ci/fastlane-ci-config")
          # TODO: move this stuff out of here
          # TODO: In case there are conflicts with remote, we want to decide which way we take.
          # For now, we merge using the 'recursive' strategy.
          # NOTE(review): ruby-git's status.changed/added/deleted return hashes;
          # `> 0` relies on gem-version-specific behavior — confirm against the
          # pinned ruby-git version (commit_changes! compares `.count == 0` instead).
          if repo.status.changed > 0 || repo.status.added > 0 || repo.status.deleted > 0 || repo.status.untracked > 0
            begin
              repo.add(all: true)
              repo.commit("Sync changes")
              git.push("origin", branch: "master", force: true)
            rescue StandardError => ex
              logger.error("Error commiting changes to ci-config repo")
              logger.error(ex)
            end
          end
        else
          logger.debug("Resetting #{self.git_config.git_url} in setup_repo")
          self.git.reset_hard
          logger.debug("Ensuring we're on `master` for #{self.git_config.git_url} in setup_repo")
          git.branch("master").checkout
          logger.debug("Resetting `master` #{self.git_config.git_url} in setup_repo")
          self.git.reset_hard
          logger.debug("Pulling `master` #{self.git_config.git_url} in setup_repo")
          self.pull
        end
      else
        logger.debug("[#{self.git_config.id}] Repo URL seems to have changed... deleting the old directory and cloning again")
        self.clear_directory
        self.clone
      end
    else
      self.clear_directory
      logger.debug("Cloning #{self.git_config.git_url} into #{self.local_folder} after clearing directory")
      self.clone
    end
  else
    logger.debug("Cloning #{self.git_config.git_url} into #{self.local_folder}")
    self.clone
    # now that we've cloned, we can setup the @_git variable
    @_git = Git.open(self.local_folder)
  end
  logger.debug("Done, now using #{self.local_folder} for #{self.git_config.git_url}")
  # rubocop:enable Metrics/BlockNesting
end
# Sanity-checks the arguments passed to `initialize` and raises with a
# descriptive message on the first invalid one.
#
# Raises a RuntimeError when:
#   - git_config / local_folder / provider_credential is nil
#   - a callback is supplied although async mode is off
#   - the credential type doesn't match what the git config requires
def validate_initialization_params!(git_config: nil, local_folder: nil, provider_credential: nil, async_start: nil, callback: nil)
  raise "No git config provided" if git_config.nil?
  raise "No local_folder provided" if local_folder.nil?
  raise "No provider_credential provided" if provider_credential.nil?
  raise "Callback provided but not initialized in async mode" if !callback.nil? && !async_start

  provided_type = provider_credential.type
  required_type = git_config.provider_credential_type_needed
  if provided_type != required_type
    raise "provider_credential.type and git_config.provider_credential_type_needed mismatch: #{provided_type} vs #{required_type}"
  end
end
# Removes the local checkout directory (and everything inside it).
# fastlane.ci treats the checkout as disposable, so a full rm -rf is fine.
def clear_directory
  folder = self.local_folder
  logger.debug("Deleting #{folder}")
  FileUtils.rm_rf(folder)
end
# Returns the absolute path to a file from inside the git repo,
# i.e. `file_path` resolved against the local checkout directory.
def file_path(file_path)
  return File.join(self.local_folder, file_path)
end
# The underlying git handle (`@_git`); nil until the repo has been
# cloned/opened and the variable was populated.
def git
  @_git
end
# call like you would self.git.branches.remote.each { |branch| branch.yolo }
# call like you would, but you also get the git repo involved, so it's .each { |git, branch| branch.yolo; git.yolo }
#
# Enqueues (on the shared git action queue) an iteration over all *remote*
# branches; `each_block` is invoked with (git, branch) for each of them.
# Fixed: removed the `branch_count` local that was incremented but never read.
def git_and_remote_branches_each_async(&each_block)
  git_action_with_queue do
    self.git.branches.remote.each do |branch|
      each_block.call(self.git, branch)
    end
  end
end
# Return the last commit, that isn't a merge commit
# Make sure to have checked out the right branch for which
# you want to get the last commit of
def most_recent_commit
  self.git.log.first
end
# Responsible for setting the author information when committing a change
# NOT PROTECTED BY QUEUE, ONLY CALL WHEN INSIDE A git_action_queue BLOCK
#
# Sets `user.name` / `user.email` on the repo's git config. Falls back to
# "Unknown user" when no full name is available. Note: the *username* is
# written into `user.email` — that mirrors the original behavior.
# TODO: performance implications of setting this every time?
# TODO: Set actual name + email here
# TODO: see if we can set credentials here also
def setup_author(full_name: self.repo_auth.full_name, username: self.repo_auth.username)
  # Idiomatic emptiness check (was: full_name.length == 0)
  if full_name.nil? || full_name.empty?
    full_name = "Unknown user"
  end
  logger.debug("Using #{full_name} with #{username} as author information on #{self.git_config.git_url}")
  git.config("user.name", full_name)
  git.config("user.email", username)
end
# Directory used to hold per-repo git credential-store files
# (~/.fastlane/.tmp). Memoized; the directory is (re)created on every call
# so callers can rely on it existing.
def temporary_git_storage
  storage = (@temporary_git_storage ||= File.expand_path("~/.fastlane/.tmp"))
  FileUtils.mkdir_p(storage)
  storage
end
# Responsible for using the auth token to be able to push/pull changes
# from git remote
#
# Writes an https credential entry into a per-(host, user, repo) file under
# `temporary_git_storage` and points git's credential.helper at it.
# Returns the path of that credential file (also stored in
# `temporary_storage_path` so `unset_auth` can clean it up).
def setup_auth(repo_auth: self.repo_auth)
  # generate a unique file name for this specific user, host, and git url
  git_auth_key = Digest::SHA2.hexdigest(repo_auth.remote_host + repo_auth.username + self.git_config.git_url)
  temporary_storage_path = File.join(self.temporary_git_storage, "git-auth-#{git_auth_key}")
  self.temporary_storage_path = temporary_storage_path
  # More details: https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage

  # Creates the `local_folder` directory if it does not exist
  FileUtils.mkdir_p(self.local_folder) unless File.directory?(self.local_folder)

  store_credentials_command = "git credential-store --file #{temporary_storage_path.shellescape} store"
  content = [
    "protocol=https",
    "host=#{repo_auth.remote_host}",
    "username=#{repo_auth.username}",
    "password=#{repo_auth.auth_token}",
    ""
  ].join("\n")

  scope = "local"
  unless File.directory?(File.join(self.local_folder, ".git"))
    # we don't have a git repo yet, we have no choice
    # TODO: check if we find a better way for the initial clone to work without setting system global state
    scope = "global"
  end
  # Fixed: `local_folder` is now shell-escaped too — previously it was
  # interpolated raw and would break on paths containing spaces or shell
  # metacharacters.
  use_credentials_command = "git config --#{scope} credential.helper 'store --file #{temporary_storage_path.shellescape}' #{self.local_folder.shellescape}"

  # Uncomment if you want to debug git credential stuff, keeping it commented out because it's very noisey
  # logger.debug("Setting credentials for #{self.git_config.git_url} with command: #{use_credentials_command}")
  cmd = TTY::Command.new(printer: :quiet)
  cmd.run(store_credentials_command, input: content)
  cmd.run(use_credentials_command)
  return temporary_storage_path
end
# Deletes the temporary credential-store file written by `setup_auth`.
# No-ops (returns nil) when no path was recorded.
# TODO: Also auto-clean those files from time to time, on server re-launch maybe, or background worker
def unset_auth
  path = self.temporary_storage_path
  return unless path.kind_of?(String)
  FileUtils.rm(path) if File.exist?(path)
end
# Runs `block` either enqueued on the global git action queue (default) or
# synchronously right here; in both cases `unset_auth` runs afterwards.
def perform_block(use_global_git_mutex: true, &block)
  unless use_global_git_mutex
    block.call # Assuming all things in the block are synchronous
    return self.unset_auth
  end
  git_action_with_queue(ensure_block: proc { unset_auth }) { block.call }
end
# Enqueues (or runs directly, depending on the mutex flag) a `git pull`
# on the current branch, setting up credentials first.
def pull(repo_auth: self.repo_auth, use_global_git_mutex: true)
  repo_url = self.git_config.git_url
  logger.debug("Enqueuing a pull on `master` (with mutex?: #{use_global_git_mutex}) for #{repo_url}")
  self.perform_block(use_global_git_mutex: use_global_git_mutex) do
    logger.info("Starting pull #{repo_url}")
    self.setup_auth(repo_auth: repo_auth)
    git.pull
    logger.debug("Done pulling #{repo_url}")
  end
end
# Checks out `branch`, setting up credentials first. Runs via
# `perform_block`, so it honors the global git mutex by default.
def checkout_branch(branch: nil, repo_auth: self.repo_auth, use_global_git_mutex: true)
  repo_url = self.git_config.git_url
  self.perform_block(use_global_git_mutex: use_global_git_mutex) do
    logger.info("Checking out branch: #{branch} from #{repo_url}")
    self.setup_auth(repo_auth: repo_auth)
    git.branch(branch).checkout
    logger.debug("Done checking out branch: #{branch} from #{repo_url}")
  end
end
# Hard-resets the working copy to the commit identified by `sha`,
# setting up credentials first.
def checkout_commit(sha: nil, repo_auth: self.repo_auth, use_global_git_mutex: true)
  repo_url = self.git_config.git_url
  self.perform_block(use_global_git_mutex: use_global_git_mutex) do
    logger.info("Checking out sha: #{sha} from #{repo_url}")
    self.setup_auth(repo_auth: repo_auth)
    git.reset_hard(git.gcommit(sha))
    logger.debug("Done checking out sha: #{sha} from #{repo_url}")
  end
end
# Discard any changes
#
# Hard-resets the current branch and removes untracked files/directories
# (`git clean -fd`).
# Fixed: dropped the `.freeze` calls on interpolated log strings — the
# string is freshly allocated on every call, so freezing it has no effect.
def reset_hard!(use_global_git_mutex: true)
  self.perform_block(use_global_git_mutex: use_global_git_mutex) do
    logger.debug("Starting reset_hard! #{self.git.branch.name} in #{self.git_config.git_url}")
    self.git.reset_hard
    self.git.clean(force: true, d: true)
    logger.debug("Done reset_hard! #{self.git.branch.name} in #{self.git_config.git_url}")
  end
end
# This method commits and pushes all changes
# if `file_to_commit` is `nil`, all files will be added
# TODO: this method isn't actually tested yet
#
# Enqueued on the git action queue. Stages everything (`git add -A`),
# checks whether the index actually has changes, and only then commits
# (and optionally pushes). A nil `commit_message` falls back to a default.
def commit_changes!(commit_message: nil, push_after_commit: true, file_to_commit: nil, repo_auth: self.repo_auth)
  git_action_with_queue do
    logger.debug("Starting commit_changes! #{self.git_config.git_url} for #{repo_auth.username}")
    # Per-file commits are not supported yet; only the add-everything path exists.
    raise "file_to_commit not yet implemented" if file_to_commit
    commit_message ||= "Automatic commit by fastlane.ci"

    setup_author(full_name: repo_auth.full_name, username: repo_auth.username)

    git.add(all: true) # TODO: for now we only add all files

    # Snapshot the status sections; each `git.status` call below re-queries git.
    changed = git.status.changed
    added = git.status.added
    deleted = git.status.deleted
    untracked = git.status.untracked

    if changed.count == 0 && added.count == 0 && deleted.count == 0 && untracked.count == 0
      # Nothing staged — skip the commit entirely so we don't create empty commits.
      logger.debug("No changes in repo #{self.git_config.full_name}, skipping commit #{commit_message}")
    else
      git.commit(commit_message)
      # mutex is false because we're already inside a queued git action
      push(use_global_git_mutex: false) if push_after_commit
      logger.debug("Done commit_changes! #{self.git_config.full_name} for #{repo_auth.username}")
    end
  end
end
# Pushes the current branch to the remote, configuring author and
# credentials first.
def push(use_global_git_mutex: true, repo_auth: self.repo_auth)
  repo_url = self.git_config.git_url
  perform_block(use_global_git_mutex: use_global_git_mutex) do
    logger.debug("Pushing to #{repo_url}")
    self.setup_author(full_name: repo_auth.full_name, username: repo_auth.username)
    self.temporary_storage_path = self.setup_auth(repo_auth: repo_auth)
    # TODO: how do we handle branches
    self.git.push
    logger.debug("Done pushing to #{repo_url}")
  end
end
# Current `git status` of the local checkout (delegates to the git handle).
def status
  git.status
end
# `ensure_block`: block that you want executed after the `&block` finishes executed, even on error
#
# Wraps `block` in a TaskQueue::Task and enqueues it on the shared,
# class-level git action queue. Returns the task so callers can track it.
def git_action_with_queue(ensure_block: nil, &block)
  task = TaskQueue::Task.new(work_block: block, ensure_block: ensure_block)
  GitRepo.git_action_queue.add_task_async(task: task)
  task
end
# Fetches all remotes, setting up credentials first.
# Fixed: the enqueue log message was garbled ("a fetch on (with mutex?…");
# also dropped the no-op `.freeze` calls on interpolated strings.
def fetch(use_global_git_mutex: true)
  logger.debug("Enqueuing a fetch (with mutex?: #{use_global_git_mutex}) for #{self.git_config.git_url}")
  self.perform_block(use_global_git_mutex: use_global_git_mutex) do
    logger.debug("Starting fetch #{self.git_config.git_url}")
    self.temporary_storage_path = self.setup_auth(repo_auth: repo_auth)
    self.git.remotes.each { |remote| self.git.fetch(remote) }
    logger.debug("Done fetching #{self.git_config.git_url}")
  end
end
# Switches the working copy to `branch` of a forked repo (`clone_url`) by
# creating a local branch `local_branch_name` and pulling the fork's branch
# into it. Discards local changes (reset_hard!) before and after branching.
# NOTE(review): the `sha:` parameter is accepted but never used in this
# body — confirm whether callers expect the checkout to land on that sha.
def switch_to_fork(clone_url:, branch:, sha: nil, local_branch_name:, use_global_git_mutex: false)
  self.perform_block(use_global_git_mutex: use_global_git_mutex) do
    logger.debug("Switching to branch #{branch} from forked repo: #{clone_url} (pulling into #{local_branch_name})")
    reset_hard!(use_global_git_mutex: false)
    # TODO: make sure it doesn't exist yet
    git.branch(local_branch_name)
    reset_hard!(use_global_git_mutex: false)
    git.pull(clone_url, branch)
  end
end
# Clones the configured repo, either synchronously (default) or enqueued
# on the git action queue when `async: true`. Credentials are cleaned up
# afterwards in both modes.
def clone(repo_auth: self.repo_auth, async: false)
  unless async
    logger.debug("Synchronously cloning #{self.git_config.git_url}".freeze)
    clone_synchronously(repo_auth: repo_auth)
    logger.debug("Done synchronously cloning of #{self.git_config.git_url}".freeze)
    return unset_auth
  end
  logger.debug("Asynchronously cloning #{self.git_config.git_url}".freeze)
  # If we're async, just push it on the queue
  git_action_with_queue(ensure_block: proc { unset_auth }) do
    clone_synchronously(repo_auth: repo_auth)
    logger.debug("Done asynchronously cloning of #{self.git_config.git_url}".freeze)
  end
end
# Invokes the configured callback with this repo, but only when a callback
# exists AND we were started in async mode.
# How do we know that the task was successfully finished?
def callback_block(async_start)
  on_ready = self.callback
  return if on_ready.nil?
  return unless async_start
  on_ready.call(self)
end
private
# Performs the actual blocking clone into `local_folder`.
# `@local_folder` is where we store the local git repo
# fastlane.ci will also delete this directory if it breaks
# and just re-clones. So make sure it's fine if it gets deleted
def clone_synchronously(repo_auth: self.repo_auth)
  raise "No local folder path available" unless self.local_folder
  logger.debug("Cloning git repo #{self.git_config.git_url}....")

  # A stale per-project checkout from a previous run is removed first.
  existing_repo_for_project = File.join(self.local_folder, self.git_config.id)
  # self.git_config.id.length > 1 to ensure we're not empty or a space
  if self.git_config.id.length > 1 && Dir.exist?(existing_repo_for_project)
    logger.debug("Removing existing repo at: #{existing_repo_for_project}")
    require "fileutils"
    # Danger zone
    FileUtils.rm_r(existing_repo_for_project)
  end

  self.temporary_storage_path = self.setup_auth(repo_auth: repo_auth)
  logger.debug("[#{self.git_config.id}]: Cloning git repo #{self.git_config.git_url} to #{@local_folder}")
  # NOTE(review): the empty-string name plus `path:` makes the git gem check
  # out directly into self.local_folder — confirm against the git gem version
  # in use before changing.
  Git.clone(self.git_config.git_url,
            "", # checkout into the self.local_folder
            path: self.local_folder,
            recursive: true)
end
end
end
|
require 'test_helper'

# Smoke tests for GirFFI::PrettyPrinter: they only verify that construction
# and pretty_print run without raising — no assertions on emitted output.
describe GirFFI::PrettyPrinter do
  describe "#initialize" do
    it "takes no arguments" do
      GirFFI::PrettyPrinter.new
    end
  end

  describe "#pretty_print" do
    let(:instance) { GirFFI::PrettyPrinter.new }

    it "pretty-prints a module without type specification" do
      # Requires the GObject introspection data to be installed.
      instance.pretty_print 'GObject'
    end
  end
end
Clean up unit test
require 'test_helper'

# Cleaned-up variant of the PrettyPrinter spec: the empty #initialize case
# was dropped and the remaining case passes explicitly.
describe GirFFI::PrettyPrinter do
  describe "#pretty_print" do
    let(:instance) { GirFFI::PrettyPrinter.new }

    it "pretty-prints a module without version specification" do
      instance.pretty_print 'GObject'
      # `pass` records an explicit passing assertion so minitest doesn't
      # count this as an assertion-less test.
      pass
    end
  end
end
|
require File.join(File.dirname(__FILE__), "spec_helper")
# Core Sequel::Dataset behavior: construction, clone_merge chaining,
# Enumerable inclusion, and the abstract adapter-interface methods.
context "Dataset" do
  setup do
    @dataset = Sequel::Dataset.new("db")
  end

  specify "should accept database and opts in initialize" do
    db = "db"
    opts = {:from => :test}
    d = Sequel::Dataset.new(db, opts)
    d.db.should be(db)
    d.opts.should be(opts)

    d = Sequel::Dataset.new(db)
    d.db.should be(db)
    d.opts.should be_a_kind_of(Hash)
    d.opts.should == {}
  end

  specify "should provide clone_merge for chainability." do
    d1 = @dataset.clone_merge(:from => :test)
    d1.class.should == @dataset.class
    d1.should_not == @dataset
    d1.db.should be(@dataset.db)
    d1.opts[:from].should == :test
    @dataset.opts[:from].should be_nil

    d2 = d1.clone_merge(:order => :name)
    d2.class.should == @dataset.class
    d2.should_not == d1
    d2.should_not == @dataset
    d2.db.should be(@dataset.db)
    d2.opts[:from].should == :test
    d2.opts[:order].should == :name
    d1.opts[:order].should be_nil
  end

  specify "should include Enumerable" do
    Sequel::Dataset.included_modules.should include(Enumerable)
  end

  # Fixed description: there is no "ImplementedError" class — the expected
  # exception (and what the assertions check) is NotImplementedError.
  specify "should raise NotImplementedError for the dataset interface methods" do
    proc {@dataset.fetch_rows('abc')}.should raise_error(NotImplementedError)
    proc {@dataset.insert(1, 2, 3)}.should raise_error(NotImplementedError)
    proc {@dataset.update(:name => 'abc')}.should raise_error(NotImplementedError)
    proc {@dataset.delete}.should raise_error(NotImplementedError)
  end
end
# Dataset#clone_merge: returns a copy with the given options merged in;
# the receiver is never mutated and extended modules carry over.
context "Dataset#clone_merge" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:items)
  end

  specify "should return a clone self" do
    clone = @dataset.clone_merge({})
    clone.class.should == @dataset.class
    clone.db.should == @dataset.db
    clone.opts.should == @dataset.opts
  end

  specify "should merge the specified options" do
    clone = @dataset.clone_merge(1 => 2)
    clone.opts.should == {1 => 2, :from => [:items]}
  end

  specify "should overwrite existing options" do
    clone = @dataset.clone_merge(:from => [:other])
    clone.opts.should == {:from => [:other]}
  end

  specify "should create a clone with a deep copy of options" do
    clone = @dataset.clone_merge(:from => [:other])
    @dataset.opts[:from].should == [:items]
    clone.opts[:from].should == [:other]
  end

  specify "should return an object with the same modules included" do
    m = Module.new do
      def __xyz__; "xyz"; end
    end
    @dataset.extend(m)
    @dataset.clone_merge({}).should respond_to(:__xyz__)
  end
end
# SQL generation for a single-table dataset: SELECT/DELETE/INSERT/UPDATE
# statement formatting for the various supported value types.
context "A simple dataset" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end

  specify "should format a select statement" do
    @dataset.select_sql.should == 'SELECT * FROM test'
  end

  specify "should format a delete statement" do
    @dataset.delete_sql.should == 'DELETE FROM test'
  end

  specify "should format an insert statement with default values" do
    @dataset.insert_sql.should == 'INSERT INTO test DEFAULT VALUES'
  end

  specify "should format an insert statement with hash" do
    # Hash ordering is not guaranteed here, hence the regex alternation.
    @dataset.insert_sql(:name => 'wxyz', :price => 342).
      should match(/INSERT INTO test \(name, price\) VALUES \('wxyz', 342\)|INSERT INTO test \(price, name\) VALUES \(342, 'wxyz'\)/)

    @dataset.insert_sql({}).should == "INSERT INTO test DEFAULT VALUES"
  end

  specify "should format an insert statement with array with keys" do
    # `Array#keys=` comes from Sequel's array-keys extension.
    v = [1, 2, 3]
    v.keys = [:a, :b, :c]
    @dataset.insert_sql(v).should == "INSERT INTO test (a, b, c) VALUES (1, 2, 3)"

    v = []
    v.keys = [:a, :b]
    @dataset.insert_sql(v).should == "INSERT INTO test DEFAULT VALUES"
  end

  specify "should format an insert statement with string keys" do
    @dataset.insert_sql('name' => 'wxyz', 'price' => 342).
      should match(/INSERT INTO test \(name, price\) VALUES \('wxyz', 342\)|INSERT INTO test \(price, name\) VALUES \(342, 'wxyz'\)/)
  end

  specify "should format an insert statement with a model instance" do
    dbb = Sequel::Database.new
    @c = Class.new(Sequel::Model) do
      attr_accessor :values
    end
    v = @c.new; v.values = {:a => 1}
    @dataset.insert_sql(v).should == "INSERT INTO test (a) VALUES (1)"

    v = @c.new; v.values = {}
    @dataset.insert_sql(v).should == "INSERT INTO test DEFAULT VALUES"
  end

  specify "should format an insert statement with an arbitrary value" do
    @dataset.insert_sql(123).should == "INSERT INTO test VALUES (123)"
  end

  specify "should format an insert statement with sub-query" do
    @sub = Sequel::Dataset.new(nil).from(:something).filter(:x => 2)
    @dataset.insert_sql(@sub).should == \
      "INSERT INTO test (SELECT * FROM something WHERE (x = 2))"
  end

  specify "should format an insert statement with array" do
    @dataset.insert_sql('a', 2, 6.5).should ==
      "INSERT INTO test VALUES ('a', 2, 6.5)"
  end

  specify "should format an update statement" do
    @dataset.update_sql(:name => 'abc').should ==
      "UPDATE test SET name = 'abc'"
    # `:x << :y` is Sequel's old proc-expression syntax for `SET x = y`.
    @dataset.update_sql {:x << :y}.should ==
      "UPDATE test SET x = y"
  end

  specify "should format an update statement with array with keys" do
    v = ['abc']
    v.keys = [:name]
    @dataset.update_sql(v).should == "UPDATE test SET name = 'abc'"
  end

  specify "should be able to return rows for arbitrary SQL" do
    @dataset.select_sql(:sql => 'xxx yyy zzz').should ==
      "xxx yyy zzz"
  end
end
# Multi-table FROM clauses support SELECT but reject UPDATE/DELETE
# (ambiguous target).
context "A dataset with multiple tables in its FROM clause" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:t1, :t2)
  end

  specify "should raise on #update_sql" do
    proc {@dataset.update_sql(:a=>1)}.should raise_error
  end

  specify "should raise on #delete_sql" do
    proc {@dataset.delete_sql}.should raise_error
  end

  specify "should generate a select query FROM all specified tables" do
    @dataset.select_sql.should == "SELECT * FROM t1, t2"
  end
end
# Dataset#where / #filter: WHERE-clause generation from hashes, arrays
# (placeholder style), raw strings, ranges, subqueries, and Sequel's
# old-style proc expressions; plus composability via AND.
context "Dataset#where" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
    @d1 = @dataset.where(:region => 'Asia')      # hash condition
    @d2 = @dataset.where('(region = ?)', 'Asia') # array/placeholder condition
    @d3 = @dataset.where("(a = 1)")              # raw string condition
  end

  specify "should work with hashes" do
    @dataset.where(:name => 'xyz', :price => 342).select_sql.
      should match(/WHERE \(name = 'xyz'\) AND \(price = 342\)|WHERE \(price = 342\) AND \(name = 'xyz'\)/)
  end

  specify "should work with arrays (ala ActiveRecord)" do
    @dataset.where('price < ? AND id in (?)', 100, [1, 2, 3]).select_sql.should ==
      "SELECT * FROM test WHERE price < 100 AND id in (1, 2, 3)"
  end

  specify "should work with strings (custom SQL expressions)" do
    @dataset.where('(a = 1 AND b = 2)').select_sql.should ==
      "SELECT * FROM test WHERE (a = 1 AND b = 2)"
  end

  specify "should affect select, delete and update statements" do
    @d1.select_sql.should == "SELECT * FROM test WHERE (region = 'Asia')"
    @d1.delete_sql.should == "DELETE FROM test WHERE (region = 'Asia')"
    @d1.update_sql(:GDP => 0).should == "UPDATE test SET GDP = 0 WHERE (region = 'Asia')"

    @d2.select_sql.should == "SELECT * FROM test WHERE (region = 'Asia')"
    @d2.delete_sql.should == "DELETE FROM test WHERE (region = 'Asia')"
    @d2.update_sql(:GDP => 0).should == "UPDATE test SET GDP = 0 WHERE (region = 'Asia')"

    @d3.select_sql.should == "SELECT * FROM test WHERE (a = 1)"
    @d3.delete_sql.should == "DELETE FROM test WHERE (a = 1)"
    @d3.update_sql(:GDP => 0).should == "UPDATE test SET GDP = 0 WHERE (a = 1)"
  end

  specify "should be composable using AND operator (for scoping)" do
    # hashes are merged, no problem
    @d1.where(:size => 'big').select_sql.should ==
      "SELECT * FROM test WHERE (region = 'Asia') AND (size = 'big')"

    # hash and string
    @d1.where('population > 1000').select_sql.should ==
      "SELECT * FROM test WHERE (region = 'Asia') AND (population > 1000)"
    @d1.where('(a > 1) OR (b < 2)').select_sql.should ==
      "SELECT * FROM test WHERE (region = 'Asia') AND ((a > 1) OR (b < 2))"

    # hash and array
    @d1.where('(GDP > ?)', 1000).select_sql.should ==
      "SELECT * FROM test WHERE (region = 'Asia') AND (GDP > 1000)"

    # array and array
    @d2.where('(GDP > ?)', 1000).select_sql.should ==
      "SELECT * FROM test WHERE (region = 'Asia') AND (GDP > 1000)"

    # array and hash
    @d2.where(:name => ['Japan', 'China']).select_sql.should ==
      "SELECT * FROM test WHERE (region = 'Asia') AND (name IN ('Japan', 'China'))"

    # array and string
    @d2.where('GDP > ?').select_sql.should ==
      "SELECT * FROM test WHERE (region = 'Asia') AND (GDP > ?)"

    # string and string
    @d3.where('b = 2').select_sql.should ==
      "SELECT * FROM test WHERE (a = 1) AND (b = 2)"

    # string and hash
    @d3.where(:c => 3).select_sql.should ==
      "SELECT * FROM test WHERE (a = 1) AND (c = 3)"

    # string and array
    @d3.where('(d = ?)', 4).select_sql.should ==
      "SELECT * FROM test WHERE (a = 1) AND (d = 4)"

    # string and proc expr
    @d3.where {:e < 5}.select_sql.should ==
      "SELECT * FROM test WHERE (a = 1) AND (e < 5)"
  end

  # NOTE(review): the description says "should raise" but the body asserts
  # the call does NOT raise — consider renaming this spec.
  specify "should raise if the dataset is grouped" do
    proc {@dataset.group(:t).where(:a => 1)}.should_not raise_error
    @dataset.group(:t).where(:a => 1).sql.should ==
      "SELECT * FROM test WHERE (a = 1) GROUP BY t"
  end

  specify "should accept ranges" do
    @dataset.filter(:id => 4..7).sql.should ==
      'SELECT * FROM test WHERE (id >= 4 AND id <= 7)'
    @dataset.filter(:id => 4...7).sql.should ==
      'SELECT * FROM test WHERE (id >= 4 AND id < 7)'

    @dataset.filter {:id == (4..7)}.sql.should ==
      'SELECT * FROM test WHERE (id >= 4 AND id <= 7)'
    @dataset.filter {:id.in?(4..7)}.sql.should ==
      'SELECT * FROM test WHERE (id >= 4 AND id <= 7)'

    # `:table__id` denotes the qualified column table.id
    @dataset.filter(:table__id => 4..7).sql.should ==
      'SELECT * FROM test WHERE (table.id >= 4 AND table.id <= 7)'
    @dataset.filter(:table__id => 4...7).sql.should ==
      'SELECT * FROM test WHERE (table.id >= 4 AND table.id < 7)'

    @dataset.filter {:table__id == (4..7)}.sql.should ==
      'SELECT * FROM test WHERE (table.id >= 4 AND table.id <= 7)'
    @dataset.filter {:table__id.in?(4..7)}.sql.should ==
      'SELECT * FROM test WHERE (table.id >= 4 AND table.id <= 7)'
  end

  specify "should accept nil" do
    @dataset.filter(:owner_id => nil).sql.should ==
      'SELECT * FROM test WHERE (owner_id IS NULL)'
    @dataset.filter{:owner_id.nil?}.sql.should ==
      'SELECT * FROM test WHERE (owner_id IS NULL)'
  end

  specify "should accept a subquery" do
    # select all countries that have GDP greater than the average for Asia
    @dataset.filter('gdp > ?', @d1.select(:gdp.AVG)).sql.should ==
      "SELECT * FROM test WHERE gdp > (SELECT avg(gdp) FROM test WHERE (region = 'Asia'))"

    @dataset.filter(:id => @d1.select(:id)).sql.should ==
      "SELECT * FROM test WHERE (id IN (SELECT id FROM test WHERE (region = 'Asia')))"
  end

  specify "should accept a subquery for an EXISTS clause" do
    a = @dataset.filter {:price < 100}
    @dataset.filter(a.exists).sql.should ==
      'SELECT * FROM test WHERE EXISTS (SELECT 1 FROM test WHERE (price < 100))'
  end

  specify "should accept proc expressions" do
    d = @d1.select(:gdp.AVG)
    @dataset.filter {:gdp > d}.sql.should ==
      "SELECT * FROM test WHERE (gdp > (SELECT avg(gdp) FROM test WHERE (region = 'Asia')))"

    @dataset.filter {:id.in(4..7)}.sql.should ==
      'SELECT * FROM test WHERE (id >= 4 AND id <= 7)'

    @dataset.filter {:c == 3}.sql.should ==
      'SELECT * FROM test WHERE (c = 3)'

    @dataset.filter {:id == :items__id}.sql.should ==
      'SELECT * FROM test WHERE (id = items.id)'

    @dataset.filter {:a < 1}.sql.should ==
      'SELECT * FROM test WHERE (a < 1)'

    @dataset.filter {:a != 1}.sql.should ==
      'SELECT * FROM test WHERE (NOT (a = 1))'

    @dataset.filter {:a >= 1 && :b <= 2}.sql.should ==
      'SELECT * FROM test WHERE ((a >= 1) AND (b <= 2))'

    @dataset.filter {:c.like 'ABC%'}.sql.should ==
      "SELECT * FROM test WHERE (c LIKE 'ABC%')"

    @dataset.filter {:c.like? 'ABC%'}.sql.should ==
      "SELECT * FROM test WHERE (c LIKE 'ABC%')"
  end

  specify "should raise if receiving a single boolean value" do
    # the result of erroneous use of comparison not in a block
    # so instead of filter{:x == y} someone writes filter(:x == y)
    proc {@dataset.filter(:a == 1)}.should raise_error(Sequel::Error::InvalidFilter)
    proc {@dataset.filter(:a != 1)}.should raise_error(Sequel::Error::InvalidFilter)
  end

  specify "should work for grouped datasets" do
    @dataset.group(:a).filter(:b => 1).sql.should ==
      'SELECT * FROM test WHERE (b = 1) GROUP BY a'
  end
end
# Dataset#or: appends OR-ed alternatives to an existing WHERE clause,
# parenthesizing so that AND/OR nesting stays unambiguous.
context "Dataset#or" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
    @d1 = @dataset.where(:x => 1)
  end

  specify "should raise if no filter exists" do
    proc {@dataset.or(:a => 1)}.should raise_error(Sequel::Error)
  end

  specify "should add an alternative expression to the where clause" do
    @d1.or(:y => 2).sql.should ==
      'SELECT * FROM test WHERE (x = 1) OR (y = 2)'
  end

  specify "should accept all forms of filters" do
    # probably not exhaustive, but good enough
    @d1.or('(y > ?)', 2).sql.should ==
      'SELECT * FROM test WHERE (x = 1) OR (y > 2)'
    (@d1.or {:yy > 3}).sql.should ==
      'SELECT * FROM test WHERE (x = 1) OR (yy > 3)'
  end

  specify "should correctly add parens to give predictable results" do
    @d1.filter(:y => 2).or(:z => 3).sql.should ==
      'SELECT * FROM test WHERE ((x = 1) AND (y = 2)) OR (z = 3)'

    @d1.or(:y => 2).filter(:z => 3).sql.should ==
      'SELECT * FROM test WHERE ((x = 1) OR (y = 2)) AND (z = 3)'
  end
end
# Dataset#and: appends AND-ed conditions to an existing WHERE clause,
# parenthesizing so that AND/OR nesting stays unambiguous.
context "Dataset#and" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
    @d1 = @dataset.where(:x => 1)
  end

  specify "should raise if no filter exists" do
    proc {@dataset.and(:a => 1)}.should raise_error(Sequel::Error)
    proc {@dataset.where(:a => 1).group(:t).and(:b => 2)}.should_not raise_error(Sequel::Error)
    # Fixed: this comparison was missing `.should`, so its result was
    # silently discarded and the line asserted nothing.
    @dataset.where(:a => 1).group(:t).and(:b => 2).sql.should ==
      "SELECT * FROM test WHERE (a = 1) AND (b = 2) GROUP BY t"
  end

  specify "should add an alternative expression to the where clause" do
    @d1.and(:y => 2).sql.should ==
      'SELECT * FROM test WHERE (x = 1) AND (y = 2)'
  end

  specify "should accept all forms of filters" do
    # probably not exhaustive, but good enough
    @d1.and('(y > ?)', 2).sql.should ==
      'SELECT * FROM test WHERE (x = 1) AND (y > 2)'
    (@d1.and {:yy > 3}).sql.should ==
      'SELECT * FROM test WHERE (x = 1) AND (yy > 3)'
  end

  specify "should correctly add parens to give predictable results" do
    @d1.or(:y => 2).and(:z => 3).sql.should ==
      'SELECT * FROM test WHERE ((x = 1) OR (y = 2)) AND (z = 3)'

    @d1.and(:y => 2).or(:z => 3).sql.should ==
      'SELECT * FROM test WHERE ((x = 1) AND (y = 2)) OR (z = 3)'
  end
end
# Dataset#exclude: the negated counterpart of #filter — wraps the given
# condition in NOT (...), for all supported condition forms.
context "Dataset#exclude" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end

  specify "should correctly include the NOT operator when one condition is given" do
    @dataset.exclude(:region=>'Asia').select_sql.should ==
      "SELECT * FROM test WHERE (NOT (region = 'Asia'))"
  end

  specify "should take multiple conditions as a hash and express the logic correctly in SQL" do
    # Hash ordering is not guaranteed, hence the Regexp.union alternation.
    @dataset.exclude(:region => 'Asia', :name => 'Japan').select_sql.
      should match(Regexp.union(/WHERE \(NOT \(\(region = 'Asia'\) AND \(name = 'Japan'\)\)\)/,
                                /WHERE \(NOT \(\(name = 'Japan'\) AND \(region = 'Asia'\)\)\)/))
  end

  specify "should parenthesize a single string condition correctly" do
    @dataset.exclude("region = 'Asia' AND name = 'Japan'").select_sql.should ==
      "SELECT * FROM test WHERE (NOT (region = 'Asia' AND name = 'Japan'))"
  end

  specify "should parenthesize an array condition correctly" do
    @dataset.exclude('region = ? AND name = ?', 'Asia', 'Japan').select_sql.should ==
      "SELECT * FROM test WHERE (NOT (region = 'Asia' AND name = 'Japan'))"
  end

  specify "should correctly parenthesize when it is used twice" do
    @dataset.exclude(:region => 'Asia').exclude(:name => 'Japan').select_sql.should ==
      "SELECT * FROM test WHERE (NOT (region = 'Asia')) AND (NOT (name = 'Japan'))"
  end

  specify "should support proc expressions" do
    @dataset.exclude {:id == (6...12)}.sql.should ==
      'SELECT * FROM test WHERE (NOT ((id >= 6 AND id < 12)))'
  end
end
# Dataset#having: HAVING-clause generation; only valid on grouped datasets.
context "Dataset#having" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
    @grouped = @dataset.group(:region).select(:region, :population.SUM, :gdp.AVG)
    @d1 = @grouped.having('sum(population) > 10')
    @d2 = @grouped.having(:region => 'Asia')
    # Expected column list produced by the select above.
    @columns = "region, sum(population), avg(gdp)"
  end

  specify "should raise if the dataset is not grouped" do
    proc {@dataset.having('avg(gdp) > 10')}.should raise_error
  end

  specify "should affect select statements" do
    @d1.select_sql.should ==
      "SELECT #{@columns} FROM test GROUP BY region HAVING sum(population) > 10"
  end

  specify "should support proc expressions" do
    # `:sum[:population]` is old-Sequel syntax for the sum(population) function call.
    @grouped.having {:sum[:population] > 10}.sql.should ==
      "SELECT #{@columns} FROM test GROUP BY region HAVING (sum(population) > 10)"
  end

  specify "should work with and on the having clause" do
    @grouped.having{ :a > 1 }.and{ :b < 2 }.sql.should ==
      "SELECT #{@columns} FROM test GROUP BY region HAVING (a > 1) AND (b < 2)"
  end
end
# Grouped datasets reject UPDATE/DELETE and count via a subquery.
context "a grouped dataset" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test).group(:type_id)
  end

  specify "should raise when trying to generate an update statement" do
    proc {@dataset.update_sql(:id => 0)}.should raise_error
  end

  specify "should raise when trying to generate a delete statement" do
    proc {@dataset.delete_sql}.should raise_error
  end

  specify "should specify the grouping in generated select statement" do
    @dataset.select_sql.should ==
      "SELECT * FROM test GROUP BY type_id"
  end

  specify "should format the right statement for counting (as a subquery)" do
    # MockDatabase is a spec helper that records generated SQL in #sqls.
    db = MockDatabase.new
    db[:test].select(:name).group(:name).count
    db.sqls.should == ["SELECT COUNT(*) FROM (SELECT name FROM test GROUP BY name) t1"]
  end
end
# Dataset#group_by behaves like #group (alias coverage).
context "Dataset#group_by" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test).group_by(:type_id)
  end

  specify "should raise when trying to generate an update statement" do
    proc {@dataset.update_sql(:id => 0)}.should raise_error
  end

  specify "should raise when trying to generate a delete statement" do
    proc {@dataset.delete_sql}.should raise_error
  end

  specify "should specify the grouping in generated select statement" do
    @dataset.select_sql.should ==
      "SELECT * FROM test GROUP BY type_id"
  end
end
# Dataset#literal: conversion of Ruby values into SQL literal fragments
# (strings with quote-escaping, numbers, nil, arrays, symbols as column
# references, subqueries, Time/Date, expression strings, BigDecimal).
context "Dataset#literal" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end

  specify "should escape strings properly" do
    @dataset.literal('abc').should == "'abc'"
    @dataset.literal('a"x"bc').should == "'a\"x\"bc'"
    # single quotes are doubled, SQL-style
    @dataset.literal("a'bc").should == "'a''bc'"
    @dataset.literal("a''bc").should == "'a''''bc'"
  end

  specify "should literalize numbers properly" do
    @dataset.literal(1).should == "1"
    @dataset.literal(1.5).should == "1.5"
  end

  specify "should literalize nil as NULL" do
    @dataset.literal(nil).should == "NULL"
  end

  specify "should literalize an array properly" do
    @dataset.literal([]).should == "NULL"
    @dataset.literal([1, 'abc', 3]).should == "1, 'abc', 3"
    @dataset.literal([1, "a'b''c", 3]).should == "1, 'a''b''''c', 3"
  end

  specify "should literalize symbols as column references" do
    @dataset.literal(:name).should == "name"
    @dataset.literal(:items__name).should == "items.name"
  end

  specify "should raise an error for unsupported types" do
    proc {@dataset.literal({})}.should raise_error
  end

  specify "should literalize datasets as subqueries" do
    d = @dataset.from(:test)
    d.literal(d).should == "(#{d.sql})"
  end

  specify "should literalize Time properly" do
    t = Time.now
    s = t.strftime("TIMESTAMP '%Y-%m-%d %H:%M:%S'")
    @dataset.literal(t).should == s
  end

  specify "should literalize Date properly" do
    d = Date.today
    s = d.strftime("DATE '%Y-%m-%d'")
    @dataset.literal(d).should == s
  end

  specify "should not literalize expression strings" do
    # `.expr` marks a string as a raw SQL expression (no quoting).
    @dataset.literal('col1 + 2'.expr).should == 'col1 + 2'

    @dataset.update_sql(:a => 'a + 2'.expr).should ==
      'UPDATE test SET a = a + 2'
  end

  specify "should literalize BigDecimal instances correctly" do
    # NOTE(review): BigDecimal.new was removed in Ruby 2.7+ — this spec
    # targets an older Ruby; use BigDecimal("80") on modern interpreters.
    @dataset.literal(BigDecimal.new("80")).should == "80.0"
  end
end
# Dataset#from: FROM-clause sources, including datasets as auto-aliased
# subqueries and hash-based aliasing.
context "Dataset#from" do
  setup do
    @dataset = Sequel::Dataset.new(nil)
  end

  specify "should accept a Dataset" do
    proc {@dataset.from(@dataset)}.should_not raise_error
  end

  specify "should format a Dataset as a subquery if it has had options set" do
    @dataset.from(@dataset.from(:a).where(:a=>1)).select_sql.should ==
      "SELECT * FROM (SELECT * FROM a WHERE (a = 1)) t1"
  end

  specify "should automatically alias sub-queries" do
    # aliases are generated sequentially: t1, t2, ...
    @dataset.from(@dataset.from(:a).group(:b)).select_sql.should ==
      "SELECT * FROM (SELECT * FROM a GROUP BY b) t1"

    d1 = @dataset.from(:a).group(:b)
    d2 = @dataset.from(:c).group(:d)

    @dataset.from(d1, d2).sql.should ==
      "SELECT * FROM (SELECT * FROM a GROUP BY b) t1, (SELECT * FROM c GROUP BY d) t2"
  end

  specify "should accept a hash for aliasing" do
    @dataset.from(:a => :b).sql.should ==
      "SELECT * FROM a b"

    @dataset.from(@dataset.from(:a).group(:b) => :c).sql.should ==
      "SELECT * FROM (SELECT * FROM a GROUP BY b) c"
  end

  specify "should use the relevant table name if given a simple dataset" do
    @dataset.from(@dataset.from(:a)).select_sql.should ==
      "SELECT * FROM a"
  end

  specify "should raise if no source is given" do
    proc {@dataset.from(@dataset.from).select_sql}.should raise_error(Sequel::Error)
  end
end
# Dataset#select: column-list generation from symbols, literal strings,
# Sequel's symbol helpers (ALL / AS / table__col___alias), hashes, and
# arbitrary literalizable objects.
context "Dataset#select" do
  setup do
    @d = Sequel::Dataset.new(nil).from(:test)
  end

  specify "should accept variable arity" do
    @d.select(:name).sql.should == 'SELECT name FROM test'
    @d.select(:a, :b, :test__c).sql.should == 'SELECT a, b, test.c FROM test'
  end

  specify "should accept symbols and literal strings" do
    # `.lit` marks a string as literal SQL (no quoting)
    @d.select('aaa'.lit).sql.should == 'SELECT aaa FROM test'
    @d.select(:a, 'b'.lit).sql.should == 'SELECT a, b FROM test'
    @d.select(:test__cc, 'test.d AS e'.lit).sql.should ==
      'SELECT test.cc, test.d AS e FROM test'
    @d.select('test.d AS e'.lit, :test__cc).sql.should ==
      'SELECT test.d AS e, test.cc FROM test'

    # symbol helpers
    @d.select(:test.ALL).sql.should ==
      'SELECT test.* FROM test'
    @d.select(:test__name.AS(:n)).sql.should ==
      'SELECT test.name AS n FROM test'
    # triple-underscore shorthand for table__column AS alias
    @d.select(:test__name___n).sql.should ==
      'SELECT test.name AS n FROM test'
  end

  specify "should use the wildcard if no arguments are given" do
    @d.select.sql.should == 'SELECT * FROM test'
  end

  specify "should accept a hash for AS values" do
    @d.select(:name => 'n', :__ggh => 'age').sql.should =~
      /SELECT ((name AS n, __ggh AS age)|(__ggh AS age, name AS n)) FROM test/
  end

  specify "should overrun the previous select option" do
    # select! mutates the receiver; a later select replaces the column list
    @d.select!(:a, :b, :c).select.sql.should == 'SELECT * FROM test'
    @d.select!(:price).select(:name).sql.should == 'SELECT name FROM test'
  end

  specify "should accept arbitrary objects and literalize them correctly" do
    @d.select(1, :a, 't').sql.should == "SELECT 1, a, 't' FROM test"

    @d.select(nil, :sum[:t], :x___y).sql.should == "SELECT NULL, sum(t), x AS y FROM test"

    @d.select(nil, 1, :x => :y).sql.should == "SELECT NULL, 1, x AS y FROM test"
  end
end
# Specs for Dataset#select_all: reverts the selection to the wildcard.
context "Dataset#select_all" do
  setup do
    @d = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should select the wildcard" do
    @d.select_all.sql.should == 'SELECT * FROM test'
  end
  # fixed typo: "overrun" -> "override" (select_all replaces any previous selection)
  specify "should override the previous select option" do
    @d.select!(:a, :b, :c).select_all.sql.should == 'SELECT * FROM test'
  end
end
# Specs for Dataset#select_more: appends columns to the current selection,
# acting like #select when there is no explicit selection yet.
context "Dataset#select_more" do
  setup do
    @d = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should act like #select for datasets with no selection" do
    @d.select_more(:a, :b).sql.should == 'SELECT a, b FROM test'
    @d.select_all.select_more(:a, :b).sql.should == 'SELECT a, b FROM test'
    @d.select(:blah).select_all.select_more(:a, :b).sql.should == 'SELECT a, b FROM test'
  end
  specify "should add to the currently selected columns" do
    @d.select(:a).select_more(:b).sql.should == 'SELECT a, b FROM test'
    @d.select(:a.all).select_more(:b.all).sql.should == 'SELECT a.*, b.* FROM test'
  end
end
# Specs for Dataset#order: ORDER BY generation with symbols, DESC helper,
# literal strings; later calls replace earlier orderings.
context "Dataset#order" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should include an ORDER BY clause in the select statement" do
    @dataset.order(:name).sql.should ==
      'SELECT * FROM test ORDER BY name'
  end
  specify "should accept multiple arguments" do
    @dataset.order(:name, :price.DESC).sql.should ==
      'SELECT * FROM test ORDER BY name, price DESC'
  end
  # fixed typo: "overrun" -> "override" (a new order replaces the old one)
  specify "should override a previous ordering" do
    @dataset.order(:name).order(:stamp).sql.should ==
      'SELECT * FROM test ORDER BY stamp'
  end
  specify "should accept a string" do
    @dataset.order('dada ASC'.lit).sql.should ==
      'SELECT * FROM test ORDER BY dada ASC'
  end
end
# Specs for Dataset#order_by: alias of #order, so it mirrors the #order specs.
context "Dataset#order_by" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should include an ORDER BY clause in the select statement" do
    @dataset.order_by(:name).sql.should ==
      'SELECT * FROM test ORDER BY name'
  end
  specify "should accept multiple arguments" do
    @dataset.order_by(:name, :price.DESC).sql.should ==
      'SELECT * FROM test ORDER BY name, price DESC'
  end
  # fixed typo: "overrun" -> "override" (a new order replaces the old one)
  specify "should override a previous ordering" do
    @dataset.order_by(:name).order(:stamp).sql.should ==
      'SELECT * FROM test ORDER BY stamp'
  end
  specify "should accept a string" do
    @dataset.order_by('dada ASC'.lit).sql.should ==
      'SELECT * FROM test ORDER BY dada ASC'
  end
end
# Specs for Dataset#order_more: appends to an existing ORDER BY instead of
# replacing it.
context "Dataset#order_more" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should include an ORDER BY clause in the select statement" do
    @dataset.order_more(:name).sql.should ==
      'SELECT * FROM test ORDER BY name'
  end
  specify "should add to a previous ordering" do
    @dataset.order(:name).order_more(:stamp.DESC).sql.should ==
      'SELECT * FROM test ORDER BY name, stamp DESC'
  end
end
# Specs for Dataset#reverse_order: inverts ASC/DESC on each order term;
# with no arguments it reverses the dataset's existing ordering.
context "Dataset#reverse_order" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should use DESC as default order" do
    @dataset.reverse_order(:name).sql.should ==
      'SELECT * FROM test ORDER BY name DESC'
  end
  specify "should invert the order given" do
    @dataset.reverse_order(:name.DESC).sql.should ==
      'SELECT * FROM test ORDER BY name'
  end
  specify "should accept multiple arguments" do
    # each term is inverted independently
    @dataset.reverse_order(:name, :price.DESC).sql.should ==
      'SELECT * FROM test ORDER BY name DESC, price'
  end
  specify "should reverse a previous ordering if no arguments are given" do
    @dataset.order(:name).reverse_order.sql.should ==
      'SELECT * FROM test ORDER BY name DESC'
    @dataset.order(:clumsy.DESC, :fool).reverse_order.sql.should ==
      'SELECT * FROM test ORDER BY clumsy, fool DESC'
  end
end
# Specs for Dataset#limit: LIMIT/OFFSET generation from integers, ranges
# (inclusive and exclusive), and wrapping of fixed-SQL datasets in a subquery.
context "Dataset#limit" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should include a LIMIT clause in the select statement" do
    @dataset.limit(10).sql.should ==
      'SELECT * FROM test LIMIT 10'
  end
  specify "should accept ranges" do
    # range start becomes OFFSET; range size becomes LIMIT
    @dataset.limit(3..7).sql.should ==
      'SELECT * FROM test LIMIT 5 OFFSET 3'
    @dataset.limit(3...7).sql.should ==
      'SELECT * FROM test LIMIT 4 OFFSET 3'
  end
  specify "should include an offset if a second argument is given" do
    @dataset.limit(6, 10).sql.should ==
      'SELECT * FROM test LIMIT 6 OFFSET 10'
  end
  specify "should work with fixed sql datasets" do
    # a raw-SQL dataset must be wrapped in a subquery before limiting
    @dataset.opts[:sql] = 'select * from cccc'
    @dataset.limit(6, 10).sql.should ==
      'SELECT * FROM (select * from cccc) t1 LIMIT 6 OFFSET 10'
  end
end
# Specs for Dataset#naked: returns a clone that yields plain hashes,
# dropping any model class association.
context "Dataset#naked" do
  setup do
    @d1 = Sequel::Dataset.new(nil, {1 => 2, 3 => 4})
    @d2 = Sequel::Dataset.new(nil, {1 => 2, 3 => 4}).set_model(Object)
  end
  specify "should return a clone with :naked option set" do
    naked = @d1.naked
    naked.opts[:naked].should be_true
  end
  specify "should remove any existing reference to a model class" do
    naked = @d2.naked
    naked.opts[:models].should be_nil
  end
end
# Specs for Dataset#qualified_column_name: prefixes a column with a table
# name unless it is already qualified (literal "t.c" or :t__c symbol).
context "Dataset#qualified_column_name" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should return the same if already qualified" do
    @dataset.qualified_column_name('test.a'.lit, :items).should == 'test.a'
    @dataset.qualified_column_name(:ccc__b, :items).should == :ccc__b
  end
  specify "should qualify the column with the supplied table name" do
    @dataset.qualified_column_name('a'.lit, :items).to_s(@dataset).should == 'items.a'
    @dataset.qualified_column_name(:b1, :items).to_s(@dataset).should == 'items.b1'
  end
end
# Test double used by the iteration-related specs below (#map, #to_hash,
# #columns, #print): ignores the SQL and yields a fixed set of rows.
class DummyDataset < Sequel::Dataset
  VALUES = [
    {:a => 1, :b => 2},
    {:a => 3, :b => 4},
    {:a => 5, :b => 6}
  ]

  # Yields each canned row to the block; the sql argument is ignored.
  def fetch_rows(sql, &block)
    VALUES.each {|row| block.call(row)}
  end
end
# Specs for Dataset#map: behaves like Enumerable#map with a block, maps a
# single column when given a column name, and returns raw rows otherwise.
context "Dataset#map" do
  setup do
    @d = DummyDataset.new(nil).from(:items)
  end
  specify "should provide the usual functionality if no argument is given" do
    @d.map {|n| n[:a] + n[:b]}.should == [3, 7, 11]
  end
  specify "should map using #[column name] if column name is given" do
    @d.map(:a).should == [1, 3, 5]
  end
  specify "should return the complete dataset values if nothing is given" do
    @d.map.should == DummyDataset::VALUES
  end
end
# Specs for Dataset#to_hash: builds a hash from two named columns
# (key column, value column).
context "Dataset#to_hash" do
  setup do
    @d = DummyDataset.new(nil).from(:items)
  end
  specify "should provide a hash with the first column as key and the second as value" do
    @d.to_hash(:a, :b).should == {1 => 2, 3 => 4, 5 => 6}
    @d.to_hash(:b, :a).should == {2 => 1, 4 => 3, 6 => 5}
  end
end
# Specs for Dataset#uniq / #distinct: DISTINCT and DISTINCT ON generation.
context "Dataset#uniq" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test).select(:name)
  end
  specify "should include DISTINCT clause in statement" do
    @dataset.uniq.sql.should == 'SELECT DISTINCT name FROM test'
  end
  specify "should be aliased by Dataset#distinct" do
    @dataset.distinct.sql.should == 'SELECT DISTINCT name FROM test'
  end
  specify "should accept an expression list" do
    # arguments produce a (PostgreSQL-style) DISTINCT ON clause
    @dataset.uniq(:a, :b).sql.should == 'SELECT DISTINCT ON (a, b) name FROM test'
    @dataset.uniq(:stamp.cast_as(:integer), :node_id).sql.should == 'SELECT DISTINCT ON (cast(stamp AS integer), node_id) name FROM test'
  end
end
# Specs for Dataset#count: COUNT(*) SQL generation, the #size alias,
# WHERE-clause preservation, and fixed-SQL subquery wrapping. The subclass
# captures the last executed SQL in a class variable for inspection.
context "Dataset#count" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      def self.sql
        @@sql
      end
      def fetch_rows(sql)
        # record the SQL and yield a single dummy count row
        @@sql = sql
        yield({1 => 1})
      end
    end
    @dataset = @c.new(nil).from(:test)
  end
  specify "should format SQL properly" do
    @dataset.count.should == 1
    @c.sql.should == 'SELECT COUNT(*) FROM test'
  end
  specify "should be aliased by #size" do
    @dataset.size.should == 1
  end
  specify "should include the where clause if it's there" do
    @dataset.filter {:abc < 30}.count.should == 1
    @c.sql.should == 'SELECT COUNT(*) FROM test WHERE (abc < 30)'
  end
  specify "should count properly for datasets with fixed sql" do
    @dataset.opts[:sql] = "select abc from xyz"
    @dataset.count.should == 1
    @c.sql.should == "SELECT COUNT(*) FROM (select abc from xyz) t1"
  end
end
# Specs for Dataset#group_and_count: SELECT <cols>, count(*) grouped and
# ordered by the count.
context "Dataset#group_and_count" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      def self.sql
        @@sql
      end
      def fetch_rows(sql)
        # record the SQL and yield a single dummy row
        @@sql = sql
        yield({1 => 1})
      end
    end
    @ds = @c.new(nil).from(:test)
  end
  specify "should format SQL properly" do
    @ds.group_and_count(:name).sql.should == "SELECT name, count(*) AS count FROM test GROUP BY name ORDER BY count"
  end
  specify "should accept multiple columns for grouping" do
    @ds.group_and_count(:a, :b).sql.should == "SELECT a, b, count(*) AS count FROM test GROUP BY a, b ORDER BY count"
  end
end
# Specs for Dataset#empty?: delegates to #count and reports zero as empty.
context "Dataset#empty?" do
  specify "should return true if #count == 0" do
    @c = Class.new(Sequel::Dataset) do
      def count
        0
      end
    end
    @dataset = @c.new(nil).from(:test)
    @dataset.empty?.should be_true
    @c = Class.new(Sequel::Dataset) do
      def count
        1
      end
    end
    @dataset = @c.new(nil).from(:test)
    @dataset.empty?.should be_false
  end
end
# Specs for Dataset#join_table and its convenience aliases
# (left_outer_join, right_outer_join, full_outer_join, inner_join, join):
# JOIN SQL generation, implicit :id keys, aliased tables, and arbitrary
# join conditions.
context "Dataset#join_table" do
  setup do
    @d = Sequel::Dataset.new(nil).from(:items)
  end
  specify "should format the JOIN clause properly" do
    @d.join_table(:left_outer, :categories, :category_id => :id).sql.should ==
      'SELECT * FROM items LEFT OUTER JOIN categories ON (categories.category_id = items.id)'
  end
  specify "should include WHERE clause if applicable" do
    @d.filter {:price < 100}.join_table(:right_outer, :categories, :category_id => :id).sql.should ==
      'SELECT * FROM items RIGHT OUTER JOIN categories ON (categories.category_id = items.id) WHERE (price < 100)'
  end
  specify "should include ORDER BY clause if applicable" do
    @d.order(:stamp).join_table(:full_outer, :categories, :category_id => :id).sql.should ==
      'SELECT * FROM items FULL OUTER JOIN categories ON (categories.category_id = items.id) ORDER BY stamp'
  end
  specify "should support multiple joins" do
    @d.join_table(:inner, :b, :items_id).join_table(:left_outer, :c, :b_id => :b__id).sql.should ==
      'SELECT * FROM items INNER JOIN b ON (b.items_id = items.id) LEFT OUTER JOIN c ON (c.b_id = b.id)'
  end
  specify "should use id as implicit relation primary key if omitted" do
    @d.join_table(:left_outer, :categories, :category_id).sql.should ==
      @d.join_table(:left_outer, :categories, :category_id => :id).sql
    # when doing multiple joins, id should be qualified using the last joined table
    @d.join_table(:right_outer, :b, :items_id).join_table(:full_outer, :c, :b_id).sql.should ==
      'SELECT * FROM items RIGHT OUTER JOIN b ON (b.items_id = items.id) FULL OUTER JOIN c ON (c.b_id = b.id)'
  end
  specify "should support left outer joins" do
    @d.join_table(:left_outer, :categories, :category_id).sql.should ==
      'SELECT * FROM items LEFT OUTER JOIN categories ON (categories.category_id = items.id)'
    @d.left_outer_join(:categories, :category_id).sql.should ==
      'SELECT * FROM items LEFT OUTER JOIN categories ON (categories.category_id = items.id)'
  end
  specify "should support right outer joins" do
    @d.join_table(:right_outer, :categories, :category_id).sql.should ==
      'SELECT * FROM items RIGHT OUTER JOIN categories ON (categories.category_id = items.id)'
    @d.right_outer_join(:categories, :category_id).sql.should ==
      'SELECT * FROM items RIGHT OUTER JOIN categories ON (categories.category_id = items.id)'
  end
  specify "should support full outer joins" do
    @d.join_table(:full_outer, :categories, :category_id).sql.should ==
      'SELECT * FROM items FULL OUTER JOIN categories ON (categories.category_id = items.id)'
    @d.full_outer_join(:categories, :category_id).sql.should ==
      'SELECT * FROM items FULL OUTER JOIN categories ON (categories.category_id = items.id)'
  end
  specify "should support inner joins" do
    @d.join_table(:inner, :categories, :category_id).sql.should ==
      'SELECT * FROM items INNER JOIN categories ON (categories.category_id = items.id)'
    @d.inner_join(:categories, :category_id).sql.should ==
      'SELECT * FROM items INNER JOIN categories ON (categories.category_id = items.id)'
  end
  specify "should default to an inner join" do
    @d.join_table(nil, :categories, :category_id).sql.should ==
      'SELECT * FROM items INNER JOIN categories ON (categories.category_id = items.id)'
    @d.join(:categories, :category_id).sql.should ==
      'SELECT * FROM items INNER JOIN categories ON (categories.category_id = items.id)'
  end
  specify "should raise if an invalid join type is specified" do
    proc {@d.join_table(:invalid, :a, :b)}.should raise_error(Sequel::Error)
  end
  specify "should treat aliased tables correctly" do
    @d.from('stats s').join('players p', :id => :player_id).sql.should ==
      'SELECT * FROM stats s INNER JOIN players p ON (p.id = s.player_id)'
  end
  specify "should allow for arbitrary conditions in the JOIN clause" do
    @d.join_table(:left_outer, :categories, :status => 0).sql.should ==
      'SELECT * FROM items LEFT OUTER JOIN categories ON (categories.status = 0)'
    @d.join_table(:left_outer, :categories, :categorizable_type => "Post").sql.should ==
      "SELECT * FROM items LEFT OUTER JOIN categories ON (categories.categorizable_type = 'Post')"
    @d.join_table(:left_outer, :categories, :timestamp => "CURRENT_TIMESTAMP".lit).sql.should ==
      "SELECT * FROM items LEFT OUTER JOIN categories ON (categories.timestamp = CURRENT_TIMESTAMP)"
    @d.join_table(:left_outer, :categories, :status => [1, 2, 3]).sql.should ==
      "SELECT * FROM items LEFT OUTER JOIN categories ON (categories.status IN (1, 2, 3))"
  end
  specify "should support aliased tables" do
    # removed a dead assignment: the result of .should was being stored in an
    # unused local variable `ds`
    Sequel::Dataset.new(nil).from(:foo => :f). \
      join_table(:inner, :bar, :id => :bar_id).sql.should ==
      'SELECT * FROM foo f INNER JOIN bar ON (bar.id = f.bar_id)'
  end
end
# Specs for Dataset#[]=: shorthand for an UPDATE filtered by the bracket
# condition. The subclass captures the generated SQL instead of executing.
context "Dataset#[]=" do
  setup do
    c = Class.new(Sequel::Dataset) do
      def last_sql
        @@last_sql
      end
      def update(*args)
        # capture the SQL rather than hitting a database
        @@last_sql = update_sql(*args)
      end
    end
    @d = c.new(nil).from(:items)
  end
  specify "should perform an update on the specified filter" do
    @d[:a => 1] = {:x => 3}
    @d.last_sql.should == 'UPDATE items SET x = 3 WHERE (a = 1)'
  end
end
# Specs for Dataset#set: alias of #update, accepting a hash or a block
# (including subscript syntax such as x[1]).
context "Dataset#set" do
  setup do
    c = Class.new(Sequel::Dataset) do
      def last_sql
        @@last_sql
      end
      def update(*args, &block)
        # capture the SQL rather than hitting a database
        @@last_sql = update_sql(*args, &block)
      end
    end
    @d = c.new(nil).from(:items)
  end
  specify "should act as alias to #update" do
    @d.set({:x => 3})
    @d.last_sql.should == 'UPDATE items SET x = 3'
    @d.set {:x << :x + 1}
    @d.last_sql.should == 'UPDATE items SET x = (x + 1)'
    # the | operator denotes SQL array subscripts in the block DSL
    @d.set {(:x|1) << (:x|2) + 1}
    @d.last_sql.should == 'UPDATE items SET x[1] = (x[2] + 1)'
  end
end
# Specs for Dataset#insert_multiple: inserts each array element, optionally
# transforming each through a block first.
context "Dataset#insert_multiple" do
  setup do
    c = Class.new(Sequel::Dataset) do
      attr_reader :inserts
      def insert(arg)
        # record inserted values instead of executing SQL
        @inserts ||= []
        @inserts << arg
      end
    end
    @d = c.new(nil)
  end
  specify "should insert all items in the supplied array" do
    @d.insert_multiple [:aa, 5, 3, {1 => 2}]
    @d.inserts.should == [:aa, 5, 3, {1 => 2}]
  end
  specify "should pass array items through the supplied block if given" do
    a = ["inevitable", "hello", "the ticking clock"]
    @d.insert_multiple(a) {|i| i.gsub('l', 'r')}
    @d.inserts.should == ["inevitabre", "herro", "the ticking crock"]
  end
end
# Specs for the aggregate shortcuts #min, #max, #sum, #avg: each should
# issue a single-column aggregate query. fetch_rows yields the SQL itself
# so the assertions can inspect the generated statement.
context "Dataset aggregate methods" do
  setup do
    c = Class.new(Sequel::Dataset) do
      def fetch_rows(sql)
        yield({1 => sql})
      end
    end
    @d = c.new(nil).from(:test)
  end
  specify "should include min" do
    @d.min(:a).should == 'SELECT min(a) AS v FROM test'
  end
  specify "should include max" do
    @d.max(:b).should == 'SELECT max(b) AS v FROM test'
  end
  specify "should include sum" do
    @d.sum(:c).should == 'SELECT sum(c) AS v FROM test'
  end
  specify "should include avg" do
    @d.avg(:d).should == 'SELECT avg(d) AS v FROM test'
  end
  specify "should accept qualified columns" do
    @d.avg(:test__bc).should == 'SELECT avg(test.bc) AS v FROM test'
  end
end
# Specs for Dataset#range: issues a min/max query for a column and returns
# the result as a Ruby Range. The stub yields fixed v1/v2 values.
context "Dataset#range" do
  setup do
    c = Class.new(Sequel::Dataset) do
      @@sql = nil
      def last_sql; @@sql; end
      def fetch_rows(sql)
        # record the SQL and yield fixed min/max values
        @@sql = sql
        yield(:v1 => 1, :v2 => 10)
      end
    end
    @d = c.new(nil).from(:test)
  end
  specify "should generate a correct SQL statement" do
    @d.range(:stamp)
    @d.last_sql.should == "SELECT min(stamp) AS v1, max(stamp) AS v2 FROM test LIMIT 1"
    @d.filter {:price > 100}.range(:stamp)
    @d.last_sql.should == "SELECT min(stamp) AS v1, max(stamp) AS v2 FROM test WHERE (price > 100) LIMIT 1"
  end
  specify "should return a range object" do
    @d.range(:tryme).should == (1..10)
    @d.last_sql.should == "SELECT min(tryme) AS v1, max(tryme) AS v2 FROM test LIMIT 1"
  end
end
# Specs for Dataset#interval: issues a (max - min) query for a column and
# returns the difference. The stub yields a fixed value.
# Fixed copy-paste errors: the context was named "Dataset#range" (duplicating
# the previous context) although it tests #interval, and the second spec
# claimed an interval is "a range object" when it is a single value.
context "Dataset#interval" do
  setup do
    c = Class.new(Sequel::Dataset) do
      @@sql = nil
      def last_sql; @@sql; end
      def fetch_rows(sql)
        # record the SQL and yield a fixed difference value
        @@sql = sql
        yield(:v => 1234)
      end
    end
    @d = c.new(nil).from(:test)
  end
  specify "should generate a correct SQL statement" do
    @d.interval(:stamp)
    @d.last_sql.should == "SELECT (max(stamp) - min(stamp)) AS v FROM test LIMIT 1"
    @d.filter {:price > 100}.interval(:stamp)
    @d.last_sql.should == "SELECT (max(stamp) - min(stamp)) AS v FROM test WHERE (price > 100) LIMIT 1"
  end
  specify "should return the interval between the min and max values" do
    @d.interval(:tryme).should == 1234
    @d.last_sql.should == "SELECT (max(tryme) - min(tryme)) AS v FROM test LIMIT 1"
  end
end
# Specs for Dataset#first: single-record fetch with optional filter (hash,
# parameterized string, or block) and multi-record fetch when given a count.
# The subclass records the merged options for inspection.
context "Dataset#first" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      @@last_dataset = nil
      @@last_opts = nil
      def self.last_dataset
        @@last_dataset
      end
      def self.last_opts
        @@last_opts
      end
      def single_record(opts = nil)
        # record the effective options and return a canned row
        @@last_opts = @opts.merge(opts || {})
        {:a => 1, :b => 2}
      end
      def all
        @@last_dataset = self
        [{:a => 1, :b => 2}] * @opts[:limit]
      end
    end
    @d = @c.new(nil).from(:test)
  end
  specify "should return the first matching record if a hash is specified" do
    @d.first(:z => 26).should == {:a => 1, :b => 2}
    @c.last_opts[:where].should == ('(z = 26)')
    @d.first('z = ?', 15)
    @c.last_opts[:where].should == ('z = 15')
  end
  specify "should return the first matching record if a block is given" do
    @d.first {:z > 26}.should == {:a => 1, :b => 2}
    @c.last_opts[:where].should == ('(z > 26)')
  end
  specify "should return a single record if no argument is given" do
    @d.first.should == {:a => 1, :b => 2}
  end
  specify "should set the limit according to the given number" do
    @d.first
    @c.last_opts[:limit].should == 1
    i = rand(10) + 10
    @d.first(i)
    @c.last_dataset.opts[:limit].should == i
  end
  specify "should return an array with the records if argument is greater than 1" do
    i = rand(10) + 10
    r = @d.first(i)
    r.should be_a_kind_of(Array)
    r.size.should == i
    r.each {|row| row.should == {:a => 1, :b => 2}}
  end
end
# Specs for Dataset#last: requires an explicit order (raises otherwise),
# inverts that order to fetch from the end, and supports hash/parameterized
# filters and multi-record fetches.
context "Dataset#last" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      @@last_dataset = nil
      def self.last_dataset
        @@last_dataset
      end
      def single_record(opts = nil)
        # record the merged dataset and return a canned row
        @@last_dataset = clone_merge(opts) if opts
        {:a => 1, :b => 2}
      end
      def all
        @@last_dataset = self
        [{:a => 1, :b => 2}] * @opts[:limit]
      end
    end
    @d = @c.new(nil).from(:test)
  end
  specify "should raise if no order is given" do
    proc {@d.last}.should raise_error(Sequel::Error)
    proc {@d.last(2)}.should raise_error(Sequel::Error)
    proc {@d.order(:a).last}.should_not raise_error
    proc {@d.order(:a).last(2)}.should_not raise_error
  end
  specify "should invert the order" do
    @d.order(:a).last
    @d.literal(@c.last_dataset.opts[:order]).should == @d.literal([:a.DESC])
    @d.order(:b.DESC).last
    @d.literal(@c.last_dataset.opts[:order]).should == @d.literal(:b)
    @d.order(:c, :d).last
    @d.literal(@c.last_dataset.opts[:order]).should == @d.literal([:c.DESC, :d.DESC])
    @d.order(:e.DESC, :f).last
    @d.literal(@c.last_dataset.opts[:order]).should == @d.literal([:e, :f.DESC])
  end
  # fixed copy-paste from the #first specs: this context tests #last
  specify "should return the last matching record if a hash is specified" do
    @d.order(:a).last(:z => 26).should == {:a => 1, :b => 2}
    @c.last_dataset.opts[:where].should == ('(z = 26)')
    @d.order(:a).last('z = ?', 15)
    @c.last_dataset.opts[:where].should == ('z = 15')
  end
  specify "should return a single record if no argument is given" do
    @d.order(:a).last.should == {:a => 1, :b => 2}
  end
  specify "should set the limit according to the given number" do
    i = rand(10) + 10
    # removed an unused local assignment (the result was never inspected here)
    @d.order(:a).last(i)
    @c.last_dataset.opts[:limit].should == i
  end
  specify "should return an array with the records if argument is greater than 1" do
    i = rand(10) + 10
    r = @d.order(:a).last(i)
    r.should be_a_kind_of(Array)
    r.size.should == i
    r.each {|row| row.should == {:a => 1, :b => 2}}
  end
end
# Specs for the SQL set operations #union, #intersect and #except, each
# with an ALL variant selected by a boolean second argument.
context "Dataset set operations" do
  setup do
    @a = Sequel::Dataset.new(nil).from(:a).filter(:z => 1)
    @b = Sequel::Dataset.new(nil).from(:b).filter(:z => 2)
  end
  specify "should support UNION and UNION ALL" do
    @a.union(@b).sql.should == \
      "SELECT * FROM a WHERE (z = 1) UNION SELECT * FROM b WHERE (z = 2)"
    @b.union(@a, true).sql.should == \
      "SELECT * FROM b WHERE (z = 2) UNION ALL SELECT * FROM a WHERE (z = 1)"
  end
  specify "should support INTERSECT and INTERSECT ALL" do
    @a.intersect(@b).sql.should == \
      "SELECT * FROM a WHERE (z = 1) INTERSECT SELECT * FROM b WHERE (z = 2)"
    @b.intersect(@a, true).sql.should == \
      "SELECT * FROM b WHERE (z = 2) INTERSECT ALL SELECT * FROM a WHERE (z = 1)"
  end
  specify "should support EXCEPT and EXCEPT ALL" do
    @a.except(@b).sql.should == \
      "SELECT * FROM a WHERE (z = 1) EXCEPT SELECT * FROM b WHERE (z = 2)"
    @b.except(@a, true).sql.should == \
      "SELECT * FROM b WHERE (z = 2) EXCEPT ALL SELECT * FROM a WHERE (z = 1)"
  end
end
# Specs for Dataset#[]: fetches a single record filtered by the bracket
# conditions (hash equality or range membership).
context "Dataset#[]" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      @@last_dataset = nil
      def self.last_dataset
        @@last_dataset
      end
      def single_record(opts = nil)
        # record the merged dataset and return a canned row
        @@last_dataset = opts ? clone_merge(opts) : self
        {1 => 2, 3 => 4}
      end
    end
    @d = @c.new(nil).from(:test)
  end
  specify "should return a single record filtered according to the given conditions" do
    @d[:name => 'didi'].should == {1 => 2, 3 => 4}
    @c.last_dataset.opts[:where].should == "(name = 'didi')"
    @d[:id => 5..45].should == {1 => 2, 3 => 4}
    @c.last_dataset.opts[:where].should == "(id >= 5 AND id <= 45)"
  end
end
# Specs for Dataset#single_record: returns the first yielded row (the stub
# yields the SQL string itself), passes options through, and returns nil
# when nothing is yielded.
context "Dataset#single_record" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      def fetch_rows(sql)
        yield sql
      end
    end
    @cc = Class.new(@c) do
      # yields nothing, to exercise the empty-result path
      def fetch_rows(sql); end
    end
    @d = @c.new(nil).from(:test)
    @e = @cc.new(nil).from(:test)
  end
  specify "should call each and return the first record" do
    @d.single_record.should == 'SELECT * FROM test'
  end
  specify "should pass opts to each" do
    @d.single_record(:limit => 3).should == 'SELECT * FROM test LIMIT 3'
  end
  specify "should return nil if no record is present" do
    @e.single_record.should be_nil
  end
end
# Specs for Dataset#single_value: returns the first value of the first row,
# passes options through, and returns nil for an empty result.
context "Dataset#single_value" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      def fetch_rows(sql)
        yield({1 => sql})
      end
    end
    @cc = Class.new(@c) do
      # yields nothing, to exercise the empty-result path
      def fetch_rows(sql); end
    end
    @d = @c.new(nil).from(:test)
    @e = @cc.new(nil).from(:test)
  end
  specify "should call each and return the first value of the first record" do
    @d.single_value.should == 'SELECT * FROM test'
  end
  specify "should pass opts to each" do
    @d.single_value(:limit => 3).should == 'SELECT * FROM test LIMIT 3'
  end
  specify "should return nil" do
    @e.single_value.should be_nil
  end
end
# Specs for Dataset#set_row_proc: every fetched row passes through the
# registered proc, and the proc survives dataset cloning.
context "Dataset#set_row_proc" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      def fetch_rows(sql, &block)
        # yield a hash with kind as the 1 bit of a number
        (1..10).each {|i| block.call({:kind => i[0]})}
      end
    end
    @dataset = @c.new(nil).from(:items)
  end
  specify "should cause dataset to pass all rows through the filter" do
    @dataset.set_row_proc {|h| h[:der] = h[:kind] + 2; h}
    rows = @dataset.all
    rows.size.should == 10
    rows.each {|r| r[:der].should == (r[:kind] + 2)}
  end
  specify "should be copied over when dataset is cloned" do
    @dataset.set_row_proc {|h| h[:der] = h[:kind] + 2; h}
    @dataset.filter(:a => 1).first.should == {:kind => 1, :der => 3}
  end
end
# Specs for Dataset#set_model: wrapping fetched rows in model instances,
# clearing the model with nil, polymorphic model dispatch on a key column,
# extra constructor arguments, and argument validation.
context "Dataset#set_model" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      def fetch_rows(sql, &block)
        # yield a hash with kind as the 1 bit of a number
        (1..10).each {|i| block.call({:kind => i[0]})}
      end
    end
    @dataset = @c.new(nil).from(:items)
    @m = Class.new do
      attr_accessor :c, :args
      def initialize(c, *args); @c = c; @args = args; end
      # fixed: the equality check used assignment (@args = o.args) instead of
      # comparison, which made the @args half of the check nearly always truthy
      def ==(o); (@c == o.c) && (@args == o.args); end
    end
  end
  specify "should clear the models hash and restore the stock #each if nil is specified" do
    @dataset.set_model(@m)
    @dataset.set_model(nil)
    @dataset.first.should == {:kind => 1}
    @dataset.model_classes.should be_nil
  end
  # NOTE(review): this spec duplicates the previous one — it also passes nil
  # rather than calling set_model with no argument as the description implies;
  # confirm set_model's arity before changing the call.
  specify "should clear the models hash and restore the stock #each if nothing is specified" do
    @dataset.set_model(@m)
    @dataset.set_model(nil)
    @dataset.first.should == {:kind => 1}
    @dataset.model_classes.should be_nil
  end
  specify "should alter #each to provide model instances" do
    @dataset.first.should == {:kind => 1}
    @dataset.set_model(@m)
    @dataset.first.should == @m.new({:kind => 1})
  end
  specify "should extend the dataset with a #destroy method" do
    @dataset.should_not respond_to(:destroy)
    @dataset.set_model(@m)
    @dataset.should respond_to(:destroy)
  end
  specify "should set opts[:naked] to nil" do
    @dataset.opts[:naked] = true
    @dataset.set_model(@m)
    @dataset.opts[:naked].should be_nil
  end
  specify "should send additional arguments to the models' initialize method" do
    @dataset.set_model(@m, 7, 6, 5)
    @dataset.first.should == @m.new({:kind => 1}, 7, 6, 5)
  end
  specify "should provide support for polymorphic model instantiation" do
    @m1 = Class.new(@m)
    @m2 = Class.new(@m)
    @dataset.set_model(:kind, 0 => @m1, 1 => @m2)
    @dataset.opts[:polymorphic_key].should == :kind
    all = @dataset.all
    # rows alternate kind 1,0,1,0,... so classes alternate @m2/@m1
    all[0].class.should == @m2
    all[1].class.should == @m1
    all[2].class.should == @m2
    all[3].class.should == @m1
    #...
    # denude model
    @dataset.set_model(nil)
    @dataset.first.should == {:kind => 1}
  end
  specify "should send additional arguments for polymorphic models as well" do
    @m1 = Class.new(@m)
    @m2 = Class.new(@m)
    @dataset.set_model(:kind, {0 => @m1, 1 => @m2}, :hey => :wow)
    all = @dataset.all
    all[0].class.should == @m2; all[0].args.should == [{:hey => :wow}]
    all[1].class.should == @m1; all[1].args.should == [{:hey => :wow}]
    all[2].class.should == @m2; all[2].args.should == [{:hey => :wow}]
    all[3].class.should == @m1; all[3].args.should == [{:hey => :wow}]
  end
  specify "should raise for invalid parameters" do
    proc {@dataset.set_model('kind')}.should raise_error(ArgumentError)
    proc {@dataset.set_model(0)}.should raise_error(ArgumentError)
    proc {@dataset.set_model(:kind)}.should raise_error(ArgumentError) # no hash given
  end
end
# Specs for Dataset#model_classes: nil for naked datasets, {nil => klass}
# for single-model datasets, the full hash for polymorphic datasets.
context "Dataset#model_classes" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      # # We don't need that for now
      # def fetch_rows(sql, &block)
      #   (1..10).each(&block)
      # end
    end
    @dataset = @c.new(nil).from(:items)
    @m = Class.new do
      attr_accessor :c
      def initialize(c); @c = c; end
      def ==(o); @c == o.c; end
    end
  end
  specify "should return nil for a naked dataset" do
    @dataset.model_classes.should == nil
  end
  specify "should return a {nil => model_class} hash for a model dataset" do
    @dataset.set_model(@m)
    @dataset.model_classes.should == {nil => @m}
  end
  specify "should return the polymorphic hash for a polymorphic model dataset" do
    @m1 = Class.new(@m)
    @m2 = Class.new(@m)
    @dataset.set_model(:key, 0 => @m1, 1 => @m2)
    @dataset.model_classes.should == {0 => @m1, 1 => @m2}
  end
end
# Specs for Dataset#polymorphic_key: nil for naked datasets, otherwise the
# key column used to pick the model class.
context "Dataset#polymorphic_key" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      # # We don't need this for now
      # def fetch_rows(sql, &block)
      #   (1..10).each(&block)
      # end
    end
    @dataset = @c.new(nil).from(:items)
    @m = Class.new do
      attr_accessor :c
      def initialize(c); @c = c; end
      def ==(o); @c == o.c; end
    end
  end
  specify "should return nil for a naked dataset" do
    @dataset.polymorphic_key.should be_nil
  end
  specify "should return the polymorphic key" do
    @dataset.set_model(:id, nil => @m)
    @dataset.polymorphic_key.should == :id
  end
end
# Specs for a dataset with an associated model: #each yields model instances
# by default, raw records when :naked => true is passed.
context "A model dataset" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      def fetch_rows(sql, &block)
        (1..10).each(&block)
      end
    end
    @dataset = @c.new(nil).from(:items)
    @m = Class.new do
      attr_accessor :c
      def initialize(c); @c = c; end
      def ==(o); @c == o.c; end
    end
    @dataset.set_model(@m)
  end
  specify "should supply naked records if the naked option is specified" do
    @dataset.each {|r| r.class.should == @m}
    @dataset.each(:naked => true) {|r| r.class.should == Fixnum}
  end
end
# Specs for polymorphic model datasets: the nil key is the fallback class,
# a missing mapping raises Sequel::Error, and :naked bypasses the models.
context "A polymorphic model dataset" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      def fetch_rows(sql, &block)
        # rows carry the low bit of 1..10, so :bit alternates 1,0,1,0,...
        (1..10).each {|i| block.call(:bit => i[0])}
      end
    end
    @dataset = @c.new(nil).from(:items)
    @m = Class.new do
      attr_accessor :c
      def initialize(c); @c = c; end
      def ==(o); @c == o.c; end
    end
  end
  specify "should use a nil key in the polymorphic hash to specify the default model class" do
    @m2 = Class.new(@m)
    @dataset.set_model(:bit, nil => @m, 1 => @m2)
    all = @dataset.all
    all[0].class.should == @m2
    all[1].class.should == @m
    all[2].class.should == @m2
    all[3].class.should == @m
    #...
  end
  specify "should raise Sequel::Error if no suitable class is found in the polymorphic hash" do
    @m2 = Class.new(@m)
    @dataset.set_model(:bit, 1 => @m2)
    proc {@dataset.all}.should raise_error(Sequel::Error)
  end
  specify "should supply naked records if the naked option is specified" do
    @dataset.set_model(:bit, nil => @m)
    @dataset.each(:naked => true) {|r| r.class.should == Hash}
  end
end
# Specs for model instantiation strategy: models are built via .new with the
# row hash, unless the class defines a .load constructor, which is preferred
# (for both single-model and polymorphic datasets).
context "A dataset with associated model class(es)" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      def fetch_rows(sql, &block)
        block.call({:x => 1, :y => 2})
      end
    end
    @dataset = @c.new(nil).from(:items)
    # plain model: row hash goes through #initialize
    @m1 = Class.new do
      attr_accessor :v
      def initialize(v); @v = v; end
    end
    # model with a .load class constructor, which should take precedence
    @m2 = Class.new do
      attr_accessor :v, :vv
      def initialize(v = nil); @v = v; end
      def self.load(v); o = new(nil); o.vv = v; o; end
    end
    @m3 = Class.new(@m2)
  end
  specify "should instantiate an instance by passing the record hash as argument" do
    @dataset.set_model(@m1)
    o = @dataset.first
    o.class.should == @m1
    o.v.should == {:x => 1, :y => 2}
  end
  specify "should use the .load constructor if available" do
    @dataset.set_model(@m2)
    o = @dataset.first
    o.class.should == @m2
    o.v.should == nil
    o.vv.should == {:x => 1, :y => 2}
  end
  specify "should use the .load constructor also for polymorphic datasets" do
    @dataset.set_model(:y, 1 => @m2, 2 => @m3)
    o = @dataset.first
    o.class.should == @m3
    o.v.should == nil
    o.vv.should == {:x => 1, :y => 2}
  end
end
# Specs for Dataset#destroy (added by set_model): calls #destroy on each
# model instance inside a DB transaction and returns the count; raises for
# naked datasets. Uses globals to observe destruction order.
context "Dataset#destroy" do
  setup do
    # stub database exposing only #transaction
    db = Object.new
    m = Module.new do
      def transaction; yield; end
    end
    db.extend(m)
    $DESTROYED = []
    @m = Class.new do
      def initialize(c)
        @c = c
      end
      attr_accessor :c
      def ==(o)
        @c == o.c
      end
      def destroy
        # record destruction so the spec can verify order and count
        $DESTROYED << self
      end
    end
    $MODELS = [@m.new(12), @m.new(13)]
    c = Class.new(Sequel::Dataset) do
      def fetch_rows(sql, &block)
        (12..13).each(&block)
      end
    end
    @d = c.new(db).from(:test)
    @d.set_model(@m)
  end
  specify "should call destroy for every model instance in the dataset" do
    count = @d.destroy
    count.should == 2
    $DESTROYED.should == $MODELS
  end
  specify "should raise error if no models are associated with the dataset" do
    proc {@d.naked.destroy}.should raise_error(Sequel::Error)
  end
end
# Specs for Dataset#<<: delegates to #insert and returns its result.
context "Dataset#<<" do
  setup do
    @d = Sequel::Dataset.new(nil)
    # stub out #insert with a sentinel return value
    @d.meta_def(:insert) do
      1234567890
    end
  end
  specify "should call #insert" do
    (@d << {:name => 1}).should == 1234567890
  end
end
# Specs for Dataset#paginate: limit/offset derivation, page counting and
# navigation, record ranges/counts per page, and fixed-SQL wrapping.
# Count is stubbed to 153 records with 20 per page (8 pages).
context "A paginated dataset" do
  setup do
    @d = Sequel::Dataset.new(nil)
    @d.meta_def(:count) {153}
    @paginated = @d.paginate(1, 20)
  end
  specify "should set the limit and offset options correctly" do
    @paginated.opts[:limit].should == 20
    @paginated.opts[:offset].should == 0
  end
  specify "should set the page count correctly" do
    @paginated.page_count.should == 8
    @d.paginate(1, 50).page_count.should == 4
  end
  specify "should set the current page number correctly" do
    @paginated.current_page.should == 1
    @d.paginate(3, 50).current_page.should == 3
  end
  specify "should return the next page number or nil if we're on the last" do
    @paginated.next_page.should == 2
    @d.paginate(4, 50).next_page.should be_nil
  end
  # fixed copy-paste: prev_page is nil on the FIRST page, not the last
  specify "should return the previous page number or nil if we're on the first" do
    @paginated.prev_page.should be_nil
    @d.paginate(4, 50).prev_page.should == 3
  end
  specify "should return the page range" do
    @paginated.page_range.should == (1..8)
    @d.paginate(4, 50).page_range.should == (1..4)
  end
  specify "should return the record range for the current page" do
    @paginated.current_page_record_range.should == (1..20)
    @d.paginate(4, 50).current_page_record_range.should == (151..153)
    # a page past the end yields an empty range
    @d.paginate(5, 50).current_page_record_range.should == (0..0)
  end
  specify "should return the record count for the current page" do
    @paginated.current_page_record_count.should == 20
    @d.paginate(3, 50).current_page_record_count.should == 50
    @d.paginate(4, 50).current_page_record_count.should == 3
    @d.paginate(5, 50).current_page_record_count.should == 0
  end
  specify "should work with fixed sql" do
    ds = @d.clone_merge(:sql => 'select * from blah')
    ds.meta_def(:count) {150}
    ds.paginate(2, 50).sql.should == 'SELECT * FROM (select * from blah) t1 LIMIT 50 OFFSET 50'
  end
end
# Specs for Dataset#columns: the column list is cached in @columns and is
# lazily populated by calling #first when unset.
# NOTE(review): DummyDataset is defined elsewhere in the spec suite.
context "Dataset#columns" do
  setup do
    @dataset = DummyDataset.new(nil).from(:items)
    @dataset.meta_def(:columns=) {|c| @columns = c}
    # Stub #first to record the SQL that would have been issued.
    @dataset.meta_def(:first) {@columns = select_sql(nil)}
  end
  specify "should return the value of @columns" do
    @dataset.columns = [:a, :b, :c]
    @dataset.columns.should == [:a, :b, :c]
  end
  specify "should call first if @columns is nil" do
    @dataset.columns = nil
    @dataset.columns.should == 'SELECT * FROM items'
    # The value is cached, so changing the FROM table has no effect.
    @dataset.opts[:from] = [:nana]
    @dataset.columns.should == 'SELECT * FROM items'
  end
end
require 'stringio'
# Specs for Dataset#print: renders rows as an ASCII table on $stdout.
# NOTE(review): DummyDataset is defined elsewhere in the spec suite and
# presumably yields rows {:a=>1,:b=>2}, {:a=>3,:b=>4}, {:a=>5,:b=>6}.
context "Dataset#print" do
  setup do
    # Redirect $stdout so the printed table can be inspected.
    @output = StringIO.new
    @orig_stdout = $stdout
    $stdout = @output
    @dataset = DummyDataset.new(nil).from(:items)
  end
  teardown do
    # Always restore the real stdout.
    $stdout = @orig_stdout
  end
  specify "should print out a table with the values" do
    @dataset.print(:a, :b)
    @output.rewind
    @output.read.should == \
      "+-+-+\n|a|b|\n+-+-+\n|1|2|\n|3|4|\n|5|6|\n+-+-+\n"
  end
  specify "should default to the dataset's columns" do
    @dataset.meta_def(:columns) {[:a, :b]}
    @dataset.print
    @output.rewind
    @output.read.should == \
      "+-+-+\n|a|b|\n+-+-+\n|1|2|\n|3|4|\n|5|6|\n+-+-+\n"
  end
end
# Specs for Dataset#multi_insert: inserts a list of hashes inside a
# transaction, optionally committing in batches via :commit_every.
context "Dataset#multi_insert" do
  setup do
    # Fake DB that records SQL and transaction boundaries instead of
    # executing anything.
    @dbc = Class.new do
      attr_reader :sqls
      def execute(sql)
        @sqls ||= []
        @sqls << sql
      end
      def transaction
        @sqls ||= []
        @sqls << 'BEGIN'
        yield
        @sqls << 'COMMIT'
      end
    end
    @db = @dbc.new
    @ds = Sequel::Dataset.new(@db).from(:items)
    @list = [{:name => 'abc'}, {:name => 'def'}, {:name => 'ghi'}]
  end
  # Description fixed: the records are issued as separate INSERT statements
  # inside one transaction (see the assertion), not joined into one string.
  specify "should insert all records in a single transaction" do
    @ds.multi_insert(@list)
    @db.sqls.should == [
      'BEGIN',
      "INSERT INTO items (name) VALUES ('abc')",
      "INSERT INTO items (name) VALUES ('def')",
      "INSERT INTO items (name) VALUES ('ghi')",
      'COMMIT'
    ]
  end
  specify "should accept the commit_every option for committing every x records" do
    @ds.multi_insert(@list, :commit_every => 2)
    @db.sqls.should == [
      'BEGIN',
      "INSERT INTO items (name) VALUES ('abc')",
      "INSERT INTO items (name) VALUES ('def')",
      'COMMIT',
      'BEGIN',
      "INSERT INTO items (name) VALUES ('ghi')",
      'COMMIT'
    ]
  end
end
# Specs for Dataset#query: a DSL block where each statement is a chainable
# dataset method. The where/having proc filters below are parsed from the
# block's *source* by legacy Sequel (ParseTree), so their exact syntax is
# significant — do not reformat them.
context "Dataset#query" do
  setup do
    @d = Sequel::Dataset.new(nil)
  end
  specify "should support #from" do
    q = @d.query {from :xxx}
    q.class.should == @d.class
    q.sql.should == "SELECT * FROM xxx"
  end
  specify "should support #select" do
    q = @d.query do
      select :a, :b___mongo  # :b___mongo is legacy syntax for "b AS mongo"
      from :yyy
    end
    q.class.should == @d.class
    q.sql.should == "SELECT a, b AS mongo FROM yyy"
  end
  specify "should support #where" do
    q = @d.query do
      from :zzz
      where {:x + 2 > :y + 3}
    end
    q.class.should == @d.class
    q.sql.should == "SELECT * FROM zzz WHERE ((x + 2) > (y + 3))"
    q = @d.from(:zzz).query do
      where {:x > 1 && :y > 2}
    end
    q.class.should == @d.class
    q.sql.should == "SELECT * FROM zzz WHERE ((x > 1) AND (y > 2))"
    q = @d.from(:zzz).query do
      where :x => 33
    end
    q.class.should == @d.class
    q.sql.should == "SELECT * FROM zzz WHERE (x = 33)"
  end
  specify "should support #group_by and #having" do
    q = @d.query do
      from :abc
      group_by :id
      having {:x >= 2}
    end
    q.class.should == @d.class
    q.sql.should == "SELECT * FROM abc GROUP BY id HAVING (x >= 2)"
  end
  specify "should support #order, #order_by" do
    q = @d.query do
      from :xyz
      order_by :stamp
    end
    q.class.should == @d.class
    q.sql.should == "SELECT * FROM xyz ORDER BY stamp"
  end
  # Methods that don't return a dataset are rejected inside #query.
  specify "should raise on non-chainable method calls" do
    proc {@d.query {count}}.should raise_error(Sequel::Error)
  end
  specify "should raise on each, insert, update, delete" do
    proc {@d.query {each}}.should raise_error(Sequel::Error)
    proc {@d.query {insert(:x => 1)}}.should raise_error(Sequel::Error)
    proc {@d.query {update(:x => 1)}}.should raise_error(Sequel::Error)
    proc {@d.query {delete}}.should raise_error(Sequel::Error)
  end
end
# Specs for the self-mutating bang variants (select!, from!, order!,
# filter!) which modify the dataset in place instead of returning a clone.
# The filter! block below is parsed from source (legacy ParseTree).
context "Dataset" do
  setup do
    @d = Sequel::Dataset.new(nil).from(:x)
  end
  specify "should support self-changing select!" do
    @d.select!(:y)
    @d.sql.should == "SELECT y FROM x"
  end
  specify "should support self-changing from!" do
    @d.from!(:y)
    @d.sql.should == "SELECT * FROM y"
  end
  specify "should support self-changing order!" do
    @d.order!(:y)
    @d.sql.should == "SELECT * FROM x ORDER BY y"
  end
  specify "should support self-changing filter!" do
    @d.filter!(:y => 1)
    @d.sql.should == "SELECT * FROM x WHERE (y = 1)"
  end
  specify "should support self-changing filter! with block" do
    @d.filter! {:y == 2}
    @d.sql.should == "SELECT * FROM x WHERE (y = 2)"
  end
  # Bang methods are synthesized only for methods that return a dataset.
  specify "should raise for ! methods that don't return a dataset" do
    proc {@d.opts!}.should raise_error(NameError)
  end
  specify "should raise for missing methods" do
    proc {@d.xuyz}.should raise_error(NameError)
    proc {@d.xyz!}.should raise_error(NameError)
    proc {@d.xyz?}.should raise_error(NameError)
  end
  specify "should support chaining of bang methods" do
    @d.order!(:y)
    @d.filter!(:y => 1)
    @d.sql.should == "SELECT * FROM x WHERE (y = 1) ORDER BY y"
  end
end
# Specs for Dataset#transform with a custom [load, dump] proc pair:
# values for column :x are unmarshalled when read and marshalled when
# written; other columns pass through untouched.
context "Dataset#transform" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      attr_accessor :raw  # the single row fetch_rows will yield
      attr_accessor :sql  # the last INSERT/UPDATE SQL generated
      def fetch_rows(sql, &block)
        block[@raw]
      end
      def insert(v)
        @sql = insert_sql(v)
      end
      def update(v)
        @sql = update_sql(v)
      end
    end
    @ds = @c.new(nil).from(:items)
    # [load proc, dump proc] for column :x.
    @ds.transform(:x => [
      proc {|v| Marshal.load(v)},
      proc {|v| Marshal.dump(v)}
    ])
  end
  specify "should change the dataset to transform values loaded from the database" do
    @ds.raw = {:x => Marshal.dump([1, 2, 3]), :y => 'hello'}
    @ds.first.should == {:x => [1, 2, 3], :y => 'hello'}
    @ds.raw = {:x => Marshal.dump([1, 2, 3]), :y => 'hello'}
    @ds.all.should == [{:x => [1, 2, 3], :y => 'hello'}]
  end
  specify "should change the dataset to transform values saved to the database" do
    @ds.insert(:x => :toast)
    @ds.sql.should == "INSERT INTO items (x) VALUES ('#{Marshal.dump(:toast)}')"
    @ds.insert(:y => 'butter')  # untransformed column
    @ds.sql.should == "INSERT INTO items (y) VALUES ('butter')"
    @ds.update(:x => ['dream'])
    @ds.sql.should == "UPDATE items SET x = '#{Marshal.dump(['dream'])}'"
  end
  specify "should be transferred to cloned datasets" do
    @ds2 = @ds.filter(:a => 1)
    @ds2.raw = {:x => Marshal.dump([1, 2, 3]), :y => 'hello'}
    @ds2.first.should == {:x => [1, 2, 3], :y => 'hello'}
    @ds2.insert(:x => :toast)
    @ds2.sql.should == "INSERT INTO items (x) VALUES ('#{Marshal.dump(:toast)}')"
  end
  specify "should work correctly together with set_row_proc" do
    # The row proc runs on the already-transformed row.
    @ds.set_row_proc {|r| r[:z] = r[:x] * 2; r}
    @ds.raw = {:x => Marshal.dump("wow"), :y => 'hello'}
    @ds.first.should == {:x => "wow", :y => 'hello', :z => "wowwow"}
    f = nil
    @ds.raw = {:x => Marshal.dump("wow"), :y => 'hello'}
    # :naked bypasses the row proc but not the transform.
    @ds.each(:naked => true) {|r| f = r}
    f.should == {:x => "wow", :y => 'hello'}
  end
end
# Specs for Dataset#transform error cases and the stock :yaml / :marshal
# transformations.
context "Dataset#transform" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      attr_accessor :raw  # the single row fetch_rows will yield
      attr_accessor :sql  # the last INSERT/UPDATE SQL generated
      def fetch_rows(sql, &block)
        block[@raw]
      end
      def insert(v)
        @sql = insert_sql(v)
      end
      def update(v)
        @sql = update_sql(v)
      end
    end
    @ds = @c.new(nil).from(:items)
  end
  specify "should raise Sequel::Error for invalid transformations" do
    # Anything other than a stock symbol or a two-proc pair is invalid.
    proc {@ds.transform(:x => 'mau')}.should raise_error(Sequel::Error::InvalidTransform)
    proc {@ds.transform(:x => :mau)}.should raise_error(Sequel::Error::InvalidTransform)
    proc {@ds.transform(:x => [])}.should raise_error(Sequel::Error::InvalidTransform)
    proc {@ds.transform(:x => ['mau'])}.should raise_error(Sequel::Error::InvalidTransform)
    proc {@ds.transform(:x => [proc {|v|}, proc {|v|}])}.should_not raise_error(Sequel::Error::InvalidTransform)
  end
  specify "should support stock YAML transformation" do
    @ds.transform(:x => :yaml)
    @ds.raw = {:x => [1, 2, 3].to_yaml, :y => 'hello'}
    @ds.first.should == {:x => [1, 2, 3], :y => 'hello'}
    @ds.insert(:x => :toast)
    @ds.sql.should == "INSERT INTO items (x) VALUES ('#{:toast.to_yaml}')"
    @ds.insert(:y => 'butter')  # untransformed column
    @ds.sql.should == "INSERT INTO items (y) VALUES ('butter')"
    @ds.update(:x => ['dream'])
    @ds.sql.should == "UPDATE items SET x = '#{['dream'].to_yaml}'"
    # Transform carries over to clones.
    @ds2 = @ds.filter(:a => 1)
    @ds2.raw = {:x => [1, 2, 3].to_yaml, :y => 'hello'}
    @ds2.first.should == {:x => [1, 2, 3], :y => 'hello'}
    @ds2.insert(:x => :toast)
    @ds2.sql.should == "INSERT INTO items (x) VALUES ('#{:toast.to_yaml}')"
    # Row proc runs on the already-transformed row.
    @ds.set_row_proc {|r| r[:z] = r[:x] * 2; r}
    @ds.raw = {:x => "wow".to_yaml, :y => 'hello'}
    @ds.first.should == {:x => "wow", :y => 'hello', :z => "wowwow"}
    f = nil
    @ds.raw = {:x => "wow".to_yaml, :y => 'hello'}
    @ds.each(:naked => true) {|r| f = r}
    f.should == {:x => "wow", :y => 'hello'}
  end
  specify "should support stock Marshal transformation" do
    @ds.transform(:x => :marshal)
    @ds.raw = {:x => Marshal.dump([1, 2, 3]), :y => 'hello'}
    @ds.first.should == {:x => [1, 2, 3], :y => 'hello'}
    @ds.insert(:x => :toast)
    @ds.sql.should == "INSERT INTO items (x) VALUES ('#{Marshal.dump(:toast)}')"
    @ds.insert(:y => 'butter')  # untransformed column
    @ds.sql.should == "INSERT INTO items (y) VALUES ('butter')"
    @ds.update(:x => ['dream'])
    @ds.sql.should == "UPDATE items SET x = '#{Marshal.dump(['dream'])}'"
    # Transform carries over to clones.
    @ds2 = @ds.filter(:a => 1)
    @ds2.raw = {:x => Marshal.dump([1, 2, 3]), :y => 'hello'}
    @ds2.first.should == {:x => [1, 2, 3], :y => 'hello'}
    @ds2.insert(:x => :toast)
    @ds2.sql.should == "INSERT INTO items (x) VALUES ('#{Marshal.dump(:toast)}')"
    # Row proc runs on the already-transformed row.
    @ds.set_row_proc {|r| r[:z] = r[:x] * 2; r}
    @ds.raw = {:x => Marshal.dump("wow"), :y => 'hello'}
    @ds.first.should == {:x => "wow", :y => 'hello', :z => "wowwow"}
    f = nil
    @ds.raw = {:x => Marshal.dump("wow"), :y => 'hello'}
    @ds.each(:naked => true) {|r| f = r}
    f.should == {:x => "wow", :y => 'hello'}
  end
  specify "should return self" do
    @ds.transform(:x => :marshal).should be(@ds)
  end
end
# Specs for Dataset#to_csv: rows are rendered as comma-separated lines
# with CRLF terminators, optionally preceded by a header row.
context "Dataset#to_csv" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      attr_accessor :data  # canned rows to yield
      attr_accessor :cols  # column names attached to each row
      def fetch_rows(sql, &block)
        @columns = @cols
        # Attach column keys to each array row (legacy Sequel ArrayKeys).
        @data.each {|r| r.keys = @columns; block[r]}
      end
      # naked should return self here because to_csv wants a naked result set.
      def naked
        self
      end
    end
    @ds = @c.new(nil).from(:items)
    @ds.cols = [:a, :b, :c]
    @ds.data = [
      [1, 2, 3], [4, 5, 6], [7, 8, 9]
    ]
  end
  specify "should format a CSV representation of the records" do
    @ds.to_csv.should ==
      "a, b, c\r\n1, 2, 3\r\n4, 5, 6\r\n7, 8, 9\r\n"
  end
  specify "should exclude column titles if so specified" do
    @ds.to_csv(false).should ==
      "1, 2, 3\r\n4, 5, 6\r\n7, 8, 9\r\n"
  end
end
# Specs for Dataset#each_hash: rows that carry column keys (legacy
# Sequel ArrayKeys rows) are yielded as symbol-keyed hashes.
context "Dataset#each_hash" do
  setup do
    dataset_class = Class.new(Sequel::Dataset) do
      # Emit two array rows with column keys attached.
      def each(&block)
        [[1, 2, 3], [4, 5, 6]].each do |row|
          row.keys = [:a, :b, :c]
          block[row]
        end
      end
    end
    @ds = dataset_class.new(nil).from(:items)
  end
  specify "should yield records converted to hashes" do
    collected = []
    @ds.each_hash {|h| collected << h}
    collected.should == [{:a => 1, :b => 2, :c => 3}, {:a => 4, :b => 5, :c => 6}]
  end
end
# Specs for the Dataset "magic" finders (order_by_xxx, group_by_xxx,
# count_by_xxx, filter_by_xxx, all_by_xxx, find_by_xxx, first_by_xxx,
# last_by_xxx). Each is synthesized via method_missing on first use, so
# respond_to? flips from false to true after the first call.
context "Dataset magic methods" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      # Class-level SQL log shared by all instances of this spec class.
      @@sqls = []
      def self.sqls; @@sqls; end
      # Record the SQL and yield one fixed row.
      def fetch_rows(sql)
        @@sqls << sql
        yield({:a => 1, :b => 2})
      end
    end
    @ds = @c.new(nil).from(:items)
  end
  specify "should support order_by_xxx" do
    @ds.should_not respond_to(:order_by_name)
    proc {@ds.order_by_name}.should_not raise_error
    @ds.should respond_to(:order_by_name)  # defined on first use
    @ds.order_by_name.should be_a_kind_of(@c)
    @ds.order_by_name.sql.should == "SELECT * FROM items ORDER BY name"
  end
  specify "should support group_by_xxx" do
    @ds.should_not respond_to(:group_by_name)
    proc {@ds.group_by_name}.should_not raise_error
    @ds.should respond_to(:group_by_name)
    @ds.group_by_name.should be_a_kind_of(@c)
    @ds.group_by_name.sql.should == "SELECT * FROM items GROUP BY name"
  end
  specify "should support count_by_xxx" do
    @ds.should_not respond_to(:count_by_name)
    proc {@ds.count_by_name}.should_not raise_error
    @ds.should respond_to(:count_by_name)
    @ds.count_by_name.should be_a_kind_of(@c)
    @ds.count_by_name.sql.should == "SELECT name, count(*) AS count FROM items GROUP BY name ORDER BY count"
  end
  specify "should support filter_by_xxx" do
    @ds.should_not respond_to(:filter_by_name)
    proc {@ds.filter_by_name('sharon')}.should_not raise_error
    @ds.should respond_to(:filter_by_name)
    @ds.filter_by_name('sharon').should be_a_kind_of(@c)
    @ds.filter_by_name('sharon').sql.should == "SELECT * FROM items WHERE (name = 'sharon')"
  end
  # The *_by_xxx finders below execute immediately; each spec calls twice
  # (once in the proc, once in the assertion), hence the "* 2" SQL logs.
  specify "should support all_by_xxx" do
    @ds.should_not respond_to(:all_by_name)
    proc {@ds.all_by_name('sharon')}.should_not raise_error
    @ds.should respond_to(:all_by_name)
    @ds.all_by_name('sharon').should == [{:a => 1, :b => 2}]
    @c.sqls.should == ["SELECT * FROM items WHERE (name = 'sharon')"] * 2
  end
  specify "should support find_by_xxx" do
    @ds.should_not respond_to(:find_by_name)
    proc {@ds.find_by_name('sharon')}.should_not raise_error
    @ds.should respond_to(:find_by_name)
    @ds.find_by_name('sharon').should == {:a => 1, :b => 2}
    @c.sqls.should == ["SELECT * FROM items WHERE (name = 'sharon') LIMIT 1"] * 2
  end
  specify "should support first_by_xxx" do
    @ds.should_not respond_to(:first_by_name)
    proc {@ds.first_by_name('sharon')}.should_not raise_error
    @ds.should respond_to(:first_by_name)
    @ds.first_by_name('sharon').should == {:a => 1, :b => 2}
    @c.sqls.should == ["SELECT * FROM items ORDER BY name LIMIT 1"] * 2
  end
  specify "should support last_by_xxx" do
    @ds.should_not respond_to(:last_by_name)
    proc {@ds.last_by_name('sharon')}.should_not raise_error
    @ds.should respond_to(:last_by_name)
    @ds.last_by_name('sharon').should == {:a => 1, :b => 2}
    @c.sqls.should == ["SELECT * FROM items ORDER BY name DESC LIMIT 1"] * 2
  end
end
# Specs for Dataset#create_view: issues CREATE VIEW using the dataset's
# own SELECT statement as the view definition.
context "Dataset#create_view" do
  setup do
    db_class = Class.new(Sequel::Database) do
      attr_reader :sqls

      # Record executed SQL instead of hitting a real database.
      def execute(sql)
        (@sqls ||= []) << sql
      end
    end
    @db = db_class.new
    @ds = @db[:items].order(:abc).filter(:category => 'ruby')
  end

  specify "should create a view with the dataset's sql" do
    @ds.create_view(:xyz)
    @db.sqls.should == ["CREATE VIEW xyz AS #{@ds.sql}"]
  end
end
# Specs for Dataset#create_or_replace_view: same as create_view but
# emits CREATE OR REPLACE VIEW.
context "Dataset#create_or_replace_view" do
  setup do
    db_class = Class.new(Sequel::Database) do
      attr_reader :sqls

      # Record executed SQL instead of hitting a real database.
      def execute(sql)
        (@sqls ||= []) << sql
      end
    end
    @db = db_class.new
    @ds = @db[:items].order(:abc).filter(:category => 'ruby')
  end

  specify "should create a view with the dataset's sql" do
    @ds.create_or_replace_view(:xyz)
    @db.sqls.should == ["CREATE OR REPLACE VIEW xyz AS #{@ds.sql}"]
  end
end
# Specs for the argument forms accepted by Dataset#update_sql.
context "Dataset#update_sql" do
  setup do
    @ds = Sequel::Dataset.new(nil).from(:items)
  end
  specify "should accept strings" do
    # Raw strings are used verbatim as the SET clause.
    @ds.update_sql("a = b").should == "UPDATE items SET a = b"
  end
  specify "should accept hash with string keys" do
    @ds.update_sql('c' => 'd').should == "UPDATE items SET c = 'd'"
  end
  specify "should accept array subscript references" do
    # (:day|1) is legacy Sequel syntax for the subscript day[1].
    @ds.update_sql((:day|1) => 'd').should == "UPDATE items SET day[1] = 'd'"
  end
end
# Stub dataset for the table_exists? specs below: selecting from table :a
# raises (simulating a missing table); any other table "succeeds".
class DummyMummyDataset < Sequel::Dataset
  def first
    from_tables = @opts[:from]
    raise if from_tables == [:a]
    true
  end
end
# Stub database for the table_exists? specs below: records SQL instead of
# executing it and hands out DummyMummyDataset instances.
class DummyMummyDatabase < Sequel::Database
  attr_reader :sqls

  # Log SQL rather than running it.
  def execute(sql)
    (@sqls ||= []) << sql
  end

  # Transactions are a no-op: just run the block.
  def transaction
    yield
  end

  # All datasets for this database are DummyMummyDatasets.
  def dataset
    DummyMummyDataset.new(self)
  end
end
# Specs for Dataset#table_exists?, built on the DummyMummy stubs above.
context "Dataset#table_exists?" do
  setup do
    @db = DummyMummyDatabase.new
    @db.stub!(:tables).and_return([:a, :b])  # this DB reports its tables
    @db2 = DummyMummyDatabase.new            # this one does not
  end
  specify "should use Database#tables if available" do
    @db[:a].table_exists?.should be_true
    @db[:b].table_exists?.should be_true
    @db[:c].table_exists?.should be_false
  end
  specify "should otherwise try to select the first record from the table's dataset" do
    # DummyMummyDataset#first raises for :a, so :a is reported missing.
    @db2[:a].table_exists?.should be_false
    @db2[:b].table_exists?.should be_true
  end
  specify "should raise Sequel::Error if dataset references more than one table" do
    proc {@db.from(:a, :b).table_exists?}.should raise_error(Sequel::Error)
  end
  specify "should raise Sequel::Error if dataset is from a subquery" do
    proc {@db.from(@db[:a]).table_exists?}.should raise_error(Sequel::Error)
  end
  specify "should raise Sequel::Error if dataset has fixed sql" do
    proc {@db['select * from blah'].table_exists?}.should raise_error(Sequel::Error)
  end
end
# Fixed dataset spec for perfect coverage.
require File.join(File.dirname(__FILE__), "spec_helper")
# Core Dataset specs: construction, clone_merge chaining, Enumerable
# inclusion, and the abstract adapter-interface methods.
context "Dataset" do
  setup do
    @dataset = Sequel::Dataset.new("db")
  end
  specify "should accept database and opts in initialize" do
    db = "db"
    opts = {:from => :test}
    d = Sequel::Dataset.new(db, opts)
    d.db.should be(db)
    d.opts.should be(opts)
    # Omitting opts defaults to an empty hash.
    d = Sequel::Dataset.new(db)
    d.db.should be(db)
    d.opts.should be_a_kind_of(Hash)
    d.opts.should == {}
  end
  specify "should provide clone_merge for chainability." do
    d1 = @dataset.clone_merge(:from => :test)
    d1.class.should == @dataset.class
    d1.should_not == @dataset
    d1.db.should be(@dataset.db)
    d1.opts[:from].should == :test
    @dataset.opts[:from].should be_nil  # original is unchanged
    d2 = d1.clone_merge(:order => :name)
    d2.class.should == @dataset.class
    d2.should_not == d1
    d2.should_not == @dataset
    d2.db.should be(@dataset.db)
    d2.opts[:from].should == :test
    d2.opts[:order].should == :name
    d1.opts[:order].should be_nil       # intermediate clone is unchanged
  end
  specify "should include Enumerable" do
    Sequel::Dataset.included_modules.should include(Enumerable)
  end
  # Adapters must override these; the base class raises NotImplementedError.
  specify "should raise ImplementedError for the dataset interface methods" do
    proc {@dataset.fetch_rows('abc')}.should raise_error(NotImplementedError)
    proc {@dataset.insert(1, 2, 3)}.should raise_error(NotImplementedError)
    proc {@dataset.update(:name => 'abc')}.should raise_error(NotImplementedError)
    proc {@dataset.delete}.should raise_error(NotImplementedError)
  end
end
# Specs for Dataset#clone_merge: returns a same-class clone whose opts
# are the original's merged with the given hash.
context "Dataset#clone_merge" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:items)
  end
  specify "should return a clone self" do
    clone = @dataset.clone_merge({})
    clone.class.should == @dataset.class
    clone.db.should == @dataset.db
    clone.opts.should == @dataset.opts
  end
  specify "should merge the specified options" do
    clone = @dataset.clone_merge(1 => 2)
    clone.opts.should == {1 => 2, :from => [:items]}
  end
  specify "should overwrite existing options" do
    clone = @dataset.clone_merge(:from => [:other])
    clone.opts.should == {:from => [:other]}
  end
  specify "should create a clone with a deep copy of options" do
    # Mutating the clone's opts must not leak back into the original.
    clone = @dataset.clone_merge(:from => [:other])
    @dataset.opts[:from].should == [:items]
    clone.opts[:from].should == [:other]
  end
  specify "should return an object with the same modules included" do
    m = Module.new do
      def __xyz__; "xyz"; end
    end
    @dataset.extend(m)
    @dataset.clone_merge({}).should respond_to(:__xyz__)
  end
end
# Specs for basic SQL generation: select_sql, delete_sql, insert_sql and
# update_sql over a single-table dataset. Uses legacy Sequel extensions
# (Array#keys= "ArrayKeys" rows; {:x << :y} assignment blocks parsed from
# source), so the exact syntax below is significant.
context "A simple dataset" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should format a select statement" do
    @dataset.select_sql.should == 'SELECT * FROM test'
  end
  specify "should format a delete statement" do
    @dataset.delete_sql.should == 'DELETE FROM test'
  end
  specify "should format an insert statement with default values" do
    @dataset.insert_sql.should == 'INSERT INTO test DEFAULT VALUES'
  end
  specify "should format an insert statement with hash" do
    # Hash ordering is not guaranteed here, hence the alternation.
    @dataset.insert_sql(:name => 'wxyz', :price => 342).
      should match(/INSERT INTO test \(name, price\) VALUES \('wxyz', 342\)|INSERT INTO test \(price, name\) VALUES \(342, 'wxyz'\)/)
    @dataset.insert_sql({}).should == "INSERT INTO test DEFAULT VALUES"
  end
  specify "should format an insert statement with array with keys" do
    v = [1, 2, 3]
    v.keys = [:a, :b, :c]  # legacy ArrayKeys row
    @dataset.insert_sql(v).should == "INSERT INTO test (a, b, c) VALUES (1, 2, 3)"
    v = []
    v.keys = [:a, :b]
    @dataset.insert_sql(v).should == "INSERT INTO test DEFAULT VALUES"
  end
  specify "should format an insert statement with string keys" do
    @dataset.insert_sql('name' => 'wxyz', 'price' => 342).
      should match(/INSERT INTO test \(name, price\) VALUES \('wxyz', 342\)|INSERT INTO test \(price, name\) VALUES \(342, 'wxyz'\)/)
  end
  specify "should format an insert statement with a model instance" do
    dbb = Sequel::Database.new
    @c = Class.new(Sequel::Model) do
      attr_accessor :values
    end
    v = @c.new; v.values = {:a => 1}
    @dataset.insert_sql(v).should == "INSERT INTO test (a) VALUES (1)"
    v = @c.new; v.values = {}
    @dataset.insert_sql(v).should == "INSERT INTO test DEFAULT VALUES"
  end
  specify "should format an insert statement with an arbitrary value" do
    @dataset.insert_sql(123).should == "INSERT INTO test VALUES (123)"
  end
  specify "should format an insert statement with sub-query" do
    @sub = Sequel::Dataset.new(nil).from(:something).filter(:x => 2)
    @dataset.insert_sql(@sub).should == \
      "INSERT INTO test (SELECT * FROM something WHERE (x = 2))"
  end
  specify "should format an insert statement with array" do
    @dataset.insert_sql('a', 2, 6.5).should ==
      "INSERT INTO test VALUES ('a', 2, 6.5)"
  end
  specify "should format an update statement" do
    @dataset.update_sql(:name => 'abc').should ==
      "UPDATE test SET name = 'abc'"
    # {:x << :y} is legacy block syntax for the assignment x = y.
    @dataset.update_sql {:x << :y}.should ==
      "UPDATE test SET x = y"
  end
  specify "should format an update statement with array with keys" do
    v = ['abc']
    v.keys = [:name]  # legacy ArrayKeys row
    @dataset.update_sql(v).should == "UPDATE test SET name = 'abc'"
  end
  specify "should be able to return rows for arbitrary SQL" do
    # A :sql option overrides generated SQL entirely.
    @dataset.select_sql(:sql => 'xxx yyy zzz').should ==
      "xxx yyy zzz"
  end
end
# Specs for datasets with several tables in FROM: selects work, but
# UPDATE/DELETE are ambiguous and must raise.
context "A dataset with multiple tables in its FROM clause" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:t1, :t2)
  end
  specify "should raise on #update_sql" do
    proc {@dataset.update_sql(:a=>1)}.should raise_error
  end
  specify "should raise on #delete_sql" do
    proc {@dataset.delete_sql}.should raise_error
  end
  specify "should generate a select query FROM all specified tables" do
    @dataset.select_sql.should == "SELECT * FROM t1, t2"
  end
end
# Specs for Dataset#where / #filter: hash, placeholder-array, raw-string
# and proc-expression filters, plus composition of successive filters.
# NOTE: the proc-style filters ({:x > 1} etc.) are parsed from the block's
# *source* by legacy Sequel (ParseTree), so their exact syntax matters.
context "Dataset#where" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
    @d1 = @dataset.where(:region => 'Asia')       # hash filter
    @d2 = @dataset.where('(region = ?)', 'Asia')  # placeholder-array filter
    @d3 = @dataset.where("(a = 1)")               # raw string filter
  end
  specify "should work with hashes" do
    # Hash ordering is not guaranteed, hence the alternation.
    @dataset.where(:name => 'xyz', :price => 342).select_sql.
      should match(/WHERE \(name = 'xyz'\) AND \(price = 342\)|WHERE \(price = 342\) AND \(name = 'xyz'\)/)
  end
  specify "should work with arrays (ala ActiveRecord)" do
    @dataset.where('price < ? AND id in (?)', 100, [1, 2, 3]).select_sql.should ==
      "SELECT * FROM test WHERE price < 100 AND id in (1, 2, 3)"
  end
  specify "should work with strings (custom SQL expressions)" do
    @dataset.where('(a = 1 AND b = 2)').select_sql.should ==
      "SELECT * FROM test WHERE (a = 1 AND b = 2)"
  end
  specify "should affect select, delete and update statements" do
    @d1.select_sql.should == "SELECT * FROM test WHERE (region = 'Asia')"
    @d1.delete_sql.should == "DELETE FROM test WHERE (region = 'Asia')"
    @d1.update_sql(:GDP => 0).should == "UPDATE test SET GDP = 0 WHERE (region = 'Asia')"
    @d2.select_sql.should == "SELECT * FROM test WHERE (region = 'Asia')"
    @d2.delete_sql.should == "DELETE FROM test WHERE (region = 'Asia')"
    @d2.update_sql(:GDP => 0).should == "UPDATE test SET GDP = 0 WHERE (region = 'Asia')"
    @d3.select_sql.should == "SELECT * FROM test WHERE (a = 1)"
    @d3.delete_sql.should == "DELETE FROM test WHERE (a = 1)"
    @d3.update_sql(:GDP => 0).should == "UPDATE test SET GDP = 0 WHERE (a = 1)"
  end
  specify "should be composable using AND operator (for scoping)" do
    # hashes are merged, no problem
    @d1.where(:size => 'big').select_sql.should ==
      "SELECT * FROM test WHERE (region = 'Asia') AND (size = 'big')"
    # hash and string
    @d1.where('population > 1000').select_sql.should ==
      "SELECT * FROM test WHERE (region = 'Asia') AND (population > 1000)"
    @d1.where('(a > 1) OR (b < 2)').select_sql.should ==
      "SELECT * FROM test WHERE (region = 'Asia') AND ((a > 1) OR (b < 2))"
    # hash and array
    @d1.where('(GDP > ?)', 1000).select_sql.should ==
      "SELECT * FROM test WHERE (region = 'Asia') AND (GDP > 1000)"
    # array and array
    @d2.where('(GDP > ?)', 1000).select_sql.should ==
      "SELECT * FROM test WHERE (region = 'Asia') AND (GDP > 1000)"
    # array and hash
    @d2.where(:name => ['Japan', 'China']).select_sql.should ==
      "SELECT * FROM test WHERE (region = 'Asia') AND (name IN ('Japan', 'China'))"
    # array and string
    @d2.where('GDP > ?').select_sql.should ==
      "SELECT * FROM test WHERE (region = 'Asia') AND (GDP > ?)"
    # string and string
    @d3.where('b = 2').select_sql.should ==
      "SELECT * FROM test WHERE (a = 1) AND (b = 2)"
    # string and hash
    @d3.where(:c => 3).select_sql.should ==
      "SELECT * FROM test WHERE (a = 1) AND (c = 3)"
    # string and array
    @d3.where('(d = ?)', 4).select_sql.should ==
      "SELECT * FROM test WHERE (a = 1) AND (d = 4)"
    # string and proc expr
    @d3.where {:e < 5}.select_sql.should ==
      "SELECT * FROM test WHERE (a = 1) AND (e < 5)"
  end
  # Description fixed: the assertions verify that filtering a grouped
  # dataset does NOT raise and produces the expected SQL.
  specify "should not raise if the dataset is grouped" do
    proc {@dataset.group(:t).where(:a => 1)}.should_not raise_error
    @dataset.group(:t).where(:a => 1).sql.should ==
      "SELECT * FROM test WHERE (a = 1) GROUP BY t"
  end
  specify "should accept ranges" do
    @dataset.filter(:id => 4..7).sql.should ==
      'SELECT * FROM test WHERE (id >= 4 AND id <= 7)'
    @dataset.filter(:id => 4...7).sql.should ==
      'SELECT * FROM test WHERE (id >= 4 AND id < 7)'
    @dataset.filter {:id == (4..7)}.sql.should ==
      'SELECT * FROM test WHERE (id >= 4 AND id <= 7)'
    @dataset.filter {:id.in?(4..7)}.sql.should ==
      'SELECT * FROM test WHERE (id >= 4 AND id <= 7)'
    # :table__id is legacy syntax for the qualified column table.id.
    @dataset.filter(:table__id => 4..7).sql.should ==
      'SELECT * FROM test WHERE (table.id >= 4 AND table.id <= 7)'
    @dataset.filter(:table__id => 4...7).sql.should ==
      'SELECT * FROM test WHERE (table.id >= 4 AND table.id < 7)'
    @dataset.filter {:table__id == (4..7)}.sql.should ==
      'SELECT * FROM test WHERE (table.id >= 4 AND table.id <= 7)'
    @dataset.filter {:table__id.in?(4..7)}.sql.should ==
      'SELECT * FROM test WHERE (table.id >= 4 AND table.id <= 7)'
  end
  specify "should accept nil" do
    @dataset.filter(:owner_id => nil).sql.should ==
      'SELECT * FROM test WHERE (owner_id IS NULL)'
    @dataset.filter{:owner_id.nil?}.sql.should ==
      'SELECT * FROM test WHERE (owner_id IS NULL)'
  end
  specify "should accept a subquery" do
    # select all countries that have GDP greater than the average for Asia
    @dataset.filter('gdp > ?', @d1.select(:gdp.AVG)).sql.should ==
      "SELECT * FROM test WHERE gdp > (SELECT avg(gdp) FROM test WHERE (region = 'Asia'))"
    @dataset.filter(:id => @d1.select(:id)).sql.should ==
      "SELECT * FROM test WHERE (id IN (SELECT id FROM test WHERE (region = 'Asia')))"
  end
  specify "should accept a subquery for an EXISTS clause" do
    a = @dataset.filter {:price < 100}
    @dataset.filter(a.exists).sql.should ==
      'SELECT * FROM test WHERE EXISTS (SELECT 1 FROM test WHERE (price < 100))'
  end
  specify "should accept proc expressions" do
    d = @d1.select(:gdp.AVG)
    @dataset.filter {:gdp > d}.sql.should ==
      "SELECT * FROM test WHERE (gdp > (SELECT avg(gdp) FROM test WHERE (region = 'Asia')))"
    @dataset.filter {:id.in(4..7)}.sql.should ==
      'SELECT * FROM test WHERE (id >= 4 AND id <= 7)'
    @dataset.filter {:c == 3}.sql.should ==
      'SELECT * FROM test WHERE (c = 3)'
    @dataset.filter {:id == :items__id}.sql.should ==
      'SELECT * FROM test WHERE (id = items.id)'
    @dataset.filter {:a < 1}.sql.should ==
      'SELECT * FROM test WHERE (a < 1)'
    @dataset.filter {:a != 1}.sql.should ==
      'SELECT * FROM test WHERE (NOT (a = 1))'
    @dataset.filter {:a >= 1 && :b <= 2}.sql.should ==
      'SELECT * FROM test WHERE ((a >= 1) AND (b <= 2))'
    @dataset.filter {:c.like 'ABC%'}.sql.should ==
      "SELECT * FROM test WHERE (c LIKE 'ABC%')"
    @dataset.filter {:c.like? 'ABC%'}.sql.should ==
      "SELECT * FROM test WHERE (c LIKE 'ABC%')"
  end
  specify "should raise if receiving a single boolean value" do
    # the result of erroneous use of comparison not in a block
    # so instead of filter{:x == y} someone writes filter(:x == y)
    proc {@dataset.filter(:a == 1)}.should raise_error(Sequel::Error::InvalidFilter)
    proc {@dataset.filter(:a != 1)}.should raise_error(Sequel::Error::InvalidFilter)
  end
  specify "should work for grouped datasets" do
    @dataset.group(:a).filter(:b => 1).sql.should ==
      'SELECT * FROM test WHERE (b = 1) GROUP BY a'
  end
end
# Specs for Dataset#or: appends an ORed condition to an existing filter,
# raising when there is no filter to extend.
context "Dataset#or" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
    @d1 = @dataset.where(:x => 1)
  end
  specify "should raise if no filter exists" do
    proc {@dataset.or(:a => 1)}.should raise_error(Sequel::Error)
  end
  specify "should add an alternative expression to the where clause" do
    @d1.or(:y => 2).sql.should ==
      'SELECT * FROM test WHERE (x = 1) OR (y = 2)'
  end
  specify "should accept all forms of filters" do
    # probably not exhaustive, but good enough
    @d1.or('(y > ?)', 2).sql.should ==
      'SELECT * FROM test WHERE (x = 1) OR (y > 2)'
    (@d1.or {:yy > 3}).sql.should ==
      'SELECT * FROM test WHERE (x = 1) OR (yy > 3)'
  end
  specify "should correctly add parens to give predictable results" do
    # AND-then-OR and OR-then-AND group the earlier clause in parens.
    @d1.filter(:y => 2).or(:z => 3).sql.should ==
      'SELECT * FROM test WHERE ((x = 1) AND (y = 2)) OR (z = 3)'
    @d1.or(:y => 2).filter(:z => 3).sql.should ==
      'SELECT * FROM test WHERE ((x = 1) OR (y = 2)) AND (z = 3)'
  end
end
# Specs for Dataset#and: appends an ANDed condition to an existing filter,
# raising when there is no filter to extend.
context "Dataset#and" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
    @d1 = @dataset.where(:x => 1)
  end
  specify "should raise if no filter exists" do
    proc {@dataset.and(:a => 1)}.should raise_error(Sequel::Error)
    proc {@dataset.where(:a => 1).group(:t).and(:b => 2)}.should_not raise_error(Sequel::Error)
    # Bug fix: the original spec was missing `.should`, so this line was a
    # discarded comparison that never asserted anything.
    @dataset.where(:a => 1).group(:t).and(:b => 2).sql.should ==
      "SELECT * FROM test WHERE (a = 1) AND (b = 2) GROUP BY t"
  end
  specify "should add an alternative expression to the where clause" do
    @d1.and(:y => 2).sql.should ==
      'SELECT * FROM test WHERE (x = 1) AND (y = 2)'
  end
  specify "should accept all forms of filters" do
    # probably not exhaustive, but good enough
    @d1.and('(y > ?)', 2).sql.should ==
      'SELECT * FROM test WHERE (x = 1) AND (y > 2)'
    (@d1.and {:yy > 3}).sql.should ==
      'SELECT * FROM test WHERE (x = 1) AND (yy > 3)'
  end
  specify "should correctly add parens to give predictable results" do
    # OR-then-AND and AND-then-OR group the earlier clause in parens.
    @d1.or(:y => 2).and(:z => 3).sql.should ==
      'SELECT * FROM test WHERE ((x = 1) OR (y = 2)) AND (z = 3)'
    @d1.and(:y => 2).or(:z => 3).sql.should ==
      'SELECT * FROM test WHERE ((x = 1) AND (y = 2)) OR (z = 3)'
  end
end
# Specs for Dataset#exclude: the inverse of #filter — wraps the given
# condition in NOT (...).
context "Dataset#exclude" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should correctly include the NOT operator when one condition is given" do
    @dataset.exclude(:region=>'Asia').select_sql.should ==
      "SELECT * FROM test WHERE (NOT (region = 'Asia'))"
  end
  specify "should take multiple conditions as a hash and express the logic correctly in SQL" do
    # Hash ordering is not guaranteed, hence the Regexp.union alternatives.
    @dataset.exclude(:region => 'Asia', :name => 'Japan').select_sql.
      should match(Regexp.union(/WHERE \(NOT \(\(region = 'Asia'\) AND \(name = 'Japan'\)\)\)/,
                                /WHERE \(NOT \(\(name = 'Japan'\) AND \(region = 'Asia'\)\)\)/))
  end
  specify "should parenthesize a single string condition correctly" do
    @dataset.exclude("region = 'Asia' AND name = 'Japan'").select_sql.should ==
      "SELECT * FROM test WHERE (NOT (region = 'Asia' AND name = 'Japan'))"
  end
  specify "should parenthesize an array condition correctly" do
    @dataset.exclude('region = ? AND name = ?', 'Asia', 'Japan').select_sql.should ==
      "SELECT * FROM test WHERE (NOT (region = 'Asia' AND name = 'Japan'))"
  end
  specify "should correctly parenthesize when it is used twice" do
    @dataset.exclude(:region => 'Asia').exclude(:name => 'Japan').select_sql.should ==
      "SELECT * FROM test WHERE (NOT (region = 'Asia')) AND (NOT (name = 'Japan'))"
  end
  specify "should support proc expressions" do
    # Block is parsed from source (legacy ParseTree); syntax matters.
    @dataset.exclude {:id == (6...12)}.sql.should ==
      'SELECT * FROM test WHERE (NOT ((id >= 6 AND id < 12)))'
  end
end
# Specs for Dataset#having on grouped datasets.
# NOTE(review): @d2 is set up but not referenced by any visible spec.
context "Dataset#having" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
    # :population.SUM / :gdp.AVG are legacy syntax for SQL aggregates.
    @grouped = @dataset.group(:region).select(:region, :population.SUM, :gdp.AVG)
    @d1 = @grouped.having('sum(population) > 10')
    @d2 = @grouped.having(:region => 'Asia')
    @columns = "region, sum(population), avg(gdp)"
  end
  specify "should raise if the dataset is not grouped" do
    proc {@dataset.having('avg(gdp) > 10')}.should raise_error
  end
  specify "should affect select statements" do
    @d1.select_sql.should ==
      "SELECT #{@columns} FROM test GROUP BY region HAVING sum(population) > 10"
  end
  specify "should support proc expressions" do
    # :sum[:population] is legacy syntax for the function call sum(population).
    @grouped.having {:sum[:population] > 10}.sql.should ==
      "SELECT #{@columns} FROM test GROUP BY region HAVING (sum(population) > 10)"
  end
  specify "should work with and on the having clause" do
    @grouped.having{ :a > 1 }.and{ :b < 2 }.sql.should ==
      "SELECT #{@columns} FROM test GROUP BY region HAVING (a > 1) AND (b < 2)"
  end
end
context "a grouped dataset" do
setup do
@dataset = Sequel::Dataset.new(nil).from(:test).group(:type_id)
end
specify "should raise when trying to generate an update statement" do
proc {@dataset.update_sql(:id => 0)}.should raise_error
end
specify "should raise when trying to generate a delete statement" do
proc {@dataset.delete_sql}.should raise_error
end
specify "should specify the grouping in generated select statement" do
@dataset.select_sql.should ==
"SELECT * FROM test GROUP BY type_id"
end
specify "should format the right statement for counting (as a subquery)" do
db = MockDatabase.new
db[:test].select(:name).group(:name).count
db.sqls.should == ["SELECT COUNT(*) FROM (SELECT name FROM test GROUP BY name) t1"]
end
end
context "Dataset#group_by" do
setup do
@dataset = Sequel::Dataset.new(nil).from(:test).group_by(:type_id)
end
specify "should raise when trying to generate an update statement" do
proc {@dataset.update_sql(:id => 0)}.should raise_error
end
specify "should raise when trying to generate a delete statement" do
proc {@dataset.delete_sql}.should raise_error
end
specify "should specify the grouping in generated select statement" do
@dataset.select_sql.should ==
"SELECT * FROM test GROUP BY type_id"
end
end
context "Dataset#literal" do
setup do
@dataset = Sequel::Dataset.new(nil).from(:test)
end
specify "should escape strings properly" do
@dataset.literal('abc').should == "'abc'"
@dataset.literal('a"x"bc').should == "'a\"x\"bc'"
@dataset.literal("a'bc").should == "'a''bc'"
@dataset.literal("a''bc").should == "'a''''bc'"
end
specify "should literalize numbers properly" do
@dataset.literal(1).should == "1"
@dataset.literal(1.5).should == "1.5"
end
specify "should literalize nil as NULL" do
@dataset.literal(nil).should == "NULL"
end
specify "should literalize an array properly" do
@dataset.literal([]).should == "NULL"
@dataset.literal([1, 'abc', 3]).should == "1, 'abc', 3"
@dataset.literal([1, "a'b''c", 3]).should == "1, 'a''b''''c', 3"
end
specify "should literalize symbols as column references" do
@dataset.literal(:name).should == "name"
@dataset.literal(:items__name).should == "items.name"
end
specify "should raise an error for unsupported types" do
proc {@dataset.literal({})}.should raise_error
end
specify "should literalize datasets as subqueries" do
d = @dataset.from(:test)
d.literal(d).should == "(#{d.sql})"
end
specify "should literalize Time properly" do
t = Time.now
s = t.strftime("TIMESTAMP '%Y-%m-%d %H:%M:%S'")
@dataset.literal(t).should == s
end
specify "should literalize Date properly" do
d = Date.today
s = d.strftime("DATE '%Y-%m-%d'")
@dataset.literal(d).should == s
end
specify "should not literalize expression strings" do
@dataset.literal('col1 + 2'.expr).should == 'col1 + 2'
@dataset.update_sql(:a => 'a + 2'.expr).should ==
'UPDATE test SET a = a + 2'
end
specify "should literalize BigDecimal instances correctly" do
@dataset.literal(BigDecimal.new("80")).should == "80.0"
end
end
context "Dataset#from" do
setup do
@dataset = Sequel::Dataset.new(nil)
end
specify "should accept a Dataset" do
proc {@dataset.from(@dataset)}.should_not raise_error
end
specify "should format a Dataset as a subquery if it has had options set" do
@dataset.from(@dataset.from(:a).where(:a=>1)).select_sql.should ==
"SELECT * FROM (SELECT * FROM a WHERE (a = 1)) t1"
end
specify "should automatically alias sub-queries" do
@dataset.from(@dataset.from(:a).group(:b)).select_sql.should ==
"SELECT * FROM (SELECT * FROM a GROUP BY b) t1"
d1 = @dataset.from(:a).group(:b)
d2 = @dataset.from(:c).group(:d)
@dataset.from(d1, d2).sql.should ==
"SELECT * FROM (SELECT * FROM a GROUP BY b) t1, (SELECT * FROM c GROUP BY d) t2"
end
specify "should accept a hash for aliasing" do
@dataset.from(:a => :b).sql.should ==
"SELECT * FROM a b"
@dataset.from(@dataset.from(:a).group(:b) => :c).sql.should ==
"SELECT * FROM (SELECT * FROM a GROUP BY b) c"
end
specify "should use the relevant table name if given a simple dataset" do
@dataset.from(@dataset.from(:a)).select_sql.should ==
"SELECT * FROM a"
end
specify "should raise if no source is given" do
proc {@dataset.from(@dataset.from).select_sql}.should raise_error(Sequel::Error)
end
end
# Specs: Dataset#select — column selection, literal strings, symbol helpers.
context "Dataset#select" do
  setup do
    @d = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should accept variable arity" do
    @d.select(:name).sql.should == 'SELECT name FROM test'
    @d.select(:a, :b, :test__c).sql.should == 'SELECT a, b, test.c FROM test'
  end
  specify "should accept symbols and literal strings" do
    @d.select('aaa'.lit).sql.should == 'SELECT aaa FROM test'
    @d.select(:a, 'b'.lit).sql.should == 'SELECT a, b FROM test'
    @d.select(:test__cc, 'test.d AS e'.lit).sql.should ==
      'SELECT test.cc, test.d AS e FROM test'
    @d.select('test.d AS e'.lit, :test__cc).sql.should ==
      'SELECT test.d AS e, test.cc FROM test'
    # symbol helpers
    @d.select(:test.ALL).sql.should ==
      'SELECT test.* FROM test'
    @d.select(:test__name.AS(:n)).sql.should ==
      'SELECT test.name AS n FROM test'
    @d.select(:test__name___n).sql.should ==
      'SELECT test.name AS n FROM test'
  end
  specify "should use the wildcard if no arguments are given" do
    @d.select.sql.should == 'SELECT * FROM test'
  end
  specify "should accept a hash for AS values" do
    # Hash ordering is not guaranteed, so accept either column order.
    @d.select(:name => 'n', :__ggh => 'age').sql.should =~
      /SELECT ((name AS n, __ggh AS age)|(__ggh AS age, name AS n)) FROM test/
  end
  # FIX: spec description typo — "overrun" should be "override".
  specify "should override the previous select option" do
    @d.select!(:a, :b, :c).select.sql.should == 'SELECT * FROM test'
    @d.select!(:price).select(:name).sql.should == 'SELECT name FROM test'
  end
  specify "should accept arbitrary objects and literalize them correctly" do
    @d.select(1, :a, 't').sql.should == "SELECT 1, a, 't' FROM test"
    @d.select(nil, :sum[:t], :x___y).sql.should == "SELECT NULL, sum(t), x AS y FROM test"
    @d.select(nil, 1, :x => :y).sql.should == "SELECT NULL, 1, x AS y FROM test"
  end
end
# Specs: Dataset#select_all — resets the selection to the wildcard.
context "Dataset#select_all" do
  setup do
    @d = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should select the wildcard" do
    @d.select_all.sql.should == 'SELECT * FROM test'
  end
  # FIX: spec description typo — "overrun" should be "override".
  specify "should override the previous select option" do
    @d.select!(:a, :b, :c).select_all.sql.should == 'SELECT * FROM test'
  end
end
# Specs: Dataset#select_more — appends columns to the current selection.
context "Dataset#select_more" do
  setup do
    @d = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should act like #select for datasets with no selection" do
    @d.select_more(:a, :b).sql.should == 'SELECT a, b FROM test'
    @d.select_all.select_more(:a, :b).sql.should == 'SELECT a, b FROM test'
    @d.select(:blah).select_all.select_more(:a, :b).sql.should == 'SELECT a, b FROM test'
  end
  specify "should add to the currently selected columns" do
    @d.select(:a).select_more(:b).sql.should == 'SELECT a, b FROM test'
    @d.select(:a.all).select_more(:b.all).sql.should == 'SELECT a.*, b.* FROM test'
  end
end
# Specs: Dataset#order — ORDER BY clause generation.
context "Dataset#order" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should include an ORDER BY clause in the select statement" do
    @dataset.order(:name).sql.should ==
      'SELECT * FROM test ORDER BY name'
  end
  specify "should accept multiple arguments" do
    @dataset.order(:name, :price.DESC).sql.should ==
      'SELECT * FROM test ORDER BY name, price DESC'
  end
  # FIX: spec description typo — "overrun" should be "override".
  specify "should override a previous ordering" do
    @dataset.order(:name).order(:stamp).sql.should ==
      'SELECT * FROM test ORDER BY stamp'
  end
  specify "should accept a string" do
    @dataset.order('dada ASC'.lit).sql.should ==
      'SELECT * FROM test ORDER BY dada ASC'
  end
end
# Specs: Dataset#order_by — alias of #order; same ORDER BY generation.
context "Dataset#order_by" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should include an ORDER BY clause in the select statement" do
    @dataset.order_by(:name).sql.should ==
      'SELECT * FROM test ORDER BY name'
  end
  specify "should accept multiple arguments" do
    @dataset.order_by(:name, :price.DESC).sql.should ==
      'SELECT * FROM test ORDER BY name, price DESC'
  end
  # FIX: spec description typo — "overrun" should be "override".
  specify "should override a previous ordering" do
    @dataset.order_by(:name).order(:stamp).sql.should ==
      'SELECT * FROM test ORDER BY stamp'
  end
  specify "should accept a string" do
    @dataset.order_by('dada ASC'.lit).sql.should ==
      'SELECT * FROM test ORDER BY dada ASC'
  end
end
# Specs: Dataset#order_more — appends to an existing ordering.
context "Dataset#order_more" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should include an ORDER BY clause in the select statement" do
    @dataset.order_more(:name).sql.should ==
      'SELECT * FROM test ORDER BY name'
  end
  specify "should add to a previous ordering" do
    @dataset.order(:name).order_more(:stamp.DESC).sql.should ==
      'SELECT * FROM test ORDER BY name, stamp DESC'
  end
end
# Specs: Dataset#reverse_order — inverts ASC/DESC for each ordered column.
context "Dataset#reverse_order" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should use DESC as default order" do
    @dataset.reverse_order(:name).sql.should ==
      'SELECT * FROM test ORDER BY name DESC'
  end
  specify "should invert the order given" do
    @dataset.reverse_order(:name.DESC).sql.should ==
      'SELECT * FROM test ORDER BY name'
  end
  specify "should accept multiple arguments" do
    @dataset.reverse_order(:name, :price.DESC).sql.should ==
      'SELECT * FROM test ORDER BY name DESC, price'
  end
  specify "should reverse a previous ordering if no arguments are given" do
    @dataset.order(:name).reverse_order.sql.should ==
      'SELECT * FROM test ORDER BY name DESC'
    @dataset.order(:clumsy.DESC, :fool).reverse_order.sql.should ==
      'SELECT * FROM test ORDER BY clumsy, fool DESC'
  end
end
# Specs: Dataset#limit — LIMIT/OFFSET generation, including range arguments
# and wrapping of fixed-SQL datasets in a subquery.
context "Dataset#limit" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should include a LIMIT clause in the select statement" do
    @dataset.limit(10).sql.should ==
      'SELECT * FROM test LIMIT 10'
  end
  specify "should accept ranges" do
    # Range start becomes OFFSET; range size becomes LIMIT
    # (exclusive ranges are one shorter).
    @dataset.limit(3..7).sql.should ==
      'SELECT * FROM test LIMIT 5 OFFSET 3'
    @dataset.limit(3...7).sql.should ==
      'SELECT * FROM test LIMIT 4 OFFSET 3'
  end
  specify "should include an offset if a second argument is given" do
    @dataset.limit(6, 10).sql.should ==
      'SELECT * FROM test LIMIT 6 OFFSET 10'
  end
  specify "should work with fixed sql datasets" do
    @dataset.opts[:sql] = 'select * from cccc'
    @dataset.limit(6, 10).sql.should ==
      'SELECT * FROM (select * from cccc) t1 LIMIT 6 OFFSET 10'
  end
end
# Specs: Dataset#naked — returns a model-free clone of the dataset.
context "Dataset#naked" do
  setup do
    @d1 = Sequel::Dataset.new(nil, {1 => 2, 3 => 4})
    @d2 = Sequel::Dataset.new(nil, {1 => 2, 3 => 4}).set_model(Object)
  end
  specify "should return a clone with :naked option set" do
    naked = @d1.naked
    naked.opts[:naked].should be_true
  end
  specify "should remove any existing reference to a model class" do
    naked = @d2.naked
    naked.opts[:models].should be_nil
  end
end
# Specs: Dataset#qualified_column_name — prefixes a column with its table
# unless it is already qualified.
context "Dataset#qualified_column_name" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test)
  end
  specify "should return the same if already qualified" do
    @dataset.qualified_column_name('test.a'.lit, :items).should == 'test.a'
    @dataset.qualified_column_name(:ccc__b, :items).should == :ccc__b
  end
  specify "should qualify the column with the supplied table name" do
    @dataset.qualified_column_name('a'.lit, :items).to_s(@dataset).should == 'items.a'
    @dataset.qualified_column_name(:b1, :items).to_s(@dataset).should == 'items.b1'
  end
end
# In-memory dataset fixture used by the #map/#to_hash specs below:
# #fetch_rows ignores the SQL string and yields the fixed VALUES rows.
class DummyDataset < Sequel::Dataset
  VALUES = [
    {:a => 1, :b => 2},
    {:a => 3, :b => 4},
    {:a => 5, :b => 6}
  ]
  def fetch_rows(sql, &block)
    VALUES.each(&block)
  end
end
# Specs: Dataset#map — block mapping, column-name mapping, raw rows.
context "Dataset#map" do
  setup do
    @d = DummyDataset.new(nil).from(:items)
  end
  specify "should provide the usual functionality if no argument is given" do
    @d.map {|n| n[:a] + n[:b]}.should == [3, 7, 11]
  end
  specify "should map using #[column name] if column name is given" do
    @d.map(:a).should == [1, 3, 5]
  end
  specify "should return the complete dataset values if nothing is given" do
    @d.map.should == DummyDataset::VALUES
  end
end
# Specs: Dataset#to_hash — builds a key/value hash from two columns.
context "Dataset#to_hash" do
  setup do
    @d = DummyDataset.new(nil).from(:items)
  end
  specify "should provide a hash with the first column as key and the second as value" do
    @d.to_hash(:a, :b).should == {1 => 2, 3 => 4, 5 => 6}
    @d.to_hash(:b, :a).should == {2 => 1, 4 => 3, 6 => 5}
  end
end
# Specs: Dataset#uniq — DISTINCT and DISTINCT ON generation.
context "Dataset#uniq" do
  setup do
    @dataset = Sequel::Dataset.new(nil).from(:test).select(:name)
  end
  specify "should include DISTINCT clause in statement" do
    @dataset.uniq.sql.should == 'SELECT DISTINCT name FROM test'
  end
  specify "should be aliased by Dataset#distinct" do
    @dataset.distinct.sql.should == 'SELECT DISTINCT name FROM test'
  end
  specify "should accept an expression list" do
    @dataset.uniq(:a, :b).sql.should == 'SELECT DISTINCT ON (a, b) name FROM test'
    @dataset.uniq(:stamp.cast_as(:integer), :node_id).sql.should == 'SELECT DISTINCT ON (cast(stamp AS integer), node_id) name FROM test'
  end
end
# Specs: Dataset#count — COUNT(*) SQL; the anonymous class captures the last
# generated SQL in a class variable for inspection.
context "Dataset#count" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      def self.sql
        @@sql
      end
      def fetch_rows(sql)
        @@sql = sql
        yield({1 => 1})
      end
    end
    @dataset = @c.new(nil).from(:test)
  end
  specify "should format SQL properly" do
    @dataset.count.should == 1
    @c.sql.should == 'SELECT COUNT(*) FROM test'
  end
  specify "should be aliased by #size" do
    @dataset.size.should == 1
  end
  specify "should include the where clause if it's there" do
    @dataset.filter {:abc < 30}.count.should == 1
    @c.sql.should == 'SELECT COUNT(*) FROM test WHERE (abc < 30)'
  end
  specify "should count properly for datasets with fixed sql" do
    @dataset.opts[:sql] = "select abc from xyz"
    @dataset.count.should == 1
    @c.sql.should == "SELECT COUNT(*) FROM (select abc from xyz) t1"
  end
end
# Specs: Dataset#group_and_count — grouped counts ordered by the count alias.
context "Dataset#group_and_count" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      def self.sql
        @@sql
      end
      def fetch_rows(sql)
        @@sql = sql
        yield({1 => 1})
      end
    end
    @ds = @c.new(nil).from(:test)
  end
  specify "should format SQL properly" do
    @ds.group_and_count(:name).sql.should == "SELECT name, count(*) AS count FROM test GROUP BY name ORDER BY count"
  end
  specify "should accept multiple columns for grouping" do
    @ds.group_and_count(:a, :b).sql.should == "SELECT a, b, count(*) AS count FROM test GROUP BY a, b ORDER BY count"
  end
end
# Specs: Dataset#empty? — defined purely in terms of #count.
context "Dataset#empty?" do
  specify "should return true if #count == 0" do
    @c = Class.new(Sequel::Dataset) do
      def count
        0
      end
    end
    @dataset = @c.new(nil).from(:test)
    @dataset.empty?.should be_true
    @c = Class.new(Sequel::Dataset) do
      def count
        1
      end
    end
    @dataset = @c.new(nil).from(:test)
    @dataset.empty?.should be_false
  end
end
# Specs: Dataset#join_table — JOIN clause generation for all join types,
# implicit primary keys, aliasing and arbitrary ON conditions.
context "Dataset#join_table" do
  setup do
    @d = Sequel::Dataset.new(nil).from(:items)
  end
  specify "should format the JOIN clause properly" do
    @d.join_table(:left_outer, :categories, :category_id => :id).sql.should ==
      'SELECT * FROM items LEFT OUTER JOIN categories ON (categories.category_id = items.id)'
  end
  specify "should include WHERE clause if applicable" do
    @d.filter {:price < 100}.join_table(:right_outer, :categories, :category_id => :id).sql.should ==
      'SELECT * FROM items RIGHT OUTER JOIN categories ON (categories.category_id = items.id) WHERE (price < 100)'
  end
  specify "should include ORDER BY clause if applicable" do
    @d.order(:stamp).join_table(:full_outer, :categories, :category_id => :id).sql.should ==
      'SELECT * FROM items FULL OUTER JOIN categories ON (categories.category_id = items.id) ORDER BY stamp'
  end
  specify "should support multiple joins" do
    @d.join_table(:inner, :b, :items_id).join_table(:left_outer, :c, :b_id => :b__id).sql.should ==
      'SELECT * FROM items INNER JOIN b ON (b.items_id = items.id) LEFT OUTER JOIN c ON (c.b_id = b.id)'
  end
  specify "should use id as implicit relation primary key if omitted" do
    @d.join_table(:left_outer, :categories, :category_id).sql.should ==
      @d.join_table(:left_outer, :categories, :category_id => :id).sql
    # when doing multiple joins, id should be qualified using the last joined table
    @d.join_table(:right_outer, :b, :items_id).join_table(:full_outer, :c, :b_id).sql.should ==
      'SELECT * FROM items RIGHT OUTER JOIN b ON (b.items_id = items.id) FULL OUTER JOIN c ON (c.b_id = b.id)'
  end
  specify "should support left outer joins" do
    @d.join_table(:left_outer, :categories, :category_id).sql.should ==
      'SELECT * FROM items LEFT OUTER JOIN categories ON (categories.category_id = items.id)'
    @d.left_outer_join(:categories, :category_id).sql.should ==
      'SELECT * FROM items LEFT OUTER JOIN categories ON (categories.category_id = items.id)'
  end
  specify "should support right outer joins" do
    @d.join_table(:right_outer, :categories, :category_id).sql.should ==
      'SELECT * FROM items RIGHT OUTER JOIN categories ON (categories.category_id = items.id)'
    @d.right_outer_join(:categories, :category_id).sql.should ==
      'SELECT * FROM items RIGHT OUTER JOIN categories ON (categories.category_id = items.id)'
  end
  specify "should support full outer joins" do
    @d.join_table(:full_outer, :categories, :category_id).sql.should ==
      'SELECT * FROM items FULL OUTER JOIN categories ON (categories.category_id = items.id)'
    @d.full_outer_join(:categories, :category_id).sql.should ==
      'SELECT * FROM items FULL OUTER JOIN categories ON (categories.category_id = items.id)'
  end
  specify "should support inner joins" do
    @d.join_table(:inner, :categories, :category_id).sql.should ==
      'SELECT * FROM items INNER JOIN categories ON (categories.category_id = items.id)'
    @d.inner_join(:categories, :category_id).sql.should ==
      'SELECT * FROM items INNER JOIN categories ON (categories.category_id = items.id)'
  end
  specify "should default to an inner join" do
    @d.join_table(nil, :categories, :category_id).sql.should ==
      'SELECT * FROM items INNER JOIN categories ON (categories.category_id = items.id)'
    @d.join(:categories, :category_id).sql.should ==
      'SELECT * FROM items INNER JOIN categories ON (categories.category_id = items.id)'
  end
  specify "should raise if an invalid join type is specified" do
    proc {@d.join_table(:invalid, :a, :b)}.should raise_error(Sequel::Error)
  end
  specify "should support aliased tables" do
    @d.from('stats s').join('players p', :id => :player_id).sql.should ==
      'SELECT * FROM stats s INNER JOIN players p ON (p.id = s.player_id)'
    # NOTE(review): `ds` is assigned the result of the assertion chain and
    # never used — the assertion itself is what matters here.
    ds = Sequel::Dataset.new(nil).from(:foo => :f). \
      join_table(:inner, :bar, :id => :bar_id).sql.should ==
      'SELECT * FROM foo f INNER JOIN bar ON (bar.id = f.bar_id)'
  end
  specify "should allow for arbitrary conditions in the JOIN clause" do
    @d.join_table(:left_outer, :categories, :status => 0).sql.should ==
      'SELECT * FROM items LEFT OUTER JOIN categories ON (categories.status = 0)'
    @d.join_table(:left_outer, :categories, :categorizable_type => "Post").sql.should ==
      "SELECT * FROM items LEFT OUTER JOIN categories ON (categories.categorizable_type = 'Post')"
    @d.join_table(:left_outer, :categories, :timestamp => "CURRENT_TIMESTAMP".lit).sql.should ==
      "SELECT * FROM items LEFT OUTER JOIN categories ON (categories.timestamp = CURRENT_TIMESTAMP)"
    @d.join_table(:left_outer, :categories, :status => [1, 2, 3]).sql.should ==
      "SELECT * FROM items LEFT OUTER JOIN categories ON (categories.status IN (1, 2, 3))"
  end
  specify "should raise error for a table without a source" do
    proc {Sequel::Dataset.new(nil).join('players p', :id => :player_id)}. \
      should raise_error(Sequel::Error)
  end
end
# Specs: Dataset#[]= — hash-style assignment performs a filtered UPDATE.
context "Dataset#[]=" do
  setup do
    c = Class.new(Sequel::Dataset) do
      def last_sql
        @@last_sql
      end
      def update(*args)
        @@last_sql = update_sql(*args)
      end
    end
    @d = c.new(nil).from(:items)
  end
  specify "should perform an update on the specified filter" do
    @d[:a => 1] = {:x => 3}
    @d.last_sql.should == 'UPDATE items SET x = 3 WHERE (a = 1)'
  end
end
# Specs: Dataset#set — alias for #update, including block (expression) form.
context "Dataset#set" do
  setup do
    c = Class.new(Sequel::Dataset) do
      def last_sql
        @@last_sql
      end
      def update(*args, &block)
        @@last_sql = update_sql(*args, &block)
      end
    end
    @d = c.new(nil).from(:items)
  end
  specify "should act as alias to #update" do
    @d.set({:x => 3})
    @d.last_sql.should == 'UPDATE items SET x = 3'
    @d.set {:x << :x + 1}
    @d.last_sql.should == 'UPDATE items SET x = (x + 1)'
    # :x|1 is the old subscript syntax, producing x[1] in SQL.
    @d.set {(:x|1) << (:x|2) + 1}
    @d.last_sql.should == 'UPDATE items SET x[1] = (x[2] + 1)'
  end
end
# Specs: Dataset#insert_multiple — inserts each array item, optionally
# transformed through a block.
context "Dataset#insert_multiple" do
  setup do
    c = Class.new(Sequel::Dataset) do
      attr_reader :inserts
      def insert(arg)
        @inserts ||= []
        @inserts << arg
      end
    end
    @d = c.new(nil)
  end
  specify "should insert all items in the supplied array" do
    @d.insert_multiple [:aa, 5, 3, {1 => 2}]
    @d.inserts.should == [:aa, 5, 3, {1 => 2}]
  end
  specify "should pass array items through the supplied block if given" do
    a = ["inevitable", "hello", "the ticking clock"]
    @d.insert_multiple(a) {|i| i.gsub('l', 'r')}
    @d.inserts.should == ["inevitabre", "herro", "the ticking crock"]
  end
end
# Specs: aggregate helpers (#min/#max/#sum/#avg) — the fixture echoes the
# generated SQL back as the aggregate's value.
context "Dataset aggregate methods" do
  setup do
    c = Class.new(Sequel::Dataset) do
      def fetch_rows(sql)
        yield({1 => sql})
      end
    end
    @d = c.new(nil).from(:test)
  end
  specify "should include min" do
    @d.min(:a).should == 'SELECT min(a) AS v FROM test'
  end
  specify "should include max" do
    @d.max(:b).should == 'SELECT max(b) AS v FROM test'
  end
  specify "should include sum" do
    @d.sum(:c).should == 'SELECT sum(c) AS v FROM test'
  end
  specify "should include avg" do
    @d.avg(:d).should == 'SELECT avg(d) AS v FROM test'
  end
  specify "should accept qualified columns" do
    @d.avg(:test__bc).should == 'SELECT avg(test.bc) AS v FROM test'
  end
end
# Specs: Dataset#range — min/max of a column returned as a Ruby Range.
context "Dataset#range" do
  setup do
    c = Class.new(Sequel::Dataset) do
      @@sql = nil
      def last_sql; @@sql; end
      def fetch_rows(sql)
        @@sql = sql
        yield(:v1 => 1, :v2 => 10)
      end
    end
    @d = c.new(nil).from(:test)
  end
  specify "should generate a correct SQL statement" do
    @d.range(:stamp)
    @d.last_sql.should == "SELECT min(stamp) AS v1, max(stamp) AS v2 FROM test LIMIT 1"
    @d.filter {:price > 100}.range(:stamp)
    @d.last_sql.should == "SELECT min(stamp) AS v1, max(stamp) AS v2 FROM test WHERE (price > 100) LIMIT 1"
  end
  specify "should return a range object" do
    @d.range(:tryme).should == (1..10)
    @d.last_sql.should == "SELECT min(tryme) AS v1, max(tryme) AS v2 FROM test LIMIT 1"
  end
end
# Specs: Dataset#interval — (max - min) of a column as a single value.
# FIX: this context was mislabeled "Dataset#range" (copy-paste from the
# previous context) even though it exercises #interval, and its last spec's
# description claimed "a range object" while asserting an integer.
context "Dataset#interval" do
  setup do
    c = Class.new(Sequel::Dataset) do
      @@sql = nil
      def last_sql; @@sql; end
      def fetch_rows(sql)
        @@sql = sql
        yield(:v => 1234)
      end
    end
    @d = c.new(nil).from(:test)
  end
  specify "should generate a correct SQL statement" do
    @d.interval(:stamp)
    @d.last_sql.should == "SELECT (max(stamp) - min(stamp)) AS v FROM test LIMIT 1"
    @d.filter {:price > 100}.interval(:stamp)
    @d.last_sql.should == "SELECT (max(stamp) - min(stamp)) AS v FROM test WHERE (price > 100) LIMIT 1"
  end
  specify "should return an integer value" do
    @d.interval(:tryme).should == 1234
    @d.last_sql.should == "SELECT (max(tryme) - min(tryme)) AS v FROM test LIMIT 1"
  end
end
# Specs: Dataset#first — single-record fetch with optional filter/limit; the
# fixture records the opts passed to #single_record / the dataset used by #all.
context "Dataset#first" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      @@last_dataset = nil
      @@last_opts = nil
      def self.last_dataset
        @@last_dataset
      end
      def self.last_opts
        @@last_opts
      end
      def single_record(opts = nil)
        @@last_opts = @opts.merge(opts || {})
        {:a => 1, :b => 2}
      end
      def all
        @@last_dataset = self
        [{:a => 1, :b => 2}] * @opts[:limit]
      end
    end
    @d = @c.new(nil).from(:test)
  end
  specify "should return the first matching record if a hash is specified" do
    @d.first(:z => 26).should == {:a => 1, :b => 2}
    @c.last_opts[:where].should == ('(z = 26)')
    @d.first('z = ?', 15)
    @c.last_opts[:where].should == ('z = 15')
  end
  specify "should return the first matching record if a block is given" do
    @d.first {:z > 26}.should == {:a => 1, :b => 2}
    @c.last_opts[:where].should == ('(z > 26)')
  end
  specify "should return a single record if no argument is given" do
    @d.first.should == {:a => 1, :b => 2}
  end
  specify "should set the limit according to the given number" do
    @d.first
    @c.last_opts[:limit].should == 1
    i = rand(10) + 10
    @d.first(i)
    @c.last_dataset.opts[:limit].should == i
  end
  specify "should return an array with the records if argument is greater than 1" do
    i = rand(10) + 10
    r = @d.first(i)
    r.should be_a_kind_of(Array)
    r.size.should == i
    r.each {|row| row.should == {:a => 1, :b => 2}}
  end
end
# Specs: Dataset#last — requires an explicit order, inverts it, then fetches.
context "Dataset#last" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      @@last_dataset = nil
      def self.last_dataset
        @@last_dataset
      end
      def single_record(opts = nil)
        @@last_dataset = clone_merge(opts) if opts
        {:a => 1, :b => 2}
      end
      def all
        @@last_dataset = self
        [{:a => 1, :b => 2}] * @opts[:limit]
      end
    end
    @d = @c.new(nil).from(:test)
  end
  specify "should raise if no order is given" do
    proc {@d.last}.should raise_error(Sequel::Error)
    proc {@d.last(2)}.should raise_error(Sequel::Error)
    proc {@d.order(:a).last}.should_not raise_error
    proc {@d.order(:a).last(2)}.should_not raise_error
  end
  specify "should invert the order" do
    @d.order(:a).last
    @d.literal(@c.last_dataset.opts[:order]).should == @d.literal([:a.DESC])
    @d.order(:b.DESC).last
    @d.literal(@c.last_dataset.opts[:order]).should == @d.literal(:b)
    @d.order(:c, :d).last
    @d.literal(@c.last_dataset.opts[:order]).should == @d.literal([:c.DESC, :d.DESC])
    @d.order(:e.DESC, :f).last
    @d.literal(@c.last_dataset.opts[:order]).should == @d.literal([:e, :f.DESC])
  end
  # FIX: description said "first matching record" (copy-paste from the
  # Dataset#first specs); #last returns the last matching record.
  specify "should return the last matching record if a hash is specified" do
    @d.order(:a).last(:z => 26).should == {:a => 1, :b => 2}
    @c.last_dataset.opts[:where].should == ('(z = 26)')
    @d.order(:a).last('z = ?', 15)
    @c.last_dataset.opts[:where].should == ('z = 15')
  end
  specify "should return a single record if no argument is given" do
    @d.order(:a).last.should == {:a => 1, :b => 2}
  end
  specify "should set the limit according to the given number" do
    i = rand(10) + 10
    r = @d.order(:a).last(i)
    @c.last_dataset.opts[:limit].should == i
  end
  specify "should return an array with the records if argument is greater than 1" do
    i = rand(10) + 10
    r = @d.order(:a).last(i)
    r.should be_a_kind_of(Array)
    r.size.should == i
    r.each {|row| row.should == {:a => 1, :b => 2}}
  end
end
# Specs: compound set operations — UNION/INTERSECT/EXCEPT with optional ALL.
context "Dataset set operations" do
  setup do
    @a = Sequel::Dataset.new(nil).from(:a).filter(:z => 1)
    @b = Sequel::Dataset.new(nil).from(:b).filter(:z => 2)
  end
  specify "should support UNION and UNION ALL" do
    @a.union(@b).sql.should == \
      "SELECT * FROM a WHERE (z = 1) UNION SELECT * FROM b WHERE (z = 2)"
    @b.union(@a, true).sql.should == \
      "SELECT * FROM b WHERE (z = 2) UNION ALL SELECT * FROM a WHERE (z = 1)"
  end
  specify "should support INTERSECT and INTERSECT ALL" do
    @a.intersect(@b).sql.should == \
      "SELECT * FROM a WHERE (z = 1) INTERSECT SELECT * FROM b WHERE (z = 2)"
    @b.intersect(@a, true).sql.should == \
      "SELECT * FROM b WHERE (z = 2) INTERSECT ALL SELECT * FROM a WHERE (z = 1)"
  end
  specify "should support EXCEPT and EXCEPT ALL" do
    @a.except(@b).sql.should == \
      "SELECT * FROM a WHERE (z = 1) EXCEPT SELECT * FROM b WHERE (z = 2)"
    @b.except(@a, true).sql.should == \
      "SELECT * FROM b WHERE (z = 2) EXCEPT ALL SELECT * FROM a WHERE (z = 1)"
  end
end
# Specs: Dataset#[] — filtered single-record lookup; the fixture records the
# dataset (with merged opts) that #single_record was invoked on.
context "Dataset#[]" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      @@last_dataset = nil
      def self.last_dataset
        @@last_dataset
      end
      def single_record(opts = nil)
        @@last_dataset = opts ? clone_merge(opts) : self
        {1 => 2, 3 => 4}
      end
    end
    @d = @c.new(nil).from(:test)
  end
  specify "should return a single record filtered according to the given conditions" do
    @d[:name => 'didi'].should == {1 => 2, 3 => 4}
    @c.last_dataset.opts[:where].should == "(name = 'didi')"
    @d[:id => 5..45].should == {1 => 2, 3 => 4}
    @c.last_dataset.opts[:where].should == "(id >= 5 AND id <= 45)"
  end
end
# Specs: Dataset#single_record — first yielded row, or nil if none.
context "Dataset#single_record" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      def fetch_rows(sql)
        yield sql
      end
    end
    # @cc yields nothing, simulating an empty result set.
    @cc = Class.new(@c) do
      def fetch_rows(sql); end
    end
    @d = @c.new(nil).from(:test)
    @e = @cc.new(nil).from(:test)
  end
  specify "should call each and return the first record" do
    @d.single_record.should == 'SELECT * FROM test'
  end
  specify "should pass opts to each" do
    @d.single_record(:limit => 3).should == 'SELECT * FROM test LIMIT 3'
  end
  specify "should return nil if no record is present" do
    @e.single_record.should be_nil
  end
end
# Specs: Dataset#single_value — first value of the first row, or nil.
context "Dataset#single_value" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      def fetch_rows(sql)
        yield({1 => sql})
      end
    end
    @cc = Class.new(@c) do
      def fetch_rows(sql); end
    end
    @d = @c.new(nil).from(:test)
    @e = @cc.new(nil).from(:test)
  end
  specify "should call each and return the first value of the first record" do
    @d.single_value.should == 'SELECT * FROM test'
  end
  specify "should pass opts to each" do
    @d.single_value(:limit => 3).should == 'SELECT * FROM test LIMIT 3'
  end
  specify "should return nil" do
    @e.single_value.should be_nil
  end
end
# Specs: Dataset#set_row_proc — a row filter applied to every fetched row,
# preserved across clones.
context "Dataset#set_row_proc" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      def fetch_rows(sql, &block)
        # yield a hash with kind as the 1 bit of a number
        (1..10).each {|i| block.call({:kind => i[0]})}
      end
    end
    @dataset = @c.new(nil).from(:items)
  end
  specify "should cause dataset to pass all rows through the filter" do
    @dataset.set_row_proc {|h| h[:der] = h[:kind] + 2; h}
    rows = @dataset.all
    rows.size.should == 10
    rows.each {|r| r[:der].should == (r[:kind] + 2)}
  end
  specify "should be copied over when dataset is cloned" do
    @dataset.set_row_proc {|h| h[:der] = h[:kind] + 2; h}
    @dataset.filter(:a => 1).first.should == {:kind => 1, :der => 3}
  end
end
# Specs: Dataset#set_model — wrapping fetched rows in model instances,
# including polymorphic instantiation keyed on a row value.
context "Dataset#set_model" do
  setup do
    @c = Class.new(Sequel::Dataset) do
      def fetch_rows(sql, &block)
        # yield a hash with kind as the 1 bit of a number
        (1..10).each {|i| block.call({:kind => i[0]})}
      end
    end
    @dataset = @c.new(nil).from(:items)
    @m = Class.new do
      attr_accessor :c, :args
      def initialize(c, *args); @c = c; @args = args; end
      # BUG FIX: the original used assignment (@args = o.args) instead of
      # comparison, so == never actually compared the extra args — any
      # truthy args made the conjunction pass (and mutated @args).
      def ==(o); (@c == o.c) && (@args == o.args); end
    end
  end
  specify "should clear the models hash and restore the stock #each if nil is specified" do
    @dataset.set_model(@m)
    @dataset.set_model(nil)
    @dataset.first.should == {:kind => 1}
    @dataset.model_classes.should be_nil
  end
  specify "should clear the models hash and restore the stock #each if nothing is specified" do
    @dataset.set_model(@m)
    # FIX: exercise the no-argument form this spec describes; previously
    # this duplicated the nil-argument spec above by passing nil.
    @dataset.set_model
    @dataset.first.should == {:kind => 1}
    @dataset.model_classes.should be_nil
  end
  specify "should alter #each to provide model instances" do
    @dataset.first.should == {:kind => 1}
    @dataset.set_model(@m)
    @dataset.first.should == @m.new({:kind => 1})
  end
  specify "should extend the dataset with a #destroy method" do
    @dataset.should_not respond_to(:destroy)
    @dataset.set_model(@m)
    @dataset.should respond_to(:destroy)
  end
  specify "should set opts[:naked] to nil" do
    @dataset.opts[:naked] = true
    @dataset.set_model(@m)
    @dataset.opts[:naked].should be_nil
  end
  specify "should send additional arguments to the models' initialize method" do
    @dataset.set_model(@m, 7, 6, 5)
    @dataset.first.should == @m.new({:kind => 1}, 7, 6, 5)
  end
  specify "should provide support for polymorphic model instantiation" do
    @m1 = Class.new(@m)
    @m2 = Class.new(@m)
    @dataset.set_model(:kind, 0 => @m1, 1 => @m2)
    @dataset.opts[:polymorphic_key].should == :kind
    all = @dataset.all
    all[0].class.should == @m2
    all[1].class.should == @m1
    all[2].class.should == @m2
    all[3].class.should == @m1
    #...
    # denude model
    @dataset.set_model(nil)
    @dataset.first.should == {:kind => 1}
  end
  specify "should send additional arguments for polymorphic models as well" do
    @m1 = Class.new(@m)
    @m2 = Class.new(@m)
    @dataset.set_model(:kind, {0 => @m1, 1 => @m2}, :hey => :wow)
    all = @dataset.all
    all[0].class.should == @m2; all[0].args.should == [{:hey => :wow}]
    all[1].class.should == @m1; all[1].args.should == [{:hey => :wow}]
    all[2].class.should == @m2; all[2].args.should == [{:hey => :wow}]
    all[3].class.should == @m1; all[3].args.should == [{:hey => :wow}]
  end
  specify "should raise for invalid parameters" do
    proc {@dataset.set_model('kind')}.should raise_error(ArgumentError)
    proc {@dataset.set_model(0)}.should raise_error(ArgumentError)
    proc {@dataset.set_model(:kind)}.should raise_error(ArgumentError) # no hash given
  end
end
# Dataset#model_classes: nil for naked datasets, {nil => klass} after
# set_model(klass), and the discriminator hash after polymorphic set_model.
context "Dataset#model_classes" do
setup do
@c = Class.new(Sequel::Dataset) do
# # We don't need that for now
# def fetch_rows(sql, &block)
# (1..10).each(&block)
# end
end
@dataset = @c.new(nil).from(:items)
# Minimal stand-in model: wraps the row and compares by wrapped value.
@m = Class.new do
attr_accessor :c
def initialize(c); @c = c; end
def ==(o); @c == o.c; end
end
end
specify "should return nil for a naked dataset" do
@dataset.model_classes.should == nil
end
specify "should return a {nil => model_class} hash for a model dataset" do
@dataset.set_model(@m)
@dataset.model_classes.should == {nil => @m}
end
specify "should return the polymorphic hash for a polymorphic model dataset" do
@m1 = Class.new(@m)
@m2 = Class.new(@m)
@dataset.set_model(:key, 0 => @m1, 1 => @m2)
@dataset.model_classes.should == {0 => @m1, 1 => @m2}
end
end
# Dataset#polymorphic_key: nil until set_model is given a discriminator
# column symbol, which is then returned as-is.
context "Dataset#polymorphic_key" do
setup do
@c = Class.new(Sequel::Dataset) do
# # We don't need this for now
# def fetch_rows(sql, &block)
# (1..10).each(&block)
# end
end
@dataset = @c.new(nil).from(:items)
@m = Class.new do
attr_accessor :c
def initialize(c); @c = c; end
def ==(o); @c == o.c; end
end
end
specify "should return nil for a naked dataset" do
@dataset.polymorphic_key.should be_nil
end
specify "should return the polymorphic key" do
@dataset.set_model(:id, nil => @m)
@dataset.polymorphic_key.should == :id
end
end
# With a model set, each yields wrapped model instances; the :naked option
# bypasses wrapping and yields the raw fetched values.
context "A model dataset" do
setup do
@c = Class.new(Sequel::Dataset) do
def fetch_rows(sql, &block)
(1..10).each(&block)
end
end
@dataset = @c.new(nil).from(:items)
@m = Class.new do
attr_accessor :c
def initialize(c); @c = c; end
def ==(o); @c == o.c; end
end
@dataset.set_model(@m)
end
specify "should supply naked records if the naked option is specified" do
@dataset.each {|r| r.class.should == @m}
# NOTE(review): Fixnum only exists on Ruby < 2.4 (Integer thereafter) —
# this spec targets the old runtime.
@dataset.each(:naked => true) {|r| r.class.should == Fixnum}
end
end
# Polymorphic datasets: rows are {:bit => 0|1}; the discriminator value
# selects the model class, nil key acts as the default mapping.
context "A polymorphic model dataset" do
setup do
@c = Class.new(Sequel::Dataset) do
def fetch_rows(sql, &block)
# i[0] is Integer#[] (bit access): alternates 1,0,1,0,... for 1..10.
(1..10).each {|i| block.call(:bit => i[0])}
end
end
@dataset = @c.new(nil).from(:items)
@m = Class.new do
attr_accessor :c
def initialize(c); @c = c; end
def ==(o); @c == o.c; end
end
end
specify "should use a nil key in the polymorphic hash to specify the default model class" do
@m2 = Class.new(@m)
@dataset.set_model(:bit, nil => @m, 1 => @m2)
all = @dataset.all
all[0].class.should == @m2
all[1].class.should == @m
all[2].class.should == @m2
all[3].class.should == @m
#...
end
specify "should raise Sequel::Error if no suitable class is found in the polymorphic hash" do
@m2 = Class.new(@m)
@dataset.set_model(:bit, 1 => @m2)
proc {@dataset.all}.should raise_error(Sequel::Error)
end
specify "should supply naked records if the naked option is specified" do
@dataset.set_model(:bit, nil => @m)
@dataset.each(:naked => true) {|r| r.class.should == Hash}
end
end
# Model instantiation rules: the row hash is passed to .new, unless the
# model class responds to .load, which is then preferred (also for
# polymorphic mappings).
context "A dataset with associated model class(es)" do
setup do
@c = Class.new(Sequel::Dataset) do
def fetch_rows(sql, &block)
block.call({:x => 1, :y => 2})
end
end
@dataset = @c.new(nil).from(:items)
# Plain model: row arrives via #initialize.
@m1 = Class.new do
attr_accessor :v
def initialize(v); @v = v; end
end
# Model with a .load constructor: row arrives via .load, not #initialize.
@m2 = Class.new do
attr_accessor :v, :vv
def initialize(v = nil); @v = v; end
def self.load(v); o = new(nil); o.vv = v; o; end
end
@m3 = Class.new(@m2)
end
specify "should instantiate an instance by passing the record hash as argument" do
@dataset.set_model(@m1)
o = @dataset.first
o.class.should == @m1
o.v.should == {:x => 1, :y => 2}
end
specify "should use the .load constructor if available" do
@dataset.set_model(@m2)
o = @dataset.first
o.class.should == @m2
o.v.should == nil
o.vv.should == {:x => 1, :y => 2}
end
specify "should use the .load constructor also for polymorphic datasets" do
@dataset.set_model(:y, 1 => @m2, 2 => @m3)
o = @dataset.first
o.class.should == @m3
o.v.should == nil
o.vv.should == {:x => 1, :y => 2}
end
end
# Dataset#destroy: wraps per-record #destroy calls in a DB transaction and
# returns the number of destroyed records; raises on naked datasets.
# NOTE(review): uses globals ($DESTROYED, $MODELS) to observe side effects —
# they leak across specs but are reset in setup.
context "Dataset#destroy" do
setup do
db = Object.new
# Stub transaction support on the fake db object.
m = Module.new do
def transaction; yield; end
end
db.extend(m)
$DESTROYED = []
@m = Class.new do
def initialize(c)
@c = c
end
attr_accessor :c
def ==(o)
@c == o.c
end
def destroy
$DESTROYED << self
end
end
$MODELS = [@m.new(12), @m.new(13)]
c = Class.new(Sequel::Dataset) do
def fetch_rows(sql, &block)
(12..13).each(&block)
end
end
@d = c.new(db).from(:test)
@d.set_model(@m)
end
specify "should call destroy for every model instance in the dataset" do
count = @d.destroy
count.should == 2
$DESTROYED.should == $MODELS
end
specify "should raise error if no models are associated with the dataset" do
proc {@d.naked.destroy}.should raise_error(Sequel::Error)
end
end
# Dataset#<< is sugar for #insert and returns whatever #insert returns.
context "Dataset#<<" do
setup do
@d = Sequel::Dataset.new(nil)
@d.meta_def(:insert) do
1234567890
end
end
specify "should call #insert" do
(@d << {:name => 1}).should == 1234567890
end
end
# Pagination math against a stubbed count of 153 records, 20 per page:
# limit/offset, page_count, current/next/prev page, ranges and counts,
# plus paginating over a fixed-SQL dataset (wrapped in a subquery).
context "A paginated dataset" do
setup do
@d = Sequel::Dataset.new(nil)
@d.meta_def(:count) {153}
@paginated = @d.paginate(1, 20)
end
specify "should set the limit and offset options correctly" do
@paginated.opts[:limit].should == 20
@paginated.opts[:offset].should == 0
end
specify "should set the page count correctly" do
@paginated.page_count.should == 8
@d.paginate(1, 50).page_count.should == 4
end
specify "should set the current page number correctly" do
@paginated.current_page.should == 1
@d.paginate(3, 50).current_page.should == 3
end
specify "should return the next page number or nil if we're on the last" do
@paginated.next_page.should == 2
@d.paginate(4, 50).next_page.should be_nil
end
specify "should return the previous page number or nil if we're on the last" do
@paginated.prev_page.should be_nil
@d.paginate(4, 50).prev_page.should == 3
end
specify "should return the page range" do
@paginated.page_range.should == (1..8)
@d.paginate(4, 50).page_range.should == (1..4)
end
specify "should return the record range for the current page" do
@paginated.current_page_record_range.should == (1..20)
@d.paginate(4, 50).current_page_record_range.should == (151..153)
# Past the last page the range collapses to (0..0).
@d.paginate(5, 50).current_page_record_range.should == (0..0)
end
specify "should return the record count for the current page" do
@paginated.current_page_record_count.should == 20
@d.paginate(3, 50).current_page_record_count.should == 50
@d.paginate(4, 50).current_page_record_count.should == 3
@d.paginate(5, 50).current_page_record_count.should == 0
end
specify "should work with fixed sql" do
ds = @d.clone_merge(:sql => 'select * from blah')
ds.meta_def(:count) {150}
ds.paginate(2, 50).sql.should == 'SELECT * FROM (select * from blah) t1 LIMIT 50 OFFSET 50'
end
end
# Dataset#columns: returns the cached @columns; if nil, triggers #first
# (stubbed here to record the generated SQL) to populate it, and does not
# re-fetch once populated even if the dataset's FROM changes.
context "Dataset#columns" do
setup do
@dataset = DummyDataset.new(nil).from(:items)
@dataset.meta_def(:columns=) {|c| @columns = c}
@dataset.meta_def(:first) {@columns = select_sql(nil)}
end
specify "should return the value of @columns" do
@dataset.columns = [:a, :b, :c]
@dataset.columns.should == [:a, :b, :c]
end
specify "should call first if @columns is nil" do
@dataset.columns = nil
@dataset.columns.should == 'SELECT * FROM items'
@dataset.opts[:from] = [:nana]
# Cached value is reused; the new FROM clause is not reflected.
@dataset.columns.should == 'SELECT * FROM items'
end
end
require 'stringio'
# Dataset#print: renders an ASCII table to $stdout. $stdout is swapped for
# a StringIO in setup and restored in teardown.
context "Dataset#print" do
setup do
@output = StringIO.new
@orig_stdout = $stdout
$stdout = @output
@dataset = DummyDataset.new(nil).from(:items)
end
teardown do
$stdout = @orig_stdout
end
specify "should print out a table with the values" do
@dataset.print(:a, :b)
@output.rewind
@output.read.should == \
"+-+-+\n|a|b|\n+-+-+\n|1|2|\n|3|4|\n|5|6|\n+-+-+\n"
end
specify "should default to the dataset's columns" do
@dataset.meta_def(:columns) {[:a, :b]}
@dataset.print
@output.rewind
@output.read.should == \
"+-+-+\n|a|b|\n+-+-+\n|1|2|\n|3|4|\n|5|6|\n+-+-+\n"
end
end
# Dataset#multi_insert: issues one INSERT per record inside a transaction;
# :commit_every splits the inserts into multiple transactions.
context "Dataset#multi_insert" do
setup do
# Fake DB that records executed SQL and transaction boundaries.
@dbc = Class.new do
attr_reader :sqls
def execute(sql)
@sqls ||= []
@sqls << sql
end
def transaction
@sqls ||= []
@sqls << 'BEGIN'
yield
@sqls << 'COMMIT'
end
end
@db = @dbc.new
@ds = Sequel::Dataset.new(@db).from(:items)
@list = [{:name => 'abc'}, {:name => 'def'}, {:name => 'ghi'}]
end
specify "should join all inserts into a single SQL string" do
@ds.multi_insert(@list)
@db.sqls.should == [
'BEGIN',
"INSERT INTO items (name) VALUES ('abc')",
"INSERT INTO items (name) VALUES ('def')",
"INSERT INTO items (name) VALUES ('ghi')",
'COMMIT'
]
end
specify "should accept the commit_every option for committing every x records" do
@ds.multi_insert(@list, :commit_every => 2)
@db.sqls.should == [
'BEGIN',
"INSERT INTO items (name) VALUES ('abc')",
"INSERT INTO items (name) VALUES ('def')",
'COMMIT',
'BEGIN',
"INSERT INTO items (name) VALUES ('ghi')",
'COMMIT'
]
end
end
# Dataset#query: block-based DSL where chainable methods (from/select/
# where/group_by/having/order_by) build up a new dataset; non-chainable
# or executing methods (count/each/insert/update/delete) must raise.
context "Dataset#query" do
setup do
@d = Sequel::Dataset.new(nil)
end
specify "should support #from" do
q = @d.query {from :xxx}
q.class.should == @d.class
q.sql.should == "SELECT * FROM xxx"
end
specify "should support #select" do
q = @d.query do
select :a, :b___mongo
from :yyy
end
q.class.should == @d.class
q.sql.should == "SELECT a, b AS mongo FROM yyy"
end
specify "should support #where" do
q = @d.query do
from :zzz
where {:x + 2 > :y + 3}
end
q.class.should == @d.class
q.sql.should == "SELECT * FROM zzz WHERE ((x + 2) > (y + 3))"
q = @d.from(:zzz).query do
where {:x > 1 && :y > 2}
end
q.class.should == @d.class
q.sql.should == "SELECT * FROM zzz WHERE ((x > 1) AND (y > 2))"
q = @d.from(:zzz).query do
where :x => 33
end
q.class.should == @d.class
q.sql.should == "SELECT * FROM zzz WHERE (x = 33)"
end
specify "should support #group_by and #having" do
q = @d.query do
from :abc
group_by :id
having {:x >= 2}
end
q.class.should == @d.class
q.sql.should == "SELECT * FROM abc GROUP BY id HAVING (x >= 2)"
end
specify "should support #order, #order_by" do
q = @d.query do
from :xyz
order_by :stamp
end
q.class.should == @d.class
q.sql.should == "SELECT * FROM xyz ORDER BY stamp"
end
specify "should raise on non-chainable method calls" do
proc {@d.query {count}}.should raise_error(Sequel::Error)
end
specify "should raise on each, insert, update, delete" do
proc {@d.query {each}}.should raise_error(Sequel::Error)
proc {@d.query {insert(:x => 1)}}.should raise_error(Sequel::Error)
proc {@d.query {update(:x => 1)}}.should raise_error(Sequel::Error)
proc {@d.query {delete}}.should raise_error(Sequel::Error)
end
end
# Self-mutating bang variants (select!/from!/order!/filter!) modify the
# receiver in place and are chainable; bang forms of methods that don't
# return a dataset, and unknown methods, raise NameError.
context "Dataset" do
setup do
@d = Sequel::Dataset.new(nil).from(:x)
end
specify "should support self-changing select!" do
@d.select!(:y)
@d.sql.should == "SELECT y FROM x"
end
specify "should support self-changing from!" do
@d.from!(:y)
@d.sql.should == "SELECT * FROM y"
end
specify "should support self-changing order!" do
@d.order!(:y)
@d.sql.should == "SELECT * FROM x ORDER BY y"
end
specify "should support self-changing filter!" do
@d.filter!(:y => 1)
@d.sql.should == "SELECT * FROM x WHERE (y = 1)"
end
specify "should support self-changing filter! with block" do
@d.filter! {:y == 2}
@d.sql.should == "SELECT * FROM x WHERE (y = 2)"
end
specify "should raise for ! methods that don't return a dataset" do
proc {@d.opts!}.should raise_error(NameError)
end
specify "should raise for missing methods" do
proc {@d.xuyz}.should raise_error(NameError)
proc {@d.xyz!}.should raise_error(NameError)
proc {@d.xyz?}.should raise_error(NameError)
end
specify "should support chaining of bang methods" do
@d.order!(:y)
@d.filter!(:y => 1)
@d.sql.should == "SELECT * FROM x WHERE (y = 1) ORDER BY y"
end
end
# Dataset#transform with an explicit [load_proc, dump_proc] pair: values
# are decoded on fetch, encoded on insert/update, the transform survives
# cloning, and composes with set_row_proc (:naked skips the row proc but
# still applies the transform).
context "Dataset#transform" do
setup do
@c = Class.new(Sequel::Dataset) do
attr_accessor :raw
attr_accessor :sql
def fetch_rows(sql, &block)
block[@raw]
end
def insert(v)
@sql = insert_sql(v)
end
def update(v)
@sql = update_sql(v)
end
end
@ds = @c.new(nil).from(:items)
# :x is stored marshalled; first proc decodes, second encodes.
@ds.transform(:x => [
proc {|v| Marshal.load(v)},
proc {|v| Marshal.dump(v)}
])
end
specify "should change the dataset to transform values loaded from the database" do
@ds.raw = {:x => Marshal.dump([1, 2, 3]), :y => 'hello'}
@ds.first.should == {:x => [1, 2, 3], :y => 'hello'}
@ds.raw = {:x => Marshal.dump([1, 2, 3]), :y => 'hello'}
@ds.all.should == [{:x => [1, 2, 3], :y => 'hello'}]
end
specify "should change the dataset to transform values saved to the database" do
@ds.insert(:x => :toast)
@ds.sql.should == "INSERT INTO items (x) VALUES ('#{Marshal.dump(:toast)}')"
@ds.insert(:y => 'butter')
@ds.sql.should == "INSERT INTO items (y) VALUES ('butter')"
@ds.update(:x => ['dream'])
@ds.sql.should == "UPDATE items SET x = '#{Marshal.dump(['dream'])}'"
end
specify "should be transferred to cloned datasets" do
@ds2 = @ds.filter(:a => 1)
@ds2.raw = {:x => Marshal.dump([1, 2, 3]), :y => 'hello'}
@ds2.first.should == {:x => [1, 2, 3], :y => 'hello'}
@ds2.insert(:x => :toast)
@ds2.sql.should == "INSERT INTO items (x) VALUES ('#{Marshal.dump(:toast)}')"
end
specify "should work correctly together with set_row_proc" do
@ds.set_row_proc {|r| r[:z] = r[:x] * 2; r}
@ds.raw = {:x => Marshal.dump("wow"), :y => 'hello'}
@ds.first.should == {:x => "wow", :y => 'hello', :z => "wowwow"}
f = nil
@ds.raw = {:x => Marshal.dump("wow"), :y => 'hello'}
@ds.each(:naked => true) {|r| f = r}
f.should == {:x => "wow", :y => 'hello'}
end
end
# Dataset#transform, second context (same name as the previous one —
# intentional in this legacy suite): argument validation, the stock :yaml
# and :marshal transforms, and the self-return contract.
context "Dataset#transform" do
setup do
@c = Class.new(Sequel::Dataset) do
attr_accessor :raw
attr_accessor :sql
def fetch_rows(sql, &block)
block[@raw]
end
def insert(v)
@sql = insert_sql(v)
end
def update(v)
@sql = update_sql(v)
end
end
@ds = @c.new(nil).from(:items)
end
specify "should raise Sequel::Error for invalid transformations" do
proc {@ds.transform(:x => 'mau')}.should raise_error(Sequel::Error::InvalidTransform)
proc {@ds.transform(:x => :mau)}.should raise_error(Sequel::Error::InvalidTransform)
proc {@ds.transform(:x => [])}.should raise_error(Sequel::Error::InvalidTransform)
proc {@ds.transform(:x => ['mau'])}.should raise_error(Sequel::Error::InvalidTransform)
proc {@ds.transform(:x => [proc {|v|}, proc {|v|}])}.should_not raise_error(Sequel::Error::InvalidTransform)
end
specify "should support stock YAML transformation" do
@ds.transform(:x => :yaml)
@ds.raw = {:x => [1, 2, 3].to_yaml, :y => 'hello'}
@ds.first.should == {:x => [1, 2, 3], :y => 'hello'}
@ds.insert(:x => :toast)
@ds.sql.should == "INSERT INTO items (x) VALUES ('#{:toast.to_yaml}')"
@ds.insert(:y => 'butter')
@ds.sql.should == "INSERT INTO items (y) VALUES ('butter')"
@ds.update(:x => ['dream'])
@ds.sql.should == "UPDATE items SET x = '#{['dream'].to_yaml}'"
@ds2 = @ds.filter(:a => 1)
@ds2.raw = {:x => [1, 2, 3].to_yaml, :y => 'hello'}
@ds2.first.should == {:x => [1, 2, 3], :y => 'hello'}
@ds2.insert(:x => :toast)
@ds2.sql.should == "INSERT INTO items (x) VALUES ('#{:toast.to_yaml}')"
@ds.set_row_proc {|r| r[:z] = r[:x] * 2; r}
@ds.raw = {:x => "wow".to_yaml, :y => 'hello'}
@ds.first.should == {:x => "wow", :y => 'hello', :z => "wowwow"}
f = nil
@ds.raw = {:x => "wow".to_yaml, :y => 'hello'}
@ds.each(:naked => true) {|r| f = r}
f.should == {:x => "wow", :y => 'hello'}
end
specify "should support stock Marshal transformation" do
@ds.transform(:x => :marshal)
@ds.raw = {:x => Marshal.dump([1, 2, 3]), :y => 'hello'}
@ds.first.should == {:x => [1, 2, 3], :y => 'hello'}
@ds.insert(:x => :toast)
@ds.sql.should == "INSERT INTO items (x) VALUES ('#{Marshal.dump(:toast)}')"
@ds.insert(:y => 'butter')
@ds.sql.should == "INSERT INTO items (y) VALUES ('butter')"
@ds.update(:x => ['dream'])
@ds.sql.should == "UPDATE items SET x = '#{Marshal.dump(['dream'])}'"
@ds2 = @ds.filter(:a => 1)
@ds2.raw = {:x => Marshal.dump([1, 2, 3]), :y => 'hello'}
@ds2.first.should == {:x => [1, 2, 3], :y => 'hello'}
@ds2.insert(:x => :toast)
@ds2.sql.should == "INSERT INTO items (x) VALUES ('#{Marshal.dump(:toast)}')"
@ds.set_row_proc {|r| r[:z] = r[:x] * 2; r}
@ds.raw = {:x => Marshal.dump("wow"), :y => 'hello'}
@ds.first.should == {:x => "wow", :y => 'hello', :z => "wowwow"}
f = nil
@ds.raw = {:x => Marshal.dump("wow"), :y => 'hello'}
@ds.each(:naked => true) {|r| f = r}
f.should == {:x => "wow", :y => 'hello'}
end
specify "should return self" do
@ds.transform(:x => :marshal).should be(@ds)
end
end
# Dataset#to_csv: CRLF-terminated rows, optional header line.
# NOTE(review): `r.keys = @columns` relies on the legacy Sequel
# array-with-keys row representation (Array#keys=), not plain Hash rows.
context "Dataset#to_csv" do
setup do
@c = Class.new(Sequel::Dataset) do
attr_accessor :data
attr_accessor :cols
def fetch_rows(sql, &block)
@columns = @cols
@data.each {|r| r.keys = @columns; block[r]}
end
# naked should return self here because to_csv wants a naked result set.
def naked
self
end
end
@ds = @c.new(nil).from(:items)
@ds.cols = [:a, :b, :c]
@ds.data = [
[1, 2, 3], [4, 5, 6], [7, 8, 9]
]
end
specify "should format a CSV representation of the records" do
@ds.to_csv.should ==
"a, b, c\r\n1, 2, 3\r\n4, 5, 6\r\n7, 8, 9\r\n"
end
specify "should exclude column titles if so specified" do
@ds.to_csv(false).should ==
"1, 2, 3\r\n4, 5, 6\r\n7, 8, 9\r\n"
end
end
# Dataset#each_hash: converts legacy array-with-keys rows into plain
# Hashes before yielding.
context "Dataset#each_hash" do
setup do
@c = Class.new(Sequel::Dataset) do
def each(&block)
a = [[1, 2, 3], [4, 5, 6]]
# Array#keys= comes from Sequel's old array-row extension.
a.each {|r| r.keys = [:a, :b, :c]; block[r]}
end
end
@ds = @c.new(nil).from(:items)
end
specify "should yield records converted to hashes" do
hashes = []
@ds.each_hash {|h| hashes << h}
hashes.should == [{:a => 1, :b => 2, :c => 3}, {:a => 4, :b => 5, :c => 6}]
end
end
# Dynamic xxx_by_column methods (order_by_/group_by_/count_by_/filter_by_/
# all_by_/find_by_/first_by_/last_by_): each is defined on first use via
# method_missing, so respond_to? flips from false to true after one call.
# NOTE(review): @@sqls is a class variable shared by all instances of @c;
# each spec creates a fresh anonymous class, so it is effectively per-spec.
context "Dataset magic methods" do
setup do
@c = Class.new(Sequel::Dataset) do
@@sqls = []
def self.sqls; @@sqls; end
def fetch_rows(sql)
@@sqls << sql
yield({:a => 1, :b => 2})
end
end
@ds = @c.new(nil).from(:items)
end
specify "should support order_by_xxx" do
@ds.should_not respond_to(:order_by_name)
proc {@ds.order_by_name}.should_not raise_error
@ds.should respond_to(:order_by_name)
@ds.order_by_name.should be_a_kind_of(@c)
@ds.order_by_name.sql.should == "SELECT * FROM items ORDER BY name"
end
specify "should support group_by_xxx" do
@ds.should_not respond_to(:group_by_name)
proc {@ds.group_by_name}.should_not raise_error
@ds.should respond_to(:group_by_name)
@ds.group_by_name.should be_a_kind_of(@c)
@ds.group_by_name.sql.should == "SELECT * FROM items GROUP BY name"
end
specify "should support count_by_xxx" do
@ds.should_not respond_to(:count_by_name)
proc {@ds.count_by_name}.should_not raise_error
@ds.should respond_to(:count_by_name)
@ds.count_by_name.should be_a_kind_of(@c)
@ds.count_by_name.sql.should == "SELECT name, count(*) AS count FROM items GROUP BY name ORDER BY count"
end
specify "should support filter_by_xxx" do
@ds.should_not respond_to(:filter_by_name)
proc {@ds.filter_by_name('sharon')}.should_not raise_error
@ds.should respond_to(:filter_by_name)
@ds.filter_by_name('sharon').should be_a_kind_of(@c)
@ds.filter_by_name('sharon').sql.should == "SELECT * FROM items WHERE (name = 'sharon')"
end
specify "should support all_by_xxx" do
@ds.should_not respond_to(:all_by_name)
proc {@ds.all_by_name('sharon')}.should_not raise_error
@ds.should respond_to(:all_by_name)
@ds.all_by_name('sharon').should == [{:a => 1, :b => 2}]
@c.sqls.should == ["SELECT * FROM items WHERE (name = 'sharon')"] * 2
end
specify "should support find_by_xxx" do
@ds.should_not respond_to(:find_by_name)
proc {@ds.find_by_name('sharon')}.should_not raise_error
@ds.should respond_to(:find_by_name)
@ds.find_by_name('sharon').should == {:a => 1, :b => 2}
@c.sqls.should == ["SELECT * FROM items WHERE (name = 'sharon') LIMIT 1"] * 2
end
specify "should support first_by_xxx" do
@ds.should_not respond_to(:first_by_name)
proc {@ds.first_by_name('sharon')}.should_not raise_error
@ds.should respond_to(:first_by_name)
@ds.first_by_name('sharon').should == {:a => 1, :b => 2}
@c.sqls.should == ["SELECT * FROM items ORDER BY name LIMIT 1"] * 2
end
specify "should support last_by_xxx" do
@ds.should_not respond_to(:last_by_name)
proc {@ds.last_by_name('sharon')}.should_not raise_error
@ds.should respond_to(:last_by_name)
@ds.last_by_name('sharon').should == {:a => 1, :b => 2}
@c.sqls.should == ["SELECT * FROM items ORDER BY name DESC LIMIT 1"] * 2
end
end
# Dataset#create_view: emits CREATE VIEW ... AS <dataset sql>.
context "Dataset#create_view" do
setup do
@dbc = Class.new(Sequel::Database) do
attr_reader :sqls
def execute(sql)
@sqls ||= []
@sqls << sql
end
end
@db = @dbc.new
@ds = @db[:items].order(:abc).filter(:category => 'ruby')
end
specify "should create a view with the dataset's sql" do
@ds.create_view(:xyz)
@db.sqls.should == ["CREATE VIEW xyz AS #{@ds.sql}"]
end
end
# Dataset#create_or_replace_view: same as create_view but with
# CREATE OR REPLACE semantics.
context "Dataset#create_or_replace_view" do
setup do
@dbc = Class.new(Sequel::Database) do
attr_reader :sqls
def execute(sql)
@sqls ||= []
@sqls << sql
end
end
@db = @dbc.new
@ds = @db[:items].order(:abc).filter(:category => 'ruby')
end
specify "should create a view with the dataset's sql" do
@ds.create_or_replace_view(:xyz)
@db.sqls.should == ["CREATE OR REPLACE VIEW xyz AS #{@ds.sql}"]
end
end
# Dataset#update_sql: raw string SET clauses, string-keyed hashes, and
# array-subscript lvalues via Sequel's Symbol#| extension (:day|1 → day[1]).
context "Dataset#update_sql" do
setup do
@ds = Sequel::Dataset.new(nil).from(:items)
end
specify "should accept strings" do
@ds.update_sql("a = b").should == "UPDATE items SET a = b"
end
specify "should accept hash with string keys" do
@ds.update_sql('c' => 'd').should == "UPDATE items SET c = 'd'"
end
specify "should accept array subscript references" do
@ds.update_sql((:day|1) => 'd').should == "UPDATE items SET day[1] = 'd'"
end
end
# Stub dataset for the Dataset#table_exists? fallback path: the probing
# #first call succeeds for every table except :a, where it blows up to
# simulate a missing table.
class DummyMummyDataset < Sequel::Dataset
  def first
    from_tables = @opts[:from]
    raise if from_tables == [:a]
    true
  end
end
# Stub database that records SQL instead of executing it, runs
# "transactions" by just yielding, and hands out DummyMummyDataset
# instances.
class DummyMummyDatabase < Sequel::Database
  attr_reader :sqls

  # Append the statement to the recorded list (created lazily).
  def execute(sql)
    (@sqls ||= []) << sql
  end

  # No-op transaction wrapper.
  def transaction
    yield
  end

  # Datasets created by this database use the stub class above.
  def dataset
    DummyMummyDataset.new(self)
  end
end
# Dataset#table_exists?: prefers Database#tables when the DB provides it;
# otherwise probes with #first (errors mean "missing"). Raises for
# multi-table, subquery, and fixed-SQL datasets.
context "Dataset#table_exists?" do
setup do
@db = DummyMummyDatabase.new
@db.stub!(:tables).and_return([:a, :b])
# @db2 has no #tables stub, forcing the SELECT-probe fallback.
@db2 = DummyMummyDatabase.new
end
specify "should use Database#tables if available" do
@db[:a].table_exists?.should be_true
@db[:b].table_exists?.should be_true
@db[:c].table_exists?.should be_false
end
specify "should otherwise try to select the first record from the table's dataset" do
@db2[:a].table_exists?.should be_false
@db2[:b].table_exists?.should be_true
end
specify "should raise Sequel::Error if dataset references more than one table" do
proc {@db.from(:a, :b).table_exists?}.should raise_error(Sequel::Error)
end
specify "should raise Sequel::Error if dataset is from a subquery" do
proc {@db.from(@db[:a]).table_exists?}.should raise_error(Sequel::Error)
end
specify "should raise Sequel::Error if dataset has fixed sql" do
proc {@db['select * from blah'].table_exists?}.should raise_error(Sequel::Error)
end
end
# --- file boundary (concatenation artifact) ---
#: http://stackoverflow.com/questions/3108395/serve-current-directory-from-command-line
# Minimal static file server: serves the current working directory over
# HTTP on port 9090 until Ctrl-C (SIGINT) triggers a clean shutdown.
require 'webrick'
include WEBrick
s = HTTPServer.new(:Port => 9090, :DocumentRoot => Dir::pwd)
trap("INT"){ s.shutdown }
s.start
# make serve.rb an executable
#!/usr/bin/ruby
#: http://stackoverflow.com/questions/3108395/serve-current-directory-from-command-line
# Executable variant of the same one-liner server: serve the current
# directory on port 9090; SIGINT shuts the server down cleanly.
require 'webrick'
include WEBrick
s = HTTPServer.new(:Port => 9090, :DocumentRoot => Dir::pwd)
trap("INT"){ s.shutdown }
s.start
|
# Title: Jekyll Img Tag
# Authors: Rob Wierzbowski : @robwierzbowski
#
# Description: Easy dynamic images for Jekyll.
#
# Download: https://github.com/robwierzbowski/jekyll-img-tag
# Documentation: https://github.com/robwierzbowski/jekyll-img-tag/readme.md
# Issues: https://github.com/robwierzbowski/jekyll-img-tag/issues
#
# Syntax: {% img [preset or dimensions] path/to/img.jpg [attr="value"] %}
# Example: {% img poster.jpg alt="The strange case of Dr. Jekyll" %}
# {% img gallery poster.jpg alt="The strange case of Dr. Jekyll" class="gal-img" data-selected %}
# {% img 350xAUTO poster.jpg alt="The strange case of Dr. Jekyll" class="gal-img" data-selected %}
#
# See the documentation for full configuration and usage instructions.
require 'fileutils'
require 'pathname'
require 'digest/md5'
require 'mini_magick'
module Jekyll
  # Liquid tag that generates resized images at build time via MiniMagick
  # and emits responsive markup: picturefill-style <span> markup or a
  # <picture> element, selected by the `picture: markup:` site setting.
  class Img < Liquid::Tag

    # Parses `{% img [preset] path.jpg [source_key: alt.jpg] [attr="value"] %}`
    # into @preset, @image_src, @source_src and @html_attr.
    # Raises with a usage message when the markup doesn't match.
    def initialize(tag_name, markup, tokens)
      tag = /^(?:(?<preset>[^\s.:\/]+)\s+)?(?<image_src>[^\s]+\.[a-zA-Z0-9]{3,4})\s*(?<source_src>(?:(source_[^\s.:\/]+:\s+[^\s]+\.[a-zA-Z0-9]{3,4})\s*)+)?(?<html_attr>[\s\S]+)?$/.match(markup)

      # Fixed: the message previously said "Picture Tag"/{% picture %},
      # left over from the picture plugin this tag was derived from.
      raise "Img Tag can't read this tag. Try {% img [preset] path/to/img.jpg [source_key: path/to/alt-img.jpg] [attr=\"value\"] %}." unless tag

      @preset = tag[:preset] || 'default'
      @image_src = tag[:image_src]
      # "source_key: path" pairs become a {"source_key" => "path"} hash.
      @source_src = if tag[:source_src]
        Hash[ *tag[:source_src].gsub(/:/, '').split ]
      else
        {}
      end
      # attr="value" pairs and bare flags become a hash; bare flags map to nil.
      @html_attr = if tag[:html_attr]
        Hash[ *tag[:html_attr].scan(/(?<attr>[^\s="]+)(?:="(?<value>[^"]+)")?\s?/).flatten ]
      else
        {}
      end

      super
    end

    # Builds the final HTML for the tag: resolves the preset, generates all
    # resized variants (including high-ppi ones), and returns the markup.
    def render(context)
      # Gather settings
      site = context.registers[:site]
      settings = site.config['picture']
      markup = settings['markup'] || 'picturefill'
      image_source = settings['source_path'] || '.'
      image_dest = settings['output_path'] || File.join(image_source, 'generated')

      # Prevent Jekyll from erasing our copied files
      site.config['keep_files'] << image_dest unless site.config['keep_files'].include?(image_dest)

      # Fixed: raise the friendly missing-preset error *before* dereferencing
      # the preset; previously a missing preset crashed with NoMethodError on
      # nil inside the Marshal deep copy below.
      raise "Img Tag can't find this preset. Check picture: presets: #{@preset} in _config.yml for a list of presets." unless settings['presets'][@preset]

      # Deep copy preset to sources for single instance manipulation
      sources = Marshal.load(Marshal.dump(settings['presets'][@preset]))

      # Preset-level 'attr' defaults, overridden by attributes from the tag.
      html_attr = if sources['attr']
        sources.delete('attr').merge!(@html_attr)
      else
        @html_attr
      end

      if markup == 'picturefill'
        html_attr['data-picture'] = nil
        html_attr['data-alt'] = html_attr.delete('alt')
      end

      html_attr_string = ''
      html_attr.each { |key, value|
        if value && value != 'nil'
          html_attr_string += "#{key}=\"#{value}\" "
        else
          html_attr_string += "#{key} "
        end
      }

      # Prepare ppi variables (highest density first)
      ppi = if sources['ppi'] then sources.delete('ppi').sort.reverse else nil end
      ppi_sources = {}

      # Store source keys in an array for ordering the sources object
      source_keys = sources.keys

      # Every source_key override mentioned in the tag must exist in the preset.
      raise "Img Tag can't find this preset source. Check picture: presets: #{@preset} in _config.yml for a list of sources." unless (@source_src.keys - source_keys).empty?

      # Process sources
      # Add image paths for each source
      sources.each_key { |key|
        sources[key][:src] = @source_src[key] || @image_src
      }

      # Construct ppi sources
      # Generates -webkit-device-ratio and resolution: dpi media value for cross browser support
      # Reference: http://www.brettjankord.com/2012/11/28/cross-browser-retinahigh-resolution-media-queries/
      if ppi
        sources.each { |key, value|
          ppi.each { |p|
            if p != 1
              ppi_key = "#{key}-x#{p}"

              ppi_sources[ppi_key] = {
                'width' => if value['width'] then (value['width'].to_f * p).round else nil end,
                'height' => if value['height'] then (value['height'].to_f * p).round else nil end,
                'media' => if value['media']
                  "#{value['media']} and (-webkit-min-device-pixel-ratio: #{p}), #{value['media']} and (min-resolution: #{(p * 96).round}dpi)"
                else
                  "(-webkit-min-device-pixel-ratio: #{p}), (min-resolution: #{(p * 96).to_i}dpi)"
                end,
                :src => value[:src]
              }

              # Add ppi_key to the source keys order
              source_keys.insert(source_keys.index(key), ppi_key)
            end
          }
        }
        sources.merge!(ppi_sources)
      end

      # Generate resized images
      sources.each { |key, source|
        sources[key][:generated_src] = generate_image(source, site.source, site.dest, image_source, image_dest)
      }

      # Construct and return tag markup.
      # Fixed: branch on the defaulted `markup` local; branching on
      # settings['markup'] ignored the 'picturefill' default and silently
      # returned nil when markup was unset in _config.yml.
      if markup == 'picturefill'
        source_tags = ''
        # Picturefill uses reverse source order
        # Reference: https://github.com/scottjehl/picturefill/issues/79
        source_keys.reverse.each { |source|
          media = " data-media=\"#{sources[source]['media']}\"" unless source == 'source_default'
          source_tags += "<span data-src=\"#{sources[source][:generated_src]}\"#{media}></span>\n"
        }

        # Note: we can't indent html output because markdown parsers will turn 4 spaces into code blocks
        picture_tag = "<span #{html_attr_string}>\n"\
                      "#{source_tags}"\
                      "<noscript>\n"\
                      "<img src=\"#{sources['source_default'][:generated_src]}\" alt=\"#{html_attr['data-alt']}\">\n"\
                      "</noscript>\n"\
                      "</span>\n"

      elsif markup == 'picture'
        source_tags = ''
        source_keys.each { |source|
          media = " media=\"#{sources[source]['media']}\"" unless source == 'source_default'
          source_tags += "<source src=\"#{sources[source][:generated_src]}\"#{media}>\n"
        }

        # Note: we can't indent html output because markdown parsers will turn 4 spaces into code blocks
        picture_tag = "<picture #{html_attr_string}>\n"\
                      "#{source_tags}"\
                      "<p>#{html_attr['alt']}</p>\n"\
                      "</picture>\n"
      end

      # Return the markup!
      picture_tag
    end

    # Resizes/crops the source image to the dimensions in `source` (never
    # upscaling), caches the result under a digest-stamped name, and returns
    # the site-relative path for use in HTML.
    def generate_image(source, site_source, site_dest, image_source, image_dest)
      raise "Sources must have at least one of width and height in the _config.yml." unless source['width'] || source['height']

      src_image = MiniMagick::Image.open(File.join(site_source, image_source, source[:src]))
      src_digest = Digest::MD5.hexdigest(src_image.to_blob).slice!(0..5)
      src_width = src_image[:width].to_f
      src_height = src_image[:height].to_f
      src_ratio = src_width/src_height
      src_dir = File.dirname(source[:src])
      ext = File.extname(source[:src])
      src_name = File.basename(source[:src], ext)

      # Missing dimension is derived from the source aspect ratio.
      gen_width = if source['width'] then source['width'].to_f else src_ratio * source['height'].to_f end
      gen_height = if source['height'] then source['height'].to_f else source['width'].to_f / src_ratio end
      gen_ratio = gen_width/gen_height

      # Don't allow upscaling. If the image is smaller than the requested dimensions, recalculate.
      if src_image[:width] < gen_width || src_image[:height] < gen_height
        undersized = true
        gen_width = if gen_ratio < src_ratio then src_height * gen_ratio else src_width end
        gen_height = if gen_ratio > src_ratio then src_width/gen_ratio else src_height end
      end

      gen_name = "#{src_name}-#{gen_width.round}x#{gen_height.round}-#{src_digest}" + ext
      gen_dest_path = File.join(site_dest, image_dest, src_dir)
      gen_jekyll_path = Pathname.new(File.join('/', image_dest, src_dir, gen_name)).cleanpath

      # Generate resized files (File.exist? — File.exists? is deprecated)
      unless File.exist?(File.join(gen_dest_path, gen_name))
        # Fixed double negative: `unless not undersized` -> `if undersized`.
        # NOTE(review): String#yellow needs a colorizing extension (e.g.
        # colorator/colorize) to be loaded — confirm it is available.
        warn "Warning:".yellow + " #{source[:src]} is smaller than the requested output file. It will be resized without upscaling." if undersized

        # If the destination directory doesn't exist, create it
        FileUtils.mkdir_p(gen_dest_path) unless File.exist?(gen_dest_path)

        # Let people know their images are being generated
        puts "Generating #{gen_name}"

        # Scale and crop
        src_image.combine_options do |i|
          i.resize "#{gen_width.round}x#{gen_height.round}^"
          i.gravity "center"
          i.crop "#{gen_width.round}x#{gen_height.round}+0+0"
        end
        src_image.write File.join(gen_dest_path, gen_name)
      end

      # Return path for html
      gen_jekyll_path
    end
  end
end

Liquid::Template.register_tag('img', Jekyll::Img)
# Add image regex, trim picture specific code
# Title: Jekyll Img Tag
# Authors: Rob Wierzbowski : @robwierzbowski
#
# Description: Easy dynamic images for Jekyll.
#
# Download: https://github.com/robwierzbowski/jekyll-img-tag
# Documentation: https://github.com/robwierzbowski/jekyll-img-tag/readme.md
# Issues: https://github.com/robwierzbowski/jekyll-img-tag/issues
#
# Syntax: {% img [preset or WxH] path/to/img.jpg [attr="value"] %}
# Example: {% img poster.jpg alt="The strange case of Dr. Jekyll" %}
# {% img gallery poster.jpg alt="The strange case of Dr. Jekyll" class="gal-img" data-selected %}
# {% img 350xAUTO poster.jpg alt="The strange case of Dr. Jekyll" class="gal-img" data-selected %}
#
# See the documentation for full configuration and usage instructions.
require 'fileutils'
require 'pathname'
require 'digest/md5'
require 'mini_magick'
module Jekyll
class Img < Liquid::Tag
# Parses `{% img [preset or WxH] path.jpg [attr="value"] %}`.
# A WxH first token (digits or "auto" on either side) is turned into a
# {:width, :height} hash; otherwise the token is treated as a preset name.
def initialize(tag_name, markup, tokens)
tag = /^(?:(?<preset>[^\s.:\/]+)\s+)?(?<image_src>[^\s]+\.[a-zA-Z0-9]{3,4})\s*(?<html_attr>[\s\S]+)?$/.match(markup)
raise "Img Tag can't read this tag. Try {% img [preset or WxH] path/to/img.jpg [attr=\"value\"] %}." unless tag
# NOTE(review): when @preset ends up a Hash here, the later
# settings['presets'][@preset] lookup in render won't find it — confirm
# the intended dimensions handling (see RWRW marker in render).
@preset = if dim = /^(?<width>\d+|auto)(?:x)(?<height>\d+|auto)$/i.match(tag[:preset])
Hash[ :width, dim[:width], :height, dim[:height] ]
else
tag[:preset] || 'default'
end
@image_src = tag[:image_src]
# attr="value" pairs and bare flags become a hash; bare flags map to nil.
@html_attr = if tag[:html_attr]
Hash[ *tag[:html_attr].scan(/(?<attr>[^\s="]+)(?:="(?<value>[^"]+)")?\s?/).flatten ]
else
{}
end
super
end
# Renders the img tag. WORK IN PROGRESS (see RWRW marker below): this is a
# mid-refactor trim of the picture-tag render.
# FIXME(review): `sources` and `source_keys` are referenced below but never
# defined in this version — rendering will raise NameError. The `preset`
# deep copy is also unused after the attr extraction.
def render(context)
# Gather settings
site = context.registers[:site]
settings = site.config['img']
image_source = settings['source_path'] || '.'
image_dest = settings['output_path'] || File.join(image_source, 'generated')
# Prevent Jekyll from erasing our copied files
site.config['keep_files'] << image_dest unless site.config['keep_files'].include?(image_dest)
### RWRW Add preset / dimensions handling here
# Deep copy preset for single instance manipulation
preset = Marshal.load(Marshal.dump(settings['presets'][@preset]))
# Process html attributes: preset-level 'attr' defaults, overridden by
# attributes written in the tag itself.
html_attr = if preset['attr']
preset.delete('attr').merge!(@html_attr)
else
@html_attr
end
html_attr_string = ''
html_attr.each { |key, value|
if value && value != 'nil'
html_attr_string += "#{key}=\"#{value}\" "
else
html_attr_string += "#{key} "
end
}
# Raise some exceptions before we start expensive processing
raise "Img Tag can't find this preset. Check img: presets: #{@preset} in _config.yml for a list of presets." unless settings['presets'][@preset]
# Process sources
# Generate resized images
# FIXME(review): `sources` is undefined in this trimmed version.
sources.each { |key, source|
sources[key][:generated_src] = generate_image(source, site.source, site.dest, image_source, image_dest)
}
# Construct and return tag
# NOTE(review): still branches on picture-tag markup settings; presumably
# to be replaced with plain <img> output — confirm intended final markup.
if settings['markup'] == 'picturefill'
source_tags = ''
# Picturefill uses reverse source order
# Reference: https://github.com/scottjehl/picturefill/issues/79
source_keys.reverse.each { |source|
media = " data-media=\"#{sources[source]['media']}\"" unless source == 'source_default'
source_tags += "<span data-src=\"#{sources[source][:generated_src]}\"#{media}></span>\n"
}
# Note: we can't indent html output because markdown parsers will turn 4 spaces into code blocks
picture_tag = "<span #{html_attr_string}>\n"\
"#{source_tags}"\
"<noscript>\n"\
"<img src=\"#{sources['source_default'][:generated_src]}\" alt=\"#{html_attr['data-alt']}\">\n"\
"</noscript>\n"\
"</span>\n"
elsif settings['markup'] == 'picture'
source_tags = ''
source_keys.each { |source|
media = " media=\"#{sources[source]['media']}\"" unless source == 'source_default'
source_tags += "<source src=\"#{sources[source][:generated_src]}\"#{media}>\n"
}
# Note: we can't indent html output because markdown parsers will turn 4 spaces into code blocks
picture_tag = "<picture #{html_attr_string}>\n"\
"#{source_tags}"\
"<p>#{html_attr['alt']}</p>\n"\
"</picture>\n"
end
# Return the markup!
picture_tag
end
def generate_image(source, site_source, site_dest, image_source, image_dest)
raise "Sources must have at least one of width and height in the _config.yml." unless source['width'] || source['height']
src_image = MiniMagick::Image.open(File.join(site_source, image_source, source[:src]))
src_digest = Digest::MD5.hexdigest(src_image.to_blob).slice!(0..5)
src_width = src_image[:width].to_f
src_height = src_image[:height].to_f
src_ratio = src_width/src_height
src_dir = File.dirname(source[:src])
ext = File.extname(source[:src])
src_name = File.basename(source[:src], ext)
gen_width = if source['width'] then source['width'].to_f else src_ratio * source['height'].to_f end
gen_height = if source['height'] then source['height'].to_f else source['width'].to_f / src_ratio end
gen_ratio = gen_width/gen_height
# Don't allow upscaling. If the image is smaller than the requested dimensions, recalculate.
if src_image[:width] < gen_width || src_image[:height] < gen_height
undersized = true
gen_width = if gen_ratio < src_ratio then src_height * gen_ratio else src_width end
gen_height = if gen_ratio > src_ratio then src_width/gen_ratio else src_height end
end
gen_name = "#{src_name}-#{gen_width.round}x#{gen_height.round}-#{src_digest}" + ext
gen_dest_path = File.join(site_dest, image_dest, src_dir)
gen_jekyll_path = Pathname.new(File.join('/', image_dest, src_dir, gen_name)).cleanpath
# Generate resized files
unless File.exists?(File.join(gen_dest_path, gen_name))
warn "Warning:".yellow + " #{source[:src]} is smaller than the requested output file. It will be resized without upscaling." unless not undersized
# If the destination directory doesn't exist, create it
FileUtils.mkdir_p(gen_dest_path) unless File.exist?(gen_dest_path)
# Let people know their images are being generated
puts "Generating #{gen_name}"
# Scale and crop
src_image.combine_options do |i|
i.resize "#{gen_width.round}x#{gen_height.round}^"
i.gravity "center"
i.crop "#{gen_width.round}x#{gen_height.round}+0+0"
end
src_image.write File.join(gen_dest_path, gen_name)
end
# Return path for html
gen_jekyll_path
end
end
end
Liquid::Template.register_tag('img', Jekyll::Img)
|
# encoding: utf-8
# This file is part of the K5 bot project.
# See files README.md and COPYING for copyright and licensing information.
# Translate plugin
require_relative '../../IRCPlugin'
require 'rubygems'
require 'nokogiri'
require 'net/http'
# IRC plugin that translates text between Japanese, English, Chinese and
# Korean using the www.ocn.ne.jp translation CGI.
class Translate < IRCPlugin
  # Handles bot commands; returns false so other plugins still see the message.
  def on_privmsg(msg)
    return unless msg.tail
    if msg.botcommand == :t
      text = msg.tail
      # Auto-detect direction: Japanese input -> English, otherwise -> Japanese.
      t = containsJapanese?(text) ? (translate text, 'jaen') : (translate text, 'enja')
      msg.reply t if t
    else
      # Explicit language-pair commands, e.g. :je => Japanese-to-English.
      pairs = {
        :je => 'jaen',
        :ej => 'enja',
        :cj => 'zhja',
        :jc => 'jazh',
        :twj => 'twja',
        :jtw => 'jatw',
        :kj => 'koja',
        :jk => 'jako'
      }
      if lp = pairs[msg.botcommand]
        t = translate msg.tail, lp
        msg.reply t if t
      end
    end
    false
  end

  def describe
    "Uses the translation engine from www.ocn.ne.jp to translate between Japanese and English."
  end

  def commands
    {
      :t => "determines if specified text is Japanese or not, then translates appropriately J>E or E>J",
      :je => "translates specified text from Japanese to English",
      :ej => "translates specified text from English to Japanese",
      :cj => "translates specified text from Simplified Chinese to Japanese",
      :jc => "translates specified text from Japanese to Simplified Chinese",
      :twj => "translates specified text from Traditional Chinese to Japanese",
      :jtw => "translates specified text from Japanese to Traditional Chinese",
      :kj => "translates specified text from Korean to Japanese",
      :jk => "translates specified text from Japanese to Korean"
    }
  end

  # POSTs the text to the OCN translation CGI. Returns the translated text,
  # nil when the HTTP request did not succeed, or false on an exception.
  def ocnTranslate(text, lp)
    result = Net::HTTP.post_form(
      URI.parse('http://cgi01.ocn.ne.jp/cgi-bin/translation/index.cgi'),
      {'sourceText' => text, 'langpair' => lp})
    result.body.force_encoding 'utf-8'
    # Bail out unless the request succeeded. The previous guard,
    # `return if [Net::HTTPSuccess, Net::HTTPRedirection].include? result`,
    # compared the response *instance* against an array of *classes* and was
    # never true, so error responses were parsed as if they were translations.
    return unless result.is_a?(Net::HTTPSuccess) || result.is_a?(Net::HTTPRedirection)
    doc = Nokogiri::HTML result.body
    doc.css('textarea[name = "responseText"]').text.chomp
  rescue => e
    puts "Cannot translate: #{e}\n\t#{e.backtrace.join("\n\t")}"
    false
  end
  alias translate ocnTranslate

  # True when the text contains any Japanese (or shared CJK) characters.
  def containsJapanese?(text)
    # 3040-309F hiragana
    # 30A0-30FF katakana
    # 4E00-9FC2 kanji
    # FF61-FF9D half-width katakana
    # 31F0-31FF katakana phonetic extensions
    # 3000-303F CJK punctuation
    #
    # Source: http://www.unicode.org/charts/
    !!(text =~ /[\u3040-\u309F\u30A0-\u30FF\u4E00-\u9FC2\uFF61-\uFF9D\u31F0-\u31FF\u3000-\u303F]/)
  end
end
Further simplify
# encoding: utf-8
# This file is part of the K5 bot project.
# See files README.md and COPYING for copyright and licensing information.
# Translate plugin
require_relative '../../IRCPlugin'
require 'rubygems'
require 'nokogiri'
require 'net/http'
# IRC plugin that translates text between Japanese, English, Chinese and
# Korean using the www.ocn.ne.jp translation CGI.
class Translate < IRCPlugin
  # Maps bot commands to OCN language-pair codes.
  TranslationPairs = {
    :je => 'jaen',
    :ej => 'enja',
    :cj => 'zhja',
    :jc => 'jazh',
    :twj => 'twja',
    :jtw => 'jatw',
    :kj => 'koja',
    :jk => 'jako'
  }

  # Handles bot commands; returns false so other plugins still see the message.
  def on_privmsg(msg)
    return unless msg.tail
    if msg.botcommand == :t
      text = msg.tail
      # Auto-detect direction: Japanese input -> English, otherwise -> Japanese.
      t = containsJapanese?(text) ? (translate text, 'jaen') : (translate text, 'enja')
      msg.reply t if t
    else
      if lp = TranslationPairs[msg.botcommand]
        t = translate msg.tail, lp
        msg.reply t if t
      end
    end
    false
  end

  def describe
    "Uses the translation engine from www.ocn.ne.jp to translate between Japanese and English."
  end

  def commands
    {
      :t => "determines if specified text is Japanese or not, then translates appropriately J>E or E>J",
      :je => "translates specified text from Japanese to English",
      :ej => "translates specified text from English to Japanese",
      :cj => "translates specified text from Simplified Chinese to Japanese",
      :jc => "translates specified text from Japanese to Simplified Chinese",
      :twj => "translates specified text from Traditional Chinese to Japanese",
      :jtw => "translates specified text from Japanese to Traditional Chinese",
      :kj => "translates specified text from Korean to Japanese",
      :jk => "translates specified text from Japanese to Korean"
    }
  end

  # POSTs the text to the OCN translation CGI. Returns the translated text,
  # nil when the HTTP request did not succeed, or false on an exception.
  def ocnTranslate(text, lp)
    result = Net::HTTP.post_form(
      URI.parse('http://cgi01.ocn.ne.jp/cgi-bin/translation/index.cgi'),
      {'sourceText' => text, 'langpair' => lp})
    result.body.force_encoding 'utf-8'
    # Bail out unless the request succeeded. The previous guard,
    # `return if [Net::HTTPSuccess, Net::HTTPRedirection].include? result`,
    # compared the response *instance* against an array of *classes* and was
    # never true, so error responses were parsed as if they were translations.
    return unless result.is_a?(Net::HTTPSuccess) || result.is_a?(Net::HTTPRedirection)
    doc = Nokogiri::HTML result.body
    doc.css('textarea[name = "responseText"]').text.chomp
  rescue => e
    puts "Cannot translate: #{e}\n\t#{e.backtrace.join("\n\t")}"
    false
  end
  alias translate ocnTranslate

  # True when the text contains any Japanese (or shared CJK) characters.
  def containsJapanese?(text)
    # 3040-309F hiragana
    # 30A0-30FF katakana
    # 4E00-9FC2 kanji
    # FF61-FF9D half-width katakana
    # 31F0-31FF katakana phonetic extensions
    # 3000-303F CJK punctuation
    #
    # Source: http://www.unicode.org/charts/
    !!(text =~ /[\u3040-\u309F\u30A0-\u30FF\u4E00-\u9FC2\uFF61-\uFF9D\u31F0-\u31FF\u3000-\u303F]/)
  end
end
|
# Gem packaging metadata for the curator gem.
Gem::Specification.new do |spec|
  spec.name        = 'curator'
  spec.version     = '0.11.2'
  spec.summary     = "Model and repository framework"
  spec.description = "Model and repository framework"
  spec.authors     = %w[Braintree]
  spec.email       = 'code@getbraintree.com'
  spec.homepage    = "http://github.com/braintree/curator"
  spec.licenses    = %w[MIT]
  spec.files       = Dir.glob("lib/**/*.rb")

  # Runtime dependencies.
  spec.add_dependency 'activesupport', '>= 3.0.0'
  spec.add_dependency 'activemodel', '>= 3.0.0'
  spec.add_dependency 'json'
  spec.add_dependency 'riak-client', '~> 2.3.2'
end
Bump gem version.
# Gem packaging metadata for the curator gem.
Gem::Specification.new do |spec|
  spec.name        = 'curator'
  spec.version     = '0.12.0'
  spec.summary     = "Model and repository framework"
  spec.description = "Model and repository framework"
  spec.authors     = %w[Braintree]
  spec.email       = 'code@getbraintree.com'
  spec.homepage    = "http://github.com/braintree/curator"
  spec.licenses    = %w[MIT]
  spec.files       = Dir.glob("lib/**/*.rb")

  # Runtime dependencies.
  spec.add_dependency 'activesupport', '>= 3.0.0'
  spec.add_dependency 'activemodel', '>= 3.0.0'
  spec.add_dependency 'json'
  spec.add_dependency 'riak-client', '~> 2.3.2'
end
|
Add Young Animal (ヤングアニマル) scraper
# -*- coding: utf-8 -*-
require 'rss_conv'
require 'rubygems'
require 'mechanize'
require 'digest/md5'
# Scraper for the Young Animal Densi site: produces a single RSS item
# describing the latest update on the index page.
class YoungAnimalDensi < RssConv::Scraper
  URL = "http://younganimal-densi.com/index"
  TITLE = "ヤングアニマル Densi"
  DESCRIPTION = "#{TITLE} 更新情報"

  attr_reader :title, :link, :description

  def initialize
    @title = TITLE
    @link = URL
    @description = DESCRIPTION
  end

  # Fetches the index page and returns a one-element array describing the
  # latest update, or nil when the expected page elements are missing.
  # The item link carries an MD5-based fragment so it changes whenever the
  # page content changes.
  def scrape
    page = Mechanize.new.get(URL)

    latest = page.search('#latest').first
    return nil if latest.nil?

    date = page.search('.update_date').first
    return nil if date.nil?

    fragment = '#' +
               Digest::MD5.hexdigest(latest.content) +
               Digest::MD5.hexdigest(date.content)

    [{
      :title => date.content,
      :link => URL + fragment,
      :description => latest.to_html
    }]
  end

  # Quick manual check when this file is run directly.
  p new.scrape if $0 == __FILE__
end
|
require_relative '../../src/all_avatars_names'
require_relative 'test_base'
# Container-environment tests that run each assertion set twice, once
# against an Alpine-based image and once against an Ubuntu-based image.
class MultiOSTest < TestBase

  def self.hex_prefix
    '3759D'
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Registers the given test body twice: once tagged [Alpine] and once
  # tagged [Ubuntu], each with its own hex-id suffix.
  def self.os_test(hex_suffix, *lines, &test_block)
    alpine_lines = ['[Alpine]'] + lines
    test(hex_suffix+'0', *alpine_lines, &test_block)
    ubuntu_lines = ['[Ubuntu]'] + lines
    test(hex_suffix+'1', *ubuntu_lines, &test_block)
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  os_test 'C3A',
  'invalid avatar_name raises' do
    in_kata { assert_invalid_avatar_name_raises }
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # All in-container assertions share one kata/avatar session, presumably
  # because each container round-trip is expensive.
  os_test '8A3',
  'in-container tests' do
    in_kata_as(salmon) {
      assert_run_is_initially_red
      assert_pid_1_is_running_init_process
      assert_time_stamp_microseconds_granularity
      assert_env_vars_exist
      assert_avatar_users_exist
      assert_cyber_dojo_group_exists
      assert_avatar_has_home
      assert_avatar_sandbox_properties
      assert_starting_files_properties
      assert_ulimits
      assert_baseline_speed_test
      # these two create new files
      assert_files_can_be_in_sub_dirs_of_sandbox
      assert_files_can_be_in_sub_sub_dirs_of_sandbox
    }
  end

  private

  # An unknown avatar name must be rejected with a specific ArgumentError.
  def assert_invalid_avatar_name_raises
    error = assert_raises(ArgumentError) {
      run_cyber_dojo_sh({ avatar_name:'polaroid' })
    }
    assert_equal 'avatar_name:invalid', error.message
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def assert_run_is_initially_red
    run_cyber_dojo_sh
    assert_colour 'red'
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def assert_pid_1_is_running_init_process
    cmd = 'cat /proc/1/cmdline'
    proc1 = assert_cyber_dojo_sh(cmd).strip
    # odd, but there _is_ an embedded nul-character
    expected = '/dev/init' + 0.chr + '--'
    assert proc1.start_with?(expected), proc1
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Each CYBER_DOJO_* env-var inside the container must match the
  # corresponding value known to the test harness.
  def assert_env_vars_exist
    cmd = 'printenv CYBER_DOJO_KATA_ID'
    env_kata_id = assert_cyber_dojo_sh(cmd).strip
    cmd = 'printenv CYBER_DOJO_AVATAR_NAME'
    env_avatar_name = assert_cyber_dojo_sh(cmd).strip
    cmd = 'printenv CYBER_DOJO_SANDBOX'
    env_sandbox_dir = assert_cyber_dojo_sh(cmd).strip
    cmd = 'printenv CYBER_DOJO_RUNNER'
    env_runner = assert_cyber_dojo_sh(cmd).strip
    assert_equal kata_id, env_kata_id
    assert_equal avatar_name, env_avatar_name
    assert_equal sandbox_dir, env_sandbox_dir
    assert_equal 'stateless', env_runner
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  include AllAvatarsNames

  # NOTE(review): the loop body checks user_id for every avatar name but
  # never uses `name` to derive it - confirm user_id varies per avatar,
  # otherwise this asserts the same thing repeatedly.
  def assert_avatar_users_exist
    etc_passwd = assert_cyber_dojo_sh 'cat /etc/passwd'
    all_avatars_names.each do |name|
      assert etc_passwd.include?(user_id.to_s),
        "#{name}:#{user_id}:#{etc_passwd}:#{image_name}"
    end
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def assert_cyber_dojo_group_exists
    stdout = assert_cyber_dojo_sh("getent group #{group}").strip
    entries = stdout.split(':') # cyber-dojo:x:5000
    assert_equal group, entries[0], stdout
    assert_equal group_id, entries[2].to_i, stdout
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def assert_avatar_has_home
    home = assert_cyber_dojo_sh('printenv HOME').strip
    assert_equal home_dir, home
    cd_home_pwd = assert_cyber_dojo_sh('cd ~ && pwd').strip
    assert_equal home_dir, cd_home_pwd
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # The sandbox directory must exist, be non-empty, and be owned by the
  # avatar's user/group with 755 permissions.
  def assert_avatar_sandbox_properties
    assert_cyber_dojo_sh "[ -d #{sandbox_dir} ]" # sandbox exists
    ls = assert_cyber_dojo_sh "ls -A #{sandbox_dir}"
    refute_equal '', ls # sandbox is not empty
    stat_uid = assert_cyber_dojo_sh("stat -c '%u' #{sandbox_dir}").strip.to_i
    stat_gid = assert_cyber_dojo_sh("stat -c '%g' #{sandbox_dir}").strip.to_i
    stat_perms = assert_cyber_dojo_sh("stat -c '%A' #{sandbox_dir}").strip
    assert_equal user_id, stat_uid, 'stat <user>'
    assert_equal group_id, stat_gid, 'stat <gid>'
    assert_equal 'drwxr-xr-x', stat_perms, 'stat <permissions>'
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Replaces cyber-dojo.sh with an ls and checks every starting file landed
  # in the sandbox with the expected ownership, permissions and size.
  def assert_starting_files_properties
    run_cyber_dojo_sh({
      changed_files: { 'cyber-dojo.sh' => ls_cmd }
    })
    assert_colour 'amber' # doing an ls
    assert_stderr ''
    ls_stdout = stdout
    ls_files = ls_parse(ls_stdout)
    assert_equal starting_files.keys.sort, ls_files.keys.sort
    starting_files.each do |filename,content|
      if filename == 'cyber-dojo.sh'
        # cyber-dojo.sh was overwritten with ls_cmd above.
        content = ls_cmd
      end
      assert_equal_atts(filename, '-rw-r--r--', user_id, group, content.length, ls_files)
    end
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Checks the container's resource limits against expected hard-coded
  # values (sizes are expressed in ulimit's own units: kb or 512-byte blocks).
  def assert_ulimits
    etc_issue = assert_cyber_dojo_sh('cat /etc/issue')
    lines = assert_cyber_dojo_sh('ulimit -a').split("\n")
    assert_equal 0, ulimit(lines, :core_size, etc_issue)
    assert_equal 128, ulimit(lines, :file_locks, etc_issue)
    assert_equal 128, ulimit(lines, :no_files, etc_issue)
    assert_equal 128, ulimit(lines, :processes, etc_issue)
    kb = 1024
    mb = 1024 * kb
    gb = 1024 * mb
    expected_max_data_size = 4 * gb / kb
    expected_max_file_size = 16 * mb / (block_size = 512)
    expected_max_stack_size = 8 * mb / kb
    actual_max_data_size = ulimit(lines, :data_size, etc_issue)
    actual_max_file_size = ulimit(lines, :file_size, etc_issue)
    actual_max_stack_size = ulimit(lines, :stack_size, etc_issue)
    assert_equal expected_max_data_size, actual_max_data_size
    assert_equal expected_max_file_size, actual_max_file_size
    assert_equal expected_max_stack_size, actual_max_stack_size
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Extracts the numeric value of one limit from `ulimit -a` output.
  # The label text differs between Alpine (busybox) and Ubuntu (bash),
  # hence the two-column lookup table keyed off /etc/issue.
  def ulimit(lines, key, etc_issue)
    table = { # alpine, ubuntu
      :core_size => [ '-c: core file size (blocks)', 'coredump(blocks)'],
      :data_size => [ '-d: data seg size (kb)', 'data(kbytes)' ],
      :file_locks => [ '-w: locks', 'locks' ],
      :file_size => [ '-f: file size (blocks)', 'file(blocks)' ],
      :no_files => [ '-n: file descriptors', 'nofiles' ],
      :processes => [ '-p: processes', 'process' ],
      :stack_size => [ '-s: stack size (kb)', 'stack(kbytes)' ],
    }
    row = table[key]
    diagnostic = "no ulimit table entry for #{key}"
    refute_nil row, diagnostic
    if etc_issue.include? 'Alpine'
      txt = row[0]
    end
    if etc_issue.include? 'Ubuntu'
      txt = row[1]
    end
    line = lines.detect { |limit| limit.start_with? txt }
    line.split[-1].to_i
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Runs a no-op command five times and asserts the mean round-trip time
  # stays under 800 milliseconds.
  def assert_baseline_speed_test
    timings = []
    (1..5).each do
      started_at = Time.now
      assert_cyber_dojo_sh('true')
      stopped_at = Time.now
      diff = Time.at(stopped_at - started_at).utc
      secs = diff.strftime("%S").to_i
      millisecs = diff.strftime("%L").to_i
      timings << (secs * 1000 + millisecs)
    end
    mean = timings.reduce(0, :+) / timings.size
    assert mean < max=800, "mean=#{mean}, max=#{max}"
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def assert_files_can_be_in_sub_dirs_of_sandbox
    sub_dir = 'z'
    filename = 'hello.txt'
    content = 'the boy stood on the burning deck'
    # NOTE(review): "#(unknown)" below looks like a corrupted "#{filename}"
    # interpolation - confirm against version control.
    run_cyber_dojo_sh({
      changed_files: { 'cyber-dojo.sh' => "cd #{sub_dir} && #{ls_cmd}" },
      new_files: { "#{sub_dir}/#(unknown)" => content }
    })
    ls_files = ls_parse(stdout)
    assert_equal_atts(filename, '-rw-r--r--', user_id, group, content.length, ls_files)
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def assert_files_can_be_in_sub_sub_dirs_of_sandbox
    sub_sub_dir = 'a/b'
    filename = 'goodbye.txt'
    content = 'goodbye cruel world'
    # NOTE(review): "#(unknown)" below looks like a corrupted "#{filename}"
    # interpolation - confirm against version control.
    run_cyber_dojo_sh({
      changed_files: { 'cyber-dojo.sh' => "cd #{sub_sub_dir} && #{ls_cmd}" },
      new_files: { "#{sub_sub_dir}/#(unknown)" => content }
    })
    ls_files = ls_parse(stdout)
    assert_equal_atts(filename, '-rw-r--r--', user_id, group, content.length, ls_files)
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def assert_time_stamp_microseconds_granularity
    # On _default_ Alpine date-time file-stamps are to the second granularity.
    # In other words, the microseconds value is always '000000000'.
    # Make sure the tar-piped files have fixed this.
    run_cyber_dojo_sh({
      changed_files: { 'cyber-dojo.sh' => ls_cmd }
    })
    count = 0
    ls_parse(stdout).each do |filename,atts|
      count += 1
      refute_nil atts, filename
      stamp = atts[:time_stamp] # eg '07:03:14.835233538'
      microsecs = stamp.split((/[\:\.]/))[-1]
      assert_equal 9, microsecs.length
      refute_equal '0'*9, microsecs
    end
    assert_equal 5, count
  end

  private

  # Asserts the ls_files entry for filename has the expected permissions,
  # owner, group and size.
  def assert_equal_atts(filename, permissions, user, group, size, ls_files)
    atts = ls_files[filename]
    refute_nil atts, filename
    diagnostic = { filename => atts }
    assert_equal user, atts[:user ], diagnostic
    assert_equal group, atts[:group], diagnostic
    assert_equal size, atts[:size ], diagnostic
    assert_equal permissions, atts[:permissions], diagnostic
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Parses ls_cmd output into { filename => attribute-hash }.
  def ls_parse(ls_stdout)
    Hash[ls_stdout.split("\n").collect { |line|
      attr = line.split
      [attr[0], { # filename
        permissions: attr[1],
        user: attr[2].to_i,
        group: attr[3],
        size: attr[4].to_i,
        time_stamp: attr[6],
      }]
    }]
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def ls_cmd;
    # Works on Ubuntu and Alpine
    'stat -c "%n %A %u %G %s %y" *'
    # hiker.h -rw-r--r-- 40045 cyber-dojo 136 2016-06-05 07:03:14.539952547
    # | | | | | | |
    # filename permissions user group size date time
    # 0 1 2 3 4 5 6
    # Stat
    # %z == time of last status change
    # %y == time of last data modification <<=====
    # %x == time of last access
    # %w == time of file birth
  end

end
test refactoring
require_relative '../../src/all_avatars_names'
require_relative 'test_base'
# Container-environment tests that run each assertion set twice, once
# against an Alpine-based image and once against an Ubuntu-based image.
class MultiOSTest < TestBase

  def self.hex_prefix
    '3759D'
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Registers the given test body twice: once tagged [Alpine] and once
  # tagged [Ubuntu], each with its own hex-id suffix.
  def self.os_test(hex_suffix, *lines, &test_block)
    alpine_lines = ['[Alpine]'] + lines
    test(hex_suffix+'0', *alpine_lines, &test_block)
    ubuntu_lines = ['[Ubuntu]'] + lines
    test(hex_suffix+'1', *ubuntu_lines, &test_block)
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  os_test 'C3A',
  'invalid avatar_name raises' do
    in_kata { assert_invalid_avatar_name_raises }
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # All in-container assertions share one kata/avatar session, presumably
  # because each container round-trip is expensive.
  os_test '8A3',
  'in-container tests' do
    in_kata_as(salmon) {
      assert_run_is_initially_red
      assert_pid_1_is_running_init_process
      assert_time_stamp_microseconds_granularity
      assert_env_vars_exist
      assert_avatar_users_exist
      assert_cyber_dojo_group_exists
      assert_avatar_has_home
      assert_avatar_sandbox_properties
      assert_starting_files_properties
      assert_ulimits
      assert_baseline_speed_test
      # these two create new files
      assert_files_can_be_in_sub_dirs_of_sandbox
      assert_files_can_be_in_sub_sub_dirs_of_sandbox
    }
  end

  private

  # An unknown avatar name must be rejected with a specific ArgumentError.
  def assert_invalid_avatar_name_raises
    error = assert_raises(ArgumentError) {
      run_cyber_dojo_sh({ avatar_name:'polaroid' })
    }
    assert_equal 'avatar_name:invalid', error.message
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def assert_run_is_initially_red
    run_cyber_dojo_sh
    assert_colour 'red'
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def assert_pid_1_is_running_init_process
    cmd = 'cat /proc/1/cmdline'
    proc1 = assert_cyber_dojo_sh(cmd).strip
    # odd, but there _is_ an embedded nul-character
    expected = '/dev/init' + 0.chr + '--'
    assert proc1.start_with?(expected), proc1
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Each CYBER_DOJO_* env-var inside the container must match the
  # corresponding value known to the test harness.
  def assert_env_vars_exist
    cmd = 'printenv CYBER_DOJO_KATA_ID'
    env_kata_id = assert_cyber_dojo_sh(cmd).strip
    cmd = 'printenv CYBER_DOJO_AVATAR_NAME'
    env_avatar_name = assert_cyber_dojo_sh(cmd).strip
    cmd = 'printenv CYBER_DOJO_SANDBOX'
    env_sandbox_dir = assert_cyber_dojo_sh(cmd).strip
    cmd = 'printenv CYBER_DOJO_RUNNER'
    env_runner = assert_cyber_dojo_sh(cmd).strip
    assert_equal kata_id, env_kata_id
    assert_equal avatar_name, env_avatar_name
    assert_equal sandbox_dir, env_sandbox_dir
    assert_equal 'stateless', env_runner
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  include AllAvatarsNames

  # NOTE(review): the loop body checks user_id for every avatar name but
  # never uses `name` to derive it - confirm user_id varies per avatar,
  # otherwise this asserts the same thing repeatedly.
  def assert_avatar_users_exist
    etc_passwd = assert_cyber_dojo_sh 'cat /etc/passwd'
    all_avatars_names.each do |name|
      assert etc_passwd.include?(user_id.to_s),
        "#{name}:#{user_id}:#{etc_passwd}:#{image_name}"
    end
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def assert_cyber_dojo_group_exists
    stdout = assert_cyber_dojo_sh("getent group #{group}").strip
    entries = stdout.split(':') # cyber-dojo:x:5000
    assert_equal group, entries[0], stdout
    assert_equal group_id, entries[2].to_i, stdout
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def assert_avatar_has_home
    home = assert_cyber_dojo_sh('printenv HOME').strip
    assert_equal home_dir, home
    cd_home_pwd = assert_cyber_dojo_sh('cd ~ && pwd').strip
    assert_equal home_dir, cd_home_pwd
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # The sandbox directory must exist, be non-empty, and be owned by the
  # avatar's user/group with 755 permissions.
  def assert_avatar_sandbox_properties
    assert_cyber_dojo_sh "[ -d #{sandbox_dir} ]" # sandbox exists
    ls = assert_cyber_dojo_sh "ls -A #{sandbox_dir}"
    refute_equal '', ls # sandbox is not empty
    stat_uid = assert_cyber_dojo_sh("stat -c '%u' #{sandbox_dir}").strip.to_i
    stat_gid = assert_cyber_dojo_sh("stat -c '%g' #{sandbox_dir}").strip.to_i
    stat_perms = assert_cyber_dojo_sh("stat -c '%A' #{sandbox_dir}").strip
    assert_equal user_id, stat_uid, 'stat <user>'
    assert_equal group_id, stat_gid, 'stat <gid>'
    assert_equal 'drwxr-xr-x', stat_perms, 'stat <permissions>'
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Replaces cyber-dojo.sh with an ls and checks every starting file landed
  # in the sandbox with the expected permissions and size.
  def assert_starting_files_properties
    run_cyber_dojo_sh({
      changed_files: { 'cyber-dojo.sh' => ls_cmd }
    })
    assert_colour 'amber' # doing an ls
    assert_stderr ''
    ls_stdout = stdout
    ls_files = ls_parse(ls_stdout)
    assert_equal starting_files.keys.sort, ls_files.keys.sort
    starting_files.each do |filename,content|
      if filename == 'cyber-dojo.sh'
        # cyber-dojo.sh was overwritten with ls_cmd above.
        content = ls_cmd
      end
      assert_stats(filename, '-rw-r--r--', content.length)
    end
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Checks the container's resource limits against expected hard-coded
  # values (sizes are expressed in ulimit's own units: kb or 512-byte blocks).
  def assert_ulimits
    etc_issue = assert_cyber_dojo_sh('cat /etc/issue')
    lines = assert_cyber_dojo_sh('ulimit -a').split("\n")
    assert_equal 0, ulimit(lines, :core_size, etc_issue)
    assert_equal 128, ulimit(lines, :file_locks, etc_issue)
    assert_equal 128, ulimit(lines, :no_files, etc_issue)
    assert_equal 128, ulimit(lines, :processes, etc_issue)
    kb = 1024
    mb = 1024 * kb
    gb = 1024 * mb
    expected_max_data_size = 4 * gb / kb
    expected_max_file_size = 16 * mb / (block_size = 512)
    expected_max_stack_size = 8 * mb / kb
    actual_max_data_size = ulimit(lines, :data_size, etc_issue)
    actual_max_file_size = ulimit(lines, :file_size, etc_issue)
    actual_max_stack_size = ulimit(lines, :stack_size, etc_issue)
    assert_equal expected_max_data_size, actual_max_data_size
    assert_equal expected_max_file_size, actual_max_file_size
    assert_equal expected_max_stack_size, actual_max_stack_size
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Extracts the numeric value of one limit from `ulimit -a` output.
  # The label text differs between Alpine (busybox) and Ubuntu (bash),
  # hence the two-column lookup table keyed off /etc/issue.
  def ulimit(lines, key, etc_issue)
    table = { # alpine, ubuntu
      :core_size => [ '-c: core file size (blocks)', 'coredump(blocks)'],
      :data_size => [ '-d: data seg size (kb)', 'data(kbytes)' ],
      :file_locks => [ '-w: locks', 'locks' ],
      :file_size => [ '-f: file size (blocks)', 'file(blocks)' ],
      :no_files => [ '-n: file descriptors', 'nofiles' ],
      :processes => [ '-p: processes', 'process' ],
      :stack_size => [ '-s: stack size (kb)', 'stack(kbytes)' ],
    }
    row = table[key]
    diagnostic = "no ulimit table entry for #{key}"
    refute_nil row, diagnostic
    if etc_issue.include? 'Alpine'
      txt = row[0]
    end
    if etc_issue.include? 'Ubuntu'
      txt = row[1]
    end
    line = lines.detect { |limit| limit.start_with? txt }
    line.split[-1].to_i
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Runs a no-op command five times and asserts the mean round-trip time
  # stays under 800 milliseconds.
  def assert_baseline_speed_test
    timings = []
    (1..5).each do
      started_at = Time.now
      assert_cyber_dojo_sh('true')
      stopped_at = Time.now
      diff = Time.at(stopped_at - started_at).utc
      secs = diff.strftime("%S").to_i
      millisecs = diff.strftime("%L").to_i
      timings << (secs * 1000 + millisecs)
    end
    mean = timings.reduce(0, :+) / timings.size
    assert mean < max=800, "mean=#{mean}, max=#{max}"
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def assert_files_can_be_in_sub_dirs_of_sandbox
    sub_dir = 'z'
    filename = 'hello.txt'
    content = 'the boy stood on the burning deck'
    # NOTE(review): "#(unknown)" below looks like a corrupted "#{filename}"
    # interpolation - confirm against version control.
    run_cyber_dojo_sh({
      changed_files: { 'cyber-dojo.sh' => "cd #{sub_dir} && #{ls_cmd}" },
      new_files: { "#{sub_dir}/#(unknown)" => content }
    })
    assert_stats(filename, '-rw-r--r--', content.length)
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def assert_files_can_be_in_sub_sub_dirs_of_sandbox
    sub_sub_dir = 'a/b'
    filename = 'goodbye.txt'
    content = 'goodbye cruel world'
    # NOTE(review): "#(unknown)" below looks like a corrupted "#{filename}"
    # interpolation - confirm against version control.
    run_cyber_dojo_sh({
      changed_files: { 'cyber-dojo.sh' => "cd #{sub_sub_dir} && #{ls_cmd}" },
      new_files: { "#{sub_sub_dir}/#(unknown)" => content }
    })
    assert_stats(filename, '-rw-r--r--', content.length)
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def assert_time_stamp_microseconds_granularity
    # On _default_ Alpine date-time file-stamps are to
    # the second granularity. In other words, the
    # microseconds value is always '000000000'.
    # Make sure the tar-piped files have fixed this.
    run_cyber_dojo_sh({
      changed_files: { 'cyber-dojo.sh' => ls_cmd }
    })
    count = 0
    ls_parse(stdout).each do |filename,atts|
      count += 1
      refute_nil atts, filename
      stamp = atts[:time_stamp] # eg '07:03:14.835233538'
      microsecs = stamp.split((/[\:\.]/))[-1]
      assert_equal 9, microsecs.length
      refute_equal '0'*9, microsecs
    end
    assert_equal 5, count
  end

  private

  # Asserts the ls-entry for filename (parsed from the current stdout) has
  # the expected permissions, owner, group and size.
  def assert_stats(filename, permissions, size)
    ls_files = ls_parse(stdout)
    stats = ls_files[filename]
    refute_nil stats, filename
    diagnostic = { filename => stats }
    assert_equal permissions, stats[:permissions], diagnostic
    assert_equal user_id, stats[:user ], diagnostic
    assert_equal group, stats[:group], diagnostic
    assert_equal size, stats[:size ], diagnostic
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  # Parses ls_cmd output into { filename => attribute-hash }.
  def ls_parse(ls_stdout)
    Hash[ls_stdout.split("\n").collect { |line|
      attr = line.split
      [attr[0], { # filename
        permissions: attr[1],
        user: attr[2].to_i,
        group: attr[3],
        size: attr[4].to_i,
        time_stamp: attr[6],
      }]
    }]
  end

  # - - - - - - - - - - - - - - - - - - - - - - - - - -

  def ls_cmd;
    # Works on Ubuntu and Alpine
    'stat -c "%n %A %u %G %s %y" *'
    # hiker.h -rw-r--r-- 40045 cyber-dojo 136 2016-06-05 07:03:14.539952547
    # | | | | | | |
    # filename permissions user group size date time
    # 0 1 2 3 4 5 6
    # Stat
    # %z == time of last status change
    # %y == time of last data modification <<=====
    # %x == time of last access
    # %w == time of file birth
  end

end
|
require_relative "boggle_board"

# Specs for BoggleGame construction.
# Fixes: removed five empty `it ""` placeholder examples, and added the
# missing closing `end` for the outer `describe BoggleGame` block (without
# it the file did not parse).
describe BoggleGame do
  describe "#initialize" do
    let(:valid_game) { BoggleGame.new }

    it "initializes with a board" do
      expect(valid_game.board).to_not be(nil)
    end
  end
end
# require_relative 'boggle_board'
# describe "Boggle Board - Release 0" do
# let(:board) { BoggleBoard.new }
# it "can be shaken" do
# expect{board.shake!}.not_to raise_error
# end
# it 'has a printable representation of 4 underscores in each of 4 rows on separate lines BEFORE it is shaken' do
# expect(board.to_s).to match(/^_{4}${4}/)
# end
# it 'has a printable representation of 4 letters in each of 4 rows on separate lines AFTER it is shaken' do
# board.shake!
# expect(board.to_s).to match(/^[A-Z]{4}${4}/)
# end
# it 'has the same state every time to_s is called' do
# first_string = board.to_s
# second_string = board.to_s
# expect(first_string).to eq second_string
# end
# it 'has a different state after shake is called' do
# first_string = board.to_s
# board.shake!
# second_string = board.to_s
# expect(first_string).not_to eq second_string
# end
# it 'has 4 rows' do
# expect(board.shake!.length).to eq(4)
# end
# it 'has 4 rows with 4 letters each' do
# expect(board.shake!.all? {|row| row.length == 4}).to be_true
# end
# end
# describe "Boggle Board - Release 1" do
# pending "Write your own tests here"
# end
# describe "Boggle Board - Release 2" do
# pending "Write your own tests here"
# end
remove blank tests
require_relative "boggle_board"

# Specs for BoggleGame construction.
describe BoggleGame do
  describe "#initialize" do
    let(:valid_game) { BoggleGame.new }

    it "initializes with a board" do
      # Equivalent to `to_not be(nil)`: the board must be present.
      expect(valid_game.board).not_to be_nil
    end
  end
end
# require_relative 'boggle_board'
# describe "Boggle Board - Release 0" do
# let(:board) { BoggleBoard.new }
# it "can be shaken" do
# expect{board.shake!}.not_to raise_error
# end
# it 'has a printable representation of 4 underscores in each of 4 rows on separate lines BEFORE it is shaken' do
# expect(board.to_s).to match(/^_{4}${4}/)
# end
# it 'has a printable representation of 4 letters in each of 4 rows on separate lines AFTER it is shaken' do
# board.shake!
# expect(board.to_s).to match(/^[A-Z]{4}${4}/)
# end
# it 'has the same state every time to_s is called' do
# first_string = board.to_s
# second_string = board.to_s
# expect(first_string).to eq second_string
# end
# it 'has a different state after shake is called' do
# first_string = board.to_s
# board.shake!
# second_string = board.to_s
# expect(first_string).not_to eq second_string
# end
# it 'has 4 rows' do
# expect(board.shake!.length).to eq(4)
# end
# it 'has 4 rows with 4 letters each' do
# expect(board.shake!.all? {|row| row.length == 4}).to be_true
# end
# end
# describe "Boggle Board - Release 1" do
# pending "Write your own tests here"
# end
# describe "Boggle Board - Release 2" do
# pending "Write your own tests here"
# end
|
#
# Cookbook Name:: munin
# Recipe:: gatherer
#
# Copyright 2012, Cogini
#
include_recipe 'munin::default'

package 'munin' do
  action :install
end

# Declare the munin (gatherer/master) service so the template below has a
# resource to notify. Previously the template notified 'service[munin-node]',
# which is the node agent - not the gatherer - and is not declared in this
# recipe, so the munin.conf change restarted the wrong daemon (or failed to
# resolve the notification).
service 'munin' do
  action [:enable, :start]
end

template '/etc/munin/munin.conf' do
  mode 0644
  source 'munin.conf.erb'
  notifies :restart, 'service[munin]'
end
restart munin after updating munin.conf
#
# Cookbook Name:: munin
# Recipe:: gatherer
#
# Copyright 2012, Cogini
#
# Install and run the munin gatherer (master) and render its configuration.
include_recipe 'munin::default'

package 'munin' do
  action :install
end

# Ensure the gatherer service is enabled and running; the template below
# notifies it when munin.conf changes.
service 'munin' do
  action [:enable, :start]
end

template '/etc/munin/munin.conf' do
  mode 0644
  source 'munin.conf.erb'
  notifies :restart, 'service[munin]'
end
|
module Devise
  module Oauth2Providable
    # OAuth2 authorization endpoint (authorization-code and implicit grants)
    # built on Rack::OAuth2::Server::Authorize.
    class AuthorizationsController < ApplicationController
      before_filter :authenticate_user!

      # Malformed authorize requests render the error template with the
      # status carried by the Rack::OAuth2 exception.
      rescue_from Rack::OAuth2::Server::Authorize::BadRequest do |e|
        @error = e
        render :error, :status => e.status
      end

      # GET: validate the request and show the approval page.
      def new
        respond *authorize_endpoint.call(request.env)
      end

      # POST: process the user's approve/deny decision.
      def create
        respond *authorize_endpoint(:allow_approval).call(request.env)
      end

      private

      # Copies auth-related headers from the Rack response, then either
      # follows its redirect or re-renders the approval page.
      # NOTE: the Rack status is deliberately not propagated; redirect_to
      # and render set their own status.
      def respond(status, header, response)
        ["WWW-Authenticate"].each do |key|
          headers[key] = header[key] if header[key].present?
        end
        if response.redirect?
          redirect_to header['Location']
        else
          render :new
        end
      end

      # Builds the Rack::OAuth2 authorize endpoint. With allow_approval the
      # user's decision is processed; otherwise the request is only
      # validated so the approval page can be rendered.
      def authorize_endpoint(allow_approval = false)
        Rack::OAuth2::Server::Authorize.new do |req, res|
          @client = Client.find_by_identifier(req.client_id) || req.bad_request!
          res.redirect_uri = @redirect_uri = req.verify_redirect_uri!(@client.redirect_uri)
          if allow_approval
            if params[:approve].present?
              case req.response_type
              when :code
                authorization_code = current_user.authorization_codes.create!(:client => @client)
                res.code = authorization_code.token
              when :token
                access_token = current_user.access_tokens.create!(:client => @client).token
                bearer_token = Rack::OAuth2::AccessToken::Bearer.new(:access_token => access_token)
                res.access_token = bearer_token
                # Removed `res.uid = current_user.id`: uid is not part of the
                # OAuth2 implicit-grant response and is not a supported
                # attribute of the Rack::OAuth2 response object.
              end
              res.approve!
            else
              req.access_denied!
            end
          else
            @response_type = req.response_type
          end
        end
      end
    end
  end
end
Remove the error-prone `res.uid` assignment line.
module Devise
  module Oauth2Providable
    # OAuth2 authorization endpoint: renders the approval screen (GET new)
    # and processes the user's grant/deny decision (POST create) for both
    # the authorization-code and implicit (token) response types.
    class AuthorizationsController < ApplicationController
      before_filter :authenticate_user!

      # Malformed authorization requests raised by the Rack::OAuth2
      # endpoint render a friendly error page with the proper status.
      rescue_from Rack::OAuth2::Server::Authorize::BadRequest do |e|
        @error = e
        render :error, :status => e.status
      end

      # GET: validate client_id / redirect_uri and show the approval form.
      def new
        respond *authorize_endpoint.call(request.env)
      end

      # POST: process the user's decision (params[:approve]).
      def create
        respond *authorize_endpoint(:allow_approval).call(request.env)
      end

      private

      # Translate the Rack triple returned by the authorize endpoint into
      # a Rails response. +status+ is unused here: redirect_to and render
      # supply their own status codes.
      def respond(status, header, response)
        ["WWW-Authenticate"].each do |key|
          headers[key] = header[key] if header[key].present?
        end
        if response.redirect?
          redirect_to header['Location']
        else
          render :new
        end
      end

      # Build the Rack::OAuth2 authorize endpoint. With +allow_approval+
      # truthy the user's decision is enacted (issue a code or bearer
      # token, or deny); otherwise the request is only validated so the
      # approval form can be rendered. bad_request! / verify_redirect_uri! /
      # access_denied! raise, aborting the block early.
      def authorize_endpoint(allow_approval = false)
        Rack::OAuth2::Server::Authorize.new do |req, res|
          @client = Client.find_by_identifier(req.client_id) || req.bad_request!
          res.redirect_uri = @redirect_uri = req.verify_redirect_uri!(@client.redirect_uri)
          if allow_approval
            if params[:approve].present?
              case req.response_type
              when :code
                authorization_code = current_user.authorization_codes.create!(:client => @client)
                res.code = authorization_code.token
              when :token
                access_token = current_user.access_tokens.create!(:client => @client).token
                bearer_token = Rack::OAuth2::AccessToken::Bearer.new(:access_token => access_token)
                res.access_token = bearer_token
                # Disabled as error prone: uid does not appear to be part
                # of the Rack::OAuth2 token-response contract.
                # res.uid = current_user.id
              end
              res.approve!
            else
              req.access_denied!
            end
          else
            @response_type = req.response_type
          end
        end
      end
    end
  end
end
|
module Mixins
  module Actions
    module VmActions
      # Controller actions for disassociating a floating IP from a cloud
      # instance: toolbar entry point, form rendering, JSON form fields,
      # and submit/cancel handling.
      module DisassociateFloatingIp
        # Toolbar entry point: verifies the selected instance supports the
        # operation and routes to the form (explorer partial or classic
        # redirect).
        def disassociate_floating_ip_vms
          assert_privileges("instance_disassociate_floating_ip")
          recs = checked_or_params
          @record = find_record_with_rbac(VmCloud, recs.first)
          if @record.supports_disassociate_floating_ip? && @record.ext_management_system.present?
            if @explorer
              disassociate_floating_ip
              @refresh_partial = "vm_common/disassociate_floating_ip"
            else
              render :update do |page|
                page << javascript_prologue
                page.redirect_to(:controller => 'vm',
                                 :action     => 'disassociate_floating_ip',
                                 :rec_id     => @record.id,
                                 :escape     => false)
              end
            end
          else
            add_flash(_("Unable to disassociate Floating IP from Instance \"%{name}\": %{details}") % {
              :name    => @record.name,
              :details => @record.unsupported_reason(:disassociate_floating_ip)
            }, :error)
          end
        end
        alias instance_disassociate_floating_ip disassociate_floating_ip_vms

        # Renders the disassociate form; breadcrumb only outside explorer.
        def disassociate_floating_ip
          assert_privileges("instance_disassociate_floating_ip")
          @record ||= find_record_with_rbac(VmCloud, params[:rec_id])
          unless @explorer
            drop_breadcrumb(
              # Typo fix: was "Disssociate".
              :name => _("Disassociate Floating IP from Instance '%{name}'") % {:name => @record.name},
              :url  => "/vm_cloud/disassociate_floating_ip"
            )
          end
          @sb[:explorer] = @explorer
          @in_a_form = true
          # NOTE(review): flag name looks borrowed from the live-migrate
          # screens -- confirm it is actually needed here.
          @live_migrate = true
          render :action => "show" unless @explorer
        end

        # JSON endpoint feeding the form's floating-IP dropdown; empty
        # list when the record has no provider.
        def disassociate_floating_ip_form_fields
          assert_privileges("instance_disassociate_floating_ip")
          @record = find_record_with_rbac(VmCloud, params[:id])
          floating_ips = []
          unless @record.ext_management_system.nil?
            @record.floating_ips.each do |floating_ip|
              floating_ips << floating_ip
            end
          end
          render :json => {
            :floating_ips => floating_ips
          }
        end

        # Dispatches the form's submit/cancel buttons.
        def disassociate_floating_ip_vm
          assert_privileges("instance_disassociate_floating_ip")
          @record = find_record_with_rbac(VmCloud, params[:id])
          case params[:button]
          when "cancel" then disassociate_handle_cancel_button
          when "submit" then disassociate_handle_submit_button
          end
        end

        private

        # Cancel: flash, clear state, and return to the previous screen.
        def disassociate_handle_cancel_button
          add_flash(_("Disassociation of Floating IP from Instance \"%{name}\" was cancelled by the user") % {:name => @record.name})
          @record = @sb[:action] = nil
          if @sb[:explorer]
            replace_right_cell
          else
            flash_to_session
            render :update do |page|
              page << javascript_prologue
              page.redirect_to(previous_breadcrumb_url)
            end
          end
        end

        # Submit: queue the disassociation (errors flashed), then reset
        # state and navigate back.
        def disassociate_handle_submit_button
          if @record.supports_disassociate_floating_ip?
            floating_ip = params[:floating_ip][:address]
            begin
              @record.disassociate_floating_ip_queue(session[:userid], floating_ip)
              add_flash(_("Disassociating Floating IP %{address} from Instance \"%{name}\"") % {
                :address => floating_ip,
                :name    => @record.name
              })
            rescue => ex
              add_flash(_("Unable to disassociate Floating IP %{address} from Instance \"%{name}\": %{details}") % {
                :address => floating_ip,
                :name    => @record.name,
                :details => get_error_message_from_fog(ex.to_s)
              }, :error)
            end
          else
            add_flash(_("Unable to disassociate Floating IP from Instance \"%{name}\": %{details}") % {
              :name    => @record.name,
              :details => @record.unsupported_reason(:disassociate_floating_ip)
            }, :error)
          end
          params[:id] = @record.id.to_s # reset id in params for show
          @record = nil
          @sb[:action] = nil
          if @sb[:explorer]
            replace_right_cell
          else
            flash_to_session
            render :update do |page|
              page << javascript_prologue
              page.redirect_to(previous_breadcrumb_url)
            end
          end
        end
      end
    end
  end
end
Fix RuboCop warnings in the DisassociateFloatingIp mixin.
module Mixins
  module Actions
    module VmActions
      # Controller actions for disassociating a floating IP from a cloud
      # instance: toolbar entry point, form rendering, JSON form fields,
      # and submit/cancel handling.
      module DisassociateFloatingIp
        # Toolbar entry point: verifies the selected instance supports the
        # operation and routes to the form (explorer partial or classic
        # redirect).
        def disassociate_floating_ip_vms
          assert_privileges("instance_disassociate_floating_ip")
          recs = checked_or_params
          @record = find_record_with_rbac(VmCloud, recs.first)
          if @record.supports_disassociate_floating_ip? && @record.ext_management_system.present?
            if @explorer
              disassociate_floating_ip
              @refresh_partial = "vm_common/disassociate_floating_ip"
            else
              render :update do |page|
                page << javascript_prologue
                page.redirect_to(:controller => 'vm',
                                 :action     => 'disassociate_floating_ip',
                                 :rec_id     => @record.id,
                                 :escape     => false)
              end
            end
          else
            add_flash(_("Unable to disassociate Floating IP from Instance \"%{name}\": %{details}") % {
              :name    => @record.name,
              :details => @record.unsupported_reason(:disassociate_floating_ip)
            }, :error)
          end
        end
        alias instance_disassociate_floating_ip disassociate_floating_ip_vms

        # Renders the disassociate form; breadcrumb only outside explorer.
        def disassociate_floating_ip
          assert_privileges("instance_disassociate_floating_ip")
          @record ||= find_record_with_rbac(VmCloud, params[:rec_id])
          unless @explorer
            drop_breadcrumb(
              # Typo fix: was "Disssociate".
              :name => _("Disassociate Floating IP from Instance '%{name}'") % {:name => @record.name},
              :url  => "/vm_cloud/disassociate_floating_ip"
            )
          end
          @sb[:explorer] = @explorer
          @in_a_form = true
          # NOTE(review): flag name looks borrowed from the live-migrate
          # screens -- confirm it is actually needed here.
          @live_migrate = true
          render :action => "show" unless @explorer
        end

        # JSON endpoint feeding the form's floating-IP dropdown; empty
        # list when the record has no provider.
        def disassociate_floating_ip_form_fields
          assert_privileges("instance_disassociate_floating_ip")
          @record = find_record_with_rbac(VmCloud, params[:id])
          floating_ips = []
          unless @record.ext_management_system.nil?
            @record.floating_ips.each do |floating_ip|
              floating_ips << floating_ip
            end
          end
          render :json => {
            :floating_ips => floating_ips
          }
        end

        # Dispatches the form's submit/cancel buttons.
        def disassociate_floating_ip_vm
          assert_privileges("instance_disassociate_floating_ip")
          @record = find_record_with_rbac(VmCloud, params[:id])
          case params[:button]
          when "cancel" then disassociate_handle_cancel_button
          when "submit" then disassociate_handle_submit_button
          end
        end

        private

        # Cancel: flash, clear state, and return to the previous screen.
        def disassociate_handle_cancel_button
          add_flash(_("Disassociation of Floating IP from Instance \"%{name}\" was cancelled by the user") % {:name => @record.name})
          @record = @sb[:action] = nil
          if @sb[:explorer]
            replace_right_cell
          else
            flash_to_session
            render :update do |page|
              page << javascript_prologue
              page.redirect_to(previous_breadcrumb_url)
            end
          end
        end

        # Submit: queue the disassociation (errors flashed), then reset
        # state and navigate back.
        def disassociate_handle_submit_button
          if @record.supports_disassociate_floating_ip?
            floating_ip = params[:floating_ip][:address]
            begin
              @record.disassociate_floating_ip_queue(session[:userid], floating_ip)
              add_flash(_("Disassociating Floating IP %{address} from Instance \"%{name}\"") % {
                :address => floating_ip,
                :name    => @record.name
              })
            rescue => ex
              add_flash(_("Unable to disassociate Floating IP %{address} from Instance \"%{name}\": %{details}") % {
                :address => floating_ip,
                :name    => @record.name,
                :details => get_error_message_from_fog(ex.to_s)
              }, :error)
            end
          else
            add_flash(_("Unable to disassociate Floating IP from Instance \"%{name}\": %{details}") % {
              :name    => @record.name,
              :details => @record.unsupported_reason(:disassociate_floating_ip)
            }, :error)
          end
          params[:id] = @record.id.to_s # reset id in params for show
          @record = nil
          @sb[:action] = nil
          if @sb[:explorer]
            replace_right_cell
          else
            flash_to_session
            render :update do |page|
              page << javascript_prologue
              page.redirect_to(previous_breadcrumb_url)
            end
          end
        end
      end
    end
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.