repo
stringlengths
5
92
file_url
stringlengths
80
287
file_path
stringlengths
5
197
content
stringlengths
0
32.8k
language
stringclasses
1 value
license
stringclasses
7 values
commit_sha
stringlengths
40
40
retrieved_at
stringdate
2026-01-04 15:37:27
2026-01-04 17:58:21
truncated
bool
2 classes
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/invoice_payment.rb
lib/harvest/invoice_payment.rb
module Harvest
  # A payment recorded against an invoice.
  class InvoicePayment < Hashie::Mash
    include Harvest::Model

    api_path '/payments'

    # Harvest wraps payment attributes under the "payment" key rather
    # than the root that would be derived from the class name.
    def self.json_root
      'payment'
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/contact.rb
lib/harvest/contact.rb
module Harvest
  # The model that contains information about a client contact
  #
  # == Fields
  # [+id+]           (READONLY) the id of the contact
  # [+client_id+]    (REQUIRED) the id of the client this contact is associated with
  # [+first_name+]   (REQUIRED) the first name of the contact
  # [+last_name+]    (REQUIRED) the last name of the contact
  # [+email+]        the email of the contact
  # [+title+]        the title of the contact
  # [+phone_office+] the office phone number of the contact
  # [+phone_mobile+] the mobile phone number of the contact
  # [+fax+]          the fax number of the contact
  class Contact < Hashie::Mash
    include Harvest::Model

    api_path '/contacts'
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/model.rb
lib/harvest/model.rb
module Harvest
  # Shared behavior mixed into every Harvest model: JSON serialization
  # helpers, id-based identity/equality, and class-level configuration
  # (api_path, json_root, skip_json_root).
  module Model
    def self.included(base)
      base.send :include, InstanceMethods
      base.send :extend, ClassMethods
    end

    module InstanceMethods
      # Serializes the model; attributes are wrapped under the class's
      # json_root unless the class opted out via skip_json_root.
      def to_json(*args)
        as_json(*args).to_json(*args)
      end

      def as_json(args = {})
        attributes = self.to_hash.stringify_keys
        attributes.delete("cache_version")
        self.class.skip_json_root? ? attributes : { self.class.json_root => attributes }
      end

      # Models are identified by their +id+.
      def to_i
        id
      end

      def ==(other)
        other.kind_of?(self.class) && id == other.id
      end

      # The user a request is performed on behalf of: +of_user+ wins over
      # +user_id+ when both respond; nil when neither does.
      def impersonated_user_id
        has_of_user = respond_to?(:of_user)
        has_user_id = respond_to?(:user_id)
        if has_of_user && has_user_id
          of_user || user_id
        elsif has_user_id
          user_id
        elsif has_of_user
          of_user
        end
      end

      def json_root
        self.class.json_root
      end
    end

    module ClassMethods
      # This sets the API path so the API collections can use them in an agnostic way
      # @return [void]
      def api_path(path = nil)
        @_api_path ||= path
      end

      def skip_json_root(skip = nil)
        @_skip_json_root ||= skip
      end

      def skip_json_root?
        @_skip_json_root == true
      end

      # Builds model instances from a JSON string or an already-parsed
      # structure, unwrapping each record's json_root unless skipped.
      def parse(json)
        data = json.is_a?(String) ? JSON.parse(json) : json
        Array.wrap(data).map do |attrs|
          skip_json_root? ? new(attrs) : new(attrs[json_root])
        end
      end

      def json_root
        Harvest::Model::Utility.underscore(
          Harvest::Model::Utility.demodulize(to_s)
        )
      end

      # Accepts either an existing model or a plain attribute hash and
      # returns a model instance; anything else passes through unchanged.
      def wrap(model_or_attrs)
        case model_or_attrs
        when Hashie::Mash then model_or_attrs
        when Hash then new(model_or_attrs)
        else model_or_attrs
        end
      end

      # Defines simple reader aliases, e.g. :admin? => :is_admin.
      def delegate_methods(options)
        raise "no methods given" if options.empty?
        options.each do |alias_name, target|
          class_eval <<-RUBY
            def #{alias_name}
              #{target}
            end
          RUBY
        end
      end
    end

    module Utility
      class << self
        # Both methods are shamelessly ripped from
        # https://github.com/rails/rails/blob/master/activesupport/lib/active_support/inflector/inflections.rb

        # Removes the module part from the expression in the string.
        #   "ActiveRecord::CoreExtensions::String::Inflections".demodulize # => "Inflections"
        def demodulize(class_name_in_module)
          class_name_in_module.to_s.gsub(/^.*::/, '')
        end

        # Makes an underscored, lowercase form from the expression in the
        # string; '::' namespaces become '/' paths.
        #   "ActiveRecord::Errors".underscore # => "active_record/errors"
        #   "SSLError".underscore             # => "ssl_error"
        def underscore(camel_cased_word)
          result = camel_cased_word.to_s.dup
          result.gsub!(/::/, '/')
          result.gsub!(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
          result.gsub!(/([a-z\d])([A-Z])/, '\1_\2')
          result.tr!("-", "_")
          result.downcase!
          result
        end
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/hardy_client.rb
lib/harvest/hardy_client.rb
module Harvest class HardyClient < Delegator def initialize(client, max_retries) super(client) @_sd_obj = @client = client @max_retries = max_retries (@client.public_methods - Object.public_instance_methods).each do |name| instance_eval <<-END def #{name}(*args) wrap_collection do @client.send('#{name}', *args) end end END end end def __getobj__; @_sd_obj; end def __setobj__(obj); @_sd_obj = obj; end def wrap_collection collection = yield HardyCollection.new(collection, self, @max_retries) end class HardyCollection < Delegator def initialize(collection, client, max_retries) super(collection) @_sd_obj = @collection = collection @client = client @max_retries = max_retries (@collection.public_methods - Object.public_instance_methods).each do |name| instance_eval <<-END def #{name}(*args) retry_rate_limits do @collection.send('#{name}', *args) end end END end end def __getobj__; @_sd_obj; end def __setobj__(obj); @_sd_obj = obj; end def retry_rate_limits retries = 0 retry_func = lambda do |e| if retries < @max_retries retries += 1 true else raise e end end begin yield rescue Harvest::RateLimited => e seconds = if e.response.headers["retry-after"] e.response.headers["retry-after"].to_i else 16 end sleep(seconds) retry rescue Harvest::Unavailable, Harvest::InformHarvest => e would_retry = retry_func.call(e) sleep(16) if @client.account.rate_limit_status.over_limit? retry if would_retry rescue Net::HTTPError, Net::HTTPFatalError => e retry if retry_func.call(e) rescue SystemCallError => e retry if e.is_a?(Errno::ECONNRESET) && retry_func.call(e) end end end end end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/user.rb
lib/harvest/user.rb
module Harvest
  # The model that contains information about a user
  #
  # == Fields
  # [+id+]          (READONLY) the id of the user
  # [+email+]       the email of the user
  # [+first_name+]  the first name for the user
  # [+last_name+]   the last name for the user
  # [+telephone+]   the telephone for the user
  # [+department+]  the department for the user
  # [+has_access_to_all_future_projects+] whether the user should be added to future projects by default
  # [+hourly_rate+] what the default hourly rate for the user is
  # [+admin?+]      whether the user is an admin
  # [+contractor?+] whether the user is a contractor
  # [+timezone+]    the timezone for the user
  class User < Hashie::Mash
    include Harvest::Model

    api_path '/people'
    delegate_methods(:active? => :is_active, :admin? => :is_admin, :contractor? => :is_contractor)

    def initialize(args = {}, _ = nil)
      args = args.to_hash.stringify_keys
      # normalize the friendlier "admin"/"timezone" keys before Mash sees them
      args["is_admin"] = args.delete("admin") if args["admin"]
      self.timezone = args.delete("timezone") if args["timezone"]
      super
    end

    # Sets the timezone for the user. This can be done in a variety of ways.
    #
    # == Examples
    #   user.timezone = :cst                       # the easiest way. CST, EST, MST, and PST are supported
    #   user.timezone = 'america/chicago'          # a little more verbose
    #   user.timezone = 'Central Time (US & Canada)' # the most explicit way
    def timezone=(timezone)
      abbreviation = timezone.to_s.downcase
      case abbreviation
      when 'cst', 'cdt'
        self.timezone = 'america/chicago'
      when 'est', 'edt'
        self.timezone = 'america/new_york'
      when 'mst', 'mdt'
        self.timezone = 'america/denver'
      when 'pst', 'pdt'
        self.timezone = 'america/los_angeles'
      else
        self["timezone"] = Harvest::Timezones::MAPPING[abbreviation] || timezone
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/expenses.rb
lib/harvest/api/expenses.rb
module Harvest
  module API
    class Expenses < Base
      api_model Harvest::Expense
      include Harvest::Behavior::Crud

      # Retrieves expenses for the given date (the endpoint is addressed
      # by day-of-year and year).
      #
      # @param date [Time, String] the date to query; Strings are parsed
      # @param user [Harvest::User, Integer, nil] optionally scope to a user
      # @return [Array<Harvest::Expense>]
      def all(date = ::Time.now, user = nil)
        date = ::Time.parse(date) if String === date
        response = request(:get, credentials, "#{api_model.api_path}/#{date.yday}/#{date.year}", :query => of_user_query(user))
        api_model.parse(response.parsed_response)
      end

      # Attaches a receipt to an expense via a multipart upload.
      #
      # @param expense [Harvest::Expense, Integer] the expense (or its id)
      # @param filename [String] filename reported in the multipart form
      # @param receipt [IO] an IO object whose contents are uploaded
      def attach(expense, filename, receipt)
        body = ""
        body << "--__X_ATTACH_BOUNDARY__\r\n"
        # FIX: the filename argument was never interpolated into the
        # Content-Disposition header; it is now.
        body << %Q{Content-Disposition: form-data; name="expense[receipt]"; filename="#{filename}"\r\n}
        body << "\r\n#{receipt.read}"
        body << "\r\n--__X_ATTACH_BOUNDARY__--\r\n\r\n"

        request(
          :post, credentials, "#{api_model.api_path}/#{expense.to_i}/receipt",
          :headers => {
            'Content-Type' => 'multipart/form-data; charset=utf-8; boundary=__X_ATTACH_BOUNDARY__',
            # FIX: Content-Length counts bytes; String#length counts
            # characters and undercounts multi-byte receipt data
            'Content-Length' => body.bytesize.to_s,
          },
          :body => body)
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/users.rb
lib/harvest/api/users.rb
module Harvest
  module API
    class Users < Base
      api_model Harvest::User
      include Harvest::Behavior::Crud
      include Harvest::Behavior::Activatable

      # Triggers Harvest to reset the user's password and sends them an email to change it.
      # @overload reset_password(id)
      #   @param [Integer] id the id of the user you want to reset the password for
      # @overload reset_password(user)
      #   @param [Harvest::User] user the user you want to reset the password for
      # @return [Harvest::User] the user you passed in
      def reset_password(user)
        path = "#{api_model.api_path}/#{user.to_i}/reset_password"
        request(:post, credentials, path)
        user
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/account.rb
lib/harvest/api/account.rb
module Harvest
  module API
    # API methods covering account-level actions
    class Account < Base
      # Returns the current rate limit information
      # @return [Harvest::RateLimitStatus]
      def rate_limit_status
        response = request(:get, credentials, '/account/rate_limit_status')
        Harvest::RateLimitStatus.parse(response.body).first
      end

      # Returns the currently authenticated user, with the account's
      # company details attached.
      # @return [Harvest::User]
      def who_am_i
        payload = JSON.parse(request(:get, credentials, '/account/who_am_i').body)
        user = Harvest::User.parse(payload).first
        user.company = payload["company"]
        user
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/contacts.rb
lib/harvest/api/contacts.rb
module Harvest
  module API
    class Contacts < Base
      api_model Harvest::Contact
      include Harvest::Behavior::Crud

      # Retrieves contacts, optionally restricted to a single client.
      #
      # @param client_id [Integer, nil] when given, only that client's contacts
      # @return [Array<Harvest::Contact>]
      def all(client_id = nil)
        path = client_id ? "/clients/#{client_id}/contacts" : "/contacts"
        response = request(:get, credentials, path)
        api_model.parse(response.parsed_response)
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/time.rb
lib/harvest/api/time.rb
module Harvest
  module API
    class Time < Base
      # Finds a single day entry by id.
      def find(id, user = nil)
        response = request(:get, credentials, "/daily/show/#{id.to_i}", :query => of_user_query(user))
        Harvest::TimeEntry.parse(response.parsed_response).first
      end

      # All day entries for the given date.
      def all(date = ::Time.now, user = nil)
        Harvest::TimeEntry.parse(daily(date, user)["day_entries"])
      end

      # Projects available for time tracking on the given date.
      def trackable_projects(date = ::Time.now, user = nil)
        Harvest::TrackableProject.parse(daily(date, user)["projects"])
      end

      # Starts or stops the timer on an entry.
      # FIX: +id+ is now normalized with +to_i+ (consistent with #find),
      # so a TimeEntry model can be passed as well as an Integer.
      def toggle(id, user = nil)
        response = request(:get, credentials, "/daily/timer/#{id.to_i}", :query => of_user_query(user))
        Harvest::TimeEntry.parse(response.parsed_response).first
      end

      # Creates a day entry and returns the freshly parsed record.
      def create(entry, user = nil)
        response = request(:post, credentials, '/daily/add', :body => entry.to_json, :query => of_user_query(user))
        Harvest::TimeEntry.parse(response.parsed_response).first
      end

      # Updates a day entry, then re-fetches it.
      def update(entry, user = nil)
        request(:put, credentials, "/daily/update/#{entry.to_i}", :body => entry.to_json, :query => of_user_query(user))
        find(entry.id, user)
      end

      # Deletes a day entry and returns its id.
      def delete(entry, user = nil)
        request(:delete, credentials, "/daily/delete/#{entry.to_i}", :query => of_user_query(user))
        entry.id
      end

      private

      # Fetches the daily JSON payload for a date (day-of-year/year).
      def daily(date, user)
        date = ::Time.parse(date) if String === date
        response = request(:get, credentials, "/daily/#{date.yday}/#{date.year}", :query => of_user_query(user))
        JSON.parse(response.body)
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/invoices.rb
lib/harvest/api/invoices.rb
module Harvest
  module API
    class Invoices < Base
      api_model Harvest::Invoice
      include Harvest::Behavior::Crud

      # == Retrieves invoices
      #
      # == Available options
      # - :status - invoices by status
      # - :page
      # - :updated_since
      # - :timeframe (must be a nested hash with :to and :from)
      #
      # @overload all()
      # @overload all(options)
      #   @param [Hash] options filtering options
      #
      # @return [Array<Harvest::Invoice>] an array of invoices
      def all(options = {})
        query = {}
        [:status, :page, :updated_since].each do |key|
          query[key] = options[key] if options[key]
        end
        if (timeframe = options[:timeframe])
          query[:from] = timeframe[:from]
          query[:to] = timeframe[:to]
        end
        response = request(:get, credentials, "/invoices", :query => query)
        api_model.parse(response.parsed_response)
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/invoice_payments.rb
lib/harvest/api/invoice_payments.rb
module Harvest
  module API
    class InvoicePayments < Base
      api_model Harvest::InvoicePayment
      include Harvest::Behavior::Crud

      # All payments recorded on an invoice.
      def all(invoice)
        response = request(:get, credentials, "/invoices/#{invoice.to_i}/payments")
        api_model.parse(response.parsed_response)
      end

      # A single payment on an invoice.
      def find(invoice, payment)
        path = "/invoices/#{invoice.to_i}/payments/#{payment.to_i}"
        response = request(:get, credentials, path)
        api_model.parse(response.parsed_response).first
      end

      # Records a payment; the new payment id is read back from the
      # Location response header.
      def create(payment)
        payment = api_model.wrap(payment)
        response = request(:post, credentials, "/invoices/#{payment.invoice_id}/payments", :body => payment.to_json)
        new_id = response.headers["location"].match(/\/.*\/(\d+)\/.*\/(\d+)/)[2]
        find(payment.invoice_id, new_id)
      end

      # Removes a payment from an invoice and returns the payment's id.
      def delete(payment)
        request(:delete, credentials, "/invoices/#{payment.invoice_id}/payments/#{payment.to_i}")
        payment.id
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/reports.rb
lib/harvest/api/reports.rb
module Harvest
  module API
    class Reports < Base
      TIME_FORMAT = '%Y%m%d'

      # Time entries logged against a project within a date range.
      #
      # @param project [Harvest::Project, Integer]
      # @param start_date [#strftime]
      # @param end_date [#strftime]
      # @param options [Hash] :user, :billable, :updated_since, plus raw query params
      # @return [Array<Harvest::TimeEntry>]
      def time_by_project(project, start_date, end_date, options = {})
        query = date_range_query(start_date, end_date)
        query[:user_id] = options.delete(:user).to_i if options[:user]
        query[:billable] = (options.delete(:billable) ? "yes" : "no") unless options[:billable].nil?
        finalize_query!(query, options)
        response = request(:get, credentials, "/projects/#{project.to_i}/entries", query: query)
        parse_day_entries(response)
      end

      # Time entries logged by a user within a date range.
      #
      # @param options [Hash] :project, :billable, :updated_since, plus raw query params
      # @return [Array<Harvest::TimeEntry>]
      def time_by_user(user, start_date, end_date, options = {})
        query = date_range_query(start_date, end_date)
        query[:project_id] = options.delete(:project).to_i if options[:project]
        query[:billable] = (options.delete(:billable) ? "yes" : "no") unless options[:billable].nil?
        finalize_query!(query, options)
        response = request(:get, credentials, "/people/#{user.to_i}/entries", query: query)
        parse_day_entries(response)
      end

      # Expenses recorded by a user within a date range.
      # @return [Array<Harvest::Expense>]
      def expenses_by_user(user, start_date, end_date, options = {})
        query = finalize_query!(date_range_query(start_date, end_date), options)
        response = request(:get, credentials, "/people/#{user.to_i}/expenses", query: query)
        Harvest::Expense.parse(response.parsed_response)
      end

      # Expenses recorded against a project within a date range.
      # @return [Array<Harvest::Expense>]
      def expenses_by_project(project, start_date, end_date, options = {})
        query = finalize_query!(date_range_query(start_date, end_date), options)
        response = request(:get, credentials, "/projects/#{project.to_i}/expenses", query: query)
        Harvest::Expense.parse(response.parsed_response)
      end

      # All projects belonging to a client.
      # @return [Array<Harvest::Project>]
      def projects_by_client(client)
        response = request(:get, credentials, "/projects?client=#{client.to_i}")
        Harvest::Project.parse(response.parsed_response)
      end

      private

      # Base query for the report endpoints: {from:, to:} in YYYYMMDD form.
      def date_range_query(start_date, end_date)
        { from: start_date.strftime(TIME_FORMAT), to: end_date.strftime(TIME_FORMAT) }
      end

      # Moves :updated_since into the query, then merges any remaining raw
      # options. Returns the query hash. (Mutates both arguments, exactly
      # as the previously duplicated inline code did.)
      def finalize_query!(query, options)
        query[:updated_since] = options.delete(:updated_since).to_s if options[:updated_since]
        query.update(options)
      end

      # The entries endpoints wrap each record under "day_entry".
      def parse_day_entries(response)
        Harvest::TimeEntry.parse(JSON.parse(response.body).map { |entry| entry["day_entry"] })
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/invoice_messages.rb
lib/harvest/api/invoice_messages.rb
module Harvest
  module API
    class InvoiceMessages < Base
      api_model Harvest::InvoiceMessage
      include Harvest::Behavior::Crud

      # All messages attached to an invoice.
      def all(invoice)
        response = request(:get, credentials, "/invoices/#{invoice.to_i}/messages")
        api_model.parse(response.parsed_response)
      end

      # A single message on an invoice.
      def find(invoice, message)
        path = "/invoices/#{invoice.to_i}/messages/#{message.to_i}"
        response = request(:get, credentials, path)
        api_model.parse(response.parsed_response).first
      end

      # Sends a new message; the created id is read back from the
      # Location response header.
      def create(message)
        message = api_model.wrap(message)
        response = request(:post, credentials, "/invoices/#{message.invoice_id}/messages", :body => message.to_json)
        new_id = response.headers["location"].match(/\/.*\/(\d+)\/.*\/(\d+)/)[2]
        find(message.invoice_id, new_id)
      end

      # Removes a message from an invoice and returns its id.
      def delete(message)
        request(:delete, credentials, "/invoices/#{message.invoice_id}/messages/#{message.to_i}")
        message.id
      end

      # Create a message for marking an invoice as sent.
      #
      # @param [Harvest::InvoiceMessage] message The message you want to send
      # @return [Harvest::InvoiceMessage] The sent message
      def mark_as_sent(message)
        send_status_message(message, 'mark_as_sent')
      end

      # Create a message and mark an open invoice as closed (writing an invoice off)
      #
      # @param [Harvest::InvoiceMessage] message The message you want to send
      # @return [Harvest::InvoiceMessage] The sent message
      def mark_as_closed(message)
        send_status_message(message, 'mark_as_closed')
      end

      # Create a message and mark a closed (written-off) invoice as open
      #
      # @param [Harvest::InvoiceMessage] message The message you want to send
      # @return [Harvest::InvoiceMessage] The sent message
      def re_open(message)
        send_status_message(message, 're_open')
      end

      # Create a message for marking an open invoice as draft
      #
      # @param [Harvest::InvoiceMessage] message The message you want to send
      # @return [Harvest::InvoiceMessage] The sent message
      def mark_as_draft(message)
        send_status_message(message, 'mark_as_draft')
      end

      private

      # POSTs the message to the invoice's status-change endpoint and
      # returns the message that was sent.
      def send_status_message(message, action)
        message = api_model.wrap(message)
        request(:post, credentials,
                "/invoices/#{message.invoice_id}/messages/#{action}",
                :body => message.to_json)
        message
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/projects.rb
lib/harvest/api/projects.rb
module Harvest
  module API
    class Projects < Base
      api_model Harvest::Project
      include Harvest::Behavior::Crud

      # Retrieves all projects. Note: this requires project manager or administrator authorization.
      # @return [Array<Harvest::Project>]
      def all(*)
        super
      rescue NotFound => e
        raise NotFound.new(e.response, e.params, "Do you have sufficient privileges? If not, consider using time.trackable_projects instead.")
      end

      # Creates a task and assigns it to the project in one call.
      #
      # == Examples
      #   project = harvest.projects.find(401)
      #   harvest.projects.create_task(project, 'Bottling Glue') # creates and assigns a task to the project
      #
      # @return [Harvest::Project]
      def create_task(project, task_name)
        payload = {"task" => {"name" => task_name}}.to_json
        response = request(:post, credentials, "/projects/#{project.to_i}/task_assignments/add_with_create_new_task", :body => payload)
        project_id = response.headers["location"].match(/\/.*\/(\d+)\/.*\/(\d+)/)[1]
        find(project_id)
      end

      # Deactivates the project. Does nothing if the project is already deactivated
      #
      # @param [Harvest::Project] project the project you want to deactivate
      # @return [Harvest::Project] the deactivated project
      def deactivate(project)
        toggle_active(project, false) if project.active?
        project
      end

      # Activates the project. Does nothing if the project is already activated
      #
      # @param [Harvest::Project] project the project you want to activate
      # @return [Harvest::Project] the activated project
      def activate(project)
        toggle_active(project, true) unless project.active?
        project
      end

      private

      # Hits the toggle endpoint and records the new active state locally.
      def toggle_active(project, state)
        request(:put, credentials, "#{api_model.api_path}/#{project.to_i}/toggle", :headers => {'Content-Length' => '0'})
        project.active = state
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/base.rb
lib/harvest/api/base.rb
module Harvest
  module API
    # Common plumbing for every API collection: holds the credentials and
    # performs authenticated HTTParty requests, mapping HTTP error codes
    # onto the Harvest exception hierarchy.
    class Base
      attr_reader :credentials

      def initialize(credentials)
        @credentials = credentials
      end

      class << self
        # Declares which model class this collection works with by
        # defining an instance-level #api_model accessor.
        def api_model(klass)
          class_eval <<-END
            def api_model
              #{klass}
            end
          END
        end
      end

      protected

      # Performs an HTTP request against the Harvest API.
      #
      # @param method [Symbol] :get, :post, :put or :delete
      # @param credentials the credentials used to authenticate and pick the host
      # @param path [String] path relative to the account host
      # @param options [Hash] :query, :body and extra :headers
      # @return [HTTParty::Response] on 200/201
      # @raise [Harvest::BadRequest, Harvest::AuthenticationFailed,
      #   Harvest::NotFound, Harvest::ServerError, Harvest::Unavailable,
      #   Harvest::RateLimited, Harvest::InformHarvest] depending on status code
      def request(method, credentials, path, options = {})
        params = { path: path, options: options, method: method }
        httparty_options = {
          query: options[:query],
          body: options[:body],
          format: :plain,
          headers: {
            "Accept" => "application/json",
            "Content-Type" => "application/json; charset=utf-8",
            "User-Agent" => "Harvested/#{Harvest::VERSION}"
          }.update(options[:headers] || {})
        }
        credentials.set_authentication(httparty_options)

        response = HTTParty.send(method, "#{credentials.host}#{path}", httparty_options)
        params[:response] = response.inspect.to_s

        case response.code
        when 200..201
          response
        when 400
          raise Harvest::BadRequest.new(response, params)
        when 401
          raise Harvest::AuthenticationFailed.new(response, params)
        when 404
          raise Harvest::NotFound.new(response, params, "Do you have sufficient privileges?")
        when 500
          raise Harvest::ServerError.new(response, params)
        when 502
          raise Harvest::Unavailable.new(response, params)
        when 503
          raise Harvest::RateLimited.new(response, params)
        else
          raise Harvest::InformHarvest.new(response, params)
        end
      end

      # Query fragment scoping a request to a user; empty when user is nil.
      # (Fixed: removed a pointless local assignment — the hash is
      # returned directly.)
      def of_user_query(user)
        user.nil? ? {} : {"of_user" => user.to_i}
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/clients.rb
lib/harvest/api/clients.rb
module Harvest
  module API
    # CRUD plus activate/deactivate operations for clients; all behavior
    # is supplied by the shared Crud and Activatable mixins.
    class Clients < Base
      api_model Harvest::Client

      include Harvest::Behavior::Crud
      include Harvest::Behavior::Activatable
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/user_assignments.rb
lib/harvest/api/user_assignments.rb
module Harvest
  module API
    class UserAssignments < Base
      # All user assignments on a project.
      def all(project, query = {})
        response = request(:get, credentials, "/projects/#{project.to_i}/user_assignments", {query: query})
        Harvest::UserAssignment.parse(response.parsed_response)
      end

      # A single user assignment on a project.
      def find(project, id)
        response = request(:get, credentials, "/projects/#{project.to_i}/user_assignments/#{id}")
        Harvest::UserAssignment.parse(response.parsed_response).first
      end

      # Assigns a user to a project; the created id is read back from the
      # Location response header.
      def create(user_assignment)
        user_assignment = Harvest::UserAssignment.wrap(user_assignment)
        response = request(:post, credentials, "/projects/#{user_assignment.project_id}/user_assignments", :body => user_assignment.user_as_json.to_json)
        new_id = response.headers["location"].match(/\/.*\/(\d+)\/.*\/(\d+)/)[2]
        find(user_assignment.project_id, new_id)
      end

      # Updates an assignment, then returns a fresh copy.
      def update(user_assignment)
        user_assignment = Harvest::UserAssignment.wrap(user_assignment)
        path = "/projects/#{user_assignment.project_id}/user_assignments/#{user_assignment.id}"
        request(:put, credentials, path, :body => user_assignment.to_json)
        find(user_assignment.project_id, user_assignment.id)
      end

      # Removes an assignment and returns its id.
      def delete(user_assignment)
        request(:delete, credentials, "/projects/#{user_assignment.project_id}/user_assignments/#{user_assignment.to_i}")
        user_assignment.id
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/tasks.rb
lib/harvest/api/tasks.rb
module Harvest
  module API
    class Tasks < Base
      api_model Harvest::Task
      include Harvest::Behavior::Crud

      # Deactivating tasks is not yet supported by the Harvest API.
      #
      # @param [Harvest::Task] task the task you want to deactivate
      # @return [Harvest::Task] the deactivated task
      # def deactivate(task)
      #   if task.active?
      #     request(:post, credentials, "#{api_model.api_path}/#{task.to_i}/deactivate", :headers => {'Content-Length' => '0'})
      #     task.active = false
      #   end
      #   task
      # end

      # Activates the task. Does nothing if the task is already activated
      #
      # @param [Harvest::Task] task the task you want to activate
      # @return [Harvest::Task] the activated task
      def activate(task)
        return task if task.active?
        request(:post, credentials, "#{api_model.api_path}/#{task.to_i}/activate", :headers => {'Content-Length' => '0'})
        task.active = true
        task
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/expense_categories.rb
lib/harvest/api/expense_categories.rb
module Harvest
  module API
    # Standard CRUD operations for expense categories, provided entirely
    # by the shared Crud behavior.
    class ExpenseCategories < Base
      api_model Harvest::ExpenseCategory

      include Harvest::Behavior::Crud
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/invoice_categories.rb
lib/harvest/api/invoice_categories.rb
module Harvest
  module API
    class InvoiceCategories < Base
      api_model Harvest::InvoiceCategory
      include Harvest::Behavior::Crud

      # The Harvest API exposes no show endpoint for invoice categories.
      def find(*)
        raise "find is unsupported for InvoiceCategories"
      end

      # Creates a category, then locates it in the full listing because a
      # single category cannot be fetched directly.
      def create(model)
        model = api_model.wrap(model)
        response = request(:post, credentials, "#{api_model.api_path}", :body => model.to_json)
        created_id = response.headers["location"].match(/\/.*\/(\d+)/)[1].to_i
        all.detect { |category| category.id == created_id }
      end

      # Updates a category, then locates it in the full listing.
      def update(model, user = nil)
        model = api_model.wrap(model)
        request(:put, credentials, "#{api_model.api_path}/#{model.to_i}", :body => model.to_json, :query => of_user_query(user))
        all.detect { |category| category.id == model.id }
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/api/task_assignments.rb
lib/harvest/api/task_assignments.rb
module Harvest
  module API
    class TaskAssignments < Base
      # All task assignments on a project.
      #
      # @param project [Harvest::Project, Integer]
      # @param query [Hash] extra query parameters
      # @return [Array<Harvest::TaskAssignment>]
      def all(project, query = {})
        response = request(:get, credentials, "/projects/#{project.to_i}/task_assignments", {query: query})
        Harvest::TaskAssignment.parse(response.parsed_response)
      end

      # A single task assignment on a project.
      def find(project, id)
        response = request(:get, credentials, "/projects/#{project.to_i}/task_assignments/#{id}")
        Harvest::TaskAssignment.parse(response.parsed_response).first
      end

      # Assigns a task to a project; the created id is read back from the
      # Location response header.
      def create(task_assignment)
        task_assignment = Harvest::TaskAssignment.wrap(task_assignment)
        response = request(:post, credentials, "/projects/#{task_assignment.project_id}/task_assignments", :body => task_assignment.task_as_json.to_json)
        id = response.headers["location"].match(/\/.*\/(\d+)\/.*\/(\d+)/)[2]
        find(task_assignment.project_id, id)
      end

      # Updates an assignment, then returns a fresh copy.
      def update(task_assignment)
        task_assignment = Harvest::TaskAssignment.wrap(task_assignment)
        request(:put, credentials, "/projects/#{task_assignment.project_id}/task_assignments/#{task_assignment.to_i}", :body => task_assignment.to_json)
        find(task_assignment.project_id, task_assignment.id)
      end

      # Removes an assignment and returns its id.
      # (Fixed: dropped an unused `response` local, matching the style of
      # the other collection classes' delete methods.)
      def delete(task_assignment)
        request(:delete, credentials, "/projects/#{task_assignment.project_id}/task_assignments/#{task_assignment.to_i}")
        task_assignment.id
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/behavior/activatable.rb
lib/harvest/behavior/activatable.rb
module Harvest
  module Behavior
    # Activate/Deactivate behaviors that can be brought into API collections
    module Activatable
      # Deactivates the item. Does nothing if the item is already deactivated
      #
      # @param [Harvest::BaseModel] model the model you want to deactivate
      # @return [Harvest::BaseModel] the deactivated model
      def deactivate(model)
        toggle_activation(model, false) if model.active?
        model
      end

      # Activates the item. Does nothing if the item is already activated
      #
      # @param [Harvest::BaseModel] model the model you want to activate
      # @return [Harvest::BaseModel] the activated model
      def activate(model)
        toggle_activation(model, true) unless model.active?
        model
      end

      private

      # POSTs to the toggle endpoint and records the new state locally.
      def toggle_activation(model, state)
        request(:post, credentials, "#{api_model.api_path}/#{model.to_i}/toggle")
        model.is_active = state
      end
    end
  end
end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/harvest/behavior/crud.rb
lib/harvest/behavior/crud.rb
module Harvest module Behavior module Crud # Retrieves all items # @return [Array<Harvest::BaseModel>] an array of models depending on where you're calling it from (e.g. [Harvest::Client] from Harvest::Base#clients) def all(user = nil, query_options = {}) query = query_options.merge!(of_user_query(user)) response = request(:get, credentials, api_model.api_path, :query => query) api_model.parse(response.parsed_response) end # Retrieves an item by id # @overload find(id) # @param [Integer] the id of the item you want to retreive # @overload find(id) # @param [String] id the String version of the id # @overload find(model) # @param [Harvest::BaseModel] id you can pass a model and it will return a refreshed version # # @return [Harvest::BaseModel] the model depends on where you're calling it from (e.g. Harvest::Client from Harvest::Base#clients) def find(id, user = nil) raise "id required" unless id response = request(:get, credentials, "#{api_model.api_path}/#{id}", :query => of_user_query(user)) api_model.parse(response.parsed_response).first end # Creates an item # @param [Harvest::BaseModel] model the item you want to create # @return [Harvest::BaseModel] the created model depending on where you're calling it from (e.g. Harvest::Client from Harvest::Base#clients) def create(model, user = nil) model = api_model.wrap(model) response = request(:post, credentials, "#{api_model.api_path}", :body => model.to_json, :query => of_user_query(user)) id = response.headers["location"].match(/\/.*\/(\d+)/)[1] if user find(id, user) else find(id) end end # Updates an item # @param [Harvest::BaseModel] model the model you want to update # @return [Harvest::BaseModel] the created model depending on where you're calling it from (e.g. 
Harvest::Client from Harvest::Base#clients) def update(model, user = nil) model = api_model.wrap(model) request(:put, credentials, "#{api_model.api_path}/#{model.to_i}", :body => model.to_json, :query => of_user_query(user)) find(model.id) end # Deletes an item # @overload delete(model) # @param [Harvest::BaseModel] model the item you want to delete # @overload delete(id) # @param [Integer] id the id of the item you want to delete # @overload delete(id) # @param [String] id the String version of the id of the item you want to delete # # @return [Integer] the id of the item deleted def delete(model, user = nil) request(:delete, credentials, "#{api_model.api_path}/#{model.to_i}", :query => of_user_query(user)) model.to_i end end end end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/ext/array.rb
lib/ext/array.rb
# Shamelessly ripped from https://github.com/rails/rails/blob/master/activesupport/lib/active_support/core_ext/array/wrap.rb unless Array.respond_to?(:wrap) class Array # Wraps its argument in an array unless it is already an array (or array-like). # # Specifically: # # * If the argument is +nil+ an empty list is returned. # * Otherwise, if the argument responds to +to_ary+ it is invoked, and its result returned. # * Otherwise, returns an array with the argument as its single element. # # Array.wrap(nil) # => [] # Array.wrap([1, 2, 3]) # => [1, 2, 3] # Array.wrap(0) # => [0] # # This method is similar in purpose to <tt>Kernel#Array</tt>, but there are some differences: # # * If the argument responds to +to_ary+ the method is invoked. <tt>Kernel#Array</tt> # moves on to try +to_a+ if the returned value is +nil+, but <tt>Arraw.wrap</tt> returns # such a +nil+ right away. # * If the returned value from +to_ary+ is neither +nil+ nor an +Array+ object, <tt>Kernel#Array</tt> # raises an exception, while <tt>Array.wrap</tt> does not, it just returns the value. # * It does not call +to_a+ on the argument, though special-cases +nil+ to return an empty array. # # The last point is particularly worth comparing for some enumerables: # # Array(:foo => :bar) # => [[:foo, :bar]] # Array.wrap(:foo => :bar) # => [{:foo => :bar}] # # Array("foo\nbar") # => ["foo\n", "bar"], in Ruby 1.8 # Array.wrap("foo\nbar") # => ["foo\nbar"] # # There's also a related idiom that uses the splat operator: # # [*object] # # which returns <tt>[nil]</tt> for +nil+, and calls to <tt>Array(object)</tt> otherwise. # # Thus, in this case the behavior is different for +nil+, and the differences with # <tt>Kernel#Array</tt> explained above apply to the rest of +object+s. def self.wrap(object) if object.nil? [] elsif object.respond_to?(:to_ary) object.to_ary else [object] end end end end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/ext/time.rb
lib/ext/time.rb
unless Time.respond_to?(:to_time) class Time def to_time; self; end end end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/ext/hash.rb
lib/ext/hash.rb
# Shamelessly ripped from https://github.com/rails/rails/blob/master/activesupport/lib/active_support/core_ext/hash/keys.rb unless Hash.respond_to?(:stringify_keys) class Hash # Return a new hash with all keys converted to strings. def stringify_keys dup.stringify_keys! end def stringify_keys! keys.each do |key| self[key.to_s] = delete(key) end self end end end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
zmoazeni/harvested
https://github.com/zmoazeni/harvested/blob/33d26049651fde6adf651d5c8aff8fff97156210/lib/ext/date.rb
lib/ext/date.rb
# Shamelessly ripped from https://github.com/rails/rails/blob/master/activesupport/lib/active_support/core_ext/date/conversions.rb unless ::Date.respond_to?(:to_time) class ::Date def to_time(*) ::Time.utc(year, month, day) end end end
ruby
MIT
33d26049651fde6adf651d5c8aff8fff97156210
2026-01-04T17:50:27.559404Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/ebnf.rb
ebnf.rb
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/rule_spec.rb
spec/rule_spec.rb
# coding: utf-8 $:.unshift "." require 'spec_helper' require 'ebnf' require 'sxp' describe EBNF::Rule do let(:debug) {[]} let(:ebnf) {EBNF.parse(File.open(File.expand_path("../../etc/ebnf.ebnf", __FILE__)), format: :native)} subject {EBNF::Rule.new(:rule, "0", [:seq, :foo])} describe ".from_sxp" do context "accepts valid variations" do { "ebnf[1]": [ %{(rule ebnf "1" (star (alt declaration rule)))}, EBNF::Rule.new(:ebnf, "1", [:star, [:alt, :declaration, :rule]]) ], "ebnf[1] parsed": [ [:rule, :ebnf, "1", [:star, [:alt, :declaration, :rule]]], EBNF::Rule.new(:ebnf, "1", [:star, [:alt, :declaration, :rule]], kind: :rule) ], "pass": [ %{(pass _pass (plus (range "#x9#xA#xD#x20")))}, EBNF::Rule.new(nil, nil, [:plus, [:range, "#x9#xA#xD#x20"]], kind: :pass) ], "alt": [ %{(rule alt (alt a b c))}, EBNF::Rule.new(:alt, nil, [:alt, :a, :b, :c], kind: :rule) ], "diff": [ %{(terminal R_CHAR "21" (diff CHAR "]"))}, EBNF::Rule.new(:R_CHAR, "21", [:diff, :CHAR, "]"], kind: :terminal) ], "istr": [ %{(terminal nc (istr "foo"))}, EBNF::Rule.new(:nc, nil, [:istr, "foo"], kind: :terminal) ], "not": [ %{(rule _a_1 "n.1" (not op1))}, EBNF::Rule.new(:_a_1, "n.1", [:not, :op1], kind: :rule) ], "opt": [ %{(rule _diff_1 "7.1" (opt _diff_2))}, EBNF::Rule.new(:_diff_1, "7.1", [:opt, :_diff_2], kind: :rule) ], "plus": [ %{(rule seq "6" (plus diff))}, EBNF::Rule.new(:seq, "6", [:plus, :diff], kind: :rule) ], "rept": [ %{(rule rept "6" (rept 1 "*" diff))}, EBNF::Rule.new(:rept, "6", [:rept, 1, "*", :diff]) ], "rept m.n": [ %{(rule rept "6" (rept 3 5 diff))}, EBNF::Rule.new(:rept, "6", [:rept, 3, 5, :diff]) ], "seq": [ %{(rule seq (seq a b c))}, EBNF::Rule.new(:seq, nil, [:seq, :a, :b, :c], kind: :rule) ], "star": [ %{(rule _alt_1 "5.1" (star _alt_2))}, EBNF::Rule.new(:_alt_1, "5.1", [:star, :_alt_2], kind: :rule) ] }.each do |title, (sxp, expected)| it title do res = EBNF::Rule.from_sxp(sxp) expect(res).to eq expected end end end context "rejects invalid variations" do { "alt (empty)": %{(rule 
alt (alt))}, "diff (empty)": %{(terminal R_CHAR "21" (diff))}, "diff (one)": %{(terminal R_CHAR "21" (diff CHAR))}, "diff (three)": %{(terminal R_CHAR "21" (diff CHAR "]" ","))}, "hex (empty)": %{(terminal hex (hex))}, "hex (two)": %{(terminal hex (hex #x01 #x02))}, "istr (empty)": %{(terminal nc (istr))}, "istr (two)": %{(terminal nc (istr "foo" "bar"))}, "not (empty)": %{(rule _a_1 "n.1" (not))}, "not (two)": %{(rule _a_1 "n.1" (not op1 op2))}, "opt (empty)": %{(rule _diff_1 "7.1" (opt))}, "plus (empty)": %{(rule seq "6" (plus))}, "plus (two)": %{(rule seq "6" (plus diff extra))}, "rept (empty)": %{(rule rept "6" (rept))}, "rept (one)": %{(rule rept "6" (rept 1))}, "rept (two)": %{(rule rept "6" (rept 1 "*"))}, "rept (four)": %{(rule rept "6" (rept 1 "*" diff extra))}, "rept (float min)": %{(rule rept "6" (rept 1.1 1 diff))}, "rept (negative min)": %{(rule rept "6" (rept -1 1 diff))}, "rept (float max)": %{(rule rept "6" (rept 1 1.1 diff))}, "rept (negative max)": %{(rule rept "6" (rept 1 -1 diff))}, "star (empty)": %{(rule _alt_1 "5.1" (star))}, "star (two)": %{(rule _alt_1 "5.1" (star diff extra))}, "not op": %{(rule _bad nil (_bad))} }.each do |title, (sxp, expected)| it title do expect {EBNF::Rule.from_sxp(sxp)}.to raise_error(ArgumentError) end end end end describe "#to_sxp" do { "ebnf[1]": [ EBNF::Rule.new(:ebnf, "1", [:star, [:alt, :declaration, :rule]]), %{(rule ebnf "1" (star (alt declaration rule)))}, ], "pass": [ EBNF::Rule.new(nil, nil, [:plus, [:range, "#x20\\t\\r\\n"]], kind: :pass), %{(pass _pass (plus (range "#x20\\\\t\\\\r\\\\n")))}, ], "alt": [ EBNF::Rule.new(:alt, nil, [:alt, :a, :b, :c], kind: :rule), %{(rule alt (alt a b c))}, ], "diff": [ EBNF::Rule.new(:R_CHAR, "21", [:diff, :CHAR, "]"], kind: :terminal), %{(terminal R_CHAR "21" (diff CHAR "]"))}, ], "istr": [ EBNF::Rule.new(:nc, nil, [:istr, "foo"], kind: :terminal), %{(terminal nc (istr "foo"))}, ], "not": [ EBNF::Rule.new(:_a_1, "n.1", [:not, :op1], kind: :rule), %{(rule _a_1 "n.1" (not 
op1))}, ], "opt": [ EBNF::Rule.new(:_diff_1, "7.1", [:opt, :_diff_2], kind: :rule), %{(rule _diff_1 "7.1" (opt _diff_2))}, ], "plus": [ EBNF::Rule.new(:seq, "6", [:plus, :diff], kind: :rule), %{(rule seq "6" (plus diff))}, ], "rept": [ EBNF::Rule.new(:rept, "6", [:rept, 1, "*", :diff]), %{(rule rept "6" (rept 1 "*" diff))}, ], "rept m.n": [ EBNF::Rule.new(:rept, "6", [:rept, 3, 5, :diff]), %{(rule rept "6" (rept 3 5 diff))}, ], "seq": [ EBNF::Rule.new(:seq, nil, [:seq, :a, :b, :c], kind: :rule), %{(rule seq (seq a b c))}, ], "star": [ EBNF::Rule.new(:_alt_1, "5.1", [:star, :_alt_2], kind: :rule), %{(rule _alt_1 "5.1" (star _alt_2))}, ] }.each do |title, (rule, sxp)| it title do expect(rule.to_sxp).to eq sxp end end end describe "#to_ttl" do { "ebnf[1]": [ EBNF::Rule.new(:ebnf, "1", [:star, [:alt, :declaration, :rule]]), %{ :ebnf rdfs:label "ebnf"; dc:identifier "1"; g:star [ g:alt ( :declaration :rule ) ] .}, ], "pass": [ EBNF::Rule.new(nil, nil, [:plus, [:range, "#x20\\t\\r\\n"]], kind: :pass), %{ :_pass rdfs:label "_pass"; g:plus [ re:matches "[\\\\u0020\\\\t\\\\r\\\\n]" ] .}, ], "alt": [ EBNF::Rule.new(:alt, nil, [:alt, :a, :b, :c], kind: :rule), %{ :alt rdfs:label "alt"; g:alt ( :a :b :c ) .}, ], "diff": [ EBNF::Rule.new(:R_CHAR, "21", [:diff, :CHAR, "]"], kind: :terminal), %{ :R_CHAR rdfs:label "R_CHAR"; dc:identifier "21"; re:diff ( :CHAR "]" ) .}, ], "istr": [ EBNF::Rule.new(:nc, nil, [:istr, "foo"], kind: :terminal), %{ :nc rdfs:label "nc"; re:matches "foo" .}, ], "not": [ EBNF::Rule.new(:_a_1, "n.1", [:not, :op1], kind: :rule), %{ :_a_1 rdfs:label "_a_1"; dc:identifier "n.1"; g:not :op1 .}, ], "opt": [ EBNF::Rule.new(:_diff_1, "7.1", [:opt, :_diff_2], kind: :rule), %{ :_diff_1 rdfs:label "_diff_1"; dc:identifier "7.1"; g:opt :_diff_2 .}, ], "plus": [ EBNF::Rule.new(:seq, "6", [:plus, :diff], kind: :rule), %{ :seq rdfs:label "seq"; dc:identifier "6"; g:plus :diff .}, ], "rept": [ EBNF::Rule.new(:rept, "6", [:rept, 1, "*", :diff]), %{ :rept rdfs:label 
"rept"; dc:identifier "6"; g:min 1; g:max "*"; g:rept :diff .}, ], "rept m.n": [ EBNF::Rule.new(:rept, "6", [:rept, 3, 5, :diff]), %{ :rept rdfs:label "rept"; dc:identifier "6"; g:min 3; g:max 5; g:rept :diff .}, ], "seq": [ EBNF::Rule.new(:seq, nil, [:seq, :a, :b, :c], kind: :rule), %{ :seq rdfs:label "seq"; g:seq ( :a :b :c ) .}, ], "star": [ EBNF::Rule.new(:_alt_1, "5.1", [:star, :_alt_2], kind: :rule), %{ :_alt_1 rdfs:label "_alt_1"; dc:identifier "5.1"; g:star :_alt_2 .}, ] }.each do |title, (rule, ttl)| it title do expect(rule.to_ttl.gsub(/\s+/m, " ")).to eq ttl.gsub(/\s+/m, " ") end end end describe "#to_ruby" do { "ebnf[1]": [ EBNF::Rule.new(:ebnf, "1", [:star, [:alt, :declaration, :rule]]), %{EBNF::Rule.new(:ebnf, "1", [:star, [:alt, :declaration, :rule]])}, ], "pass": [ EBNF::Rule.new(nil, nil, [:plus, [:range, "#x20\\t\\r\\n"]], kind: :pass), %{EBNF::Rule.new(:_pass, nil, [:plus, [:range, \"#x20\\\\t\\\\r\\\\n\"]], kind: :pass)}, ], "alt": [ EBNF::Rule.new(:alt, nil, [:alt, :a, :b, :c], kind: :rule), %{EBNF::Rule.new(:alt, nil, [:alt, :a, :b, :c])}, ], "diff": [ EBNF::Rule.new(:R_CHAR, "21", [:diff, :CHAR, "]"], kind: :terminal), %{EBNF::Rule.new(:R_CHAR, "21", [:diff, :CHAR, "]"], kind: :terminal)}, ], "not": [ EBNF::Rule.new(:_a_1, "n.1", [:not, :op1], kind: :rule), %{EBNF::Rule.new(:_a_1, "n.1", [:not, :op1])}, ], "opt": [ EBNF::Rule.new(:_diff_1, "7.1", [:opt, :_diff_2], kind: :rule), %{EBNF::Rule.new(:_diff_1, "7.1", [:opt, :_diff_2])}, ], "plus": [ EBNF::Rule.new(:seq, "6", [:plus, :diff], kind: :rule), %{EBNF::Rule.new(:seq, "6", [:plus, :diff])}, ], "rept": [ EBNF::Rule.new(:rept, "6", [:rept, 1, "*", :diff]), %{EBNF::Rule.new(:rept, "6", [:rept, 1, "*", :diff])}, ], "rept m.n": [ EBNF::Rule.new(:rept, "6", [:rept, 3, 5, :diff]), %{EBNF::Rule.new(:rept, "6", [:rept, 3, 5, :diff])}, ], "seq": [ EBNF::Rule.new(:seq, nil, [:seq, :a, :b, :c], kind: :rule), %{EBNF::Rule.new(:seq, nil, [:seq, :a, :b, :c])}, ], "star": [ EBNF::Rule.new(:_alt_1, "5.1", 
[:star, :_alt_2], kind: :rule), %{EBNF::Rule.new(:_alt_1, "5.1", [:star, :_alt_2])}, ] }.each do |title, (rule, ruby)| it title do expect(rule.to_ruby).to eq ruby end end end describe "#to_bnf" do { "no-rewrite" => [ [:seq, :foo], [EBNF::Rule.new(:rule, "0", [:seq, :foo])] ], "embedded rule" => [ [:seq, [:alt, :foo]], [EBNF::Rule.new(:rule, "0", [:seq, :_rule_1]), EBNF::Rule.new(:_rule_1, "0.1", [:alt, :foo])] ], "opt rule" => [ [:opt, :foo], [EBNF::Rule.new(:rule, "0", [:alt, :_empty, :foo])] ], "two opt rule" => [ [:alt, [:opt, :foo], [:opt, :bar]], [EBNF::Rule.new(:rule, "0", [:alt, :_rule_1, :_rule_2]), EBNF::Rule.new(:_rule_1, "0.1", [:alt, :_empty, :foo]), EBNF::Rule.new(:_rule_2, "0.2", [:alt, :_empty, :bar])] ], "star rule" => [ [:star, :foo], [EBNF::Rule.new(:rule, "0", [:alt, :_empty, :_rule_1]), EBNF::Rule.new(:_rule_1, "0.1", [:seq, :foo, :rule])] ], "plus rule" => [ [:plus, :foo], [EBNF::Rule.new(:rule, "0", [:seq, :foo, :_rule_1]), EBNF::Rule.new(:_rule_1, "0.1", [:alt, :_empty, :_rule_2]), EBNF::Rule.new(:_rule_2, "0.2", [:seq, :foo, :_rule_1])] ], # Diff may be a Rule or a Terminal now. 
#"diff rule" => [ # [:diff, "a", "b"], # [EBNF::Rule.new(:rule, "0", [:diff, "a", "b"])] #], "hex rule" => [ [:hex, "#x00B7"], [EBNF::Rule.new(:rule, "0", [:hex, "#x00B7"], kind: :terminal)] ], "range rule" => [ [:range, "a"], [EBNF::Rule.new(:rule, "0", [:range, "a"], kind: :terminal)] ], "ebnf[1]" => [ [:star, [:alt, :declaration, :rule]], [EBNF::Rule.new(:rule, "0", [:alt, :_empty, :_rule_2]), EBNF::Rule.new(:_rule_2, "0.2", [:seq, :_rule_1, :rule]), EBNF::Rule.new(:_rule_1, "0.1", [:alt, :declaration, :rule])] ], "ebnf[9]" => [ [:seq, :primary, [:opt, [:range, "?*+"]]], [EBNF::Rule.new(:rule, "0", [:seq, :primary, :_rule_1]), EBNF::Rule.new(:_rule_1, "0.1", [:alt, :_empty, :_rule_2]), EBNF::Rule.new(:_rule_2, "0.2", [:range, "?*+"], kind: :terminal)] ], "IRIREF" => [ [:seq, "<", [:star, [:alt, [:range, "^#x00-#x20<>\"{}|^`\\"], :UCHAR]], ">"], [EBNF::Rule.new(:rule, "0", [:seq, "<", :_rule_1, ">"]), EBNF::Rule.new(:_rule_1, "0.1", [:alt, :_empty, :_rule_3]), EBNF::Rule.new(:_rule_3, "0.3", [:seq, :_rule_2, :_rule_1]), EBNF::Rule.new(:_rule_2, "0.2", [:alt, :_rule_4, :UCHAR]), EBNF::Rule.new(:_rule_4, "0.4", [:range, "^#x00-#x20<>\"{}|^`\\"], kind: :terminal)] ] }.each do |title, (expr, expected)| it title do rule = EBNF::Rule.new(:rule, "0", expr) expect(rule.to_bnf).to eq expected case expr.first when :seq, :alt expect(rule).to be_starts_with(expr[1]) else expect(rule).not_to be_starts_with(expr[1]) end end end context "exceptions" do { "diff" => [:diff, "foo", "foobar"], "not" => [:not, "foo"], "rept" => [:rept, 1, 2, "foo"], }.each do |title, expr| it title do rule = EBNF::Rule.new(:rule, "0", expr) expect {rule.to_bnf}.to raise_error(RuntimeError) end end end end describe "#to_peg" do { "no-rewrite" => [ [:seq, :foo], [EBNF::Rule.new(:rule, "0", [:seq, :foo])] ], "embedded rule" => [ [:seq, [:alt, :foo]], [EBNF::Rule.new(:rule, "0", [:seq, :_rule_1]), EBNF::Rule.new(:_rule_1, "0.1", [:alt, :foo])] ], "opt rule" => [ [:opt, :foo], [EBNF::Rule.new(:rule, "0", 
[:opt, :foo])] ], "two opt rule" => [ [:alt, [:opt, :foo], [:opt, :bar]], [EBNF::Rule.new(:rule, "0", [:alt, :_rule_1, :_rule_2]), EBNF::Rule.new(:_rule_1, "0.1", [:opt, :foo]), EBNF::Rule.new(:_rule_2, "0.2", [:opt, :bar])] ], "star rule" => [ [:star, :foo], [EBNF::Rule.new(:rule, "0", [:star, :foo])] ], "plus rule" => [ [:plus, :foo], [EBNF::Rule.new(:rule, "0", [:plus, :foo])] ], "diff rule" => [ [:diff, "a", "b"], [EBNF::Rule.new(:rule, "0", [:seq, :_rule_1, "a"]), EBNF::Rule.new(:_rule_1, "0.1", [:not, "b"])] ], "hex rule" => [ [:hex, "#x00B7"], [EBNF::Rule.new(:rule, "0", [:hex, "#x00B7"], kind: :terminal)] ], "range rule" => [ [:range, "a"], [EBNF::Rule.new(:rule, "0", [:range, "a"], kind: :terminal)] ], "ebnf[1]" => [ [:star, [:alt, :declaration, :rule]], [EBNF::Rule.new(:rule, "0", [:star, :_rule_1]), EBNF::Rule.new(:_rule_1, "0.1", [:alt, :declaration, :rule])] ], "ebnf[9]" => [ [:seq, :primary, [:opt, [:range, "?*+"]]], [EBNF::Rule.new(:rule, "0", [:seq, :primary, :_rule_1]), EBNF::Rule.new(:_rule_1, "0.1", [:opt, :_rule_2]), EBNF::Rule.new(:_rule_2, "0.2", [:range, "?*+"])] ], "IRIREF" => [ [:seq, "<", [:star, [:alt, [:range, "^#x00-#x20<>\"{}|^`\\"], :UCHAR]], ">"], [EBNF::Rule.new(:rule, "0", [:seq, "<", :_rule_1, ">"]), EBNF::Rule.new(:_rule_1, "0.1", [:star, :_rule_2]), EBNF::Rule.new(:_rule_2, "0.2", [:alt, :_rule_3, :UCHAR]), EBNF::Rule.new(:_rule_3, "0.3", [:range, "^#x00-#x20<>\"{}|^`\\"])] ] }.each do |title, (expr, expected)| it title do rule = EBNF::Rule.new(:rule, "0", expr) expect(rule.to_peg).to eq expected end end it "extends with EBNF::PEG::Rule" do rule = EBNF::Rule.new(:rule, "0", [:seq, :foo]).to_peg.first expect(rule).to be_a(EBNF::PEG::Rule) end end describe "#to_regexp" do { hex: [:hex, "#x20", /\ /], hex2: [:hex, "#x5c", /\\/], range: [:range, "a-b", /[a-b]/], range2: [:range, "a-zA-Z", /[a-zA-Z]/], range3: [:range, "abc-", /[abc-]/], range4: [:range, "#x23-#x5b", /[#-\[]/], range5: [:range, "#x5d-#x5e", /[\]-^]/], range6: 
[:range, "#x5c-#x5e", /[\\-^]/], }.each do |title, (op, exp, regexp)| it title do expect(EBNF::Rule.new(title, nil, [op, exp]).to_regexp).to eql regexp end end { istr: ["foo", /foo/ui], }.each do |title, (exp, regexp)| it title, ruby: "!jruby" do expect(EBNF::Rule.new(title, nil, [title, exp]).to_regexp).to eql regexp end end it "raises an error for other operation" do expect {EBNF::Rule.new(:seq, nil, [:seq, :a]).to_regexp}.to raise_error(/Can't turn/) end end describe "#terminal?" do { "ebnf[1]": [ EBNF::Rule.new(:ebnf, "1", [:star, [:alt, :declaration, :rule]]), false, ], "pass": [ EBNF::Rule.new(nil, nil, [:plus, [:range, "#x20\\t\\r\\n"]], kind: :pass), false, ], "alt": [ EBNF::Rule.new(:alt, nil, [:alt, :a, :b, :c], kind: :rule), false, ], "diff": [ EBNF::Rule.new(:R_CHAR, "21", [:diff, :CHAR, "]"], kind: :terminal), true, ], "istr": [ EBNF::Rule.new(:nc, nil, [:istr, "foo"], kind: :terminal), true, ], "not": [ EBNF::Rule.new(:_a_1, "n.1", [:not, :op1], kind: :rule), false, ], "opt": [ EBNF::Rule.new(:_diff_1, "7.1", [:opt, :_diff_2], kind: :rule), false, ], "plus": [ EBNF::Rule.new(:seq, "6", [:plus, :diff], kind: :rule), false, ], "rept": [ EBNF::Rule.new(:rept, "6", [:rept, 1, "*", :diff]), false, ], "rept m.n": [ EBNF::Rule.new(:rept, "6", [:rept, 3, 5, :diff]), false, ], "seq": [ EBNF::Rule.new(:seq, nil, [:seq, :a, :b, :c], kind: :rule), false, ], "star": [ EBNF::Rule.new(:_alt_1, "5.1", [:star, :_alt_2], kind: :rule), false, ] }.each do |title, (rule, bool)| it "#{title} => #{bool.inspect}" do expect(rule.terminal?).to eq bool end end end describe "#pass?" 
do { "ebnf[1]": [ EBNF::Rule.new(:ebnf, "1", [:star, [:alt, :declaration, :rule]]), false, ], "pass": [ EBNF::Rule.new(nil, nil, [:plus, [:range, "#x20\\t\\r\\n"]], kind: :pass), true, ], "alt": [ EBNF::Rule.new(:alt, nil, [:alt, :a, :b, :c], kind: :rule), false, ], "diff": [ EBNF::Rule.new(:R_CHAR, "21", [:diff, :CHAR, "]"], kind: :terminal), false, ], "istr": [ EBNF::Rule.new(:nc, nil, [:istr, "foo"], kind: :terminal), false, ], "not": [ EBNF::Rule.new(:_a_1, "n.1", [:not, :op1], kind: :rule), false, ], "opt": [ EBNF::Rule.new(:_diff_1, "7.1", [:opt, :_diff_2], kind: :rule), false, ], "plus": [ EBNF::Rule.new(:seq, "6", [:plus, :diff], kind: :rule), false, ], "rept": [ EBNF::Rule.new(:rept, "6", [:rept, 1, "*", :diff]), false, ], "rept m.n": [ EBNF::Rule.new(:rept, "6", [:rept, 3, 5, :diff]), false, ], "seq": [ EBNF::Rule.new(:seq, nil, [:seq, :a, :b, :c], kind: :rule), false, ], "star": [ EBNF::Rule.new(:_alt_1, "5.1", [:star, :_alt_2], kind: :rule), false, ] }.each do |title, (rule, bool)| it "#{title} => #{bool.inspect}" do expect(rule.pass?).to eq bool end end end describe "#rule?" 
do { "ebnf[1]": [ EBNF::Rule.new(:ebnf, "1", [:star, [:alt, :declaration, :rule]]), true, ], "pass": [ EBNF::Rule.new(nil, nil, [:plus, [:range, "#x20\\t\\r\\n"]], kind: :pass), false, ], "alt": [ EBNF::Rule.new(:alt, nil, [:alt, :a, :b, :c], kind: :rule), true, ], "diff": [ EBNF::Rule.new(:R_CHAR, "21", [:diff, :CHAR, "]"], kind: :terminal), false, ], "istr": [ EBNF::Rule.new(:nc, nil, [:istr, "foo"], kind: :terminal), false, ], "not": [ EBNF::Rule.new(:_a_1, "n.1", [:not, :op1], kind: :rule), true, ], "opt": [ EBNF::Rule.new(:_diff_1, "7.1", [:opt, :_diff_2], kind: :rule), true, ], "plus": [ EBNF::Rule.new(:seq, "6", [:plus, :diff], kind: :rule), true, ], "rept": [ EBNF::Rule.new(:rept, "6", [:rept, 1, "*", :diff]), true, ], "rept m.n": [ EBNF::Rule.new(:rept, "6", [:rept, 3, 5, :diff]), true, ], "seq": [ EBNF::Rule.new(:seq, nil, [:seq, :a, :b, :c], kind: :rule), true, ], "star": [ EBNF::Rule.new(:_alt_1, "5.1", [:star, :_alt_2], kind: :rule), true, ] }.each do |title, (rule, bool)| it "#{title} => #{bool.inspect}" do expect(rule.rule?).to eq bool end end end describe "#alt?" 
do { "ebnf[1]": [ EBNF::Rule.new(:ebnf, "1", [:star, [:alt, :declaration, :rule]]), false, ], "pass": [ EBNF::Rule.new(nil, nil, [:plus, [:range, "#x20\\t\\r\\n"]], kind: :pass), false, ], "alt": [ EBNF::Rule.new(:alt, nil, [:alt, :a, :b, :c], kind: :rule), true, ], "diff": [ EBNF::Rule.new(:R_CHAR, "21", [:diff, :CHAR, "]"], kind: :terminal), false, ], "istr": [ EBNF::Rule.new(:nc, nil, [:istr, "foo"], kind: :terminal), false, ], "not": [ EBNF::Rule.new(:_a_1, "n.1", [:not, :op1], kind: :rule), false, ], "opt": [ EBNF::Rule.new(:_diff_1, "7.1", [:opt, :_diff_2], kind: :rule), false, ], "plus": [ EBNF::Rule.new(:seq, "6", [:plus, :diff], kind: :rule), false, ], "rept": [ EBNF::Rule.new(:rept, "6", [:rept, 1, "*", :diff]), false, ], "rept m.n": [ EBNF::Rule.new(:rept, "6", [:rept, 3, 5, :diff]), false, ], "seq": [ EBNF::Rule.new(:seq, nil, [:seq, :a, :b, :c], kind: :rule), false, ], "star": [ EBNF::Rule.new(:_alt_1, "5.1", [:star, :_alt_2], kind: :rule), false, ] }.each do |title, (rule, bool)| it "#{title} => #{bool.inspect}" do expect(rule.alt?).to eq bool end end end describe "#seq?" 
do { "ebnf[1]": [ EBNF::Rule.new(:ebnf, "1", [:star, [:alt, :declaration, :rule]]), false, ], "pass": [ EBNF::Rule.new(nil, nil, [:plus, [:range, "#x20\\t\\r\\n"]], kind: :pass), false, ], "alt": [ EBNF::Rule.new(:alt, nil, [:alt, :a, :b, :c], kind: :rule), false, ], "diff": [ EBNF::Rule.new(:R_CHAR, "21", [:diff, :CHAR, "]"], kind: :terminal), false, ], "istr": [ EBNF::Rule.new(:nc, nil, [:istr, "foo"], kind: :terminal), false, ], "not": [ EBNF::Rule.new(:_a_1, "n.1", [:not, :op1], kind: :rule), false, ], "opt": [ EBNF::Rule.new(:_diff_1, "7.1", [:opt, :_diff_2], kind: :rule), false, ], "plus": [ EBNF::Rule.new(:seq, "6", [:plus, :diff], kind: :rule), false, ], "rept": [ EBNF::Rule.new(:rept, "6", [:rept, 1, "*", :diff]), false, ], "rept m.n": [ EBNF::Rule.new(:rept, "6", [:rept, 3, 5, :diff]), false, ], "seq": [ EBNF::Rule.new(:seq, nil, [:seq, :a, :b, :c], kind: :rule), true, ], "star": [ EBNF::Rule.new(:_alt_1, "5.1", [:star, :_alt_2], kind: :rule), false, ] }.each do |title, (rule, bool)| it "#{title} => #{bool.inspect}" do expect(rule.seq?).to eq bool end end end describe "#==" do let(:rule1) {EBNF::Rule.new(:foo, nil, [:seq, "FOO"])} let(:rule2) {EBNF::Rule.new(:foo, nil, [:seq, "FOO"])} let(:rule3) {EBNF::Rule.new(:bar, nil, [:seq, "FOO"])} it "equals itself" do expect(rule1).to eq(rule1) end it "equals an equivalent rule" do expect(rule1).to eq(rule2) end it "does not equal a rule with a different symbol that has the same expression" do expect(rule1).not_to eq(rule3) end end describe "#eql?" 
do let(:rule1) {EBNF::Rule.new(:foo, nil, [:seq, "FOO"])} let(:rule2) {EBNF::Rule.new(:foo, nil, [:seq, "FOO"])} let(:rule3) {EBNF::Rule.new(:bar, nil, [:seq, "FOO"])} it "equals itself" do expect(rule1).to eql(rule1) end it "equals an equivalent rule" do expect(rule1).to eql(rule2) end it "equals a rule with a different symbol that has the same expression" do expect(rule1).to eql(rule3) end end describe "#translate_codepoints" do { "#x20" => " ", "#xffff" => "\u{ffff}" }.each do |str, cp| specify {expect(subject.translate_codepoints(str)).to eql(cp)} end end describe "#non_terminals" do subject {ebnf} { _pass: [], ebnf: [:declaration, :rule], declaration: [:pass], alt: [:seq], seq: [:diff], diff: [:postfix], postfix: [:primary], primary: [], pass: [], LHS: [], SYMBOL: [], HEX: [], RANGE: [], O_RANGE: [], STRING1: [], STRING2: [], CHAR: [], R_CHAR: [], POSTFIX: [], PASS: [] }.each do |sym, expected| it "#{sym} => #{expected.inspect}" do res = subject.ast.find {|r| r.sym == sym} expect(res.non_terminals(subject.ast).map(&:sym)).to eq expected end end end describe "#terminals" do subject {ebnf} { _pass: [:PASS], ebnf: [], declaration: ["@terminals"], alt: [], seq: [], diff: [], postfix: [], primary: [:HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], pass: ["@pass"], LHS: ["["], SYMBOL: ["<", :O_SYMBOL], O_SYMBOL: ["a-z", "A-Z", "0-9", "_", "."], HEX: ["#x"], RANGE: ["["], O_RANGE: ["[^"], STRING1: ['"'], STRING2: ["'"], CHAR: ["#x9#xA#xD", "#x20-#xD7FF", "#xE000-#xFFFD", "#x10000-#x10FFFF"], R_CHAR: [:CHAR, "]", "-", :HEX], POSTFIX: ["?*+"], PASS: ["#x9#xA#xD#x20", "#", "#x", "//", "/*", "(*"] }.each do |sym, expected| it "#{sym} => #{expected.inspect}" do res = subject.ast.find {|r| r.sym == sym} expect(res.terminals(subject.ast).map {|r| r.is_a?(EBNF::Rule) ? 
r.sym : r}).to eq expected end end end describe "#symbols" do subject {ebnf} { _pass: [:PASS], ebnf: [:declaration, :rule], declaration: [:pass], alt: [:seq], seq: [:diff], diff: [:postfix], postfix: [:primary, :POSTFIX], primary: [:HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, :expression], pass: [:expression], LHS: [:SYMBOL], SYMBOL: [:O_SYMBOL], O_SYMBOL: [], HEX: [], RANGE: [:R_CHAR, :HEX], O_RANGE: [:R_CHAR, :HEX], STRING1: [:CHAR], STRING2: [:CHAR], CHAR: [], R_CHAR: [:CHAR, :HEX], POSTFIX: [], PASS: [] }.each do |sym, expected| it "#{sym} => #{expected.inspect}" do res = subject.ast.find {|r| r.sym == sym} expect(res.symbols).to eq expected end end end describe "#validate!" do let(:gram) {EBNF.parse("a ::= 'b'?")} subject {gram.ast.first} { "mixed enum char and hex": [ "a ::= [b#x20]", %(In rule a: Range must be of form HEX+ or R_CHAR+: was "b#x20") ], "mixed enum char and hex (2)": [ "a ::= [#x20z]", %(In rule a: Range must be of form HEX+ or R_CHAR+: was "#x20z") ], }.each do |name, (rule, message)| it name do expect(EBNF.parse(rule)).to be_valid end end { "missing rule": [ "a ::= b", /In rule a: No rule found for b/ ], "illegal string": [ %{a ::= "\u{01}"}, /syntax error/ ], "empty range": [ "a ::= []", /syntax error/ ], "mixed range char and hex": [ "a ::= [b-#x20]", /Range contains illegal components/ ], "mixed range char and hex (2)": [ "a ::= [#x20-b]", /Range contains illegal components/ ], "incomplete range": [ "a ::= [-b]", /syntax error,/ ], "extra range": [ "a ::= [a-b-c]", /syntax error,/ ], }.each do |name, (rule, message)| it name do expect {EBNF.parse(rule, validate: true)}.to raise_error SyntaxError, message end end # Validate rules that can only be created through modification { "alt (empty)": [:alt], "diff (empty)": [:diff], "diff (one)": [:diff, 'A'], "diff (three)": [:diff, 'A', 'B', 'C'], "hex (empty)": [:hex], "hex (two)": [:hex, '#x01', '#x02'], "hex (string)": [:hex, 'string'], "istr (empty)": [:istr], "istr (two)": [:istr, 
'A', 'B'], "not (empty)": [:not], "not (two)": [:not, 'A', 'B'], "opt (empty)": [:opt], "plus (empty)": [:plus], "plus (two)": [:plus, 'A', 'B'], "rept (empty)": [:rept], "rept (one)": [:rept, 1], "rept (two)": [:rept, 1, 2], "rept (four)": [:rept, 1, 2, 'A', 'B'], "rept (float min)": [:rept, 1.1, 2, 'A'], "rept (negative min)": [:rept, -1, 2, 'A'], "rept (float max)": [:rept, 1, 2.1, 'A'], "rept (negative max)": [:rept, 1, -1, 'A'], "star (empty)": [:star], "star (two)": [:star, 'A', 'B'], "not op": [:bad] }.each do |title, expr| it title do subject.expr = expr expect {subject.validate!(gram.ast)}.to raise_error(SyntaxError) end end end describe "#valid?" do subject {EBNF.parse("a ::= b")} it "notes missing rule" do expect(subject.ast.first.valid?(subject.ast)).to be_falsey end it "validates EBNF" do ebnf = EBNF.parse(File.open(File.expand_path("../../etc/ebnf.ebnf", __FILE__))) expect(ebnf.ast.first).to be_valid(ebnf.ast) end end describe "#cclass" do { "passes normal stuff" => [ %{^<>'{}|^`}, %{[^<>'{}|^`]} ], "turns regular hex range into unicode range" => [ %{#x0300-#x036F}, %{[\\u0300-\\u036F]} ], "turns short hex range into unicode range" => [ %{#xC0-#xD6}, %{[\\u00C0-\\u00D6]} ], "turns 3 char hex range into unicode range" => [ %{#x370-#x37D}, %{[\\u0370-\\u037D]} ], "turns long hex range into unicode range" => [ %{#x000300-#x00036F}, %{[\\U00000300-\\U0000036F]} ], "turns 5 char hex range into unicode range" => [ %{#x00370-#x0037D}, %{[\\U00000370-\\U0000037D]} ], }.each do |title, (input, expected)| it title do expect(subject.send(:cclass, input)).to produce(expected, debug) end end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/writer_spec.rb
spec/writer_spec.rb
# coding: utf-8 $:.unshift "." require 'spec_helper' require 'ebnf' require 'sxp' require 'nokogiri' describe EBNF::Writer do RSpec::Matchers.define :have_xpath do |path, value| match do |actual| doc = Nokogiri::HTML.parse(actual) return false unless doc @result = doc.at_xpath(path.to_s) rescue false case value when false @result.nil? when true !@result.nil? when Array @result.to_s.split(" ").include?(*value) when Regexp @result.to_s =~ value else @result.to_s == value end end failure_message do |actual| msg = "expected that #{path.inspect}\nwould be: #{value.inspect}" msg += "\n was: #{@result}" msg += "\nsource:" + actual msg end failure_message_when_negated do |actual| msg = "expected that #{path.inspect}\nwould not be #{value.inspect}" msg += "\nsource:" + actual msg end end describe ".string" do { prolog: [ %{[2] Prolog ::= BaseDecl? PrefixDecl*}, %{[2] Prolog ::= BaseDecl? PrefixDecl*\n} ], }.each do |title, (grammar, plain)| context title do subject {EBNF::Base.new(grammar, format: :native).ast} it "generates plain" do expect(EBNF::Writer.string(*subject)).to eq plain end end end end describe ".print" do { prolog: [ %{[2] Prolog ::= BaseDecl? PrefixDecl*}, %{[2] Prolog ::= BaseDecl? PrefixDecl*\n} ], }.each do |title, (grammar, plain)| context title do subject {EBNF::Base.new(grammar, format: :native).ast} it "generates plain" do expect {EBNF::Writer.print(*subject)}.to write(plain).to(:output) end end end end describe ".html" do { prolog: [ %{[2] Prolog ::= BaseDecl? PrefixDecl*}, { '//table/@class': "grammar", '//table/tbody/@id': "grammar-productions", '//tbody/tr/@id': "grammar-production-Prolog", '//tbody/tr/td[1]/text()': "[2]", '//tbody/tr/td[2]/code/text()': "Prolog", '//tbody/tr/td[3]/text()': "::=", #'//tbody/tr/td[4]/text()': /BaseDecl\? 
PrefixDecl\*/, } ], statement: [ %{[2] statement ::= directive | triples '.'}, { '//table/@class': "grammar", '//table/tbody/@id': "grammar-productions", '//tbody/tr/@id': "grammar-production-statement", '//tbody/tr/td[1]/text()': "[2]", '//tbody/tr/td[2]/code/text()': "statement", '//tbody/tr/td[3]/text()': "::=", #'//tbody/tr/td[4]/text()': /directive | triples '.'/, } ], }.each do |title, (grammar, xpaths)| context title do subject {EBNF::Writer.html(*EBNF::Base.new(grammar, format: :native).ast)} specify {is_expected.to be_valid_html} xpaths.each do |path, value| specify {is_expected.to have_xpath(path, value)} end end end end context "EBNF" do describe "#initialize" do { prolog: { ebnf: %{[2] Prolog ::= BaseDecl? PrefixDecl*}, plain: %{[2] Prolog ::= BaseDecl? PrefixDecl*\n} }, backslash: { ebnf: %{LHS ::= [^'\] | ECHAR}, plain: %{LHS ::= [^'\] | ECHAR} } }.each do |title, params| context title do subject {EBNF::Base.new(params[:ebnf], format: :native).ast} it "generates plain" do expect {EBNF::Writer.new(subject)}.to write(params[:plain]).to(:output) end end end end describe "#format_ebnf" do subject {EBNF::Writer.new([])} context "legal expressions" do { "alt": [ [:alt, :A, :B], "A | B" ], "diff": [ [:diff, :A, :B], "A - B" ], "hex": [ [:hex, "#x20"], "#x20" ], "istr": [ [:istr, "foo"], %("foo") ], "str": [ 'foo', %("foo") ], "opt": [ [:opt, :A], "A?" ], "plus": [ [:plus, :A], "A+" ], "range": [ [:range, "a-zA-Z"], "[a-zA-Z]" ], "rept 0 1": [ [:rept, 0, 1, :A], "A?" ], "rept 0 *": [ [:rept, 0, '*', :A], "A*" ], "rept 1 1": [ [:rept, 1, 1, :A], "A" ], "rept 1 *": [ [:rept, 1, '*', :A], "A+" ], "rept 1 2": [ [:rept, 1, 2, :A], "A A?" ], "rept 1 3": [ [:rept, 1, 3, :A], "A (A A?)?" ], "rept 2 *": [ [:rept, 2, "*", :A], "A A A*" ], "rept 1 3 (A B)": [ [:rept, 1, 3, [:seq, :A, :B]], "(A B) ((A B) (A B)?)?" ], "rept 1 3 (A | B)": [ [:rept, 1, 3, [:alt, :A, :B]], "(A | B) ((A | B) (A | B)?)?" 
], "star": [ [:star, :A], "A*" ], "string '\\r'": [ [:seq, "\r"], %{#x0D} ], "string ' '": [ [:seq, " "], %{#x20} ], "string 'a'": [ [:seq, "a"], %{"a"} ], "string '\"'": [ [:seq, '"'], %{'"'} ], "string \"'\"": [ [:seq, '\''], %{"'"} ], "string \"\€\"": [ [:seq, '€'], %{"€"} ], "n3 path": [ [:seq, :pathItem, [:alt, [:seq, "!", :path], [:seq, "^", :path]]], %{pathItem (("!" path) | ("^" path))} ], }.each do |title, (expr, result)| it title do expect(subject.send(:format_ebnf, expr)).to eql result end end end context "illegal expressions" do { "string 'a\nb": [:seq, "a\nb"], }.each do |title, expr| it title do expect {subject.send(:format_ebnf, expr)}.to raise_error RangeError end end end end context "Existing grammars" do { "ABNF Grammar" => File.expand_path("../../etc/abnf.ebnf", __FILE__), "EBNF Grammar" => File.expand_path("../../etc/ebnf.ebnf", __FILE__), "ISO EBNF Grammar" => File.expand_path("../../etc/iso-ebnf.ebnf", __FILE__), "Turtle Grammar" => File.expand_path("../../etc/turtle.ebnf", __FILE__), "SPARQL Grammar" => File.expand_path("../../etc/sparql.ebnf", __FILE__), }.each do |name, file| context name do it "outputs grammar as text" do expect {EBNF.parse(File.read(file)).to_s}.to_not raise_error end it "parses to equivalent rules" do expect(EBNF.parse(File.read(file)).to_sxp).to produce(File.read(file.sub('.ebnf', '.sxp'))) end it "outputs grammar as html" do html = nil expect {html = EBNF.parse(File.read(file)).to_html}.to_not raise_error expect(html).to be_valid_html end end end end end context "ABNF" do describe "#initialize" do { prolog: [ %{rulelist = 1*( rule / (*c-wsp c-nl) )\n}, %{rulelist = 1*(rule / (*c-wsp c-nl))\n} ], }.each do |title, (grammar, plain)| context title do subject {EBNF::Base.new(grammar, format: :abnf).ast} it "generates plain" do expect {EBNF::Writer.new(subject, format: :abnf)}.to write(plain).to(:output) end end end end describe "#format_abnf" do subject {EBNF::Writer.new([])} context "legal expressions" do { "alt": [ 
[:alt, :A, :B], "A / B" ], "enum": [ [:range, "abc-"], "%d97.98.99.45" ], "hex": [ [:hex, "#x20"], "%x20" ], "istr": [ [:istr, "foo"], %("foo") ], "opt": [ [:opt, :A], "[A]" ], "plus": [ [:plus, :A], "1*A" ], "range": [ [:range, "a-z"], "%d97-122" ], "range 2": [ [:range, "a-zA-Z"], %{(%d97-122 / %d65-90)} ], "rept 0 1": [ [:rept, 0, 1, :A], "*1A" ], "rept 0 *": [ [:rept, 0, '*', :A], "*A" ], "rept 1 1": [ [:rept, 1, 1, :A], "1A" ], "rept 1 *": [ [:rept, 1, '*', :A], "1*A" ], "rept 1 2": [ [:rept, 1, 2, :A], "1*2A" ], "rept 1 3": [ [:rept, 1, 3, :A], "1*3A" ], "rept 2 *": [ [:rept, 2, "*", :A], "2*A" ], "rept 1 3 (A B)": [ [:rept, 1, 3, [:seq, :A, :B]], "1*3(A B)" ], "rept 1 3 (A | B)": [ [:rept, 1, 3, [:alt, :A, :B]], "1*3(A / B)" ], "star": [ [:star, :A], "*A" ], "string '\\r'": [ [:seq, "\r"], %{%x0D} ], "string ' '": [ [:seq, " "], %{" "} ], "string 'a'": [ [:seq, "a"], %{"a"} ], "string '\"'": [ [:seq, '"'], %{%x22} ], "string \"'\"": [ [:seq, '\''], %{"'"} ], "string \"\€\"": [ [:seq, '€'], %{%x20AC} ], "n3 path": [ [:seq, :pathItem, [:alt, [:seq, "!", :path], [:seq, "^", :path]]], %{pathItem (("!" 
path) / ("^" path))} ], }.each do |title, (expr, result)| it title do expect(subject.send(:format_abnf, expr)).to eql result end end end context "illegal expressions" do { "[^abc]": [:range, "^abc"], "A - B": [:diff, :A, :B], }.each do |title, expr| it title do expect {subject.send(:format_abnf, expr)}.to raise_error RangeError end end end end context "Existing grammars" do { "ABNF Grammar" => File.expand_path("../../etc/abnf.abnf", __FILE__), "HTTP Grammar" => File.expand_path("../../examples/abnf/examples/http.abnf", __FILE__), "JSON Grammar" => File.expand_path("../../examples/abnf/examples/json.abnf", __FILE__), "Postal Address" => File.expand_path("../../examples/abnf/examples/postal-address.abnf", __FILE__), "URI Grammar" => File.expand_path("../../examples/abnf/examples/uri.abnf", __FILE__), }.each do |name, file| context name do it "outputs grammar as text" do expect {EBNF.parse(File.read(file), format: :abnf).to_s(format: :abnf)}.to_not raise_error end it "outputs grammar as html" do html = nil expect {html = EBNF.parse(File.read(file), format: :abnf).to_html(format: :abnf)}.to_not raise_error expect(html).to be_valid_html end end end end end context "ISOEBNF" do describe "#initialize" do { prolog: [ %{syntax = syntax_rule, {syntax_rule} ;}, %{syntax = syntax_rule, {syntax_rule} ;\n} ], }.each do |title, (grammar, plain)| context title do subject {EBNF::Base.new(grammar, format: :isoebnf).ast} it "generates plain" do expect {EBNF::Writer.new(subject, format: :isoebnf)}.to write(plain).to(:output) end end end end describe "#format_isoebnf" do subject {EBNF::Writer.new([])} context "legal expressions" do { "alt": [ [:alt, :A, :B], "A | B" ], "diff": [ [:diff, :A, :B], "A - B" ], "enum": [ [:range, "abc-"], %{("a" | "b" | "c" | "-")} ], "hex": [ [:hex, "#x20"], %(" ") ], "istr": [ [:istr, "foo"], %("foo") ], "opt": [ [:opt, :A], "[A]" ], "plus": [ [:plus, :A], "A, {A}" ], "range": [ [:range, "a-z"], %{("a" | "b" | "c" | "d" | "e" | "f" | "g" | "h" | "i" | "j" 
| "k" | "l" | "m" | "n" | "o" | "p" | "q" | "r" | "s" | "t" | "u" | "v" | "w" | "x" | "y" | "z")} ], "range 2": [ [:range, "a-zA-Z"], %{("a" | "b" | "c" | "d" | "e" | "f" | "g" | "h" | "i" | "j" | "k" | "l" | "m" | "n" | "o" | "p" | "q" | "r" | "s" | "t" | "u" | "v" | "w" | "x" | "y" | "z" | "A" | "B" | "C" | "D" | "E" | "F" | "G" | "H" | "I" | "J" | "K" | "L" | "M" | "N" | "O" | "P" | "Q" | "R" | "S" | "T" | "U" | "V" | "W" | "X" | "Y" | "Z")}], "rept 0 1": [ [:rept, 0, 1, :A], "[A]" ], "rept 0 *": [ [:rept, 0, '*', :A], "{A}" ], "rept 1 1": [ [:rept, 1, 1, :A], "A" ], "rept 1 *": [ [:rept, 1, '*', :A], "A, {A}" ], "rept 1 2": [ [:rept, 1, 2, :A], "A, [A]" ], "rept 1 3": [ [:rept, 1, 3, :A], "A, [(A, [A])]" ], "rept 2 *": [ [:rept, 2, "*", :A], "A, A, {A}" ], "rept 1 3 (A B)": [ [:rept, 1, 3, [:seq, :A, :B]], "(A, B), [((A, B), [(A, B)])]" ], "rept 1 3 (A | B)": [ [:rept, 1, 3, [:alt, :A, :B]], "(A | B), [((A | B), [(A | B)])]" ], "star": [ [:star, :A], "{A}" ], "string ' '": [ [:seq, " "], %{" "} ], "string 'a'": [ [:seq, "a"], %{"a"} ], "string '\"'": [ [:seq, '"'], %{'"'} ], "string \"'\"": [ [:seq, '\''], %{"'"} ], "n3 path": [ [:seq, :pathItem, [:alt, [:seq, "!", :path], [:seq, "^", :path]]], %{pathItem, (("!", path) | ("^", path))} ], }.each do |title, (expr, result)| it title do expect(subject.send(:format_isoebnf, expr)).to eql result end end end context "illegal expressions" do { "[^abc]": [:range, "^abc"], "string '\\r'": [:seq, "\r"], "string \"\€\"": [:seq, '€'], }.each do |title, expr| it title do expect {subject.send(:format_isoebnf, expr)}.to raise_error RangeError end end end end context "Existing grammars" do { "ISO EBNF Grammar" => File.expand_path("../../etc/iso-ebnf.isoebnf", __FILE__), "Simiple EBNF Grammar" => File.expand_path("../../examples/isoebnf/examples/ebnf.isoebnf", __FILE__), "HTML Grammar" => File.expand_path("../../examples/isoebnf/examples/html.isoebnf", __FILE__), "Pascal Grammar" => 
File.expand_path("../../examples/isoebnf/examples/pascal.isoebnf", __FILE__), "Postal Address" => File.expand_path("../../examples/isoebnf/examples/postal-address.isoebnf", __FILE__), }.each do |name, file| context name do it "outputs grammar as text" do expect {EBNF.parse(File.read(file), format: :isoebnf).to_s(format: :isoebnf)}.to_not raise_error end it "outputs grammar as html" do html = nil expect {html = EBNF.parse(File.read(file), format: :isoebnf).to_html(format: :isoebnf)}.to_not raise_error expect(html).to be_valid_html end end end end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/matchers.rb
spec/matchers.rb
# coding: utf-8 def normalize(obj) if obj.is_a?(String) obj.gsub(/\s+/m, ' '). gsub(/\s+\)/m, ')'). gsub(/\(\s+/m, '('). strip else obj end end Info = Struct.new(:id, :logger, :action, :result, :format) RSpec::Matchers.define :produce do |expected, info| match do |actual| @info = if info.is_a?(Logger) Info.new("", info) elsif info.is_a?(Hash) Info.new(info[:id], info[:logger], info[:action], info[:result]) else Info.new(info) end expect(normalize(actual)).to eq normalize(expected) end failure_message do |actual| "Expected: #{normalize(expected)}\n" + "Actual : #{normalize(actual)}\n" + "Raw : #{expected}\n" + "Result : #{actual}\n" + "Processing results:\n#{@info.logger}" end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/bnf_spec.rb
spec/bnf_spec.rb
# coding: utf-8 $:.unshift "." require 'spec_helper' require 'ebnf' require 'sxp' describe EBNF::BNF do describe "#make_bnf" do { %{[2] Prolog ::= BaseDecl? PrefixDecl*} => %{((rule _empty "0" (seq)) (rule Prolog "2" (seq _Prolog_1 _Prolog_2)) (rule _Prolog_1 "2.1" (cleanup opt) (alt _empty BaseDecl)) (rule _Prolog_2 "2.2" (cleanup star) (alt _empty _Prolog_3)) (rule _Prolog_3 "2.3" (cleanup merge) (seq PrefixDecl _Prolog_2)))}, %{ [9] primary ::= HEX | RANGE | O_RANGE | STRING1 | STRING2 | '(' expression ')' } => %{((rule _empty "0" (seq)) (rule primary "9" (alt HEX RANGE O_RANGE STRING1 STRING2 _primary_1 )) (rule _primary_1 "9.1" (seq '(' expression ')')))}, %{ [1] R1 ::= 1 2 [2] R2 ::= 1 2 } => %{((rule _empty "0" (seq)) (terminal R1 "1" (seq 1 2)) (terminal R2 "2" (seq 1 2)))} }.each do |input, expected| it "parses #{input.inspect}" do expect(parse(input).make_bnf.ast.to_sxp).to produce(expected, @debug) end end context "EBNF Grammar" do subject {parse(File.read(File.expand_path("../../etc/ebnf.ebnf", __FILE__))).make_bnf} it "rule expressions should be flat, terminal or alt/seq" do subject.ast.each do |rule| case when !rule.rule? then true when !rule.expr.is_a?(Array) then true else expect("#{rule.sym}: #{rule.expr.first}").to match(/#{rule.sym}: (alt|seq)/) end end end end context "Turtle Grammar" do subject {parse(File.read(File.expand_path("../../etc/turtle.ebnf", __FILE__))).make_bnf} it "rule expressions should be flat, terminal or alt/seq" do subject.ast.each do |rule| case when rule.terminal? then true when !rule.expr.is_a?(Array) then true else expect("#{rule.sym}: #{rule.expr.first}").to match(/#{rule.sym}: (alt|seq)/) end end end end end def parse(value, **options) @debug = [] options = {debug: @debug, format: :native}.merge(options) EBNF::Base.new(value, **options) end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/abnf_spec.rb
spec/abnf_spec.rb
# coding: utf-8 $:.unshift "." require 'spec_helper' require 'ebnf' require 'sxp' describe EBNF::ABNF do let(:logger) {RDF::Spec.logger} after(:each) do |example| puts logger.to_s if example.exception && !example.exception.is_a?(RSpec::Expectations::ExpectationNotMetError) end context "rule variations" do { "legal rule name": [ 'rulename = "foo"', %{((terminal rulename (istr "foo")))} ], "binary character": [ "bin = %b11", %{((terminal bin (hex "#x3")))} ], "binary string": [ "bin = %b1.10.11", %{((rule bin (seq (hex "#x1") (hex "#x2") (hex "#x3"))))} ], "binary string (ascii range)": [ "bin = %b1100010.1101001.1101110", %{((rule bin (seq "bin")))} ], "binary string (mixed range)": [ "bin = %b1100010.1.1101110", %{((rule bin (seq "b" (hex "#x1") "n")))} ], "binary range": [ "bin = %b1100010-1101110", %{((terminal bin (range "#x62-#x6e")))} ], "decimal char": [ "dec = %d22", %{((terminal dec (hex "#x16")))} ], "decimal string": [ "dec = %d1.2.3", %{((rule dec (seq (hex "#x1") (hex "#x2") (hex "#x3"))))} ], "decimal string (ascii range)": [ "dec = %d100.101.99", %{((rule dec (seq "dec")))} ], "decimal string (mixed range)": [ "dec = %d100.1.99", %{((rule dec (seq "d" (hex "#x1") "c")))} ], "decimal range": [ "dec = %d22-40", %{((terminal dec (range "#x16-#x28")))} ], "hex character": [ "hex = %x1f", %{((terminal hex (hex "#x1f")))} ], "hex string": [ "hex = %x1.a.c", %{((rule hex (seq (hex "#x1") (hex "#xa") (hex "#xc"))))} ], "hex string (ascii range)": [ "hex = %x68.65.78", %{((rule hex (seq "hex")))} ], "hex string (mixed range)": [ "hex = %x68.1.78", %{((rule hex (seq "h" (hex "#x1") "x")))} ], "hex range": [ "hex = %x22-40", %{((terminal hex (range "#x22-#x40")))} ], "aliteration": [ %(baz = foo / bar), %{((rule baz (alt foo bar)))} ], "aliteration 2": [ %(buzz = foo / bar / baz), %{((rule buzz (alt foo bar baz)))} ], "incremental alternatives": [ %(ruleset = alt1 / alt2\nruleset =/ alt3\nruleset =/ alt4 / alt5), %{((rule ruleset (alt alt1 alt2 alt3 alt4 
alt5)))} ], "concatenated chars and ranges": [ %(char-line = %x0D.0A %x20-7E %x0D.0A), %{((rule char-line (seq (seq (hex "#xd") (hex "#xa")) (range "#x20-#x7e") (seq (hex "#xd") (hex "#xa")))))} ], "sequence group": [ %(sequence-group = elem (foo / bar) blat), %{((rule sequence-group (seq elem (alt foo bar) blat)))} ], "rept *": [ %(rept = *A), %{((rule rept (star A)))} ], "rept 0*": [ %(rept = 0*A), %{((rule rept (star A)))} ], "rept 1*": [ %(rept = 1*A), %{((rule rept (plus A)))} ], "rept 2*": [ %(rept = 2*A), %{((rule rept (rept 2 "*" A)))} ], "rept *1": [ %(rept = *1A), %{((rule rept (rept 0 1 A)))} ], "rept 0*2": [ %(rept = 0*2A), %{((rule rept (rept 0 2 A)))} ], "rept 1*3": [ %(rept = 1*3A), %{((rule rept (rept 1 3 A)))} ], "rept 3": [ %(rept = 3A), %{((rule rept (rept 3 3 A)))} ], "opt": [ %(opt = [foo bar]), %{((rule opt (opt (seq foo bar))))} ], "comment": [ %(foo = %x61 ; a), %{((terminal foo (hex "#x61")))} ], "prose-value": [ %(prose = < free form >), %{((rule prose (seq "< free form >")))} ] }.each do |title, (input, expect)| it title do input << "\n" unless input.end_with?("\n") expect(parse(input).to_sxp).to produce(expect, logger) end end end context "Case-Sensitive String Support in ABNF" do { "case insensitive": [ %(rulename = %i"aBc"), %{((terminal rulename (istr "aBc")))} ], "case sensitive": [ %(rulename = %s"aBc"), %{((rule rulename (seq 'aBc')))} ], }.each do |title, (input, expect)| it title do input << "\n" unless input.end_with?("\n") expect(parse(input).to_sxp).to produce(expect, logger) end end end context "Core Rules" do { "ALPHA": [ "builtin = ALPHA", %{((rule builtin (seq ALPHA)) (terminal ALPHA (range "#x41-#x5A#x61-#x7A")))} ], "BIT": [ "builtin = BIT", %{((rule builtin (seq BIT)) (terminal BIT (alt "0" "1")))} ], "CR": [ "builtin = CR", %{((rule builtin (seq CR)) (terminal CR (hex "#x0D")))} ], "CRLF": [ "builtin = CRLF", %{((rule builtin (seq CRLF)) (terminal CRLF (seq (opt CR) LF)))} ], "CTL": [ "builtin = CTL", %{((rule builtin 
(seq CTL)) (terminal CTL (alt (range "#x00-#x1F") (hex "#x7F"))))} ], "DIGIT": [ "builtin = DIGIT", %{((rule builtin (seq DIGIT)) (terminal DIGIT (range "#x30-#x39")))} ], "DQUOTE": [ "builtin = DQUOTE", %{((rule builtin (seq DQUOTE)) (terminal DQUOTE (hex "#x22")))} ], "HEXDIG": [ "builtin = HEXDIG", %{((rule builtin (seq HEXDIG)) (terminal HEXDIG (alt DIGIT (range "A-F"))))} ], "HTAB": [ "builtin = HTAB", %{((rule builtin (seq HTAB)) (terminal HTAB (hex "#x09")))} ], "LF": [ "builtin = LF", %{((rule builtin (seq LF)) (terminal LF (hex "#x0A")))} ], "LWSP": [ "builtin = LWSP", %{((rule builtin (seq LWSP)) (terminal LWSP (star (alt WSP (seq CRLF WSP)))))} ], "WSP": [ "builtin = WSP", %{((rule builtin (seq WSP)) (terminal WSP (alt SP HTAB)))} ], }.each do |title, (input, expect)| it title do input << "\n" unless input.end_with?("\n") expect(parse(input).to_sxp).to produce(expect, logger) end end end context "illegal syntax" do { "illegal rule name": "rule.name = CRLF\n", "no line ending": "rule.name = CRLF", "illegal binary": "bin = %b2\n", "illegal binary range": "bin = %b10-20\n", "illegal decimal": "dec = %d2f\n", "illegal decimal range": "dec = %d22-4060-80\n", "illegal hex": "hex = %x2g\n", "illegal hex range": "hex = %x22-4060-80\n", }.each do |title, input| it title do expect {parse(input)}.to raise_error(SyntaxError) end end end it "parses ABNF grammar" do gram = parse(File.open(File.expand_path("../../etc/abnf.abnf", __FILE__))) expect(gram).to be_valid end def parse(input, **options) @debug = [] EBNF.parse(input, debug: @debug, format: :abnf, **options) end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/ll1_spec.rb
spec/ll1_spec.rb
# coding: utf-8 $:.unshift "." require 'spec_helper' require 'ebnf' require 'sxp' describe EBNF::Base do let(:logger) {RDF::Spec.logger} after(:each) do |example| puts logger.to_s if example.exception && !example.exception.is_a?(RSpec::Expectations::ExpectationNotMetError) end describe "#first_follow" do context "start" do context "with legitimate start rule" do let!(:ebnf_doc) { parse(%([1] ebnf ::= (declaration | rule)*), start: :ebnf) } let(:rule) {ebnf_doc.ast.detect {|r| r.sym == :ebnf}} it "should include rule" do expect(rule).not_to be_nil end context "start rule" do subject {rule} its(:start) {expect(rule.start).to be_truthy} its(:follow) {should include(:_eof)} end end context "with illegitimate start rule" do specify { expect { parse(%([1] ebnf ::= (declaration | rule)*), start: :foo) }.to raise_error("No rule found for start symbol foo") } end end context "comprehensions" do { "alt" => [ %{[1] ebnf ::= declaration | rule}, %{ ((rule _empty "0" (first _eps) (seq)) (rule ebnf "1" (alt declaration rule))) } ], "seq[1]" => [ %{[1] rule::= a b }, %{ ((rule _empty "0" (first _eps) (seq)) (rule rule "1" (seq a b)) (rule _rule_1 "1.1" (seq b))) } ], "blankNodePropertyList" => [ %{[14] blankNodePropertyList ::= "[" predicateObjectList "]"}, %{ ((rule _empty "0" (first _eps) (seq)) (rule blankNodePropertyList "14" (first "[") (seq "[" predicateObjectList "]")) (rule _blankNodePropertyList_1 "14.1" (seq predicateObjectList "]")) (rule _blankNodePropertyList_2 "14.2" (first "]") (seq "]"))) } ] }.each do |name, (input, expected)| it name do ebnf = parse(input) sin = ebnf.ast.sort.to_sxp expect(sin).to produce(expected, @debug) end end end context "first" do { "alt (Fi.2.1)" => [ %{ [5] base ::= '@base' IRIREF "." 
}, %{ ((rule _empty "0" (first _eps) (seq)) (rule base "5" (first '@base') (seq '@base' IRIREF ".")) (rule _base_1 "5.1" (seq IRIREF ".")) (rule _base_2 "5.2" (first ".") (seq "."))) }, nil ], "sparqlPrefix (Fi.2.1/2.2)" => [ %{ [29s] sparqlBase ::= SPARQL_BASE IRIREF [18] IRIREF ::= '<' ("range" | UCHAR)* '>' [29t] SPARQL_BASE ::= [Bb][Aa][Ss][Ee] }, %{( (rule _empty "0" (first _eps) (seq)) (terminal IRIREF "18" (seq '<' (star (alt "range" UCHAR)) '>')) (rule sparqlBase "29s" (first SPARQL_BASE) (seq SPARQL_BASE IRIREF)) (terminal SPARQL_BASE "29t" (seq (range "Bb") (range "Aa") (range "Ss") (range "Ee"))) (rule _sparqlBase_1 "29s.1" (first IRIREF) (seq IRIREF))) }, nil ], "declaration (FF.1)" => [ %{ [2] declaration ::= '@terminals' | '@pass' }, %{ ((rule _empty "0" (first _eps) (seq)) (rule declaration "2" (first '@pass' '@terminals') (alt '@terminals' '@pass'))) }, nil ], "Query (FF.1/6)" => [ %{ [2] Query ::= "BASE"? "SELECT" }, %{ ((rule _empty "0" (first _eps) (seq)) (rule Query "2" (first "BASE" "SELECT") (seq _Query_1 "SELECT")) (rule _Query_1 "2.1" (first "BASE" _eps) (follow "SELECT") (cleanup opt) (alt _empty "BASE")) (rule _Query_2 "2.2" (first "SELECT") (seq "SELECT"))) } ], "turtleDoc (FF.2)" => [ %{ [1] turtleDoc ::= statement* [2] statement ::= directive | triples "." 
}, %{ ((rule _empty "0" (first _eps) (seq)) (rule turtleDoc "1" (start #t) (first _eps) (follow _eof) (cleanup star) (alt _empty _turtleDoc_1)) (rule _turtleDoc_1 "1.1" (follow _eof) (cleanup merge) (seq statement turtleDoc)) (rule _turtleDoc_2 "1.2" (first _eps) (follow _eof) (seq turtleDoc)) (rule statement "2" (follow _eof) (alt directive _statement_1)) (rule _statement_1 "2.1" (follow _eof) (seq triples ".")) (rule _statement_2 "2.2" (first ".") (follow _eof) (seq "."))) }, :turtleDoc ], "SolutionModifier" => [ %{ [18] SolutionModifier ::= _SolutionModifier_1 [18.1] _SolutionModifier_1 ::= _empty | GroupClause [19] GroupClause ::= "GROUP" }, %{ ((rule _empty "0" (first _eps) (seq)) (rule SolutionModifier "18" (first "GROUP" _eps) (seq _SolutionModifier_1)) (rule _SolutionModifier_1 "18.1" (first "GROUP" _eps) (alt _empty GroupClause)) (rule GroupClause "19" (first "GROUP") (seq "GROUP"))) } ], "GroupGraphPattern" => [ %{ [54] GroupGraphPattern ::= '{' "E"? '}' }, %[ ((rule _empty "0" (first _eps) (seq) ) (rule GroupGraphPattern "54" (first '{') (seq '{' _GroupGraphPattern_1 '}')) (rule _GroupGraphPattern_1 "54.1" (first "E" _eps) (follow '}') (cleanup opt) (alt _empty "E")) (rule _GroupGraphPattern_2 "54.2" (first "E" '}') (seq _GroupGraphPattern_1 '}')) (rule _GroupGraphPattern_3 "54.3" (first '}') (seq '}'))) ] ] }.each do |name, (input, expected, start)| it name do ebnf = parse(input, start: start) sin = ebnf.ast.sort.to_sxp expect(sin).to produce(expected, @debug) end end end context "follow" do { "objectList (FF.3)" => [ %{ [1] rule1 ::= a b [2] a ::= "foo" [3] b ::= "bar" }, %{ ((rule _empty "0" (first _eps) (seq)) (rule rule1 "1" (first "foo") (seq a b)) (rule _rule1_1 "1.1" (first "bar") (seq b)) (rule a "2" (first "foo") (follow "bar") (seq "foo")) (rule b "3" (first "bar") (seq "bar"))) }, nil ], "blankNodePropertyList (FF.4)" => [ %{ [7] predicateObjectList ::= verb objectList ( ";" ( verb objectList)? 
)* [14] blankNodePropertyList ::= "[" predicateObjectList "]" }, %{ ((rule _empty "0" (first _eps) (seq)) (rule predicateObjectList "7" (follow "]") (seq verb objectList _predicateObjectList_1)) (rule _predicateObjectList_1 "7.1" (first ";" _eps) (follow "]") (cleanup star) (alt _empty _predicateObjectList_3)) (rule _predicateObjectList_2 "7.2" (first ";") (follow ";" "]") (seq ";" _predicateObjectList_4)) (rule _predicateObjectList_3 "7.3" (first ";") (follow "]") (cleanup merge) (seq _predicateObjectList_2 _predicateObjectList_1 )) (rule _predicateObjectList_4 "7.4" (first _eps) (follow ";" "]") (cleanup opt) (alt _empty _predicateObjectList_5)) (rule _predicateObjectList_5 "7.5" (follow ";" "]") (seq verb objectList)) (rule _predicateObjectList_6 "7.6" (follow "]") (seq objectList _predicateObjectList_1)) (rule _predicateObjectList_7 "7.7" (first ";" _eps) (follow "]") (seq _predicateObjectList_1)) (rule _predicateObjectList_8 "7.8" (first _eps) (follow ";" "]") (seq _predicateObjectList_4)) (rule _predicateObjectList_9 "7.9" (follow ";" "]") (seq objectList)) (rule blankNodePropertyList "14" (start #t) (first "[") (follow _eof) (seq "[" predicateObjectList "]")) (rule _blankNodePropertyList_1 "14.1" (follow _eof) (seq predicateObjectList "]") ) (rule _blankNodePropertyList_2 "14.2" (first "]") (follow _eof) (seq "]"))) }, :blankNodePropertyList ], "collection (FF.7/8)" => [ %{ [15] collection ::= "(" object* ")" }, %{ ((rule _empty "0" (first _eps) (seq)) (rule collection "15" (first "(") (seq "(" _collection_1 ")")) (rule _collection_1 "15.1" (first _eps) (follow ")") (cleanup star) (alt _empty _collection_2)) (rule _collection_2 "15.2" (follow ")") (cleanup merge) (seq object _collection_1)) (rule _collection_3 "15.3" (first ")") (seq _collection_1 ")")) (rule _collection_4 "15.4" (first _eps) (follow ")") (seq _collection_1)) (rule _collection_5 "15.5" (first ")") (seq ")"))) }, nil ], "turtleDoc (FF.6)" => [ %{ [1] turtleDoc ::= statement* [2] statement ::= 
directive | triples "." [3] directive ::= 'BASE' [4] triples ::= 'IRI' }, %{ ((rule _empty "0" (first _eps) (seq)) (rule turtleDoc "1" (start #t) (first 'BASE' 'IRI' _eps) (follow _eof) (cleanup star) (alt _empty _turtleDoc_1)) (rule _turtleDoc_1 "1.1" (first 'BASE' 'IRI') (follow _eof) (cleanup merge) (seq statement turtleDoc)) (rule _turtleDoc_2 "1.2" (first 'BASE' 'IRI' _eps) (follow _eof) (seq turtleDoc)) (rule statement "2" (first 'BASE' 'IRI') (follow 'BASE' 'IRI' _eof) (alt directive _statement_1)) (rule _statement_1 "2.1" (first 'IRI') (follow 'BASE' 'IRI' _eof) (seq triples ".")) (rule _statement_2 "2.2" (first ".") (follow 'BASE' 'IRI' _eof) (seq ".")) (rule directive "3" (first 'BASE') (follow 'BASE' 'IRI' _eof) (seq 'BASE')) (rule triples "4" (first 'IRI') (follow ".") (seq 'IRI'))) }, :turtleDoc ] }.each do |name, (input, expected, start)| it name do ebnf = parse(input, start: start) sin = ebnf.ast.sort.to_sxp expect(sin).to produce(expected, @debug) end end end end shared_examples "#build_tables" do |source, start| let!(:ebnf) { ebnf = parse(source, start: start) ebnf.build_tables ebnf } subject {ebnf} context "#terminals" do subject {ebnf.terminals} let(:symbols) {subject.select {|t| t.is_a?(Symbol)}} let(:other) {subject.reject {|t| t.is_a?(Symbol)}} specify {should be_a(Array)} it "has symbols which are terminals" do symbols.each do |t| expect(ebnf.find_rule(t)).not_to be_nil end end it "has strings otherwise" do expect(other.map(&:class).uniq).to eq [String] end it "has strings used in all rules" do rule_strings = ebnf.ast. select {|r| r.rule?}. map(&:expr).flatten. select {|t| t.is_a?(String)}. 
uniq expect(rule_strings).to include(*other) end end [:first, :follow].each do |tab| context "#tab" do subject {ebnf.send(tab)} let(:symbols) {subject.select {|t| t.is_a?(Symbol)}} specify {should be_a(Hash)} it "keys are all rule symbols" do subject.keys.each do |sym| r = ebnf.find_rule(sym) expect(r).not_to be_nil expect(r).to be_rule end end it "values should all be terminals" do subject.values.flatten.compact.each do |t| expect(ebnf.terminals).to include(t) unless [:_eps, :_eof].include?(t) end end end end context "#branch" do subject {ebnf.branch} let(:symbols) {subject.select {|t| t.is_a?(Symbol)}} specify {should be_a(Hash)} it "keys are all rule symbols" do subject.keys.each do |sym| r = ebnf.find_rule(sym) expect(r).not_to be_nil expect(r).to be_rule end end it "values should all be Hash's whose keys are terminals" do values = subject.values expect(values.map(&:class).uniq).to eq [Hash] values.map(&:keys).flatten.uniq.each do |t| expect(ebnf.terminals).to include(t) end end it "values of terminal keys are symbols of rules or strings" do symbols = subject.values.map(&:values).flatten.uniq expect(symbols.map(&:class).uniq).to include(Symbol, String) end end describe "EBNF::Base#to_ruby" do it "writes tables to output" do output = StringIO.new ebnf.to_ruby(output) output.rewind expect(output.read).not_to be_empty end end end describe "#build_tables" do context "error reporting" do before(:each) {$stderr, @old_stderr = StringIO.new, $stderr} after(:each) {$stderr = @old_stderr} { "generated terminal" => [ "[1] implicit_terminal ::= [a-z]*", %r{terminal _implicit_terminal_1 is automatically generated}, :implicit_terminal ], "First/First Conflict" => [ %( [1] s ::= e | e "a" [2] e ::= "b"? ), %r{First/First Conflict: .* is the condition for both _s_1 and e}, :s ], "First/Follow Conflict" => [ %( [1] s ::= a "a" "b" [2] a ::= "a"? 
), %r{First/Follow Conflict: .* is both first and follow of a}, :s ], }.each do |name, (input, expected, start)| it name do ebnf = parse(input, start: start) expect { ebnf.build_tables expect(false).to produce(true, @debug) }.to raise_error("Table creation failed with errors") expect(ebnf.errors.to_s).to match(expected) sio = StringIO.new ebnf.to_ruby(sio) sio.rewind expect(sio.read).to match(/Note, grammar has errors/) end end end end describe "EBNF Grammar" do it_behaves_like "#build_tables", File.read(File.expand_path("../../etc/ebnf.ebnf", __FILE__)), :ebnf end describe "Turtle Grammar" do it_behaves_like "#build_tables", File.read(File.expand_path("../../etc/turtle.ebnf", __FILE__)), :turtleDoc let!(:ebnf) { ebnf = parse(File.read(File.expand_path("../../etc/turtle.ebnf", __FILE__)), start: :turtleDoc) ebnf.build_tables ebnf } subject {ebnf} # Spot check some productions { turtleDoc: [ ['@prefix', '@base', :IRIREF], [:_eof] ], _predicateObjectList_1: [ [";", :_eps], [".", "]"] ] }.each do |nt, (first, follow)| context nt do subject {ebnf.find_rule(nt)} it {should_not be_nil} its(:first) {expect(subject.first & first).to include(*first)} its(:follow) {expect(subject.follow & follow).to include(*follow)} end end end def parse(value, **options) ebnf = EBNF::Base.new(value, **options) ebnf.make_bnf ebnf.first_follow(options[:start]) ebnf end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/isoebnf_spec.rb
spec/isoebnf_spec.rb
# coding: utf-8 $:.unshift "." require 'spec_helper' require 'ebnf' require 'sxp' describe EBNF::ISOEBNF do let(:logger) {RDF::Spec.logger} after(:each) do |example| puts logger.to_s if example.exception && !example.exception.is_a?(RSpec::Expectations::ExpectationNotMetError) end context "rule variations" do { "legal meta_identifier": [ 'rulename = "foo" ;', %{((rule rulename (seq "foo")))} ], "digits": [ %{ digit_excluding_zero = "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" ; digit = "0" | digit_excluding_zero ; }, %{((rule digit_excluding_zero (alt "1" "2" "3" "4" "5" "6" "7" "8" "9")) (rule digit (alt "0" digit_excluding_zero)))} ], "sequence of numbers": [ %{ twelve = "1", "2" ; two_hundred_one = "2", "0", "1" ; three_hundred_twelve = "3", twelve ; twelve_thousand_two_hundred_one = twelve, two_hundred_one ; }, %{((rule twelve (seq "1" "2")) (rule two_hundred_one (seq "2" "0" "1")) (rule three_hundred_twelve (seq "3" twelve)) (rule twelve_thousand_two_hundred_one (seq twelve two_hundred_one)))} ], "natural number": [ %{natural_number = digit_excluding_zero, { digit } ;}, %{((rule natural_number (seq digit_excluding_zero (star digit))))} ], "integer": [ %{integer = "0" | [ "-" ], natural_number ;}, %{((rule integer (alt "0" (seq (opt "-") natural_number))))} ], "simple grammar": [ %q{ letter = "A" | "B" | "C" | "D" | "E" | "F" | "G" | "H" | "I" | "J" | "K" | "L" | "M" | "N" | "O" | "P" | "Q" | "R" | "S" | "T" | "U" | "V" | "W" | "X" | "Y" | "Z" | "a" | "b" | "c" | "d" | "e" | "f" | "g" | "h" | "i" | "j" | "k" | "l" | "m" | "n" | "o" | "p" | "q" | "r" | "s" | "t" | "u" | "v" | "w" | "x" | "y" | "z" ; digit = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" ; symbol = "[" | "]" | "{" | "}" | "(" | ")" | "<" | ">" | "'" | '"' | "=" | "|" | "." 
| "," | ";" ; character = letter | digit | symbol | "_" ; identifier = letter , { letter | digit | "_" } ; terminal = "'" , character , { character } , "'" | '"' , character , { character } , '"' ; lhs = identifier ; rhs = identifier | terminal | "[" , rhs , "]" | "{" , rhs , "}" | "(" , rhs , ")" | rhs , "|" , rhs | rhs , "," , rhs ; rule = lhs , "=" , rhs , ";" ; grammar = { rule } ; }, %q{((rule letter (alt "A" "B" "C" "D" "E" "F" "G" "H" "I" "J" "K" "L" "M" "N" "O" "P" "Q" "R" "S" "T" "U" "V" "W" "X" "Y" "Z" "a" "b" "c" "d" "e" "f" "g" "h" "i" "j" "k" "l" "m" "n" "o" "p" "q" "r" "s" "t" "u" "v" "w" "x" "y" "z" )) (rule digit (alt "0" "1" "2" "3" "4" "5" "6" "7" "8" "9")) (rule symbol (alt "[" "]" "{" "}" "(" ")" "<" ">" "'" '"' "=" "|" "." "," ";")) (rule character (alt letter digit symbol "_")) (rule identifier (seq letter (star (alt letter digit "_")))) (rule terminal (alt (seq "'" character (star character) "'") (seq '"' character (star character) '"'))) (rule lhs (seq identifier)) (rule rhs (alt identifier terminal (seq "[" rhs "]") (seq "{" rhs "}") (seq "(" rhs ")") (seq rhs "|" rhs) (seq rhs "," rhs)) ) (rule rule (seq lhs "=" rhs ";")) (rule grammar (star rule)))} ], "pascal": [ %q{ (* a simple program syntax in EBNF − Wikipedia *) program = 'PROGRAM', white_space, identifier, white_space, 'BEGIN', white_space, { assignment, ";", white_space }, 'END.' ; identifier = alphabetic_character, { alphabetic_character | digit } ; number = [ "-" ], digit, { digit } ; string = '"' , { all_characters - '"' }, '"' ; assignment = identifier , ":=" , ( number | identifier | string ) ; alphabetic_character = "A" | "B" | "C" | "D" | "E" | "F" | "G" | "H" | "I" | "J" | "K" | "L" | "M" | "N" | "O" | "P" | "Q" | "R" | "S" | "T" | "U" | "V" | "W" | "X" | "Y" | "Z" ; digit = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" ; white_space = ? white_space characters ? ; all_characters = ? all visible characters ? 
; }, %q{((rule program (seq 'PROGRAM' white_space identifier white_space 'BEGIN' white_space (star (seq assignment ";" white_space)) 'END.' )) (rule identifier (seq alphabetic_character (star (alt alphabetic_character digit)))) (rule number (seq (opt "-") digit (star digit))) (rule string (seq '"' (star (diff all_characters '"')) '"')) (rule assignment (seq identifier ":=" (seq (alt number identifier string)))) (rule alphabetic_character (alt "A" "B" "C" "D" "E" "F" "G" "H" "I" "J" "K" "L" "M" "N" "O" "P" "Q" "R" "S" "T" "U" "V" "W" "X" "Y" "Z" )) (rule digit (alt "0" "1" "2" "3" "4" "5" "6" "7" "8" "9")) (rule white_space (seq "? white_space characters ?")) (rule all_characters (seq "? all visible characters ?")))} ], "AA": [ %{AA = "A";}, %{((terminal AA (seq "A")))} ], "BB": [ %{BB = 3 * AA, "B";}, %{ ((terminal BB (seq (rept 3 3 AA) "B")))} ], "CC": [ %{CC = 3 * [AA], "C";}, %{((terminal CC (seq (rept 3 3 (opt AA)) "C")))} ], "DD": [ %{DD = {AA}, "D";}, %{((terminal DD (seq (star AA) "D")))} ], "EE": [ %{EE = AA, {AA}, "E";}, %{((terminal EE (seq AA (star AA) "E")))} ], "FF": [ %{FF = 3 * AA, 3 * [AA], "F";}, %{((terminal FF (seq (rept 3 3 AA) (rept 3 3 (opt AA)) "F")))} ], "GG": [ %{GG = {3 * AA}, "G";}, %{((terminal GG (seq (star (rept 3 3 AA)) "G")))} ], "space": [ %{space = ? US-ASCII character 32 ?;}, %{((rule space (seq "? 
US-ASCII character 32 ?")))} # XXX probably not ], "something": [ %{something = foo, ( bar );}, %{((rule something (seq foo (seq bar))))} ], "diff": [ %{first_terminal_character = terminal_character - "'" ;}, %{((rule first_terminal_character (diff terminal_character "'")))}, ], }.each do |title, (input, expect)| it title do input << "\n" unless input.end_with?("\n") expect(parse(input).to_sxp).to produce(expect, logger) end end end context "alternate terminal characters" do { "digits /": [ %{ digit_excluding_zero = "1" / "2" / "3" / "4" / "5" / "6" / "7" / "8" / "9" ; digit = "0" / digit_excluding_zero ; }, %{((rule digit_excluding_zero (alt "1" "2" "3" "4" "5" "6" "7" "8" "9")) (rule digit (alt "0" digit_excluding_zero)))} ], "digits !": [ %{ digit_excluding_zero = "1" ! "2" ! "3" ! "4" ! "5" ! "6" ! "7" ! "8" ! "9" ; digit = "0" ! digit_excluding_zero ; }, %{((rule digit_excluding_zero (alt "1" "2" "3" "4" "5" "6" "7" "8" "9")) (rule digit (alt "0" digit_excluding_zero)))} ], #"integer (/ /)": [ # %{integer = "0" | (/ "-" /), natural_number ;}, # %{((rule integer (alt "0" (seq (opt "-") natural_number))))} #], "natural number (: :)": [ %{natural_number = digit_excluding_zero, (: digit :) ;}, %{((rule natural_number (seq digit_excluding_zero (star digit))))} ], "legal meta_identifier .": [ 'rulename = "foo" .', %{((rule rulename (seq "foo")))} ], }.each do |title, (input, expect)| it title do input << "\n" unless input.end_with?("\n") expect(parse(input).to_sxp).to produce(expect, logger) end end end context "illegal syntax" do { "something": "something = foo ( bar );" }.each do |title, input| it title do expect {parse(input)}.to raise_error(SyntaxError) end end end it "parses ISO EBNF grammar" do gram = parse(File.open(File.expand_path("../../etc/iso-ebnf.isoebnf", __FILE__))) expect(gram).to be_valid end def parse(input, **options) @debug = [] EBNF.parse(input, debug: @debug, format: :isoebnf, **options) end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/native_spec.rb
spec/native_spec.rb
# coding: utf-8 $:.unshift "." require 'spec_helper' require 'ebnf' require 'sxp' describe EBNF::Native do let(:logger) {RDF::Spec.logger} after(:each) do |example| puts logger.to_s if example.exception && !example.exception.is_a?(RSpec::Expectations::ExpectationNotMetError) end context "rule variations" do { "legal rule name": [ 'rulename ::= "foo"', %{((rule rulename (seq "foo")))} ], "prolog": [ %{[2] Prolog ::= BaseDecl? PrefixDecl*}, %{((rule Prolog "2" (seq (opt BaseDecl) (star PrefixDecl))))} ], "aliteration": [ %{[2] declaration ::= '@terminals' | '@pass'}, %{((rule declaration "2" (alt '@terminals' '@pass')))}, ], "posfix": [ %{[9] postfix ::= primary ( [?*+] )?}, %{((rule postfix "9" (seq primary (opt (range "?*+")))))}, ], "diff": [ %{[18] STRING2 ::= "'" (CHAR - "'")* "'"}, %{((terminal STRING2 "18" (seq "'" (star (diff CHAR "'")) "'")))}, ], "IRIREF": [ %([18] IRIREF ::= '<' ([^<>"{}|^`\]-[#x00-#x20] | UCHAR)* '>'), %{((terminal IRIREF "18" (seq '<' (star (alt (diff (range "^<>\\\"{}|^`") (range "#x00-#x20")) UCHAR)) '>')))}, ], "UCHAR": [ %(UCHAR ::= ( '\\u' HEX HEX HEX HEX ) | ( '\\U' HEX HEX HEX HEX HEX HEX HEX HEX )), %{( (terminal UCHAR (alt (seq '\\\\u' HEX HEX HEX HEX) (seq '\\\\U' HEX HEX HEX HEX HEX HEX HEX HEX))) )} ] }.each do |title, (input, expect)| it title do expect(parse(input).to_sxp).to produce(expect, logger) end end context "without rule identifiers" do { "prolog": [ %{Prolog ::= BaseDecl? PrefixDecl*}, %{((rule Prolog (seq (opt BaseDecl) (star PrefixDecl))))} ], "prolog (with brackets)": [ %{<Prolog> ::= <BaseDecl>? 
<PrefixDecl>*}, %{((rule Prolog (seq (opt BaseDecl) (star PrefixDecl))))} ], "aliteration": [ %{declaration ::= '@terminals' | '@pass'}, %{((rule declaration (alt '@terminals' '@pass')))}, ], "posfix": [ %{postfix ::= primary ( [?*+] )?}, %{((rule postfix (seq primary (opt (range "?*+")))))}, ], "diff": [ %{STRING2 ::= "'" (CHAR - "'")* "'"}, %{((terminal STRING2 (seq "'" (star (diff CHAR "'")) "'")))}, ], "IRIREF": [ %(IRIREF ::= '<' ([^<>"{}|^`\]-[#x00-#x20] | UCHAR)* '>'), %{((terminal IRIREF (seq '<' (star (alt (diff (range "^<>\\\"{}|^`") (range "#x00-#x20")) UCHAR)) '>')))}, ], }.each do |title, (input, expect)| it title do expect(parse(input).to_sxp).to produce(expect, logger) end end end end describe "#expression" do { "'abc' def" => %{(seq 'abc' def)}, %{[0-9]} => %{(range "0-9")}, %{#x00B7} => %{(hex "#x00B7")}, %{[#x0300-#x036F]} => %{(range "#x0300-#x036F")}, %{[^<>'{}|^`]-[#x00-#x20]} => %{(diff (range "^<>'{}|^`") (range "#x00-#x20"))}, %{a b c} => %{(seq a b c)}, %{a? b c} => %{(seq (opt a) b c)}, %{a - b} => %{(diff a b)}, %{(a - b) - c} => %{(diff (diff a b) c)}, %{a b? c} => %{(seq a (opt b) c)}, %{a | b | c} => %{(alt a b c)}, %{a? b+ c*} => %{(seq (opt a) (plus b) (star c))}, %{foo | x xlist} => %{(alt foo (seq x xlist))}, %{a | (b - c)} => %{(alt a (diff b c))}, %{a b | c d} => %{(alt (seq a b) (seq c d))}, %{BaseDecl? 
PrefixDecl*} => %{(seq (opt BaseDecl) (star PrefixDecl))}, %{NCCHAR1 | '-' | [0-9] | #x00B7 | [#x0300-#x036F] | [#x203F-#x2040]} => %{(alt NCCHAR1 '-' (range "0-9") (hex "#x00B7") (range "#x0300-#x036F") (range "#x203F-#x2040"))}, %{'<' ([^<>"{}|^`\]-[#x00-#x20] | UCHAR)* '>'} => %{(seq '<' (star (alt (diff (range "^<>\\\"{}|^`") (range "#x00-#x20")) UCHAR)) '>')}, }.each do |input, expected| it "given #{input.inspect} produces #{expected}" do rule = parse("rule ::= #{input}").ast.first expect(rule.expr.to_sxp).to produce(expected, @debug) end end end context "illegal syntax" do { "diff missing second operand": %{rule ::= a -}, "unrecognized terminal" => %{rule ::= %foo%}, }.each do |title, input| it title do expect {parse(input)}.to raise_error(SyntaxError) end end end it "parses EBNF grammar" do gram = parse(File.open(File.expand_path("../../etc/ebnf.ebnf", __FILE__))) expect(gram).to be_valid end def parse(input, **options) @debug = [] EBNF.parse(input, debug: @debug, format: :native, **options) end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/peg_spec.rb
spec/peg_spec.rb
# coding: utf-8 $:.unshift "." require 'spec_helper' require 'ebnf' require 'sxp' describe EBNF::PEG do describe "#make_peg" do { %{[2] Prolog ::= BaseDecl? PrefixDecl*} => %{((rule Prolog "2" (seq _Prolog_1 _Prolog_2)) (rule _Prolog_1 "2.1" (opt BaseDecl)) (rule _Prolog_2 "2.2" (star PrefixDecl)))}, %{ [9] primary ::= HEX | RANGE | O_RANGE | STRING1 | STRING2 | '(' expression ')' } => %{((rule primary "9" (alt HEX RANGE O_RANGE STRING1 STRING2 _primary_1)) (rule _primary_1 "9.1" (seq '(' expression ')')))}, %{ primary ::= HEX | RANGE | O_RANGE | STRING1 | STRING2 | '(' expression ')' } => %{((rule primary (alt HEX RANGE O_RANGE STRING1 STRING2 _primary_1)) (rule _primary_1 (seq '(' expression ')')))}, %{ <primary> ::= <HEX> | <RANGE> | <O_RANGE> | <STRING1> | <STRING2> | '(' <expression> ')' } => %{((rule primary (alt HEX RANGE O_RANGE STRING1 STRING2 _primary_1)) (rule _primary_1 (seq '(' expression ')')))}, %{[1] start ::= A B C} => %{((rule start "1" (seq A B C)))}, %{[1] start ::= A B? C* D+} => %{((rule start "1" (seq A _start_1 _start_2 _start_3)) (rule _start_1 "1.1" (opt B)) (rule _start_2 "1.2" (star C)) (rule _start_3 "1.3" (plus D)))}, %{[1] start ::= A (B C) D} => %{((rule start "1" (seq A _start_1 D)) (rule _start_1 "1.1" (seq B C)))}, %{[1] start ::= A (B) C} => %{((rule start "1" (seq A B C)))}, %{[1] start ::= A (B (C D)) (E F)} => %{((rule start "1" (seq A _start_1 _start_2)) (rule _start_1 "1.1" (seq B _start_3)) (rule _start_3 "1.3" (seq C D)) (rule _start_2 "1.2" (seq E F)))}, %{[1] r1 ::= (A B) C [2] r2 ::= (A B) E} => %{((rule r1 "1" (seq _r1_1 C)) (rule _r1_1 "1.1" (seq A B)) (rule r2 "2" (seq _r2_1 E)) (rule _r2_1 "2.1" (seq A B)))} }.each do |input, expected| it "parses #{input.inspect}" do expect(parse(input).make_peg.ast.to_sxp).to produce(expected, @debug) end end end def parse(value, **options) @debug = [] options = {debug: @debug, format: :native}.merge(options) EBNF::Base.new(value, **options) end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/unescape_spec.rb
spec/unescape_spec.rb
# coding: utf-8 $:.unshift ".." require 'spec_helper' require 'ebnf' describe EBNF::Unescape do describe ".unescape_codepoints" do # @see https://www.w3.org/TR/rdf-sparql-query/#codepointEscape it "unescapes \\uXXXX codepoint escape sequences" do inputs = { %q(\\u0020) => %q( ), %q(<ab\\u00E9xy>) => %Q(<ab\xC3\xA9xy>), %q(\\u03B1:a) => %Q(\xCE\xB1:a), %q(a\\u003Ab) => %Q(a\x3Ab), } inputs.each do |input, output| expect(EBNF::Unescape.unescape_codepoints(input)).to eq output end end it "unescapes \\UXXXXXXXX codepoint escape sequences" do inputs = { %q(\\U00000020) => %q( ), %q(\\U00010000) => %Q(\xF0\x90\x80\x80), %q(\\U000EFFFF) => %Q(\xF3\xAF\xBF\xBF), } inputs.each do |input, output| expect(EBNF::Unescape.unescape_codepoints(input)).to eq output end end context "escaped strings" do { 'Dürst' => 'D\\u00FCrst', "é" => '\\u00E9', "€" => '\\u20AC', "resumé" => 'resum\\u00E9', }.each_pair do |unescaped, escaped| it "unescapes #{unescaped.inspect}" do expect(EBNF::Unescape.unescape_codepoints(escaped)).to eq unescaped end end end end describe ".unescape_string" do # @see https://www.w3.org/TR/rdf-sparql-query/#grammarEscapes context "escape sequences" do EBNF::Unescape::ESCAPE_CHARS.each do |escaped, unescaped| it "unescapes #{unescaped.inspect}" do expect(EBNF::Unescape.unescape_string(escaped)).to eq unescaped end end end context "escaped strings" do { 'simple literal' => 'simple literal', 'backslash:\\' => 'backslash:\\\\', 'dquote:"' => 'dquote:\\"', "newline:\n" => 'newline:\\n', "return\r" => 'return\\r', "tab:\t" => 'tab:\\t', }.each_pair do |unescaped, escaped| it "unescapes #{unescaped.inspect}" do expect(EBNF::Unescape.unescape_string(escaped)).to eq unescaped end end end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/base_spec.rb
spec/base_spec.rb
# coding: utf-8 $:.unshift "." require 'spec_helper' require 'ebnf' require 'sxp' require 'rdf/turtle' describe EBNF::Base do subject {PARSED_EBNF_GRAMMAR.dup} describe "#initialize" do { %{[2] Prolog ::= BaseDecl? PrefixDecl*} => %{((rule Prolog "2" (seq (opt BaseDecl) (star PrefixDecl))))}, %{ @terminals terminal ::= [A-Z]+ } => %{((terminals _terminals (seq)) (terminal terminal (plus (range "A-Z"))))}, %{ [9] primary ::= HEX | RANGE | O_RANGE | STRING1 | STRING2 | '(' expression ')' } => %{((rule primary "9" (alt HEX RANGE O_RANGE STRING1 STRING2 (seq '(' expression ')'))))}, %{#[1] rule ::= 'FOO'} => %{()}, %{//[1] rule ::= 'FOO'} => %{()}, %{[18] SolutionModifier ::= _SolutionModifier_1 _SolutionModifier_2} => %{((rule SolutionModifier "18" (seq _SolutionModifier_1 _SolutionModifier_2)))}, %{[18.1] _SolutionModifier_1 ::= _empty | GroupClause} => %{((rule _SolutionModifier_1 "18.1" (alt _empty GroupClause)))}, %q{[18] STRING1 ::= '"' (CHAR - '"')* '"'} => %q{((terminal STRING1 "18" (seq '"' (star (diff CHAR '"')) '"')))}, %q{[161s] WS ::= #x20 | #x9 | #xD | #xA} => %q{((terminal WS "161s" (alt (hex "#x20") (hex "#x9") (hex "#xD") (hex "#xA"))))}, %q{[1] shexDoc ::= directive* # leading CODE} => %q{((rule shexDoc "1" (star directive)))}, %q{[1] shexDoc ::= directive* /* leading CODE */} => %q{((rule shexDoc "1" (star directive)))}, %q{[1] shexDoc ::= directive* (* leading CODE *)} => %q{((rule shexDoc "1" (star directive)))}, %q{[1] shexDoc ::= directive* // leading CODE} => %q{((rule shexDoc "1" (star directive)))}, %q{[1] shexDoc ::= /* leading CODE */ directive*} => %q{((rule shexDoc "1" (star directive)))}, %q{[1] shexDoc ::= (* leading CODE *) directive*} => %q{((rule shexDoc "1" (star directive)))}, %q{[156s] STRING_LITERAL1 ::= "'" ([^#x27#x5C#xA#xD] | ECHAR | UCHAR)* "'" /* #x27=' #x5C=\ #xA=new line #xD=carriage return */} => %q{((terminal STRING_LITERAL1 "156s" (seq "'" (star (alt (range "^#x27#x5C#xA#xD") ECHAR UCHAR)) "'")) )} }.each do |input, 
expected| it "parses #{input.inspect}" do expect(parse(input).to_sxp).to produce(expected, @debug) end it "parses generated SXP for #{input.inspect}" do ast = parse(expected, format: :sxp).ast ast.each {|r| expect(r).to be_a(EBNF::Rule)} ast.to_sxp expect(ast.to_sxp).to produce(expected, @debug) end end it "rejects unknown format" do expect {parse("foo", format: :unknown)}.to raise_error "unknown input format :unknown" end end describe "#renumber!" do it "creates identifiers for grammars without identifiers" do gram = EBNF.parse("a ::= b d ::= e") gram.renumber! expect(gram.ast.map(&:id)).to eq %w{1 2} end it "renumbers grammars with identifiers" do gram = EBNF.parse("[10] a ::= b [20] d ::= e") gram.renumber! expect(gram.ast.map(&:id)).to eq %w{1 2} end end describe "#validate!" do let(:simple) {EBNF.parse("a ::= b", format: :native)} it "notes invalid grammar" do expect do expect {simple.validate!}.to raise_error SyntaxError, "In rule a: No rule found for b" end.to write(:something).to(:error) end it "validates EBNF" do expect {subject.validate!}.not_to raise_error end end describe "#valid?" 
do let(:simple) {EBNF.parse("a ::= b", format: :native)} it "notes invalid grammar" do expect do expect(simple.valid?).to be_falsey end.to write(:something).to(:error) end it "validates EBNF" do expect(subject).to be_valid end end describe "#each" do it "yields each rule" do rules = subject.ast.select {|r| r.rule?} expect {|b| subject.each(:rule, &b)}.to yield_control.exactly(rules.length).times end it "yields each terminal" do terminals = subject.ast.select {|r| r.terminal?} expect {|b| subject.each(:terminal, &b)}.to yield_control.exactly(terminals.length).times end end describe "#to_sxp" do specify {expect(subject.to_sxp).to include("(rule ebnf")} end describe "#to_s" do specify {expect(subject.to_s).to include("ebnf")} end describe "#to_html" do specify {expect(subject.to_s).to include("ebnf")} end describe "#to_ruby" do specify {expect {subject.to_ruby}.to write(:something).to(:output)} end describe "#to_ttl" do let(:reader) {RDF::Turtle::Reader.new(subject.to_ttl, base_uri: 'http://example.org/')} specify {expect(reader).to be_valid} end describe "#dup" do specify {expect(parse(%{[2] Prolog ::= BaseDecl? PrefixDecl*}).dup).to be_a(EBNF::Base)} end describe "#find_rule" do it "finds ebnf" do expect(subject.find_rule(:ebnf).sym).to eql :ebnf end end def parse(value, **options) @debug = [] options = {debug: @debug, format: :native}.merge(options) EBNF::Base.new(value, **options) end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/parser_spec.rb
spec/parser_spec.rb
# coding: utf-8 $:.unshift "." require 'spec_helper' require 'ebnf' require 'sxp' describe EBNF::Parser do let(:logger) {RDF::Spec.logger} after(:each) do |example| puts logger.to_s if example.exception && !example.exception.is_a?(RSpec::Expectations::ExpectationNotMetError) end context "rule variations" do { "legal rule name": [ 'rulename ::= "foo"', %{((rule rulename (seq "foo")))} ], "prolog": [ %{[2] Prolog ::= BaseDecl? PrefixDecl*}, %{((rule Prolog "2" (seq (opt BaseDecl) (star PrefixDecl))))} ], "aliteration": [ %{[2] declaration ::= '@terminals' | '@pass'}, %{((rule declaration "2" (alt '@terminals' '@pass')))}, ], "posfix": [ %{[9] postfix ::= primary ( [?*+] )?}, %{((rule postfix "9" (seq primary (opt (range "?*+")))))}, ], "diff": [ %{[18] STRING2 ::= "'" (CHAR - "'")* "'"}, %{((terminal STRING2 "18" (seq "'" (star (diff CHAR "'")) "'")))}, ], "IRIREF": [ %([18] IRIREF ::= '<' ([^<>"{}|^`\]-[#x00-#x20] | UCHAR)* '>'), %{((terminal IRIREF "18" (seq '<' (star (alt (diff (range "^<>\\\"{}|^`") (range "#x00-#x20")) UCHAR)) '>')))}, ], "minimal whitespace": [ %{[xx]minimal::=whitespace[yy]whitespace::=" "}, %{((rule minimal "xx" (seq whitespace)) (rule whitespace "yy" (seq " ")))} ] }.each do |title, (input, expect)| it title do expect(parse(input).to_sxp).to produce(expect, logger) end end context "without rule identifiers" do { "prolog": [ %{Prolog ::= BaseDecl? 
PrefixDecl*}, %{((rule Prolog (seq (opt BaseDecl) (star PrefixDecl))))} ], "aliteration": [ %{declaration ::= '@terminals' | '@pass'}, %{((rule declaration (alt '@terminals' '@pass')))}, ], "posfix": [ %{postfix ::= primary ( [?*+] )?}, %{((rule postfix (seq primary (opt (range "?*+")))))}, ], "diff": [ %{STRING2 ::= "'" (CHAR - "'")* "'"}, %{((terminal STRING2 (seq "'" (star (diff CHAR "'")) "'")))}, ], "IRIREF": [ %(IRIREF ::= '<' ([^<>"{}|^`\]-[#x00-#x20] | UCHAR)* '>'), %{((terminal IRIREF (seq '<' (star (alt (diff (range "^<>\\\"{}|^`") (range "#x00-#x20")) UCHAR)) '>')))}, ], }.each do |title, (input, expect)| it title do expect(parse(input).to_sxp).to produce(expect, logger) end end end end describe "#expression" do { "'abc' def" => %{(seq 'abc' def)}, %{[0-9]} => %{(range "0-9")}, %{#x00B7} => %{(hex "#x00B7")}, %{[#x0300-#x036F]} => %{(range "#x0300-#x036F")}, %{[^<>'{}|^`]-[#x00-#x20]} => %{(diff (range "^<>'{}|^`") (range "#x00-#x20"))}, %{a b c} => %{(seq a b c)}, %{a? b c} => %{(seq (opt a) b c)}, %{a - b} => %{(diff a b)}, %{(a - b) - c} => %{(diff (diff a b) c)}, %{a b? c} => %{(seq a (opt b) c)}, %{a | b | c} => %{(alt a b c)}, %{a? b+ c*} => %{(seq (opt a) (plus b) (star c))}, %{foo | x xlist} => %{(alt foo (seq x xlist))}, %{a | (b - c)} => %{(alt a (diff b c))}, %{a b | c d} => %{(alt (seq a b) (seq c d))}, %{[a-z]} => %{(range "a-z")}, %{[a-zA-Z]} => %{(range "a-zA-Z")}, %{[#x20-#x22]} => %{(range "#x20-#x22")}, %{[abc]} => %{(range "abc")}, %{[abc-]} => %{(range "abc-")}, %{[#x20#x21#x22]} => %{(range "#x20#x21#x22")}, %{BaseDecl? 
PrefixDecl*} => %{(seq (opt BaseDecl) (star PrefixDecl))}, %{NCCHAR1 | '-' | [0-9] | #x00B7 | [#x0300-#x036F] | [#x203F-#x2040]} => %{(alt NCCHAR1 '-' (range "0-9") (hex "#x00B7") (range "#x0300-#x036F") (range "#x203F-#x2040"))}, %{'<' ([^<>"{}|^`\]-[#x00-#x20] | UCHAR)* '>'} => %{(seq '<' (star (alt (diff (range "^<>\\\"{}|^`") (range "#x00-#x20")) UCHAR)) '>')}, }.each do |input, expected| it "given #{input.inspect} produces #{expected}" do rule = parse("rule ::= #{input}").ast.first expect(rule.expr.to_sxp).to produce(expected, @debug) end end end context "illegal syntax" do { "illegal rule name": %{$rule.name ::= foo}, "diff missing second operand": %{rule ::= a -}, "unrecognized terminal" => %{rule ::= %foo%}, "unopened paren" => %{rule ::= a) b c} }.each do |title, input| it title do expect {parse(input)}.to raise_error(SyntaxError) end end end it "parses EBNF grammar" do gram = parse(File.open(File.expand_path("../../etc/ebnf.ebnf", __FILE__))) expect(gram).to be_valid end def parse(input, **options) @debug = [] EBNF.parse(input, debug: @debug, format: :ebnf, **options) end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/ebnf_spec.rb
spec/ebnf_spec.rb
# coding: utf-8 $:.unshift "." require 'spec_helper' require 'ebnf' require 'sxp' describe EBNF do describe ".parse" do { %{[2] Prolog ::= BaseDecl? PrefixDecl*} => %{((rule Prolog "2" (seq (opt BaseDecl) (star PrefixDecl))))}, %{ @terminals [3] terminal ::= [A-Z]+ } => %{((terminals _terminals (seq)) (terminal terminal "3" (plus (range "A-Z"))))}, %{ [9] primary ::= HEX | RANGE | O_RANGE | STRING1 | STRING2 | '(' expression ')' } => %{((rule primary "9" (alt HEX RANGE O_RANGE STRING1 STRING2 (seq '(' expression ')'))))}, %{ primary ::= HEX | RANGE | O_RANGE | STRING1 | STRING2 | '(' expression ')' } => %{((rule primary (alt HEX RANGE O_RANGE STRING1 STRING2 (seq '(' expression ')'))))}, %{ <primary> ::= <HEX> | <RANGE> | <O_RANGE> | <STRING1> | <STRING2> | '(' <expression> ')' } => %{((rule primary (alt HEX RANGE O_RANGE STRING1 STRING2 (seq '(' expression ')'))))}, }.each do |input, expected| context input do subject {EBNF.parse(input)} it "creates ast" do expect(subject.ast.to_sxp).to produce(expected, []) end it "#to_sxp" do expect(subject.to_sxp).to produce(expected) end it "#to_ttl" do expect(subject.to_ttl("ex", "http://example.org/")).not_to be_empty end it "#to_html" do expect(subject.to_html).not_to be_empty end it "#to_s" do expect(subject.to_s).not_to be_empty end end end context "README" do let(:ebnf) {PARSED_EBNF_GRAMMAR.dup} subject {ebnf} it "creates ast" do expect(subject.ast.to_sxp).not_to be_empty end it "#to_sxp" do expect(subject.to_sxp).not_to be_empty end it "#to_ttl" do expect(subject.to_ttl("ex", "http://example.org/")).not_to be_empty end it "#to_html" do expect(subject.to_html).not_to be_empty end it "#to_s" do expect(subject.to_s).not_to be_empty end context "LL1" do before {subject.make_bnf} before do subject.first_follow(:ebnf) subject.build_tables end it "#to_ruby" do expect {subject.to_ruby}.to write(:something).to(:output) end end context "PEG" do before {subject.make_peg} it "#to_ruby" do expect {subject.to_ruby}.to 
write(:something).to(:output) end end end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/spec_helper.rb
spec/spec_helper.rb
$:.unshift(File.expand_path("../../lib", __FILE__)) $:.unshift File.dirname(__FILE__) require 'bundler/setup' require 'amazing_print' require 'rdf/spec' require 'rdf/spec/matchers' require 'rspec' require 'rspec/matchers' require 'rspec/its' require 'matchers' begin require 'simplecov' require 'simplecov-lcov' SimpleCov::Formatter::LcovFormatter.config do |config| #Coveralls is coverage by default/lcov. Send info results config.report_with_single_file = true config.single_report_path = 'coverage/lcov.info' end SimpleCov.formatter = SimpleCov::Formatter::MultiFormatter.new([ SimpleCov::Formatter::HTMLFormatter, SimpleCov::Formatter::LcovFormatter ]) SimpleCov.start do add_filter "/spec/" end rescue LoadError => e STDERR.puts "Coverage Skipped: #{e.message}" end require 'ebnf' ::RSpec.configure do |c| c.filter_run focus: true c.run_all_when_everything_filtered = true c.filter_run_excluding ruby: ->(version) do case version.to_s when "!jruby" RUBY_ENGINE == "jruby" when /^> (.*)/ !(RUBY_VERSION.to_s > $1) else !(RUBY_VERSION.to_s =~ /^#{version.to_s}/) end end end RSpec::Matchers.define :be_valid_html do match do |actual| return true unless Nokogiri.const_defined?(:HTML5) root = Nokogiri::HTML5("<!DOCTYPE html>" + actual, max_parse_errors: 1000) @errors = Array(root && root.errors.map(&:to_s)) @errors.empty? end failure_message do |actual| "expected no errors, was #{@errors.join("\n")}\n" + actual end end PARSED_EBNF_GRAMMAR = EBNF.parse(File.open(File.expand_path("../../etc/ebnf.ebnf", __FILE__)), format: :native).freeze
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/peg/rule_spec.rb
spec/peg/rule_spec.rb
# coding: utf-8 $:.unshift ".." require 'spec_helper' require 'ebnf' describe EBNF::PEG::Rule do describe "#parse" do let(:parser_class) {double("PEG Parser Class", terminal_regexps: {})} let(:parser) { double("PEG Parser", whitespace: /\s+/, packrat: {}, update_furthest_failure: true, class: parser_class) } context "non-terminal rules" do { "(alt 'A' 'B') with 'A'" => { rule: [:alt, "A", "B"], input: "A", expect: "A" }, "(alt 'A' 'B') with ' A '" => { rule: [:alt, "A", "B"], input: " A ", expect: "A" }, "(alt 'A' 'B') with 'B'" => { rule: [:alt, "A", "B"], input: "B", expect: "B" }, "(alt 'A' 'B') with 'C'" => { rule: [:alt, "A", "B"], input: "C", expect: :unmatched }, "(not A) with 'A'" => { rule: [:not, "A"], input: "A", expect: :unmatched }, "(not A) with 'B'" => { rule: [:not, "A"], input: "B", expect: nil }, "(opt A) with 'A'" => { rule: [:opt, "A"], input: "A", expect: "A" }, "(opt A) with 'A' and whitespace" => { rule: [:opt, "A"], input: " A", expect: "A" }, "(opt A) with 'B'" => { rule: [:opt, "A"], input: "B", expect: nil }, "(plus A) with ''" => { rule: [:plus, "A"], input: "", expect: :unmatched }, "(plus A) with 'A'" => { rule: [:plus, "A"], input: "A", expect: %w(A) }, "(plus A) with 'A B'" => { rule: [:plus, "A"], input: "A B", expect: %w(A) }, "(plus A) with 'AAA'" => { rule: [:plus, "A"], input: "AAA", expect: %w(A A A) }, "(plus A) with ' A A A '" => { rule: [:plus, "A"], input: " A A A ", expect: %w(A A A) }, "(rept 1 2 A) with ' A A A '" => { rule: [:rept, 1, 2, "A"], input: " A A A ", expect: %w(A A) }, "(rept 1 4 A) with ' A A A '" => { rule: [:rept, 1, 4, "A"], input: " A A A ", expect: %w(A A A) }, "(rept 4 10 A) with ' A A A '" => { rule: [:rept, 4, 10, "A"], input: " A A A ", expect: :unmatched }, "(seq 'A' 'B')" => { rule: [:seq, "A", "B"], input: "A B", expect: [{A: "A"}, {B: "B"}] }, "(seq 'A' 'B') with no whitespace" => { rule: [:seq, "A", "B"], input: "AB", expect: [{A: "A"}, {B: "B"}] }, "(seq 'A' 'B') with added whitespace" => { 
rule: [:seq, "A", "B"], input: " A B ", expect: [{A: "A"}, {B: "B"}] }, "(seq 'A' 'B') with 'A'" => { rule: [:seq, "A", "B"], input: " A ", expect: :unmatched }, "(seq 'A' 'B') with 'AC'" => { rule: [:seq, "A", "B"], input: "AC", expect: :unmatched }, "(star A) with ''" => { rule: [:star, "A"], input: "", expect: [] }, "(star A) with 'A'" => { rule: [:star, "A"], input: "A", expect: %w(A) }, "(star A) with 'A B'" => { rule: [:star, "A"], input: "A B", expect: %w(A) }, "(star A) with 'AAA'" => { rule: [:star, "A"], input: "AAA", expect: %w(A A A) }, "(star A) with ' A A A '" => { rule: [:star, "A"], input: " A A A ", expect: %w(A A A) }, }.each do |name, params| it name do rule = EBNF::Rule.new(:rule, "0", params[:rule]).extend(EBNF::PEG::Rule) rule.parser = parser expect(parser).to receive(:onStart).with(Symbol).and_return({}) expect(parser).to receive(:onFinish).with(params[:expect]).and_return(params[:expect]) expect(parser).not_to receive(:onTerminal).with(Symbol) expect(rule.parse(EBNF::LL1::Scanner.new(params[:input]))).to eql(params[:expect]) end end context "with as_hash: true" do { "(alt 'A' 'B') with 'A'" => { rule: [:alt, "A", "B"], input: "A", expect: "A" }, "(alt 'A' 'B') with ' A '" => { rule: [:alt, "A", "B"], input: " A ", expect: "A" }, "(alt 'A' 'B') with 'B'" => { rule: [:alt, "A", "B"], input: "B", expect: "B" }, "(alt 'A' 'B') with 'C'" => { rule: [:alt, "A", "B"], input: "C", expect: :unmatched }, "(not A) with 'A'" => { rule: [:not, "A"], input: "A", expect: :unmatched }, "(not A) with 'B'" => { rule: [:not, "A"], input: "B", expect: nil }, "(opt A) with 'A'" => { rule: [:opt, "A"], input: "A", expect: "A" }, "(opt A) with 'A' and whitespace" => { rule: [:opt, "A"], input: " A", expect: "A" }, "(opt A) with 'B'" => { rule: [:opt, "A"], input: "B", expect: nil }, "(plus A) with ''" => { rule: [:plus, "A"], input: "", expect: :unmatched }, "(plus A) with 'A'" => { rule: [:plus, "A"], input: "A", expect: %w(A) }, "(plus A) with 'A B'" => { rule: 
[:plus, "A"], input: "A B", expect: %w(A) }, "(plus A) with 'AAA'" => { rule: [:plus, "A"], input: "AAA", expect: %w(A A A) }, "(plus A) with ' A A A '" => { rule: [:plus, "A"], input: " A A A ", expect: %w(A A A) }, "(rept 1 2 A) with ' A A A '" => { rule: [:rept, 1, 2, "A"], input: " A A A ", expect: %w(A A) }, "(rept 1 4 A) with ' A A A '" => { rule: [:rept, 1, 4, "A"], input: " A A A ", expect: %w(A A A) }, "(rept 4 10 A) with ' A A A '" => { rule: [:rept, 4, 10, "A"], input: " A A A ", expect: :unmatched }, "(seq 'A' 'B')" => { rule: [:seq, "A", "B"], input: "A B", expect: {A: "A", B: "B"} }, "(seq 'A' 'B') with no whitespace" => { rule: [:seq, "A", "B"], input: "AB", expect: {A: "A", B: "B"} }, "(seq 'A' 'B') with added whitespace" => { rule: [:seq, "A", "B"], input: " A B ", expect: {A: "A", B: "B"} }, "(seq 'A' 'B') with 'A'" => { rule: [:seq, "A", "B"], input: " A ", expect: :unmatched }, "(seq 'A' 'B') with 'AC'" => { rule: [:seq, "A", "B"], input: "AC", expect: :unmatched }, "(star A) with ''" => { rule: [:star, "A"], input: "", expect: [] }, "(star A) with 'A'" => { rule: [:star, "A"], input: "A", expect: %w(A) }, "(star A) with 'A B'" => { rule: [:star, "A"], input: "A B", expect: %w(A) }, "(star A) with 'AAA'" => { rule: [:star, "A"], input: "AAA", expect: %w(A A A) }, "(star A) with ' A A A '" => { rule: [:star, "A"], input: " A A A ", expect: %w(A A A) }, }.each do |name, params| it name do rule = EBNF::Rule.new(:rule, "0", params[:rule]).extend(EBNF::PEG::Rule) rule.parser = parser expect(parser).to receive(:onStart).with(Symbol).and_return({as_hash: true}) expect(parser).to receive(:onFinish).with(params[:expect]).and_return(params[:expect]) expect(parser).not_to receive(:onTerminal).with(Symbol) expect(rule.parse(EBNF::LL1::Scanner.new(params[:input]))).to eql(params[:expect]) end end end end context "terminal rules" do { "(diff 'A' 'A') with 'A'" => { rule: [:diff, "A", "A"], input: "A", expect: :unmatched }, "(diff 'A' 'B') with 'A'" => { rule: 
[:diff, "A", "B"], input: "A", expect: "A" }, "(diff 'A' 'B') with 'B'" => { rule: [:diff, "A", "B"], input: "B", expect: :unmatched }, "(diff 'A' 'B') with ' A' (whitespace)" => { rule: [:diff, "A", "B"], input: " A", expect: :unmatched }, "(hex #x41) with 'A'" => { rule: [:hex, "#x41"], input: "A", expect: "A" }, "(hex #x41) with ' A' (whitespace)" => { rule: [:hex, "#x41"], input: " A", expect: :unmatched }, "(hex #x41) with 'B'" => { rule: [:hex, "#x41"], input: "B", expect: :unmatched }, '(istr "foo") with "foo"' => { rule: [:istr, "foo"], input: "foo", expect: "foo" }, '(istr "foo") with "FOO"' => { rule: [:istr, "foo"], input: "FOO", expect: "FOO" }, '(istr "fOo") with "FoO"' => { rule: [:istr, "fOo"], input: "FoO", expect: "FoO" }, "(range A-C) with 'A'" => { rule: [:range, "A-C"], input: "A", expect: "A" }, "(range A-C) with ' A' (whitespace)" => { rule: [:range, "A-C"], input: " A", expect: :unmatched }, "(range A-C) with 'B'" => { rule: [:range, "A-C"], input: "B", expect: "B" }, "(range A-C) with 'D'" => { rule: [:range, "A-C"], input: "D", expect: :unmatched }, "(range #x41-#x43) with 'A'" => { rule: [:range, "#x41-#x43"], input: "A", expect: "A" }, "(range #x41-#x43) with ' A' (whitespace)" => { rule: [:range, "#x41-#x43"], input: " A", expect: :unmatched }, "(range #x41-#x43) with 'B'" => { rule: [:range, "#x41-#x43"], input: "B", expect: "B" }, "(range #x41-#x43) with 'D'" => { rule: [:range, "#x41-#x43"], input: "D", expect: :unmatched }, "(range A-Ca-c) with 'a'" => { rule: [:range, "A-Ca-c"], input: "a", expect: "a" }, "(range A-Ca-c) with ' a' (whitespace)" => { rule: [:range, "A-Ca-c"], input: " a", expect: :unmatched }, "(range A-Ca-c) with 'b'" => { rule: [:range, "A-Ca-c"], input: "b", expect: "b" }, "(range A-Ca-c) with 'd'" => { rule: [:range, "A-Ca-c"], input: "d", expect: :unmatched }, "(range #x41-#x43#x61-#x63) with 'a'" => { rule: [:range, "#x41-#x43#x61-#x63"], input: "a", expect: "a" }, "(range #x41-#x43#x61-#x63) with ' a' 
(whitespace)" => { rule: [:range, "#x41-#x43#x61-#x63"], input: " a", expect: :unmatched }, "(range #x41-#x43#x61-#x63) with 'b'" => { rule: [:range, "#x41-#x43#x61-#x63"], input: "b", expect: "b" }, "(range #x41-#x43#x61-#x63) with 'd'" => { rule: [:range, "#x41-#x43#x61-#x63"], input: "d", expect: :unmatched }, }.each do |name, params| it name do rule = EBNF::Rule.new(:rule, "0", params[:rule], kind: :terminal).extend(EBNF::PEG::Rule) rule.parser = parser expect(parser).to receive(:onStart).with(Symbol).and_return({}) expect(parser).to receive(:onFinish).with(params[:expect]).and_return(params[:expect]) expect(parser).not_to receive(:onTerminal) expect(parser).to receive(:terminal_regexp).with(:rule) expect(rule.parse(EBNF::LL1::Scanner.new(params[:input]))).to eql(params[:expect]) end end end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/peg/parser_spec.rb
spec/peg/parser_spec.rb
# coding: utf-8 $:.unshift ".." require 'spec_helper' require 'ebnf' describe EBNF::PEG::Parser do class PegParserTest include EBNF::PEG::Parser end before(:all) { PegParserTest.start_production(:term) {"foo"} PegParserTest.production(:term) {"foo"} PegParserTest.start_production(:toLower, insensitive_strings: :lower) {|value| value} PegParserTest.start_production(:toUpper, insensitive_strings: :upper) {|value| value} PegParserTest.terminal(:escape, /escape/) {"foo"} PegParserTest.terminal(:unescape, /unescape/, unescape: true) {"foo"} } let(:logger) {RDF::Spec.logger} after(:each) do |example| puts logger.to_s if example.exception && !example.exception.is_a?(RSpec::Expectations::ExpectationNotMetError) end describe "ClassMethods" do describe "production" do it "adds as a start_handler" do expect(PegParserTest.start_handlers.keys).to eq [:term, :toLower, :toUpper] expect(PegParserTest.start_handlers[:term]).to be_a(Proc) end it "adds as a production_handler" do expect(PegParserTest.production_handlers.keys).to eq [:term] expect(PegParserTest.production_handlers[:term]).to include(Proc, FalseClass) end end describe "terminal" do it "adds as a terminal_handler" do expect(PegParserTest.terminal_handlers.keys).to include(:escape, :unescape) expect(PegParserTest.terminal_handlers[:escape]).to be_a(Proc) expect(PegParserTest.terminal_handlers[:unescape]).to be_a(Proc) end end end describe "#parse" do subject {PegParserTest.new} it "raises error if starting production not defined" do rule = EBNF::Rule.new(:rule, "0", [:seq, "foo"], kind: :terminal).extend(EBNF::PEG::Rule) expect { subject.parse("foo", "none", [rule]) }.to raise_error(EBNF::PEG::Parser::Error, "Starting production :none not defined") end context "simplest grammar" do let(:start) {:expression} let(:grammar) {%{( (rule expression "1" (alt sum integer)) (rule sum "2" (seq integer operator expression)) (terminal operator "3" (seq "+")) (terminal integer "4" (plus (range "0-9"))) )}} let(:rules) 
{EBNF.parse(grammar, format: :sxp).make_peg.ast} { "1" => "1", "10" => "10", "1+1" => [{integer: "1"}, {operator: "+"}, {expression: "1"}], " 1 + 2 " => [{integer: "1"}, {operator: "+"}, {expression: "2"}], "1 + 2 + 3" => [ {integer: "1"}, {operator: "+"}, {expression: [ {integer: "2"}, {operator: "+"}, {expression: "3"} ]}] }.each do |input, expected| it "parses #{input.inspect} to #{expected.inspect}" do output = PegParserTest.new.parse(input, start, rules, debug: 3, logger: logger) expect(output).to produce(expected, logger) end end { "" => %r{syntax error, expecting "0-9", :integer }, "10 x 1" => %r{syntax error, expecting "\+", :operator}, "1-1" => %r{syntax error, expecting "0-9", "\+", :operator}, "foo" => %r{syntax error, expecting "0-9", :integer}, "3 1 + 2" => %r{syntax error, expecting "\+", :operator} }.each do |input, expected| it "fails to parse #{input.inspect} to #{expected.inspect}" do expect { PegParserTest.new.parse(input, start, rules, debug: 3, logger: logger) }.to raise_error(EBNF::PEG::Parser::Error, expected) end end end context "case insensitive string matching" do let(:start) {:expression} let(:grammar) {%{( (rule expression "1" (alt toUpper toLower)) (rule toUpper "2" (seq "uPpEr")) (rule toLower "3" (seq "LoWeR")) )}} let(:rules) {EBNF.parse(grammar, format: :sxp).make_peg.ast} { "UPPER" => [{uPpEr: "UPPER"}], "upper" => [{uPpEr: "UPPER"}], "LOWER" => [{LoWeR: "lower"}], "lower" => [{LoWeR: "lower"}], }.each do |input, expected| it "parses #{input.inspect} to #{expected.inspect}" do output = PegParserTest.new.parse(input, start, rules, debug: 3, logger: logger) expect(output).to produce(expected, logger) end end end context "with backtracking" do let(:start) {:expression} let(:grammar) {%{( (rule expression "1" (alt (seq integer "+" integer) (seq integer "*" integer))) (terminal integer "2" (plus (range "0-9"))) )}} let(:rules) {EBNF.parse(grammar, format: :sxp).make_peg.ast} { "1+1" => [{integer: "1"}, {"+": "+"}, {integer: "1"}], # The 
following will memoize the first position "1*1" => [{integer: "1"}, {"*": "*"}, {integer: "1"}], }.each do |input, expected| it "parses #{input.inspect} to #{expected.inspect}" do output = PegParserTest.new.parse(input, start, rules, debug: 3, logger: logger) expect(output).to produce(expected, logger) end end end context "turtle grammar" do let(:start) {:turtleDoc} let(:grammar) {File.read File.expand_path("../../../etc/turtle.sxp", __FILE__)} let(:rules) {EBNF.parse(grammar, format: :sxp).make_peg.ast} { ":a :b :c ." => [[ {:triples=>[ {:subject=>":a"}, {:predicateObjectList=>[ {:verb=>[{:iri=>":b"}]}, {:objectList=>[ {:object=>":c"}, {:_objectList_1=>[]} ]}, {:_predicateObjectList_1=>[]} ]} ]}, {:"."=>"."} ]], "[:b :c] ." => [[ {:triples=>[ {:blankNodePropertyList=>[ {:"["=>"["}, {:predicateObjectList=>[ {:verb=>[{:iri=>":b"}]}, {:objectList=>[ {:object=>":c"}, {:_objectList_1=>[]} ]}, {:_predicateObjectList_1=>[]}]}, {:"]"=>"]"} ]}, {:_triples_3=>nil} ]}, {:"."=>"."} ]] }.each do |input, expected| it "parses #{input.inspect} to #{expected.inspect}" do output = PegParserTest.new.parse(input, start, rules, debug: 3, logger: logger) expect(output).to produce(expected, logger) end end end end require_relative "data/parser" describe EBNFPegParser do let(:input) {File.expand_path("../../../etc/ebnf.ebnf", __FILE__)} let(:sxp) {File.read File.expand_path("../../../etc/ebnf.sxp", __FILE__)} let(:parser) {EBNFPegParser.new(File.open(input), debug: 3, logger: logger)} it "parses EBNF Grammar" do expect(parser.to_sxp).to produce(sxp, logger) end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/peg/data/parser.rb
spec/peg/data/parser.rb
# # EBNF Parser for EBNF. # # Produces an Abstract Synatx Tree in S-Expression form for the input grammar file require 'ebnf/rule' require 'ebnf/terminals' require 'ebnf/peg/parser' require 'sxp' class EBNFPegParser include EBNF::PEG::Parser include EBNF::Terminals # Abstract syntax tree from parse # # @return [Array<EBNF::Rule>] attr_reader :ast terminal(:LHS, LHS) do |value| # [id symbol] value.to_s.scan(/\[([^\]]+)\]\s*(\w+)\s*::=/).first end terminal(:SYMBOL, SYMBOL) do |value| value.to_sym end terminal(:HEX, HEX) terminal(:RANGE, RANGE_NOT_LHS, unescape: true) do |value| [:range, value[1..-2]] end terminal(:O_RANGE, O_RANGE, unescape: true) do |value| [:range, value[1..-2]] end terminal(:STRING1, STRING1, unescape: true) do |value| value[1..-2].tap {|s| s.quote_style = :dquote} end terminal(:STRING2, STRING2, unescape: true) do |value| value[1..-2].tap {|s| s.quote_style = :squote} end terminal(:POSTFIX, POSTFIX) production(:ebnf) do |input| # Cause method_missing to invoke something in our context to_sxp end production(:declaration, clear_packrat: true) do |value, data, callback| # current contains a declaration. # Invoke callback callback.call(:terminals) if value == '@terminals' end start_production(:rule, as_hash: true) production(:rule, clear_packrat: true) do |value, data, callback| # current contains an expression. # Invoke callback id, sym = value[:LHS] expression = value[:expression] callback.call(:rule, EBNF::Rule.new(sym.to_sym, id, expression)) end production(:expression) do |value| value.first[:alt] end production(:alt) do |value| if value.last[:_alt_1].length > 0 [:alt, value.first[:seq]] + value.last[:_alt_1] else value.first[:seq] end end production(:_alt_1) do |value| value.map {|a1| a1.last[:seq]}.compact # Get rid of '|' end production(:seq) do |value| value.length == 1 ? 
value.first : ([:seq] + value) end start_production(:diff, as_hash: true) production(:diff) do |value| if value[:_diff_1] [:diff, value[:postfix], value[:_diff_1]] else value[:postfix] end end production(:_diff_1) do |value| value.last[:postfix] if value end start_production(:postfix, as_hash: true) production(:postfix) do |value| # Push result onto input stack, as the `diff` production can have some number of `postfix` values that are applied recursively case value[:_postfix_1] when "*" then [:star, value[:primary]] when "+" then [:plus, value[:primary]] when "?" then [:opt, value[:primary]] else value[:primary] end end production(:primary) do |value| Array(value).length > 2 ? value[1][:expression] : value end start_production(:pass, as_hash: true) production(:pass) do |value, data, callback| # Invoke callback callback.call(:pass, value[:expression]) end # ## Parser invocation. # On start, yield ourselves if a block is given, otherwise, return this parser instance # # @param [#read, #to_s] input # @param [Hash{Symbol => Object}] options # @option options [Boolean] :progress # Show progress of parser productions # @return [EBNFParser] def initialize(input, **options, &block) # Intantiate grammar from ebnf.ebnf ebnf = File.expand_path("../../../../etc/ebnf.ebnf", __FILE__) grammar = EBNF.parse(File.open(ebnf)) rules = grammar.make_peg.ast @options = options.dup @input = input.respond_to?(:read) ? 
input.read : input.to_s parsing_terminals = false @ast = [] parse(@input, :ebnf, rules, whitespace: EBNF::Terminals::PASS, **options ) do |context, *data| rule = case context when :terminals parsing_terminals = true rule = EBNF::Rule.new(nil, nil, data.first, kind: :terminals) when :pass rule = EBNF::Rule.new(nil, nil, data.first, kind: :pass) when :rule rule = data.first rule.kind = :terminal if parsing_terminals rule end @ast << rule if rule end @ast end # Output formatted S-Expression of grammar # # @return [String] def to_sxp(**options) require 'sxp' unless defined?(SXP) # Output rules as a formatted S-Expression SXP::Generator.string(@ast.map(&:for_sxp)) end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/ll1/scanner_spec.rb
spec/ll1/scanner_spec.rb
# coding: utf-8 $:.unshift ".." require 'spec_helper' require 'ebnf' describe EBNF::LL1::Scanner do describe ".new" do it "initializes with a StringIO" do scanner = EBNF::LL1::Scanner.new(StringIO.new("line1\nline2\n")) expect(scanner.rest).to eq "line1\nline2\n" expect(scanner).not_to be_eos end it "initializes with a string" do scanner = EBNF::LL1::Scanner.new(StringIO.new "line1\nline2\n") expect(scanner.rest).to eq "line1\nline2\n" expect(scanner).not_to be_eos end it "encodes input to UTF-8", pending: !"".respond_to?(:force_encoding) do f = double("input") expect(f).to receive(:read).and_return("ascii".force_encoding(Encoding::ASCII_8BIT)) expect(f).to receive(:gets).and_return("utf8".force_encoding(Encoding::UTF_8)) expect(f).to receive(:eof?).and_return(false, false, true) scanner = EBNF::LL1::Scanner.new(f) s = scanner.rest expect(s).to eq "asciiutf8" expect(s.encoding).to eq Encoding::UTF_8 end end describe "#eos?" do it "returns true if at both eos and eof" do scanner = EBNF::LL1::Scanner.new(StringIO.new("")) expect(scanner).to be_eos end end describe "#lineno" do context "STRING_LITERAL_LONG_QUOTE" do subject { EBNF::LL1::Scanner.new( %( :Test a rdfs:Class ; rdfs:subClassOf mf:ManifestEntry; rdfs:label "Superclass of all CSVW tests" ; rdfs:comment """ All CSVW tests have an input file referenced using `mf:action`. Positive and Negative Evaluation Tests also have a result file referenced using `mf:result` . Other tests may take different inputs and options as defined for each test class. """ ; :b :c . 
)) } it "tracks line numbers" do subject.scan_until(/:Test/) expect(subject.lineno).to eq 2 subject.scan_until(/rdfs:subClassOf/) expect(subject.lineno).to eq 3 subject.scan(/\w+/) expect(subject.lineno).to eq 3 subject.skip_until(/"""/) expect(subject.lineno).to eq 5 subject.skip_until(/"""/) expect(subject.lineno).to eq 10 end end end describe "#rest" do it "returns remaining scanner contents if not at eos" do scanner = EBNF::LL1::Scanner.new(StringIO.new("foo\n")) expect(scanner.rest).to eq "foo\n" expect(scanner.lineno).to eq 1 end it "returns next line from file if at eos" do scanner = EBNF::LL1::Scanner.new(StringIO.new("\nfoo\n")) expect(scanner.rest).to eq "\nfoo\n" scanner.scan(/\s*/m) expect(scanner.rest).to eq "foo\n" expect(scanner.lineno).to eq 2 end it "returns \"\" if at eos and eof" do scanner = EBNF::LL1::Scanner.new(StringIO.new("")) expect(scanner.rest).to eq "" end end describe "#scan" do context "simple terminals" do it "returns a word" do scanner = EBNF::LL1::Scanner.new(StringIO.new("foo bar")) expect(scanner.scan(/\w+/)).to eq "foo" expect(scanner.lineno).to eq 1 end it "returns a STRING_LITERAL_QUOTE" do scanner = EBNF::LL1::Scanner.new(StringIO.new("'string' foo")) expect(scanner.scan(/'((?:[^\x27\x5C\x0A\x0D])*)'/)).to eq "'string'" expect(scanner.lineno).to eq 1 end it "returns a STRING_LITERAL_LONG_SINGLE_QUOTE" do scanner = EBNF::LL1::Scanner.new(StringIO.new("'''\nstring\nstring''' foo")) expect(scanner.scan(/'''((?:(?:'|'')?(?:[^'\\])+)*)'''/m)).to eq "'''\nstring\nstring'''" expect(scanner.lineno).to eq 3 end it "scans a multi-line string" do string = %q(''' <html:a="b"/> ''' ) scanner = EBNF::LL1::Scanner.new(StringIO.new(string)) expect(scanner.scan(/'''((?:(?:'|'')?(?:[^'\\])+)*)'''/m)).not_to be_empty expect(scanner.lineno).to eq 3 end it "scans a longer multi-line string" do string = %q(''' <html:b xmlns:html="http://www.w3.org/1999/xhtml" html:a="b"/> ''' ) scanner = EBNF::LL1::Scanner.new(StringIO.new(string)) 
expect(scanner.scan(/'''((?:(?:'|'')?(?:[^'\\])+)*)'''/m)).not_to be_empty expect(scanner.lineno).to eq 3 end end end describe "#scan_until" do context "simple terminals" do it "returns a word" do scanner = EBNF::LL1::Scanner.new(StringIO.new("foo bar")) expect(scanner.scan_until(/\w+/)).to eq "foo" expect(scanner.lineno).to eq 1 end it "returns a STRING_LITERAL_QUOTE" do scanner = EBNF::LL1::Scanner.new(StringIO.new("prefix 'string' foo")) expect(scanner.scan_until(/'((?:[^\x27\x5C\x0A\x0D])*)'/)).to eq "prefix 'string'" expect(scanner.lineno).to eq 1 end it "returns a STRING_LITERAL_LONG_SINGLE_QUOTE" do scanner = EBNF::LL1::Scanner.new(StringIO.new("prefix '''\nstring\nstring''' foo")) expect(scanner.scan_until(/'''((?:(?:'|'')?(?:[^'\\])+)*)'''/m)).to eq "prefix '''\nstring\nstring'''" expect(scanner.lineno).to eq 3 end end end describe "#skip" do it "skips input" do scanner = EBNF::LL1::Scanner.new(StringIO.new("foo\n")) scanner.skip(/^f/) expect(scanner.rest).to eq "oo\n" expect(scanner.lineno).to eq 1 end end describe "#skip_until" do it "skips input" do scanner = EBNF::LL1::Scanner.new(StringIO.new("prefix\nfoo\n")) scanner.skip_until(/^f/) expect(scanner.rest).to eq "oo\n" expect(scanner.lineno).to eq 2 end end describe "#terminate" do it "skips to end of input" do scanner = EBNF::LL1::Scanner.new(StringIO.new("foo\n")) scanner.terminate expect(scanner).to be_eos end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/ll1/parser_spec.rb
spec/ll1/parser_spec.rb
# coding: utf-8 $:.unshift ".." require 'spec_helper' require 'ebnf' describe EBNF::LL1::Parser do class LL1ParserTest include EBNF::LL1::Parser end before(:all) { LL1ParserTest.start_production(:term) {"foo"} LL1ParserTest.production(:term) {"foo"} LL1ParserTest.terminal(:escape, /escape/) {"foo"} LL1ParserTest.terminal(:unescape, /unescape/, unescape: true) {"foo"} } let(:logger) {RDF::Spec.logger} after(:each) do |example| puts logger.to_s if example.exception && !example.exception.is_a?(RSpec::Expectations::ExpectationNotMetError) end describe "ClassMethods" do describe "production" do it "adds as a start_handler" do expect(LL1ParserTest.start_handlers.keys).to eq [:term] expect(LL1ParserTest.start_handlers[:term]).to be_a(Proc) end it "adds as a production_handler" do expect(LL1ParserTest.production_handlers.keys).to eq [:term] expect(LL1ParserTest.production_handlers[:term]).to be_a(Proc) end end describe "terminal" do it "adds as a terminal_handler" do expect(LL1ParserTest.terminal_handlers.keys).to include(:escape, :unescape) expect(LL1ParserTest.terminal_handlers[:escape]).to be_a(Proc) expect(LL1ParserTest.terminal_handlers[:unescape]).to be_a(Proc) end it "adds patterns" do expect(LL1ParserTest.patterns).to include( EBNF::LL1::Lexer::Terminal.new(:escape, /escape/), EBNF::LL1::Lexer::Terminal.new(:unescape, /unescape/, unescape: true) ) end end end describe "#parse" do subject {LL1ParserTest.new} it "raises error if no branch table defined" do expect {subject.parse("foo")}.to raise_error(EBNF::LL1::Parser::Error, "Branch table not defined") end it "raises error if starting production not defined" do expect { subject.parse("foo", nil, branch: {a: {b: ["c"]}}) }.to raise_error(EBNF::LL1::Parser::Error, "Starting production not defined") end it "raises error on inalid input" do expect { subject.parse("bar", :foo, branch: {foo: {bar: ["baz"]}}) }.to raise_error(EBNF::LL1::Parser::Error, /Invalid token "bar"/) end end require_relative "data/parser" describe 
EBNFParser do before {logger.level = Logger::INFO} let(:input) {File.expand_path("../../../etc/ebnf.ebnf", __FILE__)} let(:sxp) {File.read File.expand_path("../../../etc/ebnf.sxp", __FILE__)} let(:parser) {EBNFParser.new(File.open(input), debug: true, logger: logger)} it "parses EBNF Grammar" do expect(parser.to_sxp).to produce(sxp, logger) end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/ll1/lexer_spec.rb
spec/ll1/lexer_spec.rb
# coding: utf-8 $:.unshift ".." require 'spec_helper' require 'ebnf' describe EBNF::LL1::Lexer do let!(:terminals) {[ [:ANON, ANON], [nil, %r([\(\),.;\[\]a]|\^\^|@base|@prefix|true|false)], [:BLANK_NODE_LABEL, BLANK_NODE_LABEL], [:IRIREF, IRIREF], [:DECIMAL, DECIMAL], [:DOUBLE, DOUBLE], [:INTEGER, INTEGER], [:LANGTAG, LANGTAG], [:PNAME, PNAME], [:STRING_LITERAL_LONG_SINGLE_QUOTE, STRING_LITERAL_LONG_SINGLE_QUOTE, {partial_regexp: /'''/}], [:STRING_LITERAL_LONG_QUOTE, STRING_LITERAL_LONG_QUOTE, {partial_regexp: /"""/}], [:STRING_LITERAL_QUOTE, STRING_LITERAL_QUOTE], [:STRING_LITERAL_SINGLE_QUOTE, STRING_LITERAL_SINGLE_QUOTE], ]} let!(:unescape_terms) {[ :IRIREF, :STRING_LITERAL_QUOTE, :STRING_LITERAL_SINGLE_QUOTE, :STRING_LITERAL_LONG_SINGLE_QUOTE, :STRING_LITERAL_LONG_QUOTE ]} describe ".unescape_codepoints" do # @see https://www.w3.org/TR/rdf-sparql-query/#codepointEscape it "unescapes \\uXXXX codepoint escape sequences" do inputs = { %q(\\u0020) => %q( ), %q(<ab\\u00E9xy>) => %Q(<ab\xC3\xA9xy>), %q(\\u03B1:a) => %Q(\xCE\xB1:a), %q(a\\u003Ab) => %Q(a\x3Ab), } inputs.each do |input, output| output.force_encoding(Encoding::UTF_8) expect(EBNF::LL1::Lexer.unescape_codepoints(input)).to eq output end end it "unescapes \\UXXXXXXXX codepoint escape sequences" do inputs = { %q(\\U00000020) => %q( ), %q(\\U00010000) => %Q(\xF0\x90\x80\x80), %q(\\U000EFFFF) => %Q(\xF3\xAF\xBF\xBF), } inputs.each do |input, output| output.force_encoding(Encoding::UTF_8) expect(EBNF::LL1::Lexer.unescape_codepoints(input)).to eq output end end context "escaped strings" do { 'Dürst' => 'D\\u00FCrst', "é" => '\\u00E9', "€" => '\\u20AC', "resumé" => 'resum\\u00E9', }.each_pair do |unescaped, escaped| it "unescapes #{unescaped.inspect}" do expect(EBNF::LL1::Lexer.unescape_codepoints(escaped)).to eq unescaped end end end end describe ".unescape_string" do # @see https://www.w3.org/TR/rdf-sparql-query/#grammarEscapes context "escape sequences" do EBNF::LL1::Lexer::ESCAPE_CHARS.each do |escaped, 
unescaped| it "unescapes #{unescaped.inspect}" do expect(EBNF::LL1::Lexer.unescape_string(escaped)).to eq unescaped end end end context "escaped strings" do { 'simple literal' => 'simple literal', 'backslash:\\' => 'backslash:\\\\', 'dquote:"' => 'dquote:\\"', "newline:\n" => 'newline:\\n', "return\r" => 'return\\r', "tab:\t" => 'tab:\\t', }.each_pair do |unescaped, escaped| it "unescapes #{unescaped.inspect}" do expect(EBNF::LL1::Lexer.unescape_string(escaped)).to eq unescaped end end end end describe ".tokenize" do context "numeric literals" do it "tokenizes unsigned integer literals" do tokenize(%q(42)) do |tokens| expect(tokens.length).to eql 1 expect(tokens.first.type).to eq :INTEGER expect(tokens.first.value).to eq "42" end end it "tokenizes positive integer literals" do tokenize(%q(+42)) do |tokens| expect(tokens.length).to eql 1 expect(tokens.last.type).to eq :INTEGER expect(tokens.first.value).to eq "+42" end end it "tokenizes negative integer literals" do tokenize(%q(-42)) do |tokens| expect(tokens.length).to eql 1 expect(tokens.last.type).to eq :INTEGER expect(tokens.first.value).to eq "-42" end end it "tokenizes unsigned decimal literals" do tokenize(%q(3.1415)) do |tokens| expect(tokens.length).to eql 1 expect(tokens.first.type).to eq :DECIMAL expect(tokens.first.value).to eq "3.1415" end end it "tokenizes positive decimal literals" do tokenize(%q(+3.1415)) do |tokens| expect(tokens.length).to eql 1 expect(tokens.last.type).to eq :DECIMAL expect(tokens.first.value).to eq "+3.1415" end end it "tokenizes negative decimal literals" do tokenize(%q(-3.1415)) do |tokens| expect(tokens.length).to eql 1 expect(tokens.last.type).to eq :DECIMAL expect(tokens.first.value).to eq "-3.1415" end end it "tokenizes unsigned double literals" do tokenize(%q(1e6)) do |tokens| expect(tokens.length).to eql 1 expect(tokens.first.type).to eq :DOUBLE expect(tokens.first.value).to eq "1e6" end end it "tokenizes positive double literals" do tokenize(%q(+1e6)) do |tokens| 
expect(tokens.length).to eql 1 expect(tokens.last.type).to eq :DOUBLE expect(tokens.first.value).to eq "+1e6" end end it "tokenizes negative double literals" do tokenize(%q(-1e6)) do |tokens| expect(tokens.length).to eql 1 expect(tokens.last.type).to eq :DOUBLE expect(tokens.first.value).to eq "-1e6" end end end context "string terminals" do %w|^^ ( ) [ ] , ; . a true false @base @prefix|.each do |string| it "tokenizes the #{string.inspect} string" do tokenize(string) do |tokens| expect(tokens.length).to eql 1 expect(tokens.first.type).to eq nil expect(tokens.first.value).to eq string end end end end context "comments" do it "ignores the remainder of the current line" do tokenize("# :foo :bar", "# :foo :bar\n", "# :foo :bar\r\n") do |tokens| expect(tokens.length).to eql 0 end end it "ignores leading whitespace" do tokenize(" # :foo :bar", "\n# :foo :bar", "\r\n# :foo :bar") do |tokens| expect(tokens.length).to eql 0 end end end it "matches input longer than low water mark when buffer is low" do input = StringIO.new %("""123456789 123456789 """ """123456789 123456789 """) lexer = EBNF::LL1::Lexer.new(input, terminals, unescape_terms: unescape_terms, whitespace: WHITESPACE, low_water: 20, high_water: 40) expect(lexer.shift.type).to eq :STRING_LITERAL_LONG_QUOTE expect(lexer.shift.type).to eq :STRING_LITERAL_LONG_QUOTE end end describe "#valid?" 
do it "validates legal input" do expect(tokenize(%q(:a "b" <c>))).to be_valid end it "invalidates illegal input" do expect(tokenize(%q(:a 'open))).not_to be_valid end end describe "#lineno" do it "for white space" do inputs = { "" => 1, "\n" => 2, "\n\n" => 3, "\r\n" => 2, } inputs.each do |input, lineno| lexer = tokenize(input) lexer.to_a # consumes the input expect(lexer.lineno).to eq lineno end end context "STRING_LITERAL_LONG_QUOTE" do it "tracks line numbers" do input = %( :Test a rdfs:Class ; rdfs:subClassOf mf:ManifestEntry; rdfs:label "Superclass of all CSVW tests" ; rdfs:comment """ All CSVW tests have an input file referenced using `mf:action`. Positive and Negative Evaluation Tests also have a result file referenced using `mf:result` . Other tests may take different inputs and options as defined for each test class. """ ; :b :c . ) expect(tokenize(input).to_a.map(&:lineno)).to include( 2, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5, 5, 10, 11, 11, 11 ) end end end describe "#first/#shift/#recover" do subject {tokenize("1\n2\n3\n4")} it "returns tokens in first/shift sequence" do %w{1 2 3 4}.each do |v| expect(subject.first.value).to eq v subject.shift end expect(subject.first).to be_nil end context "with unrecognized token" do subject {tokenize("< space > 'foo' 1")} it "raises error with #first" do expect {subject.first}.to raise_error(EBNF::LL1::Lexer::Error, /Invalid token/) end it "recovers to next token" do subject.recover expect(subject.first.value).to eq "'foo'" end end describe "#first" do it "returns a token when passed as an argument" do expect(subject.first(:INTEGER)).to be_a(EBNF::LL1::Lexer::Token) end it "does not return a token unless passed as an argument" do expect {subject.first(:Double)}.to raise_error(EBNF::LL1::Lexer::Error, 'Invalid token "1"') end end end describe EBNF::LL1::Lexer::Token do subject {described_class.new(:type, 'value', lineno: 1)} describe "#type" do its(:type) {is_expected.to eq :type} end describe "#value" do its(:value) 
{is_expected.to eq 'value'} end describe "#lineno" do its(:lineno) {is_expected.to eq 1} end describe "#[]" do it "returns type at 0 index" do expect(subject[0]).to eq :type end it "returns value at 1 index" do expect(subject[1]).to eq 'value' end it "returns nil for other indexes" do expect(subject[2]).to be_nil end end describe "#===" do specify {expect(subject).to be === :type} specify {expect(subject).to be === 'value'} end describe "#to_hash" do specify {expect(subject.to_hash).to eql({type: :type, value: 'value'})} end describe "#to_s" do specify {expect(subject.to_s).to eq ":type"} end describe "#representation" do specify {expect(subject.representation).to eq :type} end describe "#to_a" do specify {expect(subject.to_a).to eq [:type, 'value']} end end describe EBNF::LL1::Lexer::Terminal do { "returns itself with no map entry": { input: "FOO", map: {}, expect: "FOO" }, "returns map value if specified": { input: "FOO", map: {"foo" => 'bar'}, expect: "bar" }, }.each do |name, params| it name do term = described_class.new(:nil, params[:regexp], map: params[:map]) expect(term.canonicalize(params[:input])).to eq params[:expect] end end end def tokenize(*inputs) options = inputs.last.is_a?(Hash) ? inputs.pop : {} lexer = nil inputs.each do |input| lexer = EBNF::LL1::Lexer.tokenize(input, terminals, unescape_terms: unescape_terms, whitespace: WHITESPACE) expect(lexer).to be_a(EBNF::LL1::Lexer) yield lexer.to_a if block_given? 
end lexer end EXPONENT = /[eE][+-]?[0-9]+/ ANON = /\[\s*\]/ BLANK_NODE_LABEL = /_:(?:\w)*/ IRIREF = /<\w*>/ INTEGER = /[+-]?[0-9]+/ DECIMAL = /[+-]?(?:[0-9]*\.[0-9]+)/ DOUBLE = /[+-]?(?:[0-9]+\.[0-9]*#{EXPONENT}|\.?[0-9]+#{EXPONENT})/ LANGTAG = /@[a-zA-Z]+(?:-[a-zA-Z0-9]+)*/ PNAME = /\w*:\w*/ STRING_LITERAL_QUOTE = /'(?:[^\'\\\n\r])*'/ STRING_LITERAL_SINGLE_QUOTE = /"(?:[^\"\\\n\r])*"/ STRING_LITERAL_LONG_SINGLE_QUOTE = /'''(?:(?:'|'')?(?:[^'\\]))*'''/m STRING_LITERAL_LONG_QUOTE = /"""(?:(?:"|"")?(?:[^"\\]|#))*"""/m WHITESPACE = /(\s|(?:#[^x]*$))+/m.freeze end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/ll1/data/meta.rb
spec/ll1/data/meta.rb
# This file is automatically generated by ebnf version 2.0.0 # Derived from etc/ebnf.ebnf module EBNFParserMeta START = :ebnf BRANCH = { :alt => { "(" => [:seq, :_alt_1], :HEX => [:seq, :_alt_1], :O_RANGE => [:seq, :_alt_1], :RANGE => [:seq, :_alt_1], :STRING1 => [:seq, :_alt_1], :STRING2 => [:seq, :_alt_1], :SYMBOL => [:seq, :_alt_1], }, :_alt_1 => { ")" => [], "@pass" => [], "@terminals" => [], :LHS => [], "|" => [:_alt_3], }, :_alt_2 => { "|" => ["|", :seq], }, :_alt_3 => { "|" => [:_alt_2, :_alt_1], }, :declaration => { "@pass" => [:pass], "@terminals" => ["@terminals"], }, :diff => { "(" => [:postfix, :_diff_1], :HEX => [:postfix, :_diff_1], :O_RANGE => [:postfix, :_diff_1], :RANGE => [:postfix, :_diff_1], :STRING1 => [:postfix, :_diff_1], :STRING2 => [:postfix, :_diff_1], :SYMBOL => [:postfix, :_diff_1], }, :_diff_1 => { "(" => [], ")" => [], "-" => [:_diff_2], "@pass" => [], "@terminals" => [], :HEX => [], :LHS => [], :O_RANGE => [], :RANGE => [], :STRING1 => [], :STRING2 => [], :SYMBOL => [], "|" => [], }, :_diff_2 => { "-" => ["-", :postfix], }, :ebnf => { "@pass" => [:_ebnf_2], "@terminals" => [:_ebnf_2], :LHS => [:_ebnf_2], }, :_ebnf_1 => { "@pass" => [:declaration], "@terminals" => [:declaration], :LHS => [:rule], }, :_ebnf_2 => { "@pass" => [:_ebnf_1, :ebnf], "@terminals" => [:_ebnf_1, :ebnf], :LHS => [:_ebnf_1, :ebnf], }, :expression => { "(" => [:alt], :HEX => [:alt], :O_RANGE => [:alt], :RANGE => [:alt], :STRING1 => [:alt], :STRING2 => [:alt], :SYMBOL => [:alt], }, :pass => { "@pass" => ["@pass", :expression], }, :postfix => { "(" => [:primary, :_postfix_1], :HEX => [:primary, :_postfix_1], :O_RANGE => [:primary, :_postfix_1], :RANGE => [:primary, :_postfix_1], :STRING1 => [:primary, :_postfix_1], :STRING2 => [:primary, :_postfix_1], :SYMBOL => [:primary, :_postfix_1], }, :_postfix_1 => { "(" => [], ")" => [], "-" => [], "@pass" => [], "@terminals" => [], :HEX => [], :LHS => [], :O_RANGE => [], :POSTFIX => [:POSTFIX], :RANGE => [], :STRING1 => [], 
:STRING2 => [], :SYMBOL => [], "|" => [], }, :primary => { "(" => [:_primary_1], :HEX => [:HEX], :O_RANGE => [:O_RANGE], :RANGE => [:RANGE], :STRING1 => [:STRING1], :STRING2 => [:STRING2], :SYMBOL => [:SYMBOL], }, :_primary_1 => { "(" => ["(", :expression, ")"], }, :rule => { :LHS => [:LHS, :expression], }, :seq => { "(" => [:diff, :_seq_1], :HEX => [:diff, :_seq_1], :O_RANGE => [:diff, :_seq_1], :RANGE => [:diff, :_seq_1], :STRING1 => [:diff, :_seq_1], :STRING2 => [:diff, :_seq_1], :SYMBOL => [:diff, :_seq_1], }, :_seq_1 => { "(" => [:_seq_2], ")" => [], "@pass" => [], "@terminals" => [], :HEX => [:_seq_2], :LHS => [], :O_RANGE => [:_seq_2], :RANGE => [:_seq_2], :STRING1 => [:_seq_2], :STRING2 => [:_seq_2], :SYMBOL => [:_seq_2], "|" => [], }, :_seq_2 => { "(" => [:diff, :_seq_1], :HEX => [:diff, :_seq_1], :O_RANGE => [:diff, :_seq_1], :RANGE => [:diff, :_seq_1], :STRING1 => [:diff, :_seq_1], :STRING2 => [:diff, :_seq_1], :SYMBOL => [:diff, :_seq_1], }, }.freeze TERMINALS = [ "(", ")", "-", "@pass", "@terminals", :HEX, :LHS, :O_RANGE, :POSTFIX, :RANGE, :STRING1, :STRING2, :SYMBOL, "|" ].freeze FIRST = { :alt => [ :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "("], :_alt_1 => [ :_eps, "|"], :_alt_2 => [ "|"], :_alt_3 => [ "|"], :_alt_4 => [ :_eps, "|"], :_alt_5 => [ :_eps, "|"], :_alt_6 => [ :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "("], :declaration => [ "@terminals", "@pass"], :diff => [ :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "("], :_diff_1 => [ :_eps, "-"], :_diff_2 => [ "-"], :_diff_3 => [ "-", :_eps], :_diff_4 => [ :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "("], :ebnf => [ :_eps, "@terminals", :LHS, "@pass"], :_ebnf_1 => [ "@terminals", :LHS, "@pass"], :_ebnf_2 => [ "@terminals", :LHS, "@pass"], :_ebnf_3 => [ :_eps, "@terminals", :LHS, "@pass"], :_empty => [ :_eps], :expression => [ :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "("], :pass => [ "@pass"], :_pass_1 => [ :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, 
:STRING2, "("], :postfix => [ :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "("], :_postfix_1 => [ :POSTFIX, :_eps], :_postfix_2 => [ :POSTFIX, :_eps], :primary => [ :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "("], :_primary_1 => [ "("], :_primary_2 => [ :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "("], :_primary_3 => [ ")"], :rule => [ :LHS], :_rule_1 => [ :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "("], :seq => [ :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "("], :_seq_1 => [ :_eps, :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "("], :_seq_2 => [ :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "("], :_seq_3 => [ :_eps, :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "("], :_seq_4 => [ :_eps, :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "("], }.freeze FOLLOW = { :alt => [ ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_1 => [ ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_2 => [ "|", ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_3 => [ ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_4 => [ ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_5 => [ ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_6 => [ "|", ")", :_eof, "@terminals", :LHS, "@pass"], :declaration => [ :_eof, "@terminals", :LHS, "@pass"], :diff => [ ")", "|", :_eof, :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_diff_1 => [ ")", "|", :_eof, :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_diff_2 => [ ")", "|", :_eof, :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_diff_3 => [ ")", "|", :_eof, :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_diff_4 => [ ")", "|", :_eof, :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :ebnf => [ :_eof], :_ebnf_1 => [ :_eof, "@terminals", :LHS, "@pass"], :_ebnf_2 => [ :_eof], :_ebnf_3 => [ :_eof], 
:expression => [ ")", :_eof, "@terminals", :LHS, "@pass"], :pass => [ :_eof, "@terminals", :LHS, "@pass"], :_pass_1 => [ :_eof, "@terminals", :LHS, "@pass"], :postfix => [ "-", ")", "|", :_eof, :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_postfix_1 => [ "-", ")", "|", :_eof, :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_postfix_2 => [ "-", ")", "|", :_eof, :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :primary => [ :POSTFIX, "-", ")", "|", :_eof, :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_primary_1 => [ :POSTFIX, "-", ")", "|", :_eof, :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_primary_2 => [ :POSTFIX, "-", ")", "|", :_eof, :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_primary_3 => [ :POSTFIX, "-", ")", "|", :_eof, :HEX, :SYMBOL, :RANGE, :O_RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :rule => [ :_eof, "@terminals", :LHS, "@pass"], :_rule_1 => [ :_eof, "@terminals", :LHS, "@pass"], :seq => [ ")", "|", :_eof, "@terminals", :LHS, "@pass"], :_seq_1 => [ ")", "|", :_eof, "@terminals", :LHS, "@pass"], :_seq_2 => [ ")", "|", :_eof, "@terminals", :LHS, "@pass"], :_seq_3 => [ ")", "|", :_eof, "@terminals", :LHS, "@pass"], :_seq_4 => [ ")", "|", :_eof, "@terminals", :LHS, "@pass"], }.freeze CLEANUP = { :_alt_1 => :star, :_alt_3 => :merge, :_diff_1 => :opt, :ebnf => :star, :_ebnf_2 => :merge, :_postfix_1 => :opt, :seq => :plus, :_seq_1 => :star, :_seq_2 => :merge, }.freeze PASS = [ :PASS ].freeze end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/spec/ll1/data/parser.rb
spec/ll1/data/parser.rb
# # EBNF Parser for EBNF. # # Produces an Abstract Synatx Tree in S-Expression form for the input grammar file require 'ebnf/rule' require 'ebnf/terminals' require 'ebnf/ll1/parser' require_relative 'meta' require 'sxp' class EBNFParser include EBNF::LL1::Parser include EBNFParserMeta include EBNF::Terminals # Abstract syntax tree from parse # # @return [Array<EBNF::Rule>] attr_reader :ast terminal(:LHS, LHS) do |prod, token, input| input[:id], input[:symbol] = token.value.to_s.scan(/\[([^\]]+)\]\s*(\w+)\s*::=/).first end terminal(:SYMBOL, SYMBOL) do |prod, token, input| input[:terminal] = token.value.to_sym end terminal(:HEX, HEX) do |prod, token, input| input[:terminal] = token.value end terminal(:RANGE, RANGE, unescape: true) do |prod, token, input| input[:terminal] = [:range, token.value[1..-2]] end terminal(:O_RANGE, O_RANGE, unescape: true) do |prod, token, input| input[:terminal] = [:range, token.value[1..-2]] end terminal(:STRING1, STRING1, unescape: true) do |prod, token, input| input[:terminal] = token.value[1..-2].tap {|s| s.quote_style = :dquote} end terminal(:STRING2, STRING2, unescape: true) do |prod, token, input| input[:terminal] = token.value[1..-2].tap {|s| s.quote_style = :squote} end terminal(:POSTFIX, POSTFIX) do |prod, token, input| input[:postfix] = token.value end terminal(nil, %r(@terminals|@pass|[\[\]|\-\(\)])) do |prod, token, input| input[:terminal] = token.value end production(:ebnf) do |input, current, callback| # Cause method_missing to invoke something in our context to_sxp end production(:declaration) do |input, current, callback| # current contains a declaration. # Invoke callback callback.call(:terminals) if current[:terminal] == '@terminals' end production(:rule) do |input, current, callback| # current contains an expression. 
# Invoke callback callback.call(:rule, EBNF::Rule.new(current[:symbol].to_sym, current[:id], current[:expression].last)) end production(:expression) do |input, current, callback| alt = current[:alt] (input[:expression] ||= [:expression]) << (alt.length > 2 ? alt : alt.last) end production(:alt) do |input, current, callback| input[:alt] = if current[:alt] current[:alt] elsif seq = current[:seq] [:alt] << (seq.length > 2 ? seq : seq.last) end end start_production(:_alt_1) do |input, current, callback| seq = Array(input[:seq]) (input[:alt] = [:alt]) << (seq.length > 2 ? seq : seq.last) input.delete(:seq) end production(:_alt_1) do |input, current, callback| input[:alt] ||= [:alt] # Add optimized value of `seq,` if any if seq = current[:seq] input[:alt] << (seq.length == 2 ? seq.last : seq) end # Also recursive call to `_alt_1` input[:alt] += current[:alt][1..-1] if current[:alt] end production(:seq) do |input, current, callback| input[:seq] = if current[:seq] current[:seq] elsif diff = current[:diff] [:seq] << (diff.length > 2 ? diff : diff.last) end end start_production(:_seq_1) do |input, current, callback| diff = Array(input[:diff]) (input[:seq] = [:seq]) << (diff.length > 2 ? diff : diff.last) input.delete(:diff) end production(:_seq_1) do |input, current, callback| input[:seq] ||= [:seq] # Add optimized value of `diff`, if any if diff = current[:diff] input[:seq] << (diff.length > 2 ? diff : diff.last) end # Also recursive call to `_seq_1` input[:seq] += current[:seq][1..-1] if current[:seq] end production(:diff) do |input, current, callback| input[:diff] = if current[:diff] current[:diff] elsif postfix = current[:postfix] [:diff] << postfix end end start_production(:_diff_1) do |input, current, callback| postfix = Array(input[:postfix]) (input[:diff] = [:diff]) << (postfix.length > 2 ? 
postfix : postfix.last) input.delete(:postfix) end production(:_diff_1) do |input, current, callback| # Gratuitous call to exercise method add_prod_data(:_diff_1, "foo") input[:diff] ||= [:diff] # Add optimized value of `postfix`, if any input[:diff] << current[:postfix] if current[:postfix] end production(:postfix) do |input, current, callback| # Gratuitous call to exercise method add_prod_datum(:postfix, "foo") # Push result onto input stack, as the `diff` production can have some number of `postfix` values that are applied recursively input[:postfix] = case current[:postfix] when "*" then [:star, current[:primary]] when "+" then [:plus, current[:primary]] when "?" then [:opt, current[:primary]] else current[:primary] end end production(:primary) do |input, current, callback| # Gratuitous call to exercise method add_prod_datum(:primary, ["foo"]) input[:primary] = if current[:expression] v = current[:expression][1..-1] v = v.first if v.length == 1 else current[:terminal] end end production(:pass) do |input, current, callback| # Invoke callback callback.call(:pass, current[:expression].last) end # ## Parser invocation. # On start, yield ourselves if a block is given, otherwise, return this parser instance # # @param [#read, #to_s] input # @param [Hash{Symbol => Object}] options # @option options [Hash] :prefixes (Hash.new) # the prefix mappings to use (for acessing intermediate parser productions) # @option options [Boolean] :progress # Show progress of parser productions # @return [EBNFParser] def initialize(input, **options, &block) @options = options.dup @input = input.respond_to?(:read) ? 
input.read : input.to_s parsing_terminals = false @ast = [] parse(@input, START.to_sym, branch: BRANCH, first: FIRST, follow: FOLLOW, whitespace: EBNF::Terminals::PASS, reset_on_true: true, **options ) do |context, *data| rule = case context when :terminals parsing_terminals = true rule = EBNF::Rule.new(nil, nil, data.first, kind: :terminals) when :pass rule = EBNF::Rule.new(nil, nil, data.first, kind: :pass) when :rule rule = data.first rule.kind = :terminal if parsing_terminals rule end @ast << rule if rule end @ast end # Output formatted S-Expression of grammar # # @return [String] def to_sxp(**options) require 'sxp' unless defined?(SXP) # Output rules as a formatted S-Expression SXP::Generator.string(@ast.map(&:for_sxp)) end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/examples/ebnf-ll1-parser/meta.rb
examples/ebnf-ll1-parser/meta.rb
# This file is automatically generated by ebnf version 2.0.0 # Derived from ../../etc/ebnf.ebnf module EBNFParserMeta START = :ebnf BRANCH = { :alt => { "(" => [:seq, :_alt_1], :HEX => [:seq, :_alt_1], :O_RANGE => [:seq, :_alt_1], :RANGE => [:seq, :_alt_1], :STRING1 => [:seq, :_alt_1], :STRING2 => [:seq, :_alt_1], :SYMBOL => [:seq, :_alt_1], }, :_alt_1 => { ")" => [], "@pass" => [], "@terminals" => [], :LHS => [], "|" => [:_alt_3], }, :_alt_2 => { "|" => ["|", :seq], }, :_alt_3 => { "|" => [:_alt_2, :_alt_1], }, :declaration => { "@pass" => [:pass], "@terminals" => ["@terminals"], }, :diff => { "(" => [:postfix, :_diff_1], :HEX => [:postfix, :_diff_1], :O_RANGE => [:postfix, :_diff_1], :RANGE => [:postfix, :_diff_1], :STRING1 => [:postfix, :_diff_1], :STRING2 => [:postfix, :_diff_1], :SYMBOL => [:postfix, :_diff_1], }, :_diff_1 => { "(" => [], ")" => [], "-" => [:_diff_2], "@pass" => [], "@terminals" => [], :HEX => [], :LHS => [], :O_RANGE => [], :RANGE => [], :STRING1 => [], :STRING2 => [], :SYMBOL => [], "|" => [], }, :_diff_2 => { "-" => ["-", :postfix], }, :ebnf => { "@pass" => [:_ebnf_2], "@terminals" => [:_ebnf_2], :LHS => [:_ebnf_2], }, :_ebnf_1 => { "@pass" => [:declaration], "@terminals" => [:declaration], :LHS => [:rule], }, :_ebnf_2 => { "@pass" => [:_ebnf_1, :ebnf], "@terminals" => [:_ebnf_1, :ebnf], :LHS => [:_ebnf_1, :ebnf], }, :expression => { "(" => [:alt], :HEX => [:alt], :O_RANGE => [:alt], :RANGE => [:alt], :STRING1 => [:alt], :STRING2 => [:alt], :SYMBOL => [:alt], }, :pass => { "@pass" => ["@pass", :expression], }, :postfix => { "(" => [:primary, :_postfix_1], :HEX => [:primary, :_postfix_1], :O_RANGE => [:primary, :_postfix_1], :RANGE => [:primary, :_postfix_1], :STRING1 => [:primary, :_postfix_1], :STRING2 => [:primary, :_postfix_1], :SYMBOL => [:primary, :_postfix_1], }, :_postfix_1 => { "(" => [], ")" => [], "-" => [], "@pass" => [], "@terminals" => [], :HEX => [], :LHS => [], :O_RANGE => [], :POSTFIX => [:POSTFIX], :RANGE => [], :STRING1 => 
[], :STRING2 => [], :SYMBOL => [], "|" => [], }, :primary => { "(" => [:_primary_1], :HEX => [:HEX], :O_RANGE => [:O_RANGE], :RANGE => [:RANGE], :STRING1 => [:STRING1], :STRING2 => [:STRING2], :SYMBOL => [:SYMBOL], }, :_primary_1 => { "(" => ["(", :expression, ")"], }, :rule => { :LHS => [:LHS, :expression], }, :seq => { "(" => [:diff, :_seq_1], :HEX => [:diff, :_seq_1], :O_RANGE => [:diff, :_seq_1], :RANGE => [:diff, :_seq_1], :STRING1 => [:diff, :_seq_1], :STRING2 => [:diff, :_seq_1], :SYMBOL => [:diff, :_seq_1], }, :_seq_1 => { "(" => [:_seq_2], ")" => [], "@pass" => [], "@terminals" => [], :HEX => [:_seq_2], :LHS => [], :O_RANGE => [:_seq_2], :RANGE => [:_seq_2], :STRING1 => [:_seq_2], :STRING2 => [:_seq_2], :SYMBOL => [:_seq_2], "|" => [], }, :_seq_2 => { "(" => [:diff, :_seq_1], :HEX => [:diff, :_seq_1], :O_RANGE => [:diff, :_seq_1], :RANGE => [:diff, :_seq_1], :STRING1 => [:diff, :_seq_1], :STRING2 => [:diff, :_seq_1], :SYMBOL => [:diff, :_seq_1], }, }.freeze TERMINALS = [ "(", ")", "-", "@pass", "@terminals", :HEX, :LHS, :O_RANGE, :POSTFIX, :RANGE, :STRING1, :STRING2, :SYMBOL, "|" ].freeze FIRST = { :alt => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_alt_1 => [ :_eps, "|"], :_alt_2 => [ "|"], :_alt_3 => [ "|"], :_alt_4 => [ :_eps, "|"], :_alt_5 => [ :_eps, "|"], :_alt_6 => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :declaration => [ "@terminals", "@pass"], :diff => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_diff_1 => [ :_eps, "-"], :_diff_2 => [ "-"], :_diff_3 => [ "-", :_eps], :_diff_4 => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :ebnf => [ :_eps, "@terminals", :LHS, "@pass"], :_ebnf_1 => [ "@terminals", :LHS, "@pass"], :_ebnf_2 => [ "@terminals", :LHS, "@pass"], :_ebnf_3 => [ :_eps, "@terminals", :LHS, "@pass"], :_empty => [ :_eps], :expression => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :pass => [ "@pass"], :_pass_1 => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, 
:STRING1, :STRING2, "("], :postfix => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_postfix_1 => [ :POSTFIX, :_eps], :_postfix_2 => [ :POSTFIX, :_eps], :primary => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_primary_1 => [ "("], :_primary_2 => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_primary_3 => [ ")"], :rule => [ :LHS], :_rule_1 => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :seq => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_seq_1 => [ :_eps, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_seq_2 => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_seq_3 => [ :_eps, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_seq_4 => [ :_eps, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], }.freeze FOLLOW = { :alt => [ ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_1 => [ ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_2 => [ "|", ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_3 => [ ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_4 => [ ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_5 => [ ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_6 => [ "|", ")", :_eof, "@terminals", :LHS, "@pass"], :declaration => [ :_eof, "@terminals", :LHS, "@pass"], :diff => [ ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_diff_1 => [ ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_diff_2 => [ ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_diff_3 => [ ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_diff_4 => [ ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :ebnf => [ :_eof], :_ebnf_1 => [ :_eof, "@terminals", :LHS, "@pass"], :_ebnf_2 => [ :_eof], :_ebnf_3 => [ 
:_eof], :expression => [ ")", :_eof, "@terminals", :LHS, "@pass"], :pass => [ :_eof, "@terminals", :LHS, "@pass"], :_pass_1 => [ :_eof, "@terminals", :LHS, "@pass"], :postfix => [ "-", ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_postfix_1 => [ "-", ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_postfix_2 => [ "-", ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :primary => [ :POSTFIX, "-", ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_primary_1 => [ :POSTFIX, "-", ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_primary_2 => [ :POSTFIX, "-", ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_primary_3 => [ :POSTFIX, "-", ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :rule => [ :_eof, "@terminals", :LHS, "@pass"], :_rule_1 => [ :_eof, "@terminals", :LHS, "@pass"], :seq => [ ")", "|", :_eof, "@terminals", :LHS, "@pass"], :_seq_1 => [ ")", "|", :_eof, "@terminals", :LHS, "@pass"], :_seq_2 => [ ")", "|", :_eof, "@terminals", :LHS, "@pass"], :_seq_3 => [ ")", "|", :_eof, "@terminals", :LHS, "@pass"], :_seq_4 => [ ")", "|", :_eof, "@terminals", :LHS, "@pass"], }.freeze CLEANUP = { :_alt_1 => :star, :_alt_3 => :merge, :_diff_1 => :opt, :ebnf => :star, :_ebnf_2 => :merge, :_postfix_1 => :opt, :seq => :plus, :_seq_1 => :star, :_seq_2 => :merge, }.freeze PASS = [ :PASS ].freeze end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/examples/ebnf-ll1-parser/parser.rb
examples/ebnf-ll1-parser/parser.rb
# # EBNF Parser for EBNF. # # Produces an Abstract Synatx Tree in S-Expression form for the input grammar file require 'ebnf/rule' require 'ebnf/terminals' require 'ebnf/ll1/parser' require 'meta' require 'sxp' require 'logger' class EBNFLL1Parser include EBNF::LL1::Parser include EBNFParserMeta include EBNF::Terminals # An internal class used for capturing the values of a production. class ProdResult attr_accessor :prod attr_accessor :values def initialize(prod, *values) @prod, @values = prod, values end def to_ary values.map {|v| v.respond_to?(:to_ary) ? v.to_ary : v}.unshift(@prod) end def inspect "(#{prod} #{values.map(&:inspect).join(' ')})" end end # Abstract syntax tree from parse # # @return [Array<EBNF::Rule>] attr_reader :ast # ## Terminals # Define rules for Terminals, placing results on the input stack, making them available to upstream non-Terminal rules. # # Terminals are defined with a symbol matching the associated rule name, and a regular expression used by the lexer. # # The `prod` parameter is the name of the parent rule for which this terminal is matched, which may have a bearing in some circumstances, although not used in this example. # # The `token` parameter is the matched lexer token. # # The `input` is used for returning the semantic value(s) of this terminal, which if often a string, but may be any instance which reflects the semantic interpretation of that terminal. # # Terminals are matched in the order of appearance # Match the Left hand side of a rule or terminal # # [11] LHS ::= ('[' SYMBOL+ ']' ' '+)? 
SYMBOL ' '* '::=' terminal(:LHS, LHS) do |prod, token, input| input[:id], input[:symbol] = token.value.to_s.scan(/(?:\[([^\]]+)\])?\s*(\w+)\s*::=/).first end # Match `SYMBOL` terminal # # [12] SYMBOL ::= ([a-z] | [A-Z] | [0-9] | '_' | '.')+ terminal(:SYMBOL, SYMBOL) do |prod, token, input| input[:terminal] = token.value.to_sym end # Match `HEX` terminal # # [13] HEX ::= '#x' ([a-f] | [A-F] | [0-9])+ terminal(:HEX, HEX) do |prod, token, input| input[:terminal] = [:hex, token.value] end # Terminal for `RANGE` is matched as part of a `primary` rule. Unescape the values to remove EBNF escapes in the input. # # [14] `RANGE` ::= '[' (R_CHAR '-' R_CHAR) | (HEX '-' HEX) ']' terminal(:RANGE, RANGE, unescape: true) do |prod, token, input| input[:terminal] = [:range, token.value[1..-2]] end # Terminal for `O_RANGE` is matched as part of a `primary` rule. Unescape the values to remove EBNF escapes in the input. # # [15] O_RANGE ::= '[^' (R_CHAR '-' R_CHAR) | (HEX '-' HEX) ']' terminal(:O_RANGE, O_RANGE, unescape: true) do |prod, token, input| input[:terminal] = [:range, token.value[1..-2]] end # Strings have internal escape sequences expanded and are passed through without surrounding quotes as terminals # Match double quote string # # [16] STRING1 ::= '"' (CHAR - '"')* '"' terminal(:STRING1, STRING1, unescape: true) do |prod, token, input| input[:terminal] = token.value[1..-2].tap {|s| s.quote_style = :dquote} end # Match single quote string # # [17] STRING2 ::= "'" (CHAR - "'")* "'" terminal(:STRING2, STRING2, unescape: true) do |prod, token, input| input[:terminal] = token.value[1..-2].tap {|s| s.quote_style = :squote} end # The `CHAR` and `R_CHAR` productions are not used explicitly # Match `POSTFIX` terminal # # [20] POSTFIX ::= [?*+] terminal(:POSTFIX, POSTFIX) do |prod, token, input| input[:postfix] = token.value end # The `PASS` productions is not used explicitly # Make sure we recognize string terminals, even though they're not actually used in processing. 
This defines a "catch-all" terminal for the lexer. terminal(nil, %r(@terminals|@pass|[\[\]|\-\(\)])) do |prod, token, input| input[:terminal] = token.value end # ## Non-terminal productions # Define productions for non-Termainals. This can include `start_production` as well as `production` to hook into rule start and end. In some cases, we need to use sub-productions as generated when turning EBNF into BNF. # # The `input` parameter is a Hash containing input from the parent production. and is used for returning the results of this production. # # The `data` parameter data returned by child productions placing information onto their input. # # The `callback` parameter provides access to a callback defined in the call to `parse`, see `#each_rule` below). # Production for end of `declaration` non-terminal. # # Look for `@terminals` to change parser state to parsing terminals. # # `@pass` is ignored here. # # [2] declaration ::= '@terminals' | pass production(:declaration) do |input, data, callback| # data contains a declaration. # Invoke callback if data[:terminal] callback.call(:terminals, data[:terminal]) elsif data[:pass] callback.call(:pass, data[:pass]) end end # Production for end of `rule` non-terminal. # The `input` parameter includes information placed by previous productions at the same level, or at the start of the current production. # The `data` parameter, is the result of child productions placing information onto their input. # The `callback` parameter provides access to a callback defined in the call. # # Create rule from expression value and pass to callback # # [3] rule ::= LHS expression production(:rule) do |input, data, callback| # data contains an expression. # Invoke callback expr = data[:expression].respond_to?(:to_ary) ? data[:expression].to_ary : data[:expression] callback.call(:rule, EBNF::Rule.new(data[:symbol].to_sym, data[:id], expr)) if expr end # Production for end of `expression` non-terminal. 
# Passes through the optimized value of the alt production as follows: # # [:alt foo] => foo # [:alt foo bar] => [:alt foo bar] # # [4] expression ::= alt production(:expression) do |input, data, callback| input[:expression] = data[:alt] end # Production for end of `alt` non-terminal. # Passes through the optimized value of the seq production as follows: # # [:seq foo] => foo # [:seq foo bar] => [:seq foo bar] # # Note that this also may just pass through from `_alt_1` # # [5] alt ::= seq ('|' seq)* production(:alt) do |input, data, callback| input[:alt] = if data[:seq].length > 1 ProdResult.new(:alt, *data[:seq]) else data[:seq].first end end # Production for end of `seq` non-terminal. # Passes through the optimized value of the `diff` production as follows: # # [:diff foo] => foo # [:diff foo bar] => [:diff foo bar] # # Note that this also may just pass through from `_seq_1` # # [6] seq ::= diff+ production(:seq) do |input, data, callback| input[:seq] ||= [] input[:seq] << if data[:diff].length > 1 ProdResult.new(:seq, *data[:diff]) else data[:diff].first end end # `Diff` production returns concatenated postfix values # # [7] diff ::= postfix ('-' postfix)? production(:diff) do |input, data, callback| input[:diff] ||= [] data[:postfix] ||= [] input[:diff] << if data[:postfix].length > 1 ProdResult.new(:diff, *data[:postfix]) else data[:postfix].first end end # Production for end of `postfix` non-terminal. # Either returns the `primary` production value, or as modified by the `postfix`. # # [:primary] => [:primary] # [:primary, '*'] => [:star, :primary] # [:primary, '+'] => [:plus, :primary] # [:primary, '?'] => [:opt, :primary] # # [8] postfix ::= primary POSTFIX? 
production(:postfix) do |input, data, callback| # Push result onto input stack, as the `diff` production can have some number of `postfix` values that are applied recursively input[:postfix] ||= [] input[:postfix] << case data[:postfix] when "*" then ProdResult.new(:star, data[:primary]) when "+" then ProdResult.new(:plus, data[:primary]) when "?" then ProdResult.new(:opt, data[:primary]) else data[:primary] end end # Production for end of `primary` non-terminal. # Places `:primary` on the stack # # This may either be a terminal, or the result of an `expression`. # # [9] primary ::= HEX # | SYMBOL # | RANGE # | ENUM # | O_RANGE # | O_ENUM # | STRING1 # | STRING2 # | '(' expression ')' production(:primary) do |input, data, callback| input[:primary] = data[:expression] || data[:terminal] end # Production for end of pass non-terminal. # # [10] pass ::= '@pass' expression production(:pass) do |input, data, callback| expression = data[:expression] expression = expression.to_ary if expression.respond_to?(:to_ary) input[:pass] = expression end # ## Parser invocation. # On start, yield ourselves if a block is given, otherwise, return this parser instance # # @param [#read, #to_s] input # @param [Hash{Symbol => Object}] options # @option options [Boolean] :level # Trace level. 0(debug), 1(info), 2(warn), 3(error). # @return [self] def initialize(input, **options, &block) # Read input, if necessary, which will be used in a Scanner which feads the Lexer. @input = input.respond_to?(:read) ? input.read : input.to_s # If the `level` option is set, instantiate a logger for collecting trace information. if options.key?(:level) options[:logger] ||= Logger.new(STDERR). tap {|x| x.level = options[:level]}. 
tap {|x| x.formatter = lambda {|severity, datetime, progname, msg| "#{severity} #{msg}\n"}} end parsing_terminals = false @ast = [] parse(@input, START.to_sym, branch: BRANCH, first: FIRST, follow: FOLLOW, cleanup: CLEANUP, whitespace: EBNF::Terminals::PASS, reset_on_start: true, **options ) do |context, *data| rule = case context when :terminals # After parsing `@terminals` # This changes the state of the parser to treat subsequent rules as terminals. parsing_terminals = true rule = EBNF::Rule.new(nil, nil, data.first, kind: :terminals) when :pass # After parsing `@pass` # This defines a specific rule for whitespace. rule = EBNF::Rule.new(nil, nil, data.first, kind: :pass) when :rule # A rule which has already been turned into a `Rule` object. rule = data.first rule.kind = :terminal if parsing_terminals rule end @ast << rule if rule end @ast end # Output formatted S-Expression of grammar # # @return [String] def to_sxp(**options) require 'sxp' unless defined?(SXP) # Output rules as a formatted S-Expression SXP::Generator.string(@ast.map(&:for_sxp)) end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/examples/isoebnf/meta.rb
examples/isoebnf/meta.rb
# This file is automatically generated by ebnf version 2.0.0
# Derived from ../../etc/iso-ebnf.ebnf
#
# NOTE(review): generated PEG rule table for the ISO-EBNF grammar. Each
# EBNF::Rule is extended with EBNF::PEG::Rule so it can drive the PEG parser.
# Rule names beginning with "_" are sub-rules introduced by grammar expansion;
# `kind: :terminal` marks lexical rules, `kind: :pass` marks skippable
# whitespace/comments. Do not hand-edit: regenerate from the .ebnf source.
module ISOEBNFMeta
  RULES = [
    # Structural (non-terminal) productions
    EBNF::Rule.new(:syntax, nil, [:star, :syntax_rule]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:syntax_rule, nil, [:seq, :meta_identifier, :defining_symbol, :definitions_list, :terminator_symbol]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:definitions_list, nil, [:seq, :single_definition, :_definitions_list_1]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_definitions_list_1, nil, [:star, :_definitions_list_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_definitions_list_2, nil, [:seq, :definition_separator_symbol, :definitions_list]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:single_definition, nil, [:seq, :term, :_single_definition_1]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_single_definition_1, nil, [:star, :_single_definition_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_single_definition_2, nil, [:seq, ",", :term]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:term, nil, [:seq, :factor, :_term_1]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_term_1, nil, [:opt, :_term_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_term_2, nil, [:seq, "-", :exception]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:exception, nil, [:seq, :factor]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:factor, nil, [:seq, :_factor_1, :primary]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_factor_1, nil, [:opt, :_factor_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_factor_2, nil, [:seq, :integer, "*"]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:primary, nil, [:alt, :optional_sequence, :repeated_sequence, :special_sequence, :grouped_sequence, :meta_identifier, :terminal_string, :empty]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:optional_sequence, nil, [:seq, :start_option_symbol, :definitions_list, :end_option_symbol]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:repeated_sequence, nil, [:seq, :start_repeat_symbol, :definitions_list, :end_repeat_symbol]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:grouped_sequence, nil, [:seq, "(", :definitions_list, ")"]).extend(EBNF::PEG::Rule),
    # Marker separating structural rules from terminal rules
    EBNF::Rule.new(:_terminals, nil, [:seq], kind: :terminals).extend(EBNF::PEG::Rule),
    # Lexical (terminal) rules
    EBNF::Rule.new(:terminal_string, nil, [:alt, :_terminal_string_1, :_terminal_string_2], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_terminal_string_1, nil, [:seq, "'", :_terminal_string_3, "'"]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_terminal_string_3, nil, [:plus, :first_terminal_character]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_terminal_string_2, nil, [:seq, "\"", :_terminal_string_4, "\""]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_terminal_string_4, nil, [:plus, :second_terminal_character]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:meta_identifier, nil, [:seq, :letter, :_meta_identifier_1], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_meta_identifier_1, nil, [:star, :meta_identifier_character]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:integer, nil, [:plus, :decimal_digit], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:special_sequence, nil, [:seq, "?", :_special_sequence_1, "?"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_special_sequence_1, nil, [:star, :special_sequence_character]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:comment, nil, [:seq, :start_comment_symbol, :_comment_1, :end_comment_symbol], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_comment_1, nil, [:star, :comment_symbol]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:comment_symbol, nil, [:alt, :comment, :commentless_symbol, :other_character], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:commentless_symbol, nil, [:alt, :terminal_character, :meta_identifier, :integer, :terminal_string, :special_sequence], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:letter, nil, [:range, "a-zA-Z"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:decimal_digit, nil, [:range, "0-9"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:meta_identifier_character, nil, [:alt, :letter, :decimal_digit, "_"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:first_terminal_character, nil, [:diff, :terminal_character, "'"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:second_terminal_character, nil, [:diff, :terminal_character, "\""], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:special_sequence_character, nil, [:diff, :terminal_character, "?"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:terminal_character, nil, [:alt, :letter, :decimal_digit, :concatenate_symbol, :defining_symbol, :definition_separator_symbol, :end_comment_symbol, :end_group_symbol, :end_option_symbol, :end_repeat_symbol, :except_symbol, :first_quote_symbol, :repetition_symbol, :second_quote_symbol, :special_sequence_symbol, :start_comment_symbol, :start_group_symbol, :start_option_symbol, :start_repeat_symbol, :terminator_symbol, :other_character], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:other_character, nil, [:alt, :_other_character_1, "\\"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_other_character_1, nil, [:range, ":+_%@&$<>^` ̃#x20#x23"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:gap_separator, nil, [:range, "#x9#xa#xb#xc#xd#x20"], kind: :terminal).extend(EBNF::PEG::Rule),
    # Skippable content between tokens (whitespace and comments)
    EBNF::Rule.new(:_pass, nil, [:alt, :__pass_1, :comment], kind: :pass).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:__pass_1, nil, [:plus, :gap_separator]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:empty, nil, [:seq, ""], kind: :terminal).extend(EBNF::PEG::Rule),
    # Single-character punctuation terminals of the ISO-EBNF notation
    EBNF::Rule.new(:concatenate_symbol, nil, [:seq, ","], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:repetition_symbol, nil, [:seq, "*"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:except_symbol, nil, [:seq, "-"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:first_quote_symbol, nil, [:seq, "'"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:second_quote_symbol, nil, [:seq, "\""], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:start_comment_symbol, nil, [:seq, "(*"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:end_comment_symbol, nil, [:seq, "*)"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:start_group_symbol, nil, [:seq, "("], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:end_group_symbol, nil, [:seq, ")"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:special_sequence_symbol, nil, [:seq, "?"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:defining_symbol, nil, [:alt, "=", ":"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:definition_separator_symbol, nil, [:alt, "|", "/", "!"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:terminator_symbol, nil, [:alt, ";", "."], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:start_option_symbol, nil, [:seq, "["], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:end_option_symbol, nil, [:seq, "]"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:start_repeat_symbol, nil, [:alt, "{", "(:"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:end_repeat_symbol, nil, [:alt, "}", ":)"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:gap_free_symbol, nil, [:alt, :_gap_free_symbol_1, :terminal_string], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_gap_free_symbol_1, nil, [:seq, :_gap_free_symbol_3, :terminal_character]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_gap_free_symbol_3, nil, [:not, :_gap_free_symbol_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_gap_free_symbol_2, nil, [:range, "'\""], kind: :terminal).extend(EBNF::PEG::Rule),
  ]
end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/examples/isoebnf/parser.rb
examples/isoebnf/parser.rb
# 
# EBNF Parser for ISO EBNF.
#
# Produces an Abstract Syntax Tree in S-Expression form for the input grammar file
require 'ebnf'
require 'ebnf/terminals'
require 'ebnf/peg/parser'
require 'meta'
require 'sxp'
require 'logger'

# PEG parser for the ISO-EBNF notation, driven by the generated rule table in
# ISOEBNFMeta::RULES (see meta.rb). The `terminal`/`production` class-level
# calls below register value-transforming blocks against the grammar rules;
# a successful parse yields an array of EBNF::Rule objects exposed via #ast.
class ISOEBNFPegParser
  include EBNF::PEG::Parser

  # The base for terminal-character, which omits "'", '"', and '?'.
  # Could be more optimized, and one might quibble
  # with the overly-strictly defined character set,
  # but it is correct.
  #
  # NOTE(review): this is an extended (/x) regexp, so the "#\'", "#\"" and
  # "#\?" lines are comment lines — those quote/special-sequence alternatives
  # are deliberately excluded here and added back by the derived constants.
  TERMINAL_CHARACTER_BASE = %r{
    [a-zA-Z0-9] | # letter | decimal digit
    , |           # concatenate symbol
    = |           # defining symbol
    [\|\/!] |     # definition separator symbol
    \*\) |        # end comment symbol
    \) |          # end group symbol
    \] |          # end option symbol
    \} |          # end repeat symbol
    \- |          # except symbol
    #\' |         # first quote symbol
    \* |          # repetition symbol
    #\" |         # second quote symbol
    #\? |         # special sequence symbol
    \(\* |        # start comment symbol
    \( |          # start group symbol
    \[ |          # start option symbol
    \{ |          # start repeat symbol
    [;\.] |       # terminator symbol
    [:+_%@&$<>^\x20\x23\\`~] # other character
  }x

  # Each derived class re-admits the characters the grammar context allows.
  TERMINAL_CHARACTER = %r{#{TERMINAL_CHARACTER_BASE}|['"\?]}
  FIRST_TERMINAL_CHARACTER = %r{#{TERMINAL_CHARACTER_BASE}|["\?]}
  SECOND_TERMINAL_CHARACTER = %r{#{TERMINAL_CHARACTER_BASE}|['\?]}
  SPECIAL_SEQUENCE_CHARACTER = %r{#{TERMINAL_CHARACTER_BASE}|['"]}

  # Abstract syntax tree from parse
  #
  # @return [Array<EBNF::Rule>]
  attr_reader :ast

  # `[14] integer ::= decimal_digit+`
  # Converts the matched digit string to an Integer. (`prod` is unused here.)
  terminal(:integer, /\d+/) do |value, prod|
    value.to_i
  end

  # `[15] meta_identifier ::= letter meta_identifier_character*`
  # Rule names become Symbols.
  terminal(:meta_identifier, /[a-zA-Z][a-zA-Z0-9_]*/) do |value|
    value.to_sym
  end

  # `[17] terminal_string ::= ("'" first_terminal_character+ "'")`
  # `                       | ('"' second_terminal_character+ '"')`
  # Strips the surrounding quotes and records which quote style was used
  # (quote_style is an attribute the ebnf gem adds to String).
  terminal(:terminal_string, /(?:'#{FIRST_TERMINAL_CHARACTER}+')|(?:"#{SECOND_TERMINAL_CHARACTER}+")/x) do |value|
    value[1..-2].tap {|s| s.quote_style = (value.start_with?("'") ? :squote : :dquote) }
  end

  # `[20] special_sequence ::= '?' special_sequence_character* '?'`
  terminal(:special_sequence, /\?#{SPECIAL_SEQUENCE_CHARACTER}+\?/)

  # `[22] terminal_character ::= [a-zA-Z0-9]`
  # `                          | [,=;*}#x2d?([{;]`
  # `                          | '*)'`
  # `                          | '(*'`
  # `                          | ']'`
  # `                          | other_character`
  terminal(:terminal_character, TERMINAL_CHARACTER)

  # `[25] empty ::= ''`
  terminal(:empty, //)

  # `[26] definition_separator_symbol ::= '|' | '/' | '!'`
  terminal(:definition_separator_symbol, /[\|\/!]/)

  # `[27] terminator_symbol ::= ';' | '.'`
  terminal(:terminator_symbol, /[;\.]/)

  # `[28] start_option_symbol ::= '[' | '(/'`
  terminal(:start_option_symbol, /\[|(?:\(\/)/)

  # `[29] end_option_symbol ::= ']'`
  terminal(:end_option_symbol, /\]/)

  # `[30] start_repeat_symbol ::= '{' | '(:'`
  terminal(:start_repeat_symbol, /{|\(:/)

  # `[31] end_repeat_symbol ::= '}' | ':)'`
  terminal(:end_repeat_symbol, /}|:\)/)

  # ## Non-terminal productions

  # `[2] syntax_rule ::= meta_identifier '=' definitions_list terminator_symbol`
  # Builds an EBNF::Rule and hands it to the parse callback; returns nil so
  # nothing is accumulated in the parse value itself.
  production(:syntax_rule, clear_packrat: true) do |value, data, callback|
    # value contains an expression.
    # Invoke callback
    sym = value[0][:meta_identifier]
    definitions_list = value[2][:definitions_list]
    callback.call(:rule, EBNF::Rule.new(sym.to_sym, nil, definitions_list))
    nil
  end

  # Setting `as_hash: true` in the start production makes the value of the form of a hash, rather than an array of hashes.
  #
  # `[3] definitions_list ::= single_definition (definition_separator_symbol definitions_list)*`
  # Collapses into an [:alt, ...] expression when alternatives are present.
  start_production(:definitions_list, as_hash: true)
  production(:definitions_list) do |value|
    if value[:_definitions_list_1].length > 0
      [:alt, value[:single_definition]] + value[:_definitions_list_1]
    else
      value[:single_definition]
    end
  end
  production(:_definitions_list_1) do |value|
    Array(value.first)
  end
  start_production(:_definitions_list_2, as_hash: true)
  production(:_definitions_list_2) do |value|
    # Flatten a nested [:alt, ...] so alternatives stay at one level.
    if Array(value[:definitions_list]).first == :alt
      value[:definitions_list][1..-1]
    else
      [value[:definitions_list]]
    end
  end

  # `[4] single_definition ::= term (',' term)*`
  # Collapses into a [:seq, ...] expression when concatenation is present.
  start_production(:single_definition, as_hash: true)
  production(:single_definition) do |value|
    if value[:_single_definition_1].length > 0
      [:seq, value[:term]] + value[:_single_definition_1]
    else
      value[:term]
    end
  end
  production(:_single_definition_1) do |value|
    value.map {|a1| a1.last[:term]}.compact # Get rid of '|'
  end

  # `[5] term ::= factor ('-' exception)?`
  # An exception becomes a [:diff, factor, exception] expression.
  start_production(:term, as_hash: true)
  production(:term) do |value|
    if value[:_term_1]
      [:diff, value[:factor], value[:_term_1]]
    else
      value[:factor]
    end
  end
  production(:_term_1) do |value|
    value.last[:exception] if value
  end

  # `[6] exception ::= factor`
  start_production(:exception, as_hash: true)
  production(:exception) do |value|
    value[:factor]
  end

  # `[7] factor ::= (integer '*')? primary`
  # `n * primary` means exactly n repetitions: [:rept, n, n, primary].
  start_production(:factor, as_hash: true)
  production(:factor) do |value|
    if value[:_factor_1]
      [:rept, value[:_factor_1], value[:_factor_1], value[:primary]]
    else
      value[:primary]
    end
  end
  production(:_factor_2) do |value|
    value.first[:integer]
  end

  # `[9] optional_sequence ::= start_option_symbol definitions_list end_option_symbol`
  production(:optional_sequence) do |value|
    [:opt, value[1][:definitions_list]]
  end

  # `[10] repeated_sequence ::= start_repeat_symbol definitions_list end_repeat_symbol`
  production(:repeated_sequence) do |value|
    [:star, value[1][:definitions_list]]
  end

  # `[11] grouped_sequence ::= '(' definitions_list ')'`
  production(:grouped_sequence) do |value|
    [:seq, value[1][:definitions_list]]
  end

  # ## Parser invocation.
  # On start, yield ourselves if a block is given, otherwise, return this parser instance
  #
  # @param [#read, #to_s] input
  # @param [Hash{Symbol => Object}] options
  # @option options [Boolean] :level
  #   Trace level. 0(debug), 1(info), 2(warn), 3(error).
  # @return [EBNFParser]
  def initialize(input, **options, &block)
    # If the `level` option is set, instantiate a logger for collecting trace information.
    if options.key?(:level)
      options[:logger] ||= Logger.new(STDERR).
        tap {|x| x.level = options[:level]}.
        tap {|x| x.formatter = lambda {|severity, datetime, progname, msg| "#{severity} #{msg}\n"}}
    end

    # Read input, if necessary, which will be used in a Scanner.
    @input = input.respond_to?(:read) ? input.read : input.to_s

    # NOTE(review): parsing_terminals is never set to true anywhere in this
    # class, so the `rule.kind = :terminal` branch below appears unreachable —
    # confirm against the upstream example before relying on it.
    parsing_terminals = false
    @ast = []
    # The whitespace regexp skips ISO gap separators and (* ... *) comments.
    parse(@input, :syntax, ISOEBNFMeta::RULES,
          whitespace: %r{([\x09-\x0d\x20]|(?:\(\*(?:(?:\*[^\)])|[^*])*\*\)))+},
          **options
    ) do |context, *data|
      rule = case context
      when :rule
        # A rule which has already been turned into a `Rule` object.
        rule = data.first
        rule.kind = :terminal if parsing_terminals
        rule
      end
      @ast << rule if rule
    end
    @ast
  end

  # Output formatted S-Expression of grammar
  #
  # @return [String]
  def to_sxp(**options)
    require 'sxp' unless defined?(SXP)
    # Output rules as a formatted S-Expression
    SXP::Generator.string(@ast.map(&:for_sxp))
  end
end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/examples/abnf/core.rb
examples/abnf/core.rb
# This file is automatically generated by ebnf version 2.0.0 # Derived from ../../etc/abnf-core.ebnf module ABNFCore RULES = [ EBNF::Rule.new(:ALPHA, nil, [:range, "#x41-#x5A#x61-#x7A"], kind: :terminal), EBNF::Rule.new(:BIT, nil, [:alt, "0", "1"], kind: :terminal), EBNF::Rule.new(:CHAR, nil, [:range, "#x01-#x7F"], kind: :terminal), EBNF::Rule.new(:CR, nil, [:hex, "#x0D"], kind: :terminal), EBNF::Rule.new(:CRLF, nil, [:seq, [:opt, :CR], :LF], kind: :terminal), EBNF::Rule.new(:CTL, nil, [:alt, [:range, "#x00-#x1F"], [:hex, "#x7F"]], kind: :terminal), EBNF::Rule.new(:DIGIT, nil, [:range, "#x30-#x39"], kind: :terminal), EBNF::Rule.new(:DQUOTE, nil, [:hex, "#x22"], kind: :terminal), EBNF::Rule.new(:HEXDIG, nil, [:alt, :DIGIT, [:range, "A-F"]], kind: :terminal), EBNF::Rule.new(:HTAB, nil, [:hex, "#x09"], kind: :terminal), EBNF::Rule.new(:LF, nil, [:hex, "#x0A"], kind: :terminal), EBNF::Rule.new(:LWSP, nil, [:star, [:alt, :WSP, [:seq, :CRLF, :WSP]]], kind: :terminal), EBNF::Rule.new(:OCTET, nil, [:range, "#x00-#xFF"], kind: :terminal), EBNF::Rule.new(:SP, nil, [:hex, "#x20"], kind: :terminal), EBNF::Rule.new(:VCHAR, nil, [:range, "#x21-#x7E"], kind: :terminal), EBNF::Rule.new(:WSP, nil, [:alt, :SP, :HTAB], kind: :terminal), ] end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/examples/abnf/meta.rb
examples/abnf/meta.rb
# This file is automatically generated by ebnf version 2.0.0
# Derived from ../../etc/abnf.ebnf
#
# NOTE(review): generated PEG rule table for parsing ABNF (RFC 5234) itself.
# Each EBNF::Rule is extended with EBNF::PEG::Rule so it can drive the PEG
# parser. Rule names beginning with "_" are sub-rules introduced by grammar
# expansion. Do not hand-edit: regenerate from the .ebnf source.
module ABNFMeta
  RULES = [
    # Structural (non-terminal) productions
    EBNF::Rule.new(:rulelist, nil, [:plus, :_rulelist_1]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_rulelist_1, nil, [:alt, :rule, :_rulelist_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_rulelist_2, nil, [:seq, :_rulelist_3, :c_nl]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_rulelist_3, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:rule, nil, [:seq, :rulename, :defined_as, :elements, :c_nl]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:elements, nil, [:seq, :alternation, :_elements_1]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_elements_1, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:alternation, nil, [:seq, :concatenation, :_alternation_1]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_alternation_1, nil, [:star, :_alternation_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_alternation_2, nil, [:seq, :_alternation_3, "/", :_alternation_4, :concatenation]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_alternation_3, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_alternation_4, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:concatenation, nil, [:seq, :repetition, :_concatenation_1]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_concatenation_1, nil, [:star, :_concatenation_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_concatenation_2, nil, [:seq, :_concatenation_3, :repetition]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_concatenation_3, nil, [:plus, :c_wsp]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:repetition, nil, [:seq, :_repetition_1, :element]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_repetition_1, nil, [:opt, :repeat]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:repeat, nil, [:alt, :_repeat_1, :_repeat_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_repeat_1, nil, [:seq, :_repeat_3, "*", :_repeat_4]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_repeat_3, nil, [:star, :DIGIT]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_repeat_4, nil, [:star, :DIGIT]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_repeat_2, nil, [:plus, :DIGIT]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:element, nil, [:alt, :rulename, :group, :option, :char_val, :num_val, :prose_val]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:group, nil, [:seq, "(", :_group_1, :alternation, :_group_2, ")"]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_group_1, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_group_2, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:option, nil, [:seq, "[", :_option_1, :alternation, :_option_2, "]"]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_option_1, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_option_2, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:char_val, nil, [:alt, :case_insensitive_string, :case_sensitive_string]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:case_insensitive_string, nil, [:seq, :_case_insensitive_string_1, :quoted_string]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_case_insensitive_string_1, nil, [:opt, "%i"]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:case_sensitive_string, nil, [:seq, "%s", :quoted_string]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:num_val, nil, [:seq, "%", :_num_val_1]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_num_val_1, nil, [:alt, :bin_val, :dec_val, :hex_val]).extend(EBNF::PEG::Rule),
    # Marker separating structural rules from terminal rules
    EBNF::Rule.new(:_terminals, nil, [:seq], kind: :terminals).extend(EBNF::PEG::Rule),
    # Lexical (terminal) rules
    EBNF::Rule.new(:rulename, nil, [:seq, :ALPHA, :_rulename_1], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_rulename_1, nil, [:star, :_rulename_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_rulename_2, nil, [:alt, :ALPHA, :DIGIT, "-"]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:defined_as, nil, [:seq, :_defined_as_1, :_defined_as_2, :_defined_as_3], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_defined_as_1, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_defined_as_2, nil, [:alt, "=", "=/"]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_defined_as_3, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:c_wsp, nil, [:alt, :WSP, :_c_wsp_1], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_c_wsp_1, nil, [:seq, :c_nl, :WSP]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:c_nl, nil, [:alt, :COMMENT, :CRLF], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:comment, nil, [:seq, ";", :_comment_1, :CRLF], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_comment_1, nil, [:star, :_comment_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_comment_2, nil, [:alt, :WSP, :VCHAR]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:quoted_string, nil, [:seq, :DQUOTE, :_quoted_string_1, :DQUOTE], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_quoted_string_1, nil, [:star, :_quoted_string_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_quoted_string_2, nil, [:range, "#x20-#x21#x23-#x7E"], kind: :terminal).extend(EBNF::PEG::Rule),
    # Numeric value terminals: b (binary), d (decimal), x (hex), each with
    # optional "." concatenation or "-" range suffixes (RFC 5234 num-val).
    EBNF::Rule.new(:bin_val, nil, [:seq, "b", :_bin_val_1, :_bin_val_2], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_bin_val_1, nil, [:plus, :BIT]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_bin_val_2, nil, [:opt, :_bin_val_3]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_bin_val_3, nil, [:alt, :_bin_val_4, :_bin_val_5]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_bin_val_4, nil, [:plus, :_bin_val_6]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_bin_val_6, nil, [:seq, ".", :_bin_val_7]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_bin_val_7, nil, [:plus, :BIT]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_bin_val_5, nil, [:seq, "-", :_bin_val_8]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_bin_val_8, nil, [:plus, :BIT]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:dec_val, nil, [:seq, "d", :_dec_val_1, :_dec_val_2], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_dec_val_1, nil, [:plus, :DIGIT]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_dec_val_2, nil, [:opt, :_dec_val_3]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_dec_val_3, nil, [:alt, :_dec_val_4, :_dec_val_5]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_dec_val_4, nil, [:plus, :_dec_val_6]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_dec_val_6, nil, [:seq, ".", :_dec_val_7]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_dec_val_7, nil, [:plus, :DIGIT]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_dec_val_5, nil, [:seq, "-", :_dec_val_8]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_dec_val_8, nil, [:plus, :DIGIT]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:hex_val, nil, [:seq, "x", :_hex_val_1, :_hex_val_2], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_hex_val_1, nil, [:plus, :HEXDIG]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_hex_val_2, nil, [:opt, :_hex_val_3]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_hex_val_3, nil, [:alt, :_hex_val_4, :_hex_val_5]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_hex_val_4, nil, [:plus, :_hex_val_6]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_hex_val_6, nil, [:seq, ".", :_hex_val_7]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_hex_val_7, nil, [:plus, :HEXDIG]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_hex_val_5, nil, [:seq, "-", :_hex_val_8]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_hex_val_8, nil, [:plus, :HEXDIG]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:prose_val, nil, [:seq, "<", :_prose_val_1, ">"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_prose_val_1, nil, [:star, :_prose_val_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_prose_val_2, nil, [:range, "#x20-#x3D#x3F-#x7E"], kind: :terminal).extend(EBNF::PEG::Rule),
    # RFC 5234 core terminals, inlined here for self-containment
    EBNF::Rule.new(:ALPHA, nil, [:range, "#x41-#x5A#x61-#x7A"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:BIT, nil, [:alt, "0", "1"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:CHAR, nil, [:range, "#x01-#x7F"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:CR, nil, [:hex, "#x0D"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:CRLF, nil, [:seq, :_CRLF_1, :LF], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_CRLF_1, nil, [:opt, :CR], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:CTL, nil, [:alt, :_CTL_1, :_CTL_2], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_CTL_1, nil, [:range, "#x00-#x1F"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_CTL_2, nil, [:hex, "#x7F"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:DIGIT, nil, [:range, "#x30-#x39"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:DQUOTE, nil, [:hex, "#x22"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:HEXDIG, nil, [:alt, :DIGIT, "A", "B", "C", "D", "E", "F"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:HTAB, nil, [:hex, "#x09"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:LF, nil, [:hex, "#x0A"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:LWSP, nil, [:star, :_LWSP_1], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_LWSP_1, nil, [:alt, :WSP, :_LWSP_2], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_LWSP_2, nil, [:seq, :CRLF, :WSP], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:OCTET, nil, [:range, "#x00-#xFF"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:SP, nil, [:hex, "#x20"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:VCHAR, nil, [:range, "#x21-#x7E"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:WSP, nil, [:alt, :SP, :HTAB], kind: :terminal).extend(EBNF::PEG::Rule),
  ]
end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/examples/abnf/parser.rb
examples/abnf/parser.rb
# 
# EBNF Parser for ABNF.
#
# Produces an Abstract Syntax Tree in S-Expression form for the input grammar file
require 'ebnf'
require 'ebnf/terminals'
require 'ebnf/peg/parser'
require 'core'    # "Core" rules used in the ABNF spec.
require 'meta'    # "ABNF" rules used for parsing ABNF, itself
require 'scanf'
require 'logger'

# PEG parser for ABNF (RFC 5234), driven by the generated rule table in
# ABNFMeta::RULES. Parsed rules accumulate in #parsed_rules; #ast adds any
# RFC 5234 core rules the grammar references.
class ABNFParser
  include EBNF::PEG::Parser

  # Regular expressions for both "Core" and ABNF-specific terminals.
  ALPHA = %r{[\x41-\x5A\x61-\x7A]}
  VCHAR = %r{[\x20-\x7E]}
  WSP = %r{[\x20\x09]}
  CRLF = %r{\x0D?\x0A}
  COMMENT = %r{;(?:#{WSP}|#{VCHAR})*#{CRLF}}
  C_NL = %r{#{COMMENT}|#{CRLF}}
  C_WSP = %r{#{WSP}|(?:#{C_NL}#{WSP})}

  ##
  # Hash of generated {EBNF::Rule} objects by symbol
  #
  # @return [Hash{Symbol => EBNF::Rule}]
  attr_reader :parsed_rules

  ##
  # The following ABNF grammar rules are treated as terminals.

  # `rulename ::= ALPHA (ALPHA | DIGIT | "-")*`
  terminal(:rulename, /#{ALPHA}(?:#{ALPHA}|[0-9-])*/) do |value|
    value.to_sym
  end

  # `defined_as ::= c_wsp* ("=" | "=/") c_wsp*`
  terminal(:defined_as, /#{C_WSP}*=\/?#{C_WSP}*/) {|value| value.strip}

  # `quoted_string ::= DQUOTE [#x20-#x21#x23-#x7E]* DQUOTE`
  # Strips the surrounding double quotes.
  terminal(:quoted_string, /"[\x20-\x21\x23-\x7E]*"/) do |value|
    value[1..-2]
  end

  # `bin_val ::= "b" BIT+ (("." BIT+)+ | ("-" BIT+))?`
  terminal(:bin_val, /b[01]+(?:(?:(?:\.[01]+)+)|(?:-[01]+))?/) do |value|
    if value.include?('.')
      # Interpret segments in binary creating a sequence of hex characters or a string
      hex_or_string(value[1..-1].split('.').map {|b| b.to_i(base=2).chr(Encoding::UTF_8)})
    elsif value.include?('-')
      # Interpret as a range
      [:range, value[1..-1].split('-').map {|b| "#x%x" % b.to_i(base=2)}.join("-")]
    else
      # Interpret as a single HEX character
      [:hex, "#x%x" % value[1..-1].to_i(base=2)]
    end
  end

  # `dec_val ::= "d" DIGIT+ (("." DIGIT+)+ | ("-" DIGIT+))?`
  terminal(:dec_val, /d[0-9]+(?:(?:(?:\.[0-9]+)+)|(?:-[0-9]+))?/) do |value|
    if value.include?('.')
      # Interpret segments in decimal creating a sequence of hex characters or a string
      hex_or_string(value[1..-1].split('.').map {|b| b.to_i.chr(Encoding::UTF_8)})
    elsif value.include?('-')
      # Interpret as a range
      [:range, value[1..-1].split('-').map {|d| "#x%x" % d.to_i}.join("-")]
    else
      # Interpret as a single HEX character
      [:hex, "#x%x" % value[1..-1].to_i]
    end
  end

  # `hex_val ::= "x" HEXDIG+ (("." HEXDIG+)+ | ("-" HEXDIG+))?`
  terminal(:hex_val, /x[0-9A-F]+(?:(?:(?:\.[0-9A-F]+)+)|(?:-[0-9A-F]+))?/i) do |value|
    if value.include?('.')
      # Interpret segments in hexadecimal creating a sequence of hex characters or a string
      hex_or_string(value[1..-1].split('.').map {|b| b.to_i(base=16).chr(Encoding::UTF_8)})
    elsif value.include?('-')
      # Interpret as a range
      [:range, value[1..-1].split('-').map {|h| "#x%x" % h.to_i(base=16)}.join("-")]
    else
      # Interpret as a single HEX character
      [:hex, "#x#{value[1..-1]}"]
    end
  end

  # `c_wsp ::= WSP | (c_nl WSP)`
  terminal(:c_wsp, C_WSP)

  # `c_nl ::= comment | CRLF`
  terminal(:c_nl, C_NL)

  # `DIGIT ::= [#x30-#x39]`
  terminal(:DIGIT, /\d/)

  # ## Non-terminal productions

  # The `start_production` on `:rule` allows the parser to present the value as a single Hash, rather than an array of individual hashes.
  start_production(:rule, as_hash: true)

  # `rule ::= rulename defined_as elements c_nl`
  # Records a new rule, or — for the "=/" form — appends alternatives to an
  # already-defined rule. Raises on redefinition or an unknown "=/" target.
  production(:rule) do |value|
    sym = value[:rulename]
    elements = value[:elements]
    if value[:defined_as] == "=/"
      # append to rule alternate
      rule = parsed_rules.fetch(sym) {raise "No existing rule found for #{sym}"}
      rule.expr = [:alt, rule.expr] unless rule.alt?
      if elements.first == :alt
        # append alternatives to rule
        rule.expr.concat(elements[1..-1])
      else
        # add elements as last alternative
        rule.expr.push(elements)
      end
    else
      # There shouldn't be an existing rule
      raise "Redefining rule #{sym}" if parsed_rules.has_key?(sym)
      parsed_rules[sym] = EBNF::Rule.new(sym.to_sym, nil, elements)
    end
    progress(:rule, level: 2) {parsed_rules[sym].to_sxp}
    sym
  end

  # `elements ::= alternation c_wsp*`
  production(:elements) do |value|
    value.first[:alternation]
  end

  # `alternation ::= concatenation (c_wsp* "/" c_wsp* concatenation)*`
  # Collapses into [:alt, ...] when alternatives are present.
  production(:alternation) do |value|
    unless value.last[:_alternation_1].empty?
      [:alt, value.first[:concatenation]] + value.last[:_alternation_1]
    else
      value.first[:concatenation]
    end
  end

  # The `_alternation_2` rule comes from the expanded PEG grammar and serves as an opportunity to customize the values presented to the `alternation` rule.
  production(:_alternation_2) do |value|
    # NOTE(review): the if/else below computes a flattened form but its result
    # is discarded; the block returns the unflattened concatenation. Kept as-is
    # to preserve behavior — confirm intent against the upstream example.
    if Array(value.last[:concatenation]).first == :alt
      value.last[:concatenation][1..-1]
    else
      [value.last[:concatenation]]
    end
    value.last[:concatenation]
  end

  # `concatenation ::= repetition (c_wsp+ repetition)*`
  # Collapses into [:seq, ...] when multiple repetitions are present.
  production(:concatenation) do |value|
    unless value.last[:_concatenation_1].empty?
      [:seq, value.first[:repetition]] + value.last[:_concatenation_1]
    else
      value.first[:repetition]
    end
  end
  start_production(:_concatenation_2, as_hash: true)
  production(:_concatenation_2) do |value|
    value[:repetition]
  end

  # `repetition ::= repeat? element`
  # Maps ABNF repetition prefixes onto EBNF operators:
  #   *elt => star, 1*elt => plus, no prefix => elt, m*n => [:rept, m, n, elt]
  production(:repetition) do |value|
    rept = value.first[:_repetition_1]
    elt = value.last[:element]
    case rept
    when [0, '*'] then [:star, elt]
    when [1, '*'] then [:plus, elt]
    when nil then elt
    else [:rept, rept.first, rept.last, elt]
    end
  end

  # `repeat ::= DIGIT+ | (DIGIT* "*" DIGIT*)`
  # A bare integer n means exactly n repetitions ([n, n]).
  production(:repeat) do |value|
    if value.is_a?(Integer)
      [value, value]
    else
      [value.first, value.last]
    end
  end
  start_production(:_repeat_1, as_hash: true)
  production(:_repeat_1) {|value| value.values}
  production(:_repeat_2) {|value| value.join("").to_i}
  production(:_repeat_3) {|value| value.join("").to_i}
  production(:_repeat_4) {|value| value.length > 0 ? value.join("").to_i : '*'}

  # `element ::= rulename | group | option | char_val | num_val | prose_val`
  production(:element) do |value|
    value
  end

  # `group ::= "(" c_wsp* alternation c_wsp* ")"`
  start_production(:group, as_hash: true)
  production(:group) do |value|
    value[:alternation]
  end

  # `option ::= "[" c_wsp* alternation c_wsp* "]"`
  start_production(:option, as_hash: true)
  production(:option) do |value|
    [:opt, value[:alternation]]
  end

  # `case_insensitive_string ::= "%i"? quoted_string`
  production(:case_insensitive_string) do |value|
    str = value.last[:quoted_string]
    if str.match?(/[[:alpha:]]/)
      # Only need to use case-insensitive if there are alphabetic characters in the string.
      [:istr, value.last[:quoted_string].tap {|s| s.quote_style = :dquote}]
    else
      value.last[:quoted_string].tap {|s| s.quote_style = :dquote}
    end
  end

  # `case_sensitive_string ::= "%s" quoted_string`
  production(:case_sensitive_string) do |value|
    value.last[:quoted_string].tap {|s| s.quote_style = :squote}
  end

  # `num_val ::= "%" (bin_val | dec_val | hex_val)`
  production(:num_val) do |value|
    value.last[:_num_val_1]
  end

  # ## Parser invocation.
  # On start, yield ourselves if a block is given, otherwise, return this parser instance
  #
  # @param [#read, #to_s] input
  # @param [Hash{Symbol => Object}] options
  # @option options [Boolean] :level
  #   Trace level. 0(debug), 1(info), 2(warn), 3(error).
  # @return [EBNFParser]
  def initialize(input, **options, &block)
    # If the `level` option is set, instantiate a logger for collecting trace information.
    if options.key?(:level)
      options[:logger] ||= Logger.new(STDERR).
        tap {|x| x.level = options[:level]}.
        tap {|x| x.formatter = lambda {|severity, datetime, progname, msg| "#{severity} #{msg}\n"}}
    end

    # Read input, if necessary, which will be used in a Scanner.
    @input = input.respond_to?(:read) ? input.read : input.to_s

    @parsed_rules = {}

    # Parses into `@parsed_rules`
    parse(@input, :rulelist,        # Starting rule
          ABNFMeta::RULES,          # PEG rules
          whitespace: '',           # No implicit whitespace
          **options)
  end

  ##
  # The AST includes the parsed rules along with built-in rules for ABNF used within the parsed grammar.
  #
  # @return [Array<EBNF::Rule>]
  def ast
    # Add built-in rules for standard ABNF rules referenced but not explicitly
    # defined in the parsed grammar.
    parsed_rules.values.map(&:symbols).flatten.uniq.each do |sym|
      rule = ABNFCore::RULES.detect {|r| r.sym == sym}
      parsed_rules[sym] ||= rule if rule
    end
    parsed_rules.values
  end

  # Output formatted S-Expression of grammar
  #
  # @return [String]
  def to_sxp(**options)
    require 'sxp' unless defined?(SXP)
    # Output rules as a formatted S-Expression
    SXP::Generator.string(ast.map(&:for_sxp))
  end

  private

  # Generate a combination of seq and string to represent a sequence of characters
  #
  # Printable (VCHAR) characters are accumulated into string segments;
  # non-printable characters are emitted as [:hex, "#x.."] codepoints.
  #
  # @param [Array<String>] characters single-character strings
  # @return [String,Array] a bare String when the whole sequence is printable,
  #   otherwise an [:seq, ...] expression
  def hex_or_string(characters)
    seq = [:seq]
    str_result = ""
    characters.each do |c|
      if VCHAR.match?(c)
        str_result << c
      else
        if str_result.length > 0
          seq << str_result
          str_result = ""
        end
        # FIX: use the character's codepoint (String#ord). The previous
        # String#hex parsed the character as a hex-digit string, which
        # returns 0 for every non-VCHAR character reaching this branch.
        seq << [:hex, "#x%x" % c.ord]
      end
    end
    seq << str_result if str_result.length > 0
    # Either return the sequence, or a string
    if seq.length == 2 && seq.last.is_a?(String)
      seq.last
    else
      seq
    end
  end
end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/examples/calc/calc.rb
examples/calc/calc.rb
# # EBNF Parser for EBNF. # # Produces an Abstract Synatx Tree in S-Expression form for the input grammar file require 'ebnf' require 'ebnf/terminals' require 'ebnf/peg/parser' require 'sxp' require 'logger' class Calc include EBNF::PEG::Parser # Abstract syntax tree from parse # # @return [Array<EBNF::Rule>] attr_reader :ast ## # The calculator grammar comes from a [Wikipedia entry on Parsing Expression Grammar](https://en.wikipedia.org/wiki/Parsing_expression_grammar#Examples), with some small concessions. # # [1] Expr ::= Sum # [2] Sum ::= Product (('+' | '-') Product)* # [3] Product ::= Power (('*' | '/') Power)* # [4] Power ::= Value ('^' Power)? # [5] Value ::= NUMBER | '(' Expr ')' # [6] NUMBER ::= [0-9]+ # # This, in turn, is turned into S-Expression with sub-rules added for embedded rules, which allow them to be accessed independently: # # ( # (rule Expr "1" (seq Sum)) # (rule Sum "2" (seq Product _Sum_1)) # (rule _Sum_1 "2.1" (star _Sum_2)) # (rule _Sum_2 "2.2" (seq _Sum_3 Product)) # (rule _Sum_3 "2.3" (alt "+" "-")) # (rule Product "3" (seq Power _Product_1)) # (rule _Product_1 "3.1" (star _Product_2)) # (rule _Product_2 "3.2" (seq _Product_3 Power)) # (rule _Product_3 "3.3" (alt "*" "/")) # (rule Power "4" (seq Value _Power_1)) # (rule _Power_1 "4.1" (opt _Power_2)) # (rule _Power_2 "4.2" (seq "^" Power)) # (rule Value "5" (alt NUMBER _Value_1)) # (rule _Value_1 "5.1" (seq "(" Expr ")")) # (terminal NUMBER "6" (plus _NUMBER_1)) # (terminal _NUMBER_1 "6.1" (range "0-9"))) ## # The calculator evaluates values from each rule and applies operators resulting in the calculated result. 
# [1] Expr := Sum # # (rule Expr "1" (seq Sum)) production(:Expr, clear_packrat: true) do |value| value.first[:Sum] end # [2] Sum := Product (('+' | '-') Product)\* # # (rule Sum "2" (seq Product _Sum_1)) # (rule _Sum_1 "2.1" (star _Sum_2)) production(:Sum, clear_packrat: true) do |value| product, operations = value.first[:Product], value.last[:_Sum_1] # Operations are an array of tuples: [['+', 2], ['-', 3]] operations.inject(product) {|accumulator, vv| accumulator.send(*vv)} end # (('+' | '-') Product)\* # # (rule _Sum_2 "2.2" (seq _Sum_3 Product)) # (rule _Sum_3 "2.3" (alt "+" "-")) # # Turn [{_Sum_3: "+"}, {Product: N}] into ["+" N] production(:_Sum_2) do |value| value.map(&:values).flatten end # [3] Product := Power (('\*' | '/') Power)\* # # (rule Product "3" (seq Power _Product_1)) # (rule _Product_1 "3.1" (star _Product_2)) production(:Product, clear_packrat: true) do |value| power, operations = value.first[:Power], value.last[:_Product_1] # Operations are an array of tuples: [['*', 2], ['/', 3]] operations.inject(power) {|accumulator, vv| accumulator.send(*vv)} end # (('\*' | '/') Power)\* # # (rule _Product_2 "3.2" (seq _Product_3 Power)) # (rule _Product_3 "3.3" (alt "*" "/")) # # Turn [{_Product_3: "*"}, {Power: N}] into ["*" N] production(:_Product_2) do |value| value.map(&:values).flatten end # [4] Power := Value ('^' Power)? # # (rule Power "4" (seq Value _Power_1)) production(:Power, clear_packrat: true) do |value| val, pow = value.first[:Value], value.last[:_Power_1] pow ? val.pow(pow) : val end # ('^' Power)? 
# # (rule _Power_2 "4.2" (seq "^" Power)) production(:_Power_2) {|value| value.last[:Power]} # [5] Value := [0-9]+ | '(' Expr ')' # # (rule Value "5" (alt NUMBER _Value_1)) # (rule _Value_1 "5.1" (seq "(" Expr ")")) production(:Value, clear_packrat: true) do |value| case value when String then value.to_i when Array then value[1][:Expr] end end # Terminals don't require any special processing, but we could optimize by creating a regular expression such as `/\d+/`. # (terminal NUMBER "6" (plus _NUMBER_1)) # (terminal _NUMBER_1 "6.1" (range "0-9"))) # Instantiate the calculator using the EBNF grammar. # # @param [Hash{Symbol => Object}] options # @option options [Boolean] :trace # Trace level. 0(debug), 1(info), 2(warn), 3(error). def initialize(**options) # Intantiate grammar from ebnf.ebnf ebnf = File.expand_path("../calc.ebnf", __FILE__) # Perform PEG-specific transformation to the associated rules, which will be passed directly to the parser. @rules = EBNF.parse(File.open(ebnf)).make_peg.ast @options = options.dup # If the `trace` option is set, instantiate a logger for collecting trace information. if @options.has_key?(:trace) @options[:logger] = Logger.new(STDERR) @options[:logger].level = @options[:trace] @options[:logger].formatter = lambda {|severity, datetime, progname, msg| "#{severity} #{msg}\n"} end end # Evaluate an expression # # Evaluates each line of input. # # @param [String] input def evaluate(input) result = parse(input, :Expr, @rules, **@options) # This is called for each Expr puts result end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/examples/ebnf-peg-parser/meta.rb
examples/ebnf-peg-parser/meta.rb
# This file is automatically generated by ebnf version 2.0.0 # Derived from ../../etc/ebnf.ebnf module EBNFPegMeta RULES = [ EBNF::Rule.new(:ebnf, "1", [:star, :_ebnf_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_ebnf_1, "1.1", [:alt, :declaration, :rule]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:declaration, "2", [:alt, "@terminals", :pass]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:rule, "3", [:seq, :LHS, :expression]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:expression, "4", [:seq, :alt]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:alt, "5", [:seq, :seq, :_alt_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_alt_1, "5.1", [:star, :_alt_2]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_alt_2, "5.2", [:seq, "|", :seq]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:seq, "6", [:plus, :diff]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:diff, "7", [:seq, :postfix, :_diff_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_diff_1, "7.1", [:opt, :_diff_2]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_diff_2, "7.2", [:seq, "-", :postfix]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:postfix, "8", [:seq, :primary, :_postfix_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_postfix_1, "8.1", [:opt, :POSTFIX]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:primary, "9", [:alt, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, :_primary_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_primary_1, "9.1", [:seq, "(", :expression, ")"]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:pass, "10", [:seq, "@pass", :expression]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_terminals, nil, [:seq], kind: :terminals).extend(EBNF::PEG::Rule), EBNF::Rule.new(:LHS, "11", [:seq, :_LHS_1, :SYMBOL, :_LHS_2, "::="], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_LHS_1, "11.1", [:opt, :_LHS_3], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_LHS_3, "11.3", [:seq, "[", :SYMBOL, "]", :_LHS_4], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_LHS_4, "11.4", [:plus, " "], kind: :terminal).extend(EBNF::PEG::Rule), 
EBNF::Rule.new(:_LHS_2, "11.2", [:star, " "], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:SYMBOL, "12", [:plus, :_SYMBOL_1], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_SYMBOL_1, "12.1", [:alt, :_SYMBOL_2, :_SYMBOL_3, :_SYMBOL_4, "_", "."], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_SYMBOL_2, "12.2", [:range, "a-z"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_SYMBOL_3, "12.3", [:range, "A-Z"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_SYMBOL_4, "12.4", [:range, "0-9"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:HEX, "13", [:seq, "#x", :_HEX_1], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEX_1, "13.1", [:plus, :_HEX_2], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEX_2, "13.2", [:alt, :_HEX_3, :_HEX_4, :_HEX_5], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEX_3, "13.3", [:range, "a-f"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEX_4, "13.4", [:range, "A-F"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEX_5, "13.5", [:range, "0-9"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:RANGE, "14", [:seq, "[", :_RANGE_1, :_RANGE_2, :_RANGE_3], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_1, "14.1", [:plus, :_RANGE_4], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_4, "14.4", [:alt, :_RANGE_5, :_RANGE_6, :R_CHAR, :HEX], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_5, "14.5", [:seq, :R_CHAR, "-", :R_CHAR], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_6, "14.6", [:seq, :HEX, "-", :HEX], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_2, "14.2", [:opt, "-"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_3, "14.3", [:diff, "]", :LHS], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:O_RANGE, "15", [:seq, "[^", :_O_RANGE_1, :_O_RANGE_2, "]"], kind: 
:terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_RANGE_1, "15.1", [:plus, :_O_RANGE_3], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_RANGE_3, "15.3", [:alt, :_O_RANGE_4, :_O_RANGE_5, :R_CHAR, :HEX], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_RANGE_4, "15.4", [:seq, :R_CHAR, "-", :R_CHAR], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_RANGE_5, "15.5", [:seq, :HEX, "-", :HEX], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_RANGE_2, "15.2", [:opt, "-"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:STRING1, "16", [:seq, "\"", :_STRING1_1, "\""], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_STRING1_1, "16.1", [:star, :_STRING1_2], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_STRING1_2, "16.2", [:diff, :CHAR, "\""], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:STRING2, "17", [:seq, "'", :_STRING2_1, "'"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_STRING2_1, "17.1", [:star, :_STRING2_2], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_STRING2_2, "17.2", [:diff, :CHAR, "'"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:CHAR, "18", [:alt, :_CHAR_1, :_CHAR_2, :_CHAR_3, :_CHAR_4], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_CHAR_1, "18.1", [:range, "#x9#xA#xD"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_CHAR_2, "18.2", [:range, "#x20-#xD7FF"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_CHAR_3, "18.3", [:range, "#xE000-#xFFFD"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_CHAR_4, "18.4", [:range, "#x10000-#x10FFFF"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:R_CHAR, "19", [:diff, :CHAR, :_R_CHAR_1], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_R_CHAR_1, "19.1", [:alt, "]", "-", :HEX], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:POSTFIX, "20", [:range, "?*+"], kind: :terminal).extend(EBNF::PEG::Rule), 
EBNF::Rule.new(:PASS, "21", [:alt, :_PASS_1, :_PASS_2, :_PASS_3, :_PASS_4], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_1, "21.1", [:range, "#x9#xA#xD#x20"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_2, "21.2", [:seq, :_PASS_5, :_PASS_6], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_5, "21.5", [:alt, :_PASS_7, "//"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_7, "21.7", [:diff, "#", "#x"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_6, "21.6", [:star, :_PASS_8], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_8, "21.8", [:range, "^#xA#xD"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_3, "21.3", [:seq, "/*", :_PASS_9, "*/"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_9, "21.9", [:star, :_PASS_10], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_10, "21.10", [:alt, :_PASS_11, :_PASS_12], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_11, "21.11", [:opt, :_PASS_13], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_13, "21.13", [:seq, "*", :_PASS_14], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_14, "21.14", [:range, "^/"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_12, "21.12", [:range, "^*"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_4, "21.4", [:seq, "(*", :_PASS_15, "*)"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_15, "21.15", [:star, :_PASS_16], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_16, "21.16", [:alt, :_PASS_17, :_PASS_18], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_17, "21.17", [:opt, :_PASS_19], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_19, "21.19", [:seq, "*", :_PASS_20], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_20, "21.20", [:range, "^)"], kind: :terminal).extend(EBNF::PEG::Rule), 
EBNF::Rule.new(:_PASS_18, "21.18", [:range, "^*"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_pass, nil, [:seq, :PASS], kind: :pass).extend(EBNF::PEG::Rule), ] end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/examples/ebnf-peg-parser/parser.rb
examples/ebnf-peg-parser/parser.rb
# # EBNF Parser for EBNF. # # Produces an Abstract Synatx Tree in S-Expression form for the input grammar file require 'ebnf' require 'ebnf/terminals' require 'ebnf/peg/parser' require 'meta' require 'sxp' require 'logger' class EBNFPegParser include EBNF::PEG::Parser include EBNF::Terminals # Abstract syntax tree from parse # # @return [Array<EBNF::Rule>] attr_reader :ast # ## Terminals # Define rules for Terminals, placing results on the input stack, making them available to upstream non-Terminal rules. # # Terminals are defined with a symbol matching the associated rule name, and an optional (although strongly encouraged) regular expression used to match the head of the input stream. # # The result of the terminal block is the semantic value of that terminal, which if often a string, but may be any instance which reflects the semantic interpretation of that terminal. # # The `value` parameter is the value matched by the regexp, if defined, or by the sub-terminal rules otherwise. # # The `prod` parameter is the name of the parent rule for which this terminal is matched, which may have a bearing in some circumstances, although not used in this example. # # If no block is provided, then the value which would have been passed to the block is used as the result directly. # Match the Left hand side of a rule or terminal # # [11] LHS ::= ('[' SYMBOL+ ']' ' '+)? SYMBOL ' '* '::=' terminal(:LHS, LHS) do |value, prod| value.to_s.scan(/(?:\[([^\]]+)\])?\s*(\w+)\s*::=/).first end # Match `SYMBOL` terminal # # [12] SYMBOL ::= ([a-z] | [A-Z] | [0-9] | '_' | '.')+ terminal(:SYMBOL, SYMBOL) do |value| value.to_sym end # Match `HEX` terminal # # [13] HEX ::= #x' ([a-f] | [A-F] | [0-9])+ terminal(:HEX, HEX) do |value| [:hex, value] end # Terminal for `RANGE` is matched as part of a `primary` rule. 
# # [14] `RANGE` ::= '[' (R_CHAR '-' R_CHAR) | (HEX '-' HEX) ']' terminal(:RANGE, RANGE) do |value| [:range, value[1..-2]] end # Terminal for `O_RANGE` is matched as part of a `primary` rule. # # [15] O_RANGE ::= '[^' (R_CHAR '-' R_CHAR) | (HEX '-' HEX) ']' terminal(:O_RANGE, O_RANGE) do |value| [:range, value[1..-2]] end # Match double quote string # # [16] STRING1 ::= '"' (CHAR - '"')* '"' terminal(:STRING1, STRING1) do |value| value[1..-2].tap {|s| s.quote_style = :dquote} end # Match single quote string # # [17] STRING2 ::= "'" (CHAR - "'")* "'" terminal(:STRING2, STRING2) do |value| value[1..-2].tap {|s| s.quote_style = :squote} end # The `CHAR` and `R_CHAR` productions are not used explicitly # Match `POSTFIX` terminal # # [20] POSTFIX ::= [?*+] terminal(:POSTFIX, POSTFIX) # The `PASS` productions is not used explicitly # ## Non-terminal productions # Define productions for non-Termainals. This can include `start_production` as well as `production` to hook into rule start and end. In some cases, we need to use sub-productions as generated when turning EBNF into PEG. # # Productions are defined with a symbol matching the associated rule name. # # The result of the productions is typically the abstract syntax tree matched by the rule, so far, but could be a specific semantic value, or could be ignored with the result being returned via the `callback`. # # The `value` parameter is the result returned from child productions # # The `data` parameter other data which may be returned by child productions placing information onto their input (unused in this example). # # The `callback` parameter provides access to a callback defined in the call to `parse`). # Production for end of `declaration` non-terminal. # # Look for `@terminals` to change parser state to parsing terminals. # # Clears the packrat parser when called. # # `@pass` is ignored here. 
# # [2] declaration ::= '@terminals' | pass production(:declaration, clear_packrat: true) do |value, data, callback| # value contains a declaration. # Invoke callback callback.call(:terminals) if value == '@terminals' nil end # Production for end of `rule` non-terminal. # # By setting `as_hash: true` in the `start_production`, the `value` parameter will be in the form `{LHS: "v", expression: "v"}`. Otherwise, it would be expressed using an array of hashes of the form `[{LHS: "v"}, {expression: "v"}]`. # # Clears the packrat parser when called. # # Create rule from expression value and pass to callback # # [3] rule ::= LHS expression start_production(:rule, as_hash: true) production(:rule, clear_packrat: true) do |value, data, callback| # value contains an expression. # Invoke callback id, sym = value[:LHS] expression = value[:expression] callback.call(:rule, EBNF::Rule.new(sym.to_sym, id, expression)) nil end # Production for end of `expression` non-terminal. # Passes through the optimized value of the alt production as follows: # # The `value` parameter, is of the form `[{alt: "v"}]`. # # [:alt foo] => foo # [:alt foo bar] => [:alt foo bar] # # [4] expression ::= alt production(:expression) do |value| value.first[:alt] end # Production for end of `alt` non-terminal. # Passes through the optimized value of the seq production as follows: # # The `value` parameter, is of the form `{seq: "v", _alt_1: "v"}`. # # [:seq foo] => foo # [:seq foo bar] => [:seq foo bar] # # Note that this also may just pass through from `_alt_1` # # [5] alt ::= seq ('|' seq)* start_production(:alt, as_hash: true) production(:alt) do |value| if value[:_alt_1].length > 0 [:alt, value[:seq]] + value[:_alt_1] else value[:seq] end end # Production for end of `_alt_1` non-terminal. # Used to collect the `('|' seq)*` portion of the `alt` non-terminal: # # The `value` parameter, is of the form `[{seq: ["v"]}]`. 
# # [5] _alt_1 ::= ('|' seq)* production(:_alt_1) do |value| value.map {|a1| a1.last[:seq]}.compact # Get rid of '|' end # Production for end of `seq` non-terminal. # Passes through the optimized value of the `diff` production as follows: # # The `value` parameter, is an array of values, which cannot be empty. # # [:diff foo] => foo # [:diff foo bar] => [:diff foo bar] # # Note that this also may just pass through from `_seq_1` # # [6] seq ::= diff+ production(:seq) do |value| value.length == 1 ? value.first : ([:seq] + value) end # `Diff` production returns concatenated postfix values # # The `value` parameter, is of the form `{postfix: "v", _diff_1: "v"}`. # # [7] diff ::= postfix ('-' postfix)? start_production(:diff, as_hash: true) production(:diff) do |value| if value[:_diff_1] [:diff, value[:postfix], value[:_diff_1]] else value[:postfix] end end production(:_diff_1) do |value| value.last[:postfix] if value end # Production for end of `postfix` non-terminal. # Either returns the `primary` production value, or as modified by the `postfix`. # # The `value` parameter, is of the form `{primary: "v", _postfix_1: "v"}`. # # [:primary] => [:primary] # [:primary, '*'] => [:star, :primary] # [:primary, '+'] => [:plus, :primary] # [:primary, '?'] => [:opt, :primary] # # [8] postfix ::= primary POSTFIX? start_production(:postfix, as_hash: true) production(:postfix) do |value| # Push result onto input stack, as the `diff` production can have some number of `postfix` values that are applied recursively case value[:_postfix_1] when "*" then [:star, value[:primary]] when "+" then [:plus, value[:primary]] when "?" then [:opt, value[:primary]] else value[:primary] end end # Production for end of `primary` non-terminal. # Places `:primary` on the stack # # The `value` parameter, is either a string (for a terminal) or an array of the form `['(': '(', expression: "v", ')', ')']`. # # This may either be a terminal, or the result of an `expression`. 
# # [9] primary ::= HEX # | SYMBOL # | RANGE # | ENUM # | O_RANGE # | O_ENUM # | STRING1 # | STRING2 # | '(' expression ')' production(:primary) do |value| Array(value).length > 2 ? value[1][:expression] : value end # Production for end of pass non-terminal. # # [10] pass ::= '@pass' expression production(:pass) do |value, data, callback| # Invoke callback callback.call(:pass, value.last[:expression]) end # ## Parser invocation. # On start, yield ourselves if a block is given, otherwise, return this parser instance # # @param [#read, #to_s] input # @param [Hash{Symbol => Object}] options # @option options [Boolean] :level # Trace level. 0(debug), 1(info), 2(warn), 3(error). # @return [EBNFParser] def initialize(input, **options, &block) # If the `level` option is set, instantiate a logger for collecting trace information. if options.key?(:level) options[:logger] ||= Logger.new(STDERR). tap {|x| x.level = options[:level]}. tap {|x| x.formatter = lambda {|severity, datetime, progname, msg| "#{severity} #{msg}\n"}} end # Read input, if necessary, which will be used in a Scanner. @input = input.respond_to?(:read) ? input.read : input.to_s parsing_terminals = false @ast = [] parse(@input, :ebnf, EBNFPegMeta::RULES, # Use an optimized Regexp for whitespace whitespace: EBNF::Terminals::PASS, **options ) do |context, *data| rule = case context when :terminals # After parsing `@terminals` # This changes the state of the parser to treat subsequent rules as terminals. parsing_terminals = true rule = EBNF::Rule.new(nil, nil, data.first, kind: :terminals) when :pass # After parsing `@pass` # This defines a specific rule for whitespace. rule = EBNF::Rule.new(nil, nil, data.first, kind: :pass) when :rule # A rule which has already been turned into a `Rule` object. 
rule = data.first rule.kind = :terminal if parsing_terminals rule end @ast << rule if rule end @ast end # Output formatted S-Expression of grammar # # @return [String] def to_sxp(**options) require 'sxp' unless defined?(SXP) # Output rules as a formatted S-Expression SXP::Generator.string(@ast.map(&:for_sxp)) end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf.rb
lib/ebnf.rb
require 'sxp' unless defined?(SXP) module EBNF autoload :ABNF, "ebnf/abnf" autoload :Base, "ebnf/base" autoload :BNF, "ebnf/bnf" autoload :ISOEBNF, "ebnf/isoebnf" autoload :LL1, "ebnf/ll1" autoload :Native, "ebnf/native" autoload :Parser, "ebnf/parser" autoload :PEG, "ebnf/peg" autoload :Rule, "ebnf/rule" autoload :Terminals,"ebnf/terminals" autoload :Unescape, "ebnf/unescape" autoload :Writer, "ebnf/writer" autoload :VERSION, "ebnf/version" ## # Parse the given EBNF `query` input. # # @example # ebnf = EBNF.parse(input) # # @param [#read, String, #to_s] input # @param [Hash{Symbol => Object}] options # @return [EBNF::Base] # @raise [Exception] on invalid input def self.parse(input, **options) ::EBNF::Base.new(input, **options) end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/version.rb
lib/ebnf/version.rb
module EBNF module VERSION VERSION_FILE = File.join(File.expand_path(File.dirname(__FILE__)), "..", "..", "VERSION") MAJOR, MINOR, TINY, EXTRA = File.read(VERSION_FILE).chomp.split(".") STRING = [MAJOR, MINOR, TINY, EXTRA].compact.join('.') ## # @return [String] def self.to_s() STRING end ## # @return [String] def self.to_str() STRING end ## # @return [Array(Integer, Integer, Integer)] def self.to_a() [MAJOR, MINOR, TINY] end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/peg.rb
lib/ebnf/peg.rb
module EBNF module PEG autoload :Parser, 'ebnf/peg/parser' autoload :Rule, 'ebnf/peg/rule' ## # Transform EBNF Rule set for PEG parsing: # # * Transform each rule into a set of sub-rules extracting unnamed sequences into new rules, using {Rule#to_peg}. # @return [ENBF] self def make_peg progress("make_peg") {"Start: #{@ast.length} rules"} new_ast = [] ast.each do |rule| debug("make_peg") {"expand from: #{rule.inspect}"} new_rules = rule.to_peg debug(" => ") {new_rules.map(&:sym).join(', ')} new_ast += new_rules end @ast = new_ast progress("make_peg") {"End: #{@ast.length} rules"} self end ## # Output Ruby parser files for PEG parsing # # @param [IO, StringIO] output def to_ruby_peg(output, **options) output.puts " RULES = [" ast.each do |rule| output.puts " " + rule.to_ruby + (rule.is_a?(EBNF::PEG::Rule) ? '.extend(EBNF::PEG::Rule)' : '') + ',' end output.puts " ]" end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/abnf.rb
lib/ebnf/abnf.rb
require_relative 'abnf/core' require_relative 'abnf/meta' require 'logger' # ABNF parser # Parses ABNF into an array of {EBNF::Rule}. module EBNF class ABNF include EBNF::PEG::Parser # Regular expressions for both "Core" and ABNF-specific terminals. ALPHA = %r{[\x41-\x5A\x61-\x7A]} VCHAR = %r{[\x20-\x7E]} WSP = %r{[\x20\x09]} CRLF = %r{\x0D?\x0A} COMMENT = %r{;(?:#{WSP}|#{VCHAR})*#{CRLF}} C_NL = %r{#{COMMENT}|#{CRLF}} C_WSP = %r{#{WSP}|(?:#{C_NL}#{WSP})} ## # Hash of generated {EBNF::Rule} objects by symbol # # @return [Hash{Symbol => EBNF::Rule}] attr_reader :parsed_rules ## # The following ABNF grammar rules are treated as terminals. # `rulename ::= ALPHA (ALPHA | DIGIT | "-")*` terminal(:rulename, /#{ALPHA}(?:#{ALPHA}|[0-9-])*/) do |value| value.to_sym end # `defined_as ::= c_wsp* ("=" | "=/") c_wsp*` terminal(:defined_as, /#{C_WSP}*=\/?#{C_WSP}*/) {|value| value.strip} # `quoted_string::= DQUOTE [#x20-#x21#x23-#x7E]* DQUOTE` terminal(:quoted_string, /"[\x20-\x21\x23-\x7E]*"/) do |value| value[1..-2] end # `bin_val ::= "b" BIT+ (("." BIT+)+ | ("-" BIT+))?` terminal(:bin_val, /b[01]+(?:(?:(?:\.[01]+)+)|(?:-[01]+))?/) do |value| if value.include?('.') # Interpret segments in binary creating a sequence of hex characters or a string hex_or_string(value[1..-1].split('.').map {|b| b.to_i(base=2).chr(Encoding::UTF_8)}) elsif value.include?('-') # Interpret as a range [:range, value[1..-1].split('-').map {|b| "#x%x" % b.to_i(base=2)}.join("-")] else # Interpret as a single HEX character [:hex, "#x%x" % value[1..-1].to_i(base=2)] end end # `dec_val ::= "d" DIGIT+ (("." 
DIGIT+)+ | ("-" DIGIT+))?` terminal(:dec_val, /d[0-9]+(?:(?:(?:\.[0-9]+)+)|(?:-[0-9]+))?/) do |value| if value.include?('.') # Interpret segments in decimal creating a sequence of hex characters or a string hex_or_string(value[1..-1].split('.').map {|b| b.to_i.chr(Encoding::UTF_8)}) elsif value.include?('-') # Interpret as a range [:range, value[1..-1].split('-').map {|d| "#x%x" % d.to_i}.join("-")] else # Interpret as a single HEX character [:hex, "#x%x" % value[1..-1].to_i] end end # `hex_val ::= "x" HEXDIG+ (("." HEXDIG+)+ | ("-" HEXDIG+))?` terminal(:hex_val, /x[0-9A-F]+(?:(?:(?:\.[0-9A-F]+)+)|(?:-[0-9A-F]+))?/i) do |value| if value.include?('.') # Interpret segments in hexadecimal creating a sequence of hex characters or a string hex_or_string(value[1..-1].split('.').map {|b| b.to_i(base=16).chr(Encoding::UTF_8)}) elsif value.include?('-') # Interpret as a range [:range, value[1..-1].split('-').map {|h| "#x%x" % h.to_i(base=16)}.join("-")] else # Interpret as a single HEX character [:hex, "#x#{value[1..-1]}"] end end # `c_wsp ::= WSP | (c_nl WSP)` terminal(:c_wsp, C_WSP) # `c_nl ::= comment | CRLF` terminal(:c_nl, C_NL) # `DIGIT ::= [#x30-#x39]` terminal(:DIGIT, /\d/) # ## Non-terminal productions # The `start_production` on `:rule` allows the parser to present the value as a single Hash, rather than an array of individual hashes. start_production(:rule, as_hash: true) # `rule ::= rulename defined_as elements c_nl` production(:rule) do |value| # value contains an expression. # Invoke callback sym = value[:rulename] elements = value[:elements] if value[:defined_as] == "=/" # append to rule alternate rule = parsed_rules.fetch(sym) {raise "No existing rule found for #{sym}"} rule.expr = [:alt, rule.expr] unless rule.alt? 
if elements.is_a?(Array) && elements.first == :alt # append alternatives to rule rule.expr.concat(elements[1..-1]) else # add elements as last alternative rule.expr.push(elements) end else # There shouldn't be an existing rule raise "Redefining rule #{sym}" if parsed_rules.has_key?(sym) parsed_rules[sym] = EBNF::Rule.new(sym.to_sym, nil, elements) end progress(:rule, level: 2) {parsed_rules[sym].to_sxp} sym end # `elements ::= alternation c_wsp*` production(:elements) do |value| value.first[:alternation] end # `alternation ::= concatenation (c_wsp* "/" c_wsp* concatenation)*` production(:alternation) do |value| unless value.last[:_alternation_1].empty? [:alt, value.first[:concatenation]] + value.last[:_alternation_1] else value.first[:concatenation] end end # The `_aleteration_2` rule comes from the expanded PEG grammar and serves as an opportunity to custommize the values presented to the `aleteration` rule. production(:_alternation_2) do |value| if Array(value.last[:concatenation]).first == :alt value.last[:concatenation][1..-1] else [value.last[:concatenation]] end value.last[:concatenation] end # `concatenation::= repetition (c_wsp+ repetition)*` production(:concatenation) do |value| unless value.last[:_concatenation_1].empty? [:seq, value.first[:repetition]] + value.last[:_concatenation_1] else value.first[:repetition] end end start_production(:_concatenation_2, as_hash: true) production(:_concatenation_2) do |value| value[:repetition] end # `repetition ::= repeat? 
element` production(:repetition) do |value| rept = value.first[:_repetition_1] elt = value.last[:element] case rept when [0, '*'] then [:star, elt] when [1, '*'] then [:plus, elt] when nil then elt else [:rept, rept.first, rept.last, elt] end end # `repeat ::= DIGIT+ | (DIGIT* "*" DIGIT*)` production(:repeat) do |value| if value.is_a?(Integer) [value, value] else [value.first, value.last] end end start_production(:_repeat_1, as_hash: true) production(:_repeat_1) {|value| value.values} production(:_repeat_2) {|value| value.join("").to_i} production(:_repeat_3) {|value| value.join("").to_i} production(:_repeat_4) {|value| value.length > 0 ? value.join("").to_i : '*'} # `element ::= rulename | group | option | char_val | num_val | prose_val` production(:element) do |value| value end # `group ::= "(" c_wsp* alternation c_wsp* ")"` start_production(:group, as_hash: true) production(:group) do |value| value[:alternation] end # `option ::= "[" c_wsp* alternation c_wsp* "]"` start_production(:option, as_hash: true) production(:option) do |value| [:opt, value[:alternation]] end # `case_insensitive_string ::= "%i"? quoted_string` production(:case_insensitive_string) do |value| str = value.last[:quoted_string] if str.match?(/[[:alpha:]]/) # Only need to use case-insensitive if there are alphabetic characters in the string. [:istr, value.last[:quoted_string].tap {|s| s.quote_style = :dquote}] else value.last[:quoted_string].tap {|s| s.quote_style = :dquote} end end # `case_sensitive_string ::= "%s" quoted_string` production(:case_sensitive_string) do |value| value.last[:quoted_string].tap {|s| s.quote_style = :squote} end # `num_val ::= "%" (bin_val | dec_val | hex_val)` production(:num_val) do |value| value.last[:_num_val_1] end # ## Parser invocation. # On start, yield ourselves if a block is given, otherwise, return this parser instance # # @param [#read, #to_s] input # @param [Hash{Symbol => Object}] options # @option options [Boolean] :level # Trace level. 
0(debug), 1(info), 2(warn), 3(error). # @return [EBNFParser] def initialize(input, **options) # If the `level` option is set, instantiate a logger for collecting trace information. if options.key?(:level) options[:logger] ||= Logger.new(STDERR). tap {|x| x.level = options[:level]}. tap {|x| x.formatter = lambda {|severity, datetime, progname, msg| "#{severity} #{msg}\n"}} end # Read input, if necessary, which will be used in a Scanner. @input = input.respond_to?(:read) ? input.read : input.to_s @parsed_rules = {} # Parses into `@parsed_rules` parse(@input, :rulelist, # Starting rule ABNFMeta::RULES, # PEG rules whitespace: '', # No implicit whitespace **options) rescue EBNF::PEG::Parser::Error => e raise SyntaxError, e.message end ## # The AST includes the parsed rules along with built-in rules for ABNF used within the parsed grammar. # # @return [Array<EBNF::Rule>] def ast # Add built-in rules for standard ABNF rules not parsed_rules.values.map(&:symbols).flatten.uniq.each do |sym| rule = ABNFCore::RULES.detect {|r| r.sym == sym} parsed_rules[sym] ||= rule if rule end parsed_rules.values end private # Generate a combination of seq and string to represent a sequence of characters # # @param [Array<String>] characters # @return [String,Array] def hex_or_string(characters) seq = [:seq] str_result = "" characters.each do |c| if VCHAR.match?(c) str_result << c else if str_result.length > 0 seq << str_result str_result = "" end seq << [:hex, "#x%x" % c.codepoints.first] end end seq << str_result if str_result.length > 0 # Either return the sequence, or a string if seq.length == 2 && seq.last.is_a?(String) seq.last else seq end end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/parser.rb
lib/ebnf/parser.rb
require_relative 'ebnf/meta' require 'logger' module EBNF class Parser include EBNF::PEG::Parser include EBNF::Terminals # Abstract syntax tree from parse # # @return [Array<EBNF::Rule>] attr_reader :ast # Set on first rule attr_reader :lhs_includes_identifier # Regular expression to match a [...] range, which may be distinguisehd from an LHS attr_reader :range # ## Terminals # Define rules for Terminals, placing results on the input stack, making them available to upstream non-Terminal rules. # # Terminals are defined with a symbol matching the associated rule name, and an optional (although strongly encouraged) regular expression used to match the head of the input stream. # # The result of the terminal block is the semantic value of that terminal, which if often a string, but may be any instance which reflects the semantic interpretation of that terminal. # # The `value` parameter is the value matched by the regexp, if defined, or by the sub-terminal rules otherwise. # # The `prod` parameter is the name of the parent rule for which this terminal is matched, which may have a bearing in some circumstances, although not used in this example. # # If no block is provided, then the value which would have been passed to the block is used as the result directly. # Match the Left hand side of a rule or terminal # # [11] LHS ::= ('[' SYMBOL+ ']' ' '+)? <? SYMBOL >? ' '* '::=' terminal(:LHS, LHS) do |value, prod| md = value.to_s.scan(/(?:\[([^\]]+)\])?\s*<?(\w+)>?\s*::=/).first if @lhs_includes_identifier.nil? @lhs_includes_identifier = !md[0].nil? @range = md[0] ? 
RANGE_NOT_LHS : RANGE elsif @lhs_includes_identifier && !md[0] error("LHS", "Rule does not begin with a [xxx] identifier, which was established on the first rule", production: :LHS, rest: value) elsif !@lhs_includes_identifier && md[0] error("LHS", "Rule begins with a [xxx] identifier, which was not established on the first rule", production: :LHS, rest: value) end md end # Match `SYMBOL` terminal # # [12] SYMBOL ::= '<' O_SYMBOL '>' | O_SYMBOL # [12a] O_SYMBOL ::= ([a-z] | [A-Z] | [0-9] | '_' | '.')+ terminal(:SYMBOL, SYMBOL) do |value| value = value[1..-2] if value.start_with?('<') && value.end_with?('>') value.to_sym end # Match `HEX` terminal # # [13] HEX ::= #x' ([a-f] | [A-F] | [0-9])+ terminal(:HEX, HEX) do |value| [:hex, value] end # Terminal for `RANGE` is matched as part of a `primary` rule. # Note that this won't match if rules include identifiers. # # [14] RANGE ::= '[' ((R_CHAR '-' R_CHAR) | (HEX '-' HEX) | R_CHAR | HEX)+ '-'? ']' terminal(:RANGE, proc {@range}) do |value| [:range, value[1..-2]] end # Terminal for `O_RANGE` is matched as part of a `primary` rule. # # [15] O_RANGE ::= '[^' ((R_CHAR '-' R_CHAR) | (HEX '-' HEX) | R_CHAR | HEX)+ '-'? ']' terminal(:O_RANGE, O_RANGE) do |value| [:range, value[1..-2]] end # Match double quote string # # [16] STRING1 ::= '"' (CHAR - '"')* '"' terminal(:STRING1, STRING1) do |value| using ::EBNF value[1..-2].tap {|s| s.quote_style = :dquote} end # Match single quote string # # [17] STRING2 ::= "'" (CHAR - "'")* "'" terminal(:STRING2, STRING2) do |value| using ::EBNF value[1..-2].tap {|s| s.quote_style = :squote} end # The `CHAR` and `R_CHAR` productions are not used explicitly # Match `POSTFIX` terminal # # [20] POSTFIX ::= [?*+] terminal(:POSTFIX, POSTFIX) # The `PASS` productions is not used explicitly # ## Non-terminal productions # Define productions for non-Termainals. This can include `start_production` as well as `production` to hook into rule start and end. 
In some cases, we need to use sub-productions as generated when turning EBNF into PEG. # # Productions are defined with a symbol matching the associated rule name. # # The result of the productions is typically the abstract syntax tree matched by the rule, so far, but could be a specific semantic value, or could be ignored with the result being returned via the `callback`. # # The `value` parameter is the result returned from child productions # # The `data` parameter other data which may be returned by child productions placing information onto their input (unused in this example). # # The `callback` parameter provides access to a callback defined in the call to `parse`). # Production for end of `declaration` non-terminal. # # Look for `@terminals` to change parser state to parsing terminals. # # Clears the packrat parser when called. # # `@pass` is ignored here. # # [2] declaration ::= '@terminals' | pass production(:declaration, clear_packrat: true) do |value, data, callback| # value contains a declaration. # Invoke callback callback.call(:terminals) if value == '@terminals' nil end # Production for end of `rule` non-terminal. # # By setting `as_hash: true` in the `start_production`, the `value` parameter will be in the form `{LHS: "v", expression: "v"}`. Otherwise, it would be expressed using an array of hashes of the form `[{LHS: "v"}, {expression: "v"}]`. # # Clears the packrat parser when called. # # Create rule from expression value and pass to callback # # [3] rule ::= LHS expression start_production(:rule, as_hash: true) production(:rule, clear_packrat: true) do |value, data, callback| # value contains an expression. # Invoke callback id, sym = value[:LHS] expression = value[:expression] rule = EBNF::Rule.new(sym.to_sym, id, expression) progress(:rule, rule.to_sxp) callback.call(:rule, rule) nil end # Production for end of `expression` non-terminal. 
# Passes through the optimized value of the alt production as follows: # # The `value` parameter, is of the form `[{alt: "v"}]`. # # [:alt foo] => foo # [:alt foo bar] => [:alt foo bar] # # [4] expression ::= alt production(:expression) do |value| value.first[:alt] end # Production for end of `alt` non-terminal. # Passes through the optimized value of the seq production as follows: # # The `value` parameter, is of the form `{seq: "v", _alt_1: "v"}`. # # [:seq foo] => foo # [:seq foo bar] => [:seq foo bar] # # Note that this also may just pass through from `_alt_1` # # [5] alt ::= seq ('|' seq)* start_production(:alt, as_hash: true) production(:alt) do |value| if value[:_alt_1].length > 0 [:alt, value[:seq]] + value[:_alt_1] else value[:seq] end end # Production for end of `_alt_1` non-terminal. # Used to collect the `('|' seq)*` portion of the `alt` non-terminal: # # The `value` parameter, is of the form `[{seq: ["v"]}]`. # # [5] _alt_1 ::= ('|' seq)* production(:_alt_1) do |value| value.map {|a1| a1.last[:seq]}.compact # Get rid of '|' end # Production for end of `seq` non-terminal. # Passes through the optimized value of the `diff` production as follows: # # The `value` parameter, is an array of values, which cannot be empty. # # [:diff foo] => foo # [:diff foo bar] => [:diff foo bar] # # Note that this also may just pass through from `_seq_1` # # [6] seq ::= diff+ production(:seq) do |value| value.length == 1 ? value.first : ([:seq] + value) end # `Diff` production returns concatenated postfix values # # The `value` parameter, is of the form `{postfix: "v", _diff_1: "v"}`. # # [7] diff ::= postfix ('-' postfix)? start_production(:diff, as_hash: true) production(:diff) do |value| if value[:_diff_1] [:diff, value[:postfix], value[:_diff_1]] else value[:postfix] end end production(:_diff_1) do |value| value.last[:postfix] if value end # Production for end of `postfix` non-terminal. # Either returns the `primary` production value, or as modified by the `postfix`. 
# # The `value` parameter, is of the form `{primary: "v", _postfix_1: "v"}`. # # [:primary] => [:primary] # [:primary, '*'] => [:star, :primary] # [:primary, '+'] => [:plus, :primary] # [:primary, '?'] => [:opt, :primary] # # [8] postfix ::= primary POSTFIX? start_production(:postfix, as_hash: true) production(:postfix) do |value| # Push result onto input stack, as the `diff` production can have some number of `postfix` values that are applied recursively case value[:_postfix_1] when "*" then [:star, value[:primary]] when "+" then [:plus, value[:primary]] when "?" then [:opt, value[:primary]] else value[:primary] end end # Production for end of `primary` non-terminal. # Places `:primary` on the stack # # The `value` parameter, is either a string (for a terminal) or an array of the form `['(': '(', expression: "v", ')', ')']`. # # This may either be a terminal, or the result of an `expression`. # # [9] primary ::= HEX # | SYMBOL # | RANGE # | O_RANGE # | STRING1 # | STRING2 # | '(' expression ')' production(:primary) do |value| Array(value).length > 2 ? value[1][:expression] : value end # Production for end of pass non-terminal. # # [10] pass ::= '@pass' expression production(:pass) do |value, data, callback| # Invoke callback callback.call(:pass, value.last[:expression]) end # ## Parser invocation. # On start, yield ourselves if a block is given, otherwise, return this parser instance # # @param [#read, #to_s] input # @param [Hash{Symbol => Object}] options # @option options [Boolean] :level # Trace level. 0(debug), 1(info), 2(warn), 3(error). # @return [EBNFParser] def initialize(input, **options, &block) # If the `level` option is set, instantiate a logger for collecting trace information. if options.key?(:level) options[:logger] ||= Logger.new(STDERR). tap {|x| x.level = options[:level]}. tap {|x| x.formatter = lambda {|severity, datetime, progname, msg| "#{severity} #{msg}\n"}} end # This is established on the first rule. 
self.class.instance_variable_set(:@lhs_includes_identifier, nil) # Read input, if necessary, which will be used in a Scanner. @input = input.respond_to?(:read) ? input.read : input.to_s parsing_terminals = false @ast = [] parse(@input, :ebnf, EBNFMeta::RULES, # Use an optimized Regexp for whitespace whitespace: EBNF::Terminals::PASS, **options ) do |context, *data| rule = case context when :terminals # After parsing `@terminals` # This changes the state of the parser to treat subsequent rules as terminals. parsing_terminals = true rule = EBNF::Rule.new(nil, nil, data.first, kind: :terminals) when :pass # After parsing `@pass` # This defines a specific rule for whitespace. rule = EBNF::Rule.new(nil, nil, data.first, kind: :pass) when :rule # A rule which has already been turned into a `Rule` object. rule = data.first rule.kind = :terminal if parsing_terminals rule end @ast << rule if rule end rescue EBNF::PEG::Parser::Error => e raise SyntaxError, e.message end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/base.rb
lib/ebnf/base.rb
require 'strscan' require 'sxp' unless defined?(SXP) # Extended Bakus-Nour Form (EBNF), being the W3C variation is # originaly defined in the # [W3C XML 1.0 Spec](https://www.w3.org/TR/REC-xml/#sec-notation). # # This version attempts to be less strict than the strict definition # to allow for coloquial variations (such as in the Turtle syntax). # # A rule takes the following form: # \[1\] symbol ::= expression # # Comments include the content between '/*' and '*/' # # @see https://www.w3.org/2000/10/swap/grammar/ebnf2turtle.py # @see https://www.w3.org/2000/10/swap/grammar/ebnf2bnf.n3 # # Based on bnf2turtle by Dan Connolly. # # Motivation # ---------- # # Many specifications include grammars that look formal but are not # actually checked, by machine, against test data sets. Debugging the # grammar in the XML specification has been a long, tedious manual # process. Only when the loop is closed between a fully formal grammar # and a large test data set can we be confident that we have an accurate # specification of a language (and even then, only the syntax of the language). # # # The grammar in the [N3 design note][] has evolved based on the original # manual transcription into a python recursive-descent parser and # subsequent development of test cases. Rather than maintain the grammar # and the parser independently, our [goal] is to formalize the language # syntax sufficiently to replace the manual implementation with one # derived mechanically from the specification. # # # [N3 design note]: https://www.w3.org/DesignIssues/Notation3 # # Related Work # ------------ # # Sean Palmer's [n3p announcement][] demonstrated the feasibility of the # approach, though that work did not cover some aspects of N3. # # In development of the [SPARQL specification][], Eric Prud'hommeaux # developed [Yacker][], which converts EBNF syntax to perl and C and C++ # yacc grammars. It includes an interactive facility for checking # strings against the resulting grammars. 
# Yosi Scharf used it in [cwm Release 1.1.0rc1][], which includes # a SPAQRL parser that is *almost* completely mechanically generated. # # The N3/turtle output from yacker is lower level than the EBNF notation # from the XML specification; it has the ?, +, and * operators compiled # down to pure context-free rules, obscuring the grammar # structure. Since that transformation is straightforwardly expressed in # semantic web rules (see [bnf-rules.n3][]), it seems best to keep the RDF # expression of the grammar in terms of the higher level EBNF # constructs. # # [goal]: https://www.w3.org/2002/02/mid/1086902566.21030.1479.camel@dirk;list=public-cwm-bugs # [n3p announcement]: https://lists.w3.org/Archives/Public/public-cwm-talk/2004OctDec/0029.html # [Yacker]: https://rubygems/02/26-modules/User/Yacker # [SPARQL specification]: https://www.w3.org/TR/rdf-sparql-query/ # [Cwm Release 1.1.0rc1]: https://lists.w3.org/Archives/Public/public-cwm-announce/2005JulSep/0000.html # [bnf-rules.n3]: https://www.w3.org/2000/10/swap/grammar/bnf-rules.n3 # # [swap/grammar/bnf]: https://www.w3.org/2000/10/swap/grammar/bnf # [bnf2html.n3]: https://www.w3.org/2000/10/swap/grammar/bnf2html.n3 # # Background # ---------- # # The [N3 Primer] by Tim Berners-Lee introduces RDF and the Semantic # web using N3, a teaching and scribbling language. Turtle is a subset # of N3 that maps directly to (and from) the standard XML syntax for # RDF. 
# # [N3 Primer]: https://www.w3.org/2000/10/swap/Primer.html # # @author Gregg Kellogg module EBNF class Base include BNF include LL1 include Native include PEG # Abstract syntax tree from parse # # @return [Array<Rule>] attr_reader :ast # Grammar errors, or errors found genering parse tables # # @return [Array<String>] attr_accessor :errors # Parse the string or file input generating an abstract syntax tree # in S-Expressions (similar to SPARQL SSE) # # @param [#read, #to_s] input # @param [Symbol] format (:ebnf) # Format of input, one of `:abnf`, `:ebnf`, `:isoebnf`, `:isoebnf`, `:native`, or `:sxp`. # Use `:native` for the native EBNF parser, rather than the PEG parser. # @param [Hash{Symbol => Object}] options # @option options [Boolean] :level # Trace level. 0(debug), 1(info), 2(warn), 3(error). # @option options [Boolean, Array] :validate # Validate resulting grammar. def initialize(input, format: :ebnf, **options) @options = options.dup @lineno, @depth, @errors = 1, 0, [] @ast = [] input = input.respond_to?(:read) ? input.read : input.to_s case format when :abnf abnf = ABNF.new(input, **options) @ast = abnf.ast when :ebnf ebnf = Parser.new(input, **options) @ast = ebnf.ast when :isoebnf iso = ISOEBNF.new(input, **options) @ast = iso.ast when :native terminals = false scanner = StringScanner.new(input) eachRule(scanner) do |r| debug("rule string") {r.inspect} case r when /^@terminals/ # Switch mode to parsing terminals terminals = true rule = Rule.new(nil, nil, nil, kind: :terminals, ebnf: self) @ast << rule when /^@pass\s*(.*)$/m expr = expression($1).first rule = Rule.new(nil, nil, expr, kind: :pass, ebnf: self) rule.orig = expr @ast << rule else rule = depth {ruleParts(r)} rule.kind = :terminal if terminals # Override after we've parsed @terminals rule.orig = r @ast << rule end end when :sxp @ast = SXP::Reader::Basic.read(input).map {|e| Rule.from_sxp(e)} else raise "unknown input format #{format.inspect}" end validate! 
if @options[:validate] end ## # Validate the grammar. # # Makes sure that rules reference either strings or other defined rules. # # @raise [RangeError] def validate! ast.each do |rule| begin rule.validate!(@ast) rescue SyntaxError => e error("In rule #{rule.sym}: #{e.message}") end end raise SyntaxError, errors.join("\n") unless errors.empty? end ## # Is the grammar valid? # # Uses `#validate!` and catches `RangeError` # # @return [Boolean] def valid? validate! true rescue SyntaxError false end # Iterate over each rule or terminal, except empty # @param [:termina, :rule] kind # @yield rule # @yieldparam [Rule] rule def each(kind, &block) ast.each {|r| block.call(r) if r.kind == kind && r.sym != :_empty} end ## # Write out parsed syntax string as an S-Expression # # @return [String] def to_sxp(**options) require 'sxp' unless defined?(SXP) SXP::Generator.string(ast.map(&:for_sxp)) end ## # Output formatted EBNF # # @param [:abnf, :ebnf, :isoebnf] format (:ebnf) # @return [String] def to_s(format: :ebnf) Writer.string(*ast, format: format) end ## # Output formatted EBNF as HTML # # @param [:abnf, :ebnf, :isoebnf] format (:ebnf) # @param [Boolean] validate (false) validate generated HTML. # @return [String] def to_html(format: :ebnf, validate: false) Writer.html(*ast, format: format, validate: validate) end ## # Output Ruby parser files # # @param [IO, StringIO] output # @param [String] grammarFile # @param [String] mod_name ('Meta') def to_ruby(output = $stdout, grammarFile: nil, mod_name: 'Meta', **options) unless output == $stdout output.puts "# This file is automatically generated by ebnf version #{EBNF::VERSION}" output.puts "# Derived from #{grammarFile}" if grammarFile unless self.errors.empty? 
output.puts "# Note, grammar has errors, may need to be resolved manually:" #output.puts "# #{pp.conflicts.map{|c| c.join("\n# ")}.join("\n# ")}" end output.puts "module #{mod_name}" output.puts " START = #{self.start.inspect}\n" if self.start end # Either output LL(1) BRANCH tables or rules for PEG parsing if ast.first.first to_ruby_ll1(output) else to_ruby_peg(output) end unless output == $stdout output.puts "end" end end ## # Renumber, rule identifiers def renumber! ast.each_with_index do |rule, index| rule.id = (index + 1).to_s end end ## # Write out syntax tree as Turtle # @param [String] prefix for language # @param [String] ns URI for language # @return [String] def to_ttl(prefix = nil, ns = "http://example.org/") unless ast.empty? [ "@prefix dc: <http://purl.org/dc/terms/>.", "@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>.", "@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#>.", ("@prefix #{prefix}: <#{ns}>." if prefix), "@prefix : <#{ns}>.", "@prefix re: <http://www.w3.org/2000/10/swap/grammar/regex#>.", "@prefix g: <http://www.w3.org/2000/10/swap/grammar/ebnf#>.", "", ":language rdfs:isDefinedBy <>; g:start :#{ast.first.id}.", "", ].compact end.join("\n") + ast.map(&:to_ttl).join("\n") end def dup new_obj = super new_obj.instance_variable_set(:@ast, @ast.dup) new_obj end ## # Find a rule given a symbol # @param [Symbol] sym # @return [Rule] def find_rule(sym) (@find ||= {})[sym] ||= ast.detect {|r| r.sym == sym} end def depth @depth += 1 ret = yield @depth -= 1 ret end # Progress output, less than debugging def progress(*args, **options) debug(*args, level: Logger::INFO, **options) end # Error output def error(*args, **options) depth = options[:depth] || @depth args << yield if block_given? 
message = "#{args.join(': ')}" debug(message, level: Logger::ERROR, **options) @errors << message $stderr.puts(message) end ## # Progress output when debugging # # @overload debug(node, message) # @param [String] node relative location in input # @param [String] message ("") # # @overload debug(message) # @param [String] message ("") # # @yieldreturn [String] added to message def debug(*args, level: Logger::DEBUG, **options) return unless @options.key?(:logger) depth = options[:depth] || @depth args << yield if block_given? message = "#{args.join(': ')}" str = "[#{@lineno}]#{' ' * depth}#{message}" if @options[:logger].respond_to?(:add) @options[:logger].add(level, str) elsif @options[:logger].respond_to?(:<<) @options[:logger] << "[#{lineno}] " + str end end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/writer.rb
lib/ebnf/writer.rb
# -*- encoding: utf-8 -*- require 'rdf' require 'strscan' unless defined?(StringScanner) require "ostruct" require 'unicode/types' ## # Serialize ruleset back to EBNF module EBNF class Writer LINE_LENGTH = 80 LINE_LENGTH_HTML = 200 # UNICODE escape names # From https://en.wikipedia.org/wiki/List_of_Unicode_characters UNICODE_ESCAPE_NAMES = { 0x00 => 'null', 0x01 => 'start of heading', 0x02 => 'start of text', 0x03 => 'end of text', 0x04 => 'end of transmission', 0x05 => 'enquiry', 0x06 => 'acknowledge', 0x07 => 'bell', 0x08 => 'backspace', 0x09 => 'horizontal tab', 0x0A => 'new line', 0x0B => 'vertical tab', 0x0C => 'form feed', 0x0D => 'carriage return', 0x0E => 'shift out', 0x0F => 'shift in', 0x10 => 'data link escape', 0x11 => 'device control 1', 0x12 => 'device control 2', 0x13 => 'device control 3', 0x14 => 'device control 4', 0x15 => 'negative acknowledge', 0x16 => 'synchronous idle', 0x17 => 'end of trans. block', 0x18 => 'cancel', 0x19 => 'end of medium', 0x1A => 'substitute', 0x1B => 'escape', 0x1C => 'file separator', 0x1D => 'group separator', 0x1E => 'record separator', 0x1F => 'unit separator', 0x20 => 'space', 0x22 => 'dquote', 0x27 => 'apos', 0x2F => 'slash', 0x5C => 'backslash', 0x60 => 'grave', 0x7F => 'delete', 0x80 => 'padding character', 0x81 => 'high octet preset', 0x82 => 'break permitted here', 0x83 => 'no break here', 0x84 => 'index', 0x85 => 'next line', 0x86 => 'start of selected area', 0x87 => 'end of selected area', 0x88 => 'character tabulation set', 0x89 => 'character tabulation with justification', 0x8A => 'line tabulation set', 0x8B => 'partial line forward', 0x8C => 'partial line backward', 0x8D => 'reverse line feed', 0x8E => 'single-shift two', 0x8F => 'single-shift three', 0x90 => 'device control string', 0x91 => 'private use 1', 0x92 => 'private use 2', 0x93 => 'set transmit state', 0x94 => 'cancel character', 0x95 => 'message waiting', 0x96 => 'start of protected area', 0x97 => 'end of protected area', 0x98 => 'start of 
string', 0x99 => 'single graphic character introducer', 0x9A => 'single character intro introducer', 0x9B => 'control sequence introducer', 0x9C => 'string terminator', 0x9D => 'operating system command', 0x9E => 'private message', 0x9F => 'application program command', } ## # Format rules to a String # # @param [Array<Rule>] rules # @param [:abnf, :ebnf, :isoebnf] format (:ebnf) # @return [Object] def self.string(*rules, format: :ebnf) require 'stringio' unless defined?(StringIO) buf = StringIO.new write(buf, *rules, format: format) buf.string end ## # Format rules to $stdout # # @param [Array<Rule>] rules # @param [:abnf, :ebnf, :isoebnf] format (:ebnf) # @return [Object] def self.print(*rules, format: :ebnf) write($stdout, *rules, format: format) end ## # Write formatted rules to an IO like object # # @param [Object] out # @param [Array<Rule>] rules # @param [:abnf, :ebnf, :isoebnf] format (:ebnf) # @return [Object] def self.write(out, *rules, format: :ebnf) Writer.new(rules, out: out, format: format) end ## # Write formatted rules to an IO like object as HTML # # @param [Array<Rule>] rules # @param [:abnf, :ebnf, :isoebnf] format (:ebnf) # @param [Boolean] validate (false) validate generated HTML. # @return [Object] def self.html(*rules, format: :ebnf, validate: false) require 'stringio' unless defined?(StringIO) buf = StringIO.new Writer.new(rules, out: buf, html: true, format: format, validate: validate) buf.string end ## # @param [Array<Rule>] rules # @param [:abnf, :ebnf, :isoebnf] format (:ebnf) # @param [Boolean] html (false) generate HTML output # @param [Boolean] validate (false) validate generated HTML. # @param [Hash{Symbol => Object}] options # @param [#write] out ($stdout) def initialize(rules, out: $stdout, html: false, format: :ebnf, validate: false, **options) @options = options.merge(html: html) return if rules.empty? 
# Determine max LHS length format_meth = "format_#{format}".to_sym max_id = rules.max_by {|r| r.id.to_s.length}.id.to_s.length max_sym = rules.max_by {|r| r.sym.to_s.length}.sym.to_s.length lhs_length = max_sym + 1 lhs_fmt = case format when :abnf then "%<sym>-#{max_sym}s = " when :ebnf then "%<sym>-#{max_sym}s ::= " when :isoebnf then "%<sym>-#{max_sym}s = " end if format == :ebnf && max_id > 0 lhs_fmt = "%<id>-#{max_id+2}s " + lhs_fmt lhs_length += max_id + 3 end rhs_length = (html ? LINE_LENGTH_HTML : LINE_LENGTH) - lhs_length if html # Output as formatted HTML begin require 'erubis' require 'htmlentities' @coder = HTMLEntities.new eruby = Erubis::Eruby.new(ERB_DESC) formatted_rules = rules.map do |rule| if rule.kind == :terminals || rule.kind == :pass OpenStruct.new(id: ("@#{rule.kind}"), class: :declaration, sym: rule.kind, assign: nil, formatted: ( rule.kind == :terminals ? "<strong># Productions for terminals</strong>" : self.send(format_meth, rule.expr))) else formatted_expr = self.send(format_meth, rule.expr) # Measure text without markup formatted_expr_text = formatted_expr.gsub(%r{</?\w+[^>]*>}, '') if formatted_expr_text.length > rhs_length && (format != :abnf || rule.alt?) lines = [] # Can only reasonably split apart alts self.send(format_meth, rule.expr, sep: "--rule-extensions--"). split(/\s*--rule-extensions--\s*/).each_with_index do |formatted, ndx| assign = case format when :ebnf formatted.sub!(%r{\s*<code[^>]*>\|</code>\s*}, '') (ndx > 0 ? (rule.alt? ? '<code class="grammar-alt">|</code>' : '') : '::=') when :abnf formatted.sub!(%r{\s*<code[^>]>/</code>\s*}, '') (ndx > 0 ? '<code class="grammar-alt">=/</code>' : '=') else formatted.sub!(%r{\s*<code[^>]>\|</code>\s*}, '') (ndx > 0 ? (rule.alt? ? '<code class="grammar-alt">|</code>' : '') : '=') end lines << OpenStruct.new(id: ((ndx == 0 ? 
"[#{rule.id}]" : "") if rule.id), sym: (rule.sym if ndx == 0 || format == :abnf), class: :production, assign: assign, formatted: formatted) end if format == :isoebnf lines << OpenStruct.new(assign: ';') end lines else OpenStruct.new(id: ("[#{rule.id}]" if rule.id), class: :production, sym: rule.sym, assign: (format == :ebnf ? '::=' : '='), formatted: (formatted_expr + (format == :isoebnf ? ' ;' : ''))) end end end.flatten html_result = eruby.evaluate(format: format, rules: formatted_rules) if validate begin require 'nokogiri' # Validate the output HTML doc = ::Nokogiri::HTML5("<!DOCTYPE html>" + html_result, max_errors: 10) raise EncodingError, "Errors found in generated HTML:\n " + doc.errors.map(&:to_s).join("\n ") unless doc.errors.empty? rescue LoadError, NoMethodError # Skip end end out.write html_result return rescue LoadError $stderr.puts "Generating HTML requires erubis and htmlentities gems to be loaded" end end # Format each rule, considering the available rhs size rules.each do |rule| buffer = if rule.pass? "\n%-#{lhs_length-2}s " % '@pass' elsif rule.kind == :terminals "\n%-#{lhs_length-2}s" % '@terminals' else lhs_fmt % {id: "[#{rule.id}]", sym: rule.sym} end formatted_expr = self.send(format_meth, rule.expr) if formatted_expr.length > rhs_length && (format != :abnf || rule.alt?) if format == :abnf # No whitespace, use =/ self.send(format_meth, rule.expr, sep: "--rule-extensions--"). split(/\s*--rule-extensions--\s*/).each_with_index do |formatted, ndx| if ndx > 0 buffer << "\n" + lhs_fmt.sub('= ', '=/') % {id: "[#{rule.id}]", sym: rule.sym} end buffer << formatted.sub(/\s*\/\s*/, '') end else # Space out past "= " buffer << self.send(format_meth, rule.expr, sep: ("\n" + " " * (lhs_length + (rule.alt? ? 2 : 4) - (format == :ebnf ? 0 : 2)))) buffer << ("\n" + " " * (lhs_length) + ';') if format == :isoebnf end else buffer << formatted_expr + (format == :isoebnf ? 
' ;' : '') end buffer << "\n\n" if [:terminals, :pass].include?(rule.kind) out.puts(buffer) end end protected ## # W3C EBNF Formatters ## # Format the expression part of a rule def format_ebnf(expr, sep: nil, embedded: false) return (@options[:html] ? %(<a href="#grammar-production-#{@coder.encode expr}">#{@coder.encode expr}</a>) : expr.to_s) if expr.is_a?(Symbol) if expr.is_a?(String) return expr.length == 1 ? format_ebnf_char(expr) : format_ebnf_string(expr) end parts = { alt: (@options[:html] ? %(<code class="grammar-alt">|</code> ) : "| "), diff: (@options[:html] ? %(<code class="grammar-diff">-</code> ) : "- "), star: (@options[:html] ? %(<code class="grammar-star">*</code>) : "*"), plus: (@options[:html] ? %(<code class="grammar-plus">+</code>) : "+"), opt: (@options[:html] ? %(<code class="grammar-opt">?</code>) : "?") } lparen = (@options[:html] ? %[<code class="grammar-paren">(</code>] : "(") rparen = (@options[:html] ? %[<code class="grammar-paren">)</code>] : ")") case expr.first when :istr # Looses fidelity, but, oh well ... format_ebnf(expr.last, embedded: true) when :alt, :diff this_sep = (sep ? sep : " ") + parts[expr.first.to_sym] res = expr[1..-1].map {|e| format_ebnf(e, embedded: true)}.join(this_sep) embedded ? (lparen + res + rparen) : res when :star, :plus, :opt char = parts[expr.first.to_sym] r = format_ebnf(expr[1], embedded: true) "#{r}#{char}" when :hex escape_ebnf_hex(expr.last[2..-1].hex.chr(Encoding::UTF_8)) when :range format_ebnf_range(expr.last) when :seq this_sep = (sep ? sep : " ") res = expr[1..-1].map do |e| format_ebnf(e, embedded: true) end.join(this_sep) embedded ? 
(lparen + res + rparen) : res when :rept # Expand repetition min, max, value = expr[1..-1] if min == 0 && max == 1 format_ebnf([:opt, value], sep: sep, embedded: embedded) elsif min == 0 && max == '*' format_ebnf([:star, value], sep: sep, embedded: embedded) elsif min == 1 && max == '*' format_ebnf([:plus, value], sep: sep, embedded: embedded) else val2 = [:seq] while min > 0 val2 << value min -= 1 max -= 1 unless max == '*' end if max == '*' val2 << [:star, value] else opt = nil while max > 0 opt = [:opt, opt ? [:seq, value, opt] : value] max -= 1 end val2 << opt if opt end format_ebnf(val2, sep: sep, embedded: embedded) end else raise "Unknown operator: #{expr.first}" end end # Format a single-character string, prefering hex for non-main ASCII def format_ebnf_char(c) quote = c.as_dquote? ? '"' : "'" case c.ord when 0x21 then (@options[:html] ? %("<code class="grammar-literal">#{@coder.encode c}</code>") : %{"#{c}"}) when 0x22 then (@options[:html] ? %('<code class="grammar-literal">&quot;</code>') : %{'"'}) when (0x23..0x7e) then (@options[:html] ? %(#{quote}<code class="grammar-literal">#{@coder.encode c}</code>#{quote}) : %{#{quote}#{c}#{quote}}) when (0x80..0xFFFD) then (@options[:html] ? %(#{quote}<code class="grammar-literal">#{@coder.encode c}</code>#{quote}) : %{#{quote}#{c}#{quote}}) else escape_ebnf_hex(c) end end # Format a range def format_ebnf_range(string) lbrac = (@options[:html] ? %(<code class="grammar-brac">[</code>) : "[") rbrac = (@options[:html] ? %(<code class="grammar-brac">]</code>) : "]") buffer = lbrac s = StringScanner.new(string) while !s.eos? case when s.scan(/\A[!"\u0024-\u007e]+/) buffer << (@options[:html] ? 
%(<code class="grammar-literal">#{@coder.encode s.matched}</code>) : s.matched) when s.scan(/\A#x\h+/) buffer << escape_ebnf_hex(s.matched[2..-1].hex.chr(Encoding::UTF_8)) else buffer << escape_ebnf_hex(s.getch) end end buffer + rbrac end # Escape a string, using as many UTF-8 characters as possible def format_ebnf_string(string) quote = string.as_dquote? ? '"' : "'" string.each_char do |c| case c.ord when 0x00..0x19, quote.ord raise RangeError, "cannot format #{string.inspect} as an EBNF String: #{c.inspect} is out of range" unless ISOEBNF::TERMINAL_CHARACTER.match?(c) end end res = @options[:html] ? %(<code class="grammar-literal">#{@coder.encode(string)}</code>) : string res = "#{quote}#{res}#{quote}" end def escape_ebnf_hex(u) fmt = case u.ord when 0x0000..0x00ff then "#x%02X" when 0x0100..0xffff then "#x%04X" else "#x%08X" end char = fmt % u.ord if @options[:html] char = if UNICODE_ESCAPE_NAMES.include?(u.ord) %(<abbr title="#{UNICODE_ESCAPE_NAMES[u.ord]}">#{char}</abbr>) elsif ([::Unicode::Types.of(u)] - %w(Control Private-use Surrogate Noncharacter Reserved)).empty? %(<abbr title="unicode '#{@coder.encode u}'">#{char}</abbr>) else uni_esc = "U+%04X" % u.ord %(<abbr title="unicode #{uni_esc}">#{char}</abbr>) end %(<code class="grammar-char-escape">#{char}</code>) else char end end ## # ABNF Formatters ## # Format the expression part of a rule def format_abnf(expr, sep: nil, embedded: false, sensitive: true) return (@options[:html] ? %(<a href="#grammar-production-#{@coder.encode expr}">#{@coder.encode expr}</a>) : expr.to_s) if expr.is_a?(Symbol) if expr.is_a?(String) if expr.length == 1 return format_abnf_char(expr) elsif expr.start_with?('%') # Already encoded return expr elsif expr =~ /"/ # Split into segments segments = expr.split('"') return format_abnf_char(expr) if segments.empty? 
seq = segments.inject([]) {|memo, s| memo.concat([[:hex, "#x22"], s])}[1..-1] seq.unshift(:seq) return format_abnf(seq, sep: nil, embedded: false) else return (@options[:html] ? %("<code class="grammar-literal">#{@coder.encode expr}</code>") : %("#{expr}")) end end parts = { alt: (@options[:html] ? %(<code class="grammar-alt">/</code>) : "/ "), star: (@options[:html] ? %(<code class="grammar-star">*</code>) : "*"), plus: (@options[:html] ? %(<code class="grammar-plus">+</code>) : "1*"), opt: (@options[:html] ? %(<code class="grammar-opt">?</code>) : "?") } lbrac = (@options[:html] ? %(<code class="grammar-brac">[</code>) : "[") rbrac = (@options[:html] ? %(<code class="grammar-brac">]</code>) : "]") lparen = (@options[:html] ? %[<code class="grammar-paren">(</code>] : "(") rparen = (@options[:html] ? %[<code class="grammar-paren">)</code>] : ")") case expr.first when :istr # FIXME: if string part is segmented, need to do something different format_abnf(expr.last, embedded: true, sensitive: false) when :alt this_sep = (sep ? sep : " ") + parts[expr.first.to_sym] res = expr[1..-1].map {|e| format_abnf(e, embedded: true)}.join(this_sep) embedded ? (lparen + res + rparen) : res when :diff raise RangeError, "ABNF does not support the diff operator" when :opt char = parts[expr.first.to_sym] r = format_abnf(expr[1], embedded: true) "#{lbrac}#{r}#{rbrac}" when :plus, :star char = parts[expr.first.to_sym] r = format_abnf(expr[1], embedded: true) "#{char}#{r}" when :hex escape_abnf_hex(expr.last[2..-1].hex.chr) when :range # Returns an [:alt] or [:not [:alt]] if composed of multiple sequences # Note: ABNF does not support the `not` operator res = format_abnf_range(expr.last) res.is_a?(Array) ? format_abnf(res, embedded: true) : res when :seq this_sep = (sep ? sep : " ") res = expr[1..-1].map do |e| format_abnf(e, embedded: true) end.join(this_sep) embedded ? 
(lparen + res + rparen) : res when :rept # Expand repetition min, max, value = expr[1..-1] r = format_abnf(value, embedded: true) if min == max "#{min}#{r}" elsif min == 0 && max == '*' "#{parts[:star]}#{r}" elsif min > 0 && max == '*' "#{min}#{parts[:star]}#{r}" elsif min == 0 "#{parts[:star]}#{max}#{r}" else "#{min}#{parts[:star]}#{max}#{r}" end else raise "Unknown operator: #{expr.first}" end end # Format a single-character string, prefering hex for non-main ASCII def format_abnf_char(c) if /[\x20-\x21\x23-\x7E]/.match?(c) @options[:html] ? %("<code class="grammar-literal">#{@coder.encode c}</code>") : c.inspect else escape_abnf_hex(c) end end # Format a range # # Presumes range has already been validated def format_abnf_range(string) alt, o_dash = [:alt], false raise RangeError, "cannot format #{string.inspect} an ABNF range" if string.start_with?('^') if string.end_with?('-') o_dash = true string = string[0..-2] end scanner = StringScanner.new(string) hexes, deces = [], [] in_range = false # Build op (alt) from different ranges/enums while !scanner.eos? if hex = scanner.scan(Terminals::HEX) # Append any decimal values alt << "%d" + deces.join(".") unless deces.empty? deces = [] hex = hex.upcase if in_range # Add "." sequences for any previous hexes alt << "%x" + hexes[0..-2].join(".") if hexes.length > 1 alt << "%x#{hexes.last}-#{hex[2..-1]}" in_range, hexes = false, [] else hexes << hex[2..-1] end elsif dec = scanner.scan(Terminals::R_CHAR) # Append any hexadecimal values alt << "%x" + hexes.join(".") unless hexes.empty? hexes = [] if in_range # Add "." sequences for any previous hexes alt << "%d" + deces[0..-2].join(".") if deces.length > 1 alt << "%d#{deces.last}-#{dec.codepoints.first}" in_range, deces = false, [] else deces << dec.codepoints.first.to_s end end in_range = true if scanner.scan(/\-/) end deces << '45' if o_dash # Append hexes and deces as "." sequences (should be only one) alt << "%d" + deces.join(".") unless deces.empty? 
alt << "%x" + hexes.join(".") unless hexes.empty? # FIXME: HTML abbreviations? if alt.length == 2 # Just return the range or enum alt.last else # Return the alt, which will be further formatted alt end end def escape_abnf_hex(u) fmt = case u.ord when 0x0000..0x00ff then "%02X" when 0x0100..0xffff then "%04X" else "%08X" end char = "%x" + (fmt % u.ord).upcase if @options[:html] char = if UNICODE_ESCAPE_NAMES.include?(u.ord) %(<abbr title="#{UNICODE_ESCAPE_NAMES[u.ord]}">#{char}</abbr>) elsif ([::Unicode::Types.of(u)] - %w(Control Private-use Surrogate Noncharacter Reserved)).empty? %(<abbr title="unicode '#{@coder.encode u}'">#{char}</abbr>) else uni_esc = "U+%04X" % u.ord %(<abbr title="unicode #{uni_esc}">#{char}</abbr>) end %(<code class="grammar-char-escape">#{char}</code>) else char end end ## # ISO EBNF Formatters ## # Format the expression part of a rule def format_isoebnf(expr, sep: nil, embedded: false) return (@options[:html] ? %(<a href="#grammar-production-#{@coder.encode expr}">#{@coder.encode expr}</a>) : expr.to_s) if expr.is_a?(Symbol) if expr.is_a?(String) expr = expr[2..-1].hex.chr if expr =~ /\A#x\h+/ expr.chars.each do |c| raise RangeError, "cannot format #{expr.inspect} as an ISO EBNF String: #{c.inspect} is out of range" unless ISOEBNF::TERMINAL_CHARACTER.match?(c) end if expr =~ /"/ return (@options[:html] ? %('<code class="grammar-literal">#{@coder.encode expr}</code>') : %('#{expr}')) else return (@options[:html] ? %("<code class="grammar-literal">#{@coder.encode expr}</code>") : %("#{expr}")) end end parts = { alt: (@options[:html] ? %(<code class="grammar-alt">|</code> ) : "| "), diff: (@options[:html] ? %(<code class="grammar-diff">-</code> ) : "- "), } lparen = (@options[:html] ? %[<code class="grammar-paren">(</code>] : "(") rparen = (@options[:html] ? %[<code class="grammar-paren">)</code>] : ")") case expr.first when :istr # Looses fidelity, but, oh well ... format_isoebnf(expr.last, embedded: true) when :alt, :diff this_sep = (sep ? 
sep : " ") + parts[expr.first.to_sym] res = expr[1..-1].map {|e| format_isoebnf(e, embedded: true)}.join(this_sep) embedded ? (lparen + res + rparen) : res when :opt r = format_isoebnf(expr[1], embedded: true) "[#{r}]" when :star r = format_isoebnf(expr[1], embedded: true) "{#{r}}" when :plus r = format_isoebnf(expr[1], embedded: true) "#{r}, {#{r}}" when :hex format_isoebnf(expr[1], embedded: true) when :range res = format_isoebnf_range(expr.last) res.is_a?(Array) ? format_isoebnf(res, embedded: true) : res when :seq this_sep = "," + (sep ? sep : " ") res = expr[1..-1].map do |e| format_isoebnf(e, embedded: true) end.join(this_sep) embedded ? (lparen + res + rparen) : res when :rept # Expand repetition min, max, value = expr[1..-1] if min == 0 && max == 1 format_isoebnf([:opt, value], sep: sep, embedded: embedded) elsif min == 0 && max == '*' format_isoebnf([:star, value], sep: sep, embedded: embedded) elsif min == 1 && max == '*' format_isoebnf([:plus, value], sep: sep, embedded: embedded) else val2 = [:seq] while min > 0 val2 << value min -= 1 max -= 1 unless max == '*' end if max == '*' val2 << [:star, value] else opt = nil while max > 0 opt = [:opt, opt ? [:seq, value, opt] : value] max -= 1 end val2 << opt if opt end format_isoebnf(val2, sep: sep, embedded: embedded) end else raise "Unknown operator: #{expr.first}" end end # Format a range # Range is formatted as a aliteration of characters def format_isoebnf_range(string) chars = [] o_dash = false raise RangeError, "cannot format #{string.inspect} an ABNF range" if string.start_with?('^') if string.end_with?('-') o_dash = true string = string[0..-2] end scanner = StringScanner.new(string) in_range = false # Build chars from different ranges/enums while !scanner.eos? 
char = if hex = scanner.scan(Terminals::HEX) hex[2..-1].hex.ord.char(Encoding::UTF_8) else scanner.scan(Terminals::R_CHAR) end raise RangeError, "cannot format #{string.inspect} as an ISO EBNF Aliteration: #{char.inspect} is out of range" unless char && ISOEBNF::TERMINAL_CHARACTER.match?(char) if in_range # calculate characters from chars.last to this char raise RangeError, "cannot format #{string.inspect} as an ISO EBNF Aliteration" unless chars.last < char chars.concat (chars.last..char).to_a[1..-1] in_range = false else chars << char end in_range = true if scanner.scan(/\-/) end chars << '-' if o_dash # Possibly only a single character (no character?) chars.length == 1 ? chars.last.inspect : chars.unshift(:alt) end ERB_DESC = %(<!-- Generated with ebnf version #{EBNF::VERSION}. See https://github.com/dryruby/ebnf. -->\n) + %q(<table class="grammar"> <tbody id="grammar-productions" class="<%= @format %>"> <% for rule in @rules %> <tr<%= %{ id="grammar-#{rule[:class]}-#{rule.sym}"} unless %w(=/ |).include?(rule.assign) || rule.sym.nil?%>> <% if rule.id %> <td<%= " colspan=2" unless rule.sym %>><%= rule.id %></td> <% end %> <% if rule.sym %> <td><code><%== (rule.sym unless rule.class == :declaration) %></code></td> <% end %> <td><%= rule.assign %></td> <td><%= rule.formatted %></td> </tr> <% end %> </tbody> </table> ).gsub(/^ /, '') end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/native.rb
lib/ebnf/native.rb
module EBNF module Native ## # Native parser for EBNF; less accurate, but appropriate when changing EBNF grammar, itself. # # Iterate over rule strings. # a line that starts with '\[' or '@' starts a new rule # # @param [StringScanner] scanner # @yield rule_string # @yieldparam [String] rule_string def eachRule(scanner) cur_lineno = 1 r = '' until scanner.eos? case when s = scanner.scan(%r(\s+)m) # Eat whitespace cur_lineno += s.count("\n") #debug("eachRule(ws)") { "[#{cur_lineno}] #{s.inspect}" } when s = scanner.scan(%r(/\*([^\*]|\*[^\/])*\*/)m) # Eat comments /* .. */ cur_lineno += s.count("\n") debug("eachRule(comment)") { "[#{cur_lineno}] #{s.inspect}" } when s = scanner.scan(%r(\(\*([^\*]|\*[^\)])*\*\))m) # Eat comments (* .. *) cur_lineno += s.count("\n") debug("eachRule(comment)") { "[#{cur_lineno}] #{s.inspect}" } when s = scanner.scan(%r((#(?!x)|//).*$)) # Eat comments // & # cur_lineno += s.count("\n") debug("eachRule(comment)") { "[#{cur_lineno}] #{s.inspect}" } when s = scanner.scan(/\A["']/) # Found a quote, scan until end of matching quote s += scanner.scan_until(/#{scanner.matched}|$/) s.quote_style = scanner.matched == "'" ? :squote : :dquote r += s when s = scanner.scan(%r(^@terminals)) #debug("eachRule(@terminals)") { "[#{cur_lineno}] #{s.inspect}" } yield(r) unless r.empty? @lineno = cur_lineno yield(s) r = '' when s = scanner.scan(/@pass/) # Found rule start, if we've already collected a rule, yield it #debug("eachRule(@pass)") { "[#{cur_lineno}] #{s.inspect}" } yield r unless r.empty? @lineno = cur_lineno r = s when s = scanner.scan(EBNF::Terminals::LHS) # Found rule start, if we've already collected a rule, yield it yield r unless r.empty? 
#debug("eachRule(rule)") { "[#{cur_lineno}] #{s.inspect}" } @lineno = cur_lineno r = s.gsub(/[<>]/, '') # Remove angle brackets else # Collect until end of line, or start of comment or quote s = scanner.scan_until(%r{(?:[/\(]\*)|#(?!x)|//|["']|$}) if scanner.matched.length > 0 # Back up scan head before ending match scanner.pos = scanner.pos - scanner.matched.length # Remove matched from end of string s = s[0..-(scanner.matched.length+1)] end cur_lineno += s.count("\n") #debug("eachRule(rest)") { "[#{cur_lineno}] #{s.inspect}" } r += s end end yield r unless r.empty? end ## # Parse a rule into an optional rule number, a symbol and an expression # # @param [String] rule # @return [Rule] def ruleParts(rule) num_sym, expr = rule.split('::=', 2).map(&:strip) num, sym = num_sym.split(']', 2).map(&:strip) num, sym = "", num if sym.nil? num = num[1..-1] sym = sym[1..-2] if sym.start_with?('<') && sym.end_with?('>') r = Rule.new(sym && sym.to_sym, num, expression(expr).first, ebnf: self) debug("ruleParts") { r.inspect } r end ## # Parse a string into an expression tree and a remaining string # # @example # >>> expression("a b c") # ((seq a b c) '') # # >>> expression("a? b+ c*") # ((seq (opt a) (plus b) (star c)) '') # # >>> expression(" | x xlist") # ((alt (seq) (seq x xlist)) '') # # >>> expression("a | (b - c)") # ((alt a (diff b c)) '') # # >>> expression("a b | c d") # ((alt (seq a b) (seq c d)) '') # # >>> expression("a | b | c") # ((alt a b c) '') # # >>> expression("a) b c") # (a ' b c') # # >>> expression("BaseDecl? PrefixDecl*") # ((seq (opt BaseDecl) (star PrefixDecl)) '') # # >>> expression("NCCHAR1 | diff | [0-9] | #x00B7 | [#x0300-#x036F] | \[#x203F-#x2040\]") # ((alt NCCHAR1 diff # (range '0-9') # (hex '#x00B7') # (range '#x0300-#x036F') # (range, '#x203F-#x2040')) '') # # @param [String] s # @return [Array] def expression(s) debug("expression") {"(#{s.inspect})"} e, s = depth {alt(s)} debug {"=> alt returned #{[e, s].inspect}"} unless s.to_s.empty? 
t, ss = depth {terminal(s)} debug {"=> terminal returned #{[t, ss].inspect}"} return [e, ss] if t.is_a?(Array) && t.first == :")" end [e, s] end ## # Parse alt # >>> alt("a | b | c") # ((alt a b c) '') # @param [String] s # @return [Array] def alt(s) debug("alt") {"(#{s.inspect})"} args = [] while !s.to_s.empty? e, s = depth {seq(s)} debug {"=> seq returned #{[e, s].inspect}"} if e.to_s.empty? break unless args.empty? e = [:seq, []] # empty sequence end args << e unless s.to_s.empty? t, ss = depth {terminal(s)} break unless t[0] == :alt s = ss end end args.length > 1 ? [args.unshift(:alt), s] : [e, s] end ## # parse seq # # >>> seq("a b c") # ((seq a b c) '') # # >>> seq("a b? c") # ((seq a (opt b) c) '') def seq(s) debug("seq") {"(#{s.inspect})"} args = [] while !s.to_s.empty? e, ss = depth {diff(s)} debug {"=> diff returned #{[e, ss].inspect}"} unless e.to_s.empty? args << e s = ss else break; end end if args.length > 1 [args.unshift(:seq), s] elsif args.length == 1 args + [s] else ["", s] end end ## # parse diff # # >>> diff("a - b") # ((diff a b) '') def diff(s) debug("diff") {"(#{s.inspect})"} e1, s = depth {postfix(s)} debug {"=> postfix returned #{[e1, s].inspect}"} unless e1.to_s.empty? unless s.to_s.empty? t, ss = depth {terminal(s)} debug {"diff #{[t, ss].inspect}"} if t.is_a?(Array) && t.first == :diff s = ss e2, s = primary(s) unless e2.to_s.empty? return [[:diff, e1, e2], s] else error("diff", "Syntax Error") raise SyntaxError, "diff missing second operand" end end end end [e1, s] end ## # parse postfix # # >>> postfix("a b c") # (a ' b c') # # >>> postfix("a? b c") # ((opt a) ' b c') def postfix(s) debug("postfix") {"(#{s.inspect})"} e, s = depth {primary(s)} debug {"=> primary returned #{[e, s].inspect}"} return ["", s] if e.to_s.empty? if !s.to_s.empty? 
t, ss = depth {terminal(s)} debug {"=> #{[t, ss].inspect}"} if t.is_a?(Array) && [:opt, :star, :plus].include?(t.first) return [[t.first, e], ss] end end [e, s] end ## # parse primary # # >>> primary("a b c") # (a ' b c') def primary(s) debug("primary") {"(#{s.inspect})"} t, s = depth {terminal(s)} debug {"=> terminal returned #{[t, s].inspect}"} if t.is_a?(Symbol) || t.is_a?(String) [t, s] elsif %w(range hex).map(&:to_sym).include?(t.first) [t, s] elsif t.first == :"(" e, s = depth {expression(s)} debug {"=> expression returned #{[e, s].inspect}"} [e, s] else ["", s] end end ## # parse one terminal; return the terminal and the remaining string # # A terminal is represented as a tuple whose 1st item gives the type; # some types have additional info in the tuple. # # @example # >>> terminal("'abc' def") # ('abc' ' def') # # >>> terminal("[0-9]") # ((range '0-9') '') # >>> terminal("#x00B7") # ((hex '#x00B7') '') # >>> terminal ("\[#x0300-#x036F\]") # ((range '#x0300-#x036F') '') # >>> terminal("\[^<>'{}|^`\]-\[#x00-#x20\]") # ((range "^<>'{}|^`") '-\[#x00-#x20\]') def terminal(s) s = s.strip #STDERR.puts s.inspect case m = s[0,1] when '"', "'" # STRING1 or STRING2 l, s = s[1..-1].split(m.rstrip, 2) [Unescape.unescape(l).tap {|str| str.quote_style = (m == "'" ? :squote : :dquote)}, s] when '[' # RANGE, O_RANGE # Includes RANGE and O_RANGE which can't include a ']' l, s = s[1..-1].split(']', 2) [[:range, Unescape.unescape(l)], s] when '#' # HEX s.match(/(#x\h+)(.*)$/) l, s = $1, $2 [[:hex, l], s] when '<', /[\w\.]/ # SYMBOL s.match(/<?([\w\.]+)>?(.*)$/) l, s = $1, $2 [l.to_sym, s] when '-' [[:diff], s[1..-1]] when '?' [[:opt], s[1..-1]] when '|' [[:alt], s[1..-1]] when '+' [[:plus], s[1..-1]] when '*' [[:star], s[1..-1]] when /[\(\)]/ # '(' or ')' [[m.to_sym], s[1..-1]] else error("terminal", "unrecognized terminal: #{s.inspect}") raise SyntaxError, "unrecognized terminal: #{s.inspect}" end end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/isoebnf.rb
lib/ebnf/isoebnf.rb
require_relative 'isoebnf/meta' require 'logger' # ISO EBNF parser # Parses ISO EBNF into an array of {EBNF::Rule}. module EBNF class ISOEBNF include EBNF::PEG::Parser # The base for terminal-character, which omits "'", '"', and '?'. # Could be more optimized, and one might quible # with the overly-strictly defined character set, # but it is correct. TERMINAL_CHARACTER_BASE = %r{ [a-zA-Z0-9] | # letter | decimal digit , | # concatenate symbol = | # defining symbol [\|\/!] | # definition separator symbol \*\) | # end comment symbol \) | # end group symbol \] | # end option symbol \} | # end repeat symbol \- | # except symbol #\' | # first quote symbol \* | # repetition symbol #\" | # second quote symbol #\? | # special sequence symbol \(\* | # start comment symbol \( | # start group symbol \[ | # start option symbol \{ | # start repeat symbol [;\.] | # terminator symbol [:+_%@&$<>^\x20\x23\\`~] # other character }x TERMINAL_CHARACTER = %r{#{TERMINAL_CHARACTER_BASE}|['"\?]} FIRST_TERMINAL_CHARACTER = %r{#{TERMINAL_CHARACTER_BASE}|["\?]} SECOND_TERMINAL_CHARACTER = %r{#{TERMINAL_CHARACTER_BASE}|['\?]} SPECIAL_SEQUENCE_CHARACTER = %r{#{TERMINAL_CHARACTER_BASE}|['"]} # Abstract syntax tree from parse # # @return [Array<EBNF::Rule>] attr_reader :ast # `[14] integer ::= decimal_digit+` terminal(:integer, /\d+/) do |value, prod| value.to_i end # `[15] meta_identifier ::= letter meta_identifier_character*` terminal(:meta_identifier, /[a-zA-Z][a-zA-Z0-9_]*/) do |value| value.to_sym end # `[17] terminal_string ::= ("'" first_terminal_character+ "'")` # ` | ('"' second_terminal_character+ '"')` terminal(:terminal_string, /(?:'#{FIRST_TERMINAL_CHARACTER}+')|(?:"#{SECOND_TERMINAL_CHARACTER}+")/x) do |value| value[1..-2].tap {|s| s.quote_style = (value.start_with?("'") ? :squote : :dquote) } end # `[20] special_sequence ::= '?' 
special_sequence_character* '?'` terminal(:special_sequence, /\?#{SPECIAL_SEQUENCE_CHARACTER}+\?/) # `[22] terminal_character ::= [a-zA-Z0-9]` # ` | [,=;*}#x2d?([{;]` # ` | '*)'` # ` | '(*'` # ` | ']'` # ` | other_character` terminal(:terminal_character, TERMINAL_CHARACTER) # `[25] empty ::= ''` terminal(:empty, //) # `[26] definition_separator_symbol ::= '|' | '/' | '!'` terminal(:definition_separator_symbol, /[\|\/!]/) # `[27] terminator_symbol ::= ';' | '.'` terminal(:terminator_symbol, /[;\.]/) # `[28] start_option_symbol ::= '[' terminal(:start_option_symbol, /\[|(?:\(\/)/) # `[29] end_option_symbol ::= ']'` terminal(:end_option_symbol, /\]/) # `[30] start_repeat_symbol ::= '{' | '(:'` terminal(:start_repeat_symbol, /{|\(:/) # `[31] end_repeat_symbol ::= '}' | ':)'` terminal(:end_repeat_symbol, /}|:\)/) # ## Non-terminal productions # `[2] syntax_rule ::= meta_identifier '=' definitions_list terminator_symbol` production(:syntax_rule, clear_packrat: true) do |value, data, callback| # value contains an expression. # Invoke callback sym = value[0][:meta_identifier] definitions_list = value[2][:definitions_list] callback.call(:rule, EBNF::Rule.new(sym.to_sym, nil, definitions_list)) nil end # Setting `as_hash: true` in the start production makes the value of the form of a hash, rather than an array of hashes. 
# # `[3] definitions_list ::= single_definition (definition_separator_symbol definitions_list)*` start_production(:definitions_list, as_hash: true) production(:definitions_list) do |value| if value[:_definitions_list_1].length > 0 [:alt, value[:single_definition]] + value[:_definitions_list_1] else value[:single_definition] end end production(:_definitions_list_1) do |value| Array(value.first) end start_production(:_definitions_list_2, as_hash: true) production(:_definitions_list_2) do |value| if Array(value[:definitions_list]).first == :alt value[:definitions_list][1..-1] else [value[:definitions_list]] end end # `[4] single_definition ::= term (',' term)*` start_production(:single_definition, as_hash: true) production(:single_definition) do |value| if value[:_single_definition_1].length > 0 [:seq, value[:term]] + value[:_single_definition_1] else value[:term] end end production(:_single_definition_1) do |value| value.map {|a1| a1.last[:term]}.compact # Get rid of '|' end # `[5] term ::= factor ('-' exception)?` start_production(:term, as_hash: true) production(:term) do |value| if value[:_term_1] [:diff, value[:factor], value[:_term_1]] else value[:factor] end end production(:_term_1) do |value| value.last[:exception] if value end # `[6] exception ::= factor` start_production(:exception, as_hash: true) production(:exception) do |value| value[:factor] end # `[7] factor ::= (integer '*')? 
primary` start_production(:factor, as_hash: true) production(:factor) do |value| if value[:_factor_1] [:rept, value[:_factor_1], value[:_factor_1], value[:primary]] else value[:primary] end end production(:_factor_2) do |value| value.first[:integer] end # `[9] optional_sequence ::= start_option_symbol definitions_list end_option_symbol` production(:optional_sequence) do |value| [:opt, value[1][:definitions_list]] end # `[10] repeated_sequence ::= start_repeat_symbol definitions_list end_repeat_symbol` production(:repeated_sequence) do |value| [:star, value[1][:definitions_list]] end # `[11] grouped_sequence ::= '(' definitions_list ')'` production(:grouped_sequence) do |value| [:seq, value[1][:definitions_list]] end # ## Parser invocation. # On start, yield ourselves if a block is given, otherwise, return this parser instance # # @param [#read, #to_s] input # @param [Hash{Symbol => Object}] options # @option options [Boolean] :level # Trace level. 0(debug), 1(info), 2(warn), 3(error). # @return [EBNFParser] def initialize(input, **options, &block) # If the `level` option is set, instantiate a logger for collecting trace information. if options.key?(:level) options[:logger] ||= Logger.new(STDERR). tap {|x| x.level = options[:level]}. tap {|x| x.formatter = lambda {|severity, datetime, progname, msg| "#{severity} #{msg}\n"}} end # Read input, if necessary, which will be used in a Scanner. @input = input.respond_to?(:read) ? input.read : input.to_s parsing_terminals = false @ast = [] parse(@input, :syntax, ISOEBNFMeta::RULES, whitespace: %r{([\x09-\x0d\x20]|(?:\(\*(?:(?:\*[^\)])|[^*])*\*\)))+}, **options ) do |context, *data| rule = case context when :rule # A rule which has already been turned into a `Rule` object. rule = data.first rule.kind = :terminal if parsing_terminals rule end @ast << rule if rule end rescue EBNF::PEG::Parser::Error => e raise SyntaxError, e.message end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/rule.rb
lib/ebnf/rule.rb
require 'scanf' require 'strscan' require 'sxp' unless defined?(SXP) module EBNF # Represent individual parsed rules class Rule # Operations which are flattened to seprate rules in to_bnf. BNF_OPS = %w{ alt diff not opt plus rept seq star }.map(&:to_sym).freeze TERM_OPS = %w{ hex istr range }.map(&:to_sym).freeze # The number of arguments expected per operator. `nil` for unspecified OP_ARGN = { alt: nil, diff: 2, hex: 1, istr: 1, not: 1, opt: 1, plus: 1, range: 1, rept: 3, seq: nil, star: 1 } # Symbol of rule # # @return [Symbol] attr_accessor :sym # ID of rule # @return [String] attr_accessor :id # A comprehension is a sequence which contains all elements but the first of the original rule. # # @return [Rule] attr_accessor :comp # Kind of rule # # @return [:rule, :terminal, :terminals, or :pass] attr_accessor :kind # Rule expression # # @return [Array] attr_accessor :expr # Original EBNF # # @return [String] attr_accessor :orig # Terminals that immediately procede this rule # # @return [Array<Rule>] attr_reader :first # Terminals that immediately follow this rule # # @return [Array<Rule>] attr_reader :follow # Indicates that this is a starting rule # # @return [Boolean] attr_accessor :start # Determines preparation and cleanup rules for reconstituting EBNF ? * + from BNF attr_accessor :cleanup # @param [Symbol, nil] sym # `nil` is allowed only for @pass or @terminals # @param [Integer, nil] id # @param [Array] expr # The expression is an internal-representation of an S-Expression with one of the following oparators: # # * `alt` – A list of alternative rules, which are attempted in order. It terminates with the first matching rule, or is terminated as unmatched, if no such rule is found. # * `diff` – matches any string that matches `A` but does not match `B`. # * `hex` – A single character represented using the hexadecimal notation `#xnn`. 
# * `istr` – A string which matches in a case-insensitive manner, so that `(istr "fOo")` will match either of the strings `"foo"`, `"FOO"` or any other combination. # * `opt` – An optional rule or terminal. It either results in the matching rule or returns `nil`. # * `plus` – A sequence of one or more of the matching rule. If there is no such rule, it is terminated as unmatched; otherwise, the result is an array containing all matched input. # * `range` – A range of characters, possibly repeated, of the form `(range "a-z")`. May also use hexadecimal notation. # * `rept m n` – A sequence of at lest `m` and at most `n` of the matching rule. It will always return an array. # * `seq` – A sequence of rules or terminals. If any (other than `opt` or `star`) to not parse, the rule is terminated as unmatched. # * `star` – A sequence of zero or more of the matching rule. It will always return an array. # @param [:rule, :terminal, :terminals, :pass] kind (nil) # @param [String] ebnf (nil) # When parsing, records the EBNF string used to create the rule. # @param [Array] first (nil) # Recorded set of terminals that can proceed this rule (LL(1)) # @param [Array] follow (nil) # Recorded set of terminals that can follow this rule (LL(1)) # @param [Boolean] start (nil) # Is this the starting rule for the grammar? # @param [Rule] top_rule (nil) # The top-most rule. All expressed rules are top-rules, derived rules have the original rule as their top-rule. # @param [Boolean] cleanup (nil) # Records information useful for cleaning up converted :plus, and :star expansions (LL(1)). def initialize(sym, id, expr, kind: nil, ebnf: nil, first: nil, follow: nil, start: nil, top_rule: nil, cleanup: nil) @sym, @id = sym, id @expr = expr.is_a?(Array) ? 
expr : [:seq, expr].compact @ebnf, @kind, @first, @follow, @start, @cleanup, @top_rule = ebnf, kind, first, follow, start, cleanup, top_rule @top_rule ||= self @kind ||= case when sym.to_s == sym.to_s.upcase then :terminal when !BNF_OPS.include?(@expr.first) then :terminal else :rule end # Allow @pass and @terminals to not be named @sym ||= :_pass if @kind == :pass @sym ||= :_terminals if @kind == :terminals raise ArgumentError, "Rule sym must be a symbol, was #{@sym.inspect}" unless @sym.is_a?(Symbol) raise ArgumentError, "Rule id must be a string or nil, was #{@id.inspect}" unless (@id || "").is_a?(String) raise ArgumentError, "Rule kind must be one of :rule, :terminal, :terminals, or :pass, was #{@kind.inspect}" unless @kind.is_a?(Symbol) && %w(rule terminal terminals pass).map(&:to_sym).include?(@kind) case @expr.first when :alt raise ArgumentError, "#{@expr.first} operation must have at least one operand, had #{@expr.length - 1}" unless @expr.length > 1 when :diff raise ArgumentError, "#{@expr.first} operation must have exactly two operands, had #{@expr.length - 1}" unless @expr.length == 3 when :hex, :istr, :not, :opt, :plus, :range, :star raise ArgumentError, "#{@expr.first} operation must have exactly one operand, had #{@expr.length - 1}" unless @expr.length == 2 when :rept raise ArgumentError, "#{@expr.first} operation must have exactly three, had #{@expr.length - 1}" unless @expr.length == 4 raise ArgumentError, "#{@expr.first} operation must an non-negative integer minimum, was #{@expr[1]}" unless @expr[1].is_a?(Integer) && @expr[1] >= 0 raise ArgumentError, "#{@expr.first} operation must an non-negative integer maximum or '*', was #{@expr[2]}" unless @expr[2] == '*' || @expr[2].is_a?(Integer) && @expr[2] >= 0 when :seq # It's legal to have a zero-length sequence else raise ArgumentError, "Rule expression must be an array using a known operator, was #{@expr.first}" end end ## # Return a rule from its SXP representation: # # @example inputs # (pass _pass 
(plus (range "#x20\\t\\r\\n"))) # (rule ebnf "1" (star (alt declaration rule))) # (terminal R_CHAR "19" (diff CHAR (alt "]" "-"))) # # Also may have `(first ...)`, `(follow ...)`, or `(start #t)`. # # @param [String, Array] sxp # @return [Rule] def self.from_sxp(sxp) if sxp.is_a?(String) sxp = SXP.parse(sxp) end expr = sxp.detect {|e| e.is_a?(Array) && ![:first, :follow, :start].include?(e.first.to_sym)} first = sxp.detect {|e| e.is_a?(Array) && e.first.to_sym == :first} first = first[1..-1] if first follow = sxp.detect {|e| e.is_a?(Array) && e.first.to_sym == :follow} follow = follow[1..-1] if follow cleanup = sxp.detect {|e| e.is_a?(Array) && e.first.to_sym == :cleanup} cleanup = cleanup[1..-1] if cleanup start = sxp.any? {|e| e.is_a?(Array) && e.first.to_sym == :start} sym = sxp[1] if sxp[1].is_a?(Symbol) id = sxp[2] if sxp[2].is_a?(String) self.new(sym, id, expr, kind: sxp.first, first: first, follow: follow, cleanup: cleanup, start: start) end # Build a new rule creating a symbol and numbering from the current rule # Symbol and number creation is handled by the top-most rule in such a chain. # # @param [Array] expr # @param [Symbol] kind (nil) # @param [Hash{Symbol => Symbol}] cleanup (nil) # @param [Hash{Symbol => Object}] options def build(expr, kind: nil, cleanup: nil, **options) new_sym, new_id = @top_rule.send(:make_sym_id) self.class.new(new_sym, new_id, expr, kind: kind, ebnf: @ebnf, top_rule: @top_rule, cleanup: cleanup, **options) end # Return representation for building S-Expressions. 
# # @return [Array] def for_sxp elements = [kind, sym] elements << id if id elements << [:start, true] if start elements << first.sort_by(&:to_s).unshift(:first) if first elements << follow.sort_by(&:to_s).unshift(:follow) if follow elements << [:cleanup, cleanup] if cleanup elements << expr elements end # Return SXP representation of this rule # # @return [String] def to_sxp(**options) for_sxp.to_sxp(**options) end alias_method :to_s, :to_sxp # Serializes this rule to an Turtle. # # @return [String] def to_ttl @ebnf.debug("to_ttl") {inspect} if @ebnf statements = [%{:#{sym} rdfs:label "#{sym}";}] if orig comment = orig.to_s.strip. gsub(/"""/, '\"\"\"'). gsub("\\", "\\\\"). sub(/^\"/, '\"'). sub(/\"$/m, '\"') statements << %{ rdfs:comment #{comment.inspect};} end statements << %{ dc:identifier "#{id}";} if id statements += ttl_expr(expr, terminal? ? "re" : "g", 1, false) "\n" + statements.join("\n") end # Return a Ruby representation of this rule # @return [String] def to_ruby "EBNF::Rule.new(#{sym.inspect}, #{id.inspect}, #{expr.inspect}#{', kind: ' + kind.inspect unless kind == :rule})" end ## # Transform EBNF rule to BNF rules: # # * Transform `(rule a "n" (op1 (op2)))` into two rules: # # (rule a "n" (op1 _a_1)) # (rule _a_1 "n.1" (op2)) # * Transform `(rule a (opt b))` into `(rule a (alt _empty b))` # * Transform `(rule a (star b))` into `(rule a (alt _empty (seq b a)))` # * Transform `(rule a (plus b))` into `(rule a (seq b (star b)` # # Transformation includes information used to re-construct non-transformed. # # AST representation # @return [Array<Rule>] def to_bnf return [self] unless rule? new_rules = [] # Look for rules containing recursive definition and rewrite to multiple rules. If `expr` contains elements which are in array form, where the first element of that array is a symbol, create a new rule for it. if expr.any? 
{|e| e.is_a?(Array) && (BNF_OPS + TERM_OPS).include?(e.first)} # * Transform (a [n] rule (op1 (op2))) into two rules: # (a.1 [n.1] rule (op1 a.2)) # (a.2 [n.2] rule (op2)) # duplicate ourselves for rewriting this = dup new_rules << this expr.each_with_index do |e, index| next unless e.is_a?(Array) && e.first.is_a?(Symbol) new_rule = build(e) this.expr[index] = new_rule.sym new_rules << new_rule end # Return new rules after recursively applying #to_bnf new_rules = new_rules.map {|r| r.to_bnf}.flatten elsif expr.first == :opt this = dup # * Transform (rule a (opt b)) into (rule a (alt _empty b)) this.expr = [:alt, :_empty, expr.last] this.cleanup = :opt new_rules = this.to_bnf elsif expr.first == :star # * Transform (rule a (star b)) into (rule a (alt _empty (seq b a))) this = dup this.cleanup = :star new_rule = this.build([:seq, expr.last, this.sym], cleanup: :merge) this.expr = [:alt, :_empty, new_rule.sym] new_rules = [this] + new_rule.to_bnf elsif expr.first == :plus # * Transform (rule a (plus b)) into (rule a (seq b (star b) this = dup this.cleanup = :plus this.expr = [:seq, expr.last, [:star, expr.last]] new_rules = this.to_bnf elsif [:alt, :seq].include?(expr.first) # Otherwise, no further transformation necessary new_rules << self elsif [:diff, :hex, :range].include?(expr.first) # This rules are fine, they just need to be terminals raise "Encountered #{expr.first.inspect}, which is a #{self.kind}, not :terminal" unless self.terminal? new_rules << self else # Some case we didn't think of raise "Error trying to transform #{expr.inspect} to BNF" end return new_rules end ## # Transform EBNF rule for PEG: # # * Transform `(rule a "n" (op1 ... (op2 y) ...z))` into two rules: # # (rule a "n" (op1 ... _a_1 ... z)) # (rule _a_1 "n.1" (op2 y)) # * Transform `(rule a "n" (diff op1 op2))` into two rules: # # (rule a "n" (seq _a_1 op1)) # (rule _a_1 "n.1" (not op1)) # # @return [Array<Rule>] def to_peg new_rules = [] # Look for rules containing sub-sequences if expr.any? 
{|e| e.is_a?(Array) && e.first.is_a?(Symbol)} # duplicate ourselves for rewriting this = dup new_rules << this expr.each_with_index do |e, index| next unless e.is_a?(Array) && e.first.is_a?(Symbol) new_rule = build(e) this.expr[index] = new_rule.sym new_rules << new_rule end # Return new rules after recursively applying #to_bnf new_rules = new_rules.map {|r| r.to_peg}.flatten elsif expr.first == :diff && !terminal? this = dup new_rule = build([:not, expr[2]]) this.expr = [:seq, new_rule.sym, expr[1]] new_rules << this new_rules << new_rule elsif [:hex, :istr, :range].include?(expr.first) # This rules are fine, they just need to be terminals raise "Encountered #{expr.first.inspect}, which is a #{self.kind}, not :terminal" unless self.terminal? new_rules << self else new_rules << self end return new_rules.map {|r| r.extend(EBNF::PEG::Rule)} end ## # For :hex or :range, create a regular expression. # # @return [Regexp] def to_regexp case expr.first when :hex Regexp.new(Regexp.escape(translate_codepoints(expr[1]))) when :istr /#{expr.last}/ui when :range Regexp.new("[#{escape_regexp_character_range(translate_codepoints(expr[1]))}]") else raise "Can't turn #{expr.inspect} into a regexp" end end # Is this a terminal? # # @return [Boolean] def terminal? kind == :terminal end # Is this a pass? # @return [Boolean] def pass? kind == :pass end # Is this a rule? # @return [Boolean] def rule? kind == :rule end # Is this rule of the form (alt ...)? def alt? expr.is_a?(Array) && expr.first == :alt end # Is this rule of the form (seq ...)? def seq? expr.is_a?(Array) && expr.first == :seq end def inspect "#<EBNF::Rule:#{object_id} " + {sym: sym, id: id, kind: kind, expr: expr}.inspect + ">" end # Two rules are equal if they have the same {#sym}, {#kind} and {#expr}. # # @param [Rule] other # @return [Boolean] def ==(other) other.is_a?(Rule) && sym == other.sym && kind == other.kind && expr == other.expr end # Two rules are equivalent if they have the same {#expr}. 
# # @param [Rule] other # @return [Boolean] def eql?(other) expr == other.expr end # Rules compare using their ids def <=>(other) if id && other.id if id == other.id id.to_s <=> other.id.to_s else id.to_f <=> other.id.to_f end else sym.to_s <=> other.sym.to_s end end ## # Utility function to translate code points of the form '#xN' into ruby unicode characters def translate_codepoints(str) str.gsub(/#x\h+/) {|c| c[2..-1].scanf("%x").first.chr(Encoding::UTF_8)} end # Return the non-terminals for this rule. # # * `alt` => this is every non-terminal. # * `diff` => this is every non-terminal. # * `hex` => nil # * `istr` => nil # * `not` => this is the last expression, if any. # * `opt` => this is the last expression, if any. # * `plus` => this is the last expression, if any. # * `range` => nil # * `rept` => this is the last expression, if any. # * `seq` => this is the first expression in the sequence, if any. # * `star` => this is the last expression, if any. # # @param [Array<Rule>] ast # The set of rules, used to turn symbols into rules # @param [Array<Symbol,String,Array>] expr (@expr) # The expression to check, defaults to the rule expression. # Typically, if the expression is recursive, the embedded expression is called recursively. # @return [Array<Rule>] # @note this is used for LL(1) tansformation, so rule types are limited def non_terminals(ast, expr = @expr) ([:alt, :diff].include?(expr.first) ? expr[1..-1] : expr[1,1]).map do |sym| case sym when Symbol r = ast.detect {|r| r.sym == sym} r if r && r.rule? when Array non_terminals(ast, sym) else nil end end.flatten.compact.uniq end # Return the terminals for this rule. # # * `alt` => this is every terminal. # * `diff` => this is every terminal. # * `hex` => nil # * `istr` => nil # * `not` => this is the last expression, if any. # * `opt` => this is the last expression, if any. # * `plus` => this is the last expression, if any. # * `range` => nil # * `rept` => this is the last expression, if any. 
# * `seq` => this is the first expression in the sequence, if any. # * `star` => this is the last expression, if any. # # @param [Array<Rule>] ast # The set of rules, used to turn symbols into rules # @param [Array<Symbol,String,Array>] expr (@expr) # The expression to check, defaults to the rule expression. # Typically, if the expression is recursive, the embedded expression is called recursively. # @return [Array<Rule>] # @note this is used for LL(1) tansformation, so rule types are limited def terminals(ast, expr = @expr) ([:alt, :diff].include?(expr.first) ? expr[1..-1] : expr[1,1]).map do |sym| case sym when Symbol r = ast.detect {|r| r.sym == sym} r if r && r.terminal? when String sym when Array terminals(ast, sym) end end.flatten.compact.uniq end # Return the symbols used in the rule. # # @param [Array<Symbol,String,Array>] expr (@expr) # The expression to check, defaults to the rule expression. # Typically, if the expression is recursive, the embedded expression is called recursively. # @return [Array<Rule>] def symbols(expr = @expr) expr[1..-1].map do |sym| case sym when Symbol sym when Array symbols(sym) end end.flatten.compact.uniq end ## # The following are used for LL(1) transformation. ## # Does this rule start with `sym`? It does if expr is that sym, # expr starts with alt and contains that sym, # or expr starts with seq and the next element is that sym. # # @param [Symbol, class] sym # Symbol matching any start element, or if it is String, any start element which is a String # @return [Array<Symbol, String>] list of symbol (singular), or strings which are start symbol, or nil if there are none def starts_with?(sym) if seq? && sym === (v = expr.fetch(1, nil)) [v] elsif alt? && expr.any? {|e| sym === e} expr.select {|e| sym === e} else nil end end ## # Validate the rule, with respect to an AST. 
# # @param [Array<Rule>] ast # The set of rules, used to turn symbols into rules # @param [Array<Symbol,String,Array>] expr (@expr) # The expression to check, defaults to the rule expression. # Typically, if the expression is recursive, the embedded expression is called recursively. # @raise [RangeError] def validate!(ast, expr = @expr) op = expr.first raise SyntaxError, "Unknown operator: #{op}" unless OP_ARGN.key?(op) raise SyntaxError, "Argument count missmatch on operator #{op}, had #{expr.length - 1} expected #{OP_ARGN[op]}" if OP_ARGN[op] && OP_ARGN[op] != expr.length - 1 # rept operator needs min and max if op == :alt raise SyntaxError, "alt operation must have at least one operand, had #{expr.length - 1}" unless expr.length > 1 elsif op == :rept raise SyntaxError, "rept operation must an non-negative integer minimum, was #{expr[1]}" unless expr[1].is_a?(Integer) && expr[1] >= 0 raise SyntaxError, "rept operation must an non-negative integer maximum or '*', was #{expr[2]}" unless expr[2] == '*' || expr[2].is_a?(Integer) && expr[2] >= 0 end case op when :hex raise SyntaxError, "Hex operand must be of form '#xN+': #{sym}" unless expr.last.match?(/^#x\h+$/) when :range str = expr.last.dup str = str[1..-1] if str.start_with?('^') str = str[0..-2] if str.end_with?('-') # Allowed at end of range scanner = StringScanner.new(str) hex = rchar = in_range = false while !scanner.eos? begin if scanner.scan(Terminals::HEX) raise SyntaxError if in_range && rchar rchar = in_range = false hex = true elsif scanner.scan(Terminals::R_CHAR) raise SyntaxError if in_range && hex hex = in_range = false rchar = true else raise(SyntaxError, "Range contains illegal components at offset #{scanner.pos}: was #{expr.last}") end if scanner.scan(/\-/) raise SyntaxError if in_range in_range = true end rescue SyntaxError raise(SyntaxError, "Range contains illegal components at offset #{scanner.pos}: was #{expr.last}") end end else ([:alt, :diff].include?(expr.first) ? 
expr[1..-1] : expr[1,1]).each do |sym| case sym when Symbol r = ast.detect {|r| r.sym == sym} raise SyntaxError, "No rule found for #{sym}" unless r when Array validate!(ast, sym) when String raise SyntaxError, "String must be of the form CHAR*" unless sym.match?(/^#{Terminals::CHAR}*$/) end end end end ## # Validate the rule, with respect to an AST. # # Uses `#validate!` and catches `RangeError` # # @param [Array<Rule>] ast # The set of rules, used to turn symbols into rules # @return [Boolean] def valid?(ast) validate!(ast) true rescue SyntaxError false end # Do the firsts of this rule include the empty string? # # @return [Boolean] def first_includes_eps? @first && @first.include?(:_eps) end # Add terminal as proceding this rule. # # @param [Array<Rule, Symbol, String>] terminals # @return [Integer] if number of terminals added def add_first(terminals) @first ||= [] terminals = terminals.map {|t| t.is_a?(Rule) ? t.sym : t} - @first @first += terminals terminals.length end # Add terminal as following this rule. Don't add _eps as a follow # # @param [Array<Rule, Symbol, String>] terminals # @return [Integer] if number of terminals added def add_follow(terminals) # Remove terminals already in follows, and empty string terminals = terminals.map {|t| t.is_a?(Rule) ? t.sym : t} - (@follow || []) - [:_eps] unless terminals.empty? 
@follow ||= [] @follow += terminals end terminals.length end private def ttl_expr(expr, pfx, depth, is_obj = true) indent = ' ' * depth @ebnf.debug("ttl_expr", depth: depth) {expr.inspect} if @ebnf op, *expr = expr if expr.is_a?(Array) statements = [] if is_obj bra, ket = "[ ", " ]" else bra = ket = '' end case op when :seq, :alt, :diff # Multiple operands statements << %{#{indent}#{bra}#{pfx}:#{op} (} expr.each {|a| statements += ttl_expr(a, pfx, depth + 1)} statements << %{#{indent} )#{ket}} when :opt, :plus, :star, :not # Single operand statements << %{#{indent}#{bra}#{pfx}:#{op} } statements += ttl_expr(expr.first, pfx, depth + 1) statements << %{#{indent} #{ket}} unless ket.empty? when :rept # Three operands (min, max and expr) statements << %{ #{indent}#{pfx}:min #{expr[0].inspect};} statements << %{ #{indent}#{pfx}:max #{expr[1].inspect};} statements << %{#{indent}#{bra}#{pfx}:#{op} } statements += ttl_expr(expr.last, pfx, depth + 1) statements << %{#{indent} #{ket}} unless ket.empty? when :_empty, :_eps statements << %{#{indent}"g:#{op.to_s[1..-1]}"} when :"'" statements << %{#{indent}"#{esc(expr)}"} when :istr statements << %{#{indent}#{bra} re:matches #{expr.first.inspect} #{ket}} when :range statements << %{#{indent}#{bra} re:matches #{cclass(expr.first).inspect} #{ket}} when :hex raise "didn't expect \" in expr" if expr.include?(:'"') statements << %{#{indent}#{bra} re:matches #{cclass(expr.first).inspect} #{ket}} else if is_obj statements << %{#{indent}#{expr.inspect}} else statements << %{#{indent}g:seq ( #{expr.inspect} )} end end statements.last << " ." 
unless is_obj @ebnf.debug("statements", depth: depth) {statements.join("\n")} if @ebnf statements end ## # turn an XML BNF character class into an N3 literal for that # character class (less the outer quote marks) # # >>> cclass("^<>'{}|^`") # "[^<>'{}|^`]" # >>> cclass("#x0300-#x036F") # "[\\u0300-\\u036F]" # >>> cclass("#xC0-#xD6") # "[\\u00C0-\\u00D6]" # >>> cclass("#x370-#x37D") # "[\\u0370-\\u037D]" # # as in: ECHAR ::= '\' [tbnrf\"'] # >>> cclass("tbnrf\\\"'") # 'tbnrf\\\\\\"\'' # # >>> cclass("^#x22#x5C#x0A#x0D") # '^\\u0022\\\\\\u005C\\u000A\\u000D' def cclass(txt) '[' + txt.gsub(/\#x[0-9a-fA-F]+/) do |hx| hx = hx[2..-1] if hx.length <= 4 "\\u#{'0' * (4 - hx.length)}#{hx}" elsif hx.length <= 8 "\\U#{'0' * (8 - hx.length)}#{hx}" end end + ']' end # Make a new symbol/number combination # @param [String] variation added to symbol to aid reconstitution from BNF to EBNF def make_sym_id(variation = nil) @id_seq ||= 0 @id_seq += 1 ["_#{@sym}_#{@id_seq}#{variation}".to_sym, ("#{@id}.#{@id_seq}#{variation}" if @id)] end # Escape "[", "]", and "\" in ranges so they don't result in a warning or error # about empty character classes. def escape_regexp_character_range(character_range) character_range.gsub(/([\[\]\\])/) {|char| "\\#{char}"} end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/ll1.rb
lib/ebnf/ll1.rb
module EBNF
  ##
  # This module extends {EBNF::Base} to create metadata including _branch_, [First/Follow][], and other tables which is used by {EBNF::LL1::Parser} to recognize examples of the associated grammar.
  #
  # ### Branch Table
  #
  # The Branch table is a hash mapping production rules to a hash relating terminals appearing in input to sequence of productions to follow when the corresponding input terminal is found. This allows either the `seq` primitive, where all terminals map to the same sequence of productions, or the `alt` primitive, where each terminal may map to a different production.
  #
  #     BRANCH = {
  #       :alt => {
  #         "(" => [:seq, :_alt_1],
  #         :HEX => [:seq, :_alt_1],
  #         :O_RANGE => [:seq, :_alt_1],
  #         :RANGE => [:seq, :_alt_1],
  #         :STRING1 => [:seq, :_alt_1],
  #         :STRING2 => [:seq, :_alt_1],
  #         :SYMBOL => [:seq, :_alt_1],
  #       },
  #       ...
  #       :declaration => {
  #         "@pass" => [:pass],
  #         "@terminals" => ["@terminals"],
  #       },
  #       ...
  #     }
  #
  # In this case the `alt` rule is `seq ('|' seq)*` can happen when any of the specified tokens appears on the input stream. The all cause the same token to be passed to the `seq` rule and follow with `_alt_1`, which handles the `('|' seq)*` portion of the rule, after the first sequence is matched.
  #
  # The `declaration` rule is `@terminals' | pass` using the `alt` primitive determining the production to run based on the terminal appearing on the input stream. Eventually, a terminal production is found and the token is consumed.
  #
  # ### First/Follow Table
  #
  # The [First/Follow][] table is a hash mapping production rules to the terminals that may proceed or follow the rule. For example:
  #
  #     FIRST = {
  #       :alt => [
  #         :HEX,
  #         :SYMBOL,
  #         :RANGE,
  #         :O_RANGE,
  #         :STRING1,
  #         :STRING2,
  #         "("],
  #       ...
  #     }
  #
  # ### Terminals Table
  #
  # This table is a simple list of the terminal productions found in the grammar. For example:
  #
  #     TERMINALS = ["(", ")", "-",
  #       "@pass", "@terminals",
  #       :HEX, :LHS, :O_RANGE,:POSTFIX,
  #       :RANGE, :STRING1, :STRING2, :SYMBOL,"|"
  #     ].freeze
  #
  # ### Cleanup Table
  #
  # This table identifies productions which used EBNF rules, which are transformed to BNF for actual parsing. This allows the parser, in some cases, to reproduce *star*, *plus*, and *opt* rule matches. For example:
  #
  #     CLEANUP = {
  #       :_alt_1 => :star,
  #       :_alt_3 => :merge,
  #       :_diff_1 => :opt,
  #       :ebnf => :star,
  #       :_ebnf_2 => :merge,
  #       :_postfix_1 => :opt,
  #       :seq => :plus,
  #       :_seq_1 => :star,
  #       :_seq_2 => :merge,
  #     }.freeze
  #
  # In this case the `ebnf` rule was `(declaration | rule)*`. As BNF does not support a star operator, this is decomposed into a set of rules using `alt` and `seq` primitives:
  #
  #     ebnf ::= _empty _ebnf_2
  #     _ebnf_1 ::= declaration | rule
  #     _ebnf_2 ::= _ebnf_1 ebnf
  #     _ebnf_3 ::= ebnf
  #
  # The `_empty` production matches an empty string, so allows for no value. `_ebnf_2` matches `declaration | rule` (using the `alt` primitive) followed by `ebnf`, creating a sequence of zero or more `declaration` or `alt` members.
  #
  # [First/Follow]: https://en.wikipedia.org/wiki/LL_parser#Constructing_an_LL.281.29_parsing_table
  module LL1
    autoload :Lexer,   "ebnf/ll1/lexer"
    autoload :Parser,  "ebnf/ll1/parser"
    autoload :Scanner, "ebnf/ll1/scanner"

    # Branch table, represented as a recursive hash.
    # The table is indexed by rule symbol, which in-turn references a hash of terminals (which are the first terminals of the production), which in turn reference the sequence of rules that follow, given that terminal as input
    #
    # @return [Hash{Symbol => Hash{String, Symbol => Array<Symbol>}}]
    attr_reader :branch

    # First table
    #
    # @return [Hash{Symbol => Array<String, Symbol>}]
    attr_reader :first

    # Follow table
    #
    # @return [Hash{Symbol => Array<String, Symbol>}]
    attr_reader :follow

    # EBNF Cleanup table
    #
    # Maps generated rule symbols to the EBNF operation they were derived
    # from (e.g. `:star`, `:opt`, `:plus`, `:merge`).
    # (doc fix: previously repeated the terminals description)
    #
    # @return [Hash{Symbol => Symbol}]
    attr_reader :cleanup

    # Terminal table
    #
    # The list of terminals used in the grammar.
    #
    # @return [Array<String, Symbol>]
    attr_reader :terminals

    # Pass expression
    #
    # A Terminal symbol used for skipping whitespace and comments
    #
    # @return [Symbol, String]
    attr_reader :pass

    # Start symbol
    #
    # The rule which starts the grammar
    #
    # @return [Symbol]
    attr_reader :start

    ##
    # Create first/follow for each rule using techniques defined for LL(1) parsers.
    #
    # This takes rules which have been transformed into BNF and adds first/follow and other information to the rules to allow the generation of metadata tables used for driving a parser.
    #
    # Given an initial rule in EBNF:
    #
    #     (rule ebnf "1" (star declaration rule))
    #
    # The BNF transformation becomes:
    #
    #     (rule ebnf "1" (alt _empty _ebnf_2))
    #     (rule _ebnf_1 "1.1" (alt declaration rule))
    #     (rule _ebnf_2 "1.2" (seq _ebnf_1 ebnf))
    #     (rule _ebnf_3 "1.3" (seq ebnf))
    #
    # After running this method, the rules are annotated with first/follow and cleanup rules:
    #
    #     (rule ebnf "1"
    #      (start #t)
    #      (first "@pass" "@terminals" LHS _eps)
    #      (follow _eof)
    #      (cleanup star)
    #      (alt _empty _ebnf_2))
    #     (rule _ebnf_1 "1.1"
    #      (first "@pass" "@terminals" LHS)
    #      (follow "@pass" "@terminals" LHS _eof)
    #      (alt declaration rule))
    #     (rule _ebnf_2 "1.2"
    #      (first "@pass" "@terminals" LHS)
    #      (follow _eof)
    #      (cleanup merge)
    #      (seq _ebnf_1 ebnf))
    #     (rule _ebnf_3 "1.3" (first "@pass" "@terminals" LHS _eps) (follow _eof) (seq ebnf))
    #
    # @return [EBNF] self
    # @see https://en.wikipedia.org/wiki/LL_parser#Constructing_an_LL.281.29_parsing_table
    # @param [Array<Symbol>] starts
    #   Set of symbols which are start rules
    def first_follow(*starts)
      # Add _eof to follow all start rules
      @starts = starts
      if @start = starts.first
        starts.each do |start|
          start_rule = find_rule(start)
          raise "No rule found for start symbol #{start}" unless start_rule
          start_rule.add_follow([:_eof])
          start_rule.start = true
        end
      end

      # Comprehension rule, create shorter versions of all non-terminal sequences. This is necessary as the FF rules reference w', which is a comprehension.
      comprehensions = []
      ittr = 0
      depth do
        begin
          comprehensions = []
          ast.select {|r| r.rule? && r.seq? && r.comp.nil? && r.expr.length > 2}.each do |rule|
            new_expr = rule.expr[2..-1].unshift(:seq)
            if new_rule = ast.detect {|r| r.expr == new_expr}
              # Link to existing comprehension used for another rules
              debug("FF.c") {"(#{ittr}) link comprehension rule for #{rule.sym} => #{new_rule.sym}[#{new_expr.inspect}]"}
            else
              new_rule = rule.build(new_expr)
              debug("FF.c") {"(#{ittr}) add comprehension rule for #{rule.sym} => #{new_rule.sym}[#{new_expr.inspect}]"}
              comprehensions << new_rule
            end
            rule.comp = new_rule
          end

          @ast += comprehensions
          progress("FF.c") {"(#{ittr}) comprehensions #{comprehensions.length}"}
          ittr += 1
        end while !comprehensions.empty?

        ittr = 0
        begin
          firsts, follows = 0, 0
          # add Fi(wi) to Fi(Ai) for every rule Ai → wi
          #
          # * For sequences, this is the first rule in the sequence.
          # * For alts, this is every rule in the sequence
          # * Other rules don't matter, as they don't appear in strict BNF
          each(:rule) do |ai|
            # Fi(a w' ) = { a } for every terminal a
            ai.terminals(ast).each do |t|
              debug("Fi.2.1") {"(#{ittr}) add terminal #{t} to #{ai.sym}"}
              firsts += ai.add_first([t])
            end

            ai.non_terminals(ast).select(&:first).each do |a|
              if !a.first_includes_eps?
                # Fi(A w' ) = Fi(A) for every nonterminal A with ε not in Fi(A)
                debug("Fi.2.2") {"(#{ittr}) add first from #{a.sym} to #{ai.sym}: #{a.first.inspect}"}
                firsts += ai.add_first(a.first)
              else
                # Fi(A w' ) = Fi(A) \ { ε } ∪ Fi(w' ) for every nonterminal A with ε in Fi(A)
                if ai.seq?
                  # w' is either comprehension of ai, or empty, if there is no comprehension
                  comp = ai.comp || find_rule(:_empty)
                  fi = a.first - [:_eps] + (comp.first || [])
                  debug("Fi.2.3a") {"(#{ittr}) add first #{fi.inspect} from #{a.sym} and #{comp.sym} to #{ai.sym}"}
                  firsts += ai.add_first(fi)
                else
                  # ai is an alt, so there are no comprehensions of non-terminals, add Fi(A) including ε
                  debug("Fi.2.3b") {"(#{ittr}) add first #{a.first} from #{a.sym} to #{ai.sym}"}
                  firsts += ai.add_first(a.first)
                end
              end
            end
          end

          # # Fi(ε) = { ε }
          # Add _eps as a first of _empty
          find_rule(:_empty).add_first([:_eps])

          # Add follows
          # if there is a rule of the form Aj → wAiw' , then
          # First do this for the case when Ai is the first rule
          each(:rule) do |aj|
            comp = aj.comp || find_rule(:_empty)
            aj.non_terminals(ast).reject {|r| r.sym == :_empty}.each do |ai|
              # if the terminal a is in Fi(w' ), then add a to Fo(Ai)
              # Basically, this says that the firsts of a comprehension of a rule are the follows of the first non-terminal in the rule.
              if comp.first
                debug("Fo.2.1") {"(#{ittr}) add follow #{comp.first.inspect} from #{comp.sym} to #{ai.sym}"}
                follows += ai.add_follow(comp.first)
              end

              # If there is no comprehension of this rule (meaning, it is a sequence of one non-terminal), then the follows of the non-terminal include the follows of the rule. This handles rules with multiple sequences because it will have a comprehension that includes the last element in the sequence
              if !aj.comp && aj.follow
                debug("Fo.2.1a") {"(#{ittr}) add follow #{aj.follow.inspect} from #{aj.sym} to #{ai.sym}"}
                follows += ai.add_follow(aj.follow)
              end

              # if ε is in Fi(w' ), then add Fo(Aj) to Fo(Ai)
              if comp.first_includes_eps? && aj.follow
                debug("Fo.2.2") {"(#{ittr}) add follow #{aj.follow.inspect} from #{aj.sym} to #{ai.sym}"}
                follows += ai.add_follow(aj.follow)
              end
            end

            # Since the rules are of the form wAiw', and we've handled the case which is just Aiw', this leaves those cases that have rules prior to Ai. This basically says that the follows of a rule are added to the follows of the comprehension of the rule
            if aj.comp && aj.follow
              debug("Fo.2.3") {"(#{ittr}) add follow #{aj.follow.inspect} from #{aj.sym} to #{aj.comp.sym}"}
              follows += aj.comp.add_follow(aj.follow)
            end
          end

          progress("first_follow") {"(#{ittr}) firsts #{firsts}, follows #{follows}"}
          ittr += 1
        end while (firsts + follows) > 0

        debug("Fi.2-post: non-terminals without first") do
          ast.reject(&:terminal?).reject(&:first).map(&:sym)
        end if ast.reject(&:terminal?).any? {|r| r.first.nil?}
      end
    end

    ##
    # Generate parser tables, {#branch}, {#first}, {#follow}, and {#terminals}
    def build_tables
      progress("build_tables") {
        "Terminals: #{ast.count {|r| r.terminal?}} " +
        "Non-Terminals: #{ast.count {|r| r.rule?}}"
      }

      @first = ast.
        select(&:first).
        inject({}) {|memo, r| memo.merge(r.sym => r.first)}
      @follow = ast.
        select(&:follow).
        inject({}) {|memo, r| memo.merge(r.sym => r.follow)}
      @cleanup = ast.
        select(&:cleanup).
        inject({}) {|memo, r| memo.merge(r.sym => r.cleanup)}
      @terminals = ast.map {|r| Array(r.first) + Array(r.follow)}.flatten.uniq
      @terminals = (@terminals - [:_eps, :_eof]).sort_by{|t| t.to_s.sub(/^_/, '')}

      # FIXME: assumes that this is a (seq :PASS), or similar
      if pass = ast.detect {|r| r.pass?}
        @pass = pass.expr.last
      end

      # If a generated terminal is found, this indicates an error, as this version does not automatically generate regular expressions for automatic terminals
      @terminals.
        select {|t| t.to_s.start_with?("_")}.
        reject {|t| t.to_s.start_with?("_pass_")}.  # Concession to backwards compatibility
        each do |term|
          error("build_tables", "terminal #{term} is automatically generated; " +
                "regular expressions are not yet generated and parsing " +
                "is not supported")
        end

      @branch = {}
      @already = []
      @agenda = []
      Array(@starts).each do |start|
        do_production(start)
        while !@agenda.empty?
          x = @agenda.shift
          do_production(x)
        end
      end

      if !@errors.empty?
        progress("###### FAILED with #{errors.length} errors.")
        @errors.each {|s| progress(" #{s}")}
        raise "Table creation failed with errors"
      else
        progress("Ok for predictive parsing")
      end
    end

    # Generate an output table in Ruby format
    # @param [IO, StringIO] io
    # @param [String] name of the table constant
    # @param [String] table
    #   to output, one of {#branch}, {#first}, {#follow}, {#cleanup} or {#terminals}
    # @param [Integer] indent = 0
    def outputTable(io, name, table, indent = 0)
      ind0 = ' ' * indent
      ind1 = ind0 + ' '
      ind2 = ind1 + ' '

      if table.is_a?(Hash)
        io.puts "#{ind0}#{name} = {"
        table.keys.sort_by{|t| t.to_s.sub(/^_/, '')}.each do |prod|
          case table[prod]
          when Symbol, String
            io.puts "#{ind1}#{prod.inspect} => #{table[prod].inspect},"
          when Array
            list = table[prod].map(&:inspect).join(",\n#{ind2}")
            io.puts "#{ind1}#{prod.inspect} => [\n#{ind2}#{list}],"
          when Hash
            io.puts "#{ind1}#{prod.inspect} => {"
            table[prod].keys.sort_by{|t| t.to_s.sub(/^_/, '')}.each do |term|
              list = table[prod][term].map(&:inspect).join(", ")
              io.puts "#{ind2}#{term.inspect} => [#{list}],"
            end
            io.puts "#{ind1}},"
          else
            "Unknown table entry type: #{table[prod].class}"
          end
        end
        io.puts "#{ind0}}.freeze\n"
      elsif table
        io.puts "#{ind0}#{name} = [\n#{ind1}" +
          table.sort_by{|t| t.to_s.sub(/^_/, '')}.map(&:inspect).join(",\n#{ind1}") +
          "\n#{ind0}].freeze\n"
      end
    end

    ##
    # Output Ruby parser files for LL(1) parsing
    #
    # @param [IO, StringIO] output
    def to_ruby_ll1(output, **options)
      self.outputTable(output, 'BRANCH', self.branch, 1)
      self.outputTable(output, 'TERMINALS', self.terminals, 1)
      self.outputTable(output, 'FIRST', self.first, 1)
      self.outputTable(output, 'FOLLOW', self.follow, 1)
      self.outputTable(output, 'CLEANUP', self.cleanup, 1)
      self.outputTable(output, 'PASS', [self.pass], 1) if self.pass
    end

    private
    # Populate the branch table for a single production `lhs`, queueing
    # any productions it references onto @agenda.
    def do_production(lhs)
      rule = find_rule(lhs)
      if rule.nil? || !rule.rule? || rule.sym == :_empty
        progress("prod") {"Skip: #{lhs.inspect}"}
        return
      end
      @already << lhs

      branchDict = {}

      progress("prod") {"Production #{lhs.inspect}"}

      if rule.expr.first == :matches
        debug("prod") {"Rule is regexp: #{rule}"}
        return
      end

      error("No record of what token #{lhs.inspect} can start with") unless rule.first

      if rule.alt?
        # A First/Follow conflict appears when _eps is in the first
        # of one rule and there is a token in the first and
        # follow of the same rule
        # (bug fix: epsilon marker is :_eps, not :eps; the previous
        # `- [:eps]` never removed epsilon from the overlap set)
        if Array(rule.first).include?(:_eps) && !(overlap = ((Array(rule.first) & (rule.follow || [])) - [:_eps])).empty?
          error("First/Follow Conflict: #{overlap.first.inspect} is both first and follow of #{rule.sym}")
        end

        # Add entries for each alternative, based on the alternative's first/seq
        rule.expr[1..-1].each do |prod|
          prod_rule = find_rule(prod)
          debug(" Alt", prod)

          @agenda << prod unless @already.include?(prod) || @agenda.include?(prod)

          if prod == :_empty
            debug(" empty")
            # Skip empty, rules added below for follows
          elsif prod_rule.nil? || prod_rule.first.nil?
            debug(" no first =>", prod)
            branchDict[prod] = [prod]
          else
            prod_rule.first.reject{|f| f == :_eps}.each do |f|
              # A First/First conflict appears when there are two rules having
              # the same first, so the parser can't know which one to choose.
              if branchDict.has_key?(f)
                error("First/First Conflict: #{f.inspect} is the condition for both #{prod_rule.sym} and #{branchDict[f].first}")
              end
              debug(" alt") {"[#{f}] => #{prod}"}
              branchDict[f] = [prod]
            end
          end
        end
      else
        error("prod") {"Expected lhs to be alt or seq, was: #{rule}"} unless rule.seq?
        debug(" Seq", rule)
        # Entries for each first element referencing the sequence
        (rule.first || []).each do |f|
          if [:_eps, :_eof].include?(f)
            # Skip eps/eof, rules added below for follows
          else
            debug(" seq") {"[#{f}] => #{rule.expr[1..-1].inspect}"}
            branchDict[f] = rule.expr[1..-1]
          end
        end

        # Add each production to the agenda
        rule.expr[1..-1].each do |prod|
          @agenda << prod unless @already.include?(prod) || @agenda.include?(prod)
        end
      end

      # Add follow rules, if first includes eps
      if rule.first_includes_eps?
        (rule.follow || []).reject {|f| f == :_eof}.each do |f|
          debug(" Follow") {f.inspect}
          branchDict[f] ||= []
        end
      end

      @branch[lhs] = branchDict
    end
  end
end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/unescape.rb
lib/ebnf/unescape.rb
# encoding: utf-8
# Unescape strings
module EBNF::Unescape
  ESCAPE_CHARS = {
    '\\t'   => "\t",  # \u0009 (tab)
    '\\n'   => "\n",  # \u000A (line feed)
    '\\r'   => "\r",  # \u000D (carriage return)
    '\\b'   => "\b",  # \u0008 (backspace)
    '\\f'   => "\f",  # \u000C (form feed)
    '\\"'   => '"',   # \u0022 (quotation mark, double quote mark)
    "\\'"   => '\'',  # \u0027 (apostrophe-quote, single quote mark)
    '\\\\'  => '\\'   # \u005C (backslash)
  }.freeze
  ESCAPE_CHAR4 = /\\u(?:[0-9A-Fa-f]{4,4})/u.freeze             # \uXXXX
  ESCAPE_CHAR8 = /\\U(?:[0-9A-Fa-f]{8,8})/u.freeze             # \UXXXXXXXX
  ECHAR        = /\\./u.freeze                                 # More liberal unescaping
  UCHAR        = /#{ESCAPE_CHAR4}|#{ESCAPE_CHAR8}/n.freeze

  ##
  # Returns a copy of `string` in which every `\uXXXX` and `\UXXXXXXXX`
  # Unicode codepoint escape sequence has been replaced by the UTF-8
  # character it denotes.
  #
  # @param [String] string
  # @return [String]
  # @see https://www.w3.org/TR/rdf-sparql-query/#codepointEscape
  def unescape_codepoints(string)
    buffer = string.dup
    # Work on raw bytes while substituting, then restore UTF-8 at the end.
    buffer.force_encoding(Encoding::ASCII_8BIT) if buffer.respond_to?(:force_encoding)

    # Decode \uXXXX and \UXXXXXXXX code points:
    buffer = buffer.gsub(UCHAR) do |escape|
      decoded = [escape[2..-1].hex].pack('U*')
      decoded.respond_to?(:force_encoding) ? decoded.force_encoding(Encoding::ASCII_8BIT) : decoded
    end

    buffer.force_encoding(Encoding::UTF_8) if buffer.respond_to?(:force_encoding)
    buffer
  end
  module_function :unescape_codepoints

  ##
  # Returns a copy of `input` in which every single-character escape
  # sequence (e.g. `\n`, `\t`) has been replaced by its unescaped UTF-8
  # counterpart; unknown escapes are left untouched.
  #
  # @param [String] input
  # @return [String]
  # @see https://www.w3.org/TR/rdf-sparql-query/#grammarEscapes
  def unescape_string(input)
    input.gsub(ECHAR) {|sequence| ESCAPE_CHARS.fetch(sequence, sequence)}
  end
  module_function :unescape_string

  # Perform both codepoint and string unescaping on `string`.
  #
  # @param [String] string
  # @return [String]
  def unescape(string)
    unescape_string(unescape_codepoints(string))
  end
  module_function :unescape
end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/bnf.rb
lib/ebnf/bnf.rb
module EBNF
  module BNF
    ##
    # Rewrite the EBNF rule set as plain BNF.
    #
    # Prepends the synthetic `_empty` rule (id "0", expression `(seq)`),
    # then expands every existing rule into BNF-only rules via {Rule#to_bnf}.
    #
    # @return [EBNF] self
    def make_bnf
      progress("make_bnf") {"Start: #{@ast.length} rules"}

      expanded = ast.flat_map do |rule|
        debug("make_bnf") {"expand from: #{rule.inspect}"}
        bnf_rules = rule.to_bnf
        debug(" => ") {bnf_rules.map(&:sym).join(', ')}
        bnf_rules
      end

      @ast = [Rule.new(:_empty, "0", [:seq], kind: :rule)] + expanded
      progress("make_bnf") {"End: #{@ast.length} rules"}
      self
    end
  end
end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/terminals.rb
lib/ebnf/terminals.rb
# encoding: utf-8 # Terminal definitions for the EBNF grammar module EBNF::Terminals SYMBOL_BASE = %r(\b[a-zA-Z0-9_\.]+\b)u.freeze # Word boundaries SYMBOL = %r((?:#{SYMBOL_BASE}|(?:<#{SYMBOL_BASE}>))(?!\s*::=))u.freeze HEX = %r(\#x\h+)u.freeze CHAR = %r([\u0009\u000A\u000D\u0020-\uD7FF\u{10000}-\u{10FFFF}])u.freeze R_CHAR = %r([\u0009\u000A\u000D\u0020-\u002C\u002E-\u005C\u005E-\uD7FF\u{10000}-\u{10FFFF}])u.freeze LHS = %r((?:\[#{SYMBOL_BASE}\])?\s*<?#{SYMBOL_BASE}>?\s*::=)u.freeze RANGE = %r(\[(?:(?:#{R_CHAR}\-#{R_CHAR})|(?:#{HEX}\-#{HEX})|#{R_CHAR}|#{HEX})+-?\])u.freeze RANGE_NOT_LHS = %r(\[(?:(?:#{R_CHAR}\-#{R_CHAR})|(?:#{HEX}\-#{HEX})|#{R_CHAR}|#{HEX})+-?\](?!\s*<?#{SYMBOL_BASE}>?\s*::=))u.freeze O_RANGE = %r(\[\^(?:(?:#{R_CHAR}\-#{R_CHAR})|(?:#{HEX}\-#{HEX}|#{R_CHAR}|#{HEX}))+-?\])u.freeze STRING1 = %r("[\u0009\u000A\u000D\u0020\u0021\u0023-\uD7FF\u{10000}-\u{10FFFF}]*")u.freeze STRING2 = %r('[\u0009\u000A\u000D\u0020-\u0026\u0028-\uD7FF\u{10000}-\u{10FFFF}]*')u.freeze POSTFIX = %r([?*+])u.freeze PASS = %r(( \s | (?:(?:\#[^x]|//)[^\n\r]*) | (?:/\*(?:(?:\*[^/])|[^*])*\*/) | (?:\(\*(?:(?:\*[^\)])|[^*])*\*\)) )+)xmu.freeze end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/isoebnf/meta.rb
lib/ebnf/isoebnf/meta.rb
# This file is automatically generated by ebnf version 2.0.0
# Derived from etc/iso-ebnf.ebnf
#
# NOTE: generated code — regenerate from etc/iso-ebnf.ebnf rather than
# editing by hand. Each rule is extended with EBNF::PEG::Rule for use by
# the PEG parser.
module ISOEBNFMeta
  RULES = [
    EBNF::Rule.new(:syntax, nil, [:star, :syntax_rule]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:syntax_rule, nil, [:seq, :meta_identifier, :defining_symbol, :definitions_list, :terminator_symbol]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:definitions_list, nil, [:seq, :single_definition, :_definitions_list_1]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_definitions_list_1, nil, [:star, :_definitions_list_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_definitions_list_2, nil, [:seq, :definition_separator_symbol, :definitions_list]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:single_definition, nil, [:seq, :term, :_single_definition_1]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_single_definition_1, nil, [:star, :_single_definition_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_single_definition_2, nil, [:seq, ",", :term]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:term, nil, [:seq, :factor, :_term_1]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_term_1, nil, [:opt, :_term_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_term_2, nil, [:seq, "-", :exception]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:exception, nil, [:seq, :factor]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:factor, nil, [:seq, :_factor_1, :primary]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_factor_1, nil, [:opt, :_factor_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_factor_2, nil, [:seq, :integer, "*"]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:primary, nil, [:alt, :optional_sequence, :repeated_sequence, :special_sequence, :grouped_sequence, :meta_identifier, :terminal_string, :empty]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:optional_sequence, nil, [:seq, :start_option_symbol, :definitions_list, :end_option_symbol]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:repeated_sequence, nil, [:seq, :start_repeat_symbol, :definitions_list, :end_repeat_symbol]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:grouped_sequence, nil, [:seq, "(", :definitions_list, ")"]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_terminals, nil, [:seq], kind: :terminals).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:terminal_string, nil, [:alt, :_terminal_string_1, :_terminal_string_2], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_terminal_string_1, nil, [:seq, "'", :_terminal_string_3, "'"]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_terminal_string_3, nil, [:plus, :first_terminal_character]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_terminal_string_2, nil, [:seq, "\"", :_terminal_string_4, "\""]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_terminal_string_4, nil, [:plus, :second_terminal_character]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:meta_identifier, nil, [:seq, :letter, :_meta_identifier_1], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_meta_identifier_1, nil, [:star, :meta_identifier_character]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:integer, nil, [:plus, :decimal_digit], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:special_sequence, nil, [:seq, "?", :_special_sequence_1, "?"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_special_sequence_1, nil, [:star, :special_sequence_character]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:comment, nil, [:seq, :start_comment_symbol, :_comment_1, :end_comment_symbol], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_comment_1, nil, [:star, :comment_symbol]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:comment_symbol, nil, [:alt, :comment, :commentless_symbol, :other_character], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:commentless_symbol, nil, [:alt, :terminal_character, :meta_identifier, :integer, :terminal_string, :special_sequence], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:letter, nil, [:range, "a-zA-Z"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:decimal_digit, nil, [:range, "0-9"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:meta_identifier_character, nil, [:alt, :letter, :decimal_digit, "_"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:first_terminal_character, nil, [:diff, :terminal_character, "'"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:second_terminal_character, nil, [:diff, :terminal_character, "\""], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:special_sequence_character, nil, [:diff, :terminal_character, "?"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:terminal_character, nil, [:alt, :letter, :decimal_digit, :concatenate_symbol, :defining_symbol, :definition_separator_symbol, :end_comment_symbol, :end_group_symbol, :end_option_symbol, :end_repeat_symbol, :except_symbol, :first_quote_symbol, :repetition_symbol, :second_quote_symbol, :special_sequence_symbol, :start_comment_symbol, :start_group_symbol, :start_option_symbol, :start_repeat_symbol, :terminator_symbol, :other_character], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:other_character, nil, [:alt, :_other_character_1, "\\"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_other_character_1, nil, [:range, ":+_%@&$<>^` ̃#x20#x23"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:gap_separator, nil, [:range, "#x9#xa#xb#xc#xd#x20"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_pass, nil, [:alt, :__pass_1, :comment], kind: :pass).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:__pass_1, nil, [:plus, :gap_separator]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:empty, nil, [:seq, ""], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:concatenate_symbol, nil, [:seq, ","], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:repetition_symbol, nil, [:seq, "*"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:except_symbol, nil, [:seq, "-"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:first_quote_symbol, nil, [:seq, "'"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:second_quote_symbol, nil, [:seq, "\""], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:start_comment_symbol, nil, [:seq, "(*"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:end_comment_symbol, nil, [:seq, "*)"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:start_group_symbol, nil, [:seq, "("], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:end_group_symbol, nil, [:seq, ")"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:special_sequence_symbol, nil, [:seq, "?"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:defining_symbol, nil, [:alt, "=", ":"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:definition_separator_symbol, nil, [:alt, "|", "/", "!"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:terminator_symbol, nil, [:alt, ";", "."], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:start_option_symbol, nil, [:seq, "["], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:end_option_symbol, nil, [:seq, "]"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:start_repeat_symbol, nil, [:alt, "{", "(:"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:end_repeat_symbol, nil, [:alt, "}", ":)"], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:gap_free_symbol, nil, [:alt, :_gap_free_symbol_1, :terminal_string], kind: :terminal).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_gap_free_symbol_1, nil, [:seq, :_gap_free_symbol_3, :terminal_character]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_gap_free_symbol_3, nil, [:not, :_gap_free_symbol_2]).extend(EBNF::PEG::Rule),
    EBNF::Rule.new(:_gap_free_symbol_2, nil, [:range, "'\""], kind: :terminal).extend(EBNF::PEG::Rule),
  ]
end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/abnf/core.rb
lib/ebnf/abnf/core.rb
# This file is automatically generated by ebnf version 2.0.0 # Derived from etc/abnf-core.ebnf module ABNFCore RULES = [ EBNF::Rule.new(:ALPHA, nil, [:range, "#x41-#x5A#x61-#x7A"], kind: :terminal), EBNF::Rule.new(:BIT, nil, [:alt, "0", "1"], kind: :terminal), EBNF::Rule.new(:CHAR, nil, [:range, "#x01-#x7F"], kind: :terminal), EBNF::Rule.new(:CR, nil, [:hex, "#x0D"], kind: :terminal), EBNF::Rule.new(:CRLF, nil, [:seq, [:opt, :CR], :LF], kind: :terminal), EBNF::Rule.new(:CTL, nil, [:alt, [:range, "#x00-#x1F"], [:hex, "#x7F"]], kind: :terminal), EBNF::Rule.new(:DIGIT, nil, [:range, "#x30-#x39"], kind: :terminal), EBNF::Rule.new(:DQUOTE, nil, [:hex, "#x22"], kind: :terminal), EBNF::Rule.new(:HEXDIG, nil, [:alt, :DIGIT, [:range, "A-F"]], kind: :terminal), EBNF::Rule.new(:HTAB, nil, [:hex, "#x09"], kind: :terminal), EBNF::Rule.new(:LF, nil, [:hex, "#x0A"], kind: :terminal), EBNF::Rule.new(:LWSP, nil, [:star, [:alt, :WSP, [:seq, :CRLF, :WSP]]], kind: :terminal), EBNF::Rule.new(:OCTET, nil, [:range, "#x00-#xFF"], kind: :terminal), EBNF::Rule.new(:SP, nil, [:hex, "#x20"], kind: :terminal), EBNF::Rule.new(:VCHAR, nil, [:range, "#x21-#x7E"], kind: :terminal), EBNF::Rule.new(:WSP, nil, [:alt, :SP, :HTAB], kind: :terminal), ] end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/abnf/meta.rb
lib/ebnf/abnf/meta.rb
# This file is automatically generated by ebnf version 2.0.0 # Derived from abnf.ebnf module ABNFMeta RULES = [ EBNF::Rule.new(:rulelist, nil, [:plus, :_rulelist_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_rulelist_1, nil, [:alt, :rule, :_rulelist_2]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_rulelist_2, nil, [:seq, :_rulelist_3, :c_nl]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_rulelist_3, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:rule, nil, [:seq, :rulename, :defined_as, :elements, :c_nl]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:elements, nil, [:seq, :alternation, :_elements_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_elements_1, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:alternation, nil, [:seq, :concatenation, :_alternation_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_alternation_1, nil, [:star, :_alternation_2]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_alternation_2, nil, [:seq, :_alternation_3, "/", :_alternation_4, :concatenation]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_alternation_3, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_alternation_4, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:concatenation, nil, [:seq, :repetition, :_concatenation_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_concatenation_1, nil, [:star, :_concatenation_2]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_concatenation_2, nil, [:seq, :_concatenation_3, :repetition]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_concatenation_3, nil, [:plus, :c_wsp]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:repetition, nil, [:seq, :_repetition_1, :element]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_repetition_1, nil, [:opt, :repeat]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:repeat, nil, [:alt, :_repeat_1, :_repeat_2]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_repeat_1, nil, [:seq, :_repeat_3, "*", :_repeat_4]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_repeat_3, nil, [:star, :DIGIT]).extend(EBNF::PEG::Rule), 
EBNF::Rule.new(:_repeat_4, nil, [:star, :DIGIT]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_repeat_2, nil, [:plus, :DIGIT]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:element, nil, [:alt, :rulename, :group, :option, :char_val, :num_val, :prose_val]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:group, nil, [:seq, "(", :_group_1, :alternation, :_group_2, ")"]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_group_1, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_group_2, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:option, nil, [:seq, "[", :_option_1, :alternation, :_option_2, "]"]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_option_1, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_option_2, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:char_val, nil, [:alt, :case_insensitive_string, :case_sensitive_string]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:case_insensitive_string, nil, [:seq, :_case_insensitive_string_1, :quoted_string]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_case_insensitive_string_1, nil, [:opt, "%i"]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:case_sensitive_string, nil, [:seq, "%s", :quoted_string]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:num_val, nil, [:seq, "%", :_num_val_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_num_val_1, nil, [:alt, :bin_val, :dec_val, :hex_val]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:rulename, nil, [:seq, :ALPHA, :_rulename_1], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_rulename_1, nil, [:star, :_rulename_2]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_rulename_2, nil, [:alt, :ALPHA, :DIGIT, "-"]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:defined_as, nil, [:seq, :_defined_as_1, :_defined_as_2, :_defined_as_3], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_defined_as_1, nil, [:star, :c_wsp]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_defined_as_2, nil, [:alt, "=", "=/"]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_defined_as_3, nil, [:star, 
:c_wsp]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:c_wsp, nil, [:alt, :WSP, :_c_wsp_1], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_c_wsp_1, nil, [:seq, :c_nl, :WSP]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:c_nl, nil, [:alt, :COMMENT, :CRLF], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:comment, nil, [:seq, ";", :_comment_1, :CRLF], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_comment_1, nil, [:star, :_comment_2]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_comment_2, nil, [:alt, :WSP, :VCHAR]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:quoted_string, nil, [:seq, :DQUOTE, :_quoted_string_1, :DQUOTE], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_quoted_string_1, nil, [:star, :_quoted_string_2]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_quoted_string_2, nil, [:range, "#x20-#x21#x23-#x7E"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:bin_val, nil, [:seq, "b", :_bin_val_1, :_bin_val_2], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_bin_val_1, nil, [:plus, :BIT]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_bin_val_2, nil, [:opt, :_bin_val_3]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_bin_val_3, nil, [:alt, :_bin_val_4, :_bin_val_5]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_bin_val_4, nil, [:plus, :_bin_val_6]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_bin_val_6, nil, [:seq, ".", :_bin_val_7]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_bin_val_7, nil, [:plus, :BIT]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_bin_val_5, nil, [:seq, "-", :_bin_val_8]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_bin_val_8, nil, [:plus, :BIT]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:dec_val, nil, [:seq, "d", :_dec_val_1, :_dec_val_2], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_dec_val_1, nil, [:plus, :DIGIT]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_dec_val_2, nil, [:opt, :_dec_val_3]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_dec_val_3, nil, [:alt, :_dec_val_4, :_dec_val_5]).extend(EBNF::PEG::Rule), 
EBNF::Rule.new(:_dec_val_4, nil, [:plus, :_dec_val_6]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_dec_val_6, nil, [:seq, ".", :_dec_val_7]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_dec_val_7, nil, [:plus, :DIGIT]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_dec_val_5, nil, [:seq, "-", :_dec_val_8]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_dec_val_8, nil, [:plus, :DIGIT]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:hex_val, nil, [:seq, "x", :_hex_val_1, :_hex_val_2], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_hex_val_1, nil, [:plus, :HEXDIG]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_hex_val_2, nil, [:opt, :_hex_val_3]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_hex_val_3, nil, [:alt, :_hex_val_4, :_hex_val_5]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_hex_val_4, nil, [:plus, :_hex_val_6]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_hex_val_6, nil, [:seq, ".", :_hex_val_7]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_hex_val_7, nil, [:plus, :HEXDIG]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_hex_val_5, nil, [:seq, "-", :_hex_val_8]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_hex_val_8, nil, [:plus, :HEXDIG]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:prose_val, nil, [:seq, "<", :_prose_val_1, ">"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_prose_val_1, nil, [:star, :_prose_val_2]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_prose_val_2, nil, [:range, "#x20-#x3D#x3F-#x7E"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:ALPHA, nil, [:range, "#x41-#x5A#x61-#x7A"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:BIT, nil, [:alt, "0", "1"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:CHAR, nil, [:range, "#x01-#x7F"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:CR, nil, [:hex, "#x0D"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:CRLF, nil, [:seq, :_CRLF_1, :LF], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_CRLF_1, nil, [:opt, :CR], kind: :terminal).extend(EBNF::PEG::Rule), 
EBNF::Rule.new(:CTL, nil, [:alt, :_CTL_1, :_CTL_2], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_CTL_1, nil, [:range, "#x00-#x1F"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_CTL_2, nil, [:hex, "#x7F"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:DIGIT, nil, [:range, "#x30-#x39"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:DQUOTE, nil, [:hex, "#x22"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:HEXDIG, nil, [:alt, :DIGIT, :_HEXDIG_1], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEXDIG_1, nil, [:range, "A-F"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:HTAB, nil, [:hex, "#x09"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:LF, nil, [:hex, "#x0A"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:LWSP, nil, [:star, :_LWSP_1], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_LWSP_1, nil, [:alt, :WSP, :_LWSP_2], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_LWSP_2, nil, [:seq, :CRLF, :WSP], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:OCTET, nil, [:range, "#x00-#xFF"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:SP, nil, [:hex, "#x20"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:VCHAR, nil, [:range, "#x21-#x7E"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:WSP, nil, [:alt, :SP, :HTAB], kind: :terminal).extend(EBNF::PEG::Rule), ] end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/peg/parser.rb
lib/ebnf/peg/parser.rb
module EBNF::PEG ## # A Generic PEG parser using the parsed rules modified for PEG parseing. module Parser ## # @return [Regexp, Rule] how to remove inter-rule whitespace attr_reader :whitespace ## # @return [Scanner] used for scanning input. attr_reader :scanner ## # A Hash structure used for memoizing rule results for a given input location. # # @example Partial structure for memoizing results for a particular rule # # { # rule: { # 86: { # pos: # result: [<EBNF::Rule:80 { # sym: :ebnf, # id: "1", # kind: :rule, # expr: [:star, [:alt, :declaration, :rule]]}>], # } # 131: [<EBNF::Rule:80 {sym: :ebnf, # id: "1", # kind: :rule, # expr: [:star, [:alt, :declaration, :rule]]}>, # <EBNF::Rule:100 { # sym: :declaration, # id: "2", # kind: :rule, # expr: [:alt, "@terminals", :pass]}>] # }, # POSTFIX: { # 80: "*", # 368: "*", # 399: "+" # } # } # @return [Hash{Integer => Hash{Symbol => Object}}] attr_reader :packrat def self.included(base) base.extend(ClassMethods) end # DSL for creating terminals and productions module ClassMethods def start_handlers; (@start_handlers ||= {}); end def start_options; (@start_hoptions ||= {}); end def production_handlers; (@production_handlers ||= {}); end def terminal_handlers; (@terminal_handlers ||= {}); end def terminal_regexps; (@terminal_regexps ||= {}); end def terminal_options; (@terminal_options ||= {}); end ## # Defines the pattern for a terminal node and a block to be invoked # when ther terminal is encountered. If the block is missing, the # value of the terminal will be placed on the input hash to be returned # to a previous production. Block is called in an evaluation block from # the enclosing parser. # # If no block is provided, then the value which would have been passed to the block is used as the result directly. # # @param [Symbol] term # The terminal name. # @param [Regexp, Proc] regexp # Pattern used to scan for this terminal. # Passing a Proc will evaluate that proc to retrieve a regular expression. 
# @param [Hash] options # @option options [Boolean] :unescape # Cause strings and codepoints to be unescaped. # @yield [value, prod] # @yieldparam [String] value # The scanned terminal value. # @yieldparam [Symbol] prod # A symbol indicating the production which referenced this terminal # @yieldparam [Proc] block # Block passed to initialization for yielding to calling parser. # Should conform to the yield specs for #initialize def terminal(term, regexp, **options, &block) terminal_regexps[term] = regexp terminal_handlers[term] = block if block_given? terminal_options[term] = options.freeze end ## # Defines a production called at the beggining of a particular production # with data from previous production along with data defined for the # current production. Block is called in an evaluation block from # the enclosing parser. # # @param [Symbol] term # The rule name # @param [Hash{Symbol => Object}] options # Options which are returned from {Parser#onStart}. # @option options [Boolean] :as_hash (false) # If the production is a `seq`, causes the value to be represented as a single hash, rather than an array of individual hashes for each sub-production. Note that this is not always advisable due to the possibility of repeated productions within the sequence. # @option options[:upper, :lower] :insensitive_strings # Perform case-insensitive match of strings not defined as terminals, and map to either upper or lower case. # @yield [data, block] # @yieldparam [Hash] data # A Hash defined for the current production, during :start # may be initialized with data to pass to further productions, # during :finish, it contains data placed by earlier productions # @yieldparam [Proc] block # Block passed to initialization for yielding to calling parser. 
# Should conform to the yield specs for #initialize # Yield to generate a triple def start_production(term, **options, &block) start_handlers[term] = block start_options[term] = options.freeze end ## # Defines a production called when production of associated # non-terminals has completed # with data from previous production along with data defined for the # current production. Block is called in an evaluation block from # the enclosing parser. # # @param [Symbol] term # Term which is a key in the branch table # @param [Boolean] clear_packrat (false) # Clears the packrat state on completion to reduce memory requirements of parser. Use only on a top-level rule when it is determined that no further backtracking is necessary. # @yield [result, data, block] # @yieldparam [Object] result # The result from sucessfully parsing the production. # @yieldparam [Hash] data # A Hash defined for the current production, during :start # may be initialized with data to pass to further productions, # during :finish, it contains data placed by earlier productions # @yieldparam [Proc] block # Block passed to initialization for yielding to calling parser. # Should conform to the yield specs for #initialize # @yieldparam [Hash] **options # Other data that may be passed to the production # @yieldreturn [Object] the result of this production. # Yield to generate a triple def production(term, clear_packrat: false, &block) production_handlers[term] = [block, clear_packrat] end # Evaluate a handler, delegating to the specified object. # This is necessary so that handlers can operate within the # binding context of the parser in which they're invoked. # @param [Object] object # @return [Object] def eval_with_binding(object) @delegate = object object.instance_eval {yield} end private def method_missing(method, *args, &block) if @delegate ||= nil # special handling when last arg is **options params = @delegate.method(method).parameters if params.any? 
{|t, _| t == :keyrest} && args.last.is_a?(Hash) opts = args.pop @delegate.send(method, *args, **opts, &block) else @delegate.send(method, *args, &block) end else super end end end ## # Initializes a new parser instance. # # @param [String, #to_s] input # @param [Symbol, #to_s] start # The starting production for the parser. It may be a URI from the grammar, or a symbol representing the local_name portion of the grammar URI. # @param [Array<EBNF::PEG::Rule>] rules # The parsed rules, which control parsing sequence. # Identify the symbol of the starting rule with `start`. # @param [Hash{Symbol => Object}] options # @option options[Integer] :high_water passed to lexer # @option options[:upper, :lower] :insensitive_strings # Perform case-insensitive match of strings not defined as terminals, and map to either upper or lower case. # @option options [Logger] :logger for errors/progress/debug. # @option options[Integer] :low_water passed to lexer # @option options[Boolean] :seq_hash (false) # If `true`, sets the default for the value sent to a production handler that is for a `seq` to a hash composed of the flattened consitutent hashes that are otherwise provided. # @option options [Symbol, Regexp] :whitespace # Symbol of whitespace rule (defaults to `@pass`), or a regular expression # for eating whitespace between non-terminal rules (strongly encouraged). # @yield [context, *data] # Yields to return data to parser # @yieldparam [:statement, :trace] context # Context for block # @yieldparam [Symbol] *data # Data specific to the call # @return [Object] AST resulting from parse # @raise [Exception] Raises exceptions for parsing errors # or errors raised during processing callbacks. Internal # errors are raised using {Error}. 
# @todo FIXME implement seq_hash def parse(input = nil, start = nil, rules = nil, insensitive_strings: nil, **options, &block) start ||= options[:start] rules ||= options[:rules] || [] @rules = rules.inject({}) {|memo, rule| memo.merge(rule.sym => rule)} @packrat = {} # Add parser reference to each rule @rules.each_value {|rule| rule.parser = self} # Take whitespace from options, a named rule, a `pass` rule, a rule named :WS, or a default @whitespace = case options[:whitespace] when Regexp then options[:whitespace] when Symbol then @rules[options[:whitespace]] else options[:whitespace] end || @rules.values.detect(&:pass?) || /(?:\s|(?:#[^x][^\n\r]*))+/m.freeze @options = options.dup @productions = [] @parse_callback = block @error_log = [] @prod_data = [] @scanner = EBNF::LL1::Scanner.new(input) start = start.split('#').last.to_sym unless start.is_a?(Symbol) start_rule = @rules[start] raise Error, "Starting production #{start.inspect} not defined" unless start_rule result = start_rule.parse(scanner, insensitive_strings: insensitive_strings) if result == :unmatched # Start rule wasn't matched, which is about the only error condition error("--top--", @furthest_failure.to_s, pos: @furthest_failure.pos, lineno: @furthest_failure.lineno, rest: scanner.string[@furthest_failure.pos, 20]) end # Eat any remaining whitespace start_rule.eat_whitespace(scanner) if !scanner.eos? error("--top--", @furthest_failure.to_s, pos: @furthest_failure.pos, lineno: @furthest_failure.lineno, rest: scanner.string[@furthest_failure.pos, 20]) end # When all is said and done, raise the error log unless @error_log.empty? raise Error, @error_log.join("\n") end result end # Depth of parsing, for log output. def depth; (@productions || []).length; end # Current ProdData element def prod_data; @prod_data.last || {}; end # Clear out packrat memoizer. This is appropriate when completing a top-level rule when there is no possibility of backtracking. 
def clear_packrat; @packrat.clear; end ## # Error information, used as level `3` logger messages. # Messages may be logged and are saved for reporting at end of parsing. # # @param [String] node Relevant location associated with message # @param [String] message Error string # @param [Hash{Symbol => Object}] options # @option options [URI, #to_s] :production # @option options [Boolean] :raise abort furhter processing # @option options [Array] :backtrace state where error occured # @see #debug def error(node, message, **options) lineno = options[:lineno] || (scanner.lineno if scanner) m = "ERROR " m += "[line: #{lineno}] " if lineno m += message m += " (found #{options[:rest].inspect})" if options[:rest] m += ", production = #{options[:production].inspect}" if options[:production] @error_log << m unless @recovering @recovering = true debug(node, m, level: 3, **options) if options[:raise] || @options[:validate] raise Error.new(m, lineno: lineno, rest: options[:rest], production: options[:production], backtrace: options[:backtrace]) end end ## # Warning information, used as level `2` logger messages. # Messages may be logged and are saved for reporting at end of parsing. # # @param [String] node Relevant location associated with message # @param [String] message Error string # @param [Hash] options # @option options [URI, #to_s] :production # @option options [Token] :token # @see #debug def warn(node, message, **options) lineno = options[:lineno] || (scanner.lineno if scanner) m = "WARNING " m += "[line: #{lineno}] " if lineno m += message m += " (found #{options[:rest].inspect})" if options[:rest] m += ", production = #{options[:production].inspect}" if options[:production] debug(node, m, level: 2, **options) end ## # Progress logged when parsing. Passed as level `1` logger messages. # # The call is ignored, unless `@options[:logger]` is set. 
# # @overload progress(node, message, **options, &block) # @param [String] node Relevant location associated with message # @param [String] message ("") # @param [Hash] options # @option options [Integer] :depth # Recursion depth for indenting output # @see #debug def progress(node, *args, &block) return unless @options[:logger] args << {} unless args.last.is_a?(Hash) args.last[:level] ||= 1 debug(node, *args, &block) end ## # Debug logging. # # The call is ignored, unless `@options[:logger]` is set. # # @overload debug(node, message, **options) # @param [Array<String>] args Relevant location associated with message # @param [Hash] options # @option options [Integer] :depth # Recursion depth for indenting output # @yieldreturn [String] additional string appended to `message`. def debug(*args, &block) return unless @options[:logger] options = args.last.is_a?(Hash) ? args.pop : {} lineno = options[:lineno] || (scanner.lineno if scanner) level = options.fetch(:level, 0) depth = options[:depth] || self.depth if self.respond_to?(:log_debug) level = [:debug, :info, :warn, :error, :fatal][level] log_debug(*args, **options.merge(level: level, lineno: lineno, depth: depth), &block) elsif @options[:logger].respond_to?(:add) args << yield if block_given? @options[:logger].add(level, "[#{lineno}]" + (" " * depth) + args.join(" ")) elsif @options[:logger].respond_to?(:<<) args << yield if block_given? @options[:logger] << "[#{lineno}]" + (" " * depth) + args.join(" ") end end # Start for production # Adds data avoiable during the processing of the production # # @param [Symbol] prod # @param [Hash] **options other options available for handlers # @return [Hash] composed of production options. Currently only `as_hash` is supported. 
# @see ClassMethods#start_production def onStart(prod, **options) handler = self.class.start_handlers[prod] @productions << prod if handler # Create a new production data element, potentially allowing handler # to customize before pushing on the @prod_data stack data = {_production: prod}.merge(options) begin self.class.eval_with_binding(self) { handler.call(data, @parse_callback) } rescue ArgumentError, Error => e error("start", "#{e.class}: #{e.message}", production: prod, backtrace: e.backtrace) @recovering = false end @prod_data << data elsif self.class.production_handlers[prod] # Make sure we push as many was we pop, even if there is no # explicit start handler @prod_data << {_production: prod} end progress("#{prod}(:start)", "", lineno: (scanner.lineno if scanner), pos: (scanner.pos if scanner) ) do "#{data.inspect}@(#{scanner ? scanner.pos : '?'}), rest: #{scanner ? scanner.rest[0..20].inspect : '?'}" end return self.class.start_options.fetch(prod, {}) # any options on this production end # Finish of production # # @param [Object] result parse result # @param [Hash] **options other options available for handlers # @return [Object] parse result, or the value returned from the handler def onFinish(result, **options) #puts "prod_data(f): " + @prod_data.inspect prod = @productions.last handler, clear_packrat = self.class.production_handlers[prod] data = @prod_data.pop if handler || self.class.start_handlers[prod] error("finish", "prod_data production mismatch: expected #{prod.inspect}, got #{data[:_production].inspect}", production: prod, prod_data: @prod_data) if data && prod != data[:_production] if handler && !@recovering && result != :unmatched # Pop production data element from stack, potentially allowing handler to use it result = begin self.class.eval_with_binding(self) { handler.call(result, data, @parse_callback, **options) } rescue ArgumentError, Error => e error("finish", "#{e.class}: #{e.message}", production: prod, backtrace: e.backtrace) 
@recovering = false end end progress("#{prod}(:finish)", "", lineno: (scanner.lineno if scanner), level: result == :unmatched ? 0 : 1) do "#{result.inspect}@(#{scanner ? scanner.pos : '?'}), rest: #{scanner ? scanner.rest[0..20].inspect : '?'}" end self.clear_packrat if clear_packrat @productions.pop result end # A terminal with a defined handler # # @param [Symbol] prod from the symbol of the associated rule # @param [String] value the scanned string # @return [String, Object] either the result from the handler, or the token def onTerminal(prod, value) parentProd = @productions.last handler = self.class.terminal_handlers[prod] if handler && value != :unmatched value = begin self.class.eval_with_binding(self) { handler.call(value, parentProd, @parse_callback) } rescue ArgumentError, Error => e error("terminal", "#{e.class}: #{e.message}", value: value, production: prod, backtrace: e.backtrace) @recovering = false end end progress("#{prod}(:terminal)", "", depth: (depth + 1), lineno: (scanner.lineno if scanner), level: value == :unmatched ? 0 : 1) do "#{value.inspect}@(#{scanner ? scanner.pos : '?'})" end value end ## # Find a rule for a symbol # # @param [Symbol] sym # @return [Rule] def find_rule(sym) @rules[sym] end ## # Find a regular expression defined for a terminal # # @param [Symbol] sym # @return [Regexp] def terminal_regexp(sym) self.class.terminal_regexps[sym] end ## # Find a regular expression defined for a terminal # # @param [Symbol] sym # @return [Regexp] def terminal_options(sym) self.class.terminal_options[sym] end ## # Record furthest failure. # # @param [Integer] pos # The position in the input stream where the failure occured. # @param [Integer] lineno # Line where the failure occured. # @param [Symbol, String] token # The terminal token or string which attempted to match. 
# @see https://arxiv.org/pdf/1405.6646.pdf def update_furthest_failure(pos, lineno, token) # Skip generated productions return if token.is_a?(Symbol) && token.to_s.start_with?('_') if @furthest_failure.nil? || pos > @furthest_failure.pos @furthest_failure = Unmatched.new(pos, lineno, [token]) elsif pos == @furthest_failure.pos && !@furthest_failure[:expecting].include?(token) @furthest_failure[:expecting] << token end end public ## # @!parse # # Record details about an inmatched rule, including the following: # # # # * Input location and line number at time of failure. # # * The rule at which this was found (non-terminal, and nat starting with '_'). # class Unmatched # # @return [Integer] The position within the scanner which did not match. # attr_reader :pos # # @return [Integer] The line number which did not match. # attr_reader :lineno # # @return [Array<Symbol,String>] # # Strings or production rules that attempted to match at this position. # attr_reader :expecting # end class Unmatched < Struct.new(:pos, :lineno, :expecting) def to_s "syntax error, expecting #{expecting.map(&:inspect).join(', ')}" end end ## # Raised for errors during parsing. # # @example Raising a parser error # raise Error.new( # "invalid token '%' on line 10", # rest: '%', lineno: 9, production: :turtleDoc) # # @see https://ruby-doc.org/core/classes/StandardError.html class Error < StandardError ## # The current production. # # @return [Symbol] attr_reader :production ## # The read head when scanning failed # # @return [String] attr_reader :rest ## # The line number where the error occurred. # # @return [Integer] attr_reader :lineno ## # Initializes a new lexer error instance. 
# # @param [String, #to_s] message # @param [Hash{Symbol => Object}] options # @option options [Symbol] :production (nil) # @option options [String] :rest (nil) # @option options [Integer] :lineno (nil) def initialize(message, **options) @production = options[:production] @rest = options[:rest] @lineno = options[:lineno] super(message.to_s) end end # class Error end # module Parser end # module EBNF::PEG
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/peg/rule.rb
lib/ebnf/peg/rule.rb
module EBNF::PEG
  # Behavior for parsing a PEG rule
  module Rule
    include ::EBNF::Unescape

    ##
    # Initialized by parser when loading rules.
    # Used for finding rules and invoking elements of the parse process.
    #
    # @return [EBNF::PEG::Parser] parser
    attr_accessor :parser

    ##
    # Parse a rule or terminal, invoking callbacks, as appropriate
    # If there are `start_production` and/or `production` handlers,
    # they are invoked with a `prod_data` stack, the input stream and offset.
    # Otherwise, the results are added as an array value
    # to a hash indexed by the rule name.
    #
    # If matched, the input position is updated and the results returned in a Hash.
    #
    # * `alt`: returns the value of the matched production or `:unmatched`.
    # * `diff`: returns the value matched, or `:unmatched`.
    # * `hex`: returns a string composed of the matched hex character, or `:unmatched`.
    # * `opt`: returns the value matched, or `nil` if unmatched.
    # * `plus`: returns an array of the values matched for the specified production, or `:unmatched`, if none are matched. For Terminals, these are concatenated into a single string.
    # * `range`: returns a string composed of the values matched, or `:unmatched`, if less than `min` are matched.
    # * `rept`: returns an array of the values matched for the specified production, or `:unmatched`, if none are matched. For Terminals, these are concatenated into a single string.
    # * `seq`: returns an array composed of single-entry hashes for each matched production indexed by the production name, or `:unmatched` if any production fails to match. For Terminals, returns a string created by concatenating these values. Via option in a `production` or definition, the result can be a single hash with values for each matched production; note that this is not always possible due to the possibility of repeated productions within the sequence.
    # * `star`: returns an array of the values matched for the specified production. For Terminals, these are concatenated into a single string.
    #
    # @param [Scanner] input
    # @param [Hash] **options Other data that may be passed to handlers.
    # @return [Hash{Symbol => Object}, :unmatched] A hash with keys for matched component of the expression. Returns :unmatched if the input does not match the production.
    def parse(input, **options)
      # Save position and linenumber for backtracking
      pos, lineno = input.pos, input.lineno

      # Packrat memoization: if this rule was already attempted at this
      # position, restore the recorded scanner state and return the result.
      parser.packrat[sym] ||= {}
      if parser.packrat[sym][pos]
        parser.debug("#{sym}(:memo)", lineno: lineno) { "#{parser.packrat[sym][pos].inspect}(@#{pos})"}
        input.pos, input.lineno = parser.packrat[sym][pos][:pos], parser.packrat[sym][pos][:lineno]
        return parser.packrat[sym][pos][:result]
      end

      if terminal?
        # If the terminal is defined with a regular expression,
        # use that to match the input,
        # otherwise, fall through and parse it using its rule expression.
        if regexp = parser.terminal_regexp(sym)
          regexp = regexp.call() if regexp.is_a?(Proc)
          term_opts = parser.terminal_options(sym)
          if matched = input.scan(regexp)
            # Optionally map matched
            matched = term_opts.fetch(:map, {}).fetch(matched.downcase, matched)
            # Optionally unescape matched
            matched = unescape(matched) if term_opts[:unescape]
          end
          result = parser.onTerminal(sym, (matched ? matched : :unmatched))
          # Update furthest failure for strings and terminals
          parser.update_furthest_failure(input.pos, input.lineno, sym) if result == :unmatched
          parser.packrat[sym][pos] = {
            pos: input.pos,
            lineno: input.lineno,
            result: result
          }
          return parser.packrat[sym][pos][:result]
        end
      else
        eat_whitespace(input)
      end
      start_options = options.merge(parser.onStart(sym, **options))
      string_regexp_opts = start_options[:insensitive_strings] ? Regexp::IGNORECASE : 0

      result = case expr.first
      when :alt
        # Return the first expression to match.
        # Look at strings before terminals before non-terminals, with strings ordered by longest first
        # Result is either :unmatched, or the value of the matching rule
        alt = :unmatched
        expr[1..-1].each do |prod|
          alt = case prod
          when Symbol
            rule = parser.find_rule(prod)
            raise "No rule found for #{prod}" unless rule
            rule.parse(input, **options)
          when String
            # If the input matches a terminal for which the string is a prefix, don't match the string
            if terminal_also_matches(input, prod, string_regexp_opts)
              :unmatched
            else
              s = input.scan(Regexp.new(Regexp.quote(prod), string_regexp_opts))
              case start_options[:insensitive_strings]
              when :lower then s && s.downcase
              when :upper then s && s.upcase
              else s
              end || :unmatched
            end
          end
          if alt == :unmatched
            # Update furthest failure for strings and terminals
            parser.update_furthest_failure(input.pos, input.lineno, prod) if prod.is_a?(String) || rule.terminal?
          else
            break
          end
        end
        alt
      when :diff
        # matches any string that matches A but does not match B.
        # (Note, this is only used for Terminal rules, non-terminals will use :not)
        # NOTE: was "#{prod}", which is not in scope here and would raise
        # NameError instead of the intended error; use the rule name instead.
        raise "Diff used on non-terminal #{sym}" unless terminal?
        re1, re2 = Regexp.new(translate_codepoints(expr[1])), Regexp.new(translate_codepoints(expr[2]))
        matched = input.scan(re1)
        if !matched || re2.match?(matched)
          # Update furthest failure for terminals
          parser.update_furthest_failure(input.pos, input.lineno, sym)
          :unmatched
        else
          matched
        end
      when :hex
        # Matches the given hex character if expression matches the character whose number (code point) in ISO/IEC 10646 is N. The number of leading zeros in the #xN form is insignificant.
        input.scan(to_regexp) || begin
          # Update furthest failure for terminals
          parser.update_furthest_failure(input.pos, input.lineno, expr.last)
          :unmatched
        end
      when :not
        # matches any string that does not match B.
        res = case prod = expr[1]
        when Symbol
          rule = parser.find_rule(prod)
          raise "No rule found for #{prod}" unless rule
          rule.parse(input, **options)
        when String
          if terminal_also_matches(input, prod, string_regexp_opts)
            :unmatched
          else
            s = input.scan(Regexp.new(Regexp.quote(prod), string_regexp_opts))
            case start_options[:insensitive_strings]
            when :lower then s && s.downcase
            when :upper then s && s.upcase
            else s
            end || :unmatched
          end
        end
        if res != :unmatched
          # Update furthest failure for terminals
          parser.update_furthest_failure(input.pos, input.lineno, sym) if terminal?
          :unmatched
        else
          nil
        end
      when :opt
        # Result is the matched value or nil
        opt = rept(input, 0, 1, expr[1], string_regexp_opts, **start_options)
        # Update furthest failure for strings and terminals
        parser.update_furthest_failure(input.pos, input.lineno, expr[1]) if terminal?
        opt.first
      when :plus
        # Result is an array of all expressions while they match,
        # at least one must match
        plus = rept(input, 1, '*', expr[1], string_regexp_opts, **options)
        # Update furthest failure for strings and terminals
        parser.update_furthest_failure(input.pos, input.lineno, expr[1]) if terminal?
        plus.is_a?(Array) && terminal? ? plus.join("") : plus
      when :range, :istr
        # Matches the specified character range
        input.scan(to_regexp) || begin
          # Update furthest failure for strings and terminals
          parser.update_furthest_failure(input.pos, input.lineno, expr[1])
          :unmatched
        end
      when :rept
        # Result is an array of all expressions while they match,
        # an empty array of none match
        rept = rept(input, expr[1], expr[2], expr[3], string_regexp_opts, **options)
        # Update furthest failure for strings and terminals
        parser.update_furthest_failure(input.pos, input.lineno, expr[3]) if terminal?
        rept.is_a?(Array) && terminal? ? rept.join("") : rept
      when :seq
        # Evaluate each expression into an array of hashes where each hash contains a key from the associated production and the value is the parsed value of that production. Returns :unmatched if the input does not match the production. Value ordering is ensured by native Hash ordering.
        seq = expr[1..-1].each_with_object([]) do |prod, accumulator|
          eat_whitespace(input) unless accumulator.empty? || terminal?
          res = case prod
          when Symbol
            rule = parser.find_rule(prod)
            raise "No rule found for #{prod}" unless rule
            rule.parse(input, **options.merge(_rept_data: accumulator))
          when String
            if terminal_also_matches(input, prod, string_regexp_opts)
              :unmatched
            else
              s = input.scan(Regexp.new(Regexp.quote(prod), string_regexp_opts))
              case start_options[:insensitive_strings]
              when :lower then s && s.downcase
              when :upper then s && s.upcase
              else s
              end || :unmatched
            end
          end
          if res == :unmatched
            # Update furthest failure for strings and terminals
            parser.update_furthest_failure(input.pos, input.lineno, prod)
            break :unmatched
          end
          accumulator << {prod.to_sym => res}
        end
        if seq == :unmatched
          :unmatched
        elsif terminal?
          seq.map(&:values).compact.join("") # Concat values for terminal production
        elsif start_options[:as_hash]
          seq.inject {|memo, h| memo.merge(h)}
        else
          seq
        end
      when :star
        # Result is an array of all expressions while they match,
        # an empty array of none match
        star = rept(input, 0, '*', expr[1], string_regexp_opts, **options)
        # Update furthest failure for strings and terminals
        parser.update_furthest_failure(input.pos, input.lineno, expr[1]) if terminal?
        star.is_a?(Array) && terminal? ? star.join("") : star
      else
        raise "attempt to parse unknown rule type: #{expr.first}"
      end

      if result == :unmatched
        # Rewind input to entry point if unmatched.
        input.pos, input.lineno = pos, lineno
      end

      result = parser.onFinish(result, **options)
      # Memoize the outcome (matched or not) along with the resulting scanner state.
      (parser.packrat[sym] ||= {})[pos] = {
        pos: input.pos,
        lineno: input.lineno,
        result: result
      }
      return parser.packrat[sym][pos][:result]
    end

    ##
    # Repetition, 0-1, 0-n, 1-n, ...
    #
    # Note, nil results are removed from the result, but count towards min/max calculations.
    # Saves temporary production data to prod_data stack.
    #
    # @param [Scanner] input
    # @param [Integer] min
    # @param [Integer] max
    #   If it is an integer, it stops matching after max entries.
    # @param [Symbol, String] prod
    # @param [Integer] string_regexp_opts
    # @return [:unmatched, Array]
    def rept(input, min, max, prod, string_regexp_opts, **options)
      result = []
      case prod
      when Symbol
        rule = parser.find_rule(prod)
        raise "No rule found for #{prod}" unless rule
        while (max == '*' || result.length < max) && (res = rule.parse(input, **options.merge(_rept_data: result))) != :unmatched
          eat_whitespace(input) unless terminal?
          result << res
        end
      when String
        # FIXME: don't match a string, if input matches a terminal
        while (res = input.scan(Regexp.new(Regexp.quote(prod), string_regexp_opts))) && (max == '*' || result.length < max)
          eat_whitespace(input) unless terminal?
          result << case options[:insensitive_strings]
          when :lower then res.downcase
          when :upper then res.upcase
          else res
          end
        end
      end
      result.length < min ? :unmatched : result.compact
    end

    ##
    # See if a terminal could have a longer match than a string
    def terminal_also_matches(input, prod, string_regexp_opts)
      str_regex = Regexp.new(Regexp.quote(prod), string_regexp_opts)
      input.match?(str_regex) && parser.class.terminal_regexps.any? do |sym, re|
        re = re.call() if re.is_a?(Proc)
        (match_len = input.match?(re)) && match_len > prod.length
      end
    end

    ##
    # Eat whitespace between non-terminal rules
    def eat_whitespace(input)
      if parser.whitespace.is_a?(Regexp)
        # Eat whitespace before a non-terminal
        input.skip(parser.whitespace)
      elsif parser.whitespace.is_a?(Rule)
        parser.whitespace.parse(input) # throw away result
      end
    end
  end
end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/ll1/parser.rb
lib/ebnf/ll1/parser.rb
require 'ebnf/ll1/lexer' module EBNF::LL1 ## # A Generic LL1 parser using a lexer and branch tables defined using the SWAP tool chain (modified). # # # Creating terminal definitions and parser rules to parse generated grammars # # The parser is initialized to callbacks invoked on entry and exit # to each `terminal` and `production`. A trivial parser loop can be described as follows: # # require 'ebnf/ll1/parser' # require 'meta' # # class Parser # include Meta # include EBNF::LL1::Parser # # terminal(:SYMBOL, /([a-z]|[A-Z]|[0-9]|_)+/) do |prod, token, input| # # Add data based on scanned token to input # input[:symbol] = token.value # end # # start_production(:rule) do |input, current, callback| # # Process on start of production # # Set state for entry into recursed rules through current # # # Callback to parser loop with callback # end # # production(:rule) do |input, current, callback| # # Process on end of production # # return results in input, retrieve results from recursed rules in current # # # Callback to parser loop with callback # end # # def initialize(input) # parse(input, start_symbol, # branch: BRANCH, # first: FIRST, # follow: FOLLOW, # cleanup: CLEANUP # ) do |context, *data| # # Process calls from callback from productions # # rescue ArgumentError, RDF::LL1::Parser::Error => e # progress("Parsing completed with errors:\n\t#{e.message}") # raise RDF::ReaderError, e.message if validate? # end module Parser ## # @return [Integer] line number of current token attr_reader :lineno def self.included(base) base.extend(ClassMethods) end # DSL for creating terminals and productions module ClassMethods def start_handlers; @start_handlers || {}; end def production_handlers; @production_handlers || {}; end def terminal_handlers; @terminal_handlers || {}; end def patterns; @patterns || []; end ## # Defines the pattern for a terminal node and a block to be invoked # when ther terminal is encountered. 
If the block is missing, the # value of the terminal will be placed on the input hash to be returned # to a previous production. Block is called in an evaluation block from # the enclosing parser. # # @param [Symbol, String] term # Defines a terminal production, which appears as within a sequence in the branch table # @param [Regexp] regexp # Pattern used to scan for this terminal # @param [Hash] options # @option options [Hash{String => String}] :map ({}) # A mapping from terminals, in lower-case form, to # their canonical value # @option options [Boolean] :unescape # Cause strings and codepoints to be unescaped. # @yield [term, token, input, block] # @yieldparam [Symbol] term # A symbol indicating the production which referenced this terminal # @yieldparam [String] token # The scanned token # @yieldparam [Hash] input # A Hash containing input from the parent production # @yieldparam [Proc] block # Block passed to initialization for yielding to calling parser. # Should conform to the yield specs for #initialize def terminal(term, regexp, **options, &block) @patterns ||= [] # Passed in order to define evaulation sequence @patterns << EBNF::LL1::Lexer::Terminal.new(term, regexp, **options) @terminal_handlers ||= {} @terminal_handlers[term] = block if block_given? end ## # Defines a production called at the beggining of a particular production # with data from previous production along with data defined for the # current production. Block is called in an evaluation block from # the enclosing parser. 
# # @param [Symbol] term # Term which is a key in the branch table # @yield [input, current, block] # @yieldparam [Hash] input # A Hash containing input from the parent production # @yieldparam [Hash] current # A Hash defined for the current production, during :start # may be initialized with data to pass to further productions, # during :finish, it contains data placed by earlier productions # @yieldparam [Proc] block # Block passed to initialization for yielding to calling parser. # Should conform to the yield specs for #initialize # Yield to generate a triple def start_production(term, &block) @start_handlers ||= {} @start_handlers[term] = block end ## # Defines a production called when production of associated # terminals and non-terminals has completed # with data from previous production along with data defined for the # current production. Block is called in an evaluation block from # the enclosing parser. # # @param [Symbol] term # Term which is a key in the branch table # @yield [input, current, block] # @yieldparam [Hash] input # A Hash containing input from the parent production # @yieldparam [Hash] current # A Hash defined for the current production, during :start # may be initialized with data to pass to further productions, # during :finish, it contains data placed by earlier productions # @yieldparam [Proc] block # Block passed to initialization for yielding to calling parser. # Should conform to the yield specs for #initialize # Yield to generate a triple def production(term, &block) @production_handlers ||= {} @production_handlers[term] = block end # Evaluate a handler, delegating to the specified object. # This is necessary so that handlers can operate within the # binding context of the parser in which they're invoked. 
# @param [Object] object # @return [Object] def eval_with_binding(object) @delegate = object object.instance_eval {yield} end private def method_missing(method, *args, &block) if @delegate ||= nil # special handling when last arg is **options params = @delegate.method(method).parameters if params.any? {|t, _| t == :keyrest} && args.last.is_a?(Hash) opts = args.pop @delegate.send(method, *args, **opts, &block) else @delegate.send(method, *args, &block) end else super end end end ## # Initializes a new parser instance. # # Attempts to recover from errors. # # @example # require 'rdf/ll1/parser' # # class MyParser # include EBNF::LL1::Parser # # branch MyParser::BRANCH # # ## # # Defines a production called during before parsing a non-terminal # # with data from previous production along with data defined for the # # current production # # # start_production :object do |input, current, callback| # # Note production as triples for blankNodePropertyList # # to set :subject instead of :resource # current[:triples] = true # end # # ## # # Defines a production called during after parsing a non-terminal # # with data from previous production along with data defined for the # # current production # # # # callback to processor block # production :object do |input, current, callback| # object = current[:resource] # callback.call :statement, RDF::Statement.new(input[:subject], input[:predicate], object) # end # # ## # # Defines the pattern for a terminal node # terminal :BLANK_NODE_LABEL, %r(_:(#{PN_LOCAL})) do |production, token, input| # input[:BLANK_NODE_LABEL] = RDF::Node.new(token) # end # # ## # # Iterates the given block for each RDF statement in the input. 
# # # # @yield [statement] # # @yieldparam [RDF::Statement] statement # # @return [void] # def each_statement(&block) # @callback = block # # parse(input, START.to_sym) do |context, *data| # case context # when :statement # yield *data # end # end # end # # end # # @param [String, #to_s] input # @param [Symbol, #to_s] start # The starting production for the parser. It may be a URI from the grammar, or a symbol representing the local_name portion of the grammar URI. # @param [Hash{Symbol => Object}] options # @option options [Hash{Symbol,String => Hash{Symbol,String => Array<Symbol,String>}}] :branch LL1 branch table. # @option options [HHash{Symbol,String => Array<Symbol,String>}] :first ({}) # Lists valid terminals that can precede each production (for error recovery). # @option options [Hash{Symbol,String => Array<Symbol,String>}] :follow ({}) # Lists valid terminals that can follow each production (for error recovery). # @option options[Integer] :high_water passed to lexer # @option options [Logger] :logger for errors/progress/debug. # @option options[Integer] :low_water passed to lexer # @option options [Boolean] :reset_on_start # Reset the parser state if the start token set with `prod` is found in a production. This reduces the production stack depth growth, which is appropriate for some grammars. # @option options [Boolean] :validate (false) # whether to validate the parsed statements and values. If not validating, the parser will attempt to recover from errors. # @yield [context, *data] # Yields for to return data to parser # @yieldparam [:statement, :trace] context # Context for block # @yieldparam [Symbol] *data # Data specific to the call # @return [EBNF::LL1::Parser] # @raise [Exception] Raises exceptions for parsing errors # or errors raised during processing callbacks. Internal # errors are raised using {Error}. 
# @see https://cs.adelaide.edu.au/~charles/lt/Lectures/07-ErrorRecovery.pdf def parse(input = nil, start = nil, **options, &block) @options = options.dup @branch = options[:branch] @first = options[:first] ||= {} @follow = options[:follow] ||= {} @cleanup = options[:cleanup] ||= {} @lexer = input.is_a?(Lexer) ? input : Lexer.new(input, self.class.patterns, **@options) @productions = [] @parse_callback = block @recovering = false @error_log = [] terminals = self.class.patterns.map(&:type) # Get defined terminals to help with branching # Unrecoverable errors raise Error, "Branch table not defined" unless @branch && @branch.length > 0 raise Error, "Starting production not defined" unless start @prod_data = [{}] start = start.split('#').last.to_sym unless start.is_a?(Symbol) todo_stack = [{prod: start, terms: nil}] while !todo_stack.empty? begin @recovering = false pushed = false if todo_stack.last[:terms].nil? todo_stack.last[:terms] = [] cur_prod = todo_stack.last[:prod] # If cur_prod is the starting production, we can reset the stack # to the beginning to avoid excessive growth in the production # stack if options[:reset_on_start] && cur_prod == start todo_stack = [{prod: start, terms: []}] @productions = [] @prod_data = [{}] end # Fetch the current token token = get_token(:recover) # At this point, token is either nil, in the first set of the production, # or in the follow set of this production or any previous production debug("parse(production)") do "token #{token ? token.representation.inspect : 'nil'}, " + "prod #{cur_prod.inspect}, " + "depth #{depth}" end # Got an opened production onStart(cur_prod) if token.nil? 
if !(first_include?(cur_prod, :_eps) && follow_include?(cur_prod, :_eof)) # End of file, and production does not contain eps, or it does, but follow does not contain eof error("parse(production)", "Unexpected end of input", lineno: lineno, production: cur_prod, raise: true) else debug("parse(production)") {"End of input prod #{cur_prod.inspect}"} end elsif prod_branch = @branch[cur_prod] sequence = prod_branch.fetch(token.representation) do error("parse(production)", "Expected one of #{@first[cur_prod].inspect}", token: token, production: cur_prod, raise: true) end debug("parse(production)") do "token #{token.representation.inspect} " + "prod #{cur_prod.inspect}, " + "prod_branch #{prod_branch.keys.inspect}, " + "sequence #{sequence.inspect}" end todo_stack.last[:terms] += sequence else error("parse(production)", "Unexpected", token: token, production: cur_prod, raise: true) end end debug("parse(terms)") {"todo #{todo_stack.last.inspect}, depth #{depth}"} while !todo_stack.last[:terms].to_a.empty? 
# Get the next term in this sequence term = todo_stack.last[:terms].shift debug("parse(token)") {"accept #{term.inspect}"} if token = accept(term) debug("parse(token)") {"token #{token.inspect}, term #{term.inspect}"} onTerminal(term, token) elsif terminals.include?(term) # If term is a terminal, then it is an error if token does not # match it error("parse(token)", "Expected #{term.inspect}", token: get_token, production: cur_prod, raise: true) else token = get_token # If token is not in firsts of term, but eps is, skip to next # term if first_include?(term, :_eps) && !first_include?(term, token) debug("parse(token)") {"skip optional term #{term.inspect} on #{token.inspect}"} break else # Push term onto stack todo_stack << {prod: term, terms: nil} debug("parse(push)") {"term #{term.inspect}, depth #{depth}"} pushed = true break end end end rescue Lexer::Error, Error => e # Lexer encountered an illegal token or the parser encountered # a terminal which is inappropriate for the current production. # Perform error recovery to find a reasonable terminal based # on the follow sets of the relevant productions. This includes # remaining terms from the current production and the stacked # productions @lineno = e.lineno if e.is_a?(Lexer::Error) # Skip to the next valid terminal @lexer.recover error("parse(#{e.class})", "With input '#{e.input}': #{e.message}", production: @productions.last, token: e.token) else # Otherwise, the terminal is fine, just not for this production. @lexer.shift error("parse(#{e.class})", "#{e.message}", production: @productions.last, token: e.token) end # Get the list of follows for this sequence, this production and the stacked productions. 
debug("recovery", "stack follows:") todo_stack.reverse.each do |todo| debug("recovery") {" #{todo[:prod]}: #{@follow[todo[:prod]].inspect}"} end # Find all follows to the top of the stack follows = todo_stack.inject([]) do |follow, todo| prod = todo[:prod] follow += @follow[prod] || [] end.uniq debug("recovery") {"follows: #{follows.inspect}"} # Skip tokens until one is found in follows while (token = get_token(:recover)) && follows.none? {|t| token === t} skipped = @lexer.shift progress("recovery") {"skip #{skipped.inspect}"} end debug("recovery") {"found #{token.inspect} in follows"} # Pop stack elements until token is in follows while !todo_stack.empty? && !follow_include?(todo_stack.last[:prod], token || :_eof) debug("recovery(pop)") {"todo #{todo_stack.last.inspect}, depth #{depth}"} todo_stack.pop onFinish end # Token is now in the first of the top production unless todo_stack.empty? todo_stack.pop onFinish end if todo_stack.empty? # Recovered to end of last production warn("recover", "recovered to end of productions") else warn("recover", "recovered to #{todo_stack.last[:prod].inspect} with #{token.inspect}") end @recovering = false ensure # After completing the last production in a sequence, pop down until we find a production # # If in recovery mode, continue popping until we find a term with a follow list while !pushed && !todo_stack.empty? && todo_stack.last.fetch(:terms, []).empty? debug("parse(pop)") {"todo #{todo_stack.last.inspect}, depth #{depth}"} todo_stack.pop onFinish end end end error("parse(eof)", "Finished processing before end of file", token: @lexer.first) if @lexer.first # Continue popping contexts off of the stack while !todo_stack.empty? debug("parse(eof)") {"stack #{todo_stack.last.inspect}, depth #{depth}"} # There can't be anything left to do, or if there is, it must be optional last_terms = todo_stack.last[:terms] if last_terms.length > 0 && last_terms.none? 
{|t|first_include?(t, :_eps)} error("parse(eof)", "End of input before end of production: stack #{todo_stack.last.inspect}, depth #{depth}" ) end todo_stack.pop onFinish end # When all is said and done, raise the error log unless @error_log.empty? raise Error, @error_log.join("\n") end end def depth; (@productions || []).length; end # Current ProdData element def prod_data; @prod_data.last; end # Add a single value to prod_data, allows for values to be an array def add_prod_datum(sym, values) case values when Array prod_data[sym] ||= [] debug("add_prod_datum(#{sym})") {"#{prod_data[sym].inspect} += #{values.inspect}"} prod_data[sym] += values when nil return else prod_data[sym] ||= [] debug("add_prod_datum(#{sym})") {"#{prod_data[sym].inspect} << #{values.inspect}"} prod_data[sym] << values end end # Add values to production data, values aranged as an array def add_prod_data(sym, *values) return if values.compact.empty? prod_data[sym] ||= [] prod_data[sym] += values debug("add_prod_data(#{sym})") {"#{prod_data[sym].inspect} += #{values.inspect}"} end protected ## # Error information, used as level `3` logger messages. # Messages may be logged and are saved for reporting at end of parsing. 
# # @param [String] node Relevant location associated with message # @param [String] message Error string # @param [Hash{Symbol => Object}] options # @option options [URI, #to_s] :production # @option options [Token] :token # @see #debug def error(node, message, **options) lineno = @lineno || (options[:token].lineno if options[:token].respond_to?(:lineno)) m = "ERROR " m += "[line: #{lineno}] " if lineno m += message m += " (found #{options[:token].inspect})" if options[:token] m += ", production = #{options[:production].inspect}" if options[:production] @error_log << m unless @recovering @recovering = true debug(node, m, level: options.fetch(:level, 3), **options) if options[:raise] || @options[:validate] raise Error.new(m, lineno: lineno, token: options[:token], production: options[:production]) end end ## # Warning information, used as level `2` logger messages. # Messages may be logged and are saved for reporting at end of parsing. # # @param [String] node Relevant location associated with message # @param [String] message Error string # @param [Hash] options # @option options [URI, #to_s] :production # @option options [Token] :token # @see #debug def warn(node, message, **options) lineno = @lineno || (options[:token].lineno if options[:token].respond_to?(:lineno)) m = "WARNING " m += "[line: #{lineno}] " if lineno m += message m += " (found #{options[:token].inspect})" if options[:token] m += ", production = #{options[:production].inspect}" if options[:production] @error_log << m unless @recovering debug(node, m, level: 2, lineno: lineno, **options) end ## # Progress logged when parsing. Passed as level `1` logger messages. # # The call is ignored, unless `@options[:logger]` is set. 
# # @overload progress(node, message, **options, &block) # @param [String] node Relevant location associated with message # @param [String] message ("") # @param [Hash] options # @option options [Integer] :depth # Recursion depth for indenting output # @see #debug def progress(node, *args, &block) return unless @options[:logger] lineno = @lineno || (options[:token].lineno if options[:token].respond_to?(:lineno)) args << {} unless args.last.is_a?(Hash) args.last[:level] ||= 1 args.last[:lineno] ||= lineno debug(node, *args, &block) end ## # Debug logging. # # The call is ignored, unless `@options[:logger]` is set. # # @overload debug(node, message, **options) # @param [Array<String>] args Relevant location associated with message # @param [Hash] options # @option options [Integer] :depth # Recursion depth for indenting output # @yieldreturn [String] additional string appended to `message`. def debug(*args, &block) return unless @options[:logger] options = args.last.is_a?(Hash) ? args.pop : {} lineno = @lineno || (options[:token].lineno if options[:token].respond_to?(:lineno)) level = options.fetch(:level, 0) depth = options[:depth] || self.depth if self.respond_to?(:log_debug) level = [:debug, :info, :warn, :error, :fatal][level] log_debug(*args, **options.merge(level: level, lineno: lineno, depth: depth), &block) elsif @options[:logger].respond_to?(:add) args << yield if block_given? @options[:logger].add(level, "[#{lineno}]" + (" " * depth) + args.join(" ")) elsif @options[:logger].respond_to?(:<<) args << yield if block_given? 
@options[:logger] << "[#{lineno}]" + (" " * depth) + args.join(" ") end end private # Start for production def onStart(prod) handler = self.class.start_handlers[prod] @productions << prod if handler # Create a new production data element, potentially allowing handler # to customize before pushing on the @prod_data stack progress("#{prod}(:start):#{@prod_data.length}") {@prod_data.last} data = {} begin self.class.eval_with_binding(self) { handler.call(@prod_data.last, data, @parse_callback) } rescue ArgumentError, Error => e error("start", "#{e.class}: #{e.message}", production: prod) @recovering = false end @prod_data << data elsif [:merge, :star].include?(@cleanup[prod]) # Save current data to merge later @prod_data << {} progress("#{prod}(:start}:#{@prod_data.length}:cleanup:#{@cleanup[prod]}") { get_token.inspect + (@recovering ? ' recovering' : '')} else # Make sure we push as many was we pop, even if there is no # explicit start handler @prod_data << {} if self.class.production_handlers[prod] progress("#{prod}(:start:#{@prod_data.length})") { get_token.inspect + (@recovering ? ' recovering' : '')} end #puts "prod_data(s): " + @prod_data.inspect end # Finish of production def onFinish #puts "prod_data(f): " + @prod_data.inspect prod = @productions.last handler = self.class.production_handlers[prod] if handler && !@recovering # Pop production data element from stack, potentially allowing handler to use it data = @prod_data.pop begin self.class.eval_with_binding(self) { handler.call(@prod_data.last, data, @parse_callback) } rescue ArgumentError, Error => e error("finish", "#{e.class}: #{e.message}", production: prod) @recovering = false end progress("#{prod}(:finish):#{@prod_data.length}") {@prod_data.last} elsif [:merge, :star].include?(@cleanup[prod]) data = @prod_data.pop input = @prod_data.last # Append every element in data to last prod_data data.each do |k, v| input[k] = case input[k] when nil then v.is_a?(Hash) ? 
v : Array(v) when Hash then input[k].merge!(v) else Array(input[k]) + Array(v) end end debug("#{prod}(:finish):#{@prod_data.length} cleanup:#{@cleanup[prod]}") {@prod_data.last} else progress("#{prod}(:finish):#{@prod_data.length}") { "recovering" if @recovering } end @productions.pop end # A terminal def onTerminal(prod, token) unless @productions.empty? parentProd = @productions.last handler = self.class.terminal_handlers[prod] # Allows catch-all for simple string terminals handler ||= self.class.terminal_handlers[nil] if prod.is_a?(String) if handler begin self.class.eval_with_binding(self) { handler.call(parentProd, token, @prod_data.last, @parse_callback) } rescue ArgumentError, Error => e error("terminal", "#{e.class}: #{e.message}", token: token, production: prod) @recovering = false end progress("#{prod}(:terminal)", "", depth: (depth + 1)) {"#{token}: #{@prod_data.last}"} else progress("#{prod}(:terminal)", "", depth: (depth + 1)) {token.to_s} end else error("#{parentProd}(:terminal)", "Terminal has no parent production", token: token, production: prod) end end ## # Does first include the specified token # # @param [Symbol] production # @param [Symbol, Lexer::Token] token # A terminal, or symbol or string # @return [Boolean] def first_include?(production, token) if token.is_a?(Lexer::Token) @first.fetch(production, []).any? {|t| token === t} else @first.fetch(production, []).include?(token) end end ## # Does follow include the specified terminal # # @param [Symbol] production # @param [Symbol, Lexer::Token] token # A terminal, or symbol or string # @return [Boolean] def follow_include?(production, token) if token.is_a?(Lexer::Token) @follow.fetch(production, []).any? 
{|t| token === t} else @follow.fetch(production, []).include?(token) end end ## # Return the next token, raising an error if the token is invalid # # @param [:recover] recover # Recover from errors and go until next valid token or end of file # @return [Token] # @raise [Lexer::Error] def get_token(recover = nil) token = @lexer.first #progress("token") {token.inspect} @lineno = token.lineno if token token rescue Lexer::Error => e if recover # Recover from lexer error so that we can not bail out too early @lexer.recover error("get_token", "With input '#{e.input}': #{e.message}}", token: e.token, lineno: e.lineno) retry end raise end ## # Accept the first token in the input stream if it matches # `type\_or\_value`. Raise Error, otherwise. # # @param [Symbol, String] type_or_value # @return [Token] # @raise [Error, Lexer::Error] def accept(type_or_value) if (token = get_token) && token === type_or_value debug("accept") {"#{token.inspect} === #{type_or_value.inspect}"} @lexer.shift end end public ## # Raised for errors during parsing. # # @example Raising a parser error # raise Error.new( # "invalid token '%' on line 10", # token: '%', lineno: 9, production: :turtleDoc) # # @see https://ruby-doc.org/core/classes/StandardError.html class Error < StandardError ## # The current production. # # @return [Symbol] attr_reader :production ## # The invalid token which triggered the error. # # @return [String] attr_reader :token ## # The line number where the error occurred. # # @return [Integer] attr_reader :lineno ## # Initializes a new lexer error instance. 
# # @param [String, #to_s] message # @param [Hash{Symbol => Object}] options # @option options [Symbol] :production (nil) # @option options [String] :token (nil) # @option options [Integer] :lineno (nil) def initialize(message, **options) @production = options[:production] @token = options[:token] @lineno = options[:lineno] || (@token.lineno if @token.respond_to?(:lineno)) super(message.to_s) end end # class Error end # class Parser end # module EBNF::LL1
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/ll1/lexer.rb
lib/ebnf/ll1/lexer.rb
module EBNF::LL1 require 'ebnf/ll1/scanner' unless defined?(Scanner) ## # A lexical analyzer # # @example Tokenizing a Turtle string # terminals = [ # [:BLANK_NODE_LABEL, %r(_:(#{PN_LOCAL}))], # ... # ] # ttl = "@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> ." # lexer = EBNF::LL1::Lexer.tokenize(ttl, terminals) # lexer.each_token do |token| # puts token.inspect # end # # @example Tokenizing and returning a token stream # lexer = EBNF::LL1::Lexer.tokenize(...) # while :some-condition # token = lexer.first # Get the current token # token = lexer.shift # Get the current token and shift to the next # end # # @example Handling error conditions # begin # EBNF::LL1::Lexer.tokenize(query) # rescue EBNF::LL1::Lexer::Error => error # warn error.inspect # end # # @see https://en.wikipedia.org/wiki/Lexical_analysis class Lexer include Enumerable include ::EBNF::Unescape ## # @return [Regexp] defines whitespace, including comments, otherwise whitespace must be explicit in terminals attr_reader :whitespace ## # Returns a copy of the given `input` string with all `\uXXXX` and # `\UXXXXXXXX` Unicode codepoint escape sequences replaced with their # unescaped UTF-8 character counterparts. # # @param [String] string # @return [String] # @see https://www.w3.org/TR/rdf-sparql-query/#codepointEscape def self.unescape_codepoints(string) ::EBNF::Unescape.unescape_codepoints(string) end ## # Returns a copy of the given `input` string with all string escape # sequences (e.g. `\n` and `\t`) replaced with their unescaped UTF-8 # character counterparts. # # @param [String] input # @return [String] # @see https://www.w3.org/TR/rdf-sparql-query/#grammarEscapes def self.unescape_string(input) ::EBNF::Unescape.unescape_string(input) end ## # Tokenizes the given `input` string or stream. # # @param [String, #to_s] input # @param [Array<Array<Symbol, Regexp>>] terminals # Array of symbol, regexp pairs used to match terminals. 
# If the symbol is nil, it defines a Regexp to match string terminals. # @param [Hash{Symbol => Object}] options # @yield [lexer] # @yieldparam [Lexer] lexer # @return [Lexer] # @raise [Lexer::Error] on invalid input def self.tokenize(input, terminals, **options, &block) lexer = self.new(input, terminals, **options) block_given? ? block.call(lexer) : lexer end ## # Initializes a new lexer instance. # # @param [String, #to_s] input # @param [Array<Array<Symbol, Regexp>, Terminal>] terminals # Array of symbol, regexp pairs used to match terminals. # If the symbol is nil, it defines a Regexp to match string terminals. # @param [Hash{Symbol => Object}] options # @option options [Regexp] :whitespace # Whitespace between tokens, including comments # @option options[Integer] :high_water passed to scanner # @option options[Integer] :low_water passed to scanner def initialize(input = nil, terminals = nil, **options) @options = options.dup @whitespace = @options[:whitespace] @terminals = terminals.map do |term| if term.is_a?(Array) && term.length ==3 # Last element is options Terminal.new(term[0], term[1], **term[2]) elsif term.is_a?(Array) Terminal.new(*term) else term end end raise Error, "Terminal patterns not defined" unless @terminals && @terminals.length > 0 @scanner = Scanner.new(input, **options) end ## # Any additional options for the lexer. # # @return [Hash] attr_reader :options ## # The current input string being processed. # # @return [String] attr_accessor :input ## # Returns `true` if the input string is lexically valid. # # To be considered valid, the input string must contain more than zero # terminals, and must not contain any invalid terminals. # # @return [Boolean] def valid? begin !count.zero? rescue Error false end end ## # Enumerates each token in the input string. # # @yield [token] # @yieldparam [Token] token # @return [Enumerator] def each_token(&block) if block_given? 
while token = shift yield token end end enum_for(:each_token) end alias_method :each, :each_token ## # Returns first token in input stream # # @param [Array[Symbol]] types Optional set of types for restricting terminals examined # @return [Token] def first(*types) return nil unless scanner @first ||= begin {} while !scanner.eos? && skip_whitespace return nil if scanner.eos? token = match_token(*types) if token.nil? lexme = (scanner.rest.split(@whitespace || /\s/).first rescue nil) || scanner.rest raise Error.new("Invalid token #{lexme[0..100].inspect}", input: scanner.rest[0..100], token: lexme, lineno: lineno) end token end rescue ArgumentError, Encoding::CompatibilityError => e raise Error.new(e.message, input: (scanner.rest[0..100] rescue '??'), token: lexme, lineno: lineno) rescue Error raise rescue STDERR.puts "Expected ArgumentError, got #{$!.class}" raise end ## # Returns first token and shifts to next # # @return [Token] def shift cur = first @first = nil cur end ## # Skip input until a token is matched # # @param [Array[Symbol]] types Optional set of types for restricting terminals examined # @return [Token] def recover(*types) until scanner.eos? || tok = match_token(*types) if scanner.skip_until(@whitespace || /\s+/m).nil? # Skip past current "token" # No whitespace at the end, must be and end of string scanner.terminate else skip_whitespace end end scanner.unscan if tok first end ## # The current line number (one-based). # # @return [Integer] def lineno scanner.lineno end protected # @return [StringScanner] attr_reader :scanner ## # Skip whitespace, as defined through input options or defaults def skip_whitespace # skip all white space, but keep track of the current line number while @whitespace && !scanner.eos? unless scanner.scan(@whitespace) return end end end ## # Return the matched token. 
# # If the token was matched with a case-insensitive regexp, # track this with the resulting {Token}, so that comparisons # with that token are also case insensitive # # @param [Array[Symbol]] types Optional set of types for restricting terminals examined # @return [Token] def match_token(*types) @terminals.each do |term| next unless types.empty? || types.include?(term.type) #STDERR.puts "match[#{term.type}] #{scanner.rest[0..100].inspect} against #{term.regexp.inspect}" #if term.type == :STRING_LITERAL_SINGLE_QUOTE if term.partial_regexp && scanner.match?(term.partial_regexp) && !scanner.match?(term.regexp) && scanner.respond_to?(:ensure_buffer_full) scanner.ensure_buffer_full end if matched = scanner.scan(term.regexp) #STDERR.puts " matched #{term.type.inspect}: #{matched.inspect}" tok = token(term.type, term.canonicalize(matched)) return tok end end nil end # Terminal class, representing the terminal identifier and # matching regular expression. Optionally, a Terminal may include # a map to turn case-insensitively matched terminals into their # canonical form class Terminal attr_reader :type attr_reader :regexp attr_reader :partial_regexp # @param [Symbol, nil] type # @param [Regexp] regexp # @param [Hash{Symbol => Object}] options # @option options [Hash{String => String}] :map ({}) # A mapping from terminals, in lower-case form, to # their canonical value # @option options [Boolean] :unescape # Cause strings and codepoints to be unescaped. # @option options [Regexp] :partial_regexp # A regular expression matching the beginning of this terminal; useful for terminals that match things longer than the scanner low water mark. def initialize(type, regexp, **options) @type, @regexp, @options = type, regexp, options @partial_regexp = options[:partial_regexp] @map = options.fetch(:map, {}) end # Map a terminal to it's canonical form. If there is no # map, `value` is returned. `value` is unescaped if there # is no canonical mapping, and the `:unescape` option is set. 
# # @param [String] value # value to canonicalize # @return [String] def canonicalize(value) @map.fetch(value.downcase, unescape(value)) end def ==(other) case other when Array @type == other.first && @regexp == other.last when Terminal @type == other.type && @regexp == other.regexp end end protected # Perform string and codepoint unescaping if defined for this terminal # @param [String] string # @return [String] def unescape(string) if @options[:unescape] EBNF::Unescape.unescape(string) else string end end end ## # Constructs a new token object annotated with the current line number. # # The parser relies on the type being a symbolized URI and the value being # a string, if there is no type. If there is a type, then the value takes # on the native representation appropriate for that type. # # @param [Symbol] type # @param [String] value # Scanner instance with access to matched groups # @param [Hash{Symbol => Object}] options # @return [Token] def token(type, value, **options) Token.new(type, value, lineno: lineno, **options) end ## # Represents a lexer token. # # @example Creating a new token # token = EBNF::LL1::Lexer::Token.new(:LANGTAG, "en") # token.type #=> :LANGTAG # token.value #=> "en" # # @see https://en.wikipedia.org/wiki/Lexical_analysis#Token class Token ## # The token's symbol type. # # @return [Symbol] attr_reader :type ## # The token's value. # # @return [String] attr_reader :value ## # The line number where the token was encountered. # # @return [Integer] attr_reader :lineno ## # Any additional options for the token. # # @return [Hash] attr_reader :options ## # Initializes a new token instance. # # @param [Symbol] type # @param [String] value # @param [Hash{Symbol => Object}] options # @option options [Integer] :lineno (nil) def initialize(type, value, **options) @type = type.to_s.to_sym if type @value = value.to_s @options = options.dup @lineno = @options.delete(:lineno) end ## # Returns the attribute named by `key`. 
# # @param [Symbol] key # @return [Object] def [](key) key = key.to_s.to_sym unless key.is_a?(Integer) || key.is_a?(Symbol) case key when 0, :type then @type when 1, :value then @value else nil end end ## # Returns `true` if the given `value` matches either the type or value # of this token. # # @example Matching using the symbolic type # EBNF::LL1::Lexer::Token.new(:NIL) === :NIL #=> true # # @example Matching using the string value # EBNF::LL1::Lexer::Token.new(nil, "{") === "{" #=> true # # @param [Symbol, String] value # @return [Boolean] def ===(value) case value when Symbol value == @type when ::String @value == (@options[:case_insensitive] ? value.to_s.downcase : value.to_s) else value == @value end end ## # Returns a hash table representation of this token. # # @return [Hash] def to_hash {type: @type, value: @value} end ## # Readable version of token def to_s @type ? @type.inspect : @value end ## # Returns type, if not nil, otherwise value def representation @type ? @type : @value end ## # Returns an array representation of this token. # # @return [Array] def to_a [@type, @value] end ## # Returns a developer-friendly representation of this token. # # @return [String] def inspect "#{@value.inspect}#{'(' + @type.to_s + ')' if @type}" end end # class Token ## # Raised for errors during lexical analysis. # # @example Raising a lexer error # raise EBNF::LL1::Lexer::Error.new( # "invalid token '%' on line 10", # input: query, token: '%', lineno: 9) # # @see https://ruby-doc.org/core/classes/StandardError.html class Error < StandardError ## # The input string associated with the error. # # @return [String] attr_reader :input ## # The invalid token which triggered the error. # # @return [String] attr_reader :token ## # The line number where the error occurred. # # @return [Integer] attr_reader :lineno ## # Initializes a new lexer error instance. 
# # @param [String, #to_s] message # @param [Hash{Symbol => Object}] options # @option options [String] :input (nil) # @option options [String] :token (nil) # @option options [Integer] :lineno (nil) def initialize(message, **options) @input = options[:input] @token = options[:token] @lineno = options[:lineno] super(message.to_s) end end # class Error end # class Lexer end # module EBNF
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/ll1/scanner.rb
lib/ebnf/ll1/scanner.rb
# coding: utf-8 require 'strscan' unless defined?(StringScanner) module EBNF::LL1 ## # Overload StringScanner with file operations and line counting # # * Reloads scanner as required until EOF. # * Loads to a high-water and reloads when remaining size reaches a low-water. # # FIXME: Only implements the subset required by the Lexer for now. class Scanner < StringScanner HIGH_WATER = 512 * 1024 # Hopefully large enough to deal with long multi-line comments LOW_WATER = 4 * 1024 ## # @return [String, IO, StringIO] attr_reader :input ## # The current line number (one-based). # # @return [Integer] attr_accessor :lineno ## # Create a scanner, from an IO # # @param [String, IO, #read] input # @param [Hash{Symbol => Object}] options # @option options[Integer] :high_water (HIGH_WATER) # @option options[Integer] :low_water (LOW_WATER) # @return [Scanner] def initialize(input, **options) @options = options.merge(high_water: HIGH_WATER, low_water: LOW_WATER) @previous_lineno = @lineno = 1 @input = input.is_a?(String) ? encode_utf8(input) : input super(input.is_a?(String) ? @input : "") feed_me self end ## # Ensures that the input buffer is full to the high water mark, or end of file. Useful when matching tokens that may be longer than the low water mark def ensure_buffer_full # Read up to high-water mark ensuring we're at an end of line if @input.respond_to?(:eof?) && !@input.eof? diff = @options[:high_water] - rest_size string = encode_utf8(@input.read(diff)) string << encode_utf8(@input.gets) unless @input.eof? self << string if string end end ## # Returns true if the scan pointer is at the end of the string # # @return [Boolean] def eos? feed_me super end ## # Returns the "rest" of the line, or the next line if at EOL (i.e. everything after the scan pointer). # If there is no more data (eos? = true), it returns "". # # @return [String] def rest feed_me encode_utf8 super end ## # Tries to match with `pattern` at the current position. 
# # If there is a match, the scanner advances the "scan pointer" and returns the matched string. # Otherwise, the scanner returns nil. # # If the scanner begins with the multi-line start expression # @example # s = StringScanner.new('test string') # p s.scan(/\w+/) # -> "test" # p s.scan(/\w+/) # -> nil # p s.scan(/\s+/) # -> " " # p s.scan(/\w+/) # -> "string" # p s.scan(/./) # -> nil # # @param [Regexp] pattern # @return [String] def scan(pattern) feed_me @previous_lineno = @lineno if matched = encode_utf8(super) @lineno += matched.count("\n") end matched end ## # Scans the string until the pattern is matched. Returns the substring up to and including the end of the match, advancing the scan pointer to that location. If there is no match, nil is returned. # # @example # s = StringScanner.new("Fri Dec 12 1975 14:39") # s.scan_until(/1/) # -> "Fri Dec 1" # s.pre_match # -> "Fri Dec " # s.scan_until(/XYZ/) # -> nil # # @param [Regexp] pattern # @return [String] def scan_until(pattern) feed_me @previous_lineno = @lineno if matched = encode_utf8(super) @lineno += matched.count("\n") end matched end ## # Attempts to skip over the given `pattern` beginning with the scan pointer. # If it matches, the scan pointer is advanced to the end of the match, # and the length of the match is returned. Otherwise, `nil` is returned. # # similar to `scan`, but without returning the matched string. # @param [Regexp] pattern def skip(pattern) scan(pattern) nil end ## # Advances the scan pointer until pattern is matched and consumed. Returns the number of bytes advanced, or nil if no match was found. # # Look ahead to match pattern, and advance the scan pointer to the end of the match. Return the number of characters advanced, or nil if the match was unsuccessful. # # It’s similar to scan_until, but without returning the intervening string. 
# @param [Regexp] pattern def skip_until(pattern) (matched = scan_until(pattern)) && matched.length end ## # Sets the scan pointer to the previous position. Only one previous position is remembered, and it changes with each scanning operation. def unscan @lineno = @previous_lineno super end ## # Set the scan pointer to the end of the string and clear matching data def terminate feed_me super end private # Maintain low-water mark def feed_me ensure_buffer_full if rest_size < @options[:low_water] end # Perform UTF-8 encoding of input def encode_utf8(string) if string && string.encoding != Encoding::UTF_8 string = string.dup if string.frozen? string.force_encoding(Encoding::UTF_8) end string end end end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/lib/ebnf/ebnf/meta.rb
lib/ebnf/ebnf/meta.rb
# This file is automatically generated by ebnf version 2.5.0 # Derived from etc/ebnf.ebnf module EBNFMeta RULES = [ EBNF::Rule.new(:ebnf, "1", [:star, :_ebnf_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_ebnf_1, "1.1", [:alt, :declaration, :rule]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:declaration, "2", [:alt, "@terminals", :pass]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:rule, "3", [:seq, :LHS, :expression]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:expression, "4", [:seq, :alt]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:alt, "5", [:seq, :seq, :_alt_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_alt_1, "5.1", [:star, :_alt_2]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_alt_2, "5.2", [:seq, "|", :seq]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:seq, "6", [:plus, :diff]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:diff, "7", [:seq, :postfix, :_diff_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_diff_1, "7.1", [:opt, :_diff_2]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_diff_2, "7.2", [:seq, "-", :postfix]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:postfix, "8", [:seq, :primary, :_postfix_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_postfix_1, "8.1", [:opt, :POSTFIX]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:primary, "9", [:alt, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, :_primary_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_primary_1, "9.1", [:seq, "(", :expression, ")"]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:pass, "10", [:seq, "@pass", :expression]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_terminals, nil, [:seq], kind: :terminals).extend(EBNF::PEG::Rule), EBNF::Rule.new(:LHS, "11", [:seq, :_LHS_1, :SYMBOL, :_LHS_2, "::="], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_LHS_1, "11.1", [:opt, :_LHS_3], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_LHS_3, "11.3", [:seq, "[", :SYMBOL, "]", :_LHS_4], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_LHS_4, "11.4", [:plus, " "], kind: :terminal).extend(EBNF::PEG::Rule), 
EBNF::Rule.new(:_LHS_2, "11.2", [:star, " "], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:SYMBOL, "12", [:alt, :_SYMBOL_1, :O_SYMBOL], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_SYMBOL_1, "12.1", [:seq, "<", :O_SYMBOL, ">"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:O_SYMBOL, "12a", [:plus, :_O_SYMBOL_1], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_SYMBOL_1, "12a.1", [:alt, :_O_SYMBOL_2, :_O_SYMBOL_3, :_O_SYMBOL_4, "_", "."], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_SYMBOL_2, "12a.2", [:range, "a-z"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_SYMBOL_3, "12a.3", [:range, "A-Z"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_SYMBOL_4, "12a.4", [:range, "0-9"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:HEX, "13", [:seq, "#x", :_HEX_1], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEX_1, "13.1", [:plus, :_HEX_2], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEX_2, "13.2", [:alt, :_HEX_3, :_HEX_4, :_HEX_5], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEX_3, "13.3", [:range, "a-f"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEX_4, "13.4", [:range, "A-F"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEX_5, "13.5", [:range, "0-9"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:RANGE, "14", [:seq, "[", :_RANGE_1, :_RANGE_2, :_RANGE_3], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_1, "14.1", [:plus, :_RANGE_4], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_4, "14.4", [:alt, :_RANGE_5, :_RANGE_6, :R_CHAR, :HEX], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_5, "14.5", [:seq, :R_CHAR, "-", :R_CHAR], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_6, "14.6", [:seq, :HEX, "-", :HEX], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_2, "14.2", [:opt, "-"], kind: 
:terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_3, "14.3", [:diff, "]", :LHS], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:O_RANGE, "15", [:seq, "[^", :_O_RANGE_1, :_O_RANGE_2, "]"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_RANGE_1, "15.1", [:plus, :_O_RANGE_3], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_RANGE_3, "15.3", [:alt, :_O_RANGE_4, :_O_RANGE_5, :R_CHAR, :HEX], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_RANGE_4, "15.4", [:seq, :R_CHAR, "-", :R_CHAR], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_RANGE_5, "15.5", [:seq, :HEX, "-", :HEX], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_RANGE_2, "15.2", [:opt, "-"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:STRING1, "16", [:seq, "\"", :_STRING1_1, "\""], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_STRING1_1, "16.1", [:star, :_STRING1_2], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_STRING1_2, "16.2", [:diff, :CHAR, "\""], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:STRING2, "17", [:seq, "'", :_STRING2_1, "'"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_STRING2_1, "17.1", [:star, :_STRING2_2], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_STRING2_2, "17.2", [:diff, :CHAR, "'"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:CHAR, "18", [:alt, :_CHAR_1, :_CHAR_2, :_CHAR_3, :_CHAR_4], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_CHAR_1, "18.1", [:range, "#x9#xA#xD"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_CHAR_2, "18.2", [:range, "#x20-#xD7FF"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_CHAR_3, "18.3", [:range, "#xE000-#xFFFD"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_CHAR_4, "18.4", [:range, "#x10000-#x10FFFF"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:R_CHAR, "19", [:diff, :CHAR, :_R_CHAR_1], kind: 
:terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_R_CHAR_1, "19.1", [:alt, "]", "-", :HEX], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:POSTFIX, "20", [:range, "?*+"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:PASS, "21", [:alt, :_PASS_1, :_PASS_2, :_PASS_3, :_PASS_4], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_1, "21.1", [:range, "#x9#xA#xD#x20"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_2, "21.2", [:seq, :_PASS_5, :_PASS_6], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_5, "21.5", [:alt, :_PASS_7, "//"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_7, "21.7", [:diff, "#", "#x"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_6, "21.6", [:star, :_PASS_8], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_8, "21.8", [:range, "^#xA#xD"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_3, "21.3", [:seq, "/*", :_PASS_9, "*/"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_9, "21.9", [:star, :_PASS_10], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_10, "21.10", [:alt, :_PASS_11, :_PASS_12], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_11, "21.11", [:opt, :_PASS_13], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_13, "21.13", [:seq, "*", :_PASS_14], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_14, "21.14", [:range, "^/"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_12, "21.12", [:range, "^*"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_4, "21.4", [:seq, "(*", :_PASS_15, "*)"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_15, "21.15", [:star, :_PASS_16], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_16, "21.16", [:alt, :_PASS_17, :_PASS_18], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_17, "21.17", [:opt, :_PASS_19], kind: 
:terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_19, "21.19", [:seq, "*", :_PASS_20], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_20, "21.20", [:range, "^)"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_18, "21.18", [:range, "^*"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_pass, nil, [:seq, :PASS], kind: :pass).extend(EBNF::PEG::Rule), ] end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/etc/ebnf.peg.rb
etc/ebnf.peg.rb
# This file is automatically generated by ebnf version 2.5.0 # Derived from etc/ebnf.ebnf module EBNFMeta RULES = [ EBNF::Rule.new(:ebnf, "1", [:star, :_ebnf_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_ebnf_1, "1.1", [:alt, :declaration, :rule]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:declaration, "2", [:alt, "@terminals", :pass]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:rule, "3", [:seq, :LHS, :expression]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:expression, "4", [:seq, :alt]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:alt, "5", [:seq, :seq, :_alt_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_alt_1, "5.1", [:star, :_alt_2]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_alt_2, "5.2", [:seq, "|", :seq]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:seq, "6", [:plus, :diff]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:diff, "7", [:seq, :postfix, :_diff_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_diff_1, "7.1", [:opt, :_diff_2]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_diff_2, "7.2", [:seq, "-", :postfix]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:postfix, "8", [:seq, :primary, :_postfix_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_postfix_1, "8.1", [:opt, :POSTFIX]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:primary, "9", [:alt, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, :_primary_1]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_primary_1, "9.1", [:seq, "(", :expression, ")"]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:pass, "10", [:seq, "@pass", :expression]).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_terminals, nil, [:seq], kind: :terminals).extend(EBNF::PEG::Rule), EBNF::Rule.new(:LHS, "11", [:seq, :_LHS_1, :SYMBOL, :_LHS_2, "::="], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_LHS_1, "11.1", [:opt, :_LHS_3], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_LHS_3, "11.3", [:seq, "[", :SYMBOL, "]", :_LHS_4], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_LHS_4, "11.4", [:plus, " "], kind: :terminal).extend(EBNF::PEG::Rule), 
EBNF::Rule.new(:_LHS_2, "11.2", [:star, " "], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:SYMBOL, "12", [:alt, :_SYMBOL_1, :O_SYMBOL], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_SYMBOL_1, "12.1", [:seq, "<", :O_SYMBOL, ">"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:O_SYMBOL, "12a", [:plus, :_O_SYMBOL_1], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_SYMBOL_1, "12a.1", [:alt, :_O_SYMBOL_2, :_O_SYMBOL_3, :_O_SYMBOL_4, "_", "."], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_SYMBOL_2, "12a.2", [:range, "a-z"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_SYMBOL_3, "12a.3", [:range, "A-Z"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_SYMBOL_4, "12a.4", [:range, "0-9"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:HEX, "13", [:seq, "#x", :_HEX_1], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEX_1, "13.1", [:plus, :_HEX_2], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEX_2, "13.2", [:alt, :_HEX_3, :_HEX_4, :_HEX_5], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEX_3, "13.3", [:range, "a-f"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEX_4, "13.4", [:range, "A-F"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_HEX_5, "13.5", [:range, "0-9"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:RANGE, "14", [:seq, "[", :_RANGE_1, :_RANGE_2, "]"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_1, "14.1", [:plus, :_RANGE_3], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_3, "14.3", [:alt, :_RANGE_4, :_RANGE_5, :R_CHAR, :HEX], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_4, "14.4", [:seq, :R_CHAR, "-", :R_CHAR], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_5, "14.5", [:seq, :HEX, "-", :HEX], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_RANGE_2, "14.2", [:opt, "-"], kind: 
:terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:O_RANGE, "15", [:seq, "[^", :_O_RANGE_1, :_O_RANGE_2, "]"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_RANGE_1, "15.1", [:plus, :_O_RANGE_3], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_RANGE_3, "15.3", [:alt, :_O_RANGE_4, :_O_RANGE_5, :R_CHAR, :HEX], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_RANGE_4, "15.4", [:seq, :R_CHAR, "-", :R_CHAR], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_RANGE_5, "15.5", [:seq, :HEX, "-", :HEX], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_O_RANGE_2, "15.2", [:opt, "-"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:STRING1, "16", [:seq, "\"", :_STRING1_1, "\""], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_STRING1_1, "16.1", [:star, :_STRING1_2], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_STRING1_2, "16.2", [:diff, :CHAR, "\""], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:STRING2, "17", [:seq, "'", :_STRING2_1, "'"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_STRING2_1, "17.1", [:star, :_STRING2_2], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_STRING2_2, "17.2", [:diff, :CHAR, "'"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:CHAR, "18", [:alt, :_CHAR_1, :_CHAR_2, :_CHAR_3, :_CHAR_4], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_CHAR_1, "18.1", [:range, "#x9#xA#xD"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_CHAR_2, "18.2", [:range, "#x20-#xD7FF"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_CHAR_3, "18.3", [:range, "#xE000-#xFFFD"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_CHAR_4, "18.4", [:range, "#x10000-#x10FFFF"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:R_CHAR, "19", [:diff, :CHAR, :_R_CHAR_1], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_R_CHAR_1, "19.1", [:alt, "]", "-", :HEX], kind: 
:terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:POSTFIX, "20", [:range, "?*+"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:PASS, "21", [:alt, :_PASS_1, :_PASS_2, :_PASS_3, :_PASS_4], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_1, "21.1", [:range, "#x9#xA#xD#x20"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_2, "21.2", [:seq, :_PASS_5, :_PASS_6], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_5, "21.5", [:alt, :_PASS_7, "//"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_7, "21.7", [:diff, "#", "#x"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_6, "21.6", [:star, :_PASS_8], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_8, "21.8", [:range, "^#xA#xD"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_3, "21.3", [:seq, "/*", :_PASS_9, "*/"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_9, "21.9", [:star, :_PASS_10], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_10, "21.10", [:alt, :_PASS_11, :_PASS_12], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_11, "21.11", [:opt, :_PASS_13], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_13, "21.13", [:seq, "*", :_PASS_14], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_14, "21.14", [:range, "^/"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_12, "21.12", [:range, "^*"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_4, "21.4", [:seq, "(*", :_PASS_15, "*)"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_15, "21.15", [:star, :_PASS_16], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_16, "21.16", [:alt, :_PASS_17, :_PASS_18], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_17, "21.17", [:opt, :_PASS_19], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_19, "21.19", [:seq, "*", :_PASS_20], kind: 
:terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_20, "21.20", [:range, "^)"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_PASS_18, "21.18", [:range, "^*"], kind: :terminal).extend(EBNF::PEG::Rule), EBNF::Rule.new(:_pass, nil, [:seq, :PASS], kind: :pass).extend(EBNF::PEG::Rule), ] end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
dryruby/ebnf
https://github.com/dryruby/ebnf/blob/2d26292c12187028cb7a9ac1bdd7a03942d53753/etc/ebnf.ll1.rb
etc/ebnf.ll1.rb
# This file is automatically generated by ebnf version 2.5.0 # Derived from etc/ebnf.ebnf module Meta START = :ebnf BRANCH = { :alt => { "(" => [:seq, :_alt_1], :HEX => [:seq, :_alt_1], :O_RANGE => [:seq, :_alt_1], :RANGE => [:seq, :_alt_1], :STRING1 => [:seq, :_alt_1], :STRING2 => [:seq, :_alt_1], :SYMBOL => [:seq, :_alt_1], }, :_alt_1 => { ")" => [], "@pass" => [], "@terminals" => [], :LHS => [], "|" => [:_alt_3], }, :_alt_2 => { "|" => ["|", :seq], }, :_alt_3 => { "|" => [:_alt_2, :_alt_1], }, :declaration => { "@pass" => [:pass], "@terminals" => ["@terminals"], }, :diff => { "(" => [:postfix, :_diff_1], :HEX => [:postfix, :_diff_1], :O_RANGE => [:postfix, :_diff_1], :RANGE => [:postfix, :_diff_1], :STRING1 => [:postfix, :_diff_1], :STRING2 => [:postfix, :_diff_1], :SYMBOL => [:postfix, :_diff_1], }, :_diff_1 => { "(" => [], ")" => [], "-" => [:_diff_2], "@pass" => [], "@terminals" => [], :HEX => [], :LHS => [], :O_RANGE => [], :RANGE => [], :STRING1 => [], :STRING2 => [], :SYMBOL => [], "|" => [], }, :_diff_2 => { "-" => ["-", :postfix], }, :ebnf => { "@pass" => [:_ebnf_2], "@terminals" => [:_ebnf_2], :LHS => [:_ebnf_2], }, :_ebnf_1 => { "@pass" => [:declaration], "@terminals" => [:declaration], :LHS => [:rule], }, :_ebnf_2 => { "@pass" => [:_ebnf_1, :ebnf], "@terminals" => [:_ebnf_1, :ebnf], :LHS => [:_ebnf_1, :ebnf], }, :expression => { "(" => [:alt], :HEX => [:alt], :O_RANGE => [:alt], :RANGE => [:alt], :STRING1 => [:alt], :STRING2 => [:alt], :SYMBOL => [:alt], }, :pass => { "@pass" => ["@pass", :expression], }, :postfix => { "(" => [:primary, :_postfix_1], :HEX => [:primary, :_postfix_1], :O_RANGE => [:primary, :_postfix_1], :RANGE => [:primary, :_postfix_1], :STRING1 => [:primary, :_postfix_1], :STRING2 => [:primary, :_postfix_1], :SYMBOL => [:primary, :_postfix_1], }, :_postfix_1 => { "(" => [], ")" => [], "-" => [], "@pass" => [], "@terminals" => [], :HEX => [], :LHS => [], :O_RANGE => [], :POSTFIX => [:POSTFIX], :RANGE => [], :STRING1 => [], :STRING2 => 
[], :SYMBOL => [], "|" => [], }, :primary => { "(" => [:_primary_1], :HEX => [:HEX], :O_RANGE => [:O_RANGE], :RANGE => [:RANGE], :STRING1 => [:STRING1], :STRING2 => [:STRING2], :SYMBOL => [:SYMBOL], }, :_primary_1 => { "(" => ["(", :expression, ")"], }, :rule => { :LHS => [:LHS, :expression], }, :seq => { "(" => [:diff, :_seq_1], :HEX => [:diff, :_seq_1], :O_RANGE => [:diff, :_seq_1], :RANGE => [:diff, :_seq_1], :STRING1 => [:diff, :_seq_1], :STRING2 => [:diff, :_seq_1], :SYMBOL => [:diff, :_seq_1], }, :_seq_1 => { "(" => [:_seq_2], ")" => [], "@pass" => [], "@terminals" => [], :HEX => [:_seq_2], :LHS => [], :O_RANGE => [:_seq_2], :RANGE => [:_seq_2], :STRING1 => [:_seq_2], :STRING2 => [:_seq_2], :SYMBOL => [:_seq_2], "|" => [], }, :_seq_2 => { "(" => [:diff, :_seq_1], :HEX => [:diff, :_seq_1], :O_RANGE => [:diff, :_seq_1], :RANGE => [:diff, :_seq_1], :STRING1 => [:diff, :_seq_1], :STRING2 => [:diff, :_seq_1], :SYMBOL => [:diff, :_seq_1], }, }.freeze TERMINALS = [ "(", ")", "-", "@pass", "@terminals", :HEX, :LHS, :O_RANGE, :POSTFIX, :RANGE, :STRING1, :STRING2, :SYMBOL, "|" ].freeze FIRST = { :alt => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_alt_1 => [ :_eps, "|"], :_alt_2 => [ "|"], :_alt_3 => [ "|"], :_alt_4 => [ :_eps, "|"], :_alt_5 => [ :_eps, "|"], :_alt_6 => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :declaration => [ "@terminals", "@pass"], :diff => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_diff_1 => [ :_eps, "-"], :_diff_2 => [ "-"], :_diff_3 => [ "-", :_eps], :_diff_4 => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :ebnf => [ :_eps, "@terminals", :LHS, "@pass"], :_ebnf_1 => [ "@terminals", :LHS, "@pass"], :_ebnf_2 => [ "@terminals", :LHS, "@pass"], :_ebnf_3 => [ :_eps, "@terminals", :LHS, "@pass"], :_empty => [ :_eps], :expression => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :pass => [ "@pass"], :_pass_1 => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, 
"("], :postfix => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_postfix_1 => [ :POSTFIX, :_eps], :_postfix_2 => [ :POSTFIX, :_eps], :primary => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_primary_1 => [ "("], :_primary_2 => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_primary_3 => [ ")"], :rule => [ :LHS], :_rule_1 => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :seq => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_seq_1 => [ :_eps, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_seq_2 => [ :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_seq_3 => [ :_eps, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], :_seq_4 => [ :_eps, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "("], }.freeze FOLLOW = { :alt => [ ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_1 => [ ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_2 => [ "|", ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_3 => [ ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_4 => [ ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_5 => [ ")", :_eof, "@terminals", :LHS, "@pass"], :_alt_6 => [ "|", ")", :_eof, "@terminals", :LHS, "@pass"], :declaration => [ :_eof, "@terminals", :LHS, "@pass"], :diff => [ ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_diff_1 => [ ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_diff_2 => [ ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_diff_3 => [ ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_diff_4 => [ ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :ebnf => [ :_eof], :_ebnf_1 => [ :_eof, "@terminals", :LHS, "@pass"], :_ebnf_2 => [ :_eof], :_ebnf_3 => [ :_eof], :expression 
=> [ ")", :_eof, "@terminals", :LHS, "@pass"], :pass => [ :_eof, "@terminals", :LHS, "@pass"], :_pass_1 => [ :_eof, "@terminals", :LHS, "@pass"], :postfix => [ "-", ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_postfix_1 => [ "-", ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_postfix_2 => [ "-", ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :primary => [ :POSTFIX, "-", ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_primary_1 => [ :POSTFIX, "-", ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_primary_2 => [ :POSTFIX, "-", ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :_primary_3 => [ :POSTFIX, "-", ")", "|", :_eof, :HEX, :SYMBOL, :O_RANGE, :RANGE, :STRING1, :STRING2, "@terminals", :LHS, "(", "@pass"], :rule => [ :_eof, "@terminals", :LHS, "@pass"], :_rule_1 => [ :_eof, "@terminals", :LHS, "@pass"], :seq => [ ")", "|", :_eof, "@terminals", :LHS, "@pass"], :_seq_1 => [ ")", "|", :_eof, "@terminals", :LHS, "@pass"], :_seq_2 => [ ")", "|", :_eof, "@terminals", :LHS, "@pass"], :_seq_3 => [ ")", "|", :_eof, "@terminals", :LHS, "@pass"], :_seq_4 => [ ")", "|", :_eof, "@terminals", :LHS, "@pass"], }.freeze CLEANUP = { :_alt_1 => :star, :_alt_3 => :merge, :_diff_1 => :opt, :ebnf => :star, :_ebnf_2 => :merge, :_postfix_1 => :opt, :seq => :plus, :_seq_1 => :star, :_seq_2 => :merge, }.freeze PASS = [ :PASS ].freeze end
ruby
Unlicense
2d26292c12187028cb7a9ac1bdd7a03942d53753
2026-01-04T17:50:30.524968Z
false
tknerr/bills-kitchen
https://github.com/tknerr/bills-kitchen/blob/561d96d2aa4879b983a33554c3387d3e42436be9/files/tools/vagrant/HashiCorp/Vagrant/embedded/gems/gems/vagrant-1.7.4/plugins/providers/docker/provider.rb
files/tools/vagrant/HashiCorp/Vagrant/embedded/gems/gems/vagrant-1.7.4/plugins/providers/docker/provider.rb
require "digest/md5" require "fileutils" require "thread" require "log4r" require "vagrant/util/silence_warnings" module VagrantPlugins module DockerProvider class Provider < Vagrant.plugin("2", :provider) @@host_vm_mutex = Mutex.new def initialize(machine) @logger = Log4r::Logger.new("vagrant::provider::docker") @machine = machine if host_vm? # We need to use a special communicator that proxies our # SSH requests over our host VM to the container itself. @machine.config.vm.communicator = :docker_hostvm end end # @see Vagrant::Plugin::V2::Provider#action def action(name) action_method = "action_#{name}" return Action.send(action_method) if Action.respond_to?(action_method) nil end # Returns the driver instance for this provider. def driver return @driver if @driver @driver = Driver.new # If we are running on a host machine, then we set the executor # to execute remotely. if host_vm? @driver.executor = Executor::Vagrant.new(host_vm) end @driver end # This returns the {Vagrant::Machine} that is our host machine. # It does not perform any action on the machine or verify it is # running. # # @return [Vagrant::Machine] def host_vm return @host_vm if @host_vm vf_path = @machine.provider_config.vagrant_vagrantfile host_machine_name = @machine.provider_config.vagrant_machine || :default if !vf_path # We don't have a Vagrantfile path set, so we're going to use # the default but we need to copy it into the data dir so that # we don't write into our installation dir (we can't). 
default_path = File.expand_path("../hostmachine/Vagrantfile", __FILE__) vf_path = @machine.env.data_dir.join("docker-host", "Vagrantfile") begin @machine.env.lock("docker-provider-hostvm") do vf_path.dirname.mkpath FileUtils.cp(default_path, vf_path) end rescue Vagrant::Errors::EnvironmentLockedError # Lock contention, just retry retry end # Set the machine name since we hardcode that for the default host_machine_name = :default end # Expand it so that the home directories and so on get processed # properly. vf_path = File.expand_path(vf_path, @machine.env.root_path) vf_file = File.basename(vf_path) vf_path = File.dirname(vf_path) # Create the env to manage this machine @host_vm = Vagrant::Util::SilenceWarnings.silence! do host_env = Vagrant::Environment.new( cwd: vf_path, home_path: @machine.env.home_path, ui_class: @machine.env.ui_class, vagrantfile_name: vf_file, ) # If there is no root path, then the Vagrantfile wasn't found # and it is an error... raise Errors::VagrantfileNotFound if !host_env.root_path host_env.machine( host_machine_name, host_env.default_provider( exclude: [:docker], force_default: false, )) end @host_vm end # This acquires a lock on the host VM. def host_vm_lock hash = Digest::MD5.hexdigest(host_vm.data_dir.to_s) # We do a process-level mutex on the outside, since we can # wait for that a short amount of time. Then, we do a process lock # on the inside, which will raise an exception if locked. host_vm_mutex.synchronize do @machine.env.lock(hash) do return yield end end end # This is a process-local mutex that can be used by parallel # providers to lock the host VM access. def host_vm_mutex @@host_vm_mutex end # This says whether or not Docker will be running within a VM # rather than directly on our system. Docker needs to run in a VM # when we're not on Linux, or not on a Linux that supports Docker. def host_vm? 
if ENV['VAGRANT_DOCKER_REMOTE_HOST_PATCH'] == "1" false else @machine.provider_config.force_host_vm || !Vagrant::Util::Platform.linux? end end # Returns the real forwarded SSH port on the host. If no port forwarding # for "22/tcp" is found we raise an exception def forwarded_ssh_host_port network = driver.inspect_container(@machine.id)['NetworkSettings'] forwarded_ssh_ports = network['Ports']['22/tcp'] if forwarded_ssh_ports.nil? || forwarded_ssh_ports.empty? raise "ssh port not forwarded from container!" end # return the first forwarded host port for 22/tcp we find forwarded_ssh_ports[0]['HostPort'] end # Returns the remote docker host by parsing the `DOCKER_HOST` env var def remote_docker_host docker_host_uri = ENV.fetch('DOCKER_HOST', 'tcp://192.168.59.103:2376') docker_host = URI.parse(docker_host_uri).host docker_host end # Returns the SSH info for accessing the Container. def ssh_info # If the container isn't running, we can't SSH into it return nil if state.id != :running network = driver.inspect_container(@machine.id)['NetworkSettings'] ip = network['IPAddress'] # If we were not able to identify the container's IP, we return nil # here and we let Vagrant core deal with it ;) return nil if !ip if ENV['VAGRANT_DOCKER_REMOTE_HOST_PATCH'] == "1" { host: remote_docker_host, port: forwarded_ssh_host_port } else { host: ip, port: @machine.config.ssh.guest_port } end end def state state_id = nil state_id = :not_created if !@machine.id begin state_id = :host_state_unknown if !state_id && \ host_vm? && !host_vm.communicate.ready? rescue Errors::VagrantfileNotFound state_id = :host_state_unknown end state_id = :not_created if !state_id && \ (!@machine.id || !driver.created?(@machine.id)) state_id = driver.state(@machine.id) if @machine.id && !state_id state_id = :unknown if !state_id # This is a special pseudo-state so that we don't set the # NOT_CREATED_ID while we're setting up the machine. This avoids # clearing the data dir. 
state_id = :preparing if @machine.id == "preparing" short = state_id.to_s.gsub("_", " ") long = I18n.t("docker_provider.status.#{state_id}") # If we're not created, then specify the special ID flag if state_id == :not_created state_id = Vagrant::MachineState::NOT_CREATED_ID end Vagrant::MachineState.new(state_id, short, long) end def to_s id = @machine.id ? @machine.id : "new container" "Docker (#{id})" end end end end
ruby
MIT
561d96d2aa4879b983a33554c3387d3e42436be9
2026-01-04T17:50:23.228346Z
false
tknerr/bills-kitchen
https://github.com/tknerr/bills-kitchen/blob/561d96d2aa4879b983a33554c3387d3e42436be9/files/tools/vagrant/HashiCorp/Vagrant/embedded/gems/gems/vagrant-1.7.4/plugins/providers/docker/synced_folder.rb
files/tools/vagrant/HashiCorp/Vagrant/embedded/gems/gems/vagrant-1.7.4/plugins/providers/docker/synced_folder.rb
module VagrantPlugins module DockerProvider class SyncedFolder < Vagrant.plugin("2", :synced_folder) def usable?(machine, raise_error=false) # These synced folders only work if the provider is Docker if machine.provider_name != :docker if raise_error raise Errors::SyncedFolderNonDocker, provider: machine.provider_name.to_s end return false end true end def prepare(machine, folders, _opts) folders.each do |id, data| next if data[:ignore] host_path = data[:hostpath] guest_path = data[:guestpath] if ENV['VAGRANT_DOCKER_REMOTE_HOST_PATCH'] == "1" machine.provider_config.volumes << "#{host_path.sub(/^([A-Z]):\//) {|d| "/#{$1.downcase}/"} }:#{guest_path}" else machine.provider_config.volumes << "#{host_path}:#{guest_path}" end end end end end end
ruby
MIT
561d96d2aa4879b983a33554c3387d3e42436be9
2026-01-04T17:50:23.228346Z
false
tknerr/bills-kitchen
https://github.com/tknerr/bills-kitchen/blob/561d96d2aa4879b983a33554c3387d3e42436be9/files/tools/vagrant/HashiCorp/Vagrant/embedded/gems/gems/vagrant-1.7.4/plugins/providers/docker/action/create.rb
files/tools/vagrant/HashiCorp/Vagrant/embedded/gems/gems/vagrant-1.7.4/plugins/providers/docker/action/create.rb
module VagrantPlugins module DockerProvider module Action class Create def initialize(app, env) @app = app end def call(env) @env = env @machine = env[:machine] @provider_config = @machine.provider_config @machine_config = @machine.config @driver = @machine.provider.driver params = create_params # If we're running a single command, we modify the params a bit if env[:machine_action] == :run_command # Use the command that is given to us params[:cmd] = env[:run_command] # Don't detach, we want to watch the command run params[:detach] = false # No ports should be shared to the host params[:ports] = [] # Allocate a pty if it was requested params[:pty] = true if env[:run_pty] # Remove container after execution params[:rm] = true if env[:run_rm] # Name should be unique params[:name] = "#{params[:name]}_#{Time.now.to_i}" # We link to our original container # TODO end env[:ui].output(I18n.t("docker_provider.creating")) env[:ui].detail(" Name: #{params[:name]}") env[:ui].detail(" Image: #{params[:image]}") if params[:cmd] && !params[:cmd].empty? env[:ui].detail(" Cmd: #{params[:cmd].join(" ")}") end params[:volumes].each do |volume| env[:ui].detail("Volume: #{volume}") end params[:ports].each do |pair| env[:ui].detail(" Port: #{pair}") end params[:links].each do |name, other| env[:ui].detail(" Link: #{name}:#{other}") end if env[:machine_action] != :run_command # For regular "ups" create it and get the CID cid = @driver.create(params) env[:ui].detail(" \n"+I18n.t( "docker_provider.created", id: cid[0...16])) @machine.id = cid elsif params[:detach] env[:ui].detail(" \n"+I18n.t("docker_provider.running_detached")) else ui_opts = {} # If we're running with a pty, we want the output to look as # authentic as possible. We don't prefix things and we don't # output a newline. 
if env[:run_pty] ui_opts[:prefix] = false ui_opts[:new_line] = false end # For run commands, we run it and stream back the output env[:ui].detail(" \n"+I18n.t("docker_provider.running")+"\n ") @driver.create(params, stdin: env[:run_pty]) do |type, data| env[:ui].detail(data.chomp, **ui_opts) end end @app.call(env) end def create_params container_name = @provider_config.name if !container_name container_name = "#{@env[:root_path].basename.to_s}_#{@machine.name}" container_name.gsub!(/[^-a-z0-9_]/i, "") container_name << "_#{Time.now.to_i}" end image = @env[:create_image] image ||= @provider_config.image links = [] @provider_config._links.each do |link| parts = link.split(":", 2) links << parts end { cmd: @provider_config.cmd, detach: true, env: @provider_config.env, expose: @provider_config.expose, extra_args: @provider_config.create_args, hostname: @machine_config.vm.hostname, image: image, links: links, name: container_name, ports: forwarded_ports(@provider_config.has_ssh), privileged: @provider_config.privileged, pty: false, volumes: @provider_config.volumes, } end def forwarded_ports(include_ssh=false) mappings = {} random = [] @machine.config.vm.networks.each do |type, options| next if type != :forwarded_port # Don't include SSH if we've explicitly asked not to next if options[:id] == "ssh" && !include_ssh # If the guest port is 0, put it in the random group if options[:guest] == 0 random << options[:host] next end mappings[options[:host]] = options end # Build the results result = random.map(&:to_s) result += mappings.values.map do |fp| if ENV['VAGRANT_DOCKER_REMOTE_HOST_PATCH'] == "1" fp[:host_ip] = "0.0.0.0" end protocol = "" protocol = "/udp" if fp[:protocol].to_s == "udp" host_ip = "" host_ip = "#{fp[:host_ip]}:" if fp[:host_ip] "#{host_ip}#{fp[:host]}:#{fp[:guest]}#{protocol}" end.compact result end end end end end
ruby
MIT
561d96d2aa4879b983a33554c3387d3e42436be9
2026-01-04T17:50:23.228346Z
false
tknerr/bills-kitchen
https://github.com/tknerr/bills-kitchen/blob/561d96d2aa4879b983a33554c3387d3e42436be9/files/tools/vagrant/HashiCorp/Vagrant/embedded/gems/gems/vagrant-1.7.4/plugins/providers/docker/action/host_machine_port_checker.rb
files/tools/vagrant/HashiCorp/Vagrant/embedded/gems/gems/vagrant-1.7.4/plugins/providers/docker/action/host_machine_port_checker.rb
require "log4r" module VagrantPlugins module DockerProvider module Action # This sets up the middleware env var to check for ports in use. class HostMachinePortChecker def initialize(app, env) @app = app @logger = Log4r::Logger.new("vagrant::docker::hostmachineportchecker") end def call(env) if ENV['VAGRANT_DOCKER_REMOTE_HOST_PATCH'] != "1" return @app.call(env) if !env[:machine].provider.host_vm? end @machine = env[:machine] env[:port_collision_port_check] = method(:port_check) @app.call(env) end protected def port_check(port) if ENV['VAGRANT_DOCKER_REMOTE_HOST_PATCH'] == "1" `docker ps`.lines.any? { |l| l.include? "0.0.0.0:#{port}->22/tcp" } else host_machine = @machine.provider.host_vm host_machine.guest.capability(:port_open_check, port) end end end end end end
ruby
MIT
561d96d2aa4879b983a33554c3387d3e42436be9
2026-01-04T17:50:23.228346Z
false
tknerr/bills-kitchen
https://github.com/tknerr/bills-kitchen/blob/561d96d2aa4879b983a33554c3387d3e42436be9/spec/helpers.rb
spec/helpers.rb
BUILD_DIR=File.expand_path('./target/build') CHEFDK_RUBY = "#{BUILD_DIR}/tools/chefdk/embedded" CHEFDK_HOME = "#{BUILD_DIR}/home/.chefdk" VAGRANT_RUBY = "#{BUILD_DIR}/tools/vagrant/HashiCorp/Vagrant/embedded" VAGRANT_HOME = "#{BUILD_DIR}/home/.vagrant.d" # enable :should syntax for rspec 3 RSpec.configure do |config| config.expect_with :rspec do |c| c.syntax = [:should, :expect] end end module Helpers # sets the environment via set-env.bat before running the command # and returns whatever the cmd writes (captures both stdout and stderr) def run_cmd(cmd) `"#{BUILD_DIR}/set-env.bat" >NUL && #{cmd} 2>&1` end def run_cmd_no_redirect(cmd) `"#{BUILD_DIR}/set-env.bat" >NUL && #{cmd}` end # similar to #run_cmd, but runs quietly and returns only the exit code def system_cmd(cmd) system "\"#{BUILD_DIR}/set-env.bat\" >NUL && #{cmd} >NUL" end # runs #system_cmd and checks for success (i.e. exit status 0) def cmd_succeeds(cmd) system_cmd(cmd).should be true end # converts the path to using backslashes def convert_slashes(path) path.gsub('/', '\\').gsub('\\', '\\\\\\\\') #eek end # checks if the given line is contained in the environment def env_match(line) run_cmd("set").should match(/^#{convert_slashes(line)}$/) end # checks if the given gem is installed at version in the CHEFDK_RUBY def gem_installed(name, version) run_cmd("#{CHEFDK_RUBY}/bin/gem list").should match("#{name} \\(#{version}\\)") end # checks if the given gem is installed at version def knife_plugin_installed(name, version) gem_installed name, version end # checks if the given vagrant plugin is installed at version def vagrant_plugin_installed(name, version) run_cmd("vagrant plugin list").should match("#{name} \\(#{version}\\)") end # checks if the given atom plugin is installed at version def atom_plugin_installed(name, version = "") run_cmd_no_redirect("apm list").should match("#{name}@#{version}") end end
ruby
MIT
561d96d2aa4879b983a33554c3387d3e42436be9
2026-01-04T17:50:23.228346Z
false
tknerr/bills-kitchen
https://github.com/tknerr/bills-kitchen/blob/561d96d2aa4879b983a33554c3387d3e42436be9/spec/integration/bills_kitchen_spec.rb
spec/integration/bills_kitchen_spec.rb
require_relative '../helpers'

# Acceptance spec for the assembled "bills kitchen" devpack (mounted as W:/,
# referenced here as BUILD_DIR). It shells out to the bundled tools via the
# Helpers mixin and verifies pinned tool versions, environment variables,
# doskey aliases and the ruby/vagrant/atom plugin installations.
#
# FIX(review): the "does not have any binaries ..." example below collected
# the glob result but never asserted on it, so it could not fail; it now
# requires the gemdir bin directory to be empty.
describe "bills kitchen" do
  include Helpers

  describe "tools" do

    it "installs ChefDK 0.13.21" do
      run_cmd("chef -v").should match('Chef Development Kit Version: 0.13.21')
    end

    it "installs Vagrant 1.8.1" do
      run_cmd("vagrant -v").should match('1.8.1')
    end

    it "installs Terraform 0.6.16" do
      run_cmd("terraform --version").should match('0.6.16')
    end

    it "installs Packer 0.10.1" do
      run_cmd("packer --version").should match('0.10.1')
    end

    it "installs Consul 0.6.4" do
      run_cmd("consul --version").should match('0.6.4')
    end

    it "installs ssh.exe" do
      run_cmd("ssh -V").should match('OpenSSH_6.7p1, OpenSSL 1.0.1i 6 Aug 2014')
    end

    it "installs rsync.exe" do
      run_cmd("rsync --version").should match('rsync version 3.1.1')
    end

    it "installs Git 2.8.2" do
      run_cmd("git --version").should match('git version 2.8.2')
    end

    it "installs kdiff3" do
      # kdiff3 is probed by running a trivial auto-merge and checking that the
      # output file appears; the marker file is cleaned up afterwards.
      marker_file = "#{BUILD_DIR}/merged.md"
      begin
        run_cmd("kdiff3 README.md README.md --auto -cs LineEndStyle=0 -o #{marker_file}")
        File.exist?(marker_file).should be true
      ensure
        File.delete(marker_file) if File.exist?(marker_file)
      end
    end

    it "installs clink 0.4.4" do
      run_cmd("#{BUILD_DIR}/tools/clink/clink.bat version").should match('Clink v0.4.4')
    end

    it "installs atom 1.7.3" do
      # see https://github.com/atom/atom-shell/issues/683
      # so we 1) ensure the atom.cmd is on the PATH and 2) it's the right version
      cmd_succeeds "#{BUILD_DIR}/tools/atom/Atom/resources/cli/atom.cmd -v"
      cmd_succeeds "grep 'Package: atom@1.7.3' #{BUILD_DIR}/tools/atom/Atom/resources/LICENSE.md"
    end

    it "installs apm 1.9.2" do
      run_cmd("#{BUILD_DIR}/tools/atom/Atom/resources/app/apm/bin/apm.cmd -v").should match('1.9.2')
    end

    it "installs docker 1.7.1" do
      run_cmd("docker -v").should match('Docker version 1.7.1')
    end

    it "installs boot2docker-cli 1.7.1" do
      run_cmd("boot2docker version").should match('Boot2Docker-cli version: v1.7.1')
    end
  end

  describe "environment" do

    it "sets BK_ROOT to W:/" do
      env_match "BK_ROOT=#{BUILD_DIR}/"
    end

    it "sets HOME to W:/home" do
      env_match "HOME=#{BUILD_DIR}/home"
    end

    it "sets VAGRANT_HOME to W:/home/.vagrant.d" do
      env_match "VAGRANT_HOME=#{BUILD_DIR}/home/.vagrant.d"
    end

    it "sets CHEFDK_HOME to W:/home/.chefdk" do
      env_match "CHEFDK_HOME=#{BUILD_DIR}/home/.chefdk"
    end

    it "sets VBOX_USER_HOME to %USERPROFILE%" do
      env_match "VBOX_USER_HOME=#{ENV['USERPROFILE']}"
    end

    it "sets TERM=cygwin" do
      env_match "TERM=cygwin"
    end

    it "sets ANSICON=true" do
      env_match "ANSICON=true"
    end

    it "sets SSL_CERT_FILE to W:/home/cacert.pem" do
      env_match "SSL_CERT_FILE=#{BUILD_DIR}/home/cacert.pem"
    end

    it "sets BOOT2DOCKER_DIR to W:/home/.boot2docker" do
      env_match "BOOT2DOCKER_DIR=#{BUILD_DIR}/home/.boot2docker"
    end
  end

  describe "aliases" do

    it "aliases `bundle exec` to `be`" do
      run_cmd("doskey /macros").should match('be=bundle exec $*')
    end

    it "aliases `atom` to `vi`" do
      run_cmd("doskey /macros").should match('vi=atom.cmd $*')
    end

    it "aliases `boot2docker` to `b2d`" do
      run_cmd("doskey /macros").should match('b2d=boot2docker $*')
    end
  end

  describe "ruby installations" do

    describe "chefdk as the primary ruby" do

      it "provides the default `ruby` command" do
        run_cmd("which ruby").should match(convert_slashes("#{CHEFDK_RUBY}/bin/ruby.EXE"))
      end

      it "provides the default `gem` command" do
        run_cmd("which gem").should match(convert_slashes("#{CHEFDK_RUBY}/bin/gem"))
      end

      it "does have it's environment properly set" do
        chef_env = run_cmd("chef env")
        chef_env.should match("ChefDK Home: #{convert_slashes(BUILD_DIR + '/home/.chefdk')}")
        chef_env.should match("ChefDK Install Directory: #{BUILD_DIR}/tools/chefdk")
        chef_env.should match("Ruby Executable: #{BUILD_DIR}/tools/chefdk/embedded/bin/ruby.exe")
        chef_env.should match("GEM ROOT: #{BUILD_DIR}/tools/chefdk/embedded/lib/ruby/gems/2.1.0")
        chef_env.should match("GEM HOME: #{convert_slashes(BUILD_DIR + '/home/.chefdk')}/gem/ruby/2.1.0")
      end
    end

    describe "chefdk ruby" do

      it "installs Chef 12.9.41" do
        run_cmd("knife -v").should match('Chef: 12.9.41')
      end

      it "has RubyGems > 2.4.1 installed (fixes opscode/chef-dk#242)" do
        run_cmd("gem -v").should match('2.6.3')
      end

      it "has bundler >= 1.10.6 installed (fixes chef/omnibus-chef#464)" do
        gem_installed "bundler", "1.11.2"
      end

      it "uses $HOME/.chefdk as the gemdir" do
        run_cmd("#{CHEFDK_RUBY}/bin/gem environment gemdir").should match("#{CHEFDK_HOME}/gem/ruby/2.1.0")
      end

      it "does not have any binaries in the $HOME/.chefdk gemdir preinstalled when we ship it" do
        # because since RubyGems > 2.4.1 the ruby path in here is absolute!
        gem_binaries = Dir.glob("#{CHEFDK_HOME}/gem/ruby/2.1.0/bin/*")
        # FIX(review): this example previously never asserted on the glob
        # result and thus could not fail.
        gem_binaries.should be_empty
      end

      it "has ChefDK verified to work via `chef verify`" do
        # XXX: skip verification of chef-provisioning until chef/chef-dk#470 is fixed
        components = %w{berkshelf tk-policyfile-provisioner test-kitchen chef-client chef-dk chefspec generated-cookbooks-pass-chefspec rubocop fauxhai knife-spork kitchen-vagrant package\ installation openssl inspec}
        cmd_succeeds "chef verify #{components.join(' ')}"
      end

      it "has 'knife-audit (0.2.0)' plugin installed" do
        knife_plugin_installed "knife-audit", "0.2.0"
      end

      it "has 'knife-server (1.1.0)' plugin installed" do
        knife_plugin_installed "knife-server", "1.1.0"
      end
    end

    describe "vagrant ruby" do

      it "has 'vagrant-toplevel-cookbooks (0.2.4)' plugin installed" do
        vagrant_plugin_installed "vagrant-toplevel-cookbooks", "0.2.4"
      end

      it "has 'vagrant-omnibus (1.4.1)' plugin installed" do
        vagrant_plugin_installed "vagrant-omnibus", "1.4.1"
      end

      it "has 'vagrant-cachier (1.2.1)' plugin installed" do
        vagrant_plugin_installed "vagrant-cachier", "1.2.1"
      end

      it "has 'vagrant-proxyconf (1.5.2)' plugin installed" do
        vagrant_plugin_installed "vagrant-proxyconf", "1.5.2"
      end

      it "has 'vagrant-berkshelf (4.1.0)' plugin installed" do
        vagrant_plugin_installed "vagrant-berkshelf", "4.1.0"
      end

      it "has 'vagrant-winrm (0.7.0)' plugin installed" do
        vagrant_plugin_installed "vagrant-winrm", "0.7.0"
      end

      it "installed vagrant plugins $HOME/.vagrant.d" do
        Dir.entries("#{VAGRANT_HOME}/gems/gems").should include('vagrant-toplevel-cookbooks-0.2.4')
      end
    end

    describe "atom plugins" do

      it "has 'atom-beautify' plugin installed" do
        atom_plugin_installed "atom-beautify"
      end

      it "has 'minimap' plugin installed" do
        atom_plugin_installed "minimap"
      end

      it "has 'line-ending-converter' plugin installed" do
        atom_plugin_installed "line-ending-converter"
      end

      it "has 'language-chef' plugin installed" do
        atom_plugin_installed "language-chef"
      end

      it "has 'language-batchfile' plugin installed" do
        atom_plugin_installed "language-batchfile"
      end

      it "has 'autocomplete-plus' plugin installed" do
        atom_plugin_installed "autocomplete-plus"
      end

      it "has 'autocomplete-snippets' plugin installed" do
        atom_plugin_installed "autocomplete-snippets"
      end
    end
  end
end
ruby
MIT
561d96d2aa4879b983a33554c3387d3e42436be9
2026-01-04T17:50:23.228346Z
false
tknerr/bills-kitchen
https://github.com/tknerr/bills-kitchen/blob/561d96d2aa4879b983a33554c3387d3e42436be9/doc/markit.rb
doc/markit.rb
#!/usr/bin/env ruby
# based on https://gist.github.com/1919161

require 'rubygems'
require 'redcarpet'
require 'albino'
require 'cgi' # for CGI.escapeHTML in the un-highlighted code fallback

# Redcarpet HTML renderer that adds:
#  * a full HTML5 document skeleton (doc_header/doc_footer),
#  * <section> wrapping plus an optional table of contents built from headers,
#  * syntax highlighting of fenced code blocks via Albino/pygments.
class SyntaxRenderer < Redcarpet::Render::HTML
  # options:
  #   :style - URL of the stylesheet linked in the document head
  #   :toc   - when truthy, emit a <nav id="toc"> into the header
  def initialize(options)
    super options
    @style = options[:style]
    @otoc = options[:toc]
    @toc = ""      # accumulated TOC markup, filled while headers are rendered
    @rlevel = nil  # level of the most recently seen header (nil until the first)
  end

  # Document prologue. --TITLE and --TOC are placeholders substituted in
  # postprocess, because title and TOC are only known after the body renders.
  def doc_header
    <<-HTML.gsub /^\s+/, ""
      <!DOCTYPE HTML>
      <html lang="en-US">
      <head>
      <meta charset="UTF-8">
      --TITLE
      <link rel="stylesheet" href="#{@style}">
      </head>
      <body>
      <header>
      --TOC
      </header>
    HTML
  end

  def doc_footer
    <<-HTML.gsub /^\s+/, ""
      </body>
      </html>
    HTML
  end

  # Called by Redcarpet for every markdown header. The first header becomes
  # the document title; subsequent headers open nested <section> elements and
  # extend the table of contents.
  def header(text, level)
    header = ""
    if (@rlevel == nil)
      # very first header: record it as the page title
      @title = text
      @rlevel = level
      header << "<section id=\"title\">\n"
      header << "<h1>#{text}</h1>\n"
    else
      if (@rlevel - level >= 0)
        # same or shallower level: close the open sections and TOC lists
        header << "</section>\n" * (@rlevel - level + 1)
        @toc << "</li>\n" + "</ul>\n</li>\n" * (@rlevel - level) unless @toc.empty?
      else
        # deeper level: open nested TOC lists
        @toc << "<ul>\n" + "<li>\n<ul>\n" * (level - @rlevel - 1)
      end
      id = text.downcase.gsub(/\ /, '-')
      header << "<section id=\"#{id}\">\n"
      header << "<h#{level}>#{text}</h#{level}>\n"
      @toc << "<li>\n"
      @toc << "<a href=\"\##{id}\">#{text}</a>"
      @rlevel = level
    end
    header
  end

  # Highlight fenced code blocks with Albino (pygments) when a language is
  # given; otherwise fall back to a plain <pre><code> block.
  # FIX(review): the fallback previously interpolated the raw code, so any
  # "<", ">" or "&" in a code block produced broken HTML; escape it.
  def block_code(code, language)
    if language && !language.empty?
      Albino.colorize(code, language)
    else
      "<pre><code>#{CGI.escapeHTML(code)}</code></pre>"
    end
  end

  def preprocess(full_document)
    @text = full_document
  end

  # Substitute the placeholders emitted by doc_header, now that the title
  # and TOC have been collected from the rendered headers.
  def postprocess(full_document)
    full_document.gsub(/--TITLE/, self.title)
                 .gsub(/--TOC/, self.toc)
  end

  def toc
    @otoc ? "<nav id=\"toc\"><ul>\n#{@toc}\n</ul></nav>" : ""
  end

  def title
    "<title>#{@title}</title>"
  end
end

# Facade: renders a markdown string into a complete HTML document.
class MarkIt
  def self.to_html(text)
    renderer = SyntaxRenderer.new(
      :style => "http://tknerr.github.com/bills-kitchen/stylesheets/docs_stylesheet.css",
      :toc => false,
      :hard_wrap => true,
      :xhtml => true
    )
    markdown = Redcarpet::Markdown.new(renderer,
      :fenced_code_blocks => true,
      :no_intra_emphasis => true,
      :tables => true,
      :autolink => true,
      :strikethrough => true,
      :space_after_headers => true
    )
    markdown.render(text)
  end
end
ruby
MIT
561d96d2aa4879b983a33554c3387d3e42436be9
2026-01-04T17:50:23.228346Z
false
esminc/tapp
https://github.com/esminc/tapp/blob/ddc458011a82db33617bed44c6a64838f8581e13/spec/spec_helper.rb
spec/spec_helper.rb
require 'tapp' require 'tapp/turnip' RSpec.configure do |config| config.include Tapp::Turnip::Steps config.expect_with :rspec do |expects| expects.syntax = :should end config.before do Tapp.config.reset end end
ruby
MIT
ddc458011a82db33617bed44c6a64838f8581e13
2026-01-04T17:50:46.572797Z
false
esminc/tapp
https://github.com/esminc/tapp/blob/ddc458011a82db33617bed44c6a64838f8581e13/lib/tapp.rb
lib/tapp.rb
require 'tapp/configuration'
require 'tapp/deprecated'
require 'tapp/object_extension'
require 'tapp/printer/pretty_print'
require 'tapp/printer/puts'

# Top-level namespace: exposes the process-wide configuration object and
# (via ObjectExtension below) mixes #tapp/#taputs into every object.
module Tapp
  extend Deprecated

  class << self
    # Lazily built, process-wide configuration instance.
    def config
      @config ||= Tapp::Configuration.new
    end

    # Yields the configuration for block-style setup; returns the
    # configuration object itself.
    def configure
      config.tap { |conf| yield(conf) }
    end
  end
end

Object.__send__ :include, Tapp::ObjectExtension
ruby
MIT
ddc458011a82db33617bed44c6a64838f8581e13
2026-01-04T17:50:46.572797Z
false
esminc/tapp
https://github.com/esminc/tapp/blob/ddc458011a82db33617bed44c6a64838f8581e13/lib/tapp/deprecated.rb
lib/tapp/deprecated.rb
module Tapp
  # Backwards-compatible shims for the old Tapp.verbose API. The module is
  # extended into Tapp itself, so `config` resolves to Tapp.config.
  module Deprecated
    # Deprecated reader; forwards to config.report_caller.
    def verbose
      warn 'DEPRECATION WARNING: Tapp.verbose is deprecated. Use Tapp.config.report_caller instead.'
      config.report_caller
    end

    # Deprecated writer; forwards to config.report_caller=.
    def verbose=(bool)
      warn 'DEPRECATION WARNING: Tapp.verbose= is deprecated. Use Tapp.config.report_caller= instead.'
      config.report_caller = bool
    end
  end
end
ruby
MIT
ddc458011a82db33617bed44c6a64838f8581e13
2026-01-04T17:50:46.572797Z
false
esminc/tapp
https://github.com/esminc/tapp/blob/ddc458011a82db33617bed44c6a64838f8581e13/lib/tapp/turnip.rb
lib/tapp/turnip.rb
module Tapp
  module Turnip
    # Turnip step definitions driving tapp's own feature specs: stash a code
    # snippet, execute it with $stdout captured, then compare the captured
    # (de-colorized) output against the expected text.
    module Steps
      step 'I have the following code:' do |code|
        @code = code
      end

      step 'a file named :filename with:' do |filename, code|
        @filename, @code = filename, code
      end

      step 'Ruby it' do
        # Capture everything the snippet prints by swapping $stdout for an
        # in-memory buffer (StringIO is presumably required elsewhere - TODO confirm).
        stdout = StringIO.new
        $stdout = stdout
        begin
          if @filename
            # Pass the filename and line number to eval so any
            # caller-reporting output shows the expected source location.
            eval @code, binding, @filename, 1
          else
            eval @code
          end
        ensure
          # NOTE(review): restores the STDOUT constant, which assumes $stdout
          # was not already redirected before this step ran - confirm.
          $stdout = STDOUT
        end
        # Strip ANSI escape sequences (reset codes etc.) and the trailing newline.
        @output = stdout.string.gsub(/\e\[0.*?m/, '').chop
      end

      step 'I should see:' do |output|
        @output.should == output
      end
    end
  end
end
ruby
MIT
ddc458011a82db33617bed44c6a64838f8581e13
2026-01-04T17:50:46.572797Z
false
esminc/tapp
https://github.com/esminc/tapp/blob/ddc458011a82db33617bed44c6a64838f8581e13/lib/tapp/command.rb
lib/tapp/command.rb
require 'thor'

module Tapp
  # Thor-based command line interface for the tapp gem.
  class Command < Thor
    desc 'grep [<git-grep-options>]', 'Print lines using tapp'
    # Shells out to `git grep` for tapp/taputs/taap call sites, forwarding any
    # additional CLI arguments, and hides hits from Gemfile/Gemfile.lock.
    # NOTE: arguments are interpolated into a shell command line unquoted, so
    # they are interpreted by the shell (intended here as a git-grep passthrough).
    def grep(*)
      grep_args = %w[--word-regexp -e tapp -e taputs -e taap] + ARGV.drop(1)
      command_line = (%w[git grep] + grep_args).join(' ')
      puts `#{command_line}`.gsub(/^Gemfile(\.lock)?:.+?\n/, '')
    end
  end
end
ruby
MIT
ddc458011a82db33617bed44c6a64838f8581e13
2026-01-04T17:50:46.572797Z
false
esminc/tapp
https://github.com/esminc/tapp/blob/ddc458011a82db33617bed44c6a64838f8581e13/lib/tapp/version.rb
lib/tapp/version.rb
module Tapp
  # Gem version, following Semantic Versioning. Frozen so the constant
  # cannot be mutated in place.
  VERSION = '1.5.1'.freeze
end
ruby
MIT
ddc458011a82db33617bed44c6a64838f8581e13
2026-01-04T17:50:46.572797Z
false
esminc/tapp
https://github.com/esminc/tapp/blob/ddc458011a82db33617bed44c6a64838f8581e13/lib/tapp/object_extension.rb
lib/tapp/object_extension.rb
require 'tapp/printer'
require 'tapp/util'

module Tapp
  # Mixed into Object: adds #tapp and #taputs, tap-like helpers that print the
  # receiver (or the value a given block maps it to) and return the receiver.
  module ObjectExtension
    # Prints self through the printer selected by `printer` (defaults to the
    # configured one) and returns self, so it can be chained mid-expression.
    # When a block is given, the block's result is printed instead of self.
    def tapp(printer = Tapp.config.default_printer)
      Tapp::Util.report_called if Tapp.config.report_caller
      tap do
        value = block_given? ? yield(self) : self
        Tapp::Printer.instance(printer).print(value)
      end
    end

    # Like #tapp, but always uses the plain :puts printer.
    def taputs(&block)
      tapp(:puts, &block)
    end
  end
end
ruby
MIT
ddc458011a82db33617bed44c6a64838f8581e13
2026-01-04T17:50:46.572797Z
false