CombinedText stringlengths 4 3.42M |
|---|
# == Issuable concern
#
# Contains common functionality shared between Issues and MergeRequests
#
# Used by Issue, MergeRequest
#
module Issuable
  extend ActiveSupport::Concern
  include Mentionable

  included do
    belongs_to :author, class_name: "User"
    belongs_to :assignee, class_name: "User"
    belongs_to :milestone
    has_many :notes, as: :noteable, dependent: :destroy

    validates :author, presence: true
    validates :title, presence: true, length: { within: 0..255 }

    # Query scopes shared by all issuables.
    scope :authored, ->(user) { where(author_id: user) }
    scope :assigned_to, ->(u) { where(assignee_id: u.id)}
    scope :recent, -> { order("created_at DESC") }
    scope :assigned, -> { where("assignee_id IS NOT NULL") }
    scope :unassigned, -> { where("assignee_id IS NULL") }
    scope :of_projects, ->(ids) { where(project_id: ids) }
    # State scopes rely on the state machine's with_state helper;
    # "opened" intentionally includes reopened records.
    scope :opened, -> { with_state(:opened, :reopened) }
    scope :closed, -> { with_state(:closed) }

    # Exposes author_name / author_email.
    delegate :name,
             :email,
             to: :author,
             prefix: true

    # Exposes assignee_name / assignee_email; nil-safe because an
    # issuable may be unassigned.
    delegate :name,
             :email,
             to: :assignee,
             allow_nil: true,
             prefix: true

    # Fields scanned for user mentions (see Mentionable).
    attr_mentionable :title, :description
  end

  module ClassMethods
    # Case-insensitive substring search on title.
    # NOTE(review): the query is interpolated into a LIKE pattern, so
    # '%' and '_' in user input act as wildcards — confirm acceptable.
    def search(query)
      where("LOWER(title) like :query", query: "%#{query.downcase}%")
    end

    # Maps a UI sort key to an ordered relation; unknown keys fall back
    # to newest-first.
    def sort(method)
      case method.to_s
      when 'newest' then reorder("#{table_name}.created_at DESC")
      when 'oldest' then reorder("#{table_name}.created_at ASC")
      when 'recently_updated' then reorder("#{table_name}.updated_at DESC")
      when 'last_updated' then reorder("#{table_name}.updated_at ASC")
      when 'milestone_due_soon' then joins(:milestone).reorder("milestones.due_date ASC")
      when 'milestone_due_later' then joins(:milestone).reorder("milestones.due_date DESC")
      else reorder("#{table_name}.created_at DESC")
      end
    end
  end

  # True when the record was created today (server-local date).
  def today?
    Date.today == created_at.to_date
  end

  # True when created today and never updated since creation.
  def new?
    today? && created_at == updated_at
  end

  def is_assigned?
    !!assignee_id
  end

  def is_being_reassigned?
    assignee_id_changed?
  end

  #
  # Votes
  #
  # Vote counts are derived from note content, so every call below loads
  # and scans the notes association.

  # Return the number of -1 comments (downvotes)
  def downvotes
    notes.select(&:downvote?).size
  end

  def downvotes_in_percent
    if votes_count.zero?
      0
    else
      100.0 - upvotes_in_percent
    end
  end

  # Return the number of +1 comments (upvotes)
  def upvotes
    notes.select(&:upvote?).size
  end

  def upvotes_in_percent
    if votes_count.zero?
      0
    else
      100.0 / votes_count * upvotes
    end
  end

  # Return the total number of votes
  def votes_count
    upvotes + downvotes
  end

  # Return all users participating on the discussion
  def participants
    users = []
    users << author
    users << assignee if is_assigned?
    mentions = []
    mentions << self.mentioned_users
    notes.each do |note|
      users << note.author
      mentions << note.mentioned_users
    end
    # Union the mention arrays, then de-duplicate the combined list.
    users.concat(mentions.reduce([], :|)).uniq
  end

  # Payload handed to web hooks for this record.
  def to_hook_data
    {
      object_kind: self.class.name.underscore,
      object_attributes: self.attributes
    }
  end
end
Add only open/reopen scopes to issues
Signed-off-by: Dmitriy Zaporozhets <be23d75b156792e5acab51b196a2deb155d35d6a@gmail.com>
# == Issuable concern
#
# Contains common functionality shared between Issues and MergeRequests
#
# Used by Issue, MergeRequest
#
module Issuable
  extend ActiveSupport::Concern
  include Mentionable

  included do
    belongs_to :author, class_name: "User"
    belongs_to :assignee, class_name: "User"
    belongs_to :milestone
    has_many :notes, as: :noteable, dependent: :destroy

    validates :author, presence: true
    validates :title, presence: true, length: { within: 0..255 }

    # Query scopes shared by all issuables.
    scope :authored, ->(user) { where(author_id: user) }
    scope :assigned_to, ->(u) { where(assignee_id: u.id)}
    scope :recent, -> { order("created_at DESC") }
    scope :assigned, -> { where("assignee_id IS NOT NULL") }
    scope :unassigned, -> { where("assignee_id IS NULL") }
    scope :of_projects, ->(ids) { where(project_id: ids) }
    # "opened" includes reopened records; the only_* variants select a
    # single state when the distinction matters.
    scope :opened, -> { with_state(:opened, :reopened) }
    scope :only_opened, -> { with_state(:opened) }
    scope :only_reopened, -> { with_state(:reopened) }
    scope :closed, -> { with_state(:closed) }

    # Exposes author_name / author_email.
    delegate :name,
             :email,
             to: :author,
             prefix: true

    # Exposes assignee_name / assignee_email; nil-safe because an
    # issuable may be unassigned.
    delegate :name,
             :email,
             to: :assignee,
             allow_nil: true,
             prefix: true

    # Fields scanned for user mentions (see Mentionable).
    attr_mentionable :title, :description
  end

  module ClassMethods
    # Case-insensitive substring search on title.
    # NOTE(review): the query is interpolated into a LIKE pattern, so
    # '%' and '_' in user input act as wildcards — confirm acceptable.
    def search(query)
      where("LOWER(title) like :query", query: "%#{query.downcase}%")
    end

    # Maps a UI sort key to an ordered relation; unknown keys fall back
    # to newest-first.
    def sort(method)
      case method.to_s
      when 'newest' then reorder("#{table_name}.created_at DESC")
      when 'oldest' then reorder("#{table_name}.created_at ASC")
      when 'recently_updated' then reorder("#{table_name}.updated_at DESC")
      when 'last_updated' then reorder("#{table_name}.updated_at ASC")
      when 'milestone_due_soon' then joins(:milestone).reorder("milestones.due_date ASC")
      when 'milestone_due_later' then joins(:milestone).reorder("milestones.due_date DESC")
      else reorder("#{table_name}.created_at DESC")
      end
    end
  end

  # True when the record was created today (server-local date).
  def today?
    Date.today == created_at.to_date
  end

  # True when created today and never updated since creation.
  def new?
    today? && created_at == updated_at
  end

  def is_assigned?
    !!assignee_id
  end

  def is_being_reassigned?
    assignee_id_changed?
  end

  #
  # Votes
  #
  # Vote counts are derived from note content, so every call below loads
  # and scans the notes association.

  # Return the number of -1 comments (downvotes)
  def downvotes
    notes.select(&:downvote?).size
  end

  def downvotes_in_percent
    if votes_count.zero?
      0
    else
      100.0 - upvotes_in_percent
    end
  end

  # Return the number of +1 comments (upvotes)
  def upvotes
    notes.select(&:upvote?).size
  end

  def upvotes_in_percent
    if votes_count.zero?
      0
    else
      100.0 / votes_count * upvotes
    end
  end

  # Return the total number of votes
  def votes_count
    upvotes + downvotes
  end

  # Return all users participating on the discussion
  def participants
    users = []
    users << author
    users << assignee if is_assigned?
    mentions = []
    mentions << self.mentioned_users
    notes.each do |note|
      users << note.author
      mentions << note.mentioned_users
    end
    # Union the mention arrays, then de-duplicate the combined list.
    users.concat(mentions.reduce([], :|)).uniq
  end

  # Payload handed to web hooks for this record.
  def to_hook_data
    {
      object_kind: self.class.name.underscore,
      object_attributes: self.attributes
    }
  end
end
|
# == Likeable concern
#
# Adds like/dislike associations, a liked_by scope, and counter-cache
# maintenance to a model.
#
# NOTE(review): the :liked_by scope hard-codes the music_videos table and
# the 'MusicVideo' target type, so this concern is not actually generic —
# confirm before mixing into another model.
module Likeable
  extend ActiveSupport::Concern

  included do
    belongs_to :target, polymorphic: true
    has_many :likes, -> { where(positive: true) }, dependent: :delete_all, as: :target
    has_many :likers, class_name: 'User', through: :likes, source: :user
    has_many :dislikes, -> { where(positive: false) }, class_name: 'Like', dependent: :delete_all, as: :target
    has_many :dislikers, class_name: 'User', through: :dislikes, source: :user

    # Records liked by the given user, exposing the like timestamp as
    # liked_at on each row.
    scope :liked_by, ->(user_id) do
      # Fix: guard with defined? so evaluating this scope does not raise
      # NameError in environments where the PostgreSQL adapter is never
      # loaded (the constant only exists once the pg adapter is required).
      positive_likes_string = if (
        defined?(ActiveRecord::ConnectionAdapters::PostgreSQLAdapter) &&
        ActiveRecord::Base.connection.instance_of?(ActiveRecord::ConnectionAdapters::PostgreSQLAdapter)
      )
        # PostgreSQL stores booleans natively; compare against 't'.
        "likes.positive = 't'"
      else
        # MySQL/SQLite represent booleans as 0/1.
        'likes.positive = 1'
      end
      select('music_videos.*, likes.created_at AS liked_at').joins("RIGHT JOIN likes ON #{positive_likes_string} AND likes.target_type = 'MusicVideo' AND likes.target_id = music_videos.id").
        where('likes.user_id = ? AND music_videos.id IS NOT NULL', user_id)
    end
  end

  # Persist the denormalized like/dislike counters. update_all writes
  # directly to the row, skipping callbacks and timestamp updates.
  def update_likes_counter
    self.class.where(id: self.id).update_all likes_count: self.likes.count, dislikes_count: self.dislikes.count
  end

  module ClassMethods
    # The user's Like records for the given target ids, keyed by target_id.
    def likes_or_dislikes_for(user, ids)
      user.likes_or_dislikes.for_targets(name, ids).index_by(&:target_id)
    end
  end
end
Check if PostgreSQL adapter is defined.
# == Likeable concern
#
# Adds like/dislike associations, a liked_by scope, and counter-cache
# maintenance to a model.
#
# NOTE(review): the :liked_by scope hard-codes the music_videos table and
# the 'MusicVideo' target type, so this concern is not actually generic —
# confirm before mixing into another model.
module Likeable
  extend ActiveSupport::Concern

  included do
    belongs_to :target, polymorphic: true
    has_many :likes, -> { where(positive: true) }, dependent: :delete_all, as: :target
    has_many :likers, class_name: 'User', through: :likes, source: :user
    has_many :dislikes, -> { where(positive: false) }, class_name: 'Like', dependent: :delete_all, as: :target
    has_many :dislikers, class_name: 'User', through: :dislikes, source: :user

    # Records liked by the given user, exposing the like timestamp as
    # liked_at on each row.
    scope :liked_by, ->(user_id) do
      # defined? guard keeps this from raising NameError when the
      # PostgreSQL adapter is not loaded at all.
      positive_likes_string = if (
        defined?(ActiveRecord::ConnectionAdapters::PostgreSQLAdapter) &&
        ActiveRecord::Base.connection.instance_of?(ActiveRecord::ConnectionAdapters::PostgreSQLAdapter)
      )
        # PostgreSQL stores booleans natively; compare against 't'.
        "likes.positive = 't'"
      else
        # MySQL/SQLite represent booleans as 0/1.
        'likes.positive = 1'
      end
      select('music_videos.*, likes.created_at AS liked_at').joins("RIGHT JOIN likes ON #{positive_likes_string} AND likes.target_type = 'MusicVideo' AND likes.target_id = music_videos.id").
        where('likes.user_id = ? AND music_videos.id IS NOT NULL', user_id)
    end
  end

  # Persist the denormalized like/dislike counters. update_all writes
  # directly to the row, skipping callbacks and timestamp updates.
  def update_likes_counter
    self.class.where(id: self.id).update_all likes_count: self.likes.count, dislikes_count: self.dislikes.count
  end

  module ClassMethods
    # The user's Like records for the given target ids, keyed by target_id.
    def likes_or_dislikes_for(user, ids)
      user.likes_or_dislikes.for_targets(name, ids).index_by(&:target_id)
    end
  end
end
# == Rateable concern
#
# Adds per-topic ratings, percentile reporting, and a lazily built
# topic-value => rating cache to a model.
module Rateable
  extend ActiveSupport::Concern

  included do
    has_many :topics, through: :ratings
  end

  # The top_n highest-scored ratings for this record.
  def top_ratings(top_n = 20)
    ratings.order(score: :desc).limit(top_n)
  end

  # Top ratings plus the percentile each score occupies within the
  # population of this rateable class.
  def top_ratings_and_percent(top_n = 20)
    top = top_ratings(top_n)
    # Transaction gives all the COUNT queries a consistent snapshot.
    Rating.transaction do
      # Fix: size the population from this model's class instead of the
      # hard-coded User model, so percentiles are correct for any class
      # that mixes in Rateable.
      total_count = self.class.count
      top.map do |r|
        # Number of records of this class scoring above r on the same topic.
        count = Rating.where(
          'score > ? AND rateable_type = ? AND topic_id = ?',
          r.score,
          self.class.name,
          r.topic_id,
        ).count
        percentile = 100.0 - (100.0 * count.to_f / total_count)
        {
          rating: r,
          percentile: percentile,
        }
      end
    end
  end

  # Snapshot of the ratings association used by the cache iterator below.
  def orig_rating_list
    @orig_ratings_list ||= ratings
  end

  # Cursor into orig_rating_list for the incremental cache fill.
  def iter_idx
    @iter_idx ||= 0
  end

  # Drop the memoized list and cursor so the next access reloads.
  def reset_ratings_cache
    @orig_ratings_list = nil
    @iter_idx = nil
  end

  # Ratings as simple {score:, topic:} hashes for serialization.
  def pretty_ratings
    ratings.map do |rating|
      {
        score: rating.score,
        topic: rating.topic.value,
      }
    end
  end

  # Hash mapping topic value -> Rating. Misses walk orig_rating_list
  # incrementally (caching every rating passed over) and, if the topic is
  # still absent, build and append a brand-new Rating.
  def ratings_cache(reset = false)
    if reset
      reset_ratings_cache
    end
    @ratings_cache ||= Hash.new do |h, topic_value|
      rating = nil
      while (iter_idx < orig_rating_list.size) do
        r = orig_rating_list[iter_idx]
        @iter_idx += 1
        h[r.topic.value] = r
        if topic_value == r.topic.value
          rating = r
          break
        end
      end
      if rating.nil?
        rating = Rating.new(topic: topic_value, rateable: self)
        h[topic_value] = rating
        self.ratings << rating
      end
      rating
    end
  end

  # The cached (or newly created) rating for the given topic value.
  def rating_for(topic_value)
    ratings_cache[topic_value]
  end
end
Update rateable.rb: compute rating percentiles against self.class.count instead of the hard-coded User.count, so Rateable works for any including model
# == Rateable concern
#
# Adds per-topic ratings, percentile reporting, and a lazily built
# topic-value => rating cache to a model.
module Rateable
  extend ActiveSupport::Concern

  included do
    has_many :topics, through: :ratings
  end

  # The top_n highest-scored ratings for this record.
  def top_ratings(top_n = 20)
    ratings.order(score: :desc).limit(top_n)
  end

  # Top ratings plus the percentile each score occupies within the
  # population of this rateable class.
  def top_ratings_and_percent(top_n = 20)
    top = top_ratings(top_n)
    # Transaction gives all the COUNT queries a consistent snapshot.
    Rating.transaction do
      # Population is this model's class, so percentiles work for any
      # class that mixes in Rateable.
      total_count = self.class.count
      top.map do |r|
        # Number of records of this class scoring above r on the same topic.
        count = Rating.where(
          'score > ? AND rateable_type = ? AND topic_id = ?',
          r.score,
          self.class.name,
          r.topic_id,
        ).count
        percentile = 100.0 - (100.0 * count.to_f / total_count)
        {
          rating: r,
          percentile: percentile,
        }
      end
    end
  end

  # Snapshot of the ratings association used by the cache iterator below.
  def orig_rating_list
    @orig_ratings_list ||= ratings
  end

  # Cursor into orig_rating_list for the incremental cache fill.
  def iter_idx
    @iter_idx ||= 0
  end

  # Drop the memoized list and cursor so the next access reloads.
  def reset_ratings_cache
    @orig_ratings_list = nil
    @iter_idx = nil
  end

  # Ratings as simple {score:, topic:} hashes for serialization.
  def pretty_ratings
    ratings.map do |rating|
      {
        score: rating.score,
        topic: rating.topic.value,
      }
    end
  end

  # Hash mapping topic value -> Rating. Misses walk orig_rating_list
  # incrementally (caching every rating passed over) and, if the topic is
  # still absent, build and append a brand-new Rating.
  def ratings_cache(reset = false)
    if reset
      reset_ratings_cache
    end
    @ratings_cache ||= Hash.new do |h, topic_value|
      rating = nil
      while (iter_idx < orig_rating_list.size) do
        r = orig_rating_list[iter_idx]
        @iter_idx += 1
        h[r.topic.value] = r
        if topic_value == r.topic.value
          rating = r
          break
        end
      end
      if rating.nil?
        rating = Rating.new(topic: topic_value, rateable: self)
        h[topic_value] = rating
        self.ratings << rating
      end
      rating
    end
  end

  # The cached (or newly created) rating for the given topic value.
  def rating_for(topic_value)
    ratings_cache[topic_value]
  end
end
|
# Sanitizes and transforms entry/newsletter HTML through HTML::Pipeline
# filter chains, with per-destination allowlists (web app, newsletters,
# API, Evernote export).
class ContentFormatter
  # Shared element/attribute/protocol allowlist (Sanitize-gem config shape).
  ALLOWLIST_BASE = {}.tap do |hash|
    hash[:elements] = %w[
      h1 h2 h3 h4 h5 h6 h7 h8 br b i strong em a pre code img tt div ins del sup sub
      p ol ul table thead tbody tfoot blockquote dl dt dd kbd q samp var hr ruby rt
      rp li tr td th s strike summary details figure figcaption audio video source
      small iframe
    ]
    hash[:attributes] = {
      "a" => ["href"],
      "img" => ["src", "longdesc"],
      "div" => ["itemscope", "itemtype"],
      "blockquote" => ["cite"],
      "del" => ["cite"],
      "ins" => ["cite"],
      "q" => ["cite"],
      "source" => ["src"],
      "video" => ["src", "poster", "playsinline", "loop", "muted", "controls", "preload"],
      "audio" => ["src"],
      "td" => ["align"],
      "th" => ["align"],
      "iframe" => ["src", "width", "height"],
      :all => %w[
        abbr accept accept-charset accesskey action alt axis border cellpadding
        cellspacing char charoff charset checked clear cols colspan color compact
        coords datetime dir disabled enctype for frame headers height hreflang hspace
        ismap label lang maxlength media method multiple name nohref noshade nowrap
        open prompt readonly rel rev rows rowspan rules scope selected shape size span
        start summary tabindex target title type usemap valign value vspace width
        itemprop id
      ]
    }
    hash[:protocols] = {
      "a" => {
        "href" => ["http", "https", "mailto", :relative]
      },
      "blockquote" => {
        "cite" => ["http", "https", :relative]
      },
      "del" => {
        "cite" => ["http", "https", :relative]
      },
      "ins" => {
        "cite" => ["http", "https", :relative]
      },
      "q" => {
        "cite" => ["http", "https", :relative]
      },
      "img" => {
        "src" => ["http", "https", :relative, "data"],
        "longdesc" => ["http", "https", :relative]
      },
      "video" => {
        "src" => ["http", "https"],
        "poster" => ["http", "https"]
      },
      "audio" => {
        "src" => ["http", "https"]
      }
    }
    hash[:remove_contents] = %w[script style iframe object embed]
  end

  # Base allowlist plus HTML-rewriting transformers for normal display.
  ALLOWLIST_DEFAULT = ALLOWLIST_BASE.clone.tap do |hash|
    transformers = Transformers.new
    hash[:transformers] = [transformers.class_allowlist, transformers.table_elements, transformers.top_level_li, transformers.video]
  end

  # Newsletters: strip table markup entirely.
  ALLOWLIST_NEWSLETTER = ALLOWLIST_BASE.clone.tap do |hash|
    hash[:elements] = hash[:elements] - %w[table thead tbody tfoot tr td]
  end

  # Evernote export allowlist. Fix: replace the former broad :all
  # attribute list with a tight per-element map (img sizing/alt; list and
  # table attributes borrowed from Sanitize's RELAXED config) so exported
  # notes only carry attributes Evernote accepts.
  ALLOWLIST_EVERNOTE = {
    elements: %w[
      a abbr acronym address area b bdo big blockquote br caption center cite code col colgroup dd
      del dfn div dl dt em font h1 h2 h3 h4 h5 h6 hr i img ins kbd li map ol p pre q s samp small
      strike strong sub sup table tbody td tfoot th thead tr tt u ul var xmp
    ],
    remove_contents: ["script", "style", "iframe", "object", "embed", "title"],
    attributes: {
      "a" => ["href"],
      "img" => ["src", "width", "height", "alt"],
      "ol" => Sanitize::Config::RELAXED[:attributes]["ol"],
      "ul" => Sanitize::Config::RELAXED[:attributes]["ul"],
      "table" => Sanitize::Config::RELAXED[:attributes]["table"],
      "td" => Sanitize::Config::RELAXED[:attributes]["td"],
      "th" => Sanitize::Config::RELAXED[:attributes]["th"]
    },
    protocols: {
      "a" => {"href" => ["http", "https", :relative]},
      "img" => {"src" => ["http", "https", :relative]}
    }
  }

  SANITIZE_BASIC = Sanitize::Config.merge(Sanitize::Config::BASIC, remove_contents: ["script", "style", "iframe", "object", "embed", "figure"])

  def self.format!(*args)
    new._format!(*args)
  end

  # Full display pipeline: smileys, sanitization, src fixing, iframe
  # placeholders, optional Camo image proxying, optional URL absolutizing.
  def _format!(content, entry = nil, image_proxy_enabled = true, base_url = nil)
    context = {
      whitelist: ALLOWLIST_DEFAULT,
      embed_url: Rails.application.routes.url_helpers.iframe_embeds_path,
      embed_classes: "iframe-placeholder entry-callout system-content"
    }
    filters = [HTML::Pipeline::SmileyFilter, HTML::Pipeline::SanitizationFilter, HTML::Pipeline::SrcFixer, HTML::Pipeline::IframeFilter]
    if ENV["CAMO_HOST"] && ENV["CAMO_KEY"] && image_proxy_enabled
      context[:asset_proxy] = ENV["CAMO_HOST"]
      context[:asset_proxy_secret_key] = ENV["CAMO_KEY"]
      context[:asset_src_attribute] = "data-camo-src"
      # << mutates filters in place; no reassignment needed.
      filters << HTML::Pipeline::CamoFilter
    end
    if entry || base_url
      # Absolutize src/href first so later filters see full URLs.
      filters.unshift(HTML::Pipeline::AbsoluteSourceFilter)
      filters.unshift(HTML::Pipeline::AbsoluteHrefFilter)
      context[:image_base_url] = base_url || entry.feed.site_url
      context[:image_subpage_url] = base_url || entry.url || ""
      context[:href_base_url] = base_url || entry.feed.site_url
      context[:href_subpage_url] = base_url || entry.url || ""
      if entry && entry.feed.newsletter?
        context[:whitelist] = ALLOWLIST_NEWSLETTER
      end
    end
    filters.unshift(HTML::Pipeline::LazyLoadFilter)
    pipeline = HTML::Pipeline.new filters, context
    result = pipeline.call(content)
    if entry&.archived_images?
      result[:output] = ImageFallback.new(result[:output]).add_fallbacks
    end
    result[:output].to_s
  end

  def self.newsletter_format(*args)
    new._newsletter_format(*args)
  end

  # Sanitize newsletter HTML with Sanitize's RELAXED config, optionally
  # routing images through the Camo proxy.
  def _newsletter_format(content)
    context = {
      whitelist: Sanitize::Config::RELAXED
    }
    filters = [HTML::Pipeline::SanitizationFilter]
    if ENV["CAMO_HOST"] && ENV["CAMO_KEY"]
      context[:asset_proxy] = ENV["CAMO_HOST"]
      context[:asset_proxy_secret_key] = ENV["CAMO_KEY"]
      context[:asset_src_attribute] = "data-camo-src"
      filters << HTML::Pipeline::CamoFilter
    end
    pipeline = HTML::Pipeline.new filters, context
    result = pipeline.call(content)
    result[:output].to_s
  end

  def self.absolute_source(*args)
    new._absolute_source(*args)
  end

  # Rewrite relative src/href to absolute URLs; returns the input
  # unchanged on any pipeline error (best-effort).
  def _absolute_source(content, entry, base_url = nil)
    filters = [HTML::Pipeline::AbsoluteSourceFilter, HTML::Pipeline::AbsoluteHrefFilter]
    context = {
      image_base_url: base_url || entry.feed.site_url,
      image_subpage_url: base_url || entry.url || "",
      href_base_url: base_url || entry.feed.site_url,
      href_subpage_url: base_url || entry.url || ""
    }
    pipeline = HTML::Pipeline.new filters, context
    result = pipeline.call(content)
    result[:output].to_s
  rescue
    content
  end

  def self.api_format(*args)
    new._api_format(*args)
  end

  # API output: absolute URLs + protocol normalization; newsletters get
  # an extra sanitization pass. Best-effort (falls back to raw content).
  def _api_format(content, entry)
    filters = [HTML::Pipeline::AbsoluteSourceFilter, HTML::Pipeline::AbsoluteHrefFilter, HTML::Pipeline::ProtocolFilter]
    context = {
      image_base_url: entry.feed.site_url,
      image_subpage_url: entry.url || "",
      href_base_url: entry.feed.site_url,
      href_subpage_url: entry.url || ""
    }
    if entry.feed.newsletter?
      filters.push(HTML::Pipeline::SanitizationFilter)
      context[:whitelist] = ALLOWLIST_NEWSLETTER
    end
    pipeline = HTML::Pipeline.new filters, context
    result = pipeline.call(content)
    result[:output].to_s
  rescue
    content
  end

  def self.app_format(*args)
    new._app_format(*args)
  end

  # Native-app output: like _api_format plus image placeholders that defer
  # loading via data-feedbin-src. Best-effort (falls back to raw content).
  def _app_format(content, entry)
    filters = [HTML::Pipeline::AbsoluteSourceFilter, HTML::Pipeline::AbsoluteHrefFilter, HTML::Pipeline::ProtocolFilter, HTML::Pipeline::ImagePlaceholderFilter]
    context = {
      image_base_url: entry.feed.site_url,
      image_subpage_url: entry.url || "",
      href_base_url: entry.feed.site_url,
      href_subpage_url: entry.url || "",
      placeholder_url: "",
      placeholder_attribute: "data-feedbin-src"
    }
    pipeline = HTML::Pipeline.new filters, context
    result = pipeline.call(content)
    result[:output].to_s
  rescue
    content
  end

  def self.evernote_format(*args)
    new._evernote_format(*args)
  end

  # Evernote export: strict allowlist, XML serialization (Evernote's ENML
  # is XML). Best-effort (falls back to raw content).
  def _evernote_format(content, entry)
    filters = [HTML::Pipeline::SanitizationFilter, HTML::Pipeline::SrcFixer, HTML::Pipeline::AbsoluteSourceFilter, HTML::Pipeline::AbsoluteHrefFilter, HTML::Pipeline::ProtocolFilter]
    context = {
      whitelist: ALLOWLIST_EVERNOTE,
      image_base_url: entry.feed.site_url,
      image_subpage_url: entry.url || "",
      href_base_url: entry.feed.site_url,
      href_subpage_url: entry.url || ""
    }
    pipeline = HTML::Pipeline.new filters, context
    result = pipeline.call(content)
    result[:output].to_xml
  rescue
    content
  end

  def self.summary(*args)
    new._summary(*args)
  end

  # Plain-text summary: strip markup, collapse whitespace, optionally
  # truncate at a word boundary.
  def _summary(text, length = nil)
    text = Loofah.fragment(text)
      .scrub!(:prune)
      .to_text(encode_special_chars: false)
      .gsub(/\s+/, " ")
      .squish
    text = text.truncate(length, separator: " ", omission: "") if length
    text
  end

  def self.text_email(*args)
    new._text_email(*args)
  end

  # Render markdown-ish plain text to sanitized HTML for email bodies.
  # Best-effort (falls back to raw content).
  def _text_email(content)
    markdown = Redcarpet::Markdown.new(Redcarpet::Render::HTML.new(hard_wrap: true), autolink: true)
    content = markdown.render(content)
    Sanitize.fragment(content, ALLOWLIST_DEFAULT).html_safe
  rescue
    content
  end
end
Evernote allowed attributes fixes.
# Sanitizes and transforms entry/newsletter HTML through HTML::Pipeline
# filter chains, with per-destination allowlists (web app, newsletters,
# API, Evernote export).
class ContentFormatter
  # Shared element/attribute/protocol allowlist (Sanitize-gem config shape).
  ALLOWLIST_BASE = {}.tap do |hash|
    hash[:elements] = %w[
      h1 h2 h3 h4 h5 h6 h7 h8 br b i strong em a pre code img tt div ins del sup sub
      p ol ul table thead tbody tfoot blockquote dl dt dd kbd q samp var hr ruby rt
      rp li tr td th s strike summary details figure figcaption audio video source
      small iframe
    ]
    hash[:attributes] = {
      "a" => ["href"],
      "img" => ["src", "longdesc"],
      "div" => ["itemscope", "itemtype"],
      "blockquote" => ["cite"],
      "del" => ["cite"],
      "ins" => ["cite"],
      "q" => ["cite"],
      "source" => ["src"],
      "video" => ["src", "poster", "playsinline", "loop", "muted", "controls", "preload"],
      "audio" => ["src"],
      "td" => ["align"],
      "th" => ["align"],
      "iframe" => ["src", "width", "height"],
      :all => %w[
        abbr accept accept-charset accesskey action alt axis border cellpadding
        cellspacing char charoff charset checked clear cols colspan color compact
        coords datetime dir disabled enctype for frame headers height hreflang hspace
        ismap label lang maxlength media method multiple name nohref noshade nowrap
        open prompt readonly rel rev rows rowspan rules scope selected shape size span
        start summary tabindex target title type usemap valign value vspace width
        itemprop id
      ]
    }
    hash[:protocols] = {
      "a" => {
        "href" => ["http", "https", "mailto", :relative]
      },
      "blockquote" => {
        "cite" => ["http", "https", :relative]
      },
      "del" => {
        "cite" => ["http", "https", :relative]
      },
      "ins" => {
        "cite" => ["http", "https", :relative]
      },
      "q" => {
        "cite" => ["http", "https", :relative]
      },
      "img" => {
        "src" => ["http", "https", :relative, "data"],
        "longdesc" => ["http", "https", :relative]
      },
      "video" => {
        "src" => ["http", "https"],
        "poster" => ["http", "https"]
      },
      "audio" => {
        "src" => ["http", "https"]
      }
    }
    hash[:remove_contents] = %w[script style iframe object embed]
  end

  # Base allowlist plus HTML-rewriting transformers for normal display.
  ALLOWLIST_DEFAULT = ALLOWLIST_BASE.clone.tap do |hash|
    transformers = Transformers.new
    hash[:transformers] = [transformers.class_allowlist, transformers.table_elements, transformers.top_level_li, transformers.video]
  end

  # Newsletters: strip table markup entirely.
  ALLOWLIST_NEWSLETTER = ALLOWLIST_BASE.clone.tap do |hash|
    hash[:elements] = hash[:elements] - %w[table thead tbody tfoot tr td]
  end

  # Evernote export allowlist: tight per-element attribute map (img
  # sizing/alt; list/table attributes borrowed from Sanitize's RELAXED
  # config) so exported notes only carry attributes Evernote accepts.
  ALLOWLIST_EVERNOTE = {
    elements: %w[
      a abbr acronym address area b bdo big blockquote br caption center cite code col colgroup dd
      del dfn div dl dt em font h1 h2 h3 h4 h5 h6 hr i img ins kbd li map ol p pre q s samp small
      strike strong sub sup table tbody td tfoot th thead tr tt u ul var xmp
    ],
    remove_contents: ["script", "style", "iframe", "object", "embed", "title"],
    attributes: {
      "a" => ["href"],
      "img" => ["src", "width", "height", "alt"],
      "ol" => Sanitize::Config::RELAXED[:attributes]["ol"],
      "ul" => Sanitize::Config::RELAXED[:attributes]["ul"],
      "table" => Sanitize::Config::RELAXED[:attributes]["table"],
      "td" => Sanitize::Config::RELAXED[:attributes]["td"],
      "th" => Sanitize::Config::RELAXED[:attributes]["th"]
    },
    protocols: {
      "a" => {"href" => ["http", "https", :relative]},
      "img" => {"src" => ["http", "https", :relative]}
    }
  }

  SANITIZE_BASIC = Sanitize::Config.merge(Sanitize::Config::BASIC, remove_contents: ["script", "style", "iframe", "object", "embed", "figure"])

  def self.format!(*args)
    new._format!(*args)
  end

  # Full display pipeline: smileys, sanitization, src fixing, iframe
  # placeholders, optional Camo image proxying, optional URL absolutizing.
  def _format!(content, entry = nil, image_proxy_enabled = true, base_url = nil)
    context = {
      whitelist: ALLOWLIST_DEFAULT,
      embed_url: Rails.application.routes.url_helpers.iframe_embeds_path,
      embed_classes: "iframe-placeholder entry-callout system-content"
    }
    filters = [HTML::Pipeline::SmileyFilter, HTML::Pipeline::SanitizationFilter, HTML::Pipeline::SrcFixer, HTML::Pipeline::IframeFilter]
    if ENV["CAMO_HOST"] && ENV["CAMO_KEY"] && image_proxy_enabled
      context[:asset_proxy] = ENV["CAMO_HOST"]
      context[:asset_proxy_secret_key] = ENV["CAMO_KEY"]
      context[:asset_src_attribute] = "data-camo-src"
      filters = filters << HTML::Pipeline::CamoFilter
    end
    if entry || base_url
      # Absolutize src/href first so later filters see full URLs.
      filters.unshift(HTML::Pipeline::AbsoluteSourceFilter)
      filters.unshift(HTML::Pipeline::AbsoluteHrefFilter)
      context[:image_base_url] = base_url || entry.feed.site_url
      context[:image_subpage_url] = base_url || entry.url || ""
      context[:href_base_url] = base_url || entry.feed.site_url
      context[:href_subpage_url] = base_url || entry.url || ""
      if entry && entry.feed.newsletter?
        context[:whitelist] = ALLOWLIST_NEWSLETTER
      end
    end
    filters.unshift(HTML::Pipeline::LazyLoadFilter)
    pipeline = HTML::Pipeline.new filters, context
    result = pipeline.call(content)
    if entry&.archived_images?
      result[:output] = ImageFallback.new(result[:output]).add_fallbacks
    end
    result[:output].to_s
  end

  def self.newsletter_format(*args)
    new._newsletter_format(*args)
  end

  # Sanitize newsletter HTML with Sanitize's RELAXED config, optionally
  # routing images through the Camo proxy.
  def _newsletter_format(content)
    context = {
      whitelist: Sanitize::Config::RELAXED
    }
    filters = [HTML::Pipeline::SanitizationFilter]
    if ENV["CAMO_HOST"] && ENV["CAMO_KEY"]
      context[:asset_proxy] = ENV["CAMO_HOST"]
      context[:asset_proxy_secret_key] = ENV["CAMO_KEY"]
      context[:asset_src_attribute] = "data-camo-src"
      filters = filters << HTML::Pipeline::CamoFilter
    end
    pipeline = HTML::Pipeline.new filters, context
    result = pipeline.call(content)
    result[:output].to_s
  end

  def self.absolute_source(*args)
    new._absolute_source(*args)
  end

  # Rewrite relative src/href to absolute URLs; returns the input
  # unchanged on any pipeline error (best-effort).
  def _absolute_source(content, entry, base_url = nil)
    filters = [HTML::Pipeline::AbsoluteSourceFilter, HTML::Pipeline::AbsoluteHrefFilter]
    context = {
      image_base_url: base_url || entry.feed.site_url,
      image_subpage_url: base_url || entry.url || "",
      href_base_url: base_url || entry.feed.site_url,
      href_subpage_url: base_url || entry.url || ""
    }
    pipeline = HTML::Pipeline.new filters, context
    result = pipeline.call(content)
    result[:output].to_s
  rescue
    content
  end

  def self.api_format(*args)
    new._api_format(*args)
  end

  # API output: absolute URLs + protocol normalization; newsletters get
  # an extra sanitization pass. Best-effort (falls back to raw content).
  def _api_format(content, entry)
    filters = [HTML::Pipeline::AbsoluteSourceFilter, HTML::Pipeline::AbsoluteHrefFilter, HTML::Pipeline::ProtocolFilter]
    context = {
      image_base_url: entry.feed.site_url,
      image_subpage_url: entry.url || "",
      href_base_url: entry.feed.site_url,
      href_subpage_url: entry.url || ""
    }
    if entry.feed.newsletter?
      filters.push(HTML::Pipeline::SanitizationFilter)
      context[:whitelist] = ALLOWLIST_NEWSLETTER
    end
    pipeline = HTML::Pipeline.new filters, context
    result = pipeline.call(content)
    result[:output].to_s
  rescue
    content
  end

  def self.app_format(*args)
    new._app_format(*args)
  end

  # Native-app output: like _api_format plus image placeholders that defer
  # loading via data-feedbin-src. Best-effort (falls back to raw content).
  def _app_format(content, entry)
    filters = [HTML::Pipeline::AbsoluteSourceFilter, HTML::Pipeline::AbsoluteHrefFilter, HTML::Pipeline::ProtocolFilter, HTML::Pipeline::ImagePlaceholderFilter]
    context = {
      image_base_url: entry.feed.site_url,
      image_subpage_url: entry.url || "",
      href_base_url: entry.feed.site_url,
      href_subpage_url: entry.url || "",
      placeholder_url: "",
      placeholder_attribute: "data-feedbin-src"
    }
    pipeline = HTML::Pipeline.new filters, context
    result = pipeline.call(content)
    result[:output].to_s
  rescue
    content
  end

  def self.evernote_format(*args)
    new._evernote_format(*args)
  end

  # Evernote export: strict allowlist, XML serialization (Evernote's ENML
  # is XML). Best-effort (falls back to raw content).
  def _evernote_format(content, entry)
    filters = [HTML::Pipeline::SanitizationFilter, HTML::Pipeline::SrcFixer, HTML::Pipeline::AbsoluteSourceFilter, HTML::Pipeline::AbsoluteHrefFilter, HTML::Pipeline::ProtocolFilter]
    context = {
      whitelist: ALLOWLIST_EVERNOTE,
      image_base_url: entry.feed.site_url,
      image_subpage_url: entry.url || "",
      href_base_url: entry.feed.site_url,
      href_subpage_url: entry.url || ""
    }
    pipeline = HTML::Pipeline.new filters, context
    result = pipeline.call(content)
    result[:output].to_xml
  rescue
    content
  end

  def self.summary(*args)
    new._summary(*args)
  end

  # Plain-text summary: strip markup, collapse whitespace, optionally
  # truncate at a word boundary.
  def _summary(text, length = nil)
    text = Loofah.fragment(text)
      .scrub!(:prune)
      .to_text(encode_special_chars: false)
      .gsub(/\s+/, " ")
      .squish
    text = text.truncate(length, separator: " ", omission: "") if length
    text
  end

  def self.text_email(*args)
    new._text_email(*args)
  end

  # Render markdown-ish plain text to sanitized HTML for email bodies.
  # Best-effort (falls back to raw content).
  def _text_email(content)
    markdown = Redcarpet::Markdown.new(Redcarpet::Render::HTML.new(hard_wrap: true), autolink: true)
    content = markdown.render(content)
    Sanitize.fragment(content, ALLOWLIST_DEFAULT).html_safe
  rescue
    content
  end
end
|
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
class ContentMigration < ActiveRecord::Base
require 'aws/s3'
include AWS::S3
include Workflow
belongs_to :context, :polymorphic => true
belongs_to :user
belongs_to :attachment
belongs_to :overview_attachment, :class_name => 'Attachment'
belongs_to :exported_attachment, :class_name => 'Attachment'
has_a_broadcast_policy
serialize :migration_settings
before_save :infer_defaults
cattr_accessor :export_file_path
DATE_FORMAT = "%m/%d/%Y"
DEFAULT_TO_EXPORT = {
'all_files' => false,
'announcements' => false,
'assessments' => false,
'assignment_groups' => true,
'assignments' => false,
'calendar_events' => false,
'calendar_start' => 1.year.ago.strftime(DATE_FORMAT),
'calendar_end' => 1.year.from_now.strftime(DATE_FORMAT),
'course_outline' => true,
'discussions' => false,
'discussion_responses' => false,
'goals' => false,
'groups' => false,
'learning_modules' => false,
'question_bank' => false,
'rubrics' => false,
'tasks' => false,
'web_links' => false,
'wikis' => false
}
workflow do
state :created
#The pre_process states can be used by individual plugins as needed
state :pre_processing
state :pre_processed
state :pre_process_error
state :exporting
state :exported
state :importing
state :imported
state :failed
end
set_broadcast_policy do |p|
p.dispatch :migration_export_ready
p.to { [user] }
p.whenever {|record|
record.changed_state(:exported)
}
p.dispatch :migration_import_finished
p.to { [user] }
p.whenever {|record|
record.changed_state(:imported) && !record.migration_settings[:skip_import_notification]
}
p.dispatch :migration_import_failed
p.to { [user] }
p.whenever {|record|
record.changed_state(:failed) && !record.migration_settings[:skip_import_notification]
}
end
# the stream item context is decided by calling asset.context(user), i guess
# to differentiate from the normal asset.context() call that may not give us
# the context we want. in this case, they're one and the same.
alias_method :original_context, :context
def context(user = nil)
self.original_context
end
def migration_settings
read_attribute(:migration_settings) || write_attribute(:migration_settings,{}.with_indifferent_access)
end
def update_migration_settings(new_settings)
new_settings.each do |key, val|
if key == 'only'
process_to_scrape val
else
migration_settings[key] = val
end
end
end
def migration_ids_to_import=(val)
migration_settings[:migration_ids_to_import] = val
end
def infer_defaults
migration_settings[:to_scrape] ||= DEFAULT_TO_EXPORT
end
def process_to_scrape(hash)
migrate_only = migration_settings[:to_scrape] || DEFAULT_TO_EXPORT
hash.each do |key, arg|
migrate_only[key] = arg == '1' ? true : false if arg
if key == 'calendar_events' && migrate_only[key]
migrate_only['calendar_start'] = 1.year.ago.strftime(DATE_FORMAT)
migrate_only['calendar_end'] = 1.year.from_now.strftime(DATE_FORMAT)
end
end
migration_settings[:to_scrape] = migrate_only
end
def zip_path=(val)
migration_settings[:export_archive_path] = val
end
def zip_path
(migration_settings || {})[:export_archive_path]
end
def question_bank_name=(name)
if name && name.strip! != ''
migration_settings[:question_bank_name] = name
end
end
def question_bank_name
migration_settings[:question_bank_name]
end
def course_archive_download_url=(url)
migration_settings[:course_archive_download_url] = url
end
def root_account
self.context.root_account rescue nil
end
def plugin_type
if plugin = Canvas::Plugin.find(migration_settings['migration_type'])
plugin.settings['select_text'] || plugin.name
else
migration_settings['migration_type'].titleize
end
end
# add a non-fatal error/warning to the import. user_message is what will be
# displayed to the end user. exception_or_info can be either an Exception
# object or any other information on the error.
def add_warning(user_message, exception_or_info)
migration_settings[:warnings] ||= []
if exception_or_info.is_a?(Exception)
info = [exception_or_info.to_s, exception_or_info.backtrace]
else
info = exception_or_info
end
migration_settings[:warnings] << [user_message, info]
end
def warnings
(migration_settings[:warnings] || []).map(&:first)
end
def export_content
plugin = Canvas::Plugin.find(migration_settings['migration_type'])
if plugin
begin
if Canvas::MigrationWorker.const_defined?(plugin.settings['worker'])
self.workflow_state = :exporting
Canvas::MigrationWorker.const_get(plugin.settings['worker']).enqueue(self)
self.save
else
raise NameError
end
rescue NameError
self.workflow_state = 'failed'
message = "The migration plugin #{migration_settings['migration_type']} doesn't have a worker."
migration_settings[:last_error] = message
logger.error message
self.save
end
else
self.workflow_state = 'failed'
message = "No migration plugin of type #{migration_settings['migration_type']} found."
migration_settings[:last_error] = message
logger.error message
self.save
end
end
# Looks up whether the item identified by +val+ was selected for
# import.  Returns nil when the selection hashes are absent, replacing
# the old inline `rescue nil` which silently swallowed every
# StandardError, not just missing keys.
def to_import(val)
  ids = migration_settings[:migration_ids_to_import]
  ids && ids[:copy] && ids[:copy][val]
end
# Downloads the exported package, reads course_export.json (and the
# optional bundled all_files.zip) out of it, and hands the parsed data
# to the context's import_from_migration.  Any failure flips the state
# to :failed, records the error, and re-raises; temp files are always
# cleaned up via the ensure.
def import_content
  self.workflow_state = :importing
  self.save
  begin
    @exported_data_zip = download_exported_data
    @zip_file = Zip::ZipFile.open(@exported_data_zip.path)
    data = JSON.parse(@zip_file.read('course_export.json'))
    data = data.with_indifferent_access if data.is_a? Hash
    data['all_files_export'] ||= {}
    if @zip_file.find_entry('all_files.zip')
      # the file importer needs an actual file to process
      all_files_path = create_all_files_path(@exported_data_zip.path)
      @zip_file.extract('all_files.zip', all_files_path)
      data['all_files_export']['file_path'] = all_files_path
    else
      data['all_files_export']['file_path'] = nil
    end
    @zip_file.close
    # Default to an empty copy-selection when the user picked nothing.
    migration_settings[:migration_ids_to_import] ||= {:copy=>{}}
    self.context.import_from_migration(data, migration_settings[:migration_ids_to_import], self)
  rescue => e
    self.workflow_state = :failed
    message = "#{e.to_s}: #{e.backtrace.join("\n")}"
    migration_settings[:last_error] = message
    logger.error message
    self.save
    raise e
  ensure
    clear_migration_data
  end
end
# Run imports on the Delayed Job queue, with a single attempt.
handle_asynchronously :import_content, :priority => Delayed::LOW_PRIORITY, :max_attempts => 1

# Workflow-state based finders (Rails 2 named_scope API).
named_scope :for_context, lambda{|context|
  {:conditions => {:context_id => context.id, :context_type => context.class.to_s} }
}
named_scope :successful, :conditions=>"workflow_state = 'imported'"
named_scope :running, :conditions=>"workflow_state IN ('exporting', 'importing')"
named_scope :waiting, :conditions=>"workflow_state IN ('exported')"
named_scope :failed, :conditions=>"workflow_state IN ('failed', 'pre_process_error')"
# Copies the exported attachment into a local Tempfile — reading the
# file directly for local storage, or streaming it chunk by chunk from
# S3 — and returns the closed Tempfile.  Raises when there is no
# exported attachment to download.
def download_exported_data
  raise "No exported data to import" unless self.exported_attachment
  config = Setting.from_config('external_migration')
  # Optionally place the temp file in a configured data folder.
  if config && config[:data_folder]
    @exported_data_zip = Tempfile.new("migration_#{self.id}_", config[:data_folder])
  else
    @exported_data_zip = Tempfile.new("migration_#{self.id}_")
  end
  if Attachment.local_storage?
    @exported_data_zip.write File.read(self.exported_attachment.full_filename)
  elsif Attachment.s3_storage?
    att = self.exported_attachment
    require 'aws/s3'
    AWS::S3::S3Object.stream(att.full_filename, att.bucket_name) do |chunk|
      @exported_data_zip.write chunk
    end
  end
  @exported_data_zip.close
  @exported_data_zip
end
# Derives the extraction target for the bundled all_files.zip from the
# path of the downloaded export archive.
def create_all_files_path(temp_path)
  [temp_path, '_all_files.zip'].join
end
# Best-effort cleanup of the temp files created during an import.  Any
# failure is logged and deliberately swallowed: cleanup must never make
# the migration itself fail.
def clear_migration_data
  begin
    @zip_file.close if @zip_file
    @zip_file = nil
    if @exported_data_zip
      all_files_path = create_all_files_path(@exported_data_zip.path)
      # File.exist? replaces the deprecated File.exists? alias.
      FileUtils::rm_rf(all_files_path) if File.exist?(all_files_path)
      @exported_data_zip.unlink
    end
  rescue
    Rails.logger.warn "Couldn't delete files for content_migration #{self.id}"
  end
end
# Cheap progress reporting for workers: writes the progress column
# directly with update_all, skipping validations and callbacks.
def fast_update_progress(val)
  self.progress = val
  # NOTE(review): id is interpolated into raw SQL; safe only because it
  # is the record's primary key, never user input — confirm.
  ContentMigration.update_all({:progress=>val}, "id=#{self.id}")
end
end
Don't try to get the plugin name if no migration_type is set.
refs #4429
Change-Id: I647861957a2bc6454f8e1bbd28b210957ab1e21e
Reviewed-on: https://gerrit.instructure.com/3424
Reviewed-by: Zach Wily <9de8c4480303b5335cd2a33eefe814615ba3612a@instructure.com>
Tested-by: Hudson <dfb64870173313a9a7b56f814c8e3b33e268497a@instructure.com>
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# A ContentMigration models one import/export run for a context
# (typically a Course): the plugin that performs it, the user's content
# selections, workflow state, warnings/errors, and the produced
# attachments.
class ContentMigration < ActiveRecord::Base
  require 'aws/s3'
  include AWS::S3
  include Workflow

  belongs_to :context, :polymorphic => true
  belongs_to :user
  belongs_to :attachment
  belongs_to :overview_attachment, :class_name => 'Attachment'
  belongs_to :exported_attachment, :class_name => 'Attachment'
  has_a_broadcast_policy
  # migration_settings is a serialized Hash used as a grab bag for
  # plugin options, selections, warnings and errors.
  serialize :migration_settings
  before_save :infer_defaults
  cattr_accessor :export_file_path
  DATE_FORMAT = "%m/%d/%Y"

  # Content categories exported when the user makes no explicit choice.
  DEFAULT_TO_EXPORT = {
    'all_files' => false,
    'announcements' => false,
    'assessments' => false,
    'assignment_groups' => true,
    'assignments' => false,
    'calendar_events' => false,
    'calendar_start' => 1.year.ago.strftime(DATE_FORMAT),
    'calendar_end' => 1.year.from_now.strftime(DATE_FORMAT),
    'course_outline' => true,
    'discussions' => false,
    'discussion_responses' => false,
    'goals' => false,
    'groups' => false,
    'learning_modules' => false,
    'question_bank' => false,
    'rubrics' => false,
    'tasks' => false,
    'web_links' => false,
    'wikis' => false
  }

  # Lifecycle of a migration, persisted by the Workflow mixin.
  workflow do
    state :created
    #The pre_process states can be used by individual plugins as needed
    state :pre_processing
    state :pre_processed
    state :pre_process_error
    state :exporting
    state :exported
    state :importing
    state :imported
    state :failed
  end

  # Notify the requesting user when the export is ready and when the
  # import finishes or fails (unless notifications were skipped).
  set_broadcast_policy do |p|
    p.dispatch :migration_export_ready
    p.to { [user] }
    p.whenever {|record|
      record.changed_state(:exported)
    }
    p.dispatch :migration_import_finished
    p.to { [user] }
    p.whenever {|record|
      record.changed_state(:imported) && !record.migration_settings[:skip_import_notification]
    }
    p.dispatch :migration_import_failed
    p.to { [user] }
    p.whenever {|record|
      record.changed_state(:failed) && !record.migration_settings[:skip_import_notification]
    }
  end

  # the stream item context is decided by calling asset.context(user), i guess
  # to differentiate from the normal asset.context() call that may not give us
  # the context we want. in this case, they're one and the same.
  alias_method :original_context, :context
  def context(user = nil)
    self.original_context
  end

  # Lazily initializes the serialized settings hash so callers can
  # always write into it.
  def migration_settings
    read_attribute(:migration_settings) || write_attribute(:migration_settings,{}.with_indifferent_access)
  end

  # Merges +new_settings+ into migration_settings; the special 'only'
  # key carries the user's content selection and is routed through
  # process_to_scrape instead of being stored verbatim.
  def update_migration_settings(new_settings)
    new_settings.each do |key, val|
      if key == 'only'
        process_to_scrape val
      else
        migration_settings[key] = val
      end
    end
  end

  def migration_ids_to_import=(val)
    migration_settings[:migration_ids_to_import] = val
  end

  # before_save hook: make sure a content selection always exists.
  def infer_defaults
    migration_settings[:to_scrape] ||= DEFAULT_TO_EXPORT
  end

  # Translates checkbox params ('1'/'0') into the boolean to_scrape
  # selection; enabling calendar_events also fills a default date range.
  def process_to_scrape(hash)
    migrate_only = migration_settings[:to_scrape] || DEFAULT_TO_EXPORT
    hash.each do |key, arg|
      migrate_only[key] = arg == '1' ? true : false if arg
      if key == 'calendar_events' && migrate_only[key]
        migrate_only['calendar_start'] = 1.year.ago.strftime(DATE_FORMAT)
        migrate_only['calendar_end'] = 1.year.from_now.strftime(DATE_FORMAT)
      end
    end
    migration_settings[:to_scrape] = migrate_only
  end

  # Path of the export archive, kept in the migration settings.
  def zip_path=(val)
    migration_settings[:export_archive_path] = val
  end

  def zip_path
    (migration_settings || {})[:export_archive_path]
  end

  # NOTE(review): String#strip! returns nil when nothing was stripped,
  # so an empty string passes this guard and gets stored; the check also
  # relies on strip! mutating +name+ in place.  Confirm and fix.
  def question_bank_name=(name)
    if name && name.strip! != ''
      migration_settings[:question_bank_name] = name
    end
  end

  def question_bank_name
    migration_settings[:question_bank_name]
  end

  def course_archive_download_url=(url)
    migration_settings[:course_archive_download_url] = url
  end

  # Root account of the context; nil when the context is absent or has
  # no root_account (the rescue modifier swallows StandardError).
  def root_account
    self.context.root_account rescue nil
  end

  # Display name of the migration plugin, falling back to a titleized
  # migration_type and finally to 'Unknown' when no type is set.
  def plugin_type
    if plugin = Canvas::Plugin.find(migration_settings['migration_type'])
      plugin.settings['select_text'] || plugin.name
    elsif migration_settings['migration_type']
      migration_settings['migration_type'].titleize
    else
      'Unknown'
    end
  end

  # add a non-fatal error/warning to the import. user_message is what will be
  # displayed to the end user. exception_or_info can be either an Exception
  # object or any other information on the error.
  def add_warning(user_message, exception_or_info)
    migration_settings[:warnings] ||= []
    if exception_or_info.is_a?(Exception)
      info = [exception_or_info.to_s, exception_or_info.backtrace]
    else
      info = exception_or_info
    end
    migration_settings[:warnings] << [user_message, info]
  end

  # User-facing messages of all recorded warnings.
  def warnings
    (migration_settings[:warnings] || []).map(&:first)
  end

  # Starts the export by enqueuing the worker class declared by the
  # plugin's settings; marks the migration 'failed' (recording
  # :last_error) when the plugin or its worker class cannot be found.
  def export_content
    plugin = Canvas::Plugin.find(migration_settings['migration_type'])
    if plugin
      begin
        if Canvas::MigrationWorker.const_defined?(plugin.settings['worker'])
          self.workflow_state = :exporting
          Canvas::MigrationWorker.const_get(plugin.settings['worker']).enqueue(self)
          self.save
        else
          raise NameError
        end
      rescue NameError
        self.workflow_state = 'failed'
        message = "The migration plugin #{migration_settings['migration_type']} doesn't have a worker."
        migration_settings[:last_error] = message
        logger.error message
        self.save
      end
    else
      self.workflow_state = 'failed'
      message = "No migration plugin of type #{migration_settings['migration_type']} found."
      migration_settings[:last_error] = message
      logger.error message
      self.save
    end
  end

  # Whether the item identified by +val+ was selected for import; the
  # rescue modifier turns missing selection hashes into nil.
  def to_import(val)
    migration_settings[:migration_ids_to_import][:copy][val] rescue nil
  end

  # Downloads the exported package, unpacks course_export.json (and the
  # optional all_files.zip), and hands the data to the context's
  # import_from_migration.  Failures flip the state to :failed and
  # re-raise; temp files are always cleaned up via the ensure.
  def import_content
    self.workflow_state = :importing
    self.save
    begin
      @exported_data_zip = download_exported_data
      @zip_file = Zip::ZipFile.open(@exported_data_zip.path)
      data = JSON.parse(@zip_file.read('course_export.json'))
      data = data.with_indifferent_access if data.is_a? Hash
      data['all_files_export'] ||= {}
      if @zip_file.find_entry('all_files.zip')
        # the file importer needs an actual file to process
        all_files_path = create_all_files_path(@exported_data_zip.path)
        @zip_file.extract('all_files.zip', all_files_path)
        data['all_files_export']['file_path'] = all_files_path
      else
        data['all_files_export']['file_path'] = nil
      end
      @zip_file.close
      migration_settings[:migration_ids_to_import] ||= {:copy=>{}}
      self.context.import_from_migration(data, migration_settings[:migration_ids_to_import], self)
    rescue => e
      self.workflow_state = :failed
      message = "#{e.to_s}: #{e.backtrace.join("\n")}"
      migration_settings[:last_error] = message
      logger.error message
      self.save
      raise e
    ensure
      clear_migration_data
    end
  end
  # Run imports on the Delayed Job queue, with a single attempt.
  handle_asynchronously :import_content, :priority => Delayed::LOW_PRIORITY, :max_attempts => 1

  # Workflow-state based finders (Rails 2 named_scope API).
  named_scope :for_context, lambda{|context|
    {:conditions => {:context_id => context.id, :context_type => context.class.to_s} }
  }
  named_scope :successful, :conditions=>"workflow_state = 'imported'"
  named_scope :running, :conditions=>"workflow_state IN ('exporting', 'importing')"
  named_scope :waiting, :conditions=>"workflow_state IN ('exported')"
  named_scope :failed, :conditions=>"workflow_state IN ('failed', 'pre_process_error')"

  # Copies the exported attachment into a local Tempfile (reading the
  # local file, or streaming from S3) and returns the closed Tempfile.
  def download_exported_data
    raise "No exported data to import" unless self.exported_attachment
    config = Setting.from_config('external_migration')
    if config && config[:data_folder]
      @exported_data_zip = Tempfile.new("migration_#{self.id}_", config[:data_folder])
    else
      @exported_data_zip = Tempfile.new("migration_#{self.id}_")
    end
    if Attachment.local_storage?
      @exported_data_zip.write File.read(self.exported_attachment.full_filename)
    elsif Attachment.s3_storage?
      att = self.exported_attachment
      require 'aws/s3'
      AWS::S3::S3Object.stream(att.full_filename, att.bucket_name) do |chunk|
        @exported_data_zip.write chunk
      end
    end
    @exported_data_zip.close
    @exported_data_zip
  end

  # Where the bundled all_files.zip gets extracted for a given archive.
  def create_all_files_path(temp_path)
    "#{temp_path}_all_files.zip"
  end

  # Best-effort removal of the temp files created during import; errors
  # are logged and deliberately swallowed.
  def clear_migration_data
    begin
      @zip_file.close if @zip_file
      @zip_file = nil
      if @exported_data_zip
        all_files_path = create_all_files_path(@exported_data_zip.path)
        FileUtils::rm_rf(all_files_path) if File.exists?(all_files_path)
        @exported_data_zip.unlink
      end
    rescue
      Rails.logger.warn "Couldn't delete files for content_migration #{self.id}"
    end
  end

  # Cheap progress reporting for workers: writes the column directly
  # with update_all, skipping validations and callbacks.
  def fast_update_progress(val)
    self.progress = val
    ContentMigration.update_all({:progress=>val}, "id=#{self.id}")
  end
end
|
# = Informations
#
# == License
#
# Ekylibre - Simple agricultural ERP
# Copyright (C) 2008-2009 Brice Texier, Thibaud Merigon
# Copyright (C) 2010-2012 Brice Texier
# Copyright (C) 2012-2016 Brice Texier, David Joulin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# == Table: document_templates
#
# active :boolean default(FALSE), not null
# archiving :string not null
# by_default :boolean default(FALSE), not null
# created_at :datetime not null
# creator_id :integer
# formats :string
# id :integer not null, primary key
# language :string not null
# lock_version :integer default(0), not null
# managed :boolean default(FALSE), not null
# name :string not null
# nature :string not null
# updated_at :datetime not null
# updater_id :integer
#
# Sources are stored in :private/reporting/:id/content.xml
# Template of a printable document (a JasperReports source) for a given
# document nature.  Sources are stored in
# :private/reporting/:id/content.xml under the tenant directory.
class DocumentTemplate < Ekylibre::Record::Base
  enumerize :archiving, in: [:none_of_template, :first_of_template, :last_of_template, :none, :first, :last], default: :none, predicates: { prefix: true }
  refers_to :language
  refers_to :nature, class_name: 'DocumentNature'
  has_many :documents, class_name: 'Document', foreign_key: :template_id, dependent: :nullify, inverse_of: :template
  # [VALIDATORS[ Do not edit these lines directly. Use `rake clean:validations`.
  validates :active, :by_default, :managed, inclusion: { in: [true, false] }
  validates :archiving, :language, :nature, presence: true
  validates :formats, length: { maximum: 500 }, allow_blank: true
  validates :name, presence: true, length: { maximum: 500 }
  # ]VALIDATORS]
  validates :language, length: { allow_nil: true, maximum: 3 }
  validates :archiving, :nature, length: { allow_nil: true, maximum: 60 }
  validates :nature, inclusion: { in: nature.values }
  selects_among_all scope: :nature
  # default_scope order(:name)

  # Active templates for the given nature(s), ordered by name; raises
  # ArgumentError for natures unknown to Nomen::DocumentNature.
  scope :of_nature, lambda { |*natures|
    natures.flatten!
    natures.compact!
    return none unless natures.respond_to?(:any?) && natures.any?
    invalids = natures.select { |nature| Nomen::DocumentNature[nature].nil? }
    if invalids.any?
      raise ArgumentError, "Unknown nature(s) for a DocumentTemplate: #{invalids.map(&:inspect).to_sentence}"
    end
    where(nature: natures, active: true).order(:name)
  }

  # A template cannot be destroyed while documents still reference it.
  protect(on: :destroy) do
    documents.any?
  end

  before_validation do
    # Check that given formats are all known
    unless formats.empty?
      self.formats = formats.to_s.downcase.strip.split(/[\s\,]+/).delete_if do |f|
        !Ekylibre::Reporting.formats.include?(f)
      end.join(', ')
    end
  end

  # Installs the uploaded source on disk after save: strips XML
  # comments, injects the corporate reporting style into the <template>
  # element, and removes compiled .jasper files so they are rebuilt.
  after_save do
    # Install file after save only
    if @source
      FileUtils.mkdir_p(source_path.dirname)
      File.open(source_path, 'wb') do |f|
        # Updates source to make it working
        document = Nokogiri::XML(@source) do |config|
          config.noblanks.nonet.strict
        end
        # Removes comments
        document.xpath('//comment()').remove
        # Updates template
        if document.root && document.root.namespace && document.root.namespace.href == 'http://jasperreports.sourceforge.net/jasperreports'
          if template = document.root.xpath('xmlns:template').first
            logger.info "Update <template> for document template #{nature}"
            template.children.remove
            style_file = Ekylibre::Tenant.private_directory.join('corporate_identity', 'reporting_style.xml')
            # TODO: find a way to permit customization for users to restore that
            if true # unless style_file.exist?
              FileUtils.mkdir_p(style_file.dirname)
              FileUtils.cp(Rails.root.join('config', 'corporate_identity', 'reporting_style.xml'), style_file)
            end
            template.add_child(Nokogiri::XML::CDATA.new(document, style_file.relative_path_from(source_path.dirname).to_s.inspect))
          else
            logger.info "Cannot find and update <template> in document template #{nature}"
          end
        end
        # Writes source
        f.write(document.to_s)
      end
      # Remove .jasper file to force reloading
      Dir.glob(source_path.dirname.join('*.jasper')).each do |file|
        FileUtils.rm_f(file)
      end
    end
  end

  # Updates archiving methods of other templates of same nature
  after_save do
    if archiving.to_s =~ /\_of\_template$/
      self.class.where('nature = ? AND NOT archiving LIKE ? AND id != ?', nature, '%_of_template', id).update_all("archiving = archiving || '_of_template'")
    else
      self.class.where('nature = ? AND id != ?', nature, id).update_all(archiving: archiving)
    end
  end

  # Always after protect on destroy
  after_destroy do
    FileUtils.rm_rf(source_dir) if source_dir.exist?
  end

  # Install the source of a document template
  # with all its dependencies
  attr_writer :source

  # Returns source value
  attr_reader :source

  # Returns the expected dir for the source file
  def source_dir
    self.class.sources_root.join(id.to_s)
  end

  # Returns the expected path for the source file
  def source_path
    source_dir.join('content.xml')
  end

  # Print a document with the given datasource and return raw data
  # Store if needed by template
  # @param datasource XML representation of data used by the template
  def print(datasource, key, format = :pdf, options = {})
    # Load the report
    report = Beardley::Report.new(source_path, locale: 'i18n.iso2'.t)
    # Call it with datasource
    data = report.send("to_#{format}", datasource)
    # Archive the document according to archiving method. See #document method.
    document(data, key, format, options)
    # Returns only the data (without filename)
    data
  end

  # Export a document with the given datasource and return path file
  # Store if needed by template
  # @param datasource XML representation of data used by the template
  def export(datasource, key, format = :pdf, options = {})
    # Load the report
    report = Beardley::Report.new(source_path, locale: 'i18n.iso2'.t)
    # Call it with datasource
    path = Pathname.new(report.to_file(format, datasource))
    # Archive the document according to archiving method. See #document method.
    if document = self.document(path, key, format, options)
      FileUtils.rm_rf(path)
      path = document.file.path(:original)
    end
    # Returns only the path
    path
  end

  # Returns the list of formats of the templates
  def formats
    (self['formats'].blank? ? Ekylibre::Reporting.formats : self['formats'].strip.split(/[\s\,]+/))
  end

  def formats=(value)
    self['formats'] = (value.is_a?(Array) ? value.join(', ') : value.to_s)
  end

  # Archive the document using the given archiving method
  def document(data_or_path, key, _format, options = {})
    return nil if archiving_none? || archiving_none_of_template?
    # Gets historic of document
    archives = Document.where(nature: nature, key: key).where.not(template_id: nil)
    archives_of_template = archives.where(template_id: id)
    # Checks if archiving is expected
    return nil unless (archiving_first? && archives.empty?) ||
                      (archiving_first_of_template? && archives_of_template.empty?) ||
                      archiving.to_s =~ /\A(last|all)(\_of\_template)?\z/
    # Lists last documents to remove after archiving
    removables = []
    if archiving_last?
      removables = archives.pluck(:id)
    elsif archiving_last_of_template?
      removables = archives_of_template.pluck(:id)
    end
    # Creates document if not exist
    document = Document.create!(nature: nature, key: key, name: (options[:name] || tc('document_name', nature: nature.l, key: key)), file: File.open(data_or_path), template_id: id)
    # Removes useless docs
    Document.destroy removables
    document
  end

  # Search paths for the bundled default template sources.
  @@load_path = []
  mattr_accessor :load_path

  class << self
    # Print document with default active template for the given nature
    # Returns nil if no template found.
    def print(nature, datasource, key, format = :pdf, options = {})
      if template = find_by(nature: nature, by_default: true, active: true)
        return template.print(datasource, key, format, options)
      end
      nil
    end

    # Returns the root directory for the document templates's sources
    def sources_root
      Ekylibre::Tenant.private_directory.join('reporting')
    end

    # Compute fallback chain for a given document nature
    def template_fallbacks(nature, locale)
      stack = []
      load_path.each do |path|
        root = path.join(locale, 'reporting')
        stack << root.join("#{nature}.xml")
        stack << root.join("#{nature}.jrxml")
        # Specialized sale/purchase natures fall back to a generic source.
        fallback = {
          sales_order: :sale,
          sales_estimate: :sale,
          sales_invoice: :sale,
          purchases_order: :purchase,
          purchases_estimate: :purchase,
          purchases_invoice: :purchase
        }[nature.to_sym]
        if fallback
          stack << root.join("#{fallback}.xml")
          stack << root.join("#{fallback}.jrxml")
        end
      end
      stack
    end

    # Loads in DB all default document templates
    def load_defaults(options = {})
      locale = (options[:locale] || Preference[:language] || I18n.locale).to_s
      Ekylibre::Record::Base.transaction do
        manageds = where(managed: true).select(&:destroyable?)
        for nature in self.nature.values
          if source = template_fallbacks(nature, locale).detect(&:exist?)
            File.open(source, 'rb:UTF-8') do |f|
              unless template = find_by(nature: nature, managed: true)
                template = new(nature: nature, managed: true, active: true, by_default: false, archiving: 'last')
              end
              manageds.delete(template)
              template.attributes = { source: f, language: locale }
              template.name ||= template.nature.l
              template.save!
            end
            Rails.logger.info "Load a default document template #{nature}"
          else
            Rails.logger.warn "Cannot load a default document template #{nature}: No file found at #{source}"
          end
        end
        # Drop managed templates whose default source no longer exists.
        destroy(manageds.map(&:id))
      end
      true
    end
  end
end
# Load default path: built-in template sources ship under config/locales.
DocumentTemplate.load_path << Rails.root.join('config', 'locales')
Handle errors if the document template is invalid. Fixes #1222.
# = Informations
#
# == License
#
# Ekylibre - Simple agricultural ERP
# Copyright (C) 2008-2009 Brice Texier, Thibaud Merigon
# Copyright (C) 2010-2012 Brice Texier
# Copyright (C) 2012-2016 Brice Texier, David Joulin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# == Table: document_templates
#
# active :boolean default(FALSE), not null
# archiving :string not null
# by_default :boolean default(FALSE), not null
# created_at :datetime not null
# creator_id :integer
# formats :string
# id :integer not null, primary key
# language :string not null
# lock_version :integer default(0), not null
# managed :boolean default(FALSE), not null
# name :string not null
# nature :string not null
# updated_at :datetime not null
# updater_id :integer
#
# Sources are stored in :private/reporting/:id/content.xml
# Template of a printable document (a JasperReports source) for a given
# document nature.  Sources are stored in
# :private/reporting/:id/content.xml under the tenant directory.
class DocumentTemplate < Ekylibre::Record::Base
  enumerize :archiving, in: [:none_of_template, :first_of_template, :last_of_template, :none, :first, :last], default: :none, predicates: { prefix: true }
  refers_to :language
  refers_to :nature, class_name: 'DocumentNature'
  has_many :documents, class_name: 'Document', foreign_key: :template_id, dependent: :nullify, inverse_of: :template
  # [VALIDATORS[ Do not edit these lines directly. Use `rake clean:validations`.
  validates :active, :by_default, :managed, inclusion: { in: [true, false] }
  validates :archiving, :language, :nature, presence: true
  validates :formats, length: { maximum: 500 }, allow_blank: true
  validates :name, presence: true, length: { maximum: 500 }
  # ]VALIDATORS]
  validates :language, length: { allow_nil: true, maximum: 3 }
  validates :archiving, :nature, length: { allow_nil: true, maximum: 60 }
  validates :nature, inclusion: { in: nature.values }
  selects_among_all scope: :nature
  # default_scope order(:name)

  # Active templates for the given nature(s), ordered by name; raises
  # ArgumentError for natures unknown to Nomen::DocumentNature.
  scope :of_nature, lambda { |*natures|
    natures.flatten!
    natures.compact!
    return none unless natures.respond_to?(:any?) && natures.any?
    invalids = natures.select { |nature| Nomen::DocumentNature[nature].nil? }
    if invalids.any?
      raise ArgumentError, "Unknown nature(s) for a DocumentTemplate: #{invalids.map(&:inspect).to_sentence}"
    end
    where(nature: natures, active: true).order(:name)
  }

  # A template cannot be destroyed while documents still reference it.
  protect(on: :destroy) do
    documents.any?
  end

  before_validation do
    # Check that given formats are all known
    unless formats.empty?
      self.formats = formats.to_s.downcase.strip.split(/[\s\,]+/).delete_if do |f|
        !Ekylibre::Reporting.formats.include?(f)
      end.join(', ')
    end
  end

  # Installs the uploaded source on disk after save: strips XML
  # comments, injects the corporate reporting style into the <template>
  # element, and removes compiled .jasper files so they are rebuilt.
  after_save do
    # Install file after save only
    if @source
      FileUtils.mkdir_p(source_path.dirname)
      File.open(source_path, 'wb') do |f|
        # Updates source to make it working
        # NOTE(review): this begin/end has no rescue clause, so the error
        # handling for invalid template XML promised by the related
        # commit appears to be missing — confirm.
        begin
          document = Nokogiri::XML(@source) do |config|
            config.noblanks.nonet.strict
          end
          # Removes comments
          document.xpath('//comment()').remove
          # Updates template
          if document.root && document.root.namespace && document.root.namespace.href == 'http://jasperreports.sourceforge.net/jasperreports'
            if template = document.root.xpath('xmlns:template').first
              logger.info "Update <template> for document template #{nature}"
              template.children.remove
              style_file = Ekylibre::Tenant.private_directory.join('corporate_identity', 'reporting_style.xml')
              # TODO: find a way to permit customization for users to restore that
              if true # unless style_file.exist?
                FileUtils.mkdir_p(style_file.dirname)
                FileUtils.cp(Rails.root.join('config', 'corporate_identity', 'reporting_style.xml'), style_file)
              end
              template.add_child(Nokogiri::XML::CDATA.new(document, style_file.relative_path_from(source_path.dirname).to_s.inspect))
            else
              logger.info "Cannot find and update <template> in document template #{nature}"
            end
          end
          # Writes source
          f.write(document.to_s)
        end
      end
      # Remove .jasper file to force reloading
      Dir.glob(source_path.dirname.join('*.jasper')).each do |file|
        FileUtils.rm_f(file)
      end
    end
  end

  # Updates archiving methods of other templates of same nature
  after_save do
    if archiving.to_s =~ /\_of\_template$/
      self.class.where('nature = ? AND NOT archiving LIKE ? AND id != ?', nature, '%_of_template', id).update_all("archiving = archiving || '_of_template'")
    else
      self.class.where('nature = ? AND id != ?', nature, id).update_all(archiving: archiving)
    end
  end

  # Always after protect on destroy
  after_destroy do
    FileUtils.rm_rf(source_dir) if source_dir.exist?
  end

  # Install the source of a document template
  # with all its dependencies
  attr_writer :source

  # Returns source value
  attr_reader :source

  # Returns the expected dir for the source file
  def source_dir
    self.class.sources_root.join(id.to_s)
  end

  # Returns the expected path for the source file
  def source_path
    source_dir.join('content.xml')
  end

  # Print a document with the given datasource and return raw data
  # Store if needed by template
  # @param datasource XML representation of data used by the template
  def print(datasource, key, format = :pdf, options = {})
    # Load the report
    report = Beardley::Report.new(source_path, locale: 'i18n.iso2'.t)
    # Call it with datasource
    data = report.send("to_#{format}", datasource)
    # Archive the document according to archiving method. See #document method.
    document(data, key, format, options)
    # Returns only the data (without filename)
    data
  end

  # Export a document with the given datasource and return path file
  # Store if needed by template
  # @param datasource XML representation of data used by the template
  def export(datasource, key, format = :pdf, options = {})
    # Load the report
    report = Beardley::Report.new(source_path, locale: 'i18n.iso2'.t)
    # Call it with datasource
    path = Pathname.new(report.to_file(format, datasource))
    # Archive the document according to archiving method. See #document method.
    if document = self.document(path, key, format, options)
      FileUtils.rm_rf(path)
      path = document.file.path(:original)
    end
    # Returns only the path
    path
  end

  # Returns the list of formats of the templates
  def formats
    (self['formats'].blank? ? Ekylibre::Reporting.formats : self['formats'].strip.split(/[\s\,]+/))
  end

  def formats=(value)
    self['formats'] = (value.is_a?(Array) ? value.join(', ') : value.to_s)
  end

  # Archive the document using the given archiving method
  def document(data_or_path, key, _format, options = {})
    return nil if archiving_none? || archiving_none_of_template?
    # Gets historic of document
    archives = Document.where(nature: nature, key: key).where.not(template_id: nil)
    archives_of_template = archives.where(template_id: id)
    # Checks if archiving is expected
    return nil unless (archiving_first? && archives.empty?) ||
                      (archiving_first_of_template? && archives_of_template.empty?) ||
                      archiving.to_s =~ /\A(last|all)(\_of\_template)?\z/
    # Lists last documents to remove after archiving
    removables = []
    if archiving_last?
      removables = archives.pluck(:id)
    elsif archiving_last_of_template?
      removables = archives_of_template.pluck(:id)
    end
    # Creates document if not exist
    document = Document.create!(nature: nature, key: key, name: (options[:name] || tc('document_name', nature: nature.l, key: key)), file: File.open(data_or_path), template_id: id)
    # Removes useless docs
    Document.destroy removables
    document
  end

  # Search paths for the bundled default template sources.
  @@load_path = []
  mattr_accessor :load_path

  class << self
    # Print document with default active template for the given nature
    # Returns nil if no template found.
    def print(nature, datasource, key, format = :pdf, options = {})
      if template = find_by(nature: nature, by_default: true, active: true)
        return template.print(datasource, key, format, options)
      end
      nil
    end

    # Returns the root directory for the document templates's sources
    def sources_root
      Ekylibre::Tenant.private_directory.join('reporting')
    end

    # Compute fallback chain for a given document nature
    def template_fallbacks(nature, locale)
      stack = []
      load_path.each do |path|
        root = path.join(locale, 'reporting')
        stack << root.join("#{nature}.xml")
        stack << root.join("#{nature}.jrxml")
        # Specialized sale/purchase natures fall back to a generic source.
        fallback = {
          sales_order: :sale,
          sales_estimate: :sale,
          sales_invoice: :sale,
          purchases_order: :purchase,
          purchases_estimate: :purchase,
          purchases_invoice: :purchase
        }[nature.to_sym]
        if fallback
          stack << root.join("#{fallback}.xml")
          stack << root.join("#{fallback}.jrxml")
        end
      end
      stack
    end

    # Loads in DB all default document templates
    def load_defaults(options = {})
      locale = (options[:locale] || Preference[:language] || I18n.locale).to_s
      Ekylibre::Record::Base.transaction do
        manageds = where(managed: true).select(&:destroyable?)
        for nature in self.nature.values
          if source = template_fallbacks(nature, locale).detect(&:exist?)
            File.open(source, 'rb:UTF-8') do |f|
              unless template = find_by(nature: nature, managed: true)
                template = new(nature: nature, managed: true, active: true, by_default: false, archiving: 'last')
              end
              manageds.delete(template)
              template.attributes = { source: f, language: locale }
              template.name ||= template.nature.l
              template.save!
            end
            Rails.logger.info "Load a default document template #{nature}"
          else
            Rails.logger.warn "Cannot load a default document template #{nature}: No file found at #{source}"
          end
        end
        # Drop managed templates whose default source no longer exists.
        destroy(manageds.map(&:id))
      end
      true
    end
  end
end
# Load default path: built-in template sources ship under config/locales.
DocumentTemplate.load_path << Rails.root.join('config', 'locales')
|
# frozen_string_literal: true
# A leave request raised by an employee and reviewed by a manager.
# The approval lifecycle is persisted in the +status+ column via AASM.
class LeaveApplication < ApplicationRecord
  before_save :calculate_hours

  belongs_to :user
  belongs_to :manager, class_name: "User", foreign_key: "manager_id"
  validates_presence_of :leave_type, :description
  acts_as_paranoid

  # Supported categories of leave.
  LEAVE_TYPE = %i(annual bonus personal sick).freeze

  include AASM
  include SignatureConcern

  aasm column: :status do
    state :pending, initial: true
    state :approved
    state :rejected
    state :closed

    # Approving or rejecting records the acting manager's signature.
    event :approve, after: proc { |manager| sign(manager) } do
      transitions to: :approved, from: [:pending, :rejected]
    end
    event :reject, after: proc { |manager| sign(manager) } do
      transitions to: :rejected, from: [:pending, :approved]
    end
    event :revise do
      transitions to: :pending, from: [:pending, :approved, :rejected]
    end
    event :close do
      transitions to: :closed, from: [:pending, :approved, :rejected]
    end
  end

  scope :is_manager, -> { all }
  scope :is_employee, ->(current_user) { where(user_id: current_user.id) }

  private

  # Derives the requested hours from the start/end timestamps before
  # every save.  (Removed a leftover debugging `puts end_time` that
  # wrote to stdout on each save.)
  def calculate_hours
    self.hours = (end_time - start_time) / 3600.0
  end
end
Revised the AASM transition methods.
# frozen_string_literal: true

# Revised LeaveApplication: adds a +canceled+ state and tightens the
# allowed transitions (approve/reject only from pending).
class LeaveApplication < ApplicationRecord
  # Recompute the requested duration on every save.
  before_save :calculate_hours

  belongs_to :user
  belongs_to :manager, class_name: "User", foreign_key: "manager_id"

  validates_presence_of :leave_type, :description

  # Soft delete: records are flagged, not physically destroyed.
  acts_as_paranoid

  LEAVE_TYPE = %i(annual bonus personal sick).freeze

  include AASM
  include SignatureConcern

  aasm column: :status do
    state :pending, initial: true
    state :approved
    state :rejected
    state :canceled
    state :closed

    # Approving/rejecting records the acting manager's signature
    # (sign comes from SignatureConcern).
    event :approve, after: proc { |manager| sign(manager) } do
      transitions to: :approved, from: [:pending]
    end

    event :reject, after: proc { |manager| sign(manager) } do
      transitions to: :rejected, from: [:pending]
    end

    event :revise do
      transitions to: :pending, from: [:pending, :rejected]
    end

    event :cancel do
      # BUGFIX: was `transition` (singular), which is not part of the
      # AASM event DSL, so the cancel event defined no transitions.
      transitions to: :canceled, from: [:pending, :approved, :rejected]
    end

    event :close do
      transitions to: :closed, from: [:canceled, :approved, :rejected]
    end
  end

  private

  # Derive the requested leave duration in hours from the start/end
  # timestamps. (A leftover debug `puts end_time` was removed here.)
  def calculate_hours
    self.hours = (end_time - start_time) / 3600.0
  end
end
# ----------------------------------------------------------------------
# Tracks which CS-major course requirements a user has fulfilled and
# suggests courses for the requirements still outstanding.
class MajorRequirement < ActiveRecord::Base
  attr_accessible :cs_sixtyonea
  attr_accessible :cs_sixtyoneb
  attr_accessible :cs_sixtyonec
  attr_accessible :math_onea
  attr_accessible :math_oneb
  attr_accessible :math_fiftyfour
  attr_accessible :cs_seventy
  attr_accessible :ee_fourty
  attr_accessible :ee_twenty
  attr_accessible :ee_fourtytwo
  attr_accessible :cs_oneseventy
  attr_accessible :cs_onesixtytwo
  attr_accessible :cs_onefifty
  attr_accessible :cs_onefiftytwo
  attr_accessible :cs_onesixty
  attr_accessible :cs_onesixtyone
  attr_accessible :cs_onesixtyfour
  attr_accessible :cs_onesixtynine
  attr_accessible :cs_oneeightyfour
  attr_accessible :cs_oneeightysix
  attr_accessible :cs_oneeightyeight

  # Requirement flag => catalog name of the course that satisfies it.
  # Drives all three class methods below; keep in sync with the
  # attr_accessible list above.
  REQUIREMENT_COURSES = {
    cs_sixtyonea: "CS 61A", cs_sixtyoneb: "CS 61B", cs_sixtyonec: "CS 61C",
    math_onea: "Math 1A", math_oneb: "Math 1B", math_fiftyfour: "Math 54",
    cs_seventy: "CS 70", ee_fourty: "EE 40", ee_twenty: "EE 20",
    ee_fourtytwo: "EE 42", cs_oneseventy: "CS 170", cs_onesixtytwo: "CS 162",
    cs_onefifty: "CS 150", cs_onefiftytwo: "CS 152", cs_onesixty: "CS 160",
    cs_onesixtyone: "CS 161", cs_onesixtyfour: "CS 164",
    cs_onesixtynine: "CS 169", cs_oneeightyfour: "CS 184",
    cs_oneeightysix: "CS 186", cs_oneeightyeight: "CS 188"
  }.freeze

  # Copies every requirement flag from input_requirements into
  # output_requirements (both hash-like / model objects).
  def self.set_requirements(output_requirements, input_requirements)
    REQUIREMENT_COURSES.each_key do |key|
      output_requirements[key] = input_requirements[key]
    end
  end

  # Integer percentage (0..100, truncated) of requirements the user has
  # completed. Divides by the table size rather than a hard-coded 21 so
  # adding a course keeps the math correct.
  def self.progress(user)
    finished = REQUIREMENT_COURSES.each_key.count { |key| user[key] == true }
    finished * 100 / REQUIREMENT_COURSES.size
  end

  # Courses still needed: looks up each course whose flag is exactly
  # false. (Preserves the original behavior of appending nil when a
  # course name is missing from the catalog.)
  def self.get_courses(fufilled_requirements)
    REQUIREMENT_COURSES.each_with_object([]) do |(key, name), courses|
      courses << Course.find_by_name(name) if fufilled_requirements[key] == false
    end
  end
end
# Revision note: DRY refactor of MajorRequirement below.
# DRY variant of MajorRequirement: a single requirement-flag => course
# catalog-name table drives all three class methods.
class MajorRequirement < ActiveRecord::Base
  # Class-instance variable (not a @@class var); frozen so the shared
  # table cannot be mutated at runtime.
  @cs_majors = {
    cs_sixtyonea: "CS 61A", cs_sixtyoneb: "CS 61B", cs_sixtyonec: "CS 61C",
    math_onea: "Math 1A", math_oneb: "Math 1B", math_fiftyfour: "Math 54",
    cs_seventy: "CS 70", ee_fourty: "EE 40", ee_twenty: "EE 20",
    ee_fourtytwo: "EE 42", cs_oneseventy: "CS 170", cs_onesixtytwo: "CS 162",
    cs_onefifty: "CS 150", cs_onefiftytwo: "CS 152", cs_onesixty: "CS 160",
    cs_onesixtyone: "CS 161", cs_onesixtyfour: "CS 164",
    cs_onesixtynine: "CS 169", cs_oneeightyfour: "CS 184",
    cs_oneeightysix: "CS 186", cs_oneeightyeight: "CS 188"
  }.freeze

  # Copies every requirement flag from input_requirements into
  # output_requirements (both hash-like / model objects).
  def self.set_requirements(output_requirements, input_requirements)
    @cs_majors.each_key { |key| output_requirements[key] = input_requirements[key] }
  end

  # Integer percentage (0..100, truncated) of requirements the user has
  # completed. Divides by the table size rather than a hard-coded 21 so
  # adding a course keeps the math correct.
  def self.progress(user)
    finished = @cs_majors.each_key.count { |key| user[key] == true }
    finished * 100 / @cs_majors.size
  end

  # Courses still needed: looks up each course whose flag is exactly
  # false. (Preserves the original behavior of appending nil when a
  # course name is missing from the catalog.)
  def self.get_courses(fufilled_requirements)
    @cs_majors.each_with_object([]) do |(key, name), courses|
      courses << Course.find_by_name(name) if fufilled_requirements[key] == false
    end
  end
end
# ----------------------------------------------------------------------
#!/usr/local/bin/ruby -w

# Copies every instance variable that both objects expose through an
# attr_accessor-style getter/setter pair from +source+ to +destination+,
# and returns +destination+.
#
# BUGFIX: the old implementation carried the `previous` method name from
# the source scan into the destination scan (so the first comparison in
# the second loop used stale state), and it relied on a setter appearing
# immediately after its getter in `instance_methods` — an ordering Ruby
# does not guarantee. Detection is now order-independent.
def mapper(source, destination)
  shared = accessor_names_of(source) & accessor_names_of(destination)
  shared.each do |name|
    destination.instance_variable_set("@#{name}", source.instance_variable_get("@#{name}"))
  end
  destination
end

# Names for which the object's class defines both a reader +name+ and a
# writer +name=+ directly (not inherited).
def accessor_names_of(object)
  methods = object.class.instance_methods(false).map(&:to_s)
  methods.select { |m| methods.include?("#{m}=") }
end
# Revision note: added ActiveRecord-object-to-domain mapping and vice versa.
#!/usr/local/bin/ruby -w
# Copies every shared accessor-backed instance variable from +source+ to
# +destination+ (both plain domain objects) and returns +destination+.
# (Removed the dead `src = []` / `dest = []` initializations that were
# immediately overwritten.)
def domain_to_domain_map(source, destination)
  shared = get_members(source) & get_members(destination)
  shared.each do |val|
    destination.instance_variable_set("@#{val}", source.instance_variable_get("@#{val}"))
  end
  destination
end
# Populates +view_model+'s instance variables from the matching
# ActiveRecord attributes on +model+ and returns the view model.
def model_to_view_model_mapping(model, view_model)
  shared = model.attribute_names & get_members(view_model)
  shared.each { |name| view_model.instance_variable_set("@#{name}", model[name]) }
  view_model
end
# Writes the view model's instance variables back into the matching
# ActiveRecord attributes on +model+ and returns the model.
def view_model_mapping_to_model(view_model, model)
  shared = get_members(view_model) & model.attribute_names
  shared.each { |name| model[name] = view_model.instance_variable_get("@#{name}") }
  model
end
# return the array of all properties of class
def get_members(source)
attributes = []
previous =''
source.class.instance_methods(false).each do |t|
if previous +'=' == t.to_s then
attributes.push(previous)
end
previous = t.to_s
end
return attributes
end
# TODO: not implemented — placeholder for mapping an ActiveRecord
# model's attributes onto a plain domain object.
def model_to_domain_map(source, destination)
end
# TODO: not implemented — placeholder for mapping a plain domain
# object's attributes back onto an ActiveRecord model.
# (Removed a stray "|" artifact that followed `end` and broke parsing.)
def domain_to_model_map(source, destination)
end
# A citizen spending proposal (participatory budgeting). Proposals are
# taggable, searchable, votable, and soft-deleted via hidden_at.
# Administrators/valuators assess feasibility; voting support is tracked
# separately for city-wide and district-wide proposals.
class SpendingProposal < ActiveRecord::Base
  include Measurable
  include Sanitizable
  include Taggable
  include Searchable

  apply_simple_captcha
  acts_as_votable
  # Soft delete: rows are hidden (hidden_at) rather than destroyed.
  acts_as_paranoid column: :hidden_at
  include ActsAsParanoidAliases

  # with_hidden so a proposal keeps its author even after the user record
  # is hidden.
  belongs_to :author, -> { with_hidden }, class_name: 'User', foreign_key: 'author_id'
  belongs_to :geozone
  belongs_to :administrator
  has_many :valuation_assignments, dependent: :destroy
  has_many :valuators, through: :valuation_assignments
  has_many :comments, as: :commentable

  validates :title, presence: true
  validates :author, presence: true
  validates :description, presence: true
  validates :title, length: { in: 4..SpendingProposal.title_max_length }
  validates :description, length: { maximum: 10000 }
  validates :terms_of_service, acceptance: { allow_nil: false }, on: :create

  # NOTE(review): the `default` and `seed` lambda params below are
  # accepted but unused — confirm whether callers rely on them.
  scope :sort_by_confidence_score, -> (default=nil) { reorder(confidence_score: :desc, id: :desc) }
  scope :sort_by_random, -> (seed) { reorder("RANDOM()") }
  scope :valuation_open, -> { where(valuation_finished: false) }
  scope :without_admin, -> { valuation_open.where(administrator_id: nil) }
  scope :managed, -> { valuation_open.where(valuation_assignments_count: 0).where("administrator_id IS NOT ?", nil) }
  scope :valuating, -> { valuation_open.where("valuation_assignments_count > 0 AND valuation_finished = ?", false) }
  scope :valuation_finished, -> { where(valuation_finished: true) }
  scope :feasible, -> { where(feasible: true) }
  scope :unfeasible, -> { valuation_finished.where(feasible: false) }
  scope :not_unfeasible, -> { where("feasible IS ? OR feasible = ?", nil, true) }
  scope :by_forum, -> { where(forum: true) }
  scope :by_admin, -> (admin) { where(administrator_id: admin.presence) }
  scope :by_tag, -> (tag_name) { tagged_with(tag_name) }
  scope :by_valuator, -> (valuator) { where("valuation_assignments.valuator_id = ?", valuator.presence).joins(:valuation_assignments) }
  scope :for_render, -> { includes(:geozone) }
  scope :district_wide, -> { where.not(geozone_id: nil) }
  scope :city_wide, -> { where(geozone_id: nil) }

  before_save :calculate_confidence_score
  before_validation :set_responsible_name

  # Description marked html_safe for rendering (content is sanitized via
  # Sanitizable).
  def description
    super.try :html_safe
  end

  # Whitelists the param keys usable by scoped_filter.
  def self.filter_params(params)
    params.select{|x,_| %w{geozone_id administrator_id tag_name valuator_id}.include? x.to_s }
  end

  # Chains whichever filters are present in params, then the named scope
  # in current_filter, preloading associations for rendering.
  def self.scoped_filter(params, current_filter)
    results = self
    results = results.by_geozone(params[:geozone_id]) if params[:geozone_id].present?
    results = results.by_admin(params[:administrator_id]) if params[:administrator_id].present?
    results = results.by_tag(params[:tag_name]) if params[:tag_name].present?
    results = results.by_valuator(params[:valuator_id]) if params[:valuator_id].present?
    results = results.send(current_filter) if current_filter.present?
    results.includes(:geozone, administrator: :user, valuators: :user)
  end

  # Field => full-text-search weight ('A' highest).
  def searchable_values
    { title => 'A',
      author.username => 'B',
      geozone.try(:name) => 'B',
      description => 'C'
    }
  end

  def self.search(terms)
    self.pg_search(terms)
  end

  # 'all' means explicitly city-wide (geozone is nil); otherwise filter
  # by the given geozone id.
  def self.by_geozone(geozone)
    if geozone == 'all'
      where(geozone_id: nil)
    else
      where(geozone_id: geozone.presence)
    end
  end

  # Three-valued feasibility: true / false / nil (undecided).
  def feasibility
    case feasible
    when true
      "feasible"
    when false
      "not_feasible"
    else
      "undefined"
    end
  end

  def unfeasible_email_pending?
    unfeasible_email_sent_at.blank? && unfeasible? && valuation_finished?
  end

  # Only counts as unfeasible once valuation has finished.
  def unfeasible?
    feasible == false && valuation_finished == true
  end

  def valuation_finished?
    valuation_finished
  end

  # Representative (forum) votes are subtracted and replaced by the
  # delegated votes they stand for, so represented users who also voted
  # directly are not double counted.
  def total_votes
    cached_votes_up + physical_votes + delegated_votes - forum_votes
  end

  # One delegated vote per represented user who has not voted directly.
  def delegated_votes
    count = 0
    representative_voters.each do |voter|
      count += voter.forum.represented_users.select { |u| !u.voted_for?(self) }.count
    end
    return count
  end

  def representative_voters
    Vote.representative_votes.for_spending_proposals(self).collect(&:voter)
  end

  def forum_votes
    Vote.representative_votes.for_spending_proposals(self).count
  end

  # Human-readable identifier, e.g. "2016-123-A4" when an administrator
  # is assigned.
  def code
    "#{created_at.strftime('%Y')}-#{id}" + (administrator.present? ? "-A#{administrator.id}" : "")
  end

  def send_unfeasible_email
    Mailer.unfeasible_spending_proposal(self).deliver_later
    update(unfeasible_email_sent_at: Time.now)
  end

  # Returns a symbolic reason why +user+ cannot vote, or nil when voting
  # is allowed. Checks run in order: login/verification, feasibility,
  # organization, global voting switch, then remaining supports for the
  # proposal's ambit (city vs district).
  def reason_for_not_being_votable_by(user)
    return :not_logged_in unless user
    return :not_verified unless user.can?(:vote, SpendingProposal)
    return :unfeasible if unfeasible?
    return :organization if user.organization?
    return :not_voting_allowed if Setting["feature.spending_proposal_features.voting_allowed"].blank?
    if city_wide?
      return :no_city_supports_available unless user.city_wide_spending_proposals_supported_count > 0
    else # district_wide
      # A user who already supported proposals in one district may not
      # switch districts.
      if user.supported_spending_proposals_geozone_id.present? &&
         geozone_id != user.supported_spending_proposals_geozone_id
        return :different_district_assigned
      end
      return :no_district_supports_available unless user.district_wide_spending_proposals_supported_count > 0
    end
  end

  def votable_by?(user)
    reason_for_not_being_votable_by(user).blank?
  end

  # Registers the vote and decrements the user's remaining supports for
  # the relevant ambit; district votes also pin the user's geozone.
  def register_vote(user, vote_value)
    if votable_by?(user)
      vote_by(voter: user, vote: vote_value)
      if vote_registered?
        if city_wide?
          count = user.city_wide_spending_proposals_supported_count
          user.update(city_wide_spending_proposals_supported_count: count - 1)
        else
          count = user.district_wide_spending_proposals_supported_count
          user.update(district_wide_spending_proposals_supported_count: count - 1,
                      supported_spending_proposals_geozone_id: self.geozone_id)
        end
      end
    end
  end

  def district_wide?
    geozone.present?
  end

  def city_wide?
    !district_wide?
  end

  # NOTE(review): both arguments are total_votes — confirm whether the
  # second argument of ScoreCalculator.confidence_score should be a
  # different total.
  def calculate_confidence_score
    self.confidence_score = ScoreCalculator.confidence_score(total_votes, total_votes)
  end

  def set_responsible_name
    self.responsible_name = author.try(:document_number) if author.try(:document_number).present?
  end
end
# Revision note: changes reason order (not_voting_allowed checked first).
# A citizen spending proposal (participatory budgeting). Proposals are
# taggable, searchable, votable, and soft-deleted via hidden_at.
# Administrators/valuators assess feasibility; voting support is tracked
# separately for city-wide and district-wide proposals.
# This revision checks the global voting switch first in
# reason_for_not_being_votable_by.
class SpendingProposal < ActiveRecord::Base
  include Measurable
  include Sanitizable
  include Taggable
  include Searchable

  apply_simple_captcha
  acts_as_votable
  # Soft delete: rows are hidden (hidden_at) rather than destroyed.
  acts_as_paranoid column: :hidden_at
  include ActsAsParanoidAliases

  # with_hidden so a proposal keeps its author even after the user record
  # is hidden.
  belongs_to :author, -> { with_hidden }, class_name: 'User', foreign_key: 'author_id'
  belongs_to :geozone
  belongs_to :administrator
  has_many :valuation_assignments, dependent: :destroy
  has_many :valuators, through: :valuation_assignments
  has_many :comments, as: :commentable

  validates :title, presence: true
  validates :author, presence: true
  validates :description, presence: true
  validates :title, length: { in: 4..SpendingProposal.title_max_length }
  validates :description, length: { maximum: 10000 }
  validates :terms_of_service, acceptance: { allow_nil: false }, on: :create

  # NOTE(review): the `default` and `seed` lambda params below are
  # accepted but unused — confirm whether callers rely on them.
  scope :sort_by_confidence_score, -> (default=nil) { reorder(confidence_score: :desc, id: :desc) }
  scope :sort_by_random, -> (seed) { reorder("RANDOM()") }
  scope :valuation_open, -> { where(valuation_finished: false) }
  scope :without_admin, -> { valuation_open.where(administrator_id: nil) }
  scope :managed, -> { valuation_open.where(valuation_assignments_count: 0).where("administrator_id IS NOT ?", nil) }
  scope :valuating, -> { valuation_open.where("valuation_assignments_count > 0 AND valuation_finished = ?", false) }
  scope :valuation_finished, -> { where(valuation_finished: true) }
  scope :feasible, -> { where(feasible: true) }
  scope :unfeasible, -> { valuation_finished.where(feasible: false) }
  scope :not_unfeasible, -> { where("feasible IS ? OR feasible = ?", nil, true) }
  scope :by_forum, -> { where(forum: true) }
  scope :by_admin, -> (admin) { where(administrator_id: admin.presence) }
  scope :by_tag, -> (tag_name) { tagged_with(tag_name) }
  scope :by_valuator, -> (valuator) { where("valuation_assignments.valuator_id = ?", valuator.presence).joins(:valuation_assignments) }
  scope :for_render, -> { includes(:geozone) }
  scope :district_wide, -> { where.not(geozone_id: nil) }
  scope :city_wide, -> { where(geozone_id: nil) }

  before_save :calculate_confidence_score
  before_validation :set_responsible_name

  # Description marked html_safe for rendering (content is sanitized via
  # Sanitizable).
  def description
    super.try :html_safe
  end

  # Whitelists the param keys usable by scoped_filter.
  def self.filter_params(params)
    params.select{|x,_| %w{geozone_id administrator_id tag_name valuator_id}.include? x.to_s }
  end

  # Chains whichever filters are present in params, then the named scope
  # in current_filter, preloading associations for rendering.
  def self.scoped_filter(params, current_filter)
    results = self
    results = results.by_geozone(params[:geozone_id]) if params[:geozone_id].present?
    results = results.by_admin(params[:administrator_id]) if params[:administrator_id].present?
    results = results.by_tag(params[:tag_name]) if params[:tag_name].present?
    results = results.by_valuator(params[:valuator_id]) if params[:valuator_id].present?
    results = results.send(current_filter) if current_filter.present?
    results.includes(:geozone, administrator: :user, valuators: :user)
  end

  # Field => full-text-search weight ('A' highest).
  def searchable_values
    { title => 'A',
      author.username => 'B',
      geozone.try(:name) => 'B',
      description => 'C'
    }
  end

  def self.search(terms)
    self.pg_search(terms)
  end

  # 'all' means explicitly city-wide (geozone is nil); otherwise filter
  # by the given geozone id.
  def self.by_geozone(geozone)
    if geozone == 'all'
      where(geozone_id: nil)
    else
      where(geozone_id: geozone.presence)
    end
  end

  # Three-valued feasibility: true / false / nil (undecided).
  def feasibility
    case feasible
    when true
      "feasible"
    when false
      "not_feasible"
    else
      "undefined"
    end
  end

  def unfeasible_email_pending?
    unfeasible_email_sent_at.blank? && unfeasible? && valuation_finished?
  end

  # Only counts as unfeasible once valuation has finished.
  def unfeasible?
    feasible == false && valuation_finished == true
  end

  def valuation_finished?
    valuation_finished
  end

  # Representative (forum) votes are subtracted and replaced by the
  # delegated votes they stand for, so represented users who also voted
  # directly are not double counted.
  def total_votes
    cached_votes_up + physical_votes + delegated_votes - forum_votes
  end

  # One delegated vote per represented user who has not voted directly.
  def delegated_votes
    count = 0
    representative_voters.each do |voter|
      count += voter.forum.represented_users.select { |u| !u.voted_for?(self) }.count
    end
    return count
  end

  def representative_voters
    Vote.representative_votes.for_spending_proposals(self).collect(&:voter)
  end

  def forum_votes
    Vote.representative_votes.for_spending_proposals(self).count
  end

  # Human-readable identifier, e.g. "2016-123-A4" when an administrator
  # is assigned.
  def code
    "#{created_at.strftime('%Y')}-#{id}" + (administrator.present? ? "-A#{administrator.id}" : "")
  end

  def send_unfeasible_email
    Mailer.unfeasible_spending_proposal(self).deliver_later
    update(unfeasible_email_sent_at: Time.now)
  end

  # Returns a symbolic reason why +user+ cannot vote, or nil when voting
  # is allowed. The global voting switch is checked FIRST in this
  # revision, before any per-user checks.
  def reason_for_not_being_votable_by(user)
    return :not_voting_allowed if Setting["feature.spending_proposal_features.voting_allowed"].blank?
    return :not_logged_in unless user
    return :not_verified unless user.can?(:vote, SpendingProposal)
    return :unfeasible if unfeasible?
    return :organization if user.organization?
    if city_wide?
      return :no_city_supports_available unless user.city_wide_spending_proposals_supported_count > 0
    else # district_wide
      # A user who already supported proposals in one district may not
      # switch districts.
      if user.supported_spending_proposals_geozone_id.present? &&
         geozone_id != user.supported_spending_proposals_geozone_id
        return :different_district_assigned
      end
      return :no_district_supports_available unless user.district_wide_spending_proposals_supported_count > 0
    end
  end

  def votable_by?(user)
    reason_for_not_being_votable_by(user).blank?
  end

  # Registers the vote and decrements the user's remaining supports for
  # the relevant ambit; district votes also pin the user's geozone.
  def register_vote(user, vote_value)
    if votable_by?(user)
      vote_by(voter: user, vote: vote_value)
      if vote_registered?
        if city_wide?
          count = user.city_wide_spending_proposals_supported_count
          user.update(city_wide_spending_proposals_supported_count: count - 1)
        else
          count = user.district_wide_spending_proposals_supported_count
          user.update(district_wide_spending_proposals_supported_count: count - 1,
                      supported_spending_proposals_geozone_id: self.geozone_id)
        end
      end
    end
  end

  def district_wide?
    geozone.present?
  end

  def city_wide?
    !district_wide?
  end

  # NOTE(review): both arguments are total_votes — confirm whether the
  # second argument of ScoreCalculator.confidence_score should be a
  # different total.
  def calculate_confidence_score
    self.confidence_score = ScoreCalculator.confidence_score(total_votes, total_votes)
  end

  def set_responsible_name
    self.responsible_name = author.try(:document_number) if author.try(:document_number).present?
  end
end
# ----------------------------------------------------------------------
require 'open-uri'
require 'fileutils'
# require 'zip'
# run incremental load with: bundle exec rake db:beta_load[1,incremental]
# run full load with: bundle exec rake db:beta_load[1,full]
include ActionView::Helpers::DateHelper
class StudyJsonRecord < ActiveRecord::Base
self.table_name = 'ctgov_beta.study_json_records'
def self.db_mgr
@db_mgr ||= Util::DbManager.new({search_path: 'ctgov_beta'})
end
def self.updater(params={})
@updater ||= Util::Updater.new(params)
end
def self.run(params={})
set_table_schema('ctgov_beta')
@broken_batch = {}
@study_build_failures = []
@full_featured = params[:full_featured] || false
@params = params
@type = params[:event_type] ? params[:event_type] : 'incremental'
@days_back = (params[:days_back] ? params[:days_back] : 2)
puts 'params set...'
print "now running #{@type}, #{@days_back} days back"
begin
@type == 'full' ? full : incremental
rescue => error
msg="#{error.message} (#{error.class} #{error.backtrace}"
puts"#{@type} load failed in run: #{msg}"
end
puts "broken----- #{@broken_batch}" if @type == 'incremental'
puts "failed to build #{@study_build_failures.uniq}"
puts "about to rerun batches"
sleep 5
rerun_batches(@broken_batch)
puts "still broken----- #{@broken_batch}" if @type == 'incremental'
puts "failed to build #{@study_build_failures.uniq}"
set_table_schema('ctgov')
puts comparison
end
def self.root_dir
"#{Rails.public_path}/static"
end
def self.json_file_directory
FileUtils.mkdir_p "#{root_dir}/json_downloads"
"#{root_dir}/json_downloads"
end
def self.download_all_studies(url='https://ClinicalTrials.gov/AllAPIJSON.zip')
tries ||= 5
file_name="#{json_file_directory}/#{Time.zone.now.strftime("%Y%m%d-%H")}.zip"
file = File.new file_name, 'w'
begin
if tries < 5
`wget -c #{url} -O #{file.path}`
else
`wget #{url} -O #{file.path}`
end
rescue Errno::ECONNRESET => e
if (tries -=1) > 0
puts " download failed. trying again..."
retry
end
end
file.binmode
file.size
file
end
def self.full
start_time = Time.current
study_download = download_all_studies
# finshed in about 12 hours
# total number we have 326614
# finshed in about 1 hour
# total number we should have 3131
# total number we have 1578
nct_ids = StudyJsonRecord.all.map(&:nct_id)
clear_out_data_for(nct_ids)
Zip::File.open(study_download.path) do |unzipped_folders|
puts "unzipped folders"
original_count = unzipped_folders.size
count_down = original_count
unzipped_folders.each do |file|
begin
contents = file.get_input_stream.read
json = JSON.parse(contents)
rescue
next unless json
end
study = json['FullStudy']
next unless study
save_single_study(study)
nct_id = study['Study']['ProtocolSection']['IdentificationModule']['NCTId']
puts "added NCTId #{nct_id} study_json_record: #{count_down} of #{original_count}"
count_down -= 1
end
end
seconds = Time.now - start_time
puts "finshed in #{time_ago_in_words(start_time)}"
puts "total number we have #{StudyJsonRecord.count}"
end
def self.incremental
start_time = Time.current
first_batch = json_data
# total_number is the number of studies available, meaning the total number in their database
total_number = first_batch['FullStudiesResponse']['NStudiesFound']
limit = (total_number/100.0).ceil
puts "batch 1 of #{limit}"
sleep 5
save_study_records(first_batch['FullStudiesResponse']['FullStudies'])
# since I already saved the first hundred studies I start the loop after that point
# studies must be retrieved in batches of 99,
# using min and max to determine the study to start with and the study to end with respectively (in that batch)
min = 101
max = 200
for x in 1..limit
puts "batch #{x + 1} of #{limit}"
sleep 5
fetch_studies(min, max)
min += 100
max += 100
end
seconds = Time.now - start_time
puts "finshed in #{time_ago_in_words(start_time)}"
puts "total number number of studies updated #{total_number}"
puts "total number of studies we have #{StudyJsonRecord.count}"
end
def self.fetch_studies(min=1, max=100)
begin
retries ||= 0
puts "try ##{ retries }"
# "https://clinicaltrials.gov/api/query/full_studies?expr=AREA[LastUpdatePostDate]RANGE[01/01/2020,%20MAX]&fmt=json"
url = "https://clinicaltrials.gov/api/query/full_studies?expr=#{time_range}&min_rnk=#{min}&max_rnk=#{max}&fmt=json"
puts url
data = json_data(url) || {}
data = data.dig('FullStudiesResponse', 'FullStudies')
save_study_records(data) if data
rescue
retry if (retries += 1) < 6
if retries >= 6
@broken_batch ||= {}
@broken_batch[url] = { min: min, max: max }
end
end
end
def self.rerun_batches(url_hash)
set_table_schema('ctgov_beta')
url_hash.each do |url, min_max|
puts "running #{url}"
fetch_studies(min_max[:min], min_max[:max])
end
end
def self.save_study_records(study_batch)
return unless study_batch
nct_id_array = study_batch.map{|study_data| study_data['Study']['ProtocolSection']['IdentificationModule']['NCTId'] }
clear_out_data_for(nct_id_array)
study_batch.each do |study_data|
save_single_study(study_data)
end
end
def self.save_single_study(study_data)
nct_id = study_data['Study']['ProtocolSection']['IdentificationModule']['NCTId']
record = StudyJsonRecord.find_by(nct_id: nct_id) || StudyJsonRecord.new(nct_id: nct_id)
record.content = study_data
record.saved_study_at = nil
record.download_date = Time.current
if record.save
record.build_study
else
puts "failed to save #{nct_id}"
byebug
end
end
def self.clear_out_data_for(nct_ids)
return if nct_ids.nil? || nct_ids.empty?
db_mgr.remove_indexes_and_constraints # Index significantly slow the load process.
db_mgr.clear_out_data_for(nct_ids)
delete_json_records(nct_ids)
db_mgr.add_indexes_and_constraints
end
def self.delete_json_records(nct_ids)
return if nct_ids.nil? || nct_ids.empty?
ids = nct_ids.map { |i| "'" + i.to_s + "'" }.join(",")
ActiveRecord::Base.connection.execute("DELETE FROM #{self.table_name} WHERE nct_id IN (#{ids})")
end
def self.json_data(url="https://clinicaltrials.gov/api/query/full_studies?expr=#{time_range}&min_rnk=1&max_rnk=100&fmt=json")
page = open(url)
JSON.parse(page.read)
end
def self.time_range
return nil if @type == 'full'
return nil unless @days_back != 'nil'
date = (Date.current - @days_back.to_i).strftime('%m/%d/%Y')
"AREA[LastUpdatePostDate]RANGE[#{date},%20MAX]"
end
def key_check(key)
key ||= {}
end
def get_boolean(val)
return nil unless val
return true if val.downcase=='yes'||val.downcase=='y'||val.downcase=='true'
return false if val.downcase=='no'||val.downcase=='n'||val.downcase=='false'
end
def get_date(str)
Date.parse(str) if str
end
def convert_date(str)
return nil unless str
return str.to_date.end_of_month if is_missing_the_day?(str)
get_date(str)
end
def is_missing_the_day?(str)
# use this method on string representations of dates. If only one space in the string, then the day is not provided.
(str.count ' ') == 1
end
def protocol_section
key_check(content['Study']['ProtocolSection'])
end
def results_section
key_check(content['Study']['ResultsSection'])
end
def derived_section
key_check(content['Study']['DerivedSection'])
end
def annotation_section
key_check(content['Study']['AnnotationSection'])
end
def document_section
key_check(content['Study']['DocumentSection'])
end
def contacts_location_module
key_check(protocol_section['ContactsLocationsModule'])
end
def locations_array
locations_list = key_check(contacts_location_module['LocationList'])
locations_list['Location'] || []
end
def study_data
protocol = protocol_section
return nil if protocol.empty?
status = protocol['StatusModule']
ident = protocol['IdentificationModule']
design = key_check(protocol['DesignModule'])
oversight = key_check(protocol['OversightModule'])
ipd_sharing = key_check(protocol['IPDSharingStatementModule'])
study_posted = status['StudyFirstPostDateStruct']
results_posted = key_check(status['ResultsFirstPostDateStruct'])
disp_posted = key_check(status['DispFirstPostDateStruct'])
last_posted = status['LastUpdatePostDateStruct']
start_date = key_check(status['StartDateStruct'])
completion_date = key_check(status['CompletionDateStruct'])
primary_completion_date = key_check(status['PrimaryCompletionDateStruct'])
results = key_check(content['Study']['ResultsSection'])
baseline = key_check(results['BaselineCharacteristicsModule'])
enrollment = key_check(design['EnrollmentInfo'])
expanded_access = status.dig('ExpandedAccessInfo', 'HasExpandedAccess')
expanded = key_check(design['ExpandedAccessTypes'])
biospec = key_check(design['BioSpec'])
arms_intervention = key_check(protocol['ArmsInterventionsModule'])
arms_group_list = key_check(arms_intervention['ArmGroupList'])
arms_groups = arms_group_list['ArmGroup'] || []
{
nct_id: nct_id,
nlm_download_date_description: download_date,
study_first_submitted_date: get_date(status['StudyFirstSubmitDate']),
results_first_submitted_date: get_date(status['ResultsFirstSubmitDate']),
disposition_first_submitted_date: get_date(status['DispFirstSubmitDate']),
last_update_submitted_date: get_date(status['LastUpdateSubmitDate']),
study_first_submitted_qc_date: status['StudyFirstSubmitQCDate'],
study_first_posted_date: study_posted['StudyFirstPostDate'],
study_first_posted_date_type: study_posted['StudyFirstPostDateType'],
results_first_submitted_qc_date: status['ResultsFirstSubmitQCDate'],
results_first_posted_date: results_posted['ResultsFirstPostDate'],
results_first_posted_date_type: results_posted['ResultsFirstPostDateType'],
disposition_first_submitted_qc_date: status['DispFirstSubmitQCDate'],
disposition_first_posted_date: disp_posted['DispFirstPostDate'],
disposition_first_posted_date_type: disp_posted['DispFirstPostDateType'],
last_update_submitted_qc_date: status['LastUpdateSubmitDate'],
last_update_posted_date: last_posted['LastUpdatePostDate'],
last_update_posted_date_type: last_posted['LastUpdatePostDateType'],
start_month_year: start_date['StartDate'],
start_date_type: start_date['StartDateType'],
start_date: convert_date(start_date['StartDate']),
verification_month_year: status['StatusVerifiedDate'],
verification_date: convert_date(status['StatusVerifiedDate']),
completion_month_year: completion_date['CompletionDate'],
completion_date_type: completion_date['CompletionDateType'],
completion_date: convert_date(completion_date['CompletionDate']),
primary_completion_month_year: primary_completion_date['PrimaryCompletionDate'],
primary_completion_date_type: primary_completion_date['PrimaryCompletionDateType'],
primary_completion_date: convert_date(primary_completion_date['PrimaryCompletionDate']),
target_duration: design['TargetDuration'],
study_type: design['StudyType'],
acronym: ident['Acronym'],
baseline_population: baseline['BaselinePopulationDescription'],
brief_title: ident['BriefTitle'],
official_title: ident['OfficialTitle'],
overall_status: status['OverallStatus'],
last_known_status: status['LastKnownStatus'],
phase: key_check(design['PhaseList'])['Phase'],
enrollment: enrollment['EnrollmentCount'],
enrollment_type: enrollment['EnrollmentType'],
source: ident.dig('Organization', 'OrgFullName'),
limitations_and_caveats: key_check(results['MoreInfoModule'])['LimitationsAndCaveats'],
number_of_arms: arms_groups.count,
number_of_groups: arms_groups.count,
why_stopped: status['WhyStopped'],
has_expanded_access: get_boolean(expanded_access),
expanded_access_type_individual: get_boolean(expanded['ExpAccTypeIndividual']),
expanded_access_type_intermediate: get_boolean(expanded['ExpAccTypeIntermediate']),
expanded_access_type_treatment: get_boolean(expanded['ExpAccTypeTreatment']),
has_dmc: get_boolean(oversight['OversightHasDMC']),
is_fda_regulated_drug: get_boolean(oversight['IsFDARegulatedDrug']),
is_fda_regulated_device: get_boolean(oversight['IsFDARegulatedDevice']),
is_unapproved_device: get_boolean(oversight['IsUnapprovedDevice']),
is_ppsd: get_boolean(oversight['IsPPSD']),
is_us_export: get_boolean(oversight['IsUSExport']),
biospec_retention: biospec['BioSpecRetention'],
biospec_description: biospec['BioSpecDescription'],
ipd_time_frame: ipd_sharing['IPDSharingTimeFrame'],
ipd_access_criteria: ipd_sharing['IPDSharingAccessCriteria'],
ipd_url: ipd_sharing['IPDSharingURL'],
plan_to_share_ipd: ipd_sharing['IPDSharing'],
plan_to_share_ipd_description: ipd_sharing['IPDSharingDescription']
}
end
# Builds one entry per arm/group in the ArmsInterventionsModule, pairing
# each design_group row with the interventions attached to that group.
# Returns nil when the study defines no arm groups.
def design_groups_data
  module_hash = key_check(protocol_section['ArmsInterventionsModule'])
  groups = key_check(module_hash['ArmGroupList'])['ArmGroup'] || []
  return nil if groups.empty?

  groups.map do |group|
    {
      design_group: {
        nct_id: nct_id,
        group_type: group['ArmGroupType'],
        title: group['ArmGroupLabel'],
        description: group['ArmGroupDescription']
      },
      design_group_interventions: design_group_interventions_data(group)
    }
  end
end
# Splits each "Type: Name" entry of an arm group's intervention list into
# rows used later to link design groups and interventions in the database.
# Returns nil when the group lists no interventions.
def design_group_interventions_data(arms_group)
  names = key_check(arms_group['ArmGroupInterventionList'])['ArmGroupInterventionName'] || []
  return nil if names.empty?

  names.map do |entry|
    # Entries look like "Drug: Aspirin". Only the first "type" segment is
    # split off; the remainder is rejoined since names may contain ": ".
    pieces = entry.split(': ')
    kind = pieces.shift
    {
      nct_id: nct_id,
      name: pieces.join(': '),
      type: kind,
      design_group: arms_group['ArmGroupLabel']
    }
  end
end
# One entry per intervention in the ArmsInterventionsModule, each bundled
# with its alternate names. Returns nil when no interventions exist.
def interventions_data
  module_hash = key_check(protocol_section['ArmsInterventionsModule'])
  interventions = key_check(module_hash['InterventionList'])['Intervention'] || []
  return nil if interventions.empty?

  interventions.map do |entry|
    {
      intervention: {
        nct_id: nct_id,
        intervention_type: entry['InterventionType'],
        name: entry['InterventionName'],
        description: entry['InterventionDescription']
      },
      intervention_other_names: intervention_other_names_data(entry)
    }
  end
end
# Alternate-name rows for one intervention hash.
# Returns nil when the intervention lists no other names.
def intervention_other_names_data(intervention)
  names = key_check(intervention['InterventionOtherNameList'])['InterventionOtherName'] || []
  return nil if names.empty?

  # intervention_id is filled in later, once the Intervention row exists.
  names.map { |other| { nct_id: nct_id, intervention_id: nil, name: other } }
end
# DetailedDescription text from the DescriptionModule, or nil when absent.
def detailed_description_data
  text = key_check(protocol_section['DescriptionModule'])['DetailedDescription']
  { nct_id: nct_id, description: text } if text
end
# BriefSummary text from the DescriptionModule, or nil when absent.
def brief_summary_data
  text = key_check(protocol_section['DescriptionModule'])['BriefSummary']
  { nct_id: nct_id, description: text } if text
end
# Renders an array of values as a single comma-separated string.
def self.make_list(array)
  array * ', '
end
# Flattens DesignModule/DesignInfo into a designs-table row.
# Returns nil when the study carries no DesignInfo at all.
def designs_data
  design = key_check(protocol_section['DesignModule'])
  info = key_check(design['DesignInfo'])
  # Guard hoisted above the sub-list unpacking: nothing below can produce
  # data when DesignInfo itself is absent (also drops the unused `protocol`
  # local the original carried).
  return nil if info.empty?

  masking = key_check(info['DesignMaskingInfo'])
  who_masked = key_check(masking['DesignWhoMaskedList'])['DesignWhoMasked'] || []
  observations = key_check(info['DesignObservationalModelList'])['DesignObservationalModel'] || []
  time_perspectives = key_check(info['DesignTimePerspectiveList'])['DesignTimePerspective'] || []
  {
    nct_id: nct_id,
    allocation: info['DesignAllocation'],
    observational_model: StudyJsonRecord.make_list(observations),
    intervention_model: info['DesignInterventionModel'],
    intervention_model_description: info['DesignInterventionModelDescription'],
    primary_purpose: info['DesignPrimaryPurpose'],
    time_perspective: StudyJsonRecord.make_list(time_perspectives),
    masking: masking['DesignMasking'],
    masking_description: masking['DesignMaskingDescription'],
    # The API uses both old and new spellings for who-is-masked values.
    subject_masked: is_masked?(who_masked, ['Subject', 'Participant']),
    caregiver_masked: is_masked?(who_masked, ['Caregiver', 'Care Provider']),
    investigator_masked: is_masked?(who_masked, ['Investigator']),
    outcomes_assessor_masked: is_masked?(who_masked, ['Outcomes Assessor'])
  }
end
# True when any term in query_array appears in who_masked_array
# (e.g. ["Participant", "Care Provider", "Investigator", "Outcomes Assessor"]).
# Returns nil — not false — on no match, so the DB column stays NULL.
def is_masked?(who_masked_array, query_array)
  hit = query_array.any? { |term| who_masked_array.try(:include?, term) }
  hit || nil
end
# Maps the EligibilityModule onto an eligibilities-table row.
# Missing ages default to the literal string 'N/A'.
def eligibility_data
  eligibility = key_check(protocol_section['EligibilityModule'])
  return nil if eligibility.empty?

  {
    nct_id: nct_id,
    sampling_method: eligibility['SamplingMethod'],
    population: eligibility['StudyPopulation'],
    maximum_age: eligibility['MaximumAge'] || 'N/A',
    minimum_age: eligibility['MinimumAge'] || 'N/A',
    gender: eligibility['Gender'],
    gender_based: get_boolean(eligibility['GenderBased']),
    gender_description: eligibility['GenderDescription'],
    healthy_volunteers: eligibility['HealthyVolunteers'],
    criteria: eligibility['EligibilityCriteria']
  }
end
# Recruitment / pre-assignment details from the ParticipantFlowModule,
# or nil when the module is absent.
def participant_flow_data
  flow = key_check(key_check(results_section)['ParticipantFlowModule'])
  return nil if flow.empty?

  {
    nct_id: nct_id,
    recruitment_details: flow['FlowRecruitmentDetails'],
    pre_assignment_details: flow['FlowPreAssignmentDetails']
  }
end
# Measurement rows from the BaselineCharacteristicsModule, bundled with the
# baseline result groups and counts needed to resolve result_group_id later
# (result_group_id is left nil here and linked at save time).
# Returns nil when the module is absent or lists no measures.
def baseline_measurements_data
  results = results_section
  baseline_characteristics_module = key_check(results['BaselineCharacteristicsModule'])
  return nil if baseline_characteristics_module.empty?
  baseline_measure_list = key_check(baseline_characteristics_module['BaselineMeasureList'])
  baseline_measures = baseline_measure_list['BaselineMeasure'] || []
  collection = {result_groups: baseline_result_groups_data, baseline_counts: baseline_counts_data, measurements: []}
  # NOTE(review): bare `return` (nil) here vs explicit `return nil` above —
  # same value, inconsistent style.
  return if baseline_measures.empty?
  # The API nests three levels deep: measure -> class -> category -> measurement.
  baseline_measures.each do |measure|
    baseline_class_list = key_check(measure['BaselineClassList'])
    baseline_classes = baseline_class_list['BaselineClass'] || []
    baseline_classes.each do |baseline_class|
      baseline_category_list = key_check(baseline_class['BaselineCategoryList'])
      baseline_categories = baseline_category_list['BaselineCategory'] || []
      baseline_categories.each do |baseline_category|
        measurement_list = key_check(baseline_category['BaselineMeasurementList'])
        measurements = measurement_list['BaselineMeasurement'] || []
        measurements.each do |measurement|
          param_value = measurement['BaselineMeasurementValue']
          dispersion_value = measurement['BaselineMeasurementSpread']
          collection[:measurements].push(
            nct_id: nct_id,
            result_group_id: nil,
            ctgov_beta_group_code: measurement['BaselineMeasurementGroupId'],
            classification: baseline_class['BaselineClassTitle'],
            category: baseline_category['BaselineCategoryTitle'],
            title: measure['BaselineMeasureTitle'],
            description: measure['BaselineMeasureDescription'],
            units: measure['BaselineMeasureUnitOfMeasure'],
            param_type: measure['BaselineMeasureParamType'],
            # values are stored both raw and as parsed floats (nil if unparseable)
            param_value: param_value,
            param_value_num: StudyJsonRecord.float(param_value),
            dispersion_type: measure['BaselineMeasureDispersionType'],
            dispersion_value: dispersion_value,
            dispersion_value_num: StudyJsonRecord.float(dispersion_value),
            dispersion_lower_limit: StudyJsonRecord.float(measurement['BaselineMeasurementLowerLimit']),
            dispersion_upper_limit: StudyJsonRecord.float(measurement['BaselineMeasurementUpperLimit']),
            explanation_of_na: measurement['BaselineMeasurementComment']
          )
        end
      end
    end
  end
  collection
end
# Parses +string+ as a Float, returning nil for anything unparseable
# (including nil) instead of raising. Replaces the inline `rescue nil`
# modifier, which silently swallowed every StandardError.
def self.float(string)
  Float(string, exception: false)
end
# Result-group rows for the baseline section (result_type 'Baseline').
def baseline_result_groups_data
  baseline = key_check(results_section['BaselineCharacteristicsModule'])
  groups = key_check(baseline['BaselineGroupList'])['BaselineGroup'] || []
  StudyJsonRecord.result_groups(groups, 'Baseline', 'Baseline', nct_id)
end
# One row per (denomination unit, group) pair from BaselineDenomList.
# Returns nil when the module defines no denominations.
def baseline_counts_data
  baseline = key_check(results_section['BaselineCharacteristicsModule'])
  denom_list = key_check(baseline['BaselineDenomList'])
  # 'BaselineDenom' is a list: unwrap with `|| []` like every other list in
  # this class. The original wrapped it in key_check (meant for hashes),
  # which would iterate [key, value] pairs instead of denom hashes.
  denoms = denom_list['BaselineDenom'] || []
  return nil if denoms.empty?

  collection = []
  denoms.each do |denom|
    # key_check guards a missing BaselineDenomCountList; the original
    # indexed it directly and raised NoMethodError on nil.
    counts = key_check(denom['BaselineDenomCountList'])['BaselineDenomCount'] || []
    counts.each do |count|
      collection.push(
        nct_id: nct_id,
        result_group_id: nil,
        ctgov_beta_group_code: count['BaselineDenomCountGroupId'],
        units: denom['BaselineDenomUnits'],
        scope: 'overall',
        count: count['BaselineDenomCountValue']
      )
    end
  end
  collection
end
# MeSH condition terms from the derived ConditionBrowseModule (see #browse).
def browse_conditions_data
  browse('Condition')
end
# MeSH intervention terms from the derived InterventionBrowseModule (see #browse).
def browse_interventions_data
  browse('Intervention')
end
# Extracts MeSH terms from the "#{type}BrowseModule" of the derived section.
# +type+ is 'Condition' or 'Intervention'. Returns nil when no terms exist.
def browse(type = 'Condition')
  browse_module = key_check(derived_section["#{type}BrowseModule"])
  meshes = key_check(browse_module["#{type}MeshList"])["#{type}Mesh"] || []
  return nil if meshes.empty?

  meshes.map do |mesh|
    term = mesh["#{type}MeshTerm"]
    { nct_id: nct_id, mesh_term: term, downcase_mesh_term: term.try(:downcase) }
  end
end
# Central-contact rows; the first listed contact is 'primary', the rest
# 'backup'. Returns nil when none are listed.
def central_contacts_data
  contacts = key_check(contacts_location_module['CentralContactList'])['CentralContact'] || []
  return nil if contacts.empty?

  contacts.each_with_index.map do |contact, position|
    {
      nct_id: nct_id,
      contact_type: position.zero? ? 'primary' : 'backup',
      name: contact['CentralContactName'],
      phone: contact['CentralContactPhone'],
      email: contact['CentralContactEMail']
    }
  end
end
# Condition rows (name plus a pre-computed lowercase copy for searching).
# Returns nil when the study lists no conditions.
def conditions_data
  conditions_module = key_check(protocol_section['ConditionsModule'])
  conditions = key_check(conditions_module['ConditionList'])['Condition'] || []
  return nil if conditions.empty?

  conditions.map do |condition|
    { nct_id: nct_id, name: condition, downcase_name: condition.try(:downcase) }
  end
end
# Rows for every country a study runs in (removed: false) plus countries
# that were removed from the record (removed: true).
# Returns nil when neither source yields any country.
def countries_data
  misc_module = key_check(derived_section['MiscInfoModule'])
  removed = key_check(misc_module['RemovedCountryList'])['RemovedCountry'] || []
  # Positive guard replaces the original double-negative
  # `return nil unless !a.empty? || !b.empty?`.
  return nil if locations_array.empty? && removed.empty?

  collection = locations_array.map do |location|
    { nct_id: nct_id, name: location['LocationCountry'], removed: false }
  end
  removed.each { |country| collection.push(nct_id: nct_id, name: country, removed: true) }
  collection
end
# Available-IPD document rows from the ReferencesModule, or nil when none.
def documents_data
  reference_module = key_check(protocol_section['ReferencesModule'])
  avail_ipds = key_check(reference_module['AvailIPDList'])['AvailIPD'] || []
  return nil if avail_ipds.empty?

  avail_ipds.map do |item|
    {
      nct_id: nct_id,
      document_id: item['AvailIPDId'],
      document_type: item['AvailIPDType'],
      url: item['AvailIPDURL'],
      comment: item['AvailIPDComment']
    }
  end
end
# One entry per study location: the facility row itself plus the contacts
# and investigators attached to it (facility_id is linked at save time).
# Returns nil when the study has no locations.
def facilities_data
  collection = []
  return nil if locations_array.empty?
  locations_array.each do |location|
    location_contact_list = key_check(location['LocationContactList'])
    location_contact = location_contact_list['LocationContact'] || []
    facility_contacts = []
    facility_investigators = []
    location_contact.each_with_index do |contact, index|
      contact_role = contact['LocationContactRole']
      # Roles containing Investigator/Study Chair are split out into the
      # investigators list; everything else is a plain contact.
      if contact_role =~ /Investigator|Study Chair/i
        facility_investigators.push(
          nct_id: nct_id,
          facility_id: nil,
          role: contact_role,
          name: contact['LocationContactName']
        )
      else
        facility_contacts.push(
          nct_id: nct_id,
          facility_id: nil,
          # NOTE(review): index counts ALL contacts, so if the first entry is
          # an investigator the first plain contact gets 'backup' — confirm
          # that is intended.
          contact_type: index == 0 ? 'primary' : 'backup',
          name: contact['LocationContactName'],
          email: contact['LocationContactEMail'],
          phone: contact['LocationContactPhone']
        )
      end
    end
    collection.push(
      facility: {
        nct_id: nct_id,
        status: location['LocationStatus'],
        name: location['LocationFacility'],
        city: location['LocationCity'],
        state: location['LocationState'],
        zip: location['LocationZip'],
        country: location['LocationCountry']
      },
      facility_contacts: facility_contacts,
      facility_investigators: facility_investigators
    )
  end
  collection
end
# Collects every identifier for the study: the sponsor's org_study_id,
# any NCT id aliases, and secondary ids.
# Returns nil when none of the three sources has data.
def id_information_data
  identification_module = key_check(protocol_section['IdentificationModule'])
  alias_list = key_check(identification_module['NCTIdAliasList'])
  nct_id_alias = alias_list['NCTIdAlias'] || []
  org_study_info = key_check(identification_module['OrgStudyIdInfo'])
  secondary_info_list = key_check(identification_module['SecondaryIdInfoList'])
  secondary_info = secondary_info_list['SecondaryIdInfo'] || []
  return if org_study_info.empty? && secondary_info.empty? && nct_id_alias.empty?
  # NOTE(review): when OrgStudyIdInfo is empty but aliases/secondary ids
  # exist, this still emits an org_study_id row with id_value nil — confirm
  # downstream consumers expect that placeholder row.
  collection = [{nct_id: nct_id, id_type: 'org_study_id', id_value: org_study_info['OrgStudyId']}]
  nct_id_alias.each do |nct_alias|
    collection.push(
      nct_id: nct_id, id_type: 'nct_alias', id_value: nct_alias
    )
  end
  secondary_info.each do |info|
    collection.push(
      nct_id: nct_id, id_type: 'secondary_id', id_value: info['SecondaryId']
    )
  end
  collection
end
# IPD-sharing information-type rows, or nil when none are declared.
def ipd_information_types_data
  sharing_module = key_check(protocol_section['IPDSharingStatementModule'])
  info_types = key_check(sharing_module['IPDSharingInfoTypeList'])['IPDSharingInfoType'] || []
  return nil if info_types.empty?

  info_types.map { |info| { nct_id: nct_id, name: info } }
end
# Keyword rows from the ConditionsModule (name plus lowercase search copy).
# Returns nil when the study has no keywords.
def keywords_data
  conditions_module = key_check(protocol_section['ConditionsModule'])
  keywords = key_check(conditions_module['KeywordList'])['Keyword'] || []
  return nil if keywords.empty?

  keywords.map do |keyword|
    # try(:downcase) mirrors conditions_data and avoids a NoMethodError if
    # the API ever hands back a non-string entry (the original called
    # keyword.downcase directly).
    { nct_id: nct_id, name: keyword, downcase_name: keyword.try(:downcase) }
  end
end
# See-also link rows from the ReferencesModule, or nil when none exist.
def links_data
  references_module = key_check(protocol_section['ReferencesModule'])
  see_also = key_check(references_module['SeeAlsoLinkList'])['SeeAlsoLink'] || []
  return nil if see_also.empty?

  see_also.map do |link|
    { nct_id: nct_id, url: link['SeeAlsoLinkURL'], description: link['SeeAlsoLinkLabel'] }
  end
end
# Milestone rows (period -> milestone -> achievement) bundled with the
# participant-flow result groups needed to resolve result_group_id later.
# Returns nil when there are no periods or no achievements at all.
def milestones_data
  participant_flow_module = key_check(results_section['ParticipantFlowModule'])
  flow_periods = key_check(participant_flow_module['FlowPeriodList'])['FlowPeriod'] || []
  collection = { result_groups: flow_result_groups_data, milestones: [] }
  return nil if flow_periods.empty?

  flow_periods.each do |period|
    # (dropped the original's unused `flow_period` local — the title is
    # read straight off `period` below)
    milestones = key_check(period['FlowMilestoneList'])['FlowMilestone'] || []
    milestones.each do |milestone|
      achievements = key_check(milestone['FlowAchievementList'])['FlowAchievement'] || []
      achievements.each do |achievement|
        collection[:milestones].push(
          nct_id: nct_id,
          result_group_id: nil,
          ctgov_beta_group_code: achievement['FlowAchievementGroupId'],
          title: milestone['FlowMilestoneType'],
          period: period['FlowPeriodTitle'],
          description: achievement['FlowAchievementComment'],
          count: achievement['FlowAchievementNumSubjects']
        )
      end
    end
  end
  return nil if collection[:milestones].empty?
  collection
end
# Result-group rows for the participant-flow section.
def flow_result_groups_data
  flow_module = key_check(results_section['ParticipantFlowModule'])
  groups = key_check(flow_module['FlowGroupList'])['FlowGroup'] || []
  StudyJsonRecord.result_groups(groups, 'Flow', 'Participant Flow', nct_id)
end
# Outcome-measure rows from the results OutcomeMeasuresModule, each paired
# with its counts, measurements and analyses (ids resolved at save time).
# Returns nil when the study reports no outcome measures.
def outcomes_data
  outcomes_module = key_check(results_section['OutcomeMeasuresModule'])
  outcome_measure_list = key_check(outcomes_module['OutcomeMeasureList'])
  outcome_measures = outcome_measure_list['OutcomeMeasure'] || []
  # result_groups are collected once up front, across all measures.
  collection = {result_groups: outcome_result_groups_data, outcome_measures: []}
  return nil if outcome_measures.empty?
  outcome_measures.each do |outcome_measure|
    collection[:outcome_measures].push(
      outcome_measure: {
        nct_id: nct_id,
        outcome_type: outcome_measure['OutcomeMeasureType'],
        title: outcome_measure['OutcomeMeasureTitle'],
        description: outcome_measure['OutcomeMeasureDescription'],
        time_frame: outcome_measure['OutcomeMeasureTimeFrame'],
        population: outcome_measure['OutcomeMeasurePopulationDescription'],
        # the posting date is stored both parsed and as the raw API string
        anticipated_posting_date: convert_date(outcome_measure['OutcomeMeasureAnticipatedPostingDate']),
        anticipated_posting_month_year: outcome_measure['OutcomeMeasureAnticipatedPostingDate'],
        units: outcome_measure['OutcomeMeasureUnitOfMeasure'],
        units_analyzed: outcome_measure['OutcomeMeasureTypeUnitsAnalyzed'],
        dispersion_type: outcome_measure['OutcomeMeasureDispersionType'],
        param_type: outcome_measure['OutcomeMeasureParamType']
      },
      outcome_counts: outcome_counts_data(outcome_measure),
      outcome_measurements: outcome_measurements_data(outcome_measure),
      outcome_analyses: outcome_analyses_data(outcome_measure)
    )
  end
  collection
end
# Result-group rows gathered across every outcome measure, de-duplicated.
# (result_groups returns nil for a measure without groups; flatten/uniq
# therefore may retain a single nil entry, matching the original.)
def outcome_result_groups_data
  outcomes_module = key_check(results_section['OutcomeMeasuresModule'])
  measures = key_check(outcomes_module['OutcomeMeasureList'])['OutcomeMeasure'] || []
  groups_per_measure = measures.map do |measure|
    outcome_groups = key_check(measure['OutcomeGroupList'])['OutcomeGroup'] || []
    StudyJsonRecord.result_groups(outcome_groups, 'Outcome', 'Outcome', nct_id)
  end
  groups_per_measure.flatten.uniq
end
# Normalizes a list of API group hashes into result_groups rows.
# +key_name+ selects the API key prefix ('Flow', 'Baseline', 'Outcome',
# 'Event'); +type+ is the value stored in result_type.
# Returns nil for a missing or empty list.
def self.result_groups(groups, key_name = 'Flow', type = 'Participant Flow', nct_id)
  return nil if groups.nil? || groups.empty?

  groups.map do |group|
    {
      nct_id: nct_id,
      ctgov_beta_group_code: group["#{key_name}GroupId"],
      result_type: type,
      title: group["#{key_name}GroupTitle"],
      description: group["#{key_name}GroupDescription"]
    }
  end
end
# Count rows for one outcome measure: one per (denomination, group) pair.
# Returns nil when the measure has no denominations.
def outcome_counts_data(outcome_measure)
  denoms = key_check(outcome_measure['OutcomeDenomList'])['OutcomeDenom'] || []
  return nil if denoms.empty?

  denoms.flat_map do |denom|
    counts = key_check(denom['OutcomeDenomCountList'])['OutcomeDenomCount'] || []
    counts.map do |denom_count|
      {
        nct_id: nct_id,
        outcome_id: nil,
        result_group_id: nil,
        ctgov_beta_group_code: denom_count['OutcomeDenomCountGroupId'],
        scope: 'Measure',
        units: denom['OutcomeDenomUnits'],
        count: denom_count['OutcomeDenomCountValue']
      }
    end
  end
end
# Measurement rows for one outcome measure, walking the API nesting of
# class -> category -> measurement (outcome_id / result_group_id are left
# nil and linked at save time). Returns nil when the measure has no classes.
def outcome_measurements_data(outcome_measure)
  outcome_class_list = key_check(outcome_measure['OutcomeClassList'])
  outcome_classes = outcome_class_list['OutcomeClass'] || []
  collection = []
  return nil if outcome_classes.empty?
  outcome_classes.each do |outcome_class|
    outcome_category_list = key_check(outcome_class['OutcomeCategoryList'])
    outcome_categories = outcome_category_list['OutcomeCategory'] || []
    outcome_categories.each do |category|
      outcome_measurement_list = key_check(category['OutcomeMeasurementList'])
      measurements = outcome_measurement_list['OutcomeMeasurement'] || []
      measurements.each do |measure|
        collection.push(
          nct_id: nct_id,
          outcome_id: nil,
          result_group_id: nil,
          ctgov_beta_group_code: measure['OutcomeMeasurementGroupId'],
          classification: outcome_class['OutcomeClassTitle'],
          category: category['OutcomeCategoryTitle'],
          # title/units/types come from the parent measure; values come from
          # the individual measurement
          title: outcome_measure['OutcomeMeasureTitle'],
          description: outcome_measure['OutcomeMeasureDescription'],
          units: outcome_measure['OutcomeMeasureUnitOfMeasure'],
          param_type: outcome_measure['OutcomeMeasureParamType'],
          # values stored both raw and as parsed floats (nil if unparseable)
          param_value: measure['OutcomeMeasurementValue'],
          param_value_num: StudyJsonRecord.float(measure['OutcomeMeasurementValue']),
          dispersion_type: outcome_measure['OutcomeMeasureDispersionType'],
          dispersion_value: measure['OutcomeMeasurementSpread'],
          dispersion_value_num: StudyJsonRecord.float(measure['OutcomeMeasurementSpread']),
          dispersion_lower_limit: StudyJsonRecord.float(measure['OutcomeMeasurementLowerLimit']),
          dispersion_upper_limit: StudyJsonRecord.float(measure['OutcomeMeasurementUpperLimit']),
          explanation_of_na: measure['OutcomeMeasurementComment']
        )
      end
    end
  end
  collection
end
# Statistical-analysis rows for one outcome measure, each paired with the
# group ids the analysis refers to. Returns nil when there are no analyses.
def outcome_analyses_data(outcome_measure)
  outcome_analysis_list = key_check(outcome_measure['OutcomeAnalysisList'])
  outcome_analyses = outcome_analysis_list['OutcomeAnalysis'] || []
  collection = []
  return nil if outcome_analyses.empty?
  outcome_analyses.each do |analysis|
    # raw p-value strings can look like "<0.001" — split below into a
    # comparison modifier and the numeric part (kept as a string here)
    raw_value = analysis['OutcomeAnalysisPValue'] || ''
    collection.push(
      outcome_analysis: {
        nct_id: nct_id,
        outcome_id: nil,
        non_inferiority_type: analysis['OutcomeAnalysisNonInferiorityType'],
        non_inferiority_description: analysis['OutcomeAnalysisNonInferiorityComment'],
        param_type: analysis['OutcomeAnalysisParamType'],
        param_value: analysis['OutcomeAnalysisParamValue'],
        dispersion_type: analysis['OutcomeAnalysisDispersionType'],
        dispersion_value: analysis['OutcomeAnalysisDispersionValue'],
        # strip digits, dots and dashes: what remains is the modifier (e.g. "<")
        p_value_modifier: raw_value.gsub(/\d+/, "").gsub('.','').gsub('-','').strip,
        # strip comparison symbols and spaces: what remains is the number
        p_value: raw_value.gsub(/</, '').gsub(/>/, '').gsub(/ /, '').strip,
        p_value_description: analysis['OutcomeAnalysisPValueComment'],
        ci_n_sides: analysis['OutcomeAnalysisCINumSides'],
        ci_percent: StudyJsonRecord.float(analysis['OutcomeAnalysisCIPctValue']),
        ci_lower_limit: analysis['OutcomeAnalysisCILowerLimit'],
        ci_upper_limit: analysis['OutcomeAnalysisCIUpperLimit'],
        ci_upper_limit_na_comment: analysis['OutcomeAnalysisCIUpperLimitComment'],
        method: analysis['OutcomeAnalysisStatisticalMethod'],
        method_description: analysis['OutcomeAnalysisStatisticalComment'],
        estimate_description: analysis['OutcomeAnalysisEstimateComment'],
        groups_description: analysis['OutcomeAnalysisGroupDescription'],
        other_analysis_description: analysis['OutcomeAnalysisOtherAnalysisDescription']
      },
      outcome_analysis_group_ids: outcome_analysis_groups_data(analysis)
    )
  end
  collection
end
# Join rows linking an outcome analysis to its result groups (ids resolved
# at save time). Returns nil when the analysis references no group ids.
def outcome_analysis_groups_data(outcome_analysis)
  group_id_list = key_check(outcome_analysis['OutcomeAnalysisGroupIdList'])
  group_ids = group_id_list['OutcomeAnalysisGroupId'] || []
  return nil if group_ids.empty?

  collection = []
  group_ids.each do |group_id|
    collection.push(
      nct_id: nct_id,
      outcome_analysis_id: nil,
      result_group_id: nil,
      ctgov_beta_group_code: group_id
    )
  end
  # Bug fix: the original ended on the `each`, so it returned the raw
  # group-id strings instead of the rows accumulated in `collection`.
  collection
end
# Overall-official rows from the contacts/locations module, or nil when none.
def overall_officials_data
  officials = key_check(contacts_location_module['OverallOfficialList'])['OverallOfficial'] || []
  return nil if officials.empty?

  officials.map do |official|
    {
      nct_id: nct_id,
      role: official['OverallOfficialRole'],
      name: official['OverallOfficialName'],
      affiliation: official['OverallOfficialAffiliation']
    }
  end
end
# Aggregates primary, secondary and other design outcomes into one list.
# Returns nil when the study declares none.
def design_outcomes_data
  primary_outcomes = outcome_list('Primary') || []
  secondary_outcomes = outcome_list('Secondary') || []
  # Bug fix: 'Other' outcomes were fetched by the original but never added
  # to the total, so they were silently dropped.
  other_outcomes = outcome_list('Other') || []
  total = primary_outcomes + secondary_outcomes + other_outcomes
  total.empty? ? nil : total
end
# Design-outcome rows of one kind; +outcome_type+ is the API key prefix
# ('Primary', 'Secondary' or 'Other'). Returns nil when none exist.
def outcome_list(outcome_type = 'Primary')
  outcomes_module = key_check(protocol_section['OutcomesModule'])
  outcomes = key_check(outcomes_module["#{outcome_type}OutcomeList"])["#{outcome_type}Outcome"] || []
  return nil if outcomes.empty?

  outcomes.map do |outcome|
    {
      nct_id: nct_id,
      outcome_type: outcome_type.downcase,
      measure: outcome["#{outcome_type}OutcomeMeasure"],
      time_frame: outcome["#{outcome_type}OutcomeTimeFrame"],
      population: nil,
      description: outcome["#{outcome_type}OutcomeDescription"]
    }
  end
end
# Unposted-event rows from the annotation section, or nil when none exist.
# The raw date string is kept alongside its parsed Date.
def pending_results_data
  annotation_module = key_check(annotation_section['AnnotationModule'])
  unposted = key_check(annotation_module['UnpostedAnnotation'])
  events = key_check(unposted['UnpostedEventList'])['UnpostedEvent'] || []
  return nil if events.empty?

  events.map do |event|
    date_string = event['UnpostedEventDate']
    {
      nct_id: nct_id,
      event: event['UnpostedEventType'],
      event_date_description: date_string,
      event_date: date_string.try(:to_date)
    }
  end
end
# Rows for study documents hosted under ClinicalTrials.gov/ProvidedDocs.
# Returns nil when the study attaches no large documents.
def provided_documents_data
  large_document_module = key_check(document_section['LargeDocumentModule'])
  large_docs = key_check(large_document_module['LargeDocList'])['LargeDoc'] || []
  return nil if large_docs.empty?

  # Hoisted out of the loop: the prefix depends only on the NCT id (the
  # last two characters bucket the file tree on the server).
  url_prefix = "https://ClinicalTrials.gov/ProvidedDocs/#{nct_id[-2..-1]}/#{nct_id}"
  large_docs.map do |doc|
    filename = doc['LargeDocFilename']
    {
      nct_id: nct_id,
      document_type: doc['LargeDocLabel'],
      has_protocol: get_boolean(doc['LargeDocHasProtocol']),
      has_icf: get_boolean(doc['LargeDocHasICF']),
      has_sap: get_boolean(doc['LargeDocHasSAP']),
      document_date: doc['LargeDocDate'].try(:to_date),
      # url is nil when the API supplies no filename
      url: filename ? "#{url_prefix}/#{filename}" : nil
    }
  end
end
# Adverse-event rows (serious + other) together with their result groups.
# Returns nil when the study reports no events at all.
def reported_events_data
  adverse_events_module = key_check(results_section['AdverseEventsModule'])
  groups = key_check(adverse_events_module['EventGroupList'])['EventGroup'] || []
  all_events = events_data('Serious') + events_data('Other')
  return nil if all_events.empty?

  {
    result_groups: StudyJsonRecord.result_groups(groups, 'Event', 'Reported Event', nct_id),
    events: all_events
  }
end
# Adverse-event rows of one kind; +event_type+ is the API key prefix
# ('Serious' or 'Other'). One row is emitted per (event, group stats) pair;
# returns [] (not nil) when no events of this kind exist.
def events_data(event_type='Serious')
  adverse_events_module = key_check(results_section['AdverseEventsModule'])
  event_list = key_check(adverse_events_module["#{event_type}EventList"])
  events = event_list["#{event_type}Event"] || []
  collection = []
  events.each do |event|
    event_stat_list = key_check(event["#{event_type}EventStatsList"])
    event_stats = event_stat_list["#{event_type}EventStats"] || []
    event_stats.each do |event_stat|
      collection.push(
        nct_id: nct_id,
        result_group_id: nil,
        ctgov_beta_group_code: event_stat["#{event_type}EventStatsGroupId"],
        # time frame / description / threshold are module-wide, repeated per row
        time_frame: adverse_events_module['EventsTimeFrame'],
        event_type: event_type.downcase,
        default_vocab: event["#{event_type}EventSourceVocabulary"],
        default_assessment: event["#{event_type}EventAssessmentType"],
        subjects_affected: event_stat["#{event_type}EventStatsNumAffected"],
        subjects_at_risk: event_stat["#{event_type}EventStatsNumAtRisk"],
        description: adverse_events_module['EventsDescription'],
        event_count: event_stat["#{event_type}EventStatsNumEvents"],
        organ_system: event["#{event_type}EventOrganSystem"],
        adverse_event_term: event["#{event_type}EventTerm"],
        frequency_threshold: adverse_events_module['EventsFrequencyThreshold'],
        # vocab is intentionally nil here; assessment mirrors default_assessment
        vocab: nil,
        assessment: event["#{event_type}EventAssessmentType"]
      )
    end
  end
  collection
end
# ResponsibleParty row from the SponsorCollaboratorsModule, or nil when absent.
# Sample payloads:
#   https://clinicaltrials.gov/api/query/full_studies?expr=NCT04053270&fmt=json
#   https://clinicaltrials.gov/api/query/full_studies?expr=NCT04076787&fmt=json
def responsible_party_data
  sponsor_module = key_check(protocol_section['SponsorCollaboratorsModule'])
  party = key_check(sponsor_module['ResponsibleParty'])
  return nil if party.empty?

  {
    nct_id: nct_id,
    responsible_party_type: party['ResponsiblePartyType'],
    name: party['ResponsiblePartyInvestigatorFullName'],
    title: party['ResponsiblePartyInvestigatorTitle'],
    organization: party['ResponsiblePartyOldOrganization'],
    affiliation: party['ResponsiblePartyInvestigatorAffiliation']
  }
end
# CertainAgreement block from the results MoreInfoModule, or nil when absent.
def result_agreement_data
  more_info_module = key_check(results_section['MoreInfoModule'])
  certain_agreement = key_check(more_info_module['CertainAgreement'])
  return nil if certain_agreement.empty?
  {
    nct_id: nct_id,
    pi_employee: certain_agreement['AgreementPISponsorEmployee'],
    # NOTE(review): restrictive_agreement and other_details both read
    # 'AgreementOtherDetails' — restrictive_agreement looks like it should
    # read a dedicated restrictive-agreement field; confirm against the
    # API schema before changing.
    restrictive_agreement: certain_agreement['AgreementOtherDetails'],
    restriction_type: certain_agreement['AgreementRestrictionType'],
    other_details: certain_agreement['AgreementOtherDetails']
  }
end
# Point-of-contact row from the results MoreInfoModule, or nil when absent.
def result_contact_data
  more_info_module = key_check(results_section['MoreInfoModule'])
  point_of_contact = key_check(more_info_module['PointOfContact'])
  return nil if point_of_contact.empty?

  ext = point_of_contact['PointOfContactPhoneExt']
  phone = point_of_contact['PointOfContactPhone']
  # Join phone and extension nil-safely: the original's `phone + " ext …"`
  # raised NoMethodError when an extension was present without a phone.
  full_phone = [phone, ext && "ext #{ext}"].compact.join(' ')
  {
    nct_id: nct_id,
    organization: point_of_contact['PointOfContactOrganization'],
    name: point_of_contact['PointOfContactTitle'],
    phone: full_phone.empty? ? nil : full_phone,
    email: point_of_contact['PointOfContactEMail']
  }
end
# Reference rows from the ReferencesModule, or nil when none exist.
def study_references_data
  reference_module = key_check(protocol_section['ReferencesModule'])
  references = key_check(reference_module['ReferenceList'])['Reference'] || []
  return nil if references.empty?

  references.map do |reference|
    {
      nct_id: nct_id,
      pmid: reference['ReferencePMID'],
      # ReferenceType also covers the legacy "results_reference" format
      reference_type: reference['ReferenceType'],
      citation: reference['ReferenceCitation']
    }
  end
end
# Sponsor rows: the lead sponsor (if present) followed by collaborators.
# Returns nil when neither exists.
def sponsors_data
  sponsor_module = key_check(protocol_section['SponsorCollaboratorsModule'])
  lead = sponsor_info(key_check(sponsor_module['LeadSponsor']), 'LeadSponsor')
  collaborators = key_check(sponsor_module['CollaboratorList'])['Collaborator'] || []
  return nil if lead.nil? && collaborators.empty?

  rows = []
  rows << lead if lead
  collaborators.each do |collaborator|
    row = sponsor_info(collaborator, 'Collaborator')
    rows << row if row
  end
  rows
end
# Builds one sponsors-table row from a sponsor hash; +sponsor_type+ is the
# API key prefix ('LeadSponsor' or 'Collaborator'). Returns nil for an
# empty hash.
def sponsor_info(sponsor_hash, sponsor_type = 'LeadSponsor')
  return nil if sponsor_hash.empty?

  {
    nct_id: nct_id,
    agency_class: sponsor_hash["#{sponsor_type}Class"],
    lead_or_collaborator: sponsor_type.match?(/Lead/i) ? 'lead' : 'collaborator',
    name: sponsor_hash["#{sponsor_type}Name"]
  }
end
# Assembles every per-table extraction into one hash keyed by destination;
# each value is nil when the study has no data for that table. build_study
# consumes this to create/update the database rows.
def data_collection
  {
    study: study_data,
    design_groups: design_groups_data,
    interventions: interventions_data,
    detailed_description: detailed_description_data,
    brief_summary: brief_summary_data,
    design: designs_data,
    eligibility: eligibility_data,
    participant_flow: participant_flow_data,
    baseline_measurements: baseline_measurements_data,
    browse_conditions: browse_conditions_data,
    browse_interventions: browse_interventions_data,
    central_contacts_list: central_contacts_data,
    conditions: conditions_data,
    countries: countries_data,
    documents: documents_data,
    facilities: facilities_data,
    id_information: id_information_data,
    ipd_information_type: ipd_information_types_data,
    keywords: keywords_data,
    links: links_data,
    milestones: milestones_data,
    outcomes: outcomes_data,
    overall_officials: overall_officials_data,
    design_outcomes: design_outcomes_data,
    pending_results: pending_results_data,
    provided_documents: provided_documents_data,
    reported_events: reported_events_data,
    responsible_party: responsible_party_data,
    result_agreement: result_agreement_data,
    result_contact: result_contact_data,
    study_references: study_references_data,
    sponsors: sponsors_data
  }
end
# Persists every table derived from this record's JSON content.
# Assumes stale rows for this nct_id were already removed
# (see clear_out_data_for). On error the failure is logged and the record
# id is collected in @study_build_failures instead of aborting the load.
def build_study
  begin
    data = data_collection
    Study.find_or_create_by(nct_id: nct_id).update(data[:study]) if data[:study]
    # interventions must exist before design groups so join rows can find them
    save_interventions(data[:interventions])
    save_design_groups(data[:design_groups])
    DetailedDescription.create(data[:detailed_description]) if data[:detailed_description]
    BriefSummary.create(data[:brief_summary]) if data[:brief_summary]
    Design.create(data[:design]) if data[:design]
    Eligibility.create(data[:eligibility]) if data[:eligibility]
    ParticipantFlow.create(data[:participant_flow]) if data[:participant_flow]
    # result groups must exist before the rows that link to them
    baseline_info = data[:baseline_measurements]
    ResultGroup.create(baseline_info[:result_groups]) if baseline_info
    save_with_result_group(baseline_info[:baseline_counts], 'BaselineCount') if baseline_info
    save_with_result_group(baseline_info[:measurements], 'BaselineMeasurement') if baseline_info
    BrowseCondition.create(data[:browse_conditions]) if data[:browse_conditions]
    BrowseIntervention.create(data[:browse_interventions]) if data[:browse_interventions]
    CentralContact.create(data[:central_contacts_list]) if data[:central_contacts_list]
    Condition.create(data[:conditions]) if data[:conditions]
    Country.create(data[:countries]) if data[:countries]
    Document.create(data[:documents]) if data[:documents]
    # facilities plus their contacts and investigators
    save_facilities(data[:facilities])
    IdInformation.create(data[:id_information]) if data[:id_information]
    IpdInformationType.create(data[:ipd_information_type]) if data[:ipd_information_type]
    Keyword.create(data[:keywords]) if data[:keywords]
    Link.create(data[:links]) if data[:links]
    # milestones and their result groups
    milestone_info = data[:milestones] || {}
    ResultGroup.create(milestone_info[:result_groups]) if milestone_info[:result_groups]
    save_with_result_group(milestone_info[:milestones], 'Milestone') if milestone_info[:milestones]
    # outcomes, their measurements, counts and analyses
    outcomes_info = data[:outcomes] || {}
    ResultGroup.create(outcomes_info[:result_groups]) if outcomes_info[:result_groups]
    save_outcomes(outcomes_info[:outcome_measures]) if outcomes_info[:outcome_measures]
    OverallOfficial.create(data[:overall_officials]) if data[:overall_officials]
    DesignOutcome.create(data[:design_outcomes]) if data[:design_outcomes]
    PendingResult.create(data[:pending_results]) if data[:pending_results]
    ProvidedDocument.create(data[:provided_documents]) if data[:provided_documents]
    # reported events and their result groups
    reported_events_info = data[:reported_events] || {}
    ResultGroup.create(reported_events_info[:result_groups]) if reported_events_info[:result_groups]
    save_with_result_group(reported_events_info[:events], 'ReportedEvent') if reported_events_info[:events]
    ResponsibleParty.create(data[:responsible_party]) if data[:responsible_party]
    ResultAgreement.create(data[:result_agreement]) if data[:result_agreement]
    ResultContact.create(data[:result_contact]) if data[:result_contact]
    Reference.create(data[:study_references]) if data[:study_references]
    Sponsor.create(data[:sponsors]) if data[:sponsors]
    update(saved_study_at: Time.now)
    puts "~~~~~~~~~~~~~~"
    puts "#{nct_id} done"
    puts "~~~~~~~~~~~~~~"
  rescue => error
    puts error.message
    @study_build_failures ||= []
    @study_build_failures << id
  end
end
# Row counts for every table the loader writes, keyed by model.
# Counts reflect whichever schema set_table_schema last selected,
# which is how comparison diffs ctgov against ctgov_beta.
def self.object_counts
  {
    study: Study.count,
    intervention: Intervention.count,
    intervention_other_name: InterventionOtherName.count,
    design_group: DesignGroup.count,
    design_group_intervention: DesignGroupIntervention.count,
    detailed_description: DetailedDescription.count,
    brief_summary: BriefSummary.count,
    design: Design.count,
    eligibility: Eligibility.count,
    participant_flow: ParticipantFlow.count,
    result_groups: ResultGroup.count,
    baseline_count: BaselineCount.count,
    baseline_measurement: BaselineMeasurement.count,
    browse_condition: BrowseCondition.count,
    browse_intervention: BrowseIntervention.count,
    central_contact: CentralContact.count,
    condition: Condition.count,
    country: Country.count,
    document: Document.count,
    facility: Facility.count,
    facility_contact: FacilityContact.count,
    facility_investigator: FacilityInvestigator.count,
    id_information: IdInformation.count,
    ipd_information_type: IpdInformationType.count,
    keyword: Keyword.count,
    link: Link.count,
    milestone: Milestone.count,
    outcome: Outcome.count,
    outcome_count: OutcomeCount.count,
    outcome_measurement: OutcomeMeasurement.count,
    outcome_analysis: OutcomeAnalysis.count,
    outcome_analysis_group: OutcomeAnalysisGroup.count,
    overall_official: OverallOfficial.count,
    design_outcome: DesignOutcome.count,
    pending_result: PendingResult.count,
    provided_document: ProvidedDocument.count,
    reported_event: ReportedEvent.count,
    responsible_party: ResponsibleParty.count,
    result_agreement: ResultAgreement.count,
    result_contact: ResultContact.count,
    study_reference: Reference.count,
    sponsor: Sponsor.count
  }
end
# Points every loadable ActiveRecord model at the given Postgres schema.
# Only 'ctgov' and 'ctgov_beta' are accepted; anything else is a no-op.
def self.set_table_schema(schema = 'ctgov')
  return unless %w[ctgov ctgov_beta].include?(schema)
  Util::DbManager.new.loadable_tables.each do |table|
    model = table.singularize.camelize.safe_constantize
    model.table_name = "#{schema}.#{table}" if model
  end
end
# Compares per-model row counts between the beta and regular schemas.
# Returns an array of { model => { beta:, reg: } } entries, with a final
# { inconsistencies: [...] } entry listing only the models that differ.
def self.comparison
  all_counts = []
  mismatches = []
  set_table_schema('ctgov_beta')
  beta_counts = object_counts
  set_table_schema('ctgov')
  reg_counts = object_counts
  beta_counts.each do |model_name, beta_count|
    pair = { beta: beta_count, reg: reg_counts[model_name] }
    mismatches.push(model_name => pair) if beta_count != reg_counts[model_name]
    all_counts.push(model_name => pair)
  end
  all_counts.push(inconsistencies: mismatches)
end
# Counts ResultGroup rows by result_type in both schemas for a quick
# beta-vs-regular sanity check. Returns a flat hash of the eight counts.
def self.check_results
  result_types = {
    outcome: 'Outcome',
    baseline: 'Baseline',
    participant: 'Participant Flow',
    reported: 'Reported Event'
  }
  set_table_schema('ctgov_beta')
  beta = result_types.transform_values { |type| ResultGroup.where(result_type: type).count }
  set_table_schema('ctgov')
  reg = result_types.transform_values { |type| ResultGroup.where(result_type: type).count }
  {
    beta_outcome: beta[:outcome],
    reg_outcomes: reg[:outcome],
    beta_baseline: beta[:baseline],
    reg_baseline: reg[:baseline],
    beta_participant: beta[:participant],
    reg_participant: reg[:participant],
    beta_reported: beta[:reported],
    reg_reported: reg[:reported]
  }
end
# Developer scratchpad for debugging count mismatches between schemas.
# Not part of the load pipeline; prints interventions_data for the listed
# NCT ids and returns an empty array.
def self.new_check
  set_table_schema('ctgov_beta')
  nct = %w[
    NCT04316403
  ]
  # Outcome
  # Baseline
  # Participant Flow
  # Reported Event
  # NOTE(review): the hash literal below is a pasted snapshot of a past
  # comparison run; it is evaluated and discarded (no effect).
  {:inconsistencies=>[ {:design_group_intervention=>{:beta=>4906, :reg=>4911}},
  {:design=>{:beta=>1551, :reg=>1562}},
  {:eligibility=>{:beta=>1555, :reg=>1562}},
  {:result_groups=>{:beta=>925, :reg=>2561}},
  {:country=>{:beta=>44121, :reg=>3830}},
  {:design_outcome=>{:beta=>12642, :reg=>13230}},
  {:reported_event=>{:beta=>13544, :reg=>14119}}]}
  # x_nct: a larger sample of studies kept around for ad-hoc checks (unused).
  x_nct = %w[
    NCT04292080
    NCT04050527
    NCT00530010
    NCT04144088
    NCT04053270
    NCT03897712
    NCT03845673
    NCT04245423
    NCT03519243
    NCT03034044
    NCT03496987
    NCT04204200
    NCT04182217
    NCT04167644
    NCT04214080
    NCT02982187
    NCT04027218
    NCT03811093
    NCT04109703
    NCT03763058
    NCT00489281
    NCT04076787
    NCT00725621
    NCT02222493
    NCT04014062
  ]
  StudyJsonRecord.where(nct_id: nct).each{ |i| puts i.interventions_data }
  # StudyJsonRecord.all.order(:id).each{ |i| puts i.study_data }
  # StudyJsonRecord.where(nct_id: nct).each{ |i| puts i.data_collection }
  # StudyJsonRecord.all.order(:id).each{ |i| puts i.data_collection }
  # record = StudyJsonRecord.find_by(nct_id: 'NCT04072432')
  []
end
# Creates Intervention rows plus their InterventionOtherName rows.
# Each entry is { intervention: attrs, intervention_other_names: [attrs] }.
def save_interventions(interventions)
  return unless interventions
  interventions.each do |entry|
    created = Intervention.create(entry[:intervention])
    other_names = entry[:intervention_other_names]
    next unless other_names
    other_names.each do |other_name|
      other_name[:intervention_id] = created.id
      InterventionOtherName.create(other_name)
    end
  end
end
# Upserts DesignGroup rows and links them to their Interventions.
# Interventions are matched by (nct_id, name, type); unmatched names are
# logged and skipped instead of halting the load (the previous version
# dropped into byebug here).
def save_design_groups(design_groups)
  return unless design_groups
  design_groups.each do |group|
    design_info = group[:design_group]
    design_group = DesignGroup.find_by(nct_id: nct_id, title: design_info[:title])
    design_group ||= DesignGroup.create(nct_id: nct_id, title: design_info[:title])
    design_group.update(design_info)
    interventions = group[:design_group_interventions]
    next unless interventions
    interventions.each do |intervention_info|
      intervention = Intervention.find_by(
        nct_id: nct_id,
        name: intervention_info[:name],
        intervention_type: intervention_info[:type]
      )
      unless intervention
        # no matching Intervention row — record the lookup keys and move on
        puts "no Intervention match for nct_id: #{nct_id}, name: #{intervention_info[:name]}, type: #{intervention_info[:type]}"
        next
      end
      DesignGroupIntervention.create(
        nct_id: nct_id,
        design_group_id: design_group.id,
        intervention_id: intervention.id
      )
    end
  end
end
# Attaches each baseline count to its ResultGroup (matched by
# ctgov_beta_group_code) and persists it. Delegates to
# save_with_result_group, which implements exactly this lookup/skip/create
# logic, instead of duplicating it.
def save_baseline_counts(counts)
  save_with_result_group(counts, 'BaselineCount')
end
# Persists each attribute hash via the named model after resolving its
# ResultGroup by ctgov_beta_group_code; rows with no matching group are
# skipped. No-op when group is nil.
def save_with_result_group(group, model_name='BaselineMeasurement')
  return unless group
  klass = model_name.safe_constantize
  group.each do |attrs|
    linked_group = ResultGroup.find_by(nct_id: nct_id, ctgov_beta_group_code: attrs[:ctgov_beta_group_code])
    next unless linked_group
    attrs[:result_group_id] = linked_group.id
    klass.create(attrs)
  end
end
# Creates Facility rows plus their contacts and investigators.
# Guards against missing :facility_contacts / :facility_investigators keys
# (the previous version called .each on a possibly-nil value and crashed).
def save_facilities(facilities)
  return unless facilities
  facilities.each do |facility_info|
    facility = Facility.create(facility_info[:facility]) if facility_info[:facility]
    next unless facility
    contacts = facility_info[:facility_contacts] || []
    investigators = facility_info[:facility_investigators] || []
    contacts.each { |h| h[:facility_id] = facility.id }
    investigators.each { |h| h[:facility_id] = facility.id }
    FacilityContact.create(contacts) unless contacts.empty?
    FacilityInvestigator.create(investigators) unless investigators.empty?
  end
end
# Creates Outcome rows plus their counts, measurements, analyses and
# analysis groups. Removes the debug puts the old version emitted per
# measure, and uses each (not map) for the side-effecting outcome_id tag.
def save_outcomes(outcome_measures)
  return unless outcome_measures
  outcome_measures.each do |outcome_measure|
    outcome = Outcome.create(outcome_measure[:outcome_measure]) if outcome_measure[:outcome_measure]
    next unless outcome
    outcome_counts = StudyJsonRecord.set_key_value(outcome_measure[:outcome_counts], :outcome_id, outcome.id)
    outcome_measurements = StudyJsonRecord.set_key_value(outcome_measure[:outcome_measurements], :outcome_id, outcome.id)
    save_with_result_group(outcome_counts, 'OutcomeCount') if outcome_counts
    save_with_result_group(outcome_measurements, 'OutcomeMeasurement') if outcome_measurements
    outcome_analyses = outcome_measure[:outcome_analyses] || []
    outcome_analyses.each do |analysis_info|
      # tag the analysis with its parent outcome before persisting
      analysis_info[:outcome_analysis][:outcome_id] = outcome.id
      outcome_analysis = OutcomeAnalysis.create(analysis_info[:outcome_analysis])
      group_codes = analysis_info[:outcome_analysis_group_ids] || []
      group_codes.each do |group_code|
        result_group = ResultGroup.find_by(nct_id: nct_id, ctgov_beta_group_code: group_code)
        next unless result_group && outcome_analysis
        OutcomeAnalysisGroup.create(
          nct_id: nct_id,
          outcome_analysis_id: outcome_analysis.id,
          result_group_id: result_group.id,
          ctgov_beta_group_code: group_code
        )
      end
    end
  end
end
# Sets hash[key] = value on every hash in hash_array (mutating in place)
# and returns the array; returns nil when hash_array is nil.
# Uses each (not map) since the mapped values were discarded anyway, and
# each already returns the receiver.
def self.set_key_value(hash_array, key, value)
  return unless hash_array
  hash_array.each { |hash| hash[key] = value }
end
end
# NOTE: still trying to figure out why the counts don't match
require 'open-uri'
require 'fileutils'
# require 'zip'
# run incremental load with: bundle exec rake db:beta_load[1,incremental]
# run full load with: bundle exec rake db:beta_load[1,full]
include ActionView::Helpers::DateHelper
class StudyJsonRecord < ActiveRecord::Base
self.table_name = 'ctgov_beta.study_json_records'
# Memoized database manager scoped to the beta schema.
def self.db_mgr
  @db_mgr = Util::DbManager.new({search_path: 'ctgov_beta'}) if @db_mgr.nil?
  @db_mgr
end
# Memoized Util::Updater.
# NOTE(review): params are honored only on the first call — later calls
# return the cached instance and silently ignore new params; confirm intended.
def self.updater(params={})
  @updater ||= Util::Updater.new(params)
end
# Entry point for a beta load.
# params:
#   event_type:    'full' or 'incremental' (default 'incremental')
#   days_back:     days of updates to fetch for incremental loads (default 2)
#   full_featured: stored flag, currently unused downstream here
# Failed batches are collected in @broken_batch and retried once; study
# build failures are collected in @study_build_failures and reported.
# Fixes: balanced paren in the error message, "finished" typo, puts spacing.
def self.run(params={})
  start_time = Time.current
  set_table_schema('ctgov_beta')
  @broken_batch = {}
  @study_build_failures = []
  @full_featured = params[:full_featured] || false
  @params = params
  @type = params[:event_type] ? params[:event_type] : 'incremental'
  @days_back = (params[:days_back] ? params[:days_back] : 2)
  puts "now running #{@type}, #{@days_back} days back"
  begin
    @type == 'full' ? full : incremental
  rescue => error
    msg = "#{error.message} (#{error.class} #{error.backtrace})"
    puts "#{@type} load failed in run: #{msg}"
  end
  puts "broken----- #{@broken_batch}" if @type == 'incremental'
  puts "about to rerun batches"
  sleep 5
  rerun_batches(@broken_batch)
  puts "broken----- #{@broken_batch}" if @type == 'incremental'
  puts "failed to build #{@study_build_failures.uniq}"
  set_table_schema('ctgov')
  puts comparison
  puts "finished in #{time_ago_in_words(start_time)}"
  puts "total number we have #{StudyJsonRecord.count}"
end
# Directory for static assets under the Rails public path.
def self.root_dir
  File.join(Rails.public_path, 'static')
end
# Ensures the JSON download directory exists and returns its path.
def self.json_file_directory
  dir = "#{root_dir}/json_downloads"
  FileUtils.mkdir_p dir
  dir
end
# Downloads the full AllAPIJSON.zip dump via wget into the download dir
# and returns the (open) File handle.
# NOTE(review): tries starts at 5, so the first attempt runs plain wget and
# any retry after Errno::ECONNRESET uses `wget -c` to resume the download.
# NOTE(review): the file is opened 'w' here but written by wget as a
# separate process; file.size is called and discarded — confirm the handle
# actually reflects wget's output before relying on it.
def self.download_all_studies(url='https://ClinicalTrials.gov/AllAPIJSON.zip')
  tries ||= 5
  file_name="#{json_file_directory}/#{Time.zone.now.strftime("%Y%m%d-%H")}.zip"
  file = File.new file_name, 'w'
  begin
    if tries < 5
      `wget -c #{url} -O #{file.path}`
    else
      `wget #{url} -O #{file.path}`
    end
  rescue Errno::ECONNRESET => e
    if (tries -=1) > 0
      retry
    end
  end
  file.binmode
  file.size
  file
end
# Full load: downloads the complete study dump, wipes all existing data
# for the nct_ids we already hold, then saves every study in the zip.
# NOTE(review): the rescue's `next unless json` skips zip entries whose
# read/parse failed (json is nil after a parse error); confirm that a
# read failure before json assignment behaves the same.
def self.full
  start_time = Time.current
  study_download = download_all_studies
  # finshed in about 12 hours
  # total number we have 326614
  # finshed in about 1 hour
  # total number we should have 3131
  # total number we have 1578
  nct_ids = StudyJsonRecord.all.map(&:nct_id)
  clear_out_data_for(nct_ids)
  Zip::File.open(study_download.path) do |unzipped_folders|
    original_count = unzipped_folders.size
    count_down = original_count
    unzipped_folders.each do |file|
      begin
        contents = file.get_input_stream.read
        json = JSON.parse(contents)
      rescue
        next unless json
      end
      study = json['FullStudy']
      next unless study
      save_single_study(study)
      nct_id = study['Study']['ProtocolSection']['IdentificationModule']['NCTId']
      puts "added NCTId #{nct_id} study_json_record: #{count_down} of #{original_count}"
      count_down -= 1
    end
  end
  # elapsed time; value is the method's return but is otherwise unused
  seconds = Time.now - start_time
end
# Fetches recently-updated studies from the ClinicalTrials.gov API in
# rank batches of 100 and saves them. Batch 1 (ranks 1-100) comes from
# json_data; remaining batches are fetched rank-range by rank-range.
# Fixes: replaces the `for` loop and its off-by-one (it ran `limit`
# iterations after batch 1, requesting one batch past the end).
def self.incremental
  first_batch = json_data
  # NStudiesFound is the total number of studies matching the query
  total_number = first_batch['FullStudiesResponse']['NStudiesFound']
  limit = (total_number/100.0).ceil
  puts "batch 1 of #{limit}"
  sleep 5
  save_study_records(first_batch['FullStudiesResponse']['FullStudies'])
  # batch 1 (ranks 1-100) is already saved, so continue from rank 101
  min = 101
  max = 200
  (2..limit).each do |batch_number|
    puts "batch #{batch_number} of #{limit}"
    sleep 5
    fetch_studies(min, max)
    min += 100
    max += 100
  end
end
# Fetches one rank-range batch [min, max] from the full-studies API and
# saves it. Any error triggers up to 5 retries; after the 6th failure the
# url and rank range are recorded in @broken_batch so rerun_batches can
# try again later.
def self.fetch_studies(min=1, max=100)
  begin
    retries ||= 0
    puts "try ##{ retries }"
    # "https://clinicaltrials.gov/api/query/full_studies?expr=AREA[LastUpdatePostDate]RANGE[01/01/2020,%20MAX]&fmt=json"
    url = "https://clinicaltrials.gov/api/query/full_studies?expr=#{time_range}&min_rnk=#{min}&max_rnk=#{max}&fmt=json"
    data = json_data(url) || {}
    data = data.dig('FullStudiesResponse', 'FullStudies')
    save_study_records(data) if data
  rescue
    retry if (retries += 1) < 6
    if retries >= 6
      @broken_batch ||= {}
      @broken_batch[url] = { min: min, max: max }
    end
  end
end
# Re-attempts every batch that failed during the main run.
# url_hash maps the failed request url to its { min:, max: } rank range.
def self.rerun_batches(url_hash)
  set_table_schema('ctgov_beta')
  url_hash.each do |failed_url, range|
    puts "running #{failed_url}"
    fetch_studies(range[:min], range[:max])
  end
end
# Clears existing data for every study in the batch, then saves each one.
def self.save_study_records(study_batch)
  return unless study_batch
  nct_ids = study_batch.map do |study|
    study['Study']['ProtocolSection']['IdentificationModule']['NCTId']
  end
  clear_out_data_for(nct_ids)
  study_batch.each { |study| save_single_study(study) }
end
# Upserts the StudyJsonRecord row for one study payload and (re)builds all
# derived study tables from it. A save failure is logged with the record's
# validation errors instead of dropping into byebug (debugger leftover).
def self.save_single_study(study_data)
  nct_id = study_data['Study']['ProtocolSection']['IdentificationModule']['NCTId']
  record = StudyJsonRecord.find_by(nct_id: nct_id) || StudyJsonRecord.new(nct_id: nct_id)
  record.content = study_data
  record.saved_study_at = nil
  record.download_date = Time.current
  if record.save
    record.build_study
  else
    puts "failed to save study_json_record for #{nct_id}: #{record.errors.full_messages}"
  end
end
# Removes all existing rows (derived tables and raw JSON) for the given
# nct_ids. Indexes and constraints are dropped first and restored after,
# because they significantly slow the load.
def self.clear_out_data_for(nct_ids)
  return if nct_ids.nil? || nct_ids.empty?
  manager = db_mgr
  manager.remove_indexes_and_constraints
  manager.clear_out_data_for(nct_ids)
  delete_json_records(nct_ids)
  manager.add_indexes_and_constraints
end
# Deletes the raw JSON rows for the given nct_ids.
# Uses a parameterized relation delete instead of hand-built SQL so ids
# containing quotes cannot break (or inject into) the statement.
def self.delete_json_records(nct_ids)
  return if nct_ids.nil? || nct_ids.empty?
  StudyJsonRecord.where(nct_id: nct_ids).delete_all
end
# Fetches and parses one JSON payload from the full-studies API.
# Uses URI.open explicitly: Kernel#open on a URL string is deprecated and
# removed in Ruby 3 even with open-uri loaded, and the block form closes
# the IO handle (the old version leaked it).
def self.json_data(url="https://clinicaltrials.gov/api/query/full_studies?expr=#{time_range}&min_rnk=1&max_rnk=100&fmt=json")
  puts url
  JSON.parse(URI.open(url, &:read))
end
# Builds the LastUpdatePostDate range expression for incremental loads.
# Returns nil for full loads, or when days_back is the literal string 'nil'.
def self.time_range
  return nil if @type == 'full' || @days_back == 'nil'
  cutoff = (Date.current - @days_back.to_i).strftime('%m/%d/%Y')
  "AREA[LastUpdatePostDate]RANGE[#{cutoff},%20MAX]"
end
# Returns the value unchanged, or an empty hash when it is nil/false, so
# callers can chain ['Key'] lookups on optional JSON sections without
# nil checks.
def key_check(key)
  key || {}
end
# Maps yes/y/true to true and no/n/false to false (case-insensitive).
# Returns nil for nil input or any unrecognized string.
def get_boolean(val)
  return nil unless val
  case val.downcase
  when 'yes', 'y', 'true' then true
  when 'no', 'n', 'false' then false
  end
end
# Parses a date string into a Date; returns nil for nil input.
def get_date(str)
  return nil unless str
  Date.parse(str)
end
# Converts a date string to a Date. When the string omits the day of month
# (e.g. "March 2020") the last day of that month is used.
def convert_date(str)
  return nil unless str
  if is_missing_the_day?(str)
    str.to_date.end_of_month
  else
    get_date(str)
  end
end
# Heuristic for date strings such as "March 2020": exactly one space means
# the day-of-month is absent ("March 5, 2020" contains two spaces).
def is_missing_the_day?(str)
  str.count(' ') == 1
end
# Accessors for the top-level sections of the ClinicalTrials.gov JSON
# payload. key_check substitutes {} for absent sections so downstream
# ['Key'] lookups never raise.
def protocol_section
  key_check(content['Study']['ProtocolSection'])
end
def results_section
  key_check(content['Study']['ResultsSection'])
end
def derived_section
  key_check(content['Study']['DerivedSection'])
end
def annotation_section
  key_check(content['Study']['AnnotationSection'])
end
def document_section
  key_check(content['Study']['DocumentSection'])
end
def contacts_location_module
  key_check(protocol_section['ContactsLocationsModule'])
end
# All study locations; [] when the LocationList is absent.
def locations_array
  locations_list = key_check(contacts_location_module['LocationList'])
  locations_list['Location'] || []
end
# Builds the attribute hash for the studies table from the ProtocolSection
# (plus bits of the ResultsSection). Returns nil when the payload has no
# ProtocolSection. Missing sub-modules are defaulted to {} via key_check so
# individual lookups simply yield nil attributes.
def study_data
  protocol = protocol_section
  return nil if protocol.empty?
  status = protocol['StatusModule']
  ident = protocol['IdentificationModule']
  design = key_check(protocol['DesignModule'])
  oversight = key_check(protocol['OversightModule'])
  ipd_sharing = key_check(protocol['IPDSharingStatementModule'])
  study_posted = status['StudyFirstPostDateStruct']
  results_posted = key_check(status['ResultsFirstPostDateStruct'])
  disp_posted = key_check(status['DispFirstPostDateStruct'])
  last_posted = status['LastUpdatePostDateStruct']
  start_date = key_check(status['StartDateStruct'])
  completion_date = key_check(status['CompletionDateStruct'])
  primary_completion_date = key_check(status['PrimaryCompletionDateStruct'])
  # NOTE(review): reads content directly instead of the results_section
  # helper — same data, just inconsistent style.
  results = key_check(content['Study']['ResultsSection'])
  baseline = key_check(results['BaselineCharacteristicsModule'])
  enrollment = key_check(design['EnrollmentInfo'])
  expanded_access = status.dig('ExpandedAccessInfo', 'HasExpandedAccess')
  expanded = key_check(design['ExpandedAccessTypes'])
  biospec = key_check(design['BioSpec'])
  arms_intervention = key_check(protocol['ArmsInterventionsModule'])
  arms_group_list = key_check(arms_intervention['ArmGroupList'])
  arms_groups = arms_group_list['ArmGroup'] || []
  {
    nct_id: nct_id,
    nlm_download_date_description: download_date,
    study_first_submitted_date: get_date(status['StudyFirstSubmitDate']),
    results_first_submitted_date: get_date(status['ResultsFirstSubmitDate']),
    disposition_first_submitted_date: get_date(status['DispFirstSubmitDate']),
    last_update_submitted_date: get_date(status['LastUpdateSubmitDate']),
    study_first_submitted_qc_date: status['StudyFirstSubmitQCDate'],
    study_first_posted_date: study_posted['StudyFirstPostDate'],
    study_first_posted_date_type: study_posted['StudyFirstPostDateType'],
    results_first_submitted_qc_date: status['ResultsFirstSubmitQCDate'],
    results_first_posted_date: results_posted['ResultsFirstPostDate'],
    results_first_posted_date_type: results_posted['ResultsFirstPostDateType'],
    disposition_first_submitted_qc_date: status['DispFirstSubmitQCDate'],
    disposition_first_posted_date: disp_posted['DispFirstPostDate'],
    disposition_first_posted_date_type: disp_posted['DispFirstPostDateType'],
    last_update_submitted_qc_date: status['LastUpdateSubmitDate'],
    last_update_posted_date: last_posted['LastUpdatePostDate'],
    last_update_posted_date_type: last_posted['LastUpdatePostDateType'],
    start_month_year: start_date['StartDate'],
    start_date_type: start_date['StartDateType'],
    start_date: convert_date(start_date['StartDate']),
    verification_month_year: status['StatusVerifiedDate'],
    verification_date: convert_date(status['StatusVerifiedDate']),
    completion_month_year: completion_date['CompletionDate'],
    completion_date_type: completion_date['CompletionDateType'],
    completion_date: convert_date(completion_date['CompletionDate']),
    primary_completion_month_year: primary_completion_date['PrimaryCompletionDate'],
    primary_completion_date_type: primary_completion_date['PrimaryCompletionDateType'],
    primary_completion_date: convert_date(primary_completion_date['PrimaryCompletionDate']),
    target_duration: design['TargetDuration'],
    study_type: design['StudyType'],
    acronym: ident['Acronym'],
    baseline_population: baseline['BaselinePopulationDescription'],
    brief_title: ident['BriefTitle'],
    official_title: ident['OfficialTitle'],
    overall_status: status['OverallStatus'],
    last_known_status: status['LastKnownStatus'],
    phase: key_check(design['PhaseList'])['Phase'],
    enrollment: enrollment['EnrollmentCount'],
    enrollment_type: enrollment['EnrollmentType'],
    source: ident.dig('Organization', 'OrgFullName'),
    limitations_and_caveats: key_check(results['MoreInfoModule'])['LimitationsAndCaveats'],
    # NOTE(review): number_of_arms and number_of_groups are both set to the
    # same ArmGroup count — confirm whether groups should count differently.
    number_of_arms: arms_groups.count,
    number_of_groups: arms_groups.count,
    why_stopped: status['WhyStopped'],
    has_expanded_access: get_boolean(expanded_access),
    expanded_access_type_individual: get_boolean(expanded['ExpAccTypeIndividual']),
    expanded_access_type_intermediate: get_boolean(expanded['ExpAccTypeIntermediate']),
    expanded_access_type_treatment: get_boolean(expanded['ExpAccTypeTreatment']),
    has_dmc: get_boolean(oversight['OversightHasDMC']),
    is_fda_regulated_drug: get_boolean(oversight['IsFDARegulatedDrug']),
    is_fda_regulated_device: get_boolean(oversight['IsFDARegulatedDevice']),
    is_unapproved_device: get_boolean(oversight['IsUnapprovedDevice']),
    is_ppsd: get_boolean(oversight['IsPPSD']),
    is_us_export: get_boolean(oversight['IsUSExport']),
    biospec_retention: biospec['BioSpecRetention'],
    biospec_description: biospec['BioSpecDescription'],
    ipd_time_frame: ipd_sharing['IPDSharingTimeFrame'],
    ipd_access_criteria: ipd_sharing['IPDSharingAccessCriteria'],
    ipd_url: ipd_sharing['IPDSharingURL'],
    plan_to_share_ipd: ipd_sharing['IPDSharing'],
    plan_to_share_ipd_description: ipd_sharing['IPDSharingDescription']
  }
end
# Builds design-group attribute hashes (plus their intervention links)
# from the ArmsInterventionsModule; nil when there are no arm groups.
def design_groups_data
  arms_module = key_check(protocol_section['ArmsInterventionsModule'])
  group_list = key_check(arms_module['ArmGroupList'])
  groups = group_list['ArmGroup'] || []
  return nil if groups.empty?
  groups.map do |group|
    {
      design_group: {
        nct_id: nct_id,
        group_type: group['ArmGroupType'],
        title: group['ArmGroupLabel'],
        description: group['ArmGroupDescription']
      },
      design_group_interventions: design_group_interventions_data(group)
    }
  end
end
# Extracts (type, name) pairs from "Type: Name" intervention strings so
# design groups can later be linked to Intervention rows by lookup.
# Returns nil when the group lists no interventions. Removes the byebug
# debugger the old version hit on strings without a ': ' separator; such
# strings now yield type = whole string, name = '' (same as the old
# split/shift/join result once the debugger was continued).
def design_group_interventions_data(arms_group)
  intervention_list = key_check(arms_group['ArmGroupInterventionList'])
  intervention_names = intervention_list['ArmGroupInterventionName'] || []
  return nil if intervention_names.empty?
  intervention_names.map do |name|
    # "Drug: Aspirin: 100mg" => type "Drug", name "Aspirin: 100mg"
    intervention_type, _, intervention_name = name.partition(': ')
    {
      nct_id: nct_id,
      name: intervention_name,
      type: intervention_type,
      design_group: arms_group['ArmGroupLabel']
    }
  end
end
# Builds intervention attribute hashes (with their other-name rows) from
# the InterventionList; nil when the study lists no interventions.
def interventions_data
  arms_module = key_check(protocol_section['ArmsInterventionsModule'])
  intervention_list = key_check(arms_module['InterventionList'])
  interventions = intervention_list['Intervention'] || []
  return nil if interventions.empty?
  interventions.map do |intervention|
    {
      intervention: {
        nct_id: nct_id,
        intervention_type: intervention['InterventionType'],
        name: intervention['InterventionName'],
        description: intervention['InterventionDescription']
      },
      intervention_other_names: intervention_other_names_data(intervention)
    }
  end
end
# Alternate-name rows for one intervention; nil when there are none.
# intervention_id is left nil here and filled in by save_interventions.
def intervention_other_names_data(intervention)
  name_list = key_check(intervention['InterventionOtherNameList'])
  other_names = name_list['InterventionOtherName'] || []
  return nil if other_names.empty?
  other_names.map do |other_name|
    { nct_id: nct_id, intervention_id: nil, name: other_name }
  end
end
# Attributes for the detailed_descriptions table; nil when absent.
def detailed_description_data
  text = key_check(protocol_section['DescriptionModule'])['DetailedDescription']
  { nct_id: nct_id, description: text } if text
end
# Attributes for the brief_summaries table; nil when absent.
def brief_summary_data
  text = key_check(protocol_section['DescriptionModule'])['BriefSummary']
  { nct_id: nct_id, description: text } if text
end
# Joins array elements into a single comma-separated string.
def self.make_list(array)
  array.join(', ')
end
# Attributes for the designs table from DesignModule/DesignInfo.
# List-valued fields (who-masked, observational models, time perspectives)
# are flattened to comma-separated strings; nil when DesignInfo is absent.
def designs_data
  protocol = protocol_section
  design = key_check(protocol['DesignModule'])
  info = key_check(design['DesignInfo'])
  masking = key_check(info['DesignMaskingInfo'])
  masked_list = key_check(masking['DesignWhoMaskedList'])
  who_masked = masked_list['DesignWhoMasked'] || []
  observation_list = key_check(info['DesignObservationalModelList'])
  observations = observation_list['DesignObservationalModel'] || []
  time_perspective_list = key_check(info['DesignTimePerspectiveList'])
  time_perspectives = time_perspective_list['DesignTimePerspective'] || []
  return nil if info.empty?
  {
    nct_id: nct_id,
    allocation: info['DesignAllocation'],
    observational_model: StudyJsonRecord.make_list(observations),
    intervention_model: info['DesignInterventionModel'],
    intervention_model_description: info['DesignInterventionModelDescription'],
    primary_purpose: info['DesignPrimaryPurpose'],
    time_perspective: StudyJsonRecord.make_list(time_perspectives),
    masking: masking['DesignMasking'],
    masking_description: masking['DesignMaskingDescription'],
    # each *_masked flag is true when any of its terms appears in who_masked
    subject_masked: is_masked?(who_masked, ['Subject','Participant']),
    caregiver_masked: is_masked?(who_masked, ['Caregiver','Care Provider']),
    investigator_masked: is_masked?(who_masked, ['Investigator']),
    outcomes_assessor_masked: is_masked?(who_masked, ['Outcomes Assessor']),
  }
end
# True when any query term appears in who_masked_array; returns nil (not
# false) otherwise, and tolerates a nil array via try.
# example who_masked array: ["Participant", "Care Provider", "Investigator", "Outcomes Assessor"]
def is_masked?(who_masked_array, query_array)
  found = query_array.any? { |term| who_masked_array.try(:include?, term) }
  found || nil
end
# Attributes for the eligibilities table; nil when the EligibilityModule
# is absent. Missing min/max ages are stored as the literal string 'N/A'.
def eligibility_data
  protocol = protocol_section
  eligibility = key_check(protocol['EligibilityModule'])
  return nil if eligibility.empty?
  {
    nct_id: nct_id,
    sampling_method: eligibility['SamplingMethod'],
    population: eligibility['StudyPopulation'],
    maximum_age: eligibility['MaximumAge'] || 'N/A',
    minimum_age: eligibility['MinimumAge'] || 'N/A',
    gender: eligibility['Gender'],
    gender_based: get_boolean(eligibility['GenderBased']),
    gender_description: eligibility['GenderDescription'],
    healthy_volunteers: eligibility['HealthyVolunteers'],
    criteria: eligibility['EligibilityCriteria']
  }
end
# Attributes for the participant_flows table; nil when the
# ParticipantFlowModule is absent from the results section.
def participant_flow_data
  flow_module = key_check(results_section['ParticipantFlowModule'])
  return nil if flow_module.empty?
  {
    nct_id: nct_id,
    recruitment_details: flow_module['FlowRecruitmentDetails'],
    pre_assignment_details: flow_module['FlowPreAssignmentDetails'],
  }
end
# Collects baseline data: result groups, denominator counts, and the
# individual measurements flattened out of the
# measure > class > category > measurement nesting.
# Returns nil when the BaselineCharacteristicsModule is absent.
# FIXME(review): when baseline_measures is empty the early `return`
# discards the already-built result_groups and baseline_counts — confirm
# whether `collection` should be returned instead.
def baseline_measurements_data
  results = results_section
  baseline_characteristics_module = key_check(results['BaselineCharacteristicsModule'])
  return nil if baseline_characteristics_module.empty?
  baseline_measure_list = key_check(baseline_characteristics_module['BaselineMeasureList'])
  baseline_measures = baseline_measure_list['BaselineMeasure'] || []
  collection = {result_groups: baseline_result_groups_data, baseline_counts: baseline_counts_data, measurements: []}
  return if baseline_measures.empty?
  baseline_measures.each do |measure|
    baseline_class_list = key_check(measure['BaselineClassList'])
    baseline_classes = baseline_class_list['BaselineClass'] || []
    baseline_classes.each do |baseline_class|
      baseline_category_list = key_check(baseline_class['BaselineCategoryList'])
      baseline_categories = baseline_category_list['BaselineCategory'] || []
      baseline_categories.each do |baseline_category|
        measurement_list = key_check(baseline_category['BaselineMeasurementList'])
        measurements = measurement_list['BaselineMeasurement'] || []
        measurements.each do |measurement|
          param_value = measurement['BaselineMeasurementValue']
          dispersion_value = measurement['BaselineMeasurementSpread']
          collection[:measurements].push(
            nct_id: nct_id,
            # result_group_id is resolved later from ctgov_beta_group_code
            result_group_id: nil,
            ctgov_beta_group_code: measurement['BaselineMeasurementGroupId'],
            classification: baseline_class['BaselineClassTitle'],
            category: baseline_category['BaselineCategoryTitle'],
            title: measure['BaselineMeasureTitle'],
            description: measure['BaselineMeasureDescription'],
            units: measure['BaselineMeasureUnitOfMeasure'],
            param_type: measure['BaselineMeasureParamType'],
            param_value: param_value,
            param_value_num: StudyJsonRecord.float(param_value),
            dispersion_type: measure['BaselineMeasureDispersionType'],
            dispersion_value: dispersion_value,
            dispersion_value_num: StudyJsonRecord.float(dispersion_value),
            dispersion_lower_limit: StudyJsonRecord.float(measurement['BaselineMeasurementLowerLimit']),
            dispersion_upper_limit: StudyJsonRecord.float(measurement['BaselineMeasurementUpperLimit']),
            explanation_of_na: measurement['BaselineMeasurementComment']
          )
        end
      end
    end
  end
  collection
end
# Lenient float conversion: returns nil (instead of raising) for nil or
# non-numeric input. Rescues only the conversion errors Kernel#Float
# raises, rather than the old bare `rescue nil` modifier which swallowed
# every StandardError.
def self.float(string)
  Float(string)
rescue ArgumentError, TypeError
  nil
end
# ResultGroup attribute hashes for the baseline section, tagged with
# result_type 'Baseline'.
def baseline_result_groups_data
  characteristics = key_check(results_section['BaselineCharacteristicsModule'])
  group_list = key_check(characteristics['BaselineGroupList'])
  groups = group_list['BaselineGroup'] || []
  StudyJsonRecord.result_groups(groups, 'Baseline', 'Baseline', nct_id)
end
# Builds baseline count rows (participants analyzed per group/unit).
# Returns nil when the study reports no baseline denominators.
# Fixes: guards denom['BaselineDenomCountList'] with key_check — the old
# version crashed with NoMethodError when a denom had no count list.
def baseline_counts_data
  characteristics = key_check(results_section['BaselineCharacteristicsModule'])
  denom_list = key_check(characteristics['BaselineDenomList'])
  baseline_denoms = key_check(denom_list['BaselineDenom'])
  collection = []
  return nil if baseline_denoms.empty?
  baseline_denoms.each do |denom|
    count_list = key_check(denom['BaselineDenomCountList'])
    counts = count_list['BaselineDenomCount'] || []
    counts.each do |count|
      collection.push(
        nct_id: nct_id,
        result_group_id: nil,
        ctgov_beta_group_code: count['BaselineDenomCountGroupId'],
        units: denom['BaselineDenomUnits'],
        scope: 'overall',
        count: count['BaselineDenomCountValue']
      )
    end
  end
  collection
end
# Convenience wrappers over browse for the two MeSH term types.
def browse_conditions_data
  browse('Condition')
end
def browse_interventions_data
  browse('Intervention')
end
# Shared extractor for the "#{type}BrowseModule" MeSH list
# (type is 'Condition' or 'Intervention'). Each row carries the term and a
# downcased copy for case-insensitive search. Returns nil when no terms exist.
def browse(type='Condition')
  browse_module = key_check(derived_section["#{type}BrowseModule"])
  terms = key_check(browse_module["#{type}MeshList"])["#{type}Mesh"] || []
  return nil if terms.empty?
  terms.map do |mesh|
    term = mesh["#{type}MeshTerm"]
    { nct_id: nct_id, mesh_term: term, downcase_mesh_term: term.try(:downcase) }
  end
end
# Central contacts; the first listed contact is 'primary', all others 'backup'.
# Returns nil when the study lists none.
def central_contacts_data
  contact_list = key_check(contacts_location_module['CentralContactList'])
  contacts = contact_list['CentralContact'] || []
  return nil if contacts.empty?
  contacts.map.with_index do |contact, idx|
    {
      nct_id: nct_id,
      contact_type: idx.zero? ? 'primary' : 'backup',
      name: contact['CentralContactName'],
      phone: contact['CentralContactPhone'],
      email: contact['CentralContactEMail']
    }
  end
end
# Study conditions, each with a downcased copy for case-insensitive search.
# Returns nil when none are present.
def conditions_data
  conditions_module = key_check(protocol_section['ConditionsModule'])
  names = key_check(conditions_module['ConditionList'])['Condition'] || []
  return nil if names.empty?
  names.map do |name|
    { nct_id: nct_id, name: name, downcase_name: name.try(:downcase) }
  end
end
# Countries for the study: current ones taken from location entries plus any
# explicitly removed countries. Returns nil when both sources are empty.
def countries_data
  misc_module = key_check(derived_section['MiscInfoModule'])
  removed = key_check(misc_module['RemovedCountryList'])['RemovedCountry'] || []
  return nil if locations_array.empty? && removed.empty?
  current = locations_array.map do |location|
    { nct_id: nct_id, name: location['LocationCountry'], removed: false }
  end
  current + removed.map { |name| { nct_id: nct_id, name: name, removed: true } }
end
# Available IPD (individual participant data) documents, or nil when none.
def documents_data
  reference_module = key_check(protocol_section['ReferencesModule'])
  ipds = key_check(reference_module['AvailIPDList'])['AvailIPD'] || []
  return nil if ipds.empty?
  ipds.map do |item|
    {
      nct_id: nct_id,
      document_id: item['AvailIPDId'],
      document_type: item['AvailIPDType'],
      url: item['AvailIPDURL'],
      comment: item['AvailIPDComment']
    }
  end
end
# Facilities with their contacts and investigators, one element per location:
# { facility: {...}, facility_contacts: [...], facility_investigators: [...] }.
# facility_id is left nil and filled in after the Facility row is created
# (see save_facilities). Returns nil when the study lists no locations.
def facilities_data
  collection = []
  return nil if locations_array.empty?
  locations_array.each do |location|
    location_contact_list = key_check(location['LocationContactList'])
    location_contact = location_contact_list['LocationContact'] || []
    facility_contacts = []
    facility_investigators = []
    location_contact.each_with_index do |contact, index|
      contact_role = contact['LocationContactRole']
      # Investigators and study chairs are split out from plain contacts by role.
      if contact_role =~ /Investigator|Study Chair/i
        facility_investigators.push(
          nct_id: nct_id,
          facility_id: nil,
          role: contact_role,
          name: contact['LocationContactName']
        )
      else
        # NOTE(review): index counts ALL contacts (investigators included), so
        # when the first entry is an investigator the first plain contact is
        # labeled 'backup' rather than 'primary' — confirm this is intended.
        facility_contacts.push(
          nct_id: nct_id,
          facility_id: nil,
          contact_type: index == 0 ? 'primary' : 'backup',
          name: contact['LocationContactName'],
          email: contact['LocationContactEMail'],
          phone: contact['LocationContactPhone']
        )
      end
    end
    collection.push(
      facility: {
        nct_id: nct_id,
        status: location['LocationStatus'],
        name: location['LocationFacility'],
        city: location['LocationCity'],
        state: location['LocationState'],
        zip: location['LocationZip'],
        country: location['LocationCountry']
      },
      facility_contacts: facility_contacts,
      facility_investigators: facility_investigators
    )
  end
  collection
end
# All study identifiers: the org study id, any NCT aliases, and secondary ids.
# Returns nil when all three sources are empty; otherwise the org_study_id row
# is always emitted first (its id_value may be nil, matching prior behavior).
def id_information_data
  identification_module = key_check(protocol_section['IdentificationModule'])
  nct_aliases = key_check(identification_module['NCTIdAliasList'])['NCTIdAlias'] || []
  org_study_info = key_check(identification_module['OrgStudyIdInfo'])
  secondary_infos = key_check(identification_module['SecondaryIdInfoList'])['SecondaryIdInfo'] || []
  return if org_study_info.empty? && secondary_infos.empty? && nct_aliases.empty?
  rows = [{ nct_id: nct_id, id_type: 'org_study_id', id_value: org_study_info['OrgStudyId'] }]
  rows += nct_aliases.map { |value| { nct_id: nct_id, id_type: 'nct_alias', id_value: value } }
  rows + secondary_infos.map { |info| { nct_id: nct_id, id_type: 'secondary_id', id_value: info['SecondaryId'] } }
end
# IPD sharing information types (plain names), or nil when none.
def ipd_information_types_data
  sharing_module = key_check(protocol_section['IPDSharingStatementModule'])
  types = key_check(sharing_module['IPDSharingInfoTypeList'])['IPDSharingInfoType'] || []
  return nil if types.empty?
  types.map { |name| { nct_id: nct_id, name: name } }
end
# Study keywords, each with a downcased copy for case-insensitive search.
# Returns nil when no keywords are present.
def keywords_data
  conditions_module = key_check(protocol_section['ConditionsModule'])
  keyword_list = key_check(conditions_module['KeywordList'])
  keywords = keyword_list['Keyword'] || []
  collection = []
  return nil if keywords.empty?
  keywords.each do |keyword|
    # try(:downcase) for nil safety, consistent with conditions_data
    collection.push(nct_id: nct_id, name: keyword, downcase_name: keyword.try(:downcase))
  end
  collection
end
# "See also" links from the references module, or nil when none.
def links_data
  references_module = key_check(protocol_section['ReferencesModule'])
  links = key_check(references_module['SeeAlsoLinkList'])['SeeAlsoLink'] || []
  return nil if links.empty?
  links.map do |link|
    { nct_id: nct_id, url: link['SeeAlsoLinkURL'], description: link['SeeAlsoLinkLabel'] }
  end
end
# Participant-flow milestones bundled with their result groups.
# Returns { result_groups: [...], milestones: [...] } or nil when the study
# has no flow periods or no milestone achievements.
def milestones_data
  participant_flow_module = key_check(results_section['ParticipantFlowModule'])
  flow_period_list = key_check(participant_flow_module['FlowPeriodList'])
  flow_periods = flow_period_list['FlowPeriod'] || []
  collection = {result_groups: flow_result_groups_data, milestones: []}
  return nil if flow_periods.empty?
  flow_periods.each do |period|
    # (removed an unused local that shadowed period['FlowPeriodTitle'])
    flow_milestone_list = key_check(period['FlowMilestoneList'])
    flow_milestones = flow_milestone_list['FlowMilestone'] || []
    flow_milestones.each do |milestone|
      flow_achievement_list = key_check(milestone['FlowAchievementList'])
      flow_achievements = flow_achievement_list['FlowAchievement'] || []
      flow_achievements.each do |achievement|
        collection[:milestones].push(
          nct_id: nct_id,
          result_group_id: nil, # resolved when rows are saved
          ctgov_beta_group_code: achievement['FlowAchievementGroupId'],
          title: milestone['FlowMilestoneType'],
          period: period['FlowPeriodTitle'],
          description: achievement['FlowAchievementComment'],
          count: achievement['FlowAchievementNumSubjects']
        )
      end
    end
  end
  return nil if collection[:milestones].empty?
  collection
end
# Participant-flow result groups via the shared result_groups helper.
def flow_result_groups_data
  flow_module = key_check(results_section['ParticipantFlowModule'])
  groups = key_check(flow_module['FlowGroupList'])['FlowGroup'] || []
  StudyJsonRecord.result_groups(groups, 'Flow', 'Participant Flow', nct_id)
end
# Outcome measures bundled with their nested counts, measurements and
# analyses. Returns { result_groups: [...], outcome_measures: [...] } — each
# outcome_measures element pairs the row hash with its child data — or nil
# when the study reports no outcome measures.
def outcomes_data
  outcomes_module = key_check(results_section['OutcomeMeasuresModule'])
  outcome_measure_list = key_check(outcomes_module['OutcomeMeasureList'])
  outcome_measures = outcome_measure_list['OutcomeMeasure'] || []
  collection = {result_groups: outcome_result_groups_data, outcome_measures: []}
  return nil if outcome_measures.empty?
  outcome_measures.each do |outcome_measure|
    collection[:outcome_measures].push(
      outcome_measure: {
        nct_id: nct_id,
        outcome_type: outcome_measure['OutcomeMeasureType'],
        title: outcome_measure['OutcomeMeasureTitle'],
        description: outcome_measure['OutcomeMeasureDescription'],
        time_frame: outcome_measure['OutcomeMeasureTimeFrame'],
        population: outcome_measure['OutcomeMeasurePopulationDescription'],
        # parsed date plus the raw month-year string it came from
        anticipated_posting_date: convert_date(outcome_measure['OutcomeMeasureAnticipatedPostingDate']),
        anticipated_posting_month_year: outcome_measure['OutcomeMeasureAnticipatedPostingDate'],
        units: outcome_measure['OutcomeMeasureUnitOfMeasure'],
        units_analyzed: outcome_measure['OutcomeMeasureTypeUnitsAnalyzed'],
        dispersion_type: outcome_measure['OutcomeMeasureDispersionType'],
        param_type: outcome_measure['OutcomeMeasureParamType']
      },
      outcome_counts: outcome_counts_data(outcome_measure),
      outcome_measurements: outcome_measurements_data(outcome_measure),
      outcome_analyses: outcome_analyses_data(outcome_measure)
    )
  end
  collection
end
# Result groups collected across all outcome measures, de-duplicated.
# Note: result_groups returns nil for a measure with no groups, and that nil
# survives flattening/uniq — preserved from the original implementation.
def outcome_result_groups_data
  outcomes_module = key_check(results_section['OutcomeMeasuresModule'])
  measures = key_check(outcomes_module['OutcomeMeasureList'])['OutcomeMeasure'] || []
  measures.flat_map do |measure|
    groups = key_check(measure['OutcomeGroupList'])['OutcomeGroup'] || []
    StudyJsonRecord.result_groups(groups, 'Outcome', 'Outcome', nct_id)
  end.uniq
end
# Normalize raw CT.gov group hashes into result_group row hashes.
# key_name is the JSON key prefix ('Flow', 'Baseline', 'Outcome', 'Event');
# type is the stored result_type label. Returns nil for a nil or empty list.
def self.result_groups(groups, key_name='Flow', type='Participant Flow', nct_id)
  return nil if groups.nil? || groups.empty?
  groups.map do |group|
    {
      nct_id: nct_id,
      ctgov_beta_group_code: group["#{key_name}GroupId"],
      result_type: type,
      title: group["#{key_name}GroupTitle"],
      description: group["#{key_name}GroupDescription"]
    }
  end
end
# Denominator counts for one outcome measure, one row per (denominator,
# group) pair. Returns nil when the measure has no denominators.
def outcome_counts_data(outcome_measure)
  denoms = key_check(outcome_measure['OutcomeDenomList'])['OutcomeDenom'] || []
  return nil if denoms.empty?
  denoms.flat_map do |denom|
    counts = key_check(denom['OutcomeDenomCountList'])['OutcomeDenomCount'] || []
    counts.map do |denom_count|
      {
        nct_id: nct_id,
        outcome_id: nil,
        result_group_id: nil,
        ctgov_beta_group_code: denom_count['OutcomeDenomCountGroupId'],
        scope: 'Measure',
        units: denom['OutcomeDenomUnits'],
        count: denom_count['OutcomeDenomCountValue']
      }
    end
  end
end
# Measurements for one outcome measure, flattened from the three-level
# class -> category -> measurement nesting into one row per measurement.
# Units/param/dispersion metadata comes from the parent measure; the values
# come from the individual measurement. Returns nil when no classes exist.
def outcome_measurements_data(outcome_measure)
  outcome_class_list = key_check(outcome_measure['OutcomeClassList'])
  outcome_classes = outcome_class_list['OutcomeClass'] || []
  collection = []
  return nil if outcome_classes.empty?
  outcome_classes.each do |outcome_class|
    outcome_category_list = key_check(outcome_class['OutcomeCategoryList'])
    outcome_categories = outcome_category_list['OutcomeCategory'] || []
    outcome_categories.each do |category|
      outcome_measurement_list = key_check(category['OutcomeMeasurementList'])
      measurements = outcome_measurement_list['OutcomeMeasurement'] || []
      measurements.each do |measure|
        collection.push(
          nct_id: nct_id,
          outcome_id: nil,        # resolved when rows are saved
          result_group_id: nil,   # resolved when rows are saved
          ctgov_beta_group_code: measure['OutcomeMeasurementGroupId'],
          classification: outcome_class['OutcomeClassTitle'],
          category: category['OutcomeCategoryTitle'],
          title: outcome_measure['OutcomeMeasureTitle'],
          description: outcome_measure['OutcomeMeasureDescription'],
          units: outcome_measure['OutcomeMeasureUnitOfMeasure'],
          param_type: outcome_measure['OutcomeMeasureParamType'],
          param_value: measure['OutcomeMeasurementValue'],
          # raw string kept alongside a numeric copy (nil when unparsable)
          param_value_num: StudyJsonRecord.float(measure['OutcomeMeasurementValue']),
          dispersion_type: outcome_measure['OutcomeMeasureDispersionType'],
          dispersion_value: measure['OutcomeMeasurementSpread'],
          dispersion_value_num: StudyJsonRecord.float(measure['OutcomeMeasurementSpread']),
          dispersion_lower_limit: StudyJsonRecord.float(measure['OutcomeMeasurementLowerLimit']),
          dispersion_upper_limit: StudyJsonRecord.float(measure['OutcomeMeasurementUpperLimit']),
          explanation_of_na: measure['OutcomeMeasurementComment']
        )
      end
    end
  end
  collection
end
# Statistical analyses for one outcome measure. Each element pairs the
# analysis row hash with the raw group ids it applies to. Returns nil when
# the measure has no analyses.
def outcome_analyses_data(outcome_measure)
  outcome_analysis_list = key_check(outcome_measure['OutcomeAnalysisList'])
  outcome_analyses = outcome_analysis_list['OutcomeAnalysis'] || []
  collection = []
  return nil if outcome_analyses.empty?
  outcome_analyses.each do |analysis|
    # CT.gov p-values arrive as strings that may carry a comparator, e.g. "<0.001".
    raw_value = analysis['OutcomeAnalysisPValue'] || ''
    collection.push(
      outcome_analysis: {
        nct_id: nct_id,
        outcome_id: nil, # resolved when rows are saved
        non_inferiority_type: analysis['OutcomeAnalysisNonInferiorityType'],
        non_inferiority_description: analysis['OutcomeAnalysisNonInferiorityComment'],
        param_type: analysis['OutcomeAnalysisParamType'],
        param_value: analysis['OutcomeAnalysisParamValue'],
        dispersion_type: analysis['OutcomeAnalysisDispersionType'],
        dispersion_value: analysis['OutcomeAnalysisDispersionValue'],
        # modifier: strip digits, dots and dashes, keeping e.g. "<" or "<="
        p_value_modifier: raw_value.gsub(/\d+/, "").gsub('.','').gsub('-','').strip,
        # p_value: strip comparators and spaces, keeping the numeric text
        # (still a String here — NOTE(review): confirm downstream coercion)
        p_value: raw_value.gsub(/</, '').gsub(/>/, '').gsub(/ /, '').strip,
        p_value_description: analysis['OutcomeAnalysisPValueComment'],
        ci_n_sides: analysis['OutcomeAnalysisCINumSides'],
        ci_percent: StudyJsonRecord.float(analysis['OutcomeAnalysisCIPctValue']),
        ci_lower_limit: analysis['OutcomeAnalysisCILowerLimit'],
        ci_upper_limit: analysis['OutcomeAnalysisCIUpperLimit'],
        ci_upper_limit_na_comment: analysis['OutcomeAnalysisCIUpperLimitComment'],
        method: analysis['OutcomeAnalysisStatisticalMethod'],
        method_description: analysis['OutcomeAnalysisStatisticalComment'],
        estimate_description: analysis['OutcomeAnalysisEstimateComment'],
        groups_description: analysis['OutcomeAnalysisGroupDescription'],
        other_analysis_description: analysis['OutcomeAnalysisOtherAnalysisDescription']
      },
      outcome_analysis_group_ids: outcome_analysis_groups_data(analysis)
    )
  end
  collection
end
# Group ids referenced by one outcome analysis.
# Returns the raw CT.gov group-id strings (save_outcomes resolves them to
# ResultGroup rows), or nil when the analysis references no groups.
def outcome_analysis_groups_data(outcome_analysis)
  outcome_analysis_group_id_list = key_check(outcome_analysis['OutcomeAnalysisGroupIdList'])
  outcome_analysis_group_ids = outcome_analysis_group_id_list['OutcomeAnalysisGroupId'] || []
  return nil if outcome_analysis_group_ids.empty?
  # The previous version also built an array of row hashes here, but since
  # `each` returns its receiver that array was dead code and the raw ids were
  # what got returned — which is exactly what save_outcomes consumes. Return
  # them explicitly.
  outcome_analysis_group_ids
end
# Overall officials (role, name, affiliation), or nil when none are listed.
def overall_officials_data
  officials_list = key_check(contacts_location_module['OverallOfficialList'])
  officials = officials_list['OverallOfficial'] || []
  return nil if officials.empty?
  officials.map do |official|
    {
      nct_id: nct_id,
      role: official['OverallOfficialRole'],
      name: official['OverallOfficialName'],
      affiliation: official['OverallOfficialAffiliation']
    }
  end
end
# All protocol-declared outcomes: primary, secondary and other.
# Returns nil when the study declares none.
def design_outcomes_data
  primary_outcomes = outcome_list('Primary') || []
  secondary_outcomes = outcome_list('Secondary') || []
  # 'Other' outcomes were previously fetched but silently dropped from the
  # total (and never nil-guarded); include them so they persist as well.
  other_outcomes = outcome_list('Other') || []
  total = primary_outcomes + secondary_outcomes + other_outcomes
  return nil if total.empty?
  total
end
# Protocol outcomes of one type ('Primary', 'Secondary' or 'Other'),
# or nil when the study declares none of that type.
def outcome_list(outcome_type='Primary')
  outcomes_module = key_check(protocol_section['OutcomesModule'])
  outcomes = key_check(outcomes_module["#{outcome_type}OutcomeList"])["#{outcome_type}Outcome"] || []
  return nil if outcomes.empty?
  outcomes.map do |outcome|
    {
      nct_id: nct_id,
      outcome_type: outcome_type.downcase,
      measure: outcome["#{outcome_type}OutcomeMeasure"],
      time_frame: outcome["#{outcome_type}OutcomeTimeFrame"],
      population: nil,
      description: outcome["#{outcome_type}OutcomeDescription"]
    }
  end
end
# Unposted (pending) result events from the annotation section, or nil.
def pending_results_data
  annotation_module = key_check(annotation_section['AnnotationModule'])
  unposted = key_check(annotation_module['UnpostedAnnotation'])
  events = key_check(unposted['UnpostedEventList'])['UnpostedEvent'] || []
  return nil if events.empty?
  events.map do |event|
    {
      nct_id: nct_id,
      event: event['UnpostedEventType'],
      event_date_description: event['UnpostedEventDate'],
      event_date: event['UnpostedEventDate'].try(:to_date)
    }
  end
end
# Large provided documents (protocols, ICFs, SAPs) with download URLs,
# or nil when none exist. The URL is nil when no filename is given.
def provided_documents_data
  large_document_module = key_check(document_section['LargeDocumentModule'])
  docs = key_check(large_document_module['LargeDocList'])['LargeDoc'] || []
  return nil if docs.empty?
  # Loop-invariant URL parts, hoisted out of the per-document loop.
  base_url = 'https://ClinicalTrials.gov/ProvidedDocs/'
  folder = "#{nct_id[-2]}#{nct_id[-1]}/#{nct_id}"
  docs.map do |doc|
    full_url = ("#{base_url}#{folder}/#{doc['LargeDocFilename']}" if doc['LargeDocFilename'])
    {
      nct_id: nct_id,
      document_type: doc['LargeDocLabel'],
      has_protocol: get_boolean(doc['LargeDocHasProtocol']),
      has_icf: get_boolean(doc['LargeDocHasICF']),
      has_sap: get_boolean(doc['LargeDocHasSAP']),
      document_date: doc['LargeDocDate'].try(:to_date),
      url: full_url
    }
  end
end
# Adverse events (serious + other) bundled with their result groups,
# or nil when neither type has any events.
def reported_events_data
  adverse_events_module = key_check(results_section['AdverseEventsModule'])
  groups = key_check(adverse_events_module['EventGroupList'])['EventGroup'] || []
  all_events = events_data('Serious') + events_data('Other')
  return nil if all_events.empty?
  {
    result_groups: StudyJsonRecord.result_groups(groups, 'Event', 'Reported Event', nct_id),
    events: all_events
  }
end
# Adverse-event rows of one type ('Serious' or 'Other'), one row per
# (event, group) statistic. Returns [] (not nil) when the study has none,
# so callers can concatenate the two types safely.
def events_data(event_type='Serious')
  adverse_events_module = key_check(results_section['AdverseEventsModule'])
  event_list = key_check(adverse_events_module["#{event_type}EventList"])
  events = event_list["#{event_type}Event"] || []
  collection = []
  events.each do |event|
    event_stat_list = key_check(event["#{event_type}EventStatsList"])
    event_stats = event_stat_list["#{event_type}EventStats"] || []
    event_stats.each do |event_stat|
      collection.push(
        nct_id: nct_id,
        result_group_id: nil, # resolved when rows are saved
        ctgov_beta_group_code: event_stat["#{event_type}EventStatsGroupId"],
        time_frame: adverse_events_module['EventsTimeFrame'],
        event_type: event_type.downcase,
        default_vocab: event["#{event_type}EventSourceVocabulary"],
        default_assessment: event["#{event_type}EventAssessmentType"],
        subjects_affected: event_stat["#{event_type}EventStatsNumAffected"],
        subjects_at_risk: event_stat["#{event_type}EventStatsNumAtRisk"],
        description: adverse_events_module['EventsDescription'],
        event_count: event_stat["#{event_type}EventStatsNumEvents"],
        organ_system: event["#{event_type}EventOrganSystem"],
        adverse_event_term: event["#{event_type}EventTerm"],
        frequency_threshold: adverse_events_module['EventsFrequencyThreshold'],
        vocab: nil,
        # NOTE(review): assessment duplicates default_assessment — confirm intended.
        assessment: event["#{event_type}EventAssessmentType"]
      )
    end
  end
  collection
end
# The study's responsible party, or nil when not reported.
# Example payloads:
# https://clinicaltrials.gov/api/query/full_studies?expr=NCT04053270&fmt=json
# https://clinicaltrials.gov/api/query/full_studies?expr=NCT04076787&fmt=json
def responsible_party_data
  sponsor_module = key_check(protocol_section['SponsorCollaboratorsModule'])
  party = key_check(sponsor_module['ResponsibleParty'])
  return nil if party.empty?
  {
    nct_id: nct_id,
    responsible_party_type: party['ResponsiblePartyType'],
    name: party['ResponsiblePartyInvestigatorFullName'],
    title: party['ResponsiblePartyInvestigatorTitle'],
    organization: party['ResponsiblePartyOldOrganization'],
    affiliation: party['ResponsiblePartyInvestigatorAffiliation']
  }
end
# Sponsor/PI agreement details from the results section, or nil.
# NOTE(review): restrictive_agreement and other_details both read
# AgreementOtherDetails — confirm restrictive_agreement shouldn't map to a
# dedicated key.
def result_agreement_data
  more_info = key_check(results_section['MoreInfoModule'])
  agreement = key_check(more_info['CertainAgreement'])
  return nil if agreement.empty?
  {
    nct_id: nct_id,
    pi_employee: agreement['AgreementPISponsorEmployee'],
    restrictive_agreement: agreement['AgreementOtherDetails'],
    restriction_type: agreement['AgreementRestrictionType'],
    other_details: agreement['AgreementOtherDetails']
  }
end
# Results point of contact, or nil when not reported.
def result_contact_data
  more_info_module = key_check(results_section['MoreInfoModule'])
  point_of_contact = key_check(more_info_module['PointOfContact'])
  return nil if point_of_contact.empty?
  ext = point_of_contact['PointOfContactPhoneExt']
  phone = point_of_contact['PointOfContactPhone']
  # Join phone and extension nil-safely; previously `phone + " ext #{ext}"`
  # raised NoMethodError when an extension was present without a phone number.
  phone_with_ext = ext ? [phone, "ext #{ext}"].compact.join(' ') : phone
  {
    nct_id: nct_id,
    organization: point_of_contact['PointOfContactOrganization'],
    name: point_of_contact['PointOfContactTitle'],
    phone: phone_with_ext,
    email: point_of_contact['PointOfContactEMail']
  }
end
# Literature references, or nil when none.
# reference_type may also be 'results_reference' (old data format).
def study_references_data
  reference_module = key_check(protocol_section['ReferencesModule'])
  references = key_check(reference_module['ReferenceList'])['Reference'] || []
  return nil if references.empty?
  references.map do |reference|
    {
      nct_id: nct_id,
      pmid: reference['ReferencePMID'],
      reference_type: reference['ReferenceType'],
      citation: reference['ReferenceCitation']
    }
  end
end
# Lead sponsor plus collaborators as row hashes, or nil when neither exists.
def sponsors_data
  sponsor_module = key_check(protocol_section['SponsorCollaboratorsModule'])
  lead = sponsor_info(key_check(sponsor_module['LeadSponsor']), 'LeadSponsor')
  collaborators = key_check(sponsor_module['CollaboratorList'])['Collaborator'] || []
  return nil if lead.nil? && collaborators.empty?
  rows = lead.nil? ? [] : [lead]
  rows + collaborators.map { |collaborator| sponsor_info(collaborator, 'Collaborator') }.compact
end
# Build one sponsor row; sponsor_type is the JSON key prefix
# ('LeadSponsor' or 'Collaborator'). Returns nil for an empty hash.
def sponsor_info(sponsor_hash, sponsor_type='LeadSponsor')
  return nil if sponsor_hash.empty?
  {
    nct_id: nct_id,
    agency_class: sponsor_hash["#{sponsor_type}Class"],
    lead_or_collaborator: sponsor_type =~ /Lead/i ? 'lead' : 'collaborator',
    name: sponsor_hash["#{sponsor_type}Name"]
  }
end
# Assemble every extracted section of the study into one hash.
# Individual values may be nil when the study lacks that section; build_study
# guards each create call accordingly.
def data_collection
  {
    study: study_data,
    design_groups: design_groups_data,
    interventions: interventions_data,
    detailed_description: detailed_description_data,
    brief_summary: brief_summary_data,
    design: designs_data,
    eligibility: eligibility_data,
    participant_flow: participant_flow_data,
    baseline_measurements: baseline_measurements_data,
    browse_conditions: browse_conditions_data,
    browse_interventions: browse_interventions_data,
    central_contacts_list: central_contacts_data,
    conditions: conditions_data,
    countries: countries_data,
    documents: documents_data,
    facilities: facilities_data,
    id_information: id_information_data,
    ipd_information_type: ipd_information_types_data,
    keywords: keywords_data,
    links: links_data,
    milestones: milestones_data,
    outcomes: outcomes_data,
    overall_officials: overall_officials_data,
    design_outcomes: design_outcomes_data,
    pending_results: pending_results_data,
    provided_documents: provided_documents_data,
    reported_events: reported_events_data,
    responsible_party: responsible_party_data,
    result_agreement: result_agreement_data,
    result_contact: result_contact_data,
    study_references: study_references_data,
    sponsors: sponsors_data
  }
end
# Persist every extracted section of this study record.
# Creation order matters: result groups are created before the rows that
# reference them. A failure is reported and recorded in @study_build_failures
# instead of halting a long import run.
def build_study
  data = data_collection
  Study.create(data[:study]) if data[:study]
  # interventions before design groups (design group links look them up)
  save_interventions(data[:interventions])
  save_design_groups(data[:design_groups])
  DetailedDescription.create(data[:detailed_description]) if data[:detailed_description]
  BriefSummary.create(data[:brief_summary]) if data[:brief_summary]
  Design.create(data[:design]) if data[:design]
  Eligibility.create(data[:eligibility]) if data[:eligibility]
  ParticipantFlow.create(data[:participant_flow]) if data[:participant_flow]
  # baseline measurements and their result groups / counts
  baseline_info = data[:baseline_measurements]
  ResultGroup.create(baseline_info[:result_groups]) if baseline_info
  save_with_result_group(baseline_info[:baseline_counts], 'BaselineCount') if baseline_info
  save_with_result_group(baseline_info[:measurements], 'BaselineMeasurement') if baseline_info
  BrowseCondition.create(data[:browse_conditions]) if data[:browse_conditions]
  BrowseIntervention.create(data[:browse_interventions]) if data[:browse_interventions]
  CentralContact.create(data[:central_contacts_list]) if data[:central_contacts_list]
  Condition.create(data[:conditions]) if data[:conditions]
  Country.create(data[:countries]) if data[:countries]
  Document.create(data[:documents]) if data[:documents]
  # facilities and their contacts/investigators
  save_facilities(data[:facilities])
  IdInformation.create(data[:id_information]) if data[:id_information]
  IpdInformationType.create(data[:ipd_information_type]) if data[:ipd_information_type]
  Keyword.create(data[:keywords]) if data[:keywords]
  Link.create(data[:links]) if data[:links]
  # milestones and their result groups
  milestone_info = data[:milestones] || {}
  ResultGroup.create(milestone_info[:result_groups]) if milestone_info[:result_groups]
  save_with_result_group(milestone_info[:milestones], 'Milestone') if milestone_info[:milestones]
  # outcomes and their result groups
  outcomes_info = data[:outcomes] || {}
  ResultGroup.create(outcomes_info[:result_groups]) if outcomes_info[:result_groups]
  save_outcomes(outcomes_info[:outcome_measures]) if outcomes_info[:outcome_measures]
  OverallOfficial.create(data[:overall_officials]) if data[:overall_officials]
  DesignOutcome.create(data[:design_outcomes]) if data[:design_outcomes]
  PendingResult.create(data[:pending_results]) if data[:pending_results]
  ProvidedDocument.create(data[:provided_documents]) if data[:provided_documents]
  # reported events and their result groups
  reported_events_info = data[:reported_events] || {}
  ResultGroup.create(reported_events_info[:result_groups]) if reported_events_info[:result_groups]
  save_with_result_group(reported_events_info[:events], 'ReportedEvent') if reported_events_info[:events]
  ResponsibleParty.create(data[:responsible_party]) if data[:responsible_party]
  ResultAgreement.create(data[:result_agreement]) if data[:result_agreement]
  ResultContact.create(data[:result_contact]) if data[:result_contact]
  Reference.create(data[:study_references]) if data[:study_references]
  Sponsor.create(data[:sponsors]) if data[:sponsors]
  update(saved_study_at: Time.now)
  puts "~~~~~~~~~~~~~~"
  puts "#{nct_id} done"
  # previously a bare string expression with no `puts` — footer never printed
  puts "~~~~~~~~~~~~~~"
rescue => error
  # Removed a leftover `byebug` breakpoint; report instead of silently
  # swallowing the error, and record the failure for later inspection.
  puts "Failed to build study #{nct_id}: #{error.message}"
  @study_build_failures ||= []
  @study_build_failures << id
end
# Row counts for every model populated by the import, keyed by model name.
# Counts reflect whichever schema set_table_schema last pointed the models at.
def self.object_counts
  {
    study: Study.count,
    intervention: Intervention.count,
    intervention_other_name: InterventionOtherName.count,
    design_group: DesignGroup.count,
    design_group_intervention: DesignGroupIntervention.count,
    detailed_description: DetailedDescription.count,
    brief_summary: BriefSummary.count,
    design: Design.count,
    eligibility: Eligibility.count,
    participant_flow: ParticipantFlow.count,
    result_groups: ResultGroup.count,
    baseline_count: BaselineCount.count,
    baseline_measurement: BaselineMeasurement.count,
    browse_condition: BrowseCondition.count,
    browse_intervention: BrowseIntervention.count,
    central_contact: CentralContact.count,
    condition: Condition.count,
    country: Country.count,
    document: Document.count,
    facility: Facility.count,
    facility_contact: FacilityContact.count,
    facility_investigator: FacilityInvestigator.count,
    id_information: IdInformation.count,
    ipd_information_type: IpdInformationType.count,
    keyword: Keyword.count,
    link: Link.count,
    milestone: Milestone.count,
    outcome: Outcome.count,
    outcome_count: OutcomeCount.count,
    outcome_measurement: OutcomeMeasurement.count,
    outcome_analysis: OutcomeAnalysis.count,
    outcome_analysis_group: OutcomeAnalysisGroup.count,
    overall_official: OverallOfficial.count,
    design_outcome: DesignOutcome.count,
    pending_result: PendingResult.count,
    provided_document: ProvidedDocument.count,
    reported_event: ReportedEvent.count,
    responsible_party: ResponsibleParty.count,
    result_agreement: ResultAgreement.count,
    result_contact: ResultContact.count,
    study_reference: Reference.count,
    sponsor: Sponsor.count
  }
end
# Point every loadable ActiveRecord model at the given schema.
# Only 'ctgov' and 'ctgov_beta' are accepted; anything else is a no-op.
def self.set_table_schema(schema = 'ctgov')
  return unless %w[ctgov ctgov_beta].include?(schema)
  Util::DbManager.new.loadable_tables.each do |name|
    model = name.singularize.camelize.safe_constantize
    model.table_name = "#{schema}.#{name}" if model
  end
end
# Compare per-model row counts between the ctgov_beta and ctgov schemas.
# Returns the full list of { model => { beta:, reg: } } pairs, with a final
# { inconsistencies: [...] } entry listing only the mismatched models.
def self.comparison
  set_table_schema('ctgov_beta')
  beta_counts = object_counts
  set_table_schema('ctgov')
  reg_counts = object_counts
  counts = []
  mismatches = []
  beta_counts.each do |model_name, beta_count|
    pair = { beta: beta_count, reg: reg_counts[model_name] }
    mismatches.push({ "#{model_name}": pair }) if beta_count != reg_counts[model_name]
    counts.push({ "#{model_name}": pair })
  end
  counts.push({ inconsistencies: mismatches })
end
# Sanity check: count result groups by result_type in both schemas.
# (Key names preserved exactly, including the beta_outcome/reg_outcomes
# singular/plural irregularity that callers may rely on.)
def self.check_results
  types = ['Outcome', 'Baseline', 'Participant Flow', 'Reported Event']
  count_for = ->(type) { ResultGroup.where(result_type: type).count }
  set_table_schema('ctgov_beta')
  beta = types.map { |type| count_for.call(type) }
  set_table_schema('ctgov')
  reg = types.map { |type| count_for.call(type) }
  {
    beta_outcome: beta[0],
    reg_outcomes: reg[0],
    beta_baseline: beta[1],
    reg_baseline: reg[1],
    beta_participant: beta[2],
    reg_participant: reg[2],
    beta_reported: beta[3],
    reg_reported: reg[3]
  }
end
# Developer scratch method used while debugging the beta import against a
# handful of studies. NOTE(review): not production code — consider deleting
# once the beta/reg comparison work is done.
def self.new_check
  set_table_schema('ctgov_beta')
  nct = %w[
    NCT04316403
  ]
  # Outcome
  # Baseline
  # Participant Flow
  # Reported Event
  # NOTE(review): the hash literal below is a pasted snapshot of a past
  # `comparison` run — it is dead code (evaluated and discarded).
  {:inconsistencies=>[ {:design_group_intervention=>{:beta=>4906, :reg=>4911}},
    {:design=>{:beta=>1551, :reg=>1562}},
    {:eligibility=>{:beta=>1555, :reg=>1562}},
    {:result_groups=>{:beta=>925, :reg=>2561}},
    {:country=>{:beta=>44121, :reg=>3830}},
    {:design_outcome=>{:beta=>12642, :reg=>13230}},
    {:reported_event=>{:beta=>13544, :reg=>14119}}]}
  # NOTE(review): x_nct is assigned but unused — kept as a handy id list.
  x_nct = %w[
    NCT04292080
    NCT04050527
    NCT00530010
    NCT04144088
    NCT04053270
    NCT03897712
    NCT03845673
    NCT04245423
    NCT03519243
    NCT03034044
    NCT03496987
    NCT04204200
    NCT04182217
    NCT04167644
    NCT04214080
    NCT02982187
    NCT04027218
    NCT03811093
    NCT04109703
    NCT03763058
    NCT00489281
    NCT04076787
    NCT00725621
    NCT02222493
    NCT04014062
  ]
  StudyJsonRecord.where(nct_id: nct).each{ |i| puts i.interventions_data }
  # StudyJsonRecord.all.order(:id).each{ |i| puts i.study_data }
  # StudyJsonRecord.where(nct_id: nct).each{ |i| puts i.data_collection }
  # StudyJsonRecord.all.order(:id).each{ |i| puts i.data_collection }
  # record = StudyJsonRecord.find_by(nct_id: 'NCT04072432')
  []
end
# Create Intervention rows plus their InterventionOtherName children.
# Accepts the array produced by interventions_data; no-op when nil.
def save_interventions(interventions)
  return unless interventions
  interventions.each do |entry|
    record = Intervention.create(entry[:intervention])
    other_names = entry[:intervention_other_names]
    next unless other_names
    other_names.each do |name_info|
      name_info[:intervention_id] = record.id
      InterventionOtherName.create(name_info)
    end
  end
end
# Upsert design groups and link them to their (already saved) interventions.
# Links whose intervention cannot be found are reported and skipped.
def save_design_groups(design_groups)
  return unless design_groups
  design_groups.each do |group|
    design_info = group[:design_group]
    design_group = DesignGroup.find_by(nct_id: nct_id, title: design_info[:title])
    design_group ||= DesignGroup.create(nct_id: nct_id, title: design_info[:title])
    design_group.update(design_info)
    interventions = group[:design_group_interventions]
    next unless interventions
    interventions.each do |intervention_info|
      intervention = Intervention.find_by(
        nct_id: nct_id,
        name: intervention_info[:name],
        intervention_type: intervention_info[:type]
      )
      unless intervention
        hash = {
          nct_id: nct_id,
          name: intervention_info[:name],
          intervention_type: intervention_info[:type]
        }
        # Removed a leftover `byebug` breakpoint; just report the orphan link.
        puts "Missing intervention for design group link: #{hash}"
        next
      end
      DesignGroupIntervention.create(
        nct_id: nct_id,
        design_group_id: design_group.id,
        intervention_id: intervention.id
      )
    end
  end
end
# Attach baseline counts to their result groups and persist them.
# NOTE: build_study uses save_with_result_group('BaselineCount') for this;
# kept for compatibility with any external callers.
def save_baseline_counts(counts)
  return unless counts
  counts.each do |count|
    group = ResultGroup.find_by(nct_id: nct_id, ctgov_beta_group_code: count[:ctgov_beta_group_code])
    next if group.nil?
    count[:result_group_id] = group.id
    BaselineCount.create(count)
  end
end
# Resolve each item's result group and persist it via the named model.
# model_name must be an ActiveRecord class name (e.g. 'BaselineMeasurement');
# items whose group code has no matching ResultGroup are skipped.
def save_with_result_group(group, model_name='BaselineMeasurement')
  return unless group
  # Constantize once outside the loop (it was done per item) and bail out on
  # an unknown model name instead of raising NoMethodError on nil.
  model = model_name.safe_constantize
  return unless model
  group.each do |item|
    result_group = ResultGroup.find_by(nct_id: nct_id, ctgov_beta_group_code: item[:ctgov_beta_group_code])
    next unless result_group
    item[:result_group_id] = result_group.id
    model.create(item)
  end
end
# Persists facilities plus their nested contacts and investigators.
#
# Review fix (nil-safety ordering bug): the original called `.each` on
# facility_contacts / facility_investigators to stamp the facility_id
# BEFORE the `if facility_info[:facility_contacts]` guards on the following
# lines, so a facility without contacts or investigators raised
# NoMethodError on nil. The guards now wrap both the mutation and the
# create call.
def save_facilities(facilities)
  return unless facilities

  facilities.each do |facility_info|
    facility = Facility.create(facility_info[:facility]) if facility_info[:facility]
    next unless facility

    contacts = facility_info[:facility_contacts]
    investigators = facility_info[:facility_investigators]

    if contacts
      contacts.each { |attrs| attrs[:facility_id] = facility.id }
      FacilityContact.create(contacts)
    end

    if investigators
      investigators.each { |attrs| attrs[:facility_id] = facility.id }
      FacilityInvestigator.create(investigators)
    end
  end
end
# Persists outcome measures: the Outcome record itself, its counts and
# measurements (each linked through save_with_result_group), and its
# analyses with their per-group join records.
#
# Review fixes: removed a leftover `puts outcome_measure` debug line, and
# replaced a side-effecting `map { ... } unless empty?` with a plain `each`
# (the mapped result was discarded; `each` on an empty array is already a
# no-op, so the emptiness guard was redundant).
def save_outcomes(outcome_measures)
  return unless outcome_measures

  outcome_measures.each do |outcome_measure|
    outcome = Outcome.create(outcome_measure[:outcome_measure]) if outcome_measure[:outcome_measure]
    next unless outcome

    outcome_counts = StudyJsonRecord.set_key_value(outcome_measure[:outcome_counts], :outcome_id, outcome.id)
    outcome_measurements = StudyJsonRecord.set_key_value(outcome_measure[:outcome_measurements], :outcome_id, outcome.id)
    save_with_result_group(outcome_counts, 'OutcomeCount') if outcome_counts
    save_with_result_group(outcome_measurements, 'OutcomeMeasurement') if outcome_measurements

    outcome_analyses = outcome_measure[:outcome_analyses] || []
    # Stamp the parent outcome id on every analysis payload before saving.
    outcome_analyses.each { |h| h[:outcome_analysis][:outcome_id] = outcome.id }
    outcome_analyses.each do |analysis_info|
      outcome_analysis = OutcomeAnalysis.create(analysis_info[:outcome_analysis])
      (analysis_info[:outcome_analysis_group_ids] || []).each do |group_id|
        result_group = ResultGroup.find_by(nct_id: nct_id, ctgov_beta_group_code: group_id)
        next unless result_group && outcome_analysis

        OutcomeAnalysisGroup.create(
          nct_id: nct_id,
          outcome_analysis_id: outcome_analysis.id,
          result_group_id: result_group.id,
          ctgov_beta_group_code: group_id
        )
      end
    end
  end
end
# Assigns +value+ under +key+ in every hash of +hash_array+, mutating the
# hashes in place. Returns the (mutated) array, or nil when +hash_array+
# is nil.
#
# Review fix: used `each` instead of `map` — the mapped result was being
# discarded, and `each` returns its receiver, making the trailing explicit
# return of the array unnecessary.
def self.set_key_value(hash_array, key, value)
  return unless hash_array

  hash_array.each { |h| h[key] = value }
end
end
|
# frozen_string_literal: true
# Supervisor's report for a drug/alcohol testing incident, with a
# PaperTrail-backed change history.
class SupervisorReport < ApplicationRecord
  has_paper_trail

  # Accepted values for reason_test_completed.
  REASONS_FOR_TEST = ['Post-Accident', 'Reasonable Suspicion'].freeze
  TESTING_FACILITIES = [
    'Occuhealth East Longmeadow',
    'Occuhealth Northampton',
    'On-Site (Employee Work Location)'
  ].freeze
  # Columns omitted when rendering version history.
  HISTORY_EXCLUDE_FIELDS = %w[id created_at updated_at].freeze

  validates :reason_test_completed,
            inclusion: { in: REASONS_FOR_TEST, allow_blank: true }

  has_one :incident
  has_many :witnesses
  has_many :injured_passengers
  accepts_nested_attributes_for :witnesses
  accepts_nested_attributes_for :injured_passengers

  # Free-form amplifying comments when present; otherwise the joined
  # FTA (failure-to-administer) justification lines.
  def additional_comments
    return amplifying_comments if amplifying_comments.present?

    justifications = []
    justifications << "Reason FTA threshold not met: #{reason_threshold_not_met}" if reason_threshold_not_met.present?
    justifications << "Reason driver was discounted: #{reason_driver_discounted}" if reason_driver_discounted.present?
    justifications.join("\n")
  end

  def has_injured_passengers?
    injured_passengers.present? && injured_passengers.first.persisted?
  end

  def has_witnesses?
    witnesses.present? && witnesses.first.persisted?
  end

  # Most recent PaperTrail version record.
  def last_update
    versions.last
  end

  def last_updated_at
    last_update.created_at.strftime('%A, %B %e - %l:%M %P')
  end

  def last_updated_by
    User.find_by(id: last_update.whodunnit).try(:name) || 'Unknown'
  end

  def reasonable_suspicion?
    reason_test_completed == 'Reasonable Suspicion'
  end

  def post_accident?
    reason_test_completed == 'Post-Accident'
  end

  # Chronologically sorted "time: event" strings for every timestamp that
  # has been recorded on the report.
  def timeline
    events = %w[
      testing_facility_notified
      employee_notified_of_test
      employee_departed_to_test
      employee_arrived_at_test
      test_started
      test_ended
      employee_returned
      superintendent_notified
      program_manager_notified
      director_notified
    ].each_with_object({}) do |field, acc|
      stamp = public_send("#{field}_at")
      acc[field] = stamp if stamp.present?
    end
    format_timeline(events)
  end

  private

  def format_timeline(events)
    events.sort_by { |_field, stamp| stamp }.map do |field, stamp|
      "#{stamp.strftime('%-l:%M %P')}: #{field.humanize.capitalize}"
    end
  end
end
Reduce the complexity of `additional_comments` by extracting the FTA justification logic into a sub-method.
# frozen_string_literal: true
# Supervisor's report for a drug/alcohol testing incident, with a
# PaperTrail-backed change history.
class SupervisorReport < ApplicationRecord
  has_paper_trail

  # Accepted values for reason_test_completed.
  REASONS_FOR_TEST = ['Post-Accident', 'Reasonable Suspicion'].freeze
  TESTING_FACILITIES = [
    'Occuhealth East Longmeadow',
    'Occuhealth Northampton',
    'On-Site (Employee Work Location)'
  ].freeze
  # Columns omitted when rendering version history.
  HISTORY_EXCLUDE_FIELDS = %w[id created_at updated_at].freeze

  validates :reason_test_completed,
            inclusion: { in: REASONS_FOR_TEST, allow_blank: true }

  has_one :incident
  has_many :witnesses
  has_many :injured_passengers
  accepts_nested_attributes_for :witnesses
  accepts_nested_attributes_for :injured_passengers

  # Free-form comments when a drug/alcohol test was completed; otherwise the
  # justification for failing to administer one.
  def additional_comments
    completed_drug_or_alcohol_test? ? amplifying_comments : fta_justifications
  end

  # Joined explanation lines for why no test was administered (FTA).
  def fta_justifications
    justifications = []
    justifications << "Reason FTA threshold not met: #{reason_threshold_not_met}" if reason_threshold_not_met.present?
    justifications << "Reason driver was discounted: #{reason_driver_discounted}" if reason_driver_discounted.present?
    justifications.join("\n")
  end

  def has_injured_passengers?
    injured_passengers.present? && injured_passengers.first.persisted?
  end

  def has_witnesses?
    witnesses.present? && witnesses.first.persisted?
  end

  # Most recent PaperTrail version record.
  def last_update
    versions.last
  end

  def last_updated_at
    last_update.created_at.strftime('%A, %B %e - %l:%M %P')
  end

  def last_updated_by
    User.find_by(id: last_update.whodunnit).try(:name) || 'Unknown'
  end

  def reasonable_suspicion?
    reason_test_completed == 'Reasonable Suspicion'
  end

  def post_accident?
    reason_test_completed == 'Post-Accident'
  end

  # Chronologically sorted "time: event" strings for every timestamp that
  # has been recorded on the report.
  def timeline
    events = %w[
      testing_facility_notified
      employee_notified_of_test
      employee_departed_to_test
      employee_arrived_at_test
      test_started
      test_ended
      employee_returned
      superintendent_notified
      program_manager_notified
      director_notified
    ].each_with_object({}) do |field, acc|
      stamp = public_send("#{field}_at")
      acc[field] = stamp if stamp.present?
    end
    format_timeline(events)
  end

  private

  def format_timeline(events)
    events.sort_by { |_field, stamp| stamp }.map do |field, stamp|
      "#{stamp.strftime('%-l:%M %P')}: #{field.humanize.capitalize}"
    end
  end
end
|
# -*- coding: utf-8 -*-
# id :integer not null
# user_id :integer not null
# steps :integer not null
# body_mass :float not null
# created_at :datetime
# updated_at :datetime
class UserDataHistory < ActiveRecord::Base
  # Review fix: `belongs_to :users` (plural) breaks the Rails naming
  # convention — it would try to resolve a `Users` class. The association
  # is singular.
  belongs_to :user

  # Connects to the Fitbit API through Fitgem using OAuth credentials from
  # .fitgem.yml. The data-fetching calls are still commented out below.
  # TODO(review): the config path is hard-coded to a developer machine;
  # move it under Rails.root or an environment variable.
  def self.update_user_data
    config =
      begin
        Fitgem::Client.symbolize_keys(
          YAML.load(File.open("/home/kaname/development/independent_research/yacmo/config/.fitgem.yml"))
        )
      rescue ArgumentError => e
        # Review fix: corrected the "Cud not parse YAMLE" typo.
        puts "Could not parse YAML: #{e.message}"
        nil
      end
    # Review fix: bail out when parsing failed instead of raising
    # NoMethodError on `config[:oauth]` with config == nil.
    return unless config

    client = Fitgem::Client.new(config[:oauth])
    if config[:oauth][:token] && config[:oauth][:secret]
      begin
        client.reconnect(config[:oauth][:token], config[:oauth][:secret])
      rescue StandardError
        # Review fix: rescue StandardError rather than Exception (which
        # would also swallow SignalException / SystemExit).
        puts "Error: .fitgem.ymlのkeyが不正です。Fitgem::Clientへ再接続できません"
        return
      end
    else
      puts "Fitgem::Clientの初期化を行ってください"
      return
    end

    # activities = client.activities_on_date('today')
    # puts activities
    # How to fetch friend info:
    # puts client.friends
    # How to fetch user_info:
    # puts client.user_info['user']
  end
end
体重と歩数を取得する処理を追加
# -*- coding: utf-8 -*-
# id :integer not null
# user_id :integer not null
# steps :integer not null
# body_mass :float not null
# created_at :datetime
# updated_at :datetime
class UserDataHistory < ActiveRecord::Base
  # Review fix: `belongs_to :users` (plural) breaks the Rails naming
  # convention — it would try to resolve a `Users` class. The association
  # is singular.
  belongs_to :user

  # Fetches today's weight and step count for the configured Fitbit
  # account via Fitgem, using OAuth credentials from .fitgem.yml.
  # TODO(review): the config path is hard-coded to a developer machine;
  # move it under Rails.root or an environment variable.
  def self.update_user_data
    config =
      begin
        Fitgem::Client.symbolize_keys(
          YAML.load(File.open("/home/kaname/development/independent_research/yacmo/config/.fitgem.yml"))
        )
      rescue ArgumentError => e
        # Review fix: corrected the "Cud not parse YAMLE" typo.
        puts "Could not parse YAML: #{e.message}"
        nil
      end
    # Review fix: bail out when parsing failed instead of raising
    # NoMethodError on `config[:oauth]` with config == nil.
    return unless config

    client = Fitgem::Client.new(config[:oauth])
    if config[:oauth][:token] && config[:oauth][:secret]
      begin
        client.reconnect(config[:oauth][:token], config[:oauth][:secret])
      rescue StandardError
        # Review fix: rescue StandardError rather than Exception (which
        # would also swallow SignalException / SystemExit).
        puts "Error: .fitgem.ymlのkeyが不正です。Fitgem::Clientへ再接続できません"
        return
      end
    else
      puts "Fitgem::Clientの初期化を行ってください"
      return
    end

    # Weight — 0.453592 converts pounds to kilograms.
    weight = client.user_info['user']["weight"] * 0.453592
    p "体重: #{weight}"

    # Today's step count.
    activities = client.activities_on_date('today')
    steps = activities["summary"]["steps"]
    p "歩数: #{steps}"
    # How to fetch friend info:
    # puts client.friends
  end
end
|
# Gem specification for mega_menus — a treeview menu generator for Rails.
Gem::Specification.new do |s|
  s.name    = 'mega_menus'
  s.version = '0.5.4'

  s.required_rubygems_version = Gem::Requirement.new('>= 1.2') if s.respond_to?(:required_rubygems_version=)
  s.authors = ['Marko Toros']
  s.date = '2008-11-10'
  s.description = 'Adds a model, controller to perform the tasks in order to have a treeview menu. To use this gem simply install it and write script/generate menu name_of_the_menu'
  s.email = 'mtoros@gmail.com'
  s.extra_rdoc_files = ['README', 'lib/mega_menus.rb', 'lib/mega_menus/editor.rb', 'lib/mega_menus/view_helpers.rb']
  s.files = ['Manifest', 'README', 'Rakefile', 'lib/mega_menus.rb', 'lib/mega_menus/editor.rb', 'lib/mega_menus/view_helpers.rb', 'test/test_editor.rb', 'rails_generators/menu/USAGE', 'rails_generators/menu/menu_generator.rb', 'rails_generators/menu/templates/controllers/menu_controller.rb', 'rails_generators/menu/templates/views/add_menu_form.rjs', 'rails_generators/menu/templates/views/add_menu.rjs', 'rails_generators/menu/templates/views/delete_menu.rjs', 'rails_generators/menu/templates/views/edit_menu_form.rjs', 'rails_generators/menu/templates/views/edit_menu.rjs', 'rails_generators/menu/templates/views/up_menu.rjs', 'rails_generators/menu/templates/views/down_menu.rjs', 'rails_generators/menu/templates/views/_menu.html.erb', 'rails_generators/menu/templates/helpers/menu_helper.rb', 'rails_generators/menu/templates/models/create_menus.rb', 'rails_generators/menu/templates/models/menu.rb', 'mega_menus.gemspec']
  s.has_rdoc = true
  s.homepage = ''
  s.rdoc_options = ['--line-numbers', '--inline-source', '--title', 'Mega_menus', '--main', 'README']
  s.require_paths = ['lib']
  s.rubyforge_project = 'mega_menus'
  s.rubygems_version = '1.2.0'
  s.summary = 'Treeview menu Gem for Rails'
  s.test_files = ['test/test_editor.rb']

  # Dependency declaration varies with the RubyGems generation in use.
  if s.respond_to?(:specification_version)
    s.specification_version = 2
    if Gem::Specification::CURRENT_SPECIFICATION_VERSION >= 3
      s.add_development_dependency('echoe', ['>= 0'])
    else
      s.add_dependency('echoe', ['>= 0'])
    end
  else
    s.add_dependency('echoe', ['>= 0'])
  end
end
Changed rwx file permissions; file contents are unchanged.
# Gem specification for mega_menus — a treeview menu generator for Rails.
Gem::Specification.new do |s|
  s.name    = 'mega_menus'
  s.version = '0.5.4'

  s.required_rubygems_version = Gem::Requirement.new('>= 1.2') if s.respond_to?(:required_rubygems_version=)
  s.authors = ['Marko Toros']
  s.date = '2008-11-10'
  s.description = 'Adds a model, controller to perform the tasks in order to have a treeview menu. To use this gem simply install it and write script/generate menu name_of_the_menu'
  s.email = 'mtoros@gmail.com'
  s.extra_rdoc_files = ['README', 'lib/mega_menus.rb', 'lib/mega_menus/editor.rb', 'lib/mega_menus/view_helpers.rb']
  s.files = ['Manifest', 'README', 'Rakefile', 'lib/mega_menus.rb', 'lib/mega_menus/editor.rb', 'lib/mega_menus/view_helpers.rb', 'test/test_editor.rb', 'rails_generators/menu/USAGE', 'rails_generators/menu/menu_generator.rb', 'rails_generators/menu/templates/controllers/menu_controller.rb', 'rails_generators/menu/templates/views/add_menu_form.rjs', 'rails_generators/menu/templates/views/add_menu.rjs', 'rails_generators/menu/templates/views/delete_menu.rjs', 'rails_generators/menu/templates/views/edit_menu_form.rjs', 'rails_generators/menu/templates/views/edit_menu.rjs', 'rails_generators/menu/templates/views/up_menu.rjs', 'rails_generators/menu/templates/views/down_menu.rjs', 'rails_generators/menu/templates/views/_menu.html.erb', 'rails_generators/menu/templates/helpers/menu_helper.rb', 'rails_generators/menu/templates/models/create_menus.rb', 'rails_generators/menu/templates/models/menu.rb', 'mega_menus.gemspec']
  s.has_rdoc = true
  s.homepage = ''
  s.rdoc_options = ['--line-numbers', '--inline-source', '--title', 'Mega_menus', '--main', 'README']
  s.require_paths = ['lib']
  s.rubyforge_project = 'mega_menus'
  s.rubygems_version = '1.2.0'
  s.summary = 'Treeview menu Gem for Rails'
  s.test_files = ['test/test_editor.rb']

  # Dependency declaration varies with the RubyGems generation in use.
  if s.respond_to?(:specification_version)
    s.specification_version = 2
    if Gem::Specification::CURRENT_SPECIFICATION_VERSION >= 3
      s.add_development_dependency('echoe', ['>= 0'])
    else
      s.add_dependency('echoe', ['>= 0'])
    end
  else
    s.add_dependency('echoe', ['>= 0'])
  end
end
|
# Serializes a Group for the frontend (Grape::Entity), including the
# current user's permissions and navigation paths.
#
# Review fix: the `group_access` block declared an `options` parameter it
# never used; renamed to `_options` per Ruby convention for unused params.
class GroupEntity < Grape::Entity
  include ActionView::Helpers::NumberHelper
  include RequestAwareEntity
  include MembersHelper
  include GroupsHelper

  expose :id, :name, :path, :description, :visibility
  expose :web_url
  expose :full_name, :full_path
  expose :parent_id
  expose :created_at, :updated_at

  expose :permissions do
    # Access level of the current user's membership in this group, or nil
    # when the user is not a member.
    expose :group_access do |group, _options|
      group.group_members.find_by(user_id: request.current_user)&.access_level
    end
  end

  expose :edit_path do |group|
    edit_group_path(group)
  end

  expose :leave_path do |group|
    leave_group_group_members_path(group)
  end

  expose :can_edit do |group|
    can?(request.current_user, :admin_group, group)
  end

  expose :has_subgroups do |group|
    GroupsFinder.new(request.current_user, parent: group).execute.any?
  end

  expose :number_projects_with_delimiter do |group|
    number_with_delimiter(GroupProjectsFinder.new(group: group, current_user: request.current_user).execute.count)
  end

  expose :number_users_with_delimiter do |group|
    number_with_delimiter(group.users.count)
  end

  expose :avatar_url do |group|
    group_icon(group)
  end
end
Expose human group access on GroupEntity
# Serializes a Group for the frontend (Grape::Entity), including the
# current user's permissions and navigation paths.
#
# Review fix: the `human_group_access` block declared an `options` parameter
# it never used; renamed to `_options` per Ruby convention for unused params.
class GroupEntity < Grape::Entity
  include ActionView::Helpers::NumberHelper
  include RequestAwareEntity
  include MembersHelper
  include GroupsHelper

  expose :id, :name, :path, :description, :visibility
  expose :web_url
  expose :full_name, :full_path
  expose :parent_id
  expose :created_at, :updated_at

  expose :permissions do
    # Human-readable access level of the current user's membership in this
    # group, or nil when the user is not a member.
    expose :human_group_access do |group, _options|
      group.group_members.find_by(user_id: request.current_user)&.human_access
    end
  end

  expose :edit_path do |group|
    edit_group_path(group)
  end

  expose :leave_path do |group|
    leave_group_group_members_path(group)
  end

  expose :can_edit do |group|
    can?(request.current_user, :admin_group, group)
  end

  expose :has_subgroups do |group|
    GroupsFinder.new(request.current_user, parent: group).execute.any?
  end

  expose :number_projects_with_delimiter do |group|
    number_with_delimiter(GroupProjectsFinder.new(group: group, current_user: request.current_user).execute.count)
  end

  expose :number_users_with_delimiter do |group|
    number_with_delimiter(group.users.count)
  end

  expose :avatar_url do |group|
    group_icon(group)
  end
end
|
# Flattens a Claim and its associations into a plain hash for MI
# (management information) exports and archiving.
module Transform
  class Claim
    # Attributes copied verbatim from the claim.
    DUPLICATE = %w[disk_evidence retrial_reduction case_concluded_at effective_pcmh_date first_day_of_trial
                   legal_aid_transfer_date retrial_concluded_at retrial_started_at trial_concluded_at
                   trial_cracked_at trial_fixed_at trial_fixed_notice_at authorised_at created_at
                   last_submitted_at original_submission_date disbursements_total disbursements_vat
                   expenses_total expenses_vat fees_total fees_vat total vat_amount actual_trial_length
                   estimated_trial_length retrial_actual_length retrial_estimated_length advocate_category
                   trial_cracked_at_third source supplier_number].freeze
    # Association counts, optionally filtered by a SQL condition.
    COUNT = {
      num_of_documents: { object: :documents, where: '' },
      num_of_defendants: { object: :defendants, where: '' },
      rejections: { object: :claim_state_transitions, where: "\"to\"='rejected'" },
      refusals: { object: :claim_state_transitions, where: "\"to\"='refused'" }
    }.freeze
    # Values read off an associated object (safe-navigated, so a missing
    # association yields nil).
    OBJECT_VALUE = {
      court: { object: :court, att: :name },
      transfer_court: { object: :transfer_court, att: :name },
      case_type: { object: :case_type, att: :name },
      offence_name: { object: :offence, att: :description },
      date_last_assessed: { object: :assessment, att: :created_at },
      provider_name: { object: :provider, att: :name },
      provider_type: { object: :provider, att: :provider_type },
      assessment_total: { object: :assessment, att: :total },
      assessment_fees: { object: :assessment, att: :fees },
      assessment_expenses: { object: :assessment, att: :expenses },
      assessment_disbursements: { object: :assessment, att: :disbursements },
      assessment_vat: { object: :assessment, att: :vat_amount },
      scheme_name: { object: :fee_scheme, att: :name },
      scheme_number: { object: :fee_scheme, att: :version }
    }.freeze
    # Derived sums over previously-computed hash keys.
    CALCULATIONS = {
      amount_claimed: %i[total vat_amount],
      amount_authorised: %i[assessment_total assessment_vat]
    }.freeze
    # Maps STI class names to human-readable claim type labels.
    # Review fix: 'Claim::AdvocateSupplementaryClaim' was missing, leaving
    # claim_type nil for supplementary claims in MI exports and archiving.
    CLAIM_TYPE_CONVERSIONS = {
      'Claim::AdvocateSupplementaryClaim' => 'Advocate supplementary claim',
      'Claim::AdvocateInterimClaim' => 'Advocate interim claim',
      'Claim::AdvocateClaim' => 'Advocate final claim',
      'Claim::InterimClaim' => 'Litigator interim claim',
      'Claim::LitigatorClaim' => 'Litigator final claim',
      'Claim::TransferClaim' => 'Litigator transfer claim'
    }.freeze

    class << self
      # Builds and returns the flattened hash for +claim+.
      def call(claim)
        hash = {}
        @claim = claim
        DUPLICATE.each { |att| hash[att.to_sym] = claim.send(att) }
        COUNT.each { |att, source| hash[att] = claim.send(source[:object]).where(source[:where]).count }
        OBJECT_VALUE.each { |att, source| hash[att] = claim.send(source[:object])&.send(source[:att]) }
        CALCULATIONS.each { |att, sum| hash[att] = (hash[sum[0]] || 0) + (hash[sum[1]] || 0) }
        hash[:ppe] = ppe
        hash[:claim_type] = CLAIM_TYPE_CONVERSIONS[claim.type.to_s]
        hash[:offence_type] = claim.offence&.offence_band&.description || claim.offence&.offence_class&.class_letter
        hash
      end

      private

      attr_accessor :claim

      # Pages of prosecution evidence: AGFS claims read the BAPPE fee
      # quantity; other claims read the first graduated/transfer/interim fee.
      def ppe
        if @claim.agfs?
          @claim.fees.find_by(fee_type_id: ::Fee::BaseFeeType.find_by_id_or_unique_code('BAPPE'))&.quantity.to_i
        else
          @claim.fees.where(type: %w[Fee::GraduatedFee Fee::TransferFee Fee::InterimFee]).first&.quantity.to_i
        end
      end
    end
  end
end
Add the supplementary claim type to the MI data type conversion.
It was missing and could have been causing a bug in the MI export and
archiving processes, both of which call this conversion.
# Flattens a Claim and its associations into a plain hash for MI
# (management information) exports and archiving.
module Transform
  class Claim
    # Attributes copied verbatim from the claim.
    DUPLICATE = %w[disk_evidence retrial_reduction case_concluded_at effective_pcmh_date first_day_of_trial
                   legal_aid_transfer_date retrial_concluded_at retrial_started_at trial_concluded_at
                   trial_cracked_at trial_fixed_at trial_fixed_notice_at authorised_at created_at
                   last_submitted_at original_submission_date disbursements_total disbursements_vat
                   expenses_total expenses_vat fees_total fees_vat total vat_amount actual_trial_length
                   estimated_trial_length retrial_actual_length retrial_estimated_length advocate_category
                   trial_cracked_at_third source supplier_number].freeze
    # Association counts, optionally filtered by a SQL condition.
    COUNT = {
      num_of_documents: { object: :documents, where: '' },
      num_of_defendants: { object: :defendants, where: '' },
      rejections: { object: :claim_state_transitions, where: "\"to\"='rejected'" },
      refusals: { object: :claim_state_transitions, where: "\"to\"='refused'" }
    }.freeze
    # Values read off an associated object (safe-navigated, so a missing
    # association yields nil).
    OBJECT_VALUE = {
      court: { object: :court, att: :name },
      transfer_court: { object: :transfer_court, att: :name },
      case_type: { object: :case_type, att: :name },
      offence_name: { object: :offence, att: :description },
      date_last_assessed: { object: :assessment, att: :created_at },
      provider_name: { object: :provider, att: :name },
      provider_type: { object: :provider, att: :provider_type },
      assessment_total: { object: :assessment, att: :total },
      assessment_fees: { object: :assessment, att: :fees },
      assessment_expenses: { object: :assessment, att: :expenses },
      assessment_disbursements: { object: :assessment, att: :disbursements },
      assessment_vat: { object: :assessment, att: :vat_amount },
      scheme_name: { object: :fee_scheme, att: :name },
      scheme_number: { object: :fee_scheme, att: :version }
    }.freeze
    # Derived sums over previously-computed hash keys.
    CALCULATIONS = {
      amount_claimed: %i[total vat_amount],
      amount_authorised: %i[assessment_total assessment_vat]
    }.freeze
    # Maps STI class names to human-readable claim type labels.
    CLAIM_TYPE_CONVERSIONS = {
      'Claim::AdvocateSupplementaryClaim' => 'Advocate supplementary claim',
      'Claim::AdvocateInterimClaim' => 'Advocate interim claim',
      'Claim::AdvocateClaim' => 'Advocate final claim',
      'Claim::InterimClaim' => 'Litigator interim claim',
      'Claim::LitigatorClaim' => 'Litigator final claim',
      'Claim::TransferClaim' => 'Litigator transfer claim'
    }.freeze
    class << self
      # Builds and returns the flattened hash for +claim+.
      # NOTE(review): state is kept in a class-level @claim ivar for the
      # private `ppe` helper, so concurrent calls would race — confirm this
      # is only invoked from single-threaded jobs.
      def call(claim)
        hash = {}
        @claim = claim
        DUPLICATE.each { |att| hash[att.to_sym] = claim.send(att) }
        COUNT.each { |att, source| hash[att] = claim.send(source[:object]).where(source[:where]).count }
        OBJECT_VALUE.each { |att, source| hash[att] = claim.send(source[:object])&.send(source[:att]) }
        CALCULATIONS.each { |att, sum| hash[att] = (hash[sum[0]] || 0) + (hash[sum[1]] || 0) }
        hash[:ppe] = ppe
        hash[:claim_type] = CLAIM_TYPE_CONVERSIONS[claim.type.to_s]
        hash[:offence_type] = claim.offence&.offence_band&.description || claim.offence&.offence_class&.class_letter
        hash
      end
      private
      attr_accessor :claim
      # Pages of prosecution evidence: AGFS claims read the BAPPE fee
      # quantity; other claims read the first graduated/transfer/interim fee.
      def ppe
        if @claim.agfs?
          @claim.fees.find_by(fee_type_id: ::Fee::BaseFeeType.find_by_id_or_unique_code('BAPPE'))&.quantity.to_i
        else
          @claim.fees.where(type: %w[Fee::GraduatedFee Fee::TransferFee Fee::InterimFee]).first&.quantity.to_i
        end
      end
    end
  end
end
|
# Singleton wrapper around the Twitter REST client used to pull mentions
# and hashtagged tweets into local {Tweet} records.
class TwitterService
  @@instance = nil

  # Process-wide singleton accessor.
  def self.instance
    @@instance ||= TwitterService.new
    @@instance
  end

  attr_accessor :client

  # Downloads all the new mentions since the last mention and the hashtagged
  # tweets. Creates all the necessary {Tweet} records.
  def download_tweets
    opts = {}
    if tweet = Tweet.last
      opts[:since_id] = tweet.id
    end
    tl = client.mentions_timeline(opts)
    tl.each do |tuit|
      Tweet.find_or_create!(tuit)
    end
    # Review fix: search for the hashtag (#InfractoresBA) rather than the
    # bare word, which also matched mentions and unrelated text.
    tl = client.search "#InfractoresBA -rt"
    tl.each do |tuit|
      Tweet.find_or_create!(tuit)
    end
  end

  private

  # Builds the Twitter REST client from environment credentials.
  def initialize
    client = Twitter::REST::Client.new do |config|
      config.consumer_key = ENV["TWITTER_CONSUMER_KEY"]
      config.consumer_secret = ENV["TWITTER_CONSUMER_SECRET"]
      config.access_token = ENV["TWITTER_ACCESS_TOKEN"]
      config.access_token_secret = ENV["TWITTER_ACCESS_SECRET"]
    end
    self.client = client
  end
end
Use only the hashtag #InfractoresBA in the tweet search.
# Singleton wrapper around the Twitter REST client used to pull mentions
# and hashtagged tweets into local {Tweet} records.
class TwitterService
  @@instance = nil

  # Process-wide singleton accessor.
  def self.instance
    @@instance ||= new
  end

  attr_accessor :client

  # Downloads mentions newer than the last stored tweet plus tweets
  # carrying the #InfractoresBA hashtag, creating {Tweet} records for each.
  def download_tweets
    opts = {}
    latest = Tweet.last
    opts[:since_id] = latest.id if latest

    client.mentions_timeline(opts).each do |status|
      Tweet.find_or_create!(status)
    end

    client.search("#InfractoresBA -rt").each do |status|
      Tweet.find_or_create!(status)
    end
  end

  private

  # Builds the Twitter REST client from environment credentials.
  def initialize
    self.client = Twitter::REST::Client.new do |config|
      config.consumer_key = ENV["TWITTER_CONSUMER_KEY"]
      config.consumer_secret = ENV["TWITTER_CONSUMER_SECRET"]
      config.access_token = ENV["TWITTER_ACCESS_TOKEN"]
      config.access_token_secret = ENV["TWITTER_ACCESS_SECRET"]
    end
  end
end
|
require File.expand_path("../spec_helper", __FILE__)
# Specs for WebDriver error behaviour when element lookup fails.
describe "Error" do
  not_compliant_on :browser => :chrome do
    it "should have an appropriate message" do
      driver.navigate.to url_for("xhtmlTest.html")

      # Review fix: assert only on the error class — the message wording
      # ("find" vs "locate") differs between drivers and is not part of the
      # contract, so the regex made the spec driver-dependent.
      lambda { driver.find_element(:id, "nonexistant") }.should raise_error(
        WebDriver::Error::NoSuchElementError
      )
    end
  end

  compliant_on :driver => [:remote, :firefox] do
    it "should show stack trace information" do
      driver.navigate.to url_for("xhtmlTest.html")

      rescued = false
      ex = nil

      begin
        driver.find_element(:id, "nonexistant")
      rescue => ex
        rescued = true
      end

      # Review fix: `rescued` was assigned but never asserted, so the spec
      # could pass even when no exception was raised at all.
      rescued.should be_true
      ex.backtrace.first.should include("[remote server]")
    end
  end
end
JariBakken: Minor spec fix, don't assert on error message.
r13501
require File.expand_path("../spec_helper", __FILE__)
# Specs for WebDriver error behaviour when element lookup fails.
describe Selenium::WebDriver::Error do
  it "should raise an appropriate error" do
    driver.navigate.to url_for("xhtmlTest.html")
    # Only the error class is asserted; the message wording varies by driver.
    lambda {
      driver.find_element(:id, "nonexistant")
    }.should raise_error(WebDriver::Error::NoSuchElementError)
  end
  compliant_on :driver => [:remote, :firefox] do
    it "should show stack trace information" do
      driver.navigate.to url_for("xhtmlTest.html")
      rescued = false
      ex = nil
      begin
        driver.find_element(:id, "nonexistant")
      rescue => ex
        rescued = true
      end
      rescued.should be_true
      # The remote driver annotates server-side frames with "[remote server]".
      ex.backtrace.first.should include("[remote server]")
    end
  end
end
require File.expand_path("../spec_helper", __FILE__)
module Selenium
  module WebDriver
    # Specs for low-level mouse interactions (click, drag/drop, double and
    # context click).
    describe Mouse do
      it "clicks an element" do
        driver.navigate.to url_for("formPage.html")
        driver.mouse.click driver.find_element(:id, "imageButton")
      end

      it "can drag and drop" do
        driver.navigate.to url_for("droppableItems.html")
        draggable = long_wait.until {
          driver.find_element(:id => "draggable")
        }
        droppable = driver.find_element(:id => "droppable")
        driver.mouse.down draggable
        driver.mouse.move_to droppable
        driver.mouse.up droppable
        text = droppable.find_element(:tag_name => "p").text
        text.should == "Dropped!"
      end

      # Review fix: the field value is set by the page's event handler, so
      # poll for the change instead of asserting immediately — the immediate
      # assertion was flaky (double-click was previously excluded on
      # Firefox/Windows for that reason).
      it "double clicks an element" do
        driver.navigate.to url_for("javascriptPage.html")
        element = driver.find_element(:id, 'doubleClickField')
        driver.mouse.double_click element
        wait(5).until {
          element.attribute(:value) == 'DoubleClicked'
        }
      end

      it "context clicks an element" do
        driver.navigate.to url_for("javascriptPage.html")
        element = driver.find_element(:id, 'doubleClickField')
        driver.mouse.context_click element
        wait(5).until {
          element.attribute(:value) == 'ContextClicked'
        }
      end
    end
  end
end
JariBakken: Add missing wait to double/context click spec.
git-svn-id: aa1aa1384423cb28c2b1e29129bb3a91de1d9196@14055 07704840-8298-11de-bf8c-fd130f914ac9
require File.expand_path("../spec_helper", __FILE__)
module Selenium
  module WebDriver
    # Specs for low-level mouse interactions (click, drag/drop, double and
    # context click).
    describe Mouse do
      it "clicks an element" do
        driver.navigate.to url_for("formPage.html")
        driver.mouse.click driver.find_element(:id, "imageButton")
      end
      it "can drag and drop" do
        driver.navigate.to url_for("droppableItems.html")
        # The draggable element may appear asynchronously — wait for it.
        draggable = long_wait.until {
          driver.find_element(:id => "draggable")
        }
        droppable = driver.find_element(:id => "droppable")
        driver.mouse.down draggable
        driver.mouse.move_to droppable
        driver.mouse.up droppable
        text = droppable.find_element(:tag_name => "p").text
        text.should == "Dropped!"
      end
      it "double clicks an element" do
        driver.navigate.to url_for("javascriptPage.html")
        element = driver.find_element(:id, 'doubleClickField')
        driver.mouse.double_click element
        # The value is set by the page's event handler; poll rather than
        # asserting immediately to avoid flakiness.
        wait(5).until {
          element.attribute(:value) == 'DoubleClicked'
        }
      end
      it "context clicks an element" do
        driver.navigate.to url_for("javascriptPage.html")
        element = driver.find_element(:id, 'doubleClickField')
        driver.mouse.context_click element
        wait(5).until {
          element.attribute(:value) == 'ContextClicked'
        }
      end
    end
  end
end
|
Rename samvera-nesting_indexer_initializer.
This file didn't follow the project's (Ruby, Rails) naming conventions, and
there's no reason it shouldn't. Rename it accordingly.
# frozen_string_literal: true
require 'samvera/nesting_indexer'
require 'hyrax/repository_reindexer'
# Configures the Samvera nesting indexer used for nested collections.
Samvera::NestingIndexer.configure do |config|
  # Maximum nesting depth for collections. With a limit of 3, the chain
  # C1 <- C2 <- C3 <- W1 would raise an exception.
  config.maximum_nesting_depth = 5
  config.adapter = Hyrax::Adapters::NestingIndexAdapter

  # Solr field names used to persist the nesting graph.
  config.solr_field_name_for_storing_parent_ids   = 'nesting_collection__parent_ids_ssim'
  config.solr_field_name_for_storing_ancestors    = 'nesting_collection__ancestors_ssim'
  config.solr_field_name_for_storing_pathnames    = 'nesting_collection__pathnames_ssim'
  config.solr_field_name_for_deepest_nested_depth = 'nesting_collection__deepest_nested_depth_isi'
end
|
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
require 'metasm/os/main'
require 'metasm/debug'
module Metasm
class PTrace
attr_accessor :buf, :pid
# Opens a ptrace session on +target+ (pid or command line).
# Without a block, returns the new PTrace instance. With a block, yields
# the instance, detaches from the target when the block returns, and
# returns the block's value.
def self.open(target)
  ptrace = new(target)
  return ptrace if not block_given?
  ret = yield ptrace
  ptrace.detach
  ret
end
# calls PTRACE_TRACEME on the current (ruby) process
# Requests PTRACE_TRACEME for the current process (do_attach = false skips
# attaching, since we are tracing ourselves for a parent debugger).
def self.traceme
  new(::Process.pid, false).traceme
end
# creates a ptraced process (target = path)
# or opens a running process (target = pid)
# values for do_attach:
# :create => always fork+traceme+exec+wait
# :attach => always attach
# false/nil => same as attach, without actually calling PT_ATTACH (usefull if we're already tracing pid)
# anything else: try to attach if pid is numeric (using Integer()), else create
def initialize(target, do_attach=true)
  begin
    # Numeric target: treat it as a pid and attach (unless do_attach is
    # falsy, meaning the pid is already being traced by us).
    raise ArgumentError if do_attach == :create  # force the fork/exec path below
    @pid = Integer(target)
    tweak_for_pid(@pid)
    return if not do_attach
    attach
  rescue ArgumentError, TypeError
    # Non-numeric target (or :create): fork, have the child request
    # tracing via PTRACE_TRACEME, then exec the target command line.
    raise if do_attach == :attach or not do_attach
    did_exec = true
    if not @pid = fork
      tweak_for_pid(::Process.pid)
      traceme
      ::Process.exec(*target)
      exit!(0)
    end
  end
  wait
  # After wait, $? reflects the child's state; a normal exit here means the
  # exec never stopped under trace.
  raise "could not exec #{target}" if $?.exited?
  # Re-read the target's CPU info after exec (the binary may differ from ours).
  tweak_for_pid(@pid) if did_exec
  puts "Ptrace: attached to #@pid" if $DEBUG
end
# Blocks until the traced process changes state. WALL waits for all
# children, including clones that do not signal via SIGCHLD.
def wait
  ::Process.wait(@pid, ::Process::WALL)
end
attr_accessor :reg_off, :intsize, :syscallnr, :syscallreg
attr_accessor :packint, :packuint, :host_intsize, :host_syscallnr
# setup the variables according to the target
# Configures pack formats, word sizes, syscall tables and register offsets.
# Host-side values (@pack*, @host_*, @reg_off) follow the CPU of the
# current ruby process (used to issue ptrace calls); target-side values
# (@syscallreg, @syscallnr, @intsize) follow the CPU of the traced pid.
def tweak_for_pid(pid=@pid)
  # use these for our syscalls PTRACE
  case LinOS.open_process(::Process.pid).cpu.shortname
  when 'ia32'
    @packint = 'l'
    @packuint = 'L'
    @host_intsize = 4
    @host_syscallnr = SYSCALLNR_I386
    @reg_off = REGS_I386
  when 'x64'
    @packint = 'q'
    @packuint = 'Q'
    @host_intsize = 8
    @host_syscallnr = SYSCALLNR_X86_64
    @reg_off = REGS_X86_64
  else raise 'unsupported architecture'
  end
  # use these to interpret the child state
  @tgcpu = LinOS.open_process(pid).cpu
  case @tgcpu.shortname
  when 'ia32'
    @syscallreg = 'ORIG_EAX'
    @syscallnr = SYSCALLNR_I386
    @intsize = 4
  when 'x64'
    @syscallreg = 'ORIG_RAX'
    @syscallnr = SYSCALLNR_X86_64
    @intsize = 8
  else raise 'unsupported target architecture'
  end
  # buffer used in ptrace syscalls
  @buf = [0].pack(@packint)
  @bufptr = str_ptr(@buf)
end
# Returns the memory address of +str+'s byte buffer: pack('P') stores a
# pointer to the string data, which we then unpack as a host integer.
def str_ptr(str)
  [str].pack('P').unpack(@packint).first
end
# interpret the value turned as an unsigned long
def bufval
  # NOTE(review): @packint is the *signed* pack format ('l'/'q'), so values
  # with the high bit set come back negative despite the "unsigned" wording
  # in the comment above — confirm callers expect that.
  @buf.unpack(@packint).first
end
# reads a memory range
# Reads +len+ bytes of target memory starting at +off+ one machine word at
# a time via peekdata, handling an unaligned start address.
def readmem(off, len)
  decal = off % @host_intsize
  buf = ''
  if decal > 0
    # Align down, read the full word, keep only the bytes past the
    # alignment offset.
    off -= decal
    peekdata(off)
    off += @host_intsize
    buf << @buf[decal...@host_intsize]
  end
  offend = off + len
  while off < offend
    peekdata(off)
    buf << @buf[0, @host_intsize]
    off += @host_intsize
  end
  # May have read past the requested length; trim to exactly +len+ bytes.
  buf[0, len]
end
# writes str to the target memory at off, using word-aligned
# PTRACE_POKEDATA; unaligned edges are read back first so the
# neighbouring bytes are preserved
def writemem(off, str)
decal = off % @host_intsize
if decal > 0
# prepend the existing target bytes before off in the first word
off -= decal
peekdata(off)
str = @buf[0...decal] + str
end
decal = str.length % @host_intsize
if decal > 0
# append the existing target bytes after the end in the last word
peekdata(off+str.length-decal)
str += @buf[decal...@host_intsize]
end
i = 0
while i < str.length
pokedata(off+i, str[i, @host_intsize])
i += @host_intsize
end
end
# linux/ptrace.h
# ptrace request numbers, name => numeric value
COMMAND = {
'TRACEME' => 0, 'PEEKTEXT' => 1,
'PEEKDATA' => 2, 'PEEKUSR' => 3,
'POKETEXT' => 4, 'POKEDATA' => 5,
'POKEUSR' => 6, 'CONT' => 7,
'KILL' => 8, 'SINGLESTEP' => 9,
'ATTACH' => 16, 'DETACH' => 17,
'SYSCALL' => 24,
# arch/x86/include/ptrace-abi.h
'GETREGS' => 12, 'SETREGS' => 13,
'GETFPREGS' => 14, 'SETFPREGS' => 15,
'GETFPXREGS' => 18, 'SETFPXREGS' => 19,
'OLDSETOPTIONS' => 21, 'GET_THREAD_AREA' => 25,
'SET_THREAD_AREA' => 26, 'ARCH_PRCTL' => 30,
'SYSEMU' => 31, 'SYSEMU_SINGLESTEP'=> 32,
'SINGLEBLOCK' => 33,
# 0x4200-0x4300 are reserved for architecture-independent additions.
'SETOPTIONS' => 0x4200, 'GETEVENTMSG' => 0x4201,
'GETSIGINFO' => 0x4202, 'SETSIGINFO' => 0x4203
}
OPTIONS = {
# options set using PTRACE_SETOPTIONS
'TRACESYSGOOD' => 0x01, 'TRACEFORK' => 0x02,
'TRACEVFORK' => 0x04, 'TRACECLONE' => 0x08,
'TRACEEXEC' => 0x10, 'TRACEVFORKDONE'=> 0x20,
'TRACEEXIT' => 0x40
}
WAIT_EXTENDEDRESULT = {
# Wait extended result codes for the above trace options.
'EVENT_FORK' => 1, 'EVENT_VFORK' => 2,
'EVENT_CLONE' => 3, 'EVENT_EXEC' => 4,
'EVENT_VFORK_DONE' => 5, 'EVENT_EXIT' => 6
}
# make the map bidirectional (name <=> number)
WAIT_EXTENDEDRESULT.update WAIT_EXTENDEDRESULT.invert
# block trace
BTS_O = { 'TRACE' => 1, 'SCHED' => 2, 'SIGNAL' => 4, 'ALLOC' => 8 }
BTS = { 'CONFIG' => 40, 'STATUS' => 41, 'SIZE' => 42,
'GET' => 43, 'CLEAR' => 44, 'DRAIN' => 45 }
# word-indexed offsets in the user area, for peekusr/pokeusr
REGS_I386 = {
'EBX' => 0, 'ECX' => 1, 'EDX' => 2, 'ESI' => 3,
'EDI' => 4, 'EBP' => 5, 'EAX' => 6, 'DS' => 7,
'ES' => 8, 'FS' => 9, 'GS' => 10, 'ORIG_EAX' => 11,
'EIP' => 12, 'CS' => 13, 'EFL' => 14, 'UESP'=> 15,
'EFLAGS' => 14, 'ESP' => 15,
'SS' => 16,
# from ptrace.c in kernel source & asm-i386/user.h
'DR0' => 63, 'DR1' => 64, 'DR2' => 65, 'DR3' => 66,
'DR4' => 67, 'DR5' => 68, 'DR6' => 69, 'DR7' => 70
}
REGS_X86_64 = {
'R15' => 0, 'R14' => 1, 'R13' => 2, 'R12' => 3,
'RBP' => 4, 'RBX' => 5, 'R11' => 6, 'R10' => 7,
'R9' => 8, 'R8' => 9, 'RAX' => 10, 'RCX' => 11,
'RDX' => 12, 'RSI' => 13, 'RDI' => 14, 'ORIG_RAX' => 15,
'RIP' => 16, 'CS' => 17, 'RFLAGS' => 18, 'RSP' => 19,
'SS' => 20, 'FS_BASE' => 21, 'GS_BASE' => 22, 'DS' => 23,
'ES' => 24, 'FS' => 25, 'GS' => 26,
#'FP_VALID' => 27,
#'387_XWD' => 28, '387_RIP' => 29, '387_RDP' => 30, '387_MXCSR' => 31,
#'FP0' => 32, 'FP1' => 34, 'FP2' => 36, 'FP3' => 38,
#'FP4' => 40, 'FP5' => 42, 'FP6' => 44, 'FP7' => 46,
#'XMM0' => 48, 'XMM1' => 52, 'XMM2' => 56, 'XMM3' => 60,
#'XMM4' => 64, 'XMM5' => 68, 'XMM6' => 72, 'XMM7' => 76,
#'FPAD0' => 80, 'FPAD11' => 91,
#'TSZ' => 92, 'DSZ' => 93, 'SSZ' => 94, 'CODE' => 95,
#'STK' => 96, 'SIG' => 97, 'PAD' => 98, 'U_AR0' => 99,
#'FPPTR' => 100, 'MAGIC' => 101, 'COMM0' => 102, 'COMM1' => 103,
#'COMM2' => 104, 'COMM3' => 105,
'DR0' => 106, 'DR1' => 107, 'DR2' => 108, 'DR3' => 109,
'DR4' => 110, 'DR5' => 111, 'DR6' => 112, 'DR7' => 113,
#'ERROR_CODE' => 114, 'FAULT_ADDR' => 115
}
# i386 syscall table: name <=> number (the inject assigns consecutive
# numbers in list order, the final update makes the map bidirectional)
SYSCALLNR_I386 = %w[restart_syscall exit fork read write open close waitpid creat link unlink execve chdir time
mknod chmod lchown break oldstat lseek getpid mount umount setuid getuid stime ptrace alarm oldfstat
pause utime stty gtty access nice ftime sync kill rename mkdir rmdir dup pipe times prof brk setgid
getgid signal geteuid getegid acct umount2 lock ioctl fcntl mpx setpgid ulimit oldolduname umask
chroot ustat dup2 getppid getpgrp setsid sigaction sgetmask ssetmask setreuid setregid sigsuspend
sigpending sethostname setrlimit getrlimit getrusage gettimeofday settimeofday getgroups setgroups
select symlink oldlstat readlink uselib swapon reboot readdir mmap munmap truncate ftruncate fchmod
fchown getpriority setpriority profil statfs fstatfs ioperm socketcall syslog setitimer getitimer
stat lstat fstat olduname iopl vhangup idle vm86old wait4 swapoff sysinfo ipc fsync sigreturn
clone setdomainname uname modify_ldt adjtimex mprotect sigprocmask create_module init_module
delete_module get_kernel_syms quotactl getpgid fchdir bdflush sysfs personality afs_syscall setfsuid
setfsgid _llseek getdents _newselect flock msync readv writev getsid fdatasync _sysctl mlock
munlock mlockall munlockall sched_setparam sched_getparam sched_setscheduler sched_getscheduler
sched_yield sched_get_priority_max sched_get_priority_min sched_rr_get_interval nanosleep mremap
setresuid getresuid vm86 query_module poll nfsservctl setresgid getresgid prctl rt_sigreturn
rt_sigaction rt_sigprocmask rt_sigpending rt_sigtimedwait rt_sigqueueinfo rt_sigsuspend pread64
pwrite64 chown getcwd capget capset sigaltstack sendfile getpmsg putpmsg vfork ugetrlimit mmap2
truncate64 ftruncate64 stat64 lstat64 fstat64 lchown32 getuid32 getgid32 geteuid32 getegid32
setreuid32 setregid32 getgroups32 setgroups32 fchown32 setresuid32 getresuid32 setresgid32
getresgid32 chown32 setuid32 setgid32 setfsuid32 setfsgid32 pivot_root mincore madvise getdents64
fcntl64 sys_222 sys_223 gettid readahead setxattr lsetxattr fsetxattr getxattr lgetxattr fgetxattr
listxattr llistxattr flistxattr removexattr lremovexattr fremovexattr tkill sendfile64 futex
sched_setaffinity sched_getaffinity set_thread_area get_thread_area io_setup io_destroy io_getevents
io_submit io_cancel fadvise64 sys_251 exit_group lookup_dcookie epoll_create epoll_ctl epoll_wait
remap_file_pages set_tid_address timer_create timer_settime timer_gettime timer_getoverrun
timer_delete clock_settime clock_gettime clock_getres clock_nanosleep statfs64 fstatfs64 tgkill
utimes fadvise64_64 vserver mbind get_mempolicy set_mempolicy mq_open mq_unlink mq_timedsend
mq_timedreceive mq_notify mq_getsetattr kexec_load waitid sys_setaltroot add_key request_key keyctl
ioprio_set ioprio_get inotify_init inotify_add_watch inotify_rm_watch migrate_pages openat mkdirat
mknodat fchownat futimesat fstatat64 unlinkat renameat linkat symlinkat readlinkat fchmodat
faccessat pselect6 ppoll unshare set_robust_list get_robust_list splice sync_file_range tee vmsplice
move_pages getcpu epoll_pwait utimensat signalfd timerfd eventfd fallocate timerfd_settime
timerfd_gettime signalfd4 eventfd2 epoll_create1 dup3 pipe2 inotify_init1 preadv pwritev
rt_tg_sigqueueinfo perf_counter_open].inject({}) { |h, sc| h.update sc => h.length }
SYSCALLNR_I386.update SYSCALLNR_I386.invert
# x86_64 syscall table: name <=> number, built the same way
SYSCALLNR_X86_64 = %w[read write open close stat fstat lstat poll lseek mmap mprotect munmap brk rt_sigaction
rt_sigprocmask rt_sigreturn ioctl pread64 pwrite64 readv writev access pipe select sched_yield
mremap msync mincore madvise shmget shmat shmctl dup dup2 pause nanosleep getitimer alarm
setitimer getpid sendfile socket connect accept sendto recvfrom sendmsg recvmsg shutdown
bind listen getsockname getpeername socketpair setsockopt getsockopt clone fork vfork execve
exit wait4 kill uname semget semop semctl shmdt msgget msgsnd msgrcv msgctl fcntl flock
fsync fdatasync truncate ftruncate getdents getcwd chdir fchdir rename mkdir rmdir creat
link unlink symlink readlink chmod fchmod chown fchown lchown umask gettimeofday getrlimit
getrusage sysinfo times ptrace getuid syslog getgid setuid setgid geteuid getegid setpgid
getppid getpgrp setsid setreuid setregid getgroups setgroups setresuid getresuid setresgid
getresgid getpgid setfsuid setfsgid getsid capget capset rt_sigpending rt_sigtimedwait
rt_sigqueueinfo rt_sigsuspend sigaltstack utime mknod uselib personality ustat statfs fstatfs
sysfs getpriority setpriority sched_setparam sched_getparam sched_setscheduler sched_getscheduler
sched_get_priority_max sched_get_priority_min sched_rr_get_interval mlock munlock mlockall
munlockall vhangup modify_ldt pivot_root _sysctl prctl arch_prctl adjtimex setrlimit chroot sync
acct settimeofday mount umount2 swapon swapoff reboot sethostname setdomainname iopl ioperm
create_module init_module delete_module get_kernel_syms query_module quotactl nfsservctl getpmsg
putpmsg afs_syscall tuxcall security gettid readahead setxattr lsetxattr fsetxattr getxattr
lgetxattr fgetxattr listxattr llistxattr flistxattr removexattr lremovexattr fremovexattr tkill time
futex sched_setaffinity sched_getaffinity set_thread_area io_setup io_destroy io_getevents io_submit
io_cancel get_thread_area lookup_dcookie epoll_create epoll_ctl_old epoll_wait_old remap_file_pages
getdents64 set_tid_address restart_syscall semtimedop fadvise64 timer_create timer_settime
timer_gettime timer_getoverrun timer_delete clock_settime clock_gettime clock_getres clock_nanosleep
exit_group epoll_wait epoll_ctl tgkill utimes vserver mbind set_mempolicy get_mempolicy mq_open
mq_unlink mq_timedsend mq_timedreceive mq_notify mq_getsetattr kexec_load waitid add_key request_key
keyctl ioprio_set ioprio_get inotify_init inotify_add_watch inotify_rm_watch migrate_pages openat
mkdirat mknodat fchownat futimesat newfstatat unlinkat renameat linkat symlinkat readlinkat
fchmodat faccessat pselect6 ppoll unshare set_robust_list get_robust_list splice tee sync_file_range
vmsplice move_pages utimensat epoll_pwait signalfd timerfd_create eventfd fallocate timerfd_settime
timerfd_gettime accept4 signalfd4 eventfd2 epoll_create1 dup3 pipe2 inotify_init1 preadv pwritev
rt_tgsigqueueinfo perf_counter_open].inject({}) { |h, sc| h.update sc => h.length }
SYSCALLNR_X86_64.update SYSCALLNR_X86_64.invert
# signal name <=> number map, from the ruby interpreter's own table
SIGNAL = Signal.list.dup
# drop signal 0 ('EXIT'): not a real signal
# (delete_if avoids Hash#index, deprecated since 1.9 in favor of Hash#key)
SIGNAL.delete_if { |sname, snum| snum == 0 }
SIGNAL['TRAP'] ||= 5 # windows+gdbremote
SIGNAL.update SIGNAL.invert
# include/asm-generic/errno-base.h
# errno name <=> number map (consecutive numbers in list order)
ERRNO = {}
%w[ERR0 EPERM ENOENT ESRCH EINTR EIO ENXIO E2BIG ENOEXEC EBADF ECHILD EAGAIN ENOMEM EACCES EFAULT
ENOTBLK EBUSY EEXIST EXDEV ENODEV ENOTDIR EISDIR EINVAL ENFILE EMFILE ENOTTY ETXTBSY EFBIG ENOSPC
ESPIPE EROFS EMLINK EPIPE EDOM ERANGE].each_with_index { |ename, enum| ERRNO[ename] = enum }
ERRNO.update ERRNO.invert
# siginfo si_code values (kernel asm-generic/siginfo.h)
# NB: values are only meaningful relative to the signal received, hence
# the duplicate values (eg 1 is ILLOPC for SIGILL but MAPERR for SIGSEGV)
SIGINFO = {
# user-generated signal
'DETHREAD' => -7, # execve killing threads
'TKILL' => -6, 'SIGIO' => -5, 'ASYNCIO' => -4, 'MESGQ' => -3,
'TIMER' => -2, 'QUEUE' => -1, 'USER' => 0, 'KERNEL' => 0x80,
# ILL
'ILLOPC' => 1, 'ILLOPN' => 2, 'ILLADR' => 3, 'ILLTRP' => 4,
'PRVOPC' => 5, 'PRVREG' => 6, 'COPROC' => 7, 'BADSTK' => 8,
# FPE
'INTDIV' => 1, 'INTOVF' => 2, 'FLTDIV' => 3, 'FLTOVF' => 4,
'FLTUND' => 5, 'FLTRES' => 6, 'FLTINV' => 7, 'FLTSUB' => 8,
# SEGV
'MAPERR' => 1, 'ACCERR' => 2,
# BUS
'ADRALN' => 1, 'ADRERR' => 2, 'OBJERR' => 3, 'MCEERR_AR' => 4,
'MCEERR_AO' => 5,
# TRAP
'BRKPT' => 1, 'TRACE' => 2, 'BRANCH' => 3, 'HWBKPT' => 4,
# CHLD
'EXITED' => 1, 'KILLED' => 2, 'DUMPED' => 3, 'TRAPPED' => 4,
'STOPPED' => 5, 'CONTINUED' => 6,
# POLL
'POLL_IN' => 1, 'POLL_OUT' => 2, 'POLL_MSG' => 3, 'POLL_ERR' => 4,
'POLL_PRI' => 5, 'POLL_HUP' => 6
}
# C declaration of the kernel siginfo_t, parsed at runtime with the
# target cpu's C parser to decode PTRACE_GETSIGINFO data
# (keep in sync with the kernel's asm-generic/siginfo.h)
SIGINFO_C = <<EOS
typedef __int32 __pid_t;
typedef unsigned __int32 __uid_t;
typedef uintptr_t sigval_t;
typedef long __clock_t;
typedef struct siginfo {
int si_signo;
int si_errno;
int si_code;
// int pad64;
union {
int _pad[128/4-3]; /* total >= 128b */
struct { /* kill(). */
__pid_t si_pid; /* Sending process ID. */
__uid_t si_uid; /* Real user ID of sending process. */
} _kill;
struct { /* POSIX.1b timers. */
int si_tid; /* Timer ID. */
int si_overrun; /* Overrun count. */
sigval_t si_sigval; /* Signal value. */
} _timer;
struct { /* POSIX.1b signals. */
__pid_t si_pid; /* Sending process ID. */
__uid_t si_uid; /* Real user ID of sending process. */
sigval_t si_sigval; /* Signal value. */
} _rt;
struct { /* SIGCHLD. */
__pid_t si_pid; /* Which child. */
__uid_t si_uid; /* Real user ID of sending process. */
int si_status; /* Exit value or signal. */
__clock_t si_utime;
__clock_t si_stime;
} _sigchld;
struct { /* SIGILL, SIGFPE, SIGSEGV, SIGBUS. */
uintptr_t si_addr; /* Faulting insn/memory ref. */
} _sigfault;
struct { /* SIGPOLL. */
long int si_band; /* Band event for SIGPOLL. */
int si_fd;
} _sigpoll;
};
} siginfo_t;
EOS
# invokes the ptrace(2) syscall directly through Kernel.syscall
# req: numeric PTRACE_* request; addr/data: Integers, or a String for
# data (its in-process address is passed)
def sys_ptrace(req, pid, addr, data)
data = str_ptr(data) if data.kind_of?(String)
# renormalize to the host signed word range, Kernel.syscall rejects
# values outside it
addr = [addr].pack(@packint).unpack(@packint).first
data = [data].pack(@packint).unpack(@packint).first
Kernel.syscall(@host_syscallnr['ptrace'], req, pid, addr, data)
end
# makes the current (ruby) process traced by its parent
def traceme
sys_ptrace(COMMAND['TRACEME'], 0, 0, 0)
end
# reads one word of code at addr into @buf
def peektext(addr)
sys_ptrace(COMMAND['PEEKTEXT'], @pid, addr, @bufptr)
@buf
end
# reads one word of data at addr into @buf
def peekdata(addr)
sys_ptrace(COMMAND['PEEKDATA'], @pid, addr, @bufptr)
@buf
end
# reads a word-indexed slot of the user area (registers etc),
# masked to the smaller of host/target word sizes
def peekusr(addr)
sys_ptrace(COMMAND['PEEKUSR'], @pid, @host_intsize*addr, @bufptr)
bufval & ((1 << ([@host_intsize, @intsize].min*8)) - 1)
end
# writes one word (packed String) of code at addr
def poketext(addr, data)
sys_ptrace(COMMAND['POKETEXT'], @pid, addr, data.unpack(@packint).first)
end
# writes one word (packed String) of data at addr
def pokedata(addr, data)
sys_ptrace(COMMAND['POKEDATA'], @pid, addr, data.unpack(@packint).first)
end
# writes a word-indexed slot of the user area
def pokeusr(addr, data)
sys_ptrace(COMMAND['POKEUSR'], @pid, @host_intsize*addr, data)
end
# reads the general-purpose registers into buf (512 zeroed bytes by default)
def getregs(buf=nil)
buf ||= [0].pack('C')*512
sys_ptrace(COMMAND['GETREGS'], @pid, 0, buf)
buf
end
def setregs(buf)
sys_ptrace(COMMAND['SETREGS'], @pid, 0, buf)
end
def getfpregs(buf=nil)
buf ||= [0].pack('C')*1024
sys_ptrace(COMMAND['GETFPREGS'], @pid, 0, buf)
buf
end
def setfpregs(buf)
sys_ptrace(COMMAND['SETFPREGS'], @pid, 0, buf)
end
def getfpxregs(buf=nil)
buf ||= [0].pack('C')*512
sys_ptrace(COMMAND['GETFPXREGS'], @pid, 0, buf)
buf
end
def setfpxregs(buf)
sys_ptrace(COMMAND['SETFPXREGS'], @pid, 0, buf)
end
def get_thread_area(addr)
sys_ptrace(COMMAND['GET_THREAD_AREA'], @pid, addr, @bufptr)
bufval
end
def set_thread_area(addr, data)
sys_ptrace(COMMAND['SET_THREAD_AREA'], @pid, addr, data)
end
def prctl(addr, data)
sys_ptrace(COMMAND['ARCH_PRCTL'], @pid, addr, data)
end
# resumes execution, optionally delivering signal sig to the target
def cont(sig = nil)
sig ||= 0
sys_ptrace(COMMAND['CONT'], @pid, 0, sig)
end
def kill
sys_ptrace(COMMAND['KILL'], @pid, 0, 0)
end
# resumes for a single instruction
def singlestep(sig = nil)
sig ||= 0
sys_ptrace(COMMAND['SINGLESTEP'], @pid, 0, sig)
end
# resumes until the next branch
def singleblock(sig = nil)
sig ||= 0
sys_ptrace(COMMAND['SINGLEBLOCK'], @pid, 0, sig)
end
# resumes until the next syscall entry/exit
def syscall(sig = nil)
sig ||= 0
sys_ptrace(COMMAND['SYSCALL'], @pid, 0, sig)
end
def attach
sys_ptrace(COMMAND['ATTACH'], @pid, 0, 0)
end
def detach
sys_ptrace(COMMAND['DETACH'], @pid, 0, 0)
end
# sets the PTRACE_SETOPTIONS flags; accepts Integers or OPTIONS key names
def setoptions(*opt)
	mask = 0
	opt.each { |o| mask |= o.kind_of?(Integer) ? o : OPTIONS[o] }
	sys_ptrace(COMMAND['SETOPTIONS'], @pid, 0, mask)
end
# retrieve pid of cld for EVENT_CLONE/FORK, exitcode for EVENT_EXIT
def geteventmsg
sys_ptrace(COMMAND['GETEVENTMSG'], @pid, 0, @bufptr)
bufval
end
# memoized siginfo C struct, allocated with the target cpu's C parser
def siginfo
@siginfo ||= (
cp = @tgcpu.new_cparser
cp.parse SIGINFO_C
cp.alloc_c_struct('siginfo')
)
end
# fills and returns the siginfo struct for the last signal received
def getsiginfo
sys_ptrace(COMMAND['GETSIGINFO'], @pid, 0, siginfo.str)
siginfo
end
# overwrites the signal info to be delivered to the target
def setsiginfo(si=siginfo)
si = si.str if si.respond_to?(:str)
sys_ptrace(COMMAND['SETSIGINFO'], @pid, 0, si)
end
end
# Linux-specific OS interface: process enumeration and access through /proc
class LinOS < OS
class Process < OS::Process
# returns/create a LinuxRemoteString
def memory
@memory ||= LinuxRemoteString.new(pid)
end
attr_writer :memory
# returns/create a LinDebugger attached to this process
def debugger
@debugger ||= LinDebugger.new(@pid)
end
attr_writer :debugger
# returns the list of loaded Modules, incl start address & path
# read from /proc/pid/maps
def modules
list = []
seen = {}
File.readlines("/proc/#{pid}/maps").each { |l|
# 08048000-08064000 r-xp 000000 08:01 4234 /usr/bin/true
l = l.split
# keep only the first (lowest) mapping of each file
next if l.length < 6 or seen[l[-1]]
seen[l[-1]] = true
m = Module.new
m.addr = l[0].to_i(16)
m.path = l[-1]
list << m
}
list
rescue
# best-effort: process may have exited or /proc may be unreadable
[]
end
# return a list of [addr_start, length, perms, file]
def mappings
list = []
File.readlines("/proc/#{pid}/maps").each { |l|
l = l.split
addrstart, addrend = l[0].split('-').map { |i| i.to_i 16 }
list << [addrstart, addrend-addrstart, l[1], l[5]]
}
list
rescue
[]
end
# returns a list of threads sharing this process address space
# read from /proc/pid/task/
def threads
Dir.entries("/proc/#{pid}/task/").grep(/^\d+$/).map { |tid| tid.to_i }
rescue
# TODO handle pthread stuff (eg 2.4 kernels)
[pid]
end
# return the invocation commandline, from /proc/pid/cmdline
# this is manipulable by the target itself
def cmdline
@cmdline ||= File.read("/proc/#{pid}/cmdline") rescue ''
end
attr_writer :cmdline
# path of the binary: first NUL-separated element of the cmdline
def path
cmdline.split(0.chr)[0]
end
# returns the address size of the process, based on its #cpu
def addrsz
cpu.size
end
# returns the CPU for the process, by reading /proc/pid/exe
def cpu
e = ELF.load_file("/proc/#{pid}/exe")
# dont decode shdr/phdr, this is 2x faster for repeated debugger spawn
e.decode_header(0, false, false)
e.cpu
end
def terminate
kill
end
# sends a signal to the process (default: 9 = SIGKILL)
def kill(signr=9)
::Process.kill(signr, @pid)
end
end
class << self
# returns an array of Processes, with pid/module listing
def list_processes
Dir.entries('/proc').grep(/^\d+$/).map { |pid| Process.new(pid.to_i) }
end
# return a Process for the specified pid if it exists in /proc
def open_process(pid)
Process.new(pid) if check_process(pid)
end
# checks that /proc/<pid> exists
def check_process(pid)
File.directory?("/proc/#{pid}")
end
# create a LinDebugger on the target pid/binary
def create_debugger(path)
LinDebugger.new(path)
end
end # class << self
end
class LinuxRemoteString < VirtualString
attr_accessor :pid, :readfd
attr_accessor :dbg
# returns a virtual string proxying the specified process memory range
# reads are cached (4096 aligned bytes read at once), from /proc/pid/mem
# writes are done directly by ptrace
def initialize(pid, addr_start=0, length=nil, dbg=nil)
@pid = pid
# default length: the whole address space of the target
length ||= 1 << (dbg ? dbg.cpu.size : (LinOS.open_process(@pid).addrsz rescue 32))
@readfd = File.open("/proc/#@pid/mem", 'rb') rescue nil
@dbg = dbg if dbg
super(addr_start, length)
end
def dup(addr = @addr_start, len = @length)
self.class.new(@pid, addr, len, dbg)
end
# yields a usable PTrace: reuses the debugger's when the target is
# stopped, otherwise attaches/detaches around the block
def do_ptrace
if dbg
dbg.switch_context(@pid) {
# XXX tid ?
yield dbg.ptrace if dbg.state == :stopped
}
else
PTrace.open(@pid) { |ptrace| yield ptrace }
end
end
def rewrite_at(addr, data)
# target must be stopped
do_ptrace { |ptrace| ptrace.writemem(addr, data) }
end
# reads one cache page from the target memory, nil on failure
def get_page(addr, len=@pagelength)
do_ptrace { |ptrace|
begin
if readfd and addr < (1<<63)
# 1<<63: ruby seek = 'too big to fit longlong', linux read = EINVAL
@readfd.pos = addr
@readfd.read len
elsif addr < (1<<(ptrace.host_intsize*8))
# can reach 1<<64 with peek_data only if ptrace accepts 64bit args
ptrace.readmem(addr, len)
end
rescue Errno::EIO, Errno::ESRCH
# unmapped address or dead target
nil
end
}
end
end
# waitpid flag constants from linux, missing from ruby's core Process
# module: WALL waits for any child (incl. clone()d threads), WCLONE
# for clone children only
module ::Process
WALL = 0x40000000 if not defined? WALL
WCLONE = 0x80000000 if not defined? WCLONE
end
# this class implements a high-level API over the ptrace debugging primitives
class LinDebugger < Debugger
	# ptrace is per-process or per-thread ?
	attr_accessor :ptrace, :continuesignal, :has_pax_mprotect, :target_syscall
	attr_accessor :callback_syscall, :callback_branch, :callback_exec
	# pidpath: a pid to attach to, or the path of a binary to run traced
	def initialize(pidpath=nil)
		super()
		@pid_stuff_list << :has_pax_mprotect << :ptrace << :breaking << :os_process
		@tid_stuff_list << :continuesignal << :saved_csig << :ctx << :target_syscall
		# by default, break on all signals except SIGWINCH (terminal resize notification)
		@pass_all_exceptions = lambda { |e| e[:signal] == 'WINCH' }
		@callback_syscall = lambda { |i| log "syscall #{i[:syscall]}" }
		@callback_exec = lambda { |i| log "execve #{os_process.path}" }
		return if not pidpath
		begin
			pid = Integer(pidpath)
			attach(pid)
		rescue ArgumentError
			create_process(pidpath)
		end
	end
	def shortname; 'lindbg'; end
	# attach to a running process and all its threads
	def attach(pid, do_attach=:attach)
		pt = PTrace.new(pid, do_attach)
		set_context(pt.pid, pt.pid) # swapout+init_newpid
		log "attached #@pid"
		list_threads.each { |tid| attach_thread(tid) if tid != @pid }
		set_tid @pid
	end
	# create a process and debug it
	def create_process(path)
		pt = PTrace.new(path, :create)
		# TODO save path, allow restart etc
		set_context(pt.pid, pt.pid) # swapout+init_newpid
		log "attached #@pid"
	end
	def initialize_cpu
		@cpu = os_process.cpu
		# need to init @ptrace here, before init_dasm calls gui.swapin
		@ptrace = PTrace.new(@pid, false)
		if @cpu.size == 64 and @ptrace.reg_off['EAX']
			hack_64_32
		end
		set_tid @pid
		set_thread_options
	end
	def initialize_memory
		@memory = os_process.memory = LinuxRemoteString.new(@pid, 0, nil, self)
	end
	def os_process
		@os_process ||= LinOS.open_process(@pid)
	end
	def list_threads
		os_process.threads
	end
	def list_processes
		LinOS.list_processes
	end
	def check_pid(pid)
		LinOS.check_process(pid)
	end
	def mappings
		os_process.mappings
	end
	def modules
		os_process.modules
	end
	# we're a 32bit process debugging a 64bit target
	# the ptrace kernel interface we use only allow us a 32bit-like target access
	# with this we advertize the cpu as having eax..edi registers (the only one we
	# can access), while still decoding x64 instructions (whose addr < 4G)
	def hack_64_32
		log "WARNING: debugging a 64bit process from a 32bit debugger is a very bad idea !"
		@cpu.instance_eval {
			ia32 = Ia32.new
			@dbg_register_pc = ia32.dbg_register_pc
			@dbg_register_flags = ia32.dbg_register_flags
			@dbg_register_list = ia32.dbg_register_list
			@dbg_register_size = ia32.dbg_register_size
		}
	end
	# attach a thread of the current process
	def attach_thread(tid)
		set_tid tid
		@ptrace.pid = tid
		@ptrace.attach
		@state = :stopped # no need to wait()
		log "attached thread #{tid}"
		set_thread_options
	end
	# set the debugee ptrace options (notify clone/exec/exit, and fork/vfork depending on @trace_children)
	def set_thread_options
		opts = %w[TRACESYSGOOD TRACECLONE TRACEEXEC TRACEEXIT]
		opts += %w[TRACEFORK TRACEVFORK TRACEVFORKDONE] if trace_children
		@ptrace.pid = @tid
		@ptrace.setoptions(*opts)
	end
	# update the current pid relative to tracing children (@trace_children only effects newly traced pid/tid)
	def do_trace_children
		each_tid { set_thread_options }
	end
	def invalidate
		@ctx = nil
		super()
	end
	# a hash of the current thread context
	# TODO keys = :gpr, :fpu, :xmm, :dr ; val = AllocCStruct
	# include accessors for st0/xmm12 (@ptrace.getfpregs.unpack etc)
	def ctx
		@ctx ||= {}
	end
	def get_reg_value(r)
		raise "bad register #{r}" if not k = @ptrace.reg_off[r.to_s.upcase]
		return ctx[r] || 0 if @state != :stopped
		@ptrace.pid = @tid
		ctx[r] ||= @ptrace.peekusr(k)
	rescue Errno::ESRCH
		0
	end
	def set_reg_value(r, v)
		raise "bad register #{r}" if not k = @ptrace.reg_off[r.to_s.upcase]
		ctx[r] = v
		return if @state != :stopped
		@ptrace.pid = @tid
		@ptrace.pokeusr(k, v)
	end
	# decode a waitpid status into the corresponding debugger event
	def update_waitpid(status)
		invalidate
		@continuesignal = 0
		@state = :stopped # allow get_reg (for eg pt_syscall)
		info = { :status => status }
		if status.exited?
			info.update :exitcode => status.exitstatus
			if @tid == @pid # XXX
				evt_endprocess info
			else
				evt_endthread info
			end
		elsif status.signaled?
			info.update :signal => (PTrace::SIGNAL[status.termsig] || status.termsig)
			if @tid == @pid
				evt_endprocess info
			else
				evt_endthread info
			end
		elsif status.stopped?
			sig = status.stopsig & 0x7f
			signame = PTrace::SIGNAL[sig]
			if signame == 'TRAP'
				if status.stopsig & 0x80 > 0
					# XXX int80 in x64 => syscallnr32 ?
					evt_syscall info.update(:syscall => @ptrace.syscallnr[get_reg_value(@ptrace.syscallreg)])
				elsif (status >> 16) > 0
					case o = PTrace::WAIT_EXTENDEDRESULT[status >> 16]
					when 'EVENT_FORK', 'EVENT_VFORK'
						# parent notification of a fork
						# child receives STOP (may have already happened)
						#cld = @ptrace.geteventmsg
						resume_badbreak
					when 'EVENT_CLONE'
						#cld = @ptrace.geteventmsg
						resume_badbreak
					when 'EVENT_EXIT'
						# retarget ptrace requests at the stopped thread
						@ptrace.pid = @tid
						info.update :exitcode => (@ptrace.geteventmsg rescue nil)
						if @tid == @pid
							evt_endprocess info
						else
							evt_endthread info
						end
					when 'EVENT_VFORK_DONE'
						# fixed: the table key is 'EVENT_VFORK_DONE',
						# the previous 'EVENT_VFORKDONE' never matched
						resume_badbreak
					when 'EVENT_EXEC'
						evt_exec info
					end
				else
					# retarget ptrace requests at the stopped thread
					@ptrace.pid = @tid
					si = @ptrace.getsiginfo rescue nil
					case si ? si.si_code : :foo
					when PTrace::SIGINFO['BRKPT'],
						PTrace::SIGINFO['KERNEL'] # \xCC prefer KERNEL to BRKPT
						evt_bpx
					when PTrace::SIGINFO['TRACE']
						evt_singlestep # singlestep/singleblock
					when PTrace::SIGINFO['BRANCH']
						evt_branch # XXX BTS?
					when PTrace::SIGINFO['HWBKPT']
						evt_hwbp
					else
						@saved_csig = @continuesignal = sig
						info.update :signal => signame, :type => "SIG#{signame}"
						evt_exception info
					end
				end
			elsif signame == 'STOP' and @info == 'new'
				# new thread break on creation (eg after fork + TRACEFORK)
				if @pid == @tid
					attach(@pid, false)
					evt_newprocess info
				else
					evt_newthread info
				end
			elsif signame == 'STOP' and @breaking
				@state = :stopped
				@info = 'break'
				@breaking = nil
			else
				@saved_csig = @continuesignal = sig
				info.update :signal => signame, :type => "SIG#{signame}"
				if signame == 'SEGV'
					# need more data on access violation (for bpm)
					info.update :type => 'access violation'
					# retarget ptrace requests at the stopped thread
					@ptrace.pid = @tid
					si = @ptrace.getsiginfo
					access = case si.si_code
						when PTrace::SIGINFO['MAPERR']; :r # XXX write access to unmapped => ?
						when PTrace::SIGINFO['ACCERR']; :w
						end
					info.update :fault_addr => si.si_addr, :fault_access => access
				end
				evt_exception info
			end
		else
			log "unknown wait status #{status.inspect}"
			evt_exception info.update(:type => "unknown wait #{status.inspect}")
		end
	end
	# switch the debugger context to the process owning tid
	def set_tid_findpid(tid)
		return if tid == @tid
		if tid != @pid and pr = list_processes.find { |p| p.threads.include? tid }
			set_pid pr.pid
		end
		set_tid tid
	end
	def do_check_target
		return unless t = ::Process.waitpid(-1, ::Process::WNOHANG | ::Process::WALL)
		# XXX all threads may have stopped, wait them now ?
		set_tid_findpid t
		update_waitpid $?
	rescue ::Errno::ECHILD
	end
	def do_wait_target
		t = ::Process.waitpid(-1, ::Process::WALL)
		set_tid_findpid t
		update_waitpid $?
	rescue ::Errno::ECHILD
	end
	def do_continue
		@ptrace.pid = tid
		@ptrace.cont(@continuesignal)
	end
	def do_singlestep(*a)
		@ptrace.pid = tid
		@ptrace.singlestep(@continuesignal)
	end
	# use the PT_SYSCALL to break on next syscall
	# regexp allowed to wait a specific syscall
	def syscall(arg=nil)
		arg = nil if arg and arg.strip == ''
		return if not check_pre_run(:syscall, arg)
		@target_syscall = arg
		@ptrace.pid = @tid
		@ptrace.syscall(@continuesignal)
	end
	def syscall_wait(*a, &b)
		syscall(*a, &b)
		wait_target
	end
	# use the PT_SINGLEBLOCK to execute until the next branch
	def singleblock
		# record as singlestep to avoid evt_singlestep -> evt_exception
		# step or block doesn't matter much here anyway
		return if not check_pre_run(:singlestep)
		@ptrace.pid = @tid
		@ptrace.singleblock(@continuesignal)
	end
	def singleblock_wait(*a, &b)
		singleblock(*a, &b)
		wait_target
	end
	# woke up from a PT_SYSCALL
	def evt_syscall(info={})
		@state = :stopped
		@info = "syscall #{info[:syscall]}"
		callback_syscall[info] if callback_syscall
		if @target_syscall and info[:syscall] !~ /^#@target_syscall$/i
			resume_badbreak
		else
			@target_syscall = nil
		end
	end
	# SIGTRAP + SIGINFO_TRAP_BRANCH = ?
	def evt_branch(info={})
		@state = :stopped
		@info = "branch"
		callback_branch[info] if callback_branch
	end
	# called during sys_execve in the new process
	def evt_exec(info={})
		@state = :stopped
		@info = "#{info[:exec]} execve"
		initialize_newpid
		# XXX will receive a SIGTRAP, could hide it..
		callback_exec[info] if callback_exec
		# calling continue() here will loop back to TRAP+INFO_EXEC
	end
	# interrupt the target: send SIGSTOP, flagged so update_waitpid reports 'break'
	def break
		@breaking = true
		kill 'STOP'
	end
	def kill(sig=nil)
		return if not tid
		# XXX tkill ?
		::Process.kill(sig2signr(sig), tid)
	rescue Errno::ESRCH
	end
	def pass_current_exception(bool=true)
		if bool
			@continuesignal = @saved_csig
		else
			@continuesignal = 0
		end
	end
	# normalize a signal spec (nil / Integer / 'KILL' / 'SIGKILL') to its number
	def sig2signr(sig)
		case sig
		when nil, ''; 9
		when Integer; sig
		when String
			sig = sig.upcase.sub(/^SIG_?/, '')
			PTrace::SIGNAL[sig] || Integer(sig)
		else raise "unhandled signal #{sig.inspect}"
		end
	end
	# stop debugging the current process
	def detach
		del_all_breakpoints
		each_tid {
			@ptrace.pid = @tid
			@ptrace.detach
			@delete_thread = true
		}
		del_pid
	end
	def bpx(addr, *a, &b)
		return hwbp(addr, :x, 1, *a, &b) if @has_pax_mprotect
		super(addr, *a, &b)
	end
	# handles exceptions from PaX-style mprotect restrictions on bpx,
	# transmute them to hwbp on the fly
	def do_enable_bp(b)
		super(b)
	rescue ::Errno::EIO
		if b.type == :bpx
			@memory[b.address, 1] # check if we can read
			# didn't raise: it's a PaX-style config
			@has_pax_mprotect = true
			b.del
			hwbp(b.address, :x, 1, b.oneshot, b.condition, &b.action)
			log 'PaX: bpx->hwbp'
		else raise
		end
	end
	def ui_command_setup(ui)
		ui.new_command('syscall', 'waits for the target to do a syscall using PT_SYSCALL') { |arg| ui.wrap_run { syscall arg } }
		ui.keyboard_callback[:f6] = lambda { ui.wrap_run { syscall } }
		ui.new_command('signal_cont', 'set/get the continue signal (0 == unset)') { |arg|
			case arg.to_s.strip
			when ''; log "#{@continuesignal} (#{PTrace::SIGNAL[@continuesignal]})"
			else @continuesignal = sig2signr(arg)
			end
		}
	end
end
end
# lindbg: fix ptrace.pid on siginfo/evtmsg
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
require 'metasm/os/main'
require 'metasm/debug'
module Metasm
class PTrace
attr_accessor :buf, :pid
# opens a PTrace session on target (pid or command line)
# without a block, returns the new PTrace object
# with a block, yields it and returns the block's value; the target is
# detached in all cases, even if the block raises (the original leaked
# the attachment on exception)
def self.open(target)
	ptrace = new(target)
	return ptrace if not block_given?
	begin
		yield ptrace
	ensure
		ptrace.detach
	end
end
# calls PTRACE_TRACEME on the current (ruby) process
# (makes us traceable by our parent, used by the fork+exec path)
def self.traceme
new(::Process.pid, false).traceme
end
# creates a ptraced process (target = path)
# or opens a running process (target = pid)
# values for do_attach:
# :create => always fork+traceme+exec+wait
# :attach => always attach
# false/nil => same as attach, without actually calling PT_ATTACH (usefull if we're already tracing pid)
# anything else: try to attach if pid is numeric (using Integer()), else create
def initialize(target, do_attach=true)
begin
# Integer() raises ArgumentError/TypeError for non-pid targets, which
# routes execution to the fork+exec branch below
raise ArgumentError if do_attach == :create
@pid = Integer(target)
tweak_for_pid(@pid)
# pid already traced by us: no PT_ATTACH, no initial wait
return if not do_attach
attach
rescue ArgumentError, TypeError
raise if do_attach == :attach or not do_attach
did_exec = true
if not @pid = fork
# child process: ask to be traced, then exec the target command
tweak_for_pid(::Process.pid)
traceme
::Process.exec(*target)
exit!(0)
end
end
# wait for the initial stop (SIGSTOP from attach, SIGTRAP from exec)
wait
raise "could not exec #{target}" if $?.exited?
# re-read the target cpu from the freshly exec'd image
tweak_for_pid(@pid) if did_exec
puts "Ptrace: attached to #@pid" if $DEBUG
end
def wait
::Process.wait(@pid, ::Process::WALL)
end
attr_accessor :reg_off, :intsize, :syscallnr, :syscallreg
attr_accessor :packint, :packuint, :host_intsize, :host_syscallnr
# setup the variables according to the target
def tweak_for_pid(pid=@pid)
# use these for our syscalls PTRACE
case LinOS.open_process(::Process.pid).cpu.shortname
when 'ia32'
@packint = 'l'
@packuint = 'L'
@host_intsize = 4
@host_syscallnr = SYSCALLNR_I386
@reg_off = REGS_I386
when 'x64'
@packint = 'q'
@packuint = 'Q'
@host_intsize = 8
@host_syscallnr = SYSCALLNR_X86_64
@reg_off = REGS_X86_64
else raise 'unsupported architecture'
end
# use these to interpret the child state
@tgcpu = LinOS.open_process(pid).cpu
case @tgcpu.shortname
when 'ia32'
@syscallreg = 'ORIG_EAX'
@syscallnr = SYSCALLNR_I386
@intsize = 4
when 'x64'
@syscallreg = 'ORIG_RAX'
@syscallnr = SYSCALLNR_X86_64
@intsize = 8
else raise 'unsupported target architecture'
end
# buffer used in ptrace syscalls
@buf = [0].pack(@packint)
@bufptr = str_ptr(@buf)
end
def str_ptr(str)
[str].pack('P').unpack(@packint).first
end
# interpret the value turned as an unsigned long
def bufval
@buf.unpack(@packint).first
end
# reads a memory range
def readmem(off, len)
decal = off % @host_intsize
buf = ''
if decal > 0
off -= decal
peekdata(off)
off += @host_intsize
buf << @buf[decal...@host_intsize]
end
offend = off + len
while off < offend
peekdata(off)
buf << @buf[0, @host_intsize]
off += @host_intsize
end
buf[0, len]
end
# writes str to the target memory at off, using word-aligned pokedata
# unaligned head/tail bytes are merged with the current target memory
# (read back with peekdata) so neighbouring bytes are preserved
# returns true
def writemem(off, str)
  # ruby1.9: work on a binary copy so concatenating with @buf (binary) cannot
  # raise Encoding::CompatibilityError; dup avoids mutating the caller's string
  str = str.dup.force_encoding('binary') if str.respond_to?(:force_encoding)
  decal = off % @host_intsize
  if decal > 0
    # align the start: prepend the existing bytes before off
    off -= decal
    peekdata(off)
    str = @buf[0...decal] + str
  end
  decal = str.length % @host_intsize
  if decal > 0
    # align the end: append the existing bytes after the last byte written
    peekdata(off+str.length-decal)
    str += @buf[decal...@host_intsize]
  end
  i = 0
  while i < str.length
    pokedata(off+i, str[i, @host_intsize])
    i += @host_intsize
  end
  true
end
# linux/ptrace.h
COMMAND = {
'TRACEME' => 0, 'PEEKTEXT' => 1,
'PEEKDATA' => 2, 'PEEKUSR' => 3,
'POKETEXT' => 4, 'POKEDATA' => 5,
'POKEUSR' => 6, 'CONT' => 7,
'KILL' => 8, 'SINGLESTEP' => 9,
'ATTACH' => 16, 'DETACH' => 17,
'SYSCALL' => 24,
# arch/x86/include/ptrace-abi.h
'GETREGS' => 12, 'SETREGS' => 13,
'GETFPREGS' => 14, 'SETFPREGS' => 15,
'GETFPXREGS' => 18, 'SETFPXREGS' => 19,
'OLDSETOPTIONS' => 21, 'GET_THREAD_AREA' => 25,
'SET_THREAD_AREA' => 26, 'ARCH_PRCTL' => 30,
'SYSEMU' => 31, 'SYSEMU_SINGLESTEP'=> 32,
'SINGLEBLOCK' => 33,
# 0x4200-0x4300 are reserved for architecture-independent additions.
'SETOPTIONS' => 0x4200, 'GETEVENTMSG' => 0x4201,
'GETSIGINFO' => 0x4202, 'SETSIGINFO' => 0x4203
}
OPTIONS = {
# options set using PTRACE_SETOPTIONS
'TRACESYSGOOD' => 0x01, 'TRACEFORK' => 0x02,
'TRACEVFORK' => 0x04, 'TRACECLONE' => 0x08,
'TRACEEXEC' => 0x10, 'TRACEVFORKDONE'=> 0x20,
'TRACEEXIT' => 0x40
}
WAIT_EXTENDEDRESULT = {
# Wait extended result codes for the above trace options.
'EVENT_FORK' => 1, 'EVENT_VFORK' => 2,
'EVENT_CLONE' => 3, 'EVENT_EXEC' => 4,
'EVENT_VFORK_DONE' => 5, 'EVENT_EXIT' => 6
}
WAIT_EXTENDEDRESULT.update WAIT_EXTENDEDRESULT.invert
# block trace
BTS_O = { 'TRACE' => 1, 'SCHED' => 2, 'SIGNAL' => 4, 'ALLOC' => 8 }
BTS = { 'CONFIG' => 40, 'STATUS' => 41, 'SIZE' => 42,
'GET' => 43, 'CLEAR' => 44, 'DRAIN' => 45 }
REGS_I386 = {
'EBX' => 0, 'ECX' => 1, 'EDX' => 2, 'ESI' => 3,
'EDI' => 4, 'EBP' => 5, 'EAX' => 6, 'DS' => 7,
'ES' => 8, 'FS' => 9, 'GS' => 10, 'ORIG_EAX' => 11,
'EIP' => 12, 'CS' => 13, 'EFL' => 14, 'UESP'=> 15,
'EFLAGS' => 14, 'ESP' => 15,
'SS' => 16,
# from ptrace.c in kernel source & asm-i386/user.h
'DR0' => 63, 'DR1' => 64, 'DR2' => 65, 'DR3' => 66,
'DR4' => 67, 'DR5' => 68, 'DR6' => 69, 'DR7' => 70
}
REGS_X86_64 = {
'R15' => 0, 'R14' => 1, 'R13' => 2, 'R12' => 3,
'RBP' => 4, 'RBX' => 5, 'R11' => 6, 'R10' => 7,
'R9' => 8, 'R8' => 9, 'RAX' => 10, 'RCX' => 11,
'RDX' => 12, 'RSI' => 13, 'RDI' => 14, 'ORIG_RAX' => 15,
'RIP' => 16, 'CS' => 17, 'RFLAGS' => 18, 'RSP' => 19,
'SS' => 20, 'FS_BASE' => 21, 'GS_BASE' => 22, 'DS' => 23,
'ES' => 24, 'FS' => 25, 'GS' => 26,
#'FP_VALID' => 27,
#'387_XWD' => 28, '387_RIP' => 29, '387_RDP' => 30, '387_MXCSR' => 31,
#'FP0' => 32, 'FP1' => 34, 'FP2' => 36, 'FP3' => 38,
#'FP4' => 40, 'FP5' => 42, 'FP6' => 44, 'FP7' => 46,
#'XMM0' => 48, 'XMM1' => 52, 'XMM2' => 56, 'XMM3' => 60,
#'XMM4' => 64, 'XMM5' => 68, 'XMM6' => 72, 'XMM7' => 76,
#'FPAD0' => 80, 'FPAD11' => 91,
#'TSZ' => 92, 'DSZ' => 93, 'SSZ' => 94, 'CODE' => 95,
#'STK' => 96, 'SIG' => 97, 'PAD' => 98, 'U_AR0' => 99,
#'FPPTR' => 100, 'MAGIC' => 101, 'COMM0' => 102, 'COMM1' => 103,
#'COMM2' => 104, 'COMM3' => 105,
'DR0' => 106, 'DR1' => 107, 'DR2' => 108, 'DR3' => 109,
'DR4' => 110, 'DR5' => 111, 'DR6' => 112, 'DR7' => 113,
#'ERROR_CODE' => 114, 'FAULT_ADDR' => 115
}
SYSCALLNR_I386 = %w[restart_syscall exit fork read write open close waitpid creat link unlink execve chdir time
mknod chmod lchown break oldstat lseek getpid mount umount setuid getuid stime ptrace alarm oldfstat
pause utime stty gtty access nice ftime sync kill rename mkdir rmdir dup pipe times prof brk setgid
getgid signal geteuid getegid acct umount2 lock ioctl fcntl mpx setpgid ulimit oldolduname umask
chroot ustat dup2 getppid getpgrp setsid sigaction sgetmask ssetmask setreuid setregid sigsuspend
sigpending sethostname setrlimit getrlimit getrusage gettimeofday settimeofday getgroups setgroups
select symlink oldlstat readlink uselib swapon reboot readdir mmap munmap truncate ftruncate fchmod
fchown getpriority setpriority profil statfs fstatfs ioperm socketcall syslog setitimer getitimer
stat lstat fstat olduname iopl vhangup idle vm86old wait4 swapoff sysinfo ipc fsync sigreturn
clone setdomainname uname modify_ldt adjtimex mprotect sigprocmask create_module init_module
delete_module get_kernel_syms quotactl getpgid fchdir bdflush sysfs personality afs_syscall setfsuid
setfsgid _llseek getdents _newselect flock msync readv writev getsid fdatasync _sysctl mlock
munlock mlockall munlockall sched_setparam sched_getparam sched_setscheduler sched_getscheduler
sched_yield sched_get_priority_max sched_get_priority_min sched_rr_get_interval nanosleep mremap
setresuid getresuid vm86 query_module poll nfsservctl setresgid getresgid prctl rt_sigreturn
rt_sigaction rt_sigprocmask rt_sigpending rt_sigtimedwait rt_sigqueueinfo rt_sigsuspend pread64
pwrite64 chown getcwd capget capset sigaltstack sendfile getpmsg putpmsg vfork ugetrlimit mmap2
truncate64 ftruncate64 stat64 lstat64 fstat64 lchown32 getuid32 getgid32 geteuid32 getegid32
setreuid32 setregid32 getgroups32 setgroups32 fchown32 setresuid32 getresuid32 setresgid32
getresgid32 chown32 setuid32 setgid32 setfsuid32 setfsgid32 pivot_root mincore madvise getdents64
fcntl64 sys_222 sys_223 gettid readahead setxattr lsetxattr fsetxattr getxattr lgetxattr fgetxattr
listxattr llistxattr flistxattr removexattr lremovexattr fremovexattr tkill sendfile64 futex
sched_setaffinity sched_getaffinity set_thread_area get_thread_area io_setup io_destroy io_getevents
io_submit io_cancel fadvise64 sys_251 exit_group lookup_dcookie epoll_create epoll_ctl epoll_wait
remap_file_pages set_tid_address timer_create timer_settime timer_gettime timer_getoverrun
timer_delete clock_settime clock_gettime clock_getres clock_nanosleep statfs64 fstatfs64 tgkill
utimes fadvise64_64 vserver mbind get_mempolicy set_mempolicy mq_open mq_unlink mq_timedsend
mq_timedreceive mq_notify mq_getsetattr kexec_load waitid sys_setaltroot add_key request_key keyctl
ioprio_set ioprio_get inotify_init inotify_add_watch inotify_rm_watch migrate_pages openat mkdirat
mknodat fchownat futimesat fstatat64 unlinkat renameat linkat symlinkat readlinkat fchmodat
faccessat pselect6 ppoll unshare set_robust_list get_robust_list splice sync_file_range tee vmsplice
move_pages getcpu epoll_pwait utimensat signalfd timerfd eventfd fallocate timerfd_settime
timerfd_gettime signalfd4 eventfd2 epoll_create1 dup3 pipe2 inotify_init1 preadv pwritev
rt_tg_sigqueueinfo perf_counter_open].inject({}) { |h, sc| h.update sc => h.length }
SYSCALLNR_I386.update SYSCALLNR_I386.invert
SYSCALLNR_X86_64 = %w[read write open close stat fstat lstat poll lseek mmap mprotect munmap brk rt_sigaction
rt_sigprocmask rt_sigreturn ioctl pread64 pwrite64 readv writev access pipe select sched_yield
mremap msync mincore madvise shmget shmat shmctl dup dup2 pause nanosleep getitimer alarm
setitimer getpid sendfile socket connect accept sendto recvfrom sendmsg recvmsg shutdown
bind listen getsockname getpeername socketpair setsockopt getsockopt clone fork vfork execve
exit wait4 kill uname semget semop semctl shmdt msgget msgsnd msgrcv msgctl fcntl flock
fsync fdatasync truncate ftruncate getdents getcwd chdir fchdir rename mkdir rmdir creat
link unlink symlink readlink chmod fchmod chown fchown lchown umask gettimeofday getrlimit
getrusage sysinfo times ptrace getuid syslog getgid setuid setgid geteuid getegid setpgid
getppid getpgrp setsid setreuid setregid getgroups setgroups setresuid getresuid setresgid
getresgid getpgid setfsuid setfsgid getsid capget capset rt_sigpending rt_sigtimedwait
rt_sigqueueinfo rt_sigsuspend sigaltstack utime mknod uselib personality ustat statfs fstatfs
sysfs getpriority setpriority sched_setparam sched_getparam sched_setscheduler sched_getscheduler
sched_get_priority_max sched_get_priority_min sched_rr_get_interval mlock munlock mlockall
munlockall vhangup modify_ldt pivot_root _sysctl prctl arch_prctl adjtimex setrlimit chroot sync
acct settimeofday mount umount2 swapon swapoff reboot sethostname setdomainname iopl ioperm
create_module init_module delete_module get_kernel_syms query_module quotactl nfsservctl getpmsg
putpmsg afs_syscall tuxcall security gettid readahead setxattr lsetxattr fsetxattr getxattr
lgetxattr fgetxattr listxattr llistxattr flistxattr removexattr lremovexattr fremovexattr tkill time
futex sched_setaffinity sched_getaffinity set_thread_area io_setup io_destroy io_getevents io_submit
io_cancel get_thread_area lookup_dcookie epoll_create epoll_ctl_old epoll_wait_old remap_file_pages
getdents64 set_tid_address restart_syscall semtimedop fadvise64 timer_create timer_settime
timer_gettime timer_getoverrun timer_delete clock_settime clock_gettime clock_getres clock_nanosleep
exit_group epoll_wait epoll_ctl tgkill utimes vserver mbind set_mempolicy get_mempolicy mq_open
mq_unlink mq_timedsend mq_timedreceive mq_notify mq_getsetattr kexec_load waitid add_key request_key
keyctl ioprio_set ioprio_get inotify_init inotify_add_watch inotify_rm_watch migrate_pages openat
mkdirat mknodat fchownat futimesat newfstatat unlinkat renameat linkat symlinkat readlinkat
fchmodat faccessat pselect6 ppoll unshare set_robust_list get_robust_list splice tee sync_file_range
vmsplice move_pages utimensat epoll_pwait signalfd timerfd_create eventfd fallocate timerfd_settime
timerfd_gettime accept4 signalfd4 eventfd2 epoll_create1 dup3 pipe2 inotify_init1 preadv pwritev
rt_tgsigqueueinfo perf_counter_open].inject({}) { |h, sc| h.update sc => h.length }
SYSCALLNR_X86_64.update SYSCALLNR_X86_64.invert
SIGNAL = Signal.list.dup
SIGNAL.delete SIGNAL.index(0)
SIGNAL['TRAP'] ||= 5 # windows+gdbremote
SIGNAL.update SIGNAL.invert
# include/asm-generic/errno-base.h
ERRNO = %w[ERR0 EPERM ENOENT ESRCH EINTR EIO ENXIO E2BIG ENOEXEC EBADF ECHILD EAGAIN ENOMEM EACCES EFAULT
ENOTBLK EBUSY EEXIST EXDEV ENODEV ENOTDIR EISDIR EINVAL ENFILE EMFILE ENOTTY ETXTBSY EFBIG ENOSPC
ESPIPE EROFS EMLINK EPIPE EDOM ERANGE].inject({}) { |h, e| h.update e => h.length }
ERRNO.update ERRNO.invert
SIGINFO = {
# user-generated signal
'DETHREAD' => -7, # execve killing threads
'TKILL' => -6, 'SIGIO' => -5, 'ASYNCIO' => -4, 'MESGQ' => -3,
'TIMER' => -2, 'QUEUE' => -1, 'USER' => 0, 'KERNEL' => 0x80,
# ILL
'ILLOPC' => 1, 'ILLOPN' => 2, 'ILLADR' => 3, 'ILLTRP' => 4,
'PRVOPC' => 5, 'PRVREG' => 6, 'COPROC' => 7, 'BADSTK' => 8,
# FPE
'INTDIV' => 1, 'INTOVF' => 2, 'FLTDIV' => 3, 'FLTOVF' => 4,
'FLTUND' => 5, 'FLTRES' => 6, 'FLTINV' => 7, 'FLTSUB' => 8,
# SEGV
'MAPERR' => 1, 'ACCERR' => 2,
# BUS
'ADRALN' => 1, 'ADRERR' => 2, 'OBJERR' => 3, 'MCEERR_AR' => 4,
'MCEERR_AO' => 5,
# TRAP
'BRKPT' => 1, 'TRACE' => 2, 'BRANCH' => 3, 'HWBKPT' => 4,
# CHLD
'EXITED' => 1, 'KILLED' => 2, 'DUMPED' => 3, 'TRAPPED' => 4,
'STOPPED' => 5, 'CONTINUED' => 6,
# POLL
'POLL_IN' => 1, 'POLL_OUT' => 2, 'POLL_MSG' => 3, 'POLL_ERR' => 4,
'POLL_PRI' => 5, 'POLL_HUP' => 6
}
SIGINFO_C = <<EOS
typedef __int32 __pid_t;
typedef unsigned __int32 __uid_t;
typedef uintptr_t sigval_t;
typedef long __clock_t;
typedef struct siginfo {
int si_signo;
int si_errno;
int si_code;
// int pad64;
union {
int _pad[128/4-3]; /* total >= 128b */
struct { /* kill(). */
__pid_t si_pid; /* Sending process ID. */
__uid_t si_uid; /* Real user ID of sending process. */
} _kill;
struct { /* POSIX.1b timers. */
int si_tid; /* Timer ID. */
int si_overrun; /* Overrun count. */
sigval_t si_sigval; /* Signal value. */
} _timer;
struct { /* POSIX.1b signals. */
__pid_t si_pid; /* Sending process ID. */
__uid_t si_uid; /* Real user ID of sending process. */
sigval_t si_sigval; /* Signal value. */
} _rt;
struct { /* SIGCHLD. */
__pid_t si_pid; /* Which child. */
__uid_t si_uid; /* Real user ID of sending process. */
int si_status; /* Exit value or signal. */
__clock_t si_utime;
__clock_t si_stime;
} _sigchld;
struct { /* SIGILL, SIGFPE, SIGSEGV, SIGBUS. */
uintptr_t si_addr; /* Faulting insn/memory ref. */
} _sigfault;
struct { /* SIGPOLL. */
long int si_band; /* Band event for SIGPOLL. */
int si_fd;
} _sigpoll;
};
} siginfo_t;
EOS
# invoke the raw ptrace(2) syscall on the host
# a String data argument is replaced by the address of its buffer;
# addr/data are passed through pack/unpack to fold them into the host's
# native signed word range (Kernel.syscall rejects out-of-range Integers)
def sys_ptrace(req, pid, addr, data)
data = str_ptr(data) if data.kind_of?(String)
addr = [addr].pack(@packint).unpack(@packint).first
data = [data].pack(@packint).unpack(@packint).first
Kernel.syscall(@host_syscallnr['ptrace'], req, pid, addr, data)
end
# thin wrappers around the individual PTRACE_* requests
# mark the current (ruby) process as traced by its parent
def traceme
sys_ptrace(COMMAND['TRACEME'], 0, 0, 0)
end
# read one word of target code/data; result is left in @buf
def peektext(addr)
sys_ptrace(COMMAND['PEEKTEXT'], @pid, addr, @bufptr)
@buf
end
def peekdata(addr)
sys_ptrace(COMMAND['PEEKDATA'], @pid, addr, @bufptr)
@buf
end
# read one word from the target user area (addr is a REGS_* index)
# masked to the smaller of host/target word size
def peekusr(addr)
sys_ptrace(COMMAND['PEEKUSR'], @pid, @host_intsize*addr, @bufptr)
bufval & ((1 << ([@host_intsize, @intsize].min*8)) - 1)
end
# write one word (data is a packed String) of target code/data
def poketext(addr, data)
sys_ptrace(COMMAND['POKETEXT'], @pid, addr, data.unpack(@packint).first)
end
def pokedata(addr, data)
sys_ptrace(COMMAND['POKEDATA'], @pid, addr, data.unpack(@packint).first)
end
# write one word to the target user area (addr is a REGS_* index)
def pokeusr(addr, data)
sys_ptrace(COMMAND['POKEUSR'], @pid, @host_intsize*addr, data)
end
# read the general-purpose registers into buf (512 raw bytes by default)
def getregs(buf=nil)
buf ||= [0].pack('C')*512
sys_ptrace(COMMAND['GETREGS'], @pid, 0, buf)
buf
end
def setregs(buf)
sys_ptrace(COMMAND['SETREGS'], @pid, 0, buf)
end
# read/write the FPU register area (raw bytes)
def getfpregs(buf=nil)
buf ||= [0].pack('C')*1024
sys_ptrace(COMMAND['GETFPREGS'], @pid, 0, buf)
buf
end
def setfpregs(buf)
sys_ptrace(COMMAND['SETFPREGS'], @pid, 0, buf)
end
# read/write the extended FPU/SSE register area (raw bytes)
def getfpxregs(buf=nil)
buf ||= [0].pack('C')*512
sys_ptrace(COMMAND['GETFPXREGS'], @pid, 0, buf)
buf
end
def setfpxregs(buf)
sys_ptrace(COMMAND['SETFPXREGS'], @pid, 0, buf)
end
def get_thread_area(addr)
sys_ptrace(COMMAND['GET_THREAD_AREA'], @pid, addr, @bufptr)
bufval
end
def set_thread_area(addr, data)
sys_ptrace(COMMAND['SET_THREAD_AREA'], @pid, addr, data)
end
def prctl(addr, data)
sys_ptrace(COMMAND['ARCH_PRCTL'], @pid, addr, data)
end
# resume target execution, optionally delivering signal sig
def cont(sig = nil)
  sys_ptrace(COMMAND['CONT'], @pid, 0, sig || 0)
end

# kill the traced target
def kill
  sys_ptrace(COMMAND['KILL'], @pid, 0, 0)
end

# execute one instruction, optionally delivering signal sig
def singlestep(sig = nil)
  sys_ptrace(COMMAND['SINGLESTEP'], @pid, 0, sig || 0)
end

# execute until the next branch, optionally delivering signal sig
def singleblock(sig = nil)
  sys_ptrace(COMMAND['SINGLEBLOCK'], @pid, 0, sig || 0)
end

# resume until the next syscall entry/exit, optionally delivering signal sig
def syscall(sig = nil)
  sys_ptrace(COMMAND['SYSCALL'], @pid, 0, sig || 0)
end
# attach to the running process @pid
def attach
  sys_ptrace(COMMAND['ATTACH'], @pid, 0, 0)
end

# release the target, which resumes untraced execution
def detach
  sys_ptrace(COMMAND['DETACH'], @pid, 0, 0)
end

# set tracing options; args are OPTIONS keys or raw Integer bitmasks
def setoptions(*opt)
  mask = opt.inject(0) { |acc, o| acc | (o.kind_of?(Integer) ? o : OPTIONS[o]) }
  sys_ptrace(COMMAND['SETOPTIONS'], @pid, 0, mask)
end
# retrieve pid of cld for EVENT_CLONE/FORK, exitcode for EVENT_EXIT
def geteventmsg
sys_ptrace(COMMAND['GETEVENTMSG'], @pid, 0, @bufptr)
bufval
end
# lazily build a C siginfo struct (decoded per SIGINFO_C with the target's
# cparser), reused as the backing buffer for get/setsiginfo
def siginfo
@siginfo ||= (
cp = @tgcpu.new_cparser
cp.parse SIGINFO_C
cp.alloc_c_struct('siginfo')
)
end
# fetch the siginfo describing the signal that stopped the target
def getsiginfo
sys_ptrace(COMMAND['GETSIGINFO'], @pid, 0, siginfo.str)
siginfo
end
# overwrite the signal information to be delivered to the target
def setsiginfo(si=siginfo)
si = si.str if si.respond_to?(:str)
sys_ptrace(COMMAND['SETSIGINFO'], @pid, 0, si)
end
end
# Linux OS interaction: process enumeration and inspection through /proc
class LinOS < OS
class Process < OS::Process
# returns/create a LinuxRemoteString
def memory
@memory ||= LinuxRemoteString.new(pid)
end
attr_writer :memory
# returns/create a LinDebugger attached to this process
def debugger
@debugger ||= LinDebugger.new(@pid)
end
attr_writer :debugger
# returns the list of loaded Modules, incl start address & path
# read from /proc/pid/maps
def modules
list = []
seen = {}
File.readlines("/proc/#{pid}/maps").each { |l|
# 08048000-08064000 r-xp 000000 08:01 4234 /usr/bin/true
l = l.split
# skip anonymous mappings (no path field) and already-seen paths
next if l.length < 6 or seen[l[-1]]
seen[l[-1]] = true
m = Module.new
m.addr = l[0].to_i(16)
m.path = l[-1]
list << m
}
list
rescue
# best-effort: /proc entry unreadable (eg target exited)
[]
end
# return a list of [addr_start, length, perms, file]
def mappings
list = []
File.readlines("/proc/#{pid}/maps").each { |l|
l = l.split
addrstart, addrend = l[0].split('-').map { |i| i.to_i 16 }
# l[5] (path) is nil for anonymous mappings
list << [addrstart, addrend-addrstart, l[1], l[5]]
}
list
rescue
[]
end
# returns a list of threads sharing this process address space
# read from /proc/pid/task/
def threads
Dir.entries("/proc/#{pid}/task/").grep(/^\d+$/).map { |tid| tid.to_i }
rescue
# TODO handle pthread stuff (eg 2.4 kernels)
[pid]
end
# return the invocation commandline, from /proc/pid/cmdline
# this is manipulable by the target itself
def cmdline
@cmdline ||= File.read("/proc/#{pid}/cmdline") rescue ''
end
attr_writer :cmdline
# path to the executable: first NUL-separated field of cmdline
def path
cmdline.split(0.chr)[0]
end
# returns the address size of the process, based on its #cpu
def addrsz
cpu.size
end
# returns the CPU for the process, by reading /proc/pid/exe
# note: re-parses the ELF header on every call
def cpu
e = ELF.load_file("/proc/#{pid}/exe")
# dont decode shdr/phdr, this is 2x faster for repeated debugger spawn
e.decode_header(0, false, false)
e.cpu
end
def terminate
kill
end
# send signal signr (default SIGKILL) to the process
def kill(signr=9)
::Process.kill(signr, @pid)
end
end
class << self
# returns an array of Processes, with pid/module listing
def list_processes
Dir.entries('/proc').grep(/^\d+$/).map { |pid| Process.new(pid.to_i) }
end
# return a Process for the specified pid if it exists in /proc
def open_process(pid)
Process.new(pid) if check_process(pid)
end
# does /proc/<pid> exist ?
def check_process(pid)
File.directory?("/proc/#{pid}")
end
# create a LinDebugger on the target pid/binary
def create_debugger(path)
LinDebugger.new(path)
end
end # class << self
end
# a VirtualString mapping a remote process memory, read through
# /proc/pid/mem and written through ptrace
class LinuxRemoteString < VirtualString
attr_accessor :pid, :readfd
attr_accessor :dbg
# returns a virtual string proxying the specified process memory range
# reads are cached (4096 aligned bytes read at once), from /proc/pid/mem
# writes are done directly by ptrace
def initialize(pid, addr_start=0, length=nil, dbg=nil)
@pid = pid
# default length: full address space, sized from the debugger's cpu or /proc
length ||= 1 << (dbg ? dbg.cpu.size : (LinOS.open_process(@pid).addrsz rescue 32))
@readfd = File.open("/proc/#@pid/mem", 'rb') rescue nil
@dbg = dbg if dbg
super(addr_start, length)
end
# duplicate the virtual string over a (sub)range
def dup(addr = @addr_start, len = @length)
self.class.new(@pid, addr, len, dbg)
end
# yield a PTrace handle for the target, reusing the debugger's when one is
# set (only while the target is stopped), else attaching transiently
def do_ptrace
if dbg
dbg.switch_context(@pid) {
# XXX tid ?
yield dbg.ptrace if dbg.state == :stopped
}
else
PTrace.open(@pid) { |ptrace| yield ptrace }
end
end
def rewrite_at(addr, data)
# target must be stopped
do_ptrace { |ptrace| ptrace.writemem(addr, data) }
end
# read one page of remote memory, preferring /proc/pid/mem over
# PTRACE_PEEKDATA; returns nil on unreadable addresses (EIO/ESRCH)
def get_page(addr, len=@pagelength)
do_ptrace { |ptrace|
begin
if readfd and addr < (1<<63)
# 1<<63: ruby seek = 'too big to fit longlong', linux read = EINVAL
@readfd.pos = addr
@readfd.read len
elsif addr < (1<<(ptrace.host_intsize*8))
# can reach 1<<64 with peek_data only if ptrace accepts 64bit args
ptrace.readmem(addr, len)
end
rescue Errno::EIO, Errno::ESRCH
nil
end
}
end
end
# define the linux-specific waitpid flags when the host ruby lacks them
module ::Process
WALL = 0x40000000 if not defined? WALL
WCLONE = 0x80000000 if not defined? WCLONE
end
# this class implements a high-level API over the ptrace debugging primitives
class LinDebugger < Debugger
# ptrace is per-process or per-thread ?
attr_accessor :ptrace, :continuesignal, :has_pax_mprotect, :target_syscall
attr_accessor :callback_syscall, :callback_branch, :callback_exec
def initialize(pidpath=nil)
super()
@pid_stuff_list << :has_pax_mprotect << :ptrace << :breaking << :os_process
@tid_stuff_list << :continuesignal << :saved_csig << :ctx << :target_syscall
# by default, break on all signals except SIGWINCH (terminal resize notification)
@pass_all_exceptions = lambda { |e| e[:signal] == 'WINCH' }
@callback_syscall = lambda { |i| log "syscall #{i[:syscall]}" }
@callback_exec = lambda { |i| log "execve #{os_process.path}" }
return if not pidpath
begin
pid = Integer(pidpath)
attach(pid)
rescue ArgumentError
create_process(pidpath)
end
end
def shortname; 'lindbg'; end
# attach to a running process and all its threads
def attach(pid, do_attach=:attach)
pt = PTrace.new(pid, do_attach)
set_context(pt.pid, pt.pid) # swapout+init_newpid
log "attached #@pid"
list_threads.each { |tid| attach_thread(tid) if tid != @pid }
set_tid @pid
end
# create a process and debug it
def create_process(path)
pt = PTrace.new(path, :create)
# TODO save path, allow restart etc
set_context(pt.pid, pt.pid) # swapout+init_newpid
log "attached #@pid"
end
def initialize_cpu
@cpu = os_process.cpu
# need to init @ptrace here, before init_dasm calls gui.swapin
@ptrace = PTrace.new(@pid, false)
if @cpu.size == 64 and @ptrace.reg_off['EAX']
hack_64_32
end
set_tid @pid
set_thread_options
end
def initialize_memory
@memory = os_process.memory = LinuxRemoteString.new(@pid, 0, nil, self)
end
def os_process
@os_process ||= LinOS.open_process(@pid)
end
def list_threads
os_process.threads
end
def list_processes
LinOS.list_processes
end
def check_pid(pid)
LinOS.check_process(pid)
end
def mappings
os_process.mappings
end
def modules
os_process.modules
end
# we're a 32bit process debugging a 64bit target
# the ptrace kernel interface we use only allow us a 32bit-like target access
# with this we advertize the cpu as having eax..edi registers (the only one we
# can access), while still decoding x64 instructions (whose addr < 4G)
def hack_64_32
log "WARNING: debugging a 64bit process from a 32bit debugger is a very bad idea !"
@cpu.instance_eval {
ia32 = Ia32.new
@dbg_register_pc = ia32.dbg_register_pc
@dbg_register_flags = ia32.dbg_register_flags
@dbg_register_list = ia32.dbg_register_list
@dbg_register_size = ia32.dbg_register_size
}
end
# attach a thread of the current process
def attach_thread(tid)
set_tid tid
@ptrace.pid = tid
@ptrace.attach
@state = :stopped # no need to wait()
log "attached thread #{tid}"
set_thread_options
end
# set the debugee ptrace options (notify clone/exec/exit, and fork/vfork depending on @trace_children)
def set_thread_options
opts = %w[TRACESYSGOOD TRACECLONE TRACEEXEC TRACEEXIT]
opts += %w[TRACEFORK TRACEVFORK TRACEVFORKDONE] if trace_children
@ptrace.pid = @tid
@ptrace.setoptions(*opts)
end
# update the current pid relative to tracing children (@trace_children only effects newly traced pid/tid)
def do_trace_children
each_tid { set_thread_options }
end
def invalidate
@ctx = nil
super()
end
# a hash of the current thread context
# TODO keys = :gpr, :fpu, :xmm, :dr ; val = AllocCStruct
# include accessors for st0/xmm12 (@ptrace.getfpregs.unpack etc)
def ctx
@ctx ||= {}
end
def get_reg_value(r)
raise "bad register #{r}" if not k = @ptrace.reg_off[r.to_s.upcase]
return ctx[r] || 0 if @state != :stopped
@ptrace.pid = @tid
ctx[r] ||= @ptrace.peekusr(k)
rescue Errno::ESRCH
0
end
def set_reg_value(r, v)
raise "bad register #{r}" if not k = @ptrace.reg_off[r.to_s.upcase]
ctx[r] = v
return if @state != :stopped
@ptrace.pid = @tid
@ptrace.pokeusr(k, v)
end
# decode the ::Process::Status from the last waitpid into debugger events
# (@tid must already be set to the thread the status refers to)
def update_waitpid(status)
invalidate
@continuesignal = 0
@state = :stopped # allow get_reg (for eg pt_syscall)
info = { :status => status }
if status.exited?
info.update :exitcode => status.exitstatus
if @tid == @pid # XXX
evt_endprocess info
else
evt_endthread info
end
elsif status.signaled?
info.update :signal => (PTrace::SIGNAL[status.termsig] || status.termsig)
if @tid == @pid
evt_endprocess info
else
evt_endthread info
end
elsif status.stopped?
sig = status.stopsig & 0x7f
signame = PTrace::SIGNAL[sig]
if signame == 'TRAP'
# bit 0x80 in stopsig is set for syscall stops when TRACESYSGOOD is on
if status.stopsig & 0x80 > 0
# XXX int80 in x64 => syscallnr32 ?
evt_syscall info.update(:syscall => @ptrace.syscallnr[get_reg_value(@ptrace.syscallreg)])
elsif (status >> 16) > 0
# ptrace extended event (SETOPTIONS TRACEFORK/CLONE/EXEC/EXIT...)
case o = PTrace::WAIT_EXTENDEDRESULT[status >> 16]
when 'EVENT_FORK', 'EVENT_VFORK'
# parent notification of a fork
# child receives STOP (may have already happened)
#cld = @ptrace.geteventmsg
resume_badbreak
when 'EVENT_CLONE'
#cld = @ptrace.geteventmsg
resume_badbreak
when 'EVENT_EXIT'
@ptrace.pid = @tid
info.update :exitcode => @ptrace.geteventmsg
if @tid == @pid
evt_endprocess info
else
evt_endthread info
end
when 'EVENT_VFORKDONE'
resume_badbreak
when 'EVENT_EXEC'
evt_exec info
end
else
# plain SIGTRAP: inspect siginfo to find what raised it
@ptrace.pid = @tid
si = @ptrace.getsiginfo
case si.si_code
when PTrace::SIGINFO['BRKPT'],
PTrace::SIGINFO['KERNEL'] # \xCC prefer KERNEL to BRKPT
evt_bpx
when PTrace::SIGINFO['TRACE']
evt_singlestep # singlestep/singleblock
when PTrace::SIGINFO['BRANCH']
evt_branch # XXX BTS?
when PTrace::SIGINFO['HWBKPT']
evt_hwbp
else
@saved_csig = @continuesignal = sig
info.update :signal => signame, :type => "SIG#{signame}"
evt_exception info
end
end
elsif signame == 'STOP' and @info == 'new'
# new thread break on creation (eg after fork + TRACEFORK)
if @pid == @tid
attach(@pid, false)
evt_newprocess info
else
evt_newthread info
end
elsif signame == 'STOP' and @breaking
# SIGSTOP we sent ourselves from #break
@state = :stopped
@info = 'break'
@breaking = nil
else
# any other signal: record it so it can be forwarded on continue
@saved_csig = @continuesignal = sig
info.update :signal => signame, :type => "SIG#{signame}"
if signame == 'SEGV'
# need more data on access violation (for bpm)
info.update :type => 'access violation'
@ptrace.pid = @tid
si = @ptrace.getsiginfo
access = case si.si_code
when PTrace::SIGINFO['MAPERR']; :r # XXX write access to unmapped => ?
when PTrace::SIGINFO['ACCERR']; :w
end
info.update :fault_addr => si.si_addr, :fault_access => access
end
evt_exception info
end
else
log "unknown wait status #{status.inspect}"
evt_exception info.update(:type => "unknown wait #{status.inspect}")
end
end
def set_tid_findpid(tid)
return if tid == @tid
if tid != @pid and pr = list_processes.find { |p| p.threads.include? tid }
set_pid pr.pid
end
set_tid tid
end
def do_check_target
return unless t = ::Process.waitpid(-1, ::Process::WNOHANG | ::Process::WALL)
# XXX all threads may have stopped, wait them now ?
set_tid_findpid t
update_waitpid $?
rescue ::Errno::ECHILD
end
def do_wait_target
t = ::Process.waitpid(-1, ::Process::WALL)
set_tid_findpid t
update_waitpid $?
rescue ::Errno::ECHILD
end
def do_continue
@ptrace.pid = tid
@ptrace.cont(@continuesignal)
end
def do_singlestep(*a)
@ptrace.pid = tid
@ptrace.singlestep(@continuesignal)
end
# use the PT_SYSCALL to break on next syscall
# regexp allowed to wait a specific syscall
def syscall(arg=nil)
arg = nil if arg and arg.strip == ''
return if not check_pre_run(:syscall, arg)
@target_syscall = arg
@ptrace.pid = @tid
@ptrace.syscall(@continuesignal)
end
def syscall_wait(*a, &b)
syscall(*a, &b)
wait_target
end
# use the PT_SINGLEBLOCK to execute until the next branch
def singleblock
# record as singlestep to avoid evt_singlestep -> evt_exception
# step or block doesn't matter much here anyway
return if not check_pre_run(:singlestep)
@ptrace.pid = @tid
@ptrace.singleblock(@continuesignal)
end
def singleblock_wait(*a, &b)
singleblock(*a, &b)
wait_target
end
# woke up from a PT_SYSCALL
def evt_syscall(info={})
@state = :stopped
@info = "syscall #{info[:syscall]}"
callback_syscall[info] if callback_syscall
if @target_syscall and info[:syscall] !~ /^#@target_syscall$/i
resume_badbreak
else
@target_syscall = nil
end
end
# SIGTRAP + SIGINFO_TRAP_BRANCH = ?
def evt_branch(info={})
@state = :stopped
@info = "branch"
callback_branch[info] if callback_branch
end
# called during sys_execve in the new process
def evt_exec(info={})
@state = :stopped
@info = "#{info[:exec]} execve"
initialize_newpid
# XXX will receive a SIGTRAP, could hide it..
callback_exec[info] if callback_exec
# calling continue() here will loop back to TRAP+INFO_EXEC
end
def break
@breaking = true
kill 'STOP'
end
def kill(sig=nil)
return if not tid
# XXX tkill ?
::Process.kill(sig2signr(sig), tid)
rescue Errno::ESRCH
end
# choose whether the signal that caused the current stop is forwarded to
# the target on continue (true, default) or swallowed by the debugger (false)
def pass_current_exception(bool=true)
  @continuesignal = bool ? @saved_csig : 0
end
# convert a signal spec (nil/'' => SIGKILL, Integer, or name like
# 'KILL'/'SIGKILL'/'SIG_KILL'/'9') to a signal number
def sig2signr(sig)
  return 9 if sig.nil? or sig == ''
  return sig if sig.kind_of?(Integer)
  raise "unhandled signal #{sig.inspect}" if not sig.kind_of?(String)
  name = sig.upcase.sub(/^SIG_?/, '')
  PTrace::SIGNAL[name] || Integer(name)
end
# stop debugging the current process
def detach
del_all_breakpoints
each_tid {
@ptrace.pid = @tid
@ptrace.detach
@delete_thread = true
}
del_pid
end
def bpx(addr, *a, &b)
return hwbp(addr, :x, 1, *a, &b) if @has_pax_mprotect
super(addr, *a, &b)
end
# handles exceptions from PaX-style mprotect restrictions on bpx,
# transmute them to hwbp on the fly
def do_enable_bp(b)
super(b)
rescue ::Errno::EIO
if b.type == :bpx
@memory[b.address, 1] # check if we can read
# didn't raise: it's a PaX-style config
@has_pax_mprotect = true
b.del
hwbp(b.address, :x, 1, b.oneshot, b.condition, &b.action)
log 'PaX: bpx->hwbp'
else raise
end
end
def ui_command_setup(ui)
ui.new_command('syscall', 'waits for the target to do a syscall using PT_SYSCALL') { |arg| ui.wrap_run { syscall arg } }
ui.keyboard_callback[:f6] = lambda { ui.wrap_run { syscall } }
ui.new_command('signal_cont', 'set/get the continue signal (0 == unset)') { |arg|
case arg.to_s.strip
when ''; log "#{@continuesignal} (#{PTrace::SIGNAL[@continuesignal]})"
else @continuesignal = sig2signr(arg)
end
}
end
end
end
# ---- second (updated) revision of this source follows ----
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
require 'metasm/os/main'
require 'metasm/debug'
module Metasm
class PTrace
attr_accessor :buf, :pid
# open a target (pid or exec path); with a block, yields the PTrace
# session and always detaches afterwards
def self.open(target)
ptrace = new(target)
return ptrace if not block_given?
begin
yield ptrace
ensure
ptrace.detach
end
end
# calls PTRACE_TRACEME on the current (ruby) process
def self.traceme
new(::Process.pid, false).traceme
end
# creates a ptraced process (target = path)
# or opens a running process (target = pid)
# values for do_attach:
# :create => always fork+traceme+exec+wait
# :attach => always attach
# :dup => copy the state of an existing PTrace (target is that PTrace)
# false/nil => same as attach, without actually calling PT_ATTACH (useful when the ruby process is already tracing pid)
# default/anything else: try to attach if pid is numeric, else create
def initialize(target, do_attach=true, &b)
case do_attach
when :create
init_create(target, &b)
when :attach
init_attach(target)
when :dup
raise ArgumentError unless target.kind_of?(PTrace)
@pid = target.pid
tweak_for_pid(@pid, target.tgcpu) # avoid re-parsing /proc/self/exe
when nil, false
# pid already traced by us: setup only, no PT_ATTACH / wait
@pid = Integer(target)
tweak_for_pid(@pid)
else
# Integer() raises for non-numeric targets => treat as an exec path
t = begin; Integer(target)
rescue ArgumentError, TypeError
end
t ? init_attach(t) : init_create(target, &b)
end
end
# attach to the running pid and wait for it to stop
def init_attach(target)
@pid = Integer(target)
tweak_for_pid(@pid)
attach
wait
puts "Ptrace: attached to #@pid" if $DEBUG
end
# fork, PT_TRACEME in the child, then exec target
# the optional block is run in the child between traceme and exec
def init_create(target, &b)
if not @pid = ::Process.fork
tweak_for_pid(::Process.pid)
traceme
b.call if b
::Process.exec(*target)
exit!(0)
end
wait
# on a successful exec the child stops with a signal; exited? means exec failed
raise "could not exec #{target}" if $?.exited?
tweak_for_pid(@pid)
puts "Ptrace: attached to new #@pid" if $DEBUG
end
def wait
::Process.wait(@pid, ::Process::WALL)
end
attr_accessor :reg_off, :intsize, :syscallnr, :syscallreg
attr_accessor :packint, :packuint, :host_intsize, :host_syscallnr
attr_accessor :tgcpu
@@sys_ptrace = {}
# setup variables according to the target (ptrace interface, syscall nrs, ...)
def tweak_for_pid(pid=@pid, tgcpu=nil)
# use these for our syscalls PTRACE
@@host_csn ||= LinOS.open_process(::Process.pid).cpu.shortname
case @@host_csn
when 'ia32'
@packint = 'l'
@packuint = 'L'
@host_intsize = 4
@host_syscallnr = SYSCALLNR_I386
@reg_off = REGS_I386
when 'x64'
@packint = 'q'
@packuint = 'Q'
@host_intsize = 8
@host_syscallnr = SYSCALLNR_X86_64
@reg_off = REGS_X86_64
else raise 'unsupported architecture'
end
@tgcpu = tgcpu || LinOS.open_process(pid).cpu
# use these to interpret the child state
case @tgcpu.shortname
when 'ia32'
@syscallreg = 'ORIG_EAX'
@syscallnr = SYSCALLNR_I386
@intsize = 4
when 'x64'
@syscallreg = 'ORIG_RAX'
@syscallnr = SYSCALLNR_X86_64
@intsize = 8
else raise 'unsupported target architecture'
end
# buffer used in ptrace syscalls
@buf = [0].pack(@packint)
@sys_ptrace = @@sys_ptrace[@host_syscallnr['ptrace']] ||= setup_sys_ptrace(@host_syscallnr['ptrace'])
end
def setup_sys_ptrace(sysnr)
moo = Class.new(DynLdr)
case @@host_csn
when 'ia32'
# XXX compat lin2.4 ?
asm = <<EOS
#define off 3*4
push ebx
push esi
mov eax, #{sysnr}
mov ebx, [esp+off]
mov ecx, [esp+off+4]
mov edx, [esp+off+8]
mov esi, [esp+off+12]
call gs:[10h]
pop esi
pop ebx
ret
EOS
when 'x64'
asm = <<EOS
#define off 3*8
mov rax, #{sysnr}
//mov rdi, rdi
//mov rsi, rdi
//mov rdx, rdx
mov r10, rcx
syscall
ret
EOS
else raise 'unsupported target architecture'
end
moo.new_func_asm 'long ptrace(unsigned long, unsigned long, unsigned long, unsigned long)', asm
moo
end
def host_csn; @@host_csn end
def dup
self.class.new(self, :dup)
end
def str_ptr(str)
[str].pack('P').unpack(@packint).first
end
# interpret the value turned as an unsigned long
def bufval
@buf.unpack(@packint).first
end
# reads a memory range from the target, returned as a raw String
# off/len need not be word-aligned: partial leading/trailing words are handled
def readmem(off, len)
	decal = off % @host_intsize
	buf = ''
	if decal > 0
		# unaligned start: read the whole containing word, keep only the tail bytes
		off -= decal
		peekdata(off)
		off += @host_intsize
		buf << @buf[decal...@host_intsize]
	end
	offend = off + len
	while off < offend
		# peekdata fills @buf with one host word in place
		peekdata(off)
		buf << @buf[0, @host_intsize]
		off += @host_intsize
	end
	# we may have read up to a word past the end: truncate to the requested length
	buf[0, len]
end
# writes a raw String to the target's memory
# ptrace can only poke whole aligned words, so unaligned start/end are
# completed with the target's current memory content
def writemem(off, str)
	# NOTE(review): mutates the caller's string encoding in-place
	str.force_encoding('binary') if str.respond_to?(:force_encoding)
	decal = off % @host_intsize
	if decal > 0
		# align the start: prepend the target's existing leading bytes
		off -= decal
		peekdata(off)
		str = @buf[0...decal] + str
	end
	decal = str.length % @host_intsize
	if decal > 0
		# align the end: append the target's existing trailing bytes
		peekdata(off+str.length-decal)
		str += @buf[decal...@host_intsize]
	end
	i = 0
	while i < str.length
		pokedata(off+i, str[i, @host_intsize])
		i += @host_intsize
	end
	true
end
# linux/ptrace.h
# ptrace request numbers by symbolic name
COMMAND = {
	:TRACEME => 0, :PEEKTEXT => 1,
	:PEEKDATA => 2, :PEEKUSR => 3,
	:POKETEXT => 4, :POKEDATA => 5,
	:POKEUSR => 6, :CONT => 7,
	:KILL => 8, :SINGLESTEP => 9,
	:ATTACH => 16, :DETACH => 17,
	:SYSCALL => 24,
	# arch/x86/include/ptrace-abi.h
	:GETREGS => 12, :SETREGS => 13,
	:GETFPREGS => 14, :SETFPREGS => 15,
	:GETFPXREGS => 18, :SETFPXREGS => 19,
	:OLDSETOPTIONS => 21, :GET_THREAD_AREA => 25,
	:SET_THREAD_AREA => 26, :ARCH_PRCTL => 30,
	:SYSEMU => 31, :SYSEMU_SINGLESTEP=> 32,
	:SINGLEBLOCK => 33,
	# 0x4200-0x4300 are reserved for architecture-independent additions.
	:SETOPTIONS => 0x4200, :GETEVENTMSG => 0x4201,
	:GETSIGINFO => 0x4202, :SETSIGINFO => 0x4203
}
OPTIONS = {
	# options set using PTRACE_SETOPTIONS
	'TRACESYSGOOD' => 0x01, 'TRACEFORK' => 0x02,
	'TRACEVFORK' => 0x04, 'TRACECLONE' => 0x08,
	'TRACEEXEC' => 0x10, 'TRACEVFORKDONE'=> 0x20,
	'TRACEEXIT' => 0x40, 'TRACESECCOMP' => 0x80,
}
WAIT_EXTENDEDRESULT = {
	# Wait extended result codes for the above trace options.
	'EVENT_FORK' => 1, 'EVENT_VFORK' => 2,
	'EVENT_CLONE' => 3, 'EVENT_EXEC' => 4,
	'EVENT_VFORK_DONE' => 5, 'EVENT_EXIT' => 6,
	'EVENT_SECCOMP' => 7,
}
# make the map bidirectional (number => name)
WAIT_EXTENDEDRESULT.update WAIT_EXTENDEDRESULT.invert
# block trace
BTS_O = { 'TRACE' => 1, 'SCHED' => 2, 'SIGNAL' => 4, 'ALLOC' => 8 }
BTS = { 'CONFIG' => 40, 'STATUS' => 41, 'SIZE' => 42,
	'GET' => 43, 'CLEAR' => 44, 'DRAIN' => 45 }
# word-offsets into the USER area, for peekusr/pokeusr
REGS_I386 = {
	'EBX' => 0, 'ECX' => 1, 'EDX' => 2, 'ESI' => 3,
	'EDI' => 4, 'EBP' => 5, 'EAX' => 6, 'DS' => 7,
	'ES' => 8, 'FS' => 9, 'GS' => 10, 'ORIG_EAX' => 11,
	'EIP' => 12, 'CS' => 13, 'EFL' => 14, 'UESP'=> 15,
	'EFLAGS' => 14, 'ESP' => 15,
	'SS' => 16,
	# from ptrace.c in kernel source & asm-i386/user.h
	'DR0' => 63, 'DR1' => 64, 'DR2' => 65, 'DR3' => 66,
	'DR4' => 67, 'DR5' => 68, 'DR6' => 69, 'DR7' => 70
}
REGS_X86_64 = {
	'R15' => 0, 'R14' => 1, 'R13' => 2, 'R12' => 3,
	'RBP' => 4, 'RBX' => 5, 'R11' => 6, 'R10' => 7,
	'R9' => 8, 'R8' => 9, 'RAX' => 10, 'RCX' => 11,
	'RDX' => 12, 'RSI' => 13, 'RDI' => 14, 'ORIG_RAX' => 15,
	'RIP' => 16, 'CS' => 17, 'RFLAGS' => 18, 'RSP' => 19,
	'SS' => 20, 'FS_BASE' => 21, 'GS_BASE' => 22, 'DS' => 23,
	'ES' => 24, 'FS' => 25, 'GS' => 26,
	#'FP_VALID' => 27,
	#'387_XWD' => 28, '387_RIP' => 29, '387_RDP' => 30, '387_MXCSR' => 31,
	#'FP0' => 32, 'FP1' => 34, 'FP2' => 36, 'FP3' => 38,
	#'FP4' => 40, 'FP5' => 42, 'FP6' => 44, 'FP7' => 46,
	#'XMM0' => 48, 'XMM1' => 52, 'XMM2' => 56, 'XMM3' => 60,
	#'XMM4' => 64, 'XMM5' => 68, 'XMM6' => 72, 'XMM7' => 76,
	#'FPAD0' => 80, 'FPAD11' => 91,
	#'TSZ' => 92, 'DSZ' => 93, 'SSZ' => 94, 'CODE' => 95,
	#'STK' => 96, 'SIG' => 97, 'PAD' => 98, 'U_AR0' => 99,
	#'FPPTR' => 100, 'MAGIC' => 101, 'COMM0' => 102, 'COMM1' => 103,
	#'COMM2' => 104, 'COMM3' => 105,
	'DR0' => 106, 'DR1' => 107, 'DR2' => 108, 'DR3' => 109,
	'DR4' => 110, 'DR5' => 111, 'DR6' => 112, 'DR7' => 113,
	#'ERROR_CODE' => 114, 'FAULT_ADDR' => 115
}
# bidirectional syscall name <-> number maps: the %w list is ordered by
# syscall number, inject builds name => nr, then .invert adds nr => name
# fix: syscall 335 is 'rt_tgsigqueueinfo' (kernel name, matching the x64
# table below); it was misspelled 'rt_tg_sigqueueinfo' and could never be
# found by name
SYSCALLNR_I386 = %w[restart_syscall exit fork read write open close waitpid creat link unlink execve chdir time
	mknod chmod lchown break oldstat lseek getpid mount umount setuid getuid stime ptrace alarm oldfstat
	pause utime stty gtty access nice ftime sync kill rename mkdir rmdir dup pipe times prof brk setgid
	getgid signal geteuid getegid acct umount2 lock ioctl fcntl mpx setpgid ulimit oldolduname umask
	chroot ustat dup2 getppid getpgrp setsid sigaction sgetmask ssetmask setreuid setregid sigsuspend
	sigpending sethostname setrlimit getrlimit getrusage gettimeofday settimeofday getgroups setgroups
	select symlink oldlstat readlink uselib swapon reboot readdir mmap munmap truncate ftruncate fchmod
	fchown getpriority setpriority profil statfs fstatfs ioperm socketcall syslog setitimer getitimer
	stat lstat fstat olduname iopl vhangup idle vm86old wait4 swapoff sysinfo ipc fsync sigreturn
	clone setdomainname uname modify_ldt adjtimex mprotect sigprocmask create_module init_module
	delete_module get_kernel_syms quotactl getpgid fchdir bdflush sysfs personality afs_syscall setfsuid
	setfsgid _llseek getdents _newselect flock msync readv writev getsid fdatasync _sysctl mlock
	munlock mlockall munlockall sched_setparam sched_getparam sched_setscheduler sched_getscheduler
	sched_yield sched_get_priority_max sched_get_priority_min sched_rr_get_interval nanosleep mremap
	setresuid getresuid vm86 query_module poll nfsservctl setresgid getresgid prctl rt_sigreturn
	rt_sigaction rt_sigprocmask rt_sigpending rt_sigtimedwait rt_sigqueueinfo rt_sigsuspend pread64
	pwrite64 chown getcwd capget capset sigaltstack sendfile getpmsg putpmsg vfork ugetrlimit mmap2
	truncate64 ftruncate64 stat64 lstat64 fstat64 lchown32 getuid32 getgid32 geteuid32 getegid32
	setreuid32 setregid32 getgroups32 setgroups32 fchown32 setresuid32 getresuid32 setresgid32
	getresgid32 chown32 setuid32 setgid32 setfsuid32 setfsgid32 pivot_root mincore madvise getdents64
	fcntl64 sys_222 sys_223 gettid readahead setxattr lsetxattr fsetxattr getxattr lgetxattr fgetxattr
	listxattr llistxattr flistxattr removexattr lremovexattr fremovexattr tkill sendfile64 futex
	sched_setaffinity sched_getaffinity set_thread_area get_thread_area io_setup io_destroy io_getevents
	io_submit io_cancel fadvise64 sys_251 exit_group lookup_dcookie epoll_create epoll_ctl epoll_wait
	remap_file_pages set_tid_address timer_create timer_settime timer_gettime timer_getoverrun
	timer_delete clock_settime clock_gettime clock_getres clock_nanosleep statfs64 fstatfs64 tgkill
	utimes fadvise64_64 vserver mbind get_mempolicy set_mempolicy mq_open mq_unlink mq_timedsend
	mq_timedreceive mq_notify mq_getsetattr kexec_load waitid sys_setaltroot add_key request_key keyctl
	ioprio_set ioprio_get inotify_init inotify_add_watch inotify_rm_watch migrate_pages openat mkdirat
	mknodat fchownat futimesat fstatat64 unlinkat renameat linkat symlinkat readlinkat fchmodat
	faccessat pselect6 ppoll unshare set_robust_list get_robust_list splice sync_file_range tee vmsplice
	move_pages getcpu epoll_pwait utimensat signalfd timerfd eventfd fallocate timerfd_settime
	timerfd_gettime signalfd4 eventfd2 epoll_create1 dup3 pipe2 inotify_init1 preadv pwritev
	rt_tgsigqueueinfo perf_counter_open].inject({}) { |h, sc| h.update sc => h.length }
SYSCALLNR_I386.update SYSCALLNR_I386.invert
SYSCALLNR_X86_64 = %w[read write open close stat fstat lstat poll lseek mmap mprotect munmap brk rt_sigaction
	rt_sigprocmask rt_sigreturn ioctl pread64 pwrite64 readv writev access pipe select sched_yield
	mremap msync mincore madvise shmget shmat shmctl dup dup2 pause nanosleep getitimer alarm
	setitimer getpid sendfile socket connect accept sendto recvfrom sendmsg recvmsg shutdown
	bind listen getsockname getpeername socketpair setsockopt getsockopt clone fork vfork execve
	exit wait4 kill uname semget semop semctl shmdt msgget msgsnd msgrcv msgctl fcntl flock
	fsync fdatasync truncate ftruncate getdents getcwd chdir fchdir rename mkdir rmdir creat
	link unlink symlink readlink chmod fchmod chown fchown lchown umask gettimeofday getrlimit
	getrusage sysinfo times ptrace getuid syslog getgid setuid setgid geteuid getegid setpgid
	getppid getpgrp setsid setreuid setregid getgroups setgroups setresuid getresuid setresgid
	getresgid getpgid setfsuid setfsgid getsid capget capset rt_sigpending rt_sigtimedwait
	rt_sigqueueinfo rt_sigsuspend sigaltstack utime mknod uselib personality ustat statfs fstatfs
	sysfs getpriority setpriority sched_setparam sched_getparam sched_setscheduler sched_getscheduler
	sched_get_priority_max sched_get_priority_min sched_rr_get_interval mlock munlock mlockall
	munlockall vhangup modify_ldt pivot_root _sysctl prctl arch_prctl adjtimex setrlimit chroot sync
	acct settimeofday mount umount2 swapon swapoff reboot sethostname setdomainname iopl ioperm
	create_module init_module delete_module get_kernel_syms query_module quotactl nfsservctl getpmsg
	putpmsg afs_syscall tuxcall security gettid readahead setxattr lsetxattr fsetxattr getxattr
	lgetxattr fgetxattr listxattr llistxattr flistxattr removexattr lremovexattr fremovexattr tkill time
	futex sched_setaffinity sched_getaffinity set_thread_area io_setup io_destroy io_getevents io_submit
	io_cancel get_thread_area lookup_dcookie epoll_create epoll_ctl_old epoll_wait_old remap_file_pages
	getdents64 set_tid_address restart_syscall semtimedop fadvise64 timer_create timer_settime
	timer_gettime timer_getoverrun timer_delete clock_settime clock_gettime clock_getres clock_nanosleep
	exit_group epoll_wait epoll_ctl tgkill utimes vserver mbind set_mempolicy get_mempolicy mq_open
	mq_unlink mq_timedsend mq_timedreceive mq_notify mq_getsetattr kexec_load waitid add_key request_key
	keyctl ioprio_set ioprio_get inotify_init inotify_add_watch inotify_rm_watch migrate_pages openat
	mkdirat mknodat fchownat futimesat newfstatat unlinkat renameat linkat symlinkat readlinkat
	fchmodat faccessat pselect6 ppoll unshare set_robust_list get_robust_list splice tee sync_file_range
	vmsplice move_pages utimensat epoll_pwait signalfd timerfd_create eventfd fallocate timerfd_settime
	timerfd_gettime accept4 signalfd4 eventfd2 epoll_create1 dup3 pipe2 inotify_init1 preadv pwritev
	rt_tgsigqueueinfo perf_counter_open].inject({}) { |h, sc| h.update sc => h.length }
SYSCALLNR_X86_64.update SYSCALLNR_X86_64.invert
# signal name <-> number mapping (bidirectional), from the host signal list
SIGNAL = Signal.list.dup
# drop the pseudo-signal 0 ('EXIT'); Hash#key replaces the deprecated
# (and removed in modern ruby) Hash#index
SIGNAL.delete SIGNAL.key(0)
SIGNAL['TRAP'] ||= 5 # windows+gdbremote
SIGNAL.update SIGNAL.invert
# include/asm-generic/errno-base.h
# errno name <-> number mapping (bidirectional); list index == errno value
ERRNO = %w[ERR0 EPERM ENOENT ESRCH EINTR EIO ENXIO E2BIG ENOEXEC EBADF ECHILD EAGAIN ENOMEM EACCES EFAULT
	ENOTBLK EBUSY EEXIST EXDEV ENODEV ENOTDIR EISDIR EINVAL ENFILE EMFILE ENOTTY ETXTBSY EFBIG ENOSPC
	ESPIPE EROFS EMLINK EPIPE EDOM ERANGE].inject({}) { |h, e| h.update e => h.length }
ERRNO.update ERRNO.invert
# siginfo si_code values; note the namespaces overlap: values 1..8 mean
# different things depending on the delivered signal (ILL/FPE/SEGV/BUS/...)
SIGINFO = {
	# user-generated signal
	'DETHREAD' => -7, # execve killing threads
	'TKILL' => -6, 'SIGIO' => -5, 'ASYNCIO' => -4, 'MESGQ' => -3,
	'TIMER' => -2, 'QUEUE' => -1, 'USER' => 0, 'KERNEL' => 0x80,
	# ILL
	'ILLOPC' => 1, 'ILLOPN' => 2, 'ILLADR' => 3, 'ILLTRP' => 4,
	'PRVOPC' => 5, 'PRVREG' => 6, 'COPROC' => 7, 'BADSTK' => 8,
	# FPE
	'INTDIV' => 1, 'INTOVF' => 2, 'FLTDIV' => 3, 'FLTOVF' => 4,
	'FLTUND' => 5, 'FLTRES' => 6, 'FLTINV' => 7, 'FLTSUB' => 8,
	# SEGV
	'MAPERR' => 1, 'ACCERR' => 2,
	# BUS
	'ADRALN' => 1, 'ADRERR' => 2, 'OBJERR' => 3, 'MCEERR_AR' => 4,
	'MCEERR_AO' => 5,
	# TRAP
	'BRKPT' => 1, 'TRACE' => 2, 'BRANCH' => 3, 'HWBKPT' => 4,
	# CHLD
	'EXITED' => 1, 'KILLED' => 2, 'DUMPED' => 3, 'TRAPPED' => 4,
	'STOPPED' => 5, 'CONTINUED' => 6,
	# POLL
	'POLL_IN' => 1, 'POLL_OUT' => 2, 'POLL_MSG' => 3, 'POLL_ERR' => 4,
	'POLL_PRI' => 5, 'POLL_HUP' => 6
}
# C declaration of struct siginfo, parsed by the cparser to decode
# PTRACE_GETSIGINFO data (see #siginfo / #getsiginfo)
SIGINFO_C = <<EOS
typedef __int32 __pid_t;
typedef unsigned __int32 __uid_t;
typedef uintptr_t sigval_t;
typedef long __clock_t;
struct siginfo {
	int si_signo;
	int si_errno;
	int si_code;
	// int pad64;
	union {
		int _pad[128/4-3]; /* total >= 128b */
		struct { /* kill(). */
			__pid_t si_pid; /* Sending process ID. */
			__uid_t si_uid; /* Real user ID of sending process. */
		} _kill;
		struct { /* POSIX.1b timers. */
			int si_tid; /* Timer ID. */
			int si_overrun; /* Overrun count. */
			sigval_t si_sigval; /* Signal value. */
		} _timer;
		struct { /* POSIX.1b signals. */
			__pid_t si_pid; /* Sending process ID. */
			__uid_t si_uid; /* Real user ID of sending process. */
			sigval_t si_sigval; /* Signal value. */
		} _rt;
		struct { /* SIGCHLD. */
			__pid_t si_pid; /* Which child. */
			__uid_t si_uid; /* Real user ID of sending process. */
			int si_status; /* Exit value or signal. */
			__clock_t si_utime;
			__clock_t si_stime;
		} _sigchld;
		struct { /* SIGILL, SIGFPE, SIGSEGV, SIGBUS. */
			uintptr_t si_addr; /* Faulting insn/memory ref. */
		} _sigfault;
		struct { /* SIGPOLL. */
			long int si_band; /* Band event for SIGPOLL. */
			int si_fd;
		} _sigpoll;
		struct { /* SIGSYS under SECCOMP */
			uintptr_t si_calladdr; /* calling insn address */
			int si_syscall; /* triggering syscall nr */
			int si_arch; /* AUDIT_ARCH_* for syscall */
		} _sigsys;
	};
};
EOS
# invoke the raw ptrace(2) syscall through the compiled stub
# req: numeric ptrace request, pid/addr/data: raw syscall arguments
# raises SystemCallError on failure, returns the syscall return value
def sys_ptrace(req, pid, addr, data)
	ret = @sys_ptrace.ptrace(req, pid, addr, data)
	# small negative values are -errno (large negatives may be valid PEEK results)
	if ret < 0 and ret > -256
		# Hash#key (non-deprecated replacement for Hash#index) maps the request number back to its name
		raise SystemCallError.new("ptrace #{COMMAND.key(req) || req}", -ret)
	end
	ret
end
# make the current process traceable by its parent (PTRACE_TRACEME)
def traceme
	sys_ptrace(COMMAND[:TRACEME], 0, 0, 0)
end
# read one aligned word of code at addr into @buf
def peektext(addr)
	sys_ptrace(COMMAND[:PEEKTEXT], @pid, addr, @buf)
	@buf
end
# read one aligned word of data at addr into @buf
def peekdata(addr)
	sys_ptrace(COMMAND[:PEEKDATA], @pid, addr, @buf)
	@buf
end
# read one word from the child's USER area; addr is a word index, not a byte offset
def peekusr(addr)
	sys_ptrace(COMMAND[:PEEKUSR], @pid, @host_intsize*addr, @buf)
	# mask to the smaller of host/target word sizes
	@peekmask ||= (1 << ([@host_intsize, @intsize].min*8)) - 1
	bufval & @peekmask
end
# write one aligned word of code at addr (data: packed String)
def poketext(addr, data)
	sys_ptrace(COMMAND[:POKETEXT], @pid, addr, data.unpack(@packint).first)
end
# write one aligned word of data at addr (data: packed String)
def pokedata(addr, data)
	sys_ptrace(COMMAND[:POKEDATA], @pid, addr, data.unpack(@packint).first)
end
# write one word to the child's USER area; addr is a word index, data an Integer
def pokeusr(addr, data)
	sys_ptrace(COMMAND[:POKEUSR], @pid, @host_intsize*addr, data)
end
# read the general-purpose registers into buf (raw String or AllocCStruct)
def getregs(buf=nil)
	buf = buf.str if buf.respond_to?(:str) # AllocCStruct
	buf ||= [0].pack('C')*512
	sys_ptrace(COMMAND[:GETREGS], @pid, 0, buf)
	buf
end
# write the general-purpose registers from buf
def setregs(buf)
	buf = buf.str if buf.respond_to?(:str)
	sys_ptrace(COMMAND[:SETREGS], @pid, 0, buf)
end
# read the FPU registers into buf
def getfpregs(buf=nil)
	buf = buf.str if buf.respond_to?(:str)
	buf ||= [0].pack('C')*1024
	sys_ptrace(COMMAND[:GETFPREGS], @pid, 0, buf)
	buf
end
# write the FPU registers from buf
def setfpregs(buf)
	buf = buf.str if buf.respond_to?(:str)
	sys_ptrace(COMMAND[:SETFPREGS], @pid, 0, buf)
end
# read the extended FPU/SSE (fxsave) registers into buf
def getfpxregs(buf=nil)
	buf = buf.str if buf.respond_to?(:str)
	buf ||= [0].pack('C')*512
	sys_ptrace(COMMAND[:GETFPXREGS], @pid, 0, buf)
	buf
end
# write the extended FPU/SSE registers from buf
def setfpxregs(buf)
	buf = buf.str if buf.respond_to?(:str)
	sys_ptrace(COMMAND[:SETFPXREGS], @pid, 0, buf)
end
# read a thread-local storage descriptor
def get_thread_area(addr)
	sys_ptrace(COMMAND[:GET_THREAD_AREA], @pid, addr, @buf)
	bufval
end
# write a thread-local storage descriptor
def set_thread_area(addr, data)
	sys_ptrace(COMMAND[:SET_THREAD_AREA], @pid, addr, data)
end
# x64 arch_prctl (fs/gs base manipulation)
def prctl(addr, data)
	sys_ptrace(COMMAND[:ARCH_PRCTL], @pid, addr, data)
end
# resume the target, optionally delivering signal sig
def cont(sig = nil)
	sig ||= 0
	sys_ptrace(COMMAND[:CONT], @pid, 0, sig)
end
# kill the target
def kill
	sys_ptrace(COMMAND[:KILL], @pid, 0, 0)
end
# resume for a single instruction
def singlestep(sig = nil)
	sig ||= 0
	sys_ptrace(COMMAND[:SINGLESTEP], @pid, 0, sig)
end
# resume until the next branch instruction
def singleblock(sig = nil)
	sig ||= 0
	sys_ptrace(COMMAND[:SINGLEBLOCK], @pid, 0, sig)
end
# resume until the next syscall entry/exit
def syscall(sig = nil)
	sig ||= 0
	sys_ptrace(COMMAND[:SYSCALL], @pid, 0, sig)
end
# attach to the (already running) target
def attach
	sys_ptrace(COMMAND[:ATTACH], @pid, 0, 0)
end
# detach from the target, letting it run free
def detach
	sys_ptrace(COMMAND[:DETACH], @pid, 0, 0)
end
# set PTRACE_O_* options; opt: Integers or OPTIONS key names, OR'ed together
def setoptions(*opt)
	opt = opt.inject(0) { |b, o| b |= o.kind_of?(Integer) ? o : OPTIONS[o] }
	sys_ptrace(COMMAND[:SETOPTIONS], @pid, 0, opt)
end
# retrieve pid of cld for EVENT_CLONE/FORK, exitcode for EVENT_EXIT
def geteventmsg
	sys_ptrace(COMMAND[:GETEVENTMSG], @pid, 0, @buf)
	bufval
end
# lazily-created C parser for the target cpu, used to decode siginfo
def cp
	@cp ||= @tgcpu.new_cparser
end
# allocated C struct siginfo, parsed from SIGINFO_C on first use
def siginfo
	@siginfo ||= (
		cp.parse SIGINFO_C if not cp.toplevel.struct['siginfo']
		cp.alloc_c_struct('siginfo')
	)
end
# fetch the siginfo of the last signal received by the target
def getsiginfo
	sys_ptrace(COMMAND[:GETSIGINFO], @pid, 0, siginfo.str)
	siginfo
end
# overwrite the siginfo the target will receive
def setsiginfo(si=siginfo)
	si = si.str if si.respond_to?(:str)
	sys_ptrace(COMMAND[:SETSIGINFO], @pid, 0, si)
end
end
# Linux implementation of the OS interface, backed by /proc
class LinOS < OS
	class Process < OS::Process
		# returns/create a LinuxRemoteString
		def memory
			@memory ||= LinuxRemoteString.new(pid)
		end
		attr_writer :memory
		# returns/create a LinDebugger attached to this process
		def debugger
			@debugger ||= LinDebugger.new(@pid)
		end
		attr_writer :debugger
		# returns the list of loaded Modules, incl start address & path
		# read from /proc/pid/maps
		def modules
			list = []
			seen = {}
			File.readlines("/proc/#{pid}/maps").each { |l|
				# 08048000-08064000 r-xp 000000 08:01 4234 /usr/bin/true
				l = l.split
				# keep only the first (lowest) mapping of each file
				next if l.length < 6 or seen[l[-1]]
				seen[l[-1]] = true
				m = Module.new
				m.addr = l[0].to_i(16)
				m.path = l[-1]
				list << m
			}
			list
		rescue
			# /proc entry unreadable (process gone, permissions): report no modules
			[]
		end
		# return a list of [addr_start, length, perms, file]
		def mappings
			list = []
			File.readlines("/proc/#{pid}/maps").each { |l|
				l = l.split
				addrstart, addrend = l[0].split('-').map { |i| i.to_i 16 }
				list << [addrstart, addrend-addrstart, l[1], l[5]]
			}
			list
		rescue
			[]
		end
		# returns a list of threads sharing this process address space
		# read from /proc/pid/task/
		def threads
			Dir.entries("/proc/#{pid}/task/").grep(/^\d+$/).map { |tid| tid.to_i }
		rescue
			# TODO handle pthread stuff (eg 2.4 kernels)
			[pid]
		end
		# return the invocation commandline, from /proc/pid/cmdline
		# this is manipulable by the target itself
		def cmdline
			@cmdline ||= File.read("/proc/#{pid}/cmdline") rescue ''
		end
		attr_writer :cmdline
		# path of the executable, ie the first NUL-separated cmdline element
		def path
			cmdline.split(0.chr)[0]
		end
		# returns the address size of the process, based on its #cpu
		def addrsz
			cpu.size
		end
		# returns the CPU for the process, by reading /proc/pid/exe
		def cpu
			e = ELF.load_file("/proc/#{pid}/exe")
			# dont decode shdr/phdr, this is 2x faster for repeated debugger spawn
			e.decode_header(0, false, false)
			e.cpu
		end
		def terminate
			kill
		end
		# send a signal to the process (default SIGKILL)
		def kill(signr=9)
			::Process.kill(signr, @pid)
		end
	end
	class << self
		# returns an array of Processes, with pid/module listing
		def list_processes
			Dir.entries('/proc').grep(/^\d+$/).map { |pid| Process.new(pid.to_i) }
		end
		# return a Process for the specified pid if it exists in /proc
		def open_process(pid)
			Process.new(pid) if check_process(pid)
		end
		# check that /proc has an entry for pid
		def check_process(pid)
			File.directory?("/proc/#{pid}")
		end
		# create a LinDebugger on the target pid/binary
		def create_debugger(path)
			LinDebugger.new(path)
		end
	end # class << self
end
class LinuxRemoteString < VirtualString
	attr_accessor :pid, :readfd
	attr_accessor :dbg
	# returns a virtual string proxying the specified process memory range
	# reads are cached (4096 aligned bytes read at once), from /proc/pid/mem
	# writes are done directly by ptrace
	def initialize(pid, addr_start=0, length=nil, dbg=nil)
		@pid = pid
		length ||= 1 << (dbg ? dbg.cpu.size : (LinOS.open_process(@pid).addrsz rescue 32))
		# /proc/pid/mem may be unreadable (perms, old kernels): fall back to ptrace reads
		@readfd = File.open("/proc/#@pid/mem", 'rb') rescue nil
		@dbg = dbg if dbg
		super(addr_start, length)
	end
	# duplicate the proxy (same pid/debugger), optionally on a sub-range
	def dup(addr = @addr_start, len = @length)
		self.class.new(@pid, addr, len, dbg)
	end
	# yields a ptrace object valid for the target; ensures the thread is
	# stopped first when going through the debugger
	# needproc: true if the caller wants to read /proc/pid/mem (which needs
	# the main thread stopped, otherwise we force the ptrace fallback)
	def do_ptrace(needproc)
		if dbg
			dbg.switch_context(@pid) {
				st = dbg.state
				next if st != :stopped
				if needproc
					# we will try to access /proc/pid/mem
					# if the main thread is still running, fallback to ptrace.readmem instead
					pst = (dbg.tid == @pid ? st : dbg.tid_stuff[@pid][:state])
					if pst != :stopped
						# temporarily hide @readfd so get_page takes the ptrace path
						savedreadfd = @readfd
						@readfd = nil
						begin
							yield dbg.ptrace
						ensure
							@readfd = savedreadfd
						end
					else
						yield dbg.ptrace
					end
				else
					yield dbg.ptrace
				end
			}
		else
			# no debugger: attach/detach around the block
			PTrace.open(@pid) { |ptrace| yield ptrace }
		end
	end
	# write data to the target memory through ptrace
	def rewrite_at(addr, data)
		# target must be stopped
		wr = do_ptrace(false) { |ptrace| ptrace.writemem(addr, data) }
		raise "couldn't ptrace_write at #{'%x' % addr}" if not wr
	end
	# read one cache page from the target, via /proc/pid/mem if available,
	# else via ptrace peeks; nil if the address is unmapped/out of reach
	def get_page(addr, len=@pagelength)
		do_ptrace(true) { |ptrace|
			begin
				if readfd and addr < (1<<63)
					# 1<<63: ruby seek = 'too big to fit longlong', linux read = EINVAL
					@readfd.pos = addr
					@readfd.read len
				elsif addr < (1<<(ptrace.host_intsize*8))
					# can reach 1<<64 with peek_data only if ptrace accepts 64bit args
					ptrace.readmem(addr, len)
				end
			rescue Errno::EIO, Errno::ESRCH
				# unmapped page or vanished process
				nil
			end
		}
	end
end
# register read/write access to an ia32 target thread, on top of a PTrace object
class PTraceContext_Ia32 < PTrace
	# layout of the blobs returned by PTRACE_GETREGS / PTRACE_GETFPXREGS
	C_STRUCT = <<EOS
struct user_regs_struct_ia32 {
	unsigned __int32 ebx;
	unsigned __int32 ecx;
	unsigned __int32 edx;
	unsigned __int32 esi;
	unsigned __int32 edi;
	unsigned __int32 ebp;
	unsigned __int32 eax;
	unsigned __int32 ds;
	unsigned __int32 es;
	unsigned __int32 fs;
	unsigned __int32 gs;
	unsigned __int32 orig_eax;
	unsigned __int32 eip;
	unsigned __int32 cs;
	unsigned __int32 eflags;
	unsigned __int32 esp;
	unsigned __int32 ss;
};
struct user_fxsr_struct_ia32 {
	unsigned __int16 cwd;
	unsigned __int16 swd;
	unsigned __int16 twd;
	unsigned __int16 fop;
	unsigned __int32 fip;
	unsigned __int32 fcs;
	unsigned __int32 foo;
	unsigned __int32 fos;
	unsigned __int32 mxcsr;
	unsigned __int32 reserved;
	unsigned __int32 st_space[32]; /* 8*16 bytes for each FP-reg = 128 bytes */
	unsigned __int32 xmm_space[32]; /* 8*16 bytes for each XMM-reg = 128 bytes */
	unsigned __int32 padding[56];
};
EOS
	# share the ptrace channel and cparser, but target a specific thread (tid)
	def initialize(ptrace, pid=ptrace.pid)
		super(ptrace, :dup)
		@pid = pid
		@cp = ptrace.cp
		init
	end
	# build the register lookup tables (cached in class variables):
	# @gpr: registers read through GETREGS, @gpr_peek: through PEEKUSR (debug regs),
	# @gpr_sub: sub-register aliases, @xmm: fields read through GETFPXREGS
	def init
		@gpr = @@gpr_ia32 ||= [:ebx, :ecx, :edx, :esi, :edi, :ebp, :eax,
			:ds, :es, :fs, :gs, :orig_eax, :eip, :cs, :eflags,
			:esp, :ss].inject({}) { |h, r| h.update r => true }
		@gpr_peek = @@gpr_peek_ia32 ||= (0..7).inject({}) { |h, i|
			h.update "dr#{i}".to_sym => REGS_I386["DR#{i}"] }
		@gpr_sub = @@gpr_sub_ia32 ||= gpr_sub_init
		@xmm = @@xmm_ia32 ||= [:cwd, :swd, :twd, :fop, :fip, :fcs, :foo,
			:fos, :mxcsr].inject({}) { |h, r| h.update r => true }
		@cp.parse C_STRUCT if not @cp.toplevel.struct['user_regs_struct_ia32']
		# cached GETREGS/GETFPXREGS structs, invalidated externally
		@gpr_st = @xmm_st = nil
	end
	# :bh => [:ebx, 0xff, 8]
	# XXX similar to Reg.symbolic... DRY
	def gpr_sub_init
		ret = {}
		%w[a b c d].each { |r|
			b = "e#{r}x".to_sym
			ret["#{r}x".to_sym] = [b, 0xffff]
			ret["#{r}l".to_sym] = [b, 0xff]
			ret["#{r}h".to_sym] = [b, 0xff, 8]
		}
		%w[sp bp si di].each { |r|
			b = "e#{r}".to_sym
			ret[r.to_sym] = [b, 0xffff]
		}
		# NOTE(review): presumably lets x64-named code work through the
		# 32bit-debugging-64bit hack (see LinDebugger#hack_x64_32) — confirm
		ret[:orig_rax] = [:orig_eax, 0xffff_ffff]
		ret
	end
	# fetch the GPR blob from the target
	def do_getregs
		st = cp.alloc_c_struct('user_regs_struct_ia32')
		getregs(st)
		st
	end
	# write the (cached) GPR blob back to the target
	def do_setregs(st=@gpr_st)
		setregs(st)
	end
	# fetch the FPU/SSE blob from the target
	def do_getxmm
		st = cp.alloc_c_struct('user_fxsr_struct_ia32')
		getfpxregs(st)
		st
	end
	# write the (cached) FPU/SSE blob back to the target
	def do_setxmm(st=@xmm_st)
		setfpxregs(st)
	end
	# read the value of a register by name (String or Symbol, eg 'eax', :dr7, 'xmm0')
	def get_reg(r)
		r = r.downcase if r == 'ORIG_EAX' or r == 'ORIG_RAX'
		rs = r.to_sym
		if @gpr[rs]
			@gpr_st ||= do_getregs
			@gpr_st[rs]
		elsif o = @gpr_peek[rs]
			# debug registers live in the USER area
			peekusr(o)
		elsif o = @gpr_sub[rs]
			# sub-register: mask/shift the parent register
			v = get_reg(o[0])
			v >>= o[2] if o[2]
			v &= o[1]
		elsif @xmm[rs]
			@xmm_st ||= do_getxmm
			@xmm_st[rs]
		else
			case r.to_s
			when /^st(\d?)$/i
				i = $1.to_i
				@xmm_st ||= do_getxmm
				fu = @xmm_st.st_space
				# 80bit x87 value approximated through a 64bit double
				[fu[4*i], fu[4*i+1], fu[4*i+2]].pack('L*').unpack('D').first # XXX
			when /^mmx?(\d)$/i
				i = $1.to_i
				@xmm_st ||= do_getxmm
				fu = @xmm_st.st_space
				# mmN aliases the low 64 bits of stN
				fu[4*i] | (fu[4*i + 1] << 32)
			when /^xmm(\d+)$/i
				i = $1.to_i
				@xmm_st ||= do_getxmm
				fu = @xmm_st.xmm_space
				fu[4*i] | (fu[4*i + 1] << 32) | (fu[4*i + 2] << 64) | (fu[4*i + 3] << 96)
			# TODO when /^ymm(\d+)$/i
			else raise "unknown register name #{r}"
			end
		end
	end
	# set the value of a register by name; flushes the change to the target immediately
	def set_reg(r, v)
		r = r.downcase if r == 'ORIG_EAX' or r == 'ORIG_RAX'
		rs = r.to_sym
		if @gpr[rs]
			@gpr_st ||= do_getregs
			@gpr_st[rs] = v
			do_setregs
		elsif o = @gpr_peek[rs]
			pokeusr(o, v)
		elsif o = @gpr_sub[rs]
			# sub-register: merge the new bits into the parent register
			vo = get_reg(o[0])
			msk = o[1]
			v &= o[1]
			if o[2]
				msk <<= o[2]
				v <<= o[2]
			end
			v |= vo & ~msk
			set_reg(o[0], v)
		elsif @xmm[rs]
			@xmm_st ||= do_getxmm
			@xmm_st[rs] = v
			do_setxmm
		else
			case r.to_s
			when /^st(\d?)$/i
				i = $1.to_i
				@xmm_st ||= do_getxmm
				fu = @xmm_st.st_space
				fu[4*i], fu[4*i+1], fu[4*i+2] = [v, -1].pack('DL').unpack('L*') # XXX
				do_setxmm
			when /^mmx?(\d)$/i
				i = $1.to_i
				@xmm_st ||= do_getxmm
				fu = @xmm_st.st_space
				fu[4*i] = v & 0xffff_ffff
				fu[4*i + 1] = (v >> 32) & 0xffff_ffff
				do_setxmm
			when /^xmm(\d+)$/i
				i = $1.to_i
				@xmm_st ||= do_getxmm
				fu = @xmm_st.xmm_space
				fu[4*i] = v & 0xffff_ffff
				fu[4*i + 1] = (v >> 32) & 0xffff_ffff
				fu[4*i + 2] = (v >> 64) & 0xffff_ffff
				fu[4*i + 3] = (v >> 96) & 0xffff_ffff
				do_setxmm
			# TODO when /^ymm(\d+)$/i
			else raise "unknown register name #{r}"
			end
		end
	end
end
# register access for an x64 target thread; reuses the ia32 context logic
# with x64-specific register tables and struct layouts
class PTraceContext_X64 < PTraceContext_Ia32
	# layout of the blobs returned by PTRACE_GETREGS / PTRACE_GETFPREGS
	C_STRUCT = <<EOS
struct user_regs_struct_x64 {
	unsigned __int64 r15;
	unsigned __int64 r14;
	unsigned __int64 r13;
	unsigned __int64 r12;
	unsigned __int64 rbp;
	unsigned __int64 rbx;
	unsigned __int64 r11;
	unsigned __int64 r10;
	unsigned __int64 r9;
	unsigned __int64 r8;
	unsigned __int64 rax;
	unsigned __int64 rcx;
	unsigned __int64 rdx;
	unsigned __int64 rsi;
	unsigned __int64 rdi;
	unsigned __int64 orig_rax;
	unsigned __int64 rip;
	unsigned __int64 cs;
	unsigned __int64 rflags;
	unsigned __int64 rsp;
	unsigned __int64 ss;
	unsigned __int64 fs_base;
	unsigned __int64 gs_base;
	unsigned __int64 ds;
	unsigned __int64 es;
	unsigned __int64 fs;
	unsigned __int64 gs;
};
struct user_i387_struct_x64 {
	unsigned __int16 cwd;
	unsigned __int16 swd;
	unsigned __int16 twd; /* Note this is not the same as the 32bit/x87/FSAVE twd */
	unsigned __int16 fop;
	unsigned __int64 rip;
	unsigned __int64 rdp;
	unsigned __int32 mxcsr;
	unsigned __int32 mxcsr_mask;
	unsigned __int32 st_space[32]; /* 8*16 bytes for each FP-reg = 128 bytes */
	unsigned __int32 xmm_space[64]; /* 16*16 bytes for each XMM-reg = 256 bytes */
	unsigned __int32 padding[24];
	// YMM ?
};
EOS
	# build the x64 register lookup tables (cached in class variables)
	def init
		@gpr = @@gpr_x64 ||= [:r15, :r14, :r13, :r12, :rbp, :rbx, :r11,
			:r10, :r9, :r8, :rax, :rcx, :rdx, :rsi, :rdi, :orig_rax,
			:rip, :cs, :rflags, :rsp, :ss, :fs_base, :gs_base, :ds,
			:es, :fs, :gs].inject({}) { |h, r| h.update r => true }
		@gpr_peek = @@gpr_peek_x64 ||= (0..7).inject({}) { |h, i|
			h.update "dr#{i}".to_sym => REGS_X86_64["DR#{i}"] }
		@gpr_sub = @@gpr_sub_x64 ||= gpr_sub_init
		@xmm = @@xmm_x64 ||= [:cwd, :swd, :twd, :fop, :rip, :rdp, :mxcsr,
			:mxcsr_mask].inject({}) { |h, r| h.update r => true }
		@cp.parse C_STRUCT if not @cp.toplevel.struct['user_regs_struct_x64']
		@gpr_st = @xmm_st = nil
	end
	# sub-register alias map, eg :eax => [:rax, 0xffff_ffff]
	# keys MUST be symbols: get_reg/set_reg look registers up by r.to_sym
	# (the r8..r15 aliases used string keys and could never be resolved)
	def gpr_sub_init
		ret = {}
		%w[a b c d].each { |r|
			b = "r#{r}x".to_sym
			ret["e#{r}x".to_sym] = [b, 0xffff_ffff]
			ret[ "#{r}x".to_sym] = [b, 0xffff]
			ret[ "#{r}l".to_sym] = [b, 0xff]
			ret[ "#{r}h".to_sym] = [b, 0xff, 8]
		}
		%w[sp bp si di].each { |r|
			b = "r#{r}".to_sym
			ret["e#{r}".to_sym] = [b, 0xffff_ffff]
			ret[ "#{r}".to_sym] = [b, 0xffff]
			ret["#{r}l".to_sym] = [b, 0xff]
		}
		(8..15).each { |i|
			b = "r#{i}".to_sym
			ret["r#{i}d".to_sym] = [b, 0xffff_ffff]
			ret["r#{i}w".to_sym] = [b, 0xffff]
			ret["r#{i}b".to_sym] = [b, 0xff]
		}
		ret[:eip] = [:rip, 0xffff_ffff]
		ret[:eflags] = [:rflags, 0xffff_ffff]
		ret[:orig_eax] = [:orig_rax, 0xffff_ffff]
		ret
	end
	# fetch the GPR blob from the target
	def do_getregs
		st = cp.alloc_c_struct('user_regs_struct_x64')
		getregs(st)
		st
	end
	# write the (cached) GPR blob back to the target
	def do_setregs(st=@gpr_st)
		setregs(st)
	end
	# fetch the FPU/SSE blob (x64 uses GETFPREGS, not GETFPXREGS)
	def do_getxmm
		st = cp.alloc_c_struct('user_i387_struct_x64')
		getfpregs(st)
		st
	end
	# write the (cached) FPU/SSE blob back to the target
	def do_setxmm(st=@xmm_st)
		setfpregs(st)
	end
end
# waitpid flags missing from the core Process module on some ruby versions:
# WALL waits for all children, WCLONE for clone()d (thread) children
module ::Process
	WALL = 0x40000000 if not defined? WALL
	WCLONE = 0x80000000 if not defined? WCLONE
end
# this class implements a high-level API over the ptrace debugging primitives
class LinDebugger < Debugger
# ptrace is per-process or per-thread ?
# per-pid/per-tid state: ptrace channel, signal to deliver on resume, PaX flag, syscall filter
attr_accessor :ptrace, :continuesignal, :has_pax_mprotect, :target_syscall
# user callbacks invoked on syscall-stop / branch-stop / execve events
attr_accessor :callback_syscall, :callback_branch, :callback_exec
# create a debugger; pidpath is either a pid (attach) or a binary path (spawn)
# the optional block is passed to create_process (runs in the child pre-exec)
def initialize(pidpath=nil, &b)
	super()
	# these attributes are swapped in/out when switching current pid/tid
	@pid_stuff_list << :has_pax_mprotect << :ptrace << :breaking << :os_process
	@tid_stuff_list << :continuesignal << :saved_csig << :ctx << :target_syscall
	# by default, break on all signals except SIGWINCH (terminal resize notification)
	@pass_all_exceptions = lambda { |e| e[:signal] == 'WINCH' }
	@callback_syscall = lambda { |i| log "syscall #{i[:syscall]}" }
	@callback_exec = lambda { |i| log "execve #{os_process.path}" }
	return if not pidpath
	# numeric argument => pid to attach, anything else => path to spawn
	t = begin; Integer(pidpath)
	rescue ArgumentError, TypeError
	end
	t ? attach(t) : create_process(pidpath, &b)
end
# identifier for this debugger backend
def shortname
	'lindbg'
end
# attach to a running process and all its threads
# do_attach is forwarded to PTrace.new (:attach, or false to skip the actual attach)
def attach(pid, do_attach=:attach)
	pt = PTrace.new(pid, do_attach)
	set_context(pt.pid, pt.pid) # swapout+init_newpid
	log "attached #@pid"
	# the main thread is already attached; attach every other thread
	list_threads.each { |tid| attach_thread(tid) if tid != @pid }
	set_tid @pid
end
# create a process and debug it
# if given a block, the block is run in the context of the ruby subprocess
# after the fork() and before exec()ing the target binary
# you can use it to eg tweak file descriptors:
#  tg_stdin_r, tg_stdin_w = IO.pipe
#  create_process('/bin/cat') { tg_stdin_w.close ; $stdin.reopen(tg_stdin_r) }
#  tg_stdin_w.write 'lol'
def create_process(path, &b)
	pt = PTrace.new(path, :create, &b)
	# TODO save path, allow restart etc
	set_context(pt.pid, pt.pid) # swapout+init_newpid
	log "attached #@pid"
end
# setup @cpu and the ptrace channel for a newly traced pid
def initialize_cpu
	@cpu = os_process.cpu
	# need to init @ptrace here, before init_dasm calls gui.swapin XXX this stinks
	@ptrace = PTrace.new(@pid, false)
	# 64bit target but ptrace exposes only the ia32 register offsets:
	# we are a 32bit debugger on a 64bit target
	if @cpu.size == 64 and @ptrace.reg_off['EAX']
		hack_x64_32
	end
	set_tid @pid
	set_thread_options
end
# setup the target memory proxy for a newly traced pid
def initialize_memory
	@memory = os_process.memory = LinuxRemoteString.new(@pid, 0, nil, self)
end
# the LinOS::Process for the current pid (cached per-pid)
def os_process
	@os_process ||= LinOS.open_process(@pid)
end
# tids sharing the current process address space
def list_threads
	os_process.threads
end
# all processes visible in /proc
def list_processes
	LinOS.list_processes
end
# true if pid still exists
def check_pid(pid)
	LinOS.check_process(pid)
end
# [addr, len, perms, file] memory mappings of the current process
def mappings
	os_process.mappings
end
# loaded modules of the current process
def modules
	os_process.modules
end
# We're a 32bit process debugging a 64bit target.
# The ptrace kernel interface we use only allows a 32bit-like view of the
# target, so advertise the ia32 debug registers (the only ones we can
# access) on the cpu object, while still decoding x64 instructions
# (whose addresses fit below 4G).
def hack_x64_32
	log "WARNING: debugging a 64bit process from a 32bit debugger is a very bad idea !"
	ia32 = Ia32.new
	# overwrite the x64 debugger-register descriptions with the ia32 ones
	%w[pc sp flags list size].each { |desc|
		@cpu.instance_variable_set("@dbg_register_#{desc}", ia32.send("dbg_register_#{desc}"))
	}
end
# attach a thread of the current process
def attach_thread(tid)
	set_tid tid
	@ptrace.pid = tid
	@ptrace.attach
	@state = :stopped
	# reap the attach-STOP before issuing further ptrace requests
	::Process.waitpid(tid, ::Process::WALL)
	log "attached thread #{tid}"
	set_thread_options
rescue Errno::ESRCH
	# raced, thread quitted already
	del_tid
end
# set the debugee ptrace options (notify clone/exec/exit, and fork/vfork depending on @trace_children)
def set_thread_options
	opts = %w[TRACESYSGOOD TRACECLONE TRACEEXEC TRACEEXIT]
	opts += %w[TRACEFORK TRACEVFORK TRACEVFORKDONE] if trace_children
	@ptrace.pid = @tid
	@ptrace.setoptions(*opts)
end
# update the current pid relative to tracing children (@trace_children only effects newly traced pid/tid)
def do_trace_children
	each_tid { set_thread_options }
end
# discard cached state (registers) on target state change
def invalidate
	@ctx = nil
	super()
end
# current thread register values accessor
# lazily creates the PTraceContext matching the host cpu
def ctx
	@ctx ||= case @ptrace.host_csn
	when 'ia32'; PTraceContext_Ia32.new(@ptrace, @tid)
	when 'x64'; PTraceContext_X64.new(@ptrace, @tid)
	# replaced a joke error string with an actionable message
	else raise "unsupported host cpu #{@ptrace.host_csn}"
	end
end
# read a register of the current thread; 0 if the thread is running or gone
def get_reg_value(r)
	return 0 if @state != :stopped
	ctx.get_reg(r)
rescue Errno::ESRCH
	# thread vanished under us
	0
end
# write a register of the current thread
def set_reg_value(r, v)
	ctx.set_reg(r, v)
end
# decode a Process::Status from waitpid for the current tid and dispatch
# to the evt_* handlers (exit, signal, syscall-stop, PTRACE_EVENT_*, ...)
def update_waitpid(status)
	invalidate
	@continuesignal = 0
	@state = :stopped # allow get_reg (for eg pt_syscall)
	info = { :status => status }
	if status.exited?
		info.update :exitcode => status.exitstatus
		if @tid == @pid # XXX
			evt_endprocess info
		else
			evt_endthread info
		end
	elsif status.signaled?
		info.update :signal => (PTrace::SIGNAL[status.termsig] || status.termsig)
		if @tid == @pid
			evt_endprocess info
		else
			evt_endthread info
		end
	elsif status.stopped?
		sig = status.stopsig & 0x7f
		signame = PTrace::SIGNAL[sig]
		if signame == 'TRAP'
			if status.stopsig & 0x80 > 0
				# TRACESYSGOOD sets bit 0x80 to mark syscall-stops
				# XXX int80 in x64 => syscallnr32 ?
				evt_syscall info.update(:syscall => @ptrace.syscallnr[get_reg_value(@ptrace.syscallreg)])
			elsif (status >> 16) > 0
				# PTRACE_EVENT_* notification, event number in bits 16+
				case PTrace::WAIT_EXTENDEDRESULT[status >> 16]
				when 'EVENT_FORK', 'EVENT_VFORK'
					# parent notification of a fork
					# child receives STOP (may have already happened)
					#cld = @ptrace.geteventmsg
					resume_badbreak
				when 'EVENT_CLONE'
					#cld = @ptrace.geteventmsg
					resume_badbreak
				when 'EVENT_EXIT'
					@ptrace.pid = @tid
					info.update :exitcode => @ptrace.geteventmsg
					if @tid == @pid
						evt_endprocess info
					else
						evt_endthread info
					end
				when 'EVENT_VFORK_DONE'
					# fixed: was 'EVENT_VFORKDONE', which is not a
					# WAIT_EXTENDEDRESULT key, so the event never matched
					# and the thread was left stopped
					resume_badbreak
				when 'EVENT_EXEC'
					evt_exec info
				end
			else
				# plain SIGTRAP: inspect siginfo to find the cause
				@ptrace.pid = @tid
				si = @ptrace.getsiginfo
				case si.si_code
				when PTrace::SIGINFO['BRKPT'],
				     PTrace::SIGINFO['KERNEL'] # \xCC prefer KERNEL to BRKPT
					evt_bpx
				when PTrace::SIGINFO['TRACE']
					evt_singlestep # singlestep/singleblock
				when PTrace::SIGINFO['BRANCH']
					evt_branch # XXX BTS?
				when PTrace::SIGINFO['HWBKPT']
					evt_hwbp
				else
					@saved_csig = @continuesignal = sig
					info.update :signal => signame, :type => "SIG#{signame}"
					evt_exception info
				end
			end
		elsif signame == 'STOP' and @info == 'new'
			# new thread break on creation (eg after fork + TRACEFORK)
			if @pid == @tid
				attach(@pid, false)
				evt_newprocess info
			else
				evt_newthread info
			end
		elsif signame == 'STOP' and @breaking
			# STOP we sent ourselves to implement #break
			@state = :stopped
			@info = 'break'
			@breaking.call if @breaking.kind_of? Proc
			@breaking = nil
		else
			@saved_csig = @continuesignal = sig
			info.update :signal => signame, :type => "SIG#{signame}"
			if signame == 'SEGV'
				# need more data on access violation (for bpm)
				info.update :type => 'access violation'
				@ptrace.pid = @tid
				si = @ptrace.getsiginfo
				access = case si.si_code
					when PTrace::SIGINFO['MAPERR']; :r # XXX write access to unmapped => ?
					when PTrace::SIGINFO['ACCERR']; :w
					end
				info.update :fault_addr => si.si_addr, :fault_access => access
			end
			evt_exception info
		end
	else
		log "unknown wait status #{status.inspect}"
		evt_exception info.update(:type => "unknown wait #{status.inspect}")
	end
end
def set_tid_findpid(tid)
 # switch to thread tid, first switching the current pid if the thread
 # belongs to another traced process
 return if tid == @tid
 if tid != @pid and !@tid_stuff[tid]
  # not a thread of the current process: search known pids, then /proc
  if kv = @pid_stuff.find { |k, v| v[:tid_stuff] and v[:tid_stuff][tid] }
   set_pid kv[0]
  elsif pr = list_processes.find { |p| p.threads.include?(tid) }
   set_pid pr.pid
  end
 end
 set_tid tid
end
def do_check_target
 # non-blocking poll for a waitable child (WNOHANG); consumes and
 # dispatches one event, returns true if one was found, nil otherwise
 return unless t = ::Process.waitpid(-1, ::Process::WNOHANG | ::Process::WALL)
 # XXX all threads may have stopped, wait them now ?
 set_tid_findpid t
 update_waitpid $?
 true
rescue ::Errno::ECHILD
end
def do_wait_target
 # blocking wait for the next event on any traced thread
 t = ::Process.waitpid(-1, ::Process::WALL)
 set_tid_findpid t
 update_waitpid $?
rescue ::Errno::ECHILD
end
def do_continue
 # resume the current thread, forwarding @continuesignal if non-zero
 @state = :running
 @ptrace.pid = tid
 @ptrace.cont(@continuesignal)
end
def do_singlestep(*a)
 # execute one instruction in the current thread
 @state = :running
 @ptrace.pid = tid
 @ptrace.singlestep(@continuesignal)
end
# use the PT_SYSCALL to break on next syscall
# regexp allowed to wait a specific syscall
def syscall(arg=nil)
 # arg is a regexp source matched against the syscall name (nil = any)
 arg = nil if arg and arg.strip == ''
 if b = check_breakpoint_cause and b.hash_shared.find { |bb| bb.state == :active }
  # the current insn holds an active breakpoint: step over it first
  singlestep_bp(b) {
   next if not check_pre_run(:syscall, arg)
   @target_syscall = arg
   @state = :running
   @ptrace.pid = @tid
   @ptrace.syscall(@continuesignal)
  }
 else
  return if not check_pre_run(:syscall, arg)
  @target_syscall = arg
  @state = :running
  @ptrace.pid = @tid
  @ptrace.syscall(@continuesignal)
 end
end
def syscall_wait(*a, &b)
 # syscall() + blocking wait for the resulting event
 syscall(*a, &b)
 wait_target
end
# use the PT_SINGLEBLOCK to execute until the next branch
def singleblock
 # record as singlestep to avoid evt_singlestep -> evt_exception
 # step or block doesn't matter much here anyway
 if b = check_breakpoint_cause and b.hash_shared.find { |bb| bb.state == :active }
  # the current insn holds an active breakpoint: step over it first
  singlestep_bp(b) {
   next if not check_pre_run(:singlestep)
   @state = :running
   @ptrace.pid = @tid
   @ptrace.singleblock(@continuesignal)
  }
 else
  return if not check_pre_run(:singlestep)
  @state = :running
  @ptrace.pid = @tid
  @ptrace.singleblock(@continuesignal)
 end
end
def singleblock_wait(*a, &b)
 # singleblock() + blocking wait for the resulting event
 singleblock(*a, &b)
 wait_target
end
# woke up from a PT_SYSCALL
def evt_syscall(info={})
 @state = :stopped
 @info = "syscall #{info[:syscall]}"
 callback_syscall[info] if callback_syscall
 # when waiting for a specific syscall, transparently resume on others
 if @target_syscall and info[:syscall] !~ /^#@target_syscall$/i
  resume_badbreak
 else
  @target_syscall = nil
 end
end
# SIGTRAP + SIGINFO_TRAP_BRANCH = ?
def evt_branch(info={})
 @state = :stopped
 @info = "branch"
 callback_branch[info] if callback_branch
end
# called during sys_execve in the new process
def evt_exec(info={})
 @state = :stopped
 @info = "#{info[:exec]} execve"
 # the process image was replaced: rescan everything about the pid
 initialize_newpid
 # XXX will receive a SIGTRAP, could hide it..
 callback_exec[info] if callback_exec
 # calling continue() here will loop back to TRAP+INFO_EXEC
end
def break(&b)
 # ask a running target to stop by sending SIGSTOP; the optional block
 # runs once update_waitpid sees the stop
 @breaking = b || true
 kill 'STOP'
end
def kill(sig=nil)
 # send a signal to the current thread (default KILL, see sig2signr)
 return if not tid
 # XXX tkill ?
 ::Process.kill(sig2signr(sig), tid)
rescue Errno::ESRCH
end
# Forward (or swallow, when bool is falsy) the last signal received by the
# target: the saved signal number becomes the signal delivered on the next
# continue, 0 meaning "deliver nothing".
def pass_current_exception(bool=true)
 @continuesignal = bool ? @saved_csig : 0
end
# Normalize a signal spec to its numeric value.
# Accepts nil/'' (=> 9, SIGKILL), an Integer (returned as-is), or a String
# name with optional 'SIG'/'SIG_' prefix; raises on anything else.
def sig2signr(sig)
 return 9 if sig.nil? || sig == ''
 return sig if sig.kind_of?(Integer)
 if sig.kind_of?(String)
  name = sig.upcase.sub(/^SIG_?/, '')
  PTrace::SIGNAL[name] || Integer(name)
 else
  raise "unhandled signal #{sig.inspect}"
 end
end
# stop debugging the current process
def detach
 if @state == :running
  # must be stopped so we can rm bps
  self.break { detach }
  mypid = @pid
  wait_target
  # after syscall(), wait will return once for interrupted syscall,
  # and we need to wait more for the break callback to kick in
  if @pid == mypid and @state == :stopped and @info =~ /syscall/
   do_continue
   check_target
  end
  return
 end
 # target is stopped: remove breakpoints, detach every thread, forget pid
 del_all_breakpoints
 each_tid {
  @ptrace.pid = @tid
  @ptrace.detach rescue nil
  @delete_thread = true
 }
 del_pid
end
def bpx(addr, *a, &b)
 # on PaX-style kernels software breakpoints cannot be written:
 # transparently fall back to a hardware breakpoint
 return hwbp(addr, :x, 1, *a, &b) if @has_pax_mprotect
 super(addr, *a, &b)
end
# handles exceptions from PaX-style mprotect restrictions on bpx,
# transmute them to hwbp on the fly
def do_enable_bp(b)
 super(b)
rescue ::Errno::EIO
 if b.type == :bpx
  @memory[b.address, 1] # check if we can read
  # didn't raise: it's a PaX-style config
  @has_pax_mprotect = true
  b.del
  hwbp(b.address, :x, 1, b.oneshot, b.condition, &b.action)
  log 'PaX: bpx->hwbp'
 else raise
 end
end
def ui_command_setup(ui)
 # register lindebug UI commands specific to the ptrace backend
 ui.new_command('syscall', 'waits for the target to do a syscall using PT_SYSCALL') { |arg| ui.wrap_run { syscall arg } }
 ui.keyboard_callback[:f6] = lambda { ui.wrap_run { syscall } }
 ui.new_command('signal_cont', 'set/get the continue signal (0 == unset)') { |arg|
  case arg.to_s.strip
  when ''; log "#{@continuesignal} (#{PTrace::SIGNAL[@continuesignal]})"
  else @continuesignal = sig2signr(arg)
  end
 }
end
end
end
# lindbg: fix wait_target after thread_attach
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
require 'metasm/os/main'
require 'metasm/debug'
module Metasm
class PTrace
attr_accessor :buf, :pid
def self.open(target)
 # create a PTrace on target; with a block, yields it and guarantees
 # a detach afterwards
 ptrace = new(target)
 return ptrace if not block_given?
 begin
  yield ptrace
 ensure
  ptrace.detach
 end
end
# calls PTRACE_TRACEME on the current (ruby) process
def self.traceme
 new(::Process.pid, false).traceme
end
# creates a ptraced process (target = path)
# or opens a running process (target = pid)
# values for do_attach:
# :create => always fork+traceme+exec+wait
# :attach => always attach
# false/nil => same as attach, without actually calling PT_ATTACH (useful when the ruby process is already tracing pid)
# default/anything else: try to attach if pid is numeric, else create
def initialize(target, do_attach=true, &b)
 case do_attach
 when :create
  init_create(target, &b)
 when :attach
  init_attach(target)
 when :dup
  # clone an existing PTrace object (shares the target cpu description)
  raise ArgumentError unless target.kind_of?(PTrace)
  @pid = target.pid
  tweak_for_pid(@pid, target.tgcpu) # avoid re-parsing /proc/self/exe
 when nil, false
  # pid already traced by us: only set up the interface
  @pid = Integer(target)
  tweak_for_pid(@pid)
 else
  # guess: numeric target => attach, else spawn a new process
  t = begin; Integer(target)
  rescue ArgumentError, TypeError
  end
  t ? init_attach(t) : init_create(target, &b)
 end
end
def init_attach(target)
 # PTRACE_ATTACH to a running pid and wait for the initial stop
 @pid = Integer(target)
 tweak_for_pid(@pid)
 attach
 wait
 puts "Ptrace: attached to #@pid" if $DEBUG
end
def init_create(target, &b)
 # fork + TRACEME + exec the target commandline; b runs in the child
 # just before the exec (eg to set up fds)
 if not @pid = ::Process.fork
  tweak_for_pid(::Process.pid)
  traceme
  b.call if b
  ::Process.exec(*target)
  exit!(0)
 end
 wait
 # child exited instead of stopping on exec => the exec failed
 raise "could not exec #{target}" if $?.exited?
 tweak_for_pid(@pid)
 puts "Ptrace: attached to new #@pid" if $DEBUG
end
def wait
 # wait for any state change of the traced pid (incl. clone threads)
 ::Process.wait(@pid, ::Process::WALL)
end
attr_accessor :reg_off, :intsize, :syscallnr, :syscallreg
attr_accessor :packint, :packuint, :host_intsize, :host_syscallnr
attr_accessor :tgcpu
@@sys_ptrace = {}
# setup variables according to the target (ptrace interface, syscall nrs, ...)
def tweak_for_pid(pid=@pid, tgcpu=nil)
 # use these for our syscalls PTRACE
 @@host_csn ||= LinOS.open_process(::Process.pid).cpu.shortname
 case @@host_csn
 when 'ia32'
  @packint = 'l'
  @packuint = 'L'
  @host_intsize = 4
  @host_syscallnr = SYSCALLNR_I386
  @reg_off = REGS_I386
 when 'x64'
  @packint = 'q'
  @packuint = 'Q'
  @host_intsize = 8
  @host_syscallnr = SYSCALLNR_X86_64
  @reg_off = REGS_X86_64
 else raise 'unsupported architecture'
 end
 @tgcpu = tgcpu || LinOS.open_process(pid).cpu
 # use these to interpret the child state
 case @tgcpu.shortname
 when 'ia32'
  @syscallreg = 'ORIG_EAX'
  @syscallnr = SYSCALLNR_I386
  @intsize = 4
 when 'x64'
  @syscallreg = 'ORIG_RAX'
  @syscallnr = SYSCALLNR_X86_64
  @intsize = 8
 else raise 'unsupported target architecture'
 end
 # buffer used in ptrace syscalls
 @buf = [0].pack(@packint)
 # native ptrace(2) stub, cached per host syscall number
 @sys_ptrace = @@sys_ptrace[@host_syscallnr['ptrace']] ||= setup_sys_ptrace(@host_syscallnr['ptrace'])
end
def setup_sys_ptrace(sysnr)
 # build a tiny native stub invoking the raw ptrace(2) syscall, as ruby
 # itself has no binding for it; returns the DynLdr subclass holding it
 moo = Class.new(DynLdr)
 case @@host_csn
 when 'ia32'
  # XXX compat lin2.4 ?
  asm = <<EOS
#define off 3*4
push ebx
push esi
mov eax, #{sysnr}
mov ebx, [esp+off]
mov ecx, [esp+off+4]
mov edx, [esp+off+8]
mov esi, [esp+off+12]
call gs:[10h]
pop esi
pop ebx
ret
EOS
 when 'x64'
  asm = <<EOS
#define off 3*8
mov rax, #{sysnr}
//mov rdi, rdi
//mov rsi, rdi
//mov rdx, rdx
mov r10, rcx
syscall
ret
EOS
 else raise 'unsupported target architecture'
 end
 moo.new_func_asm 'long ptrace(unsigned long, unsigned long, unsigned long, unsigned long)', asm
 moo
end
def host_csn; @@host_csn end
def dup
 # new PTrace on the same pid, sharing the cpu description
 self.class.new(self, :dup)
end
def str_ptr(str)
 # native address of the ruby string buffer, as an integer
 [str].pack('P').unpack(@packint).first
end
# interpret the value turned as an unsigned long
def bufval
 @buf.unpack(@packint).first
end
# reads a memory range
def readmem(off, len)
 # PEEKDATA reads whole host words: align the start first
 decal = off % @host_intsize
 buf = ''
 if decal > 0
  off -= decal
  peekdata(off)
  off += @host_intsize
  buf << @buf[decal...@host_intsize]
 end
 offend = off + len
 while off < offend
  peekdata(off)
  buf << @buf[0, @host_intsize]
  off += @host_intsize
 end
 # we may have read a little past the end: trim to the requested length
 buf[0, len]
end
def writemem(off, str)
 str.force_encoding('binary') if str.respond_to?(:force_encoding)
 # POKEDATA writes whole words: pad str on both ends with existing
 # target memory so writes stay word-aligned
 decal = off % @host_intsize
 if decal > 0
  off -= decal
  peekdata(off)
  str = @buf[0...decal] + str
 end
 decal = str.length % @host_intsize
 if decal > 0
  peekdata(off+str.length-decal)
  str += @buf[decal...@host_intsize]
 end
 i = 0
 while i < str.length
  pokedata(off+i, str[i, @host_intsize])
  i += @host_intsize
 end
 true
end
# linux/ptrace.h
COMMAND = {
:TRACEME => 0, :PEEKTEXT => 1,
:PEEKDATA => 2, :PEEKUSR => 3,
:POKETEXT => 4, :POKEDATA => 5,
:POKEUSR => 6, :CONT => 7,
:KILL => 8, :SINGLESTEP => 9,
:ATTACH => 16, :DETACH => 17,
:SYSCALL => 24,
# arch/x86/include/ptrace-abi.h
:GETREGS => 12, :SETREGS => 13,
:GETFPREGS => 14, :SETFPREGS => 15,
:GETFPXREGS => 18, :SETFPXREGS => 19,
:OLDSETOPTIONS => 21, :GET_THREAD_AREA => 25,
:SET_THREAD_AREA => 26, :ARCH_PRCTL => 30,
:SYSEMU => 31, :SYSEMU_SINGLESTEP=> 32,
:SINGLEBLOCK => 33,
# 0x4200-0x4300 are reserved for architecture-independent additions.
:SETOPTIONS => 0x4200, :GETEVENTMSG => 0x4201,
:GETSIGINFO => 0x4202, :SETSIGINFO => 0x4203
}
OPTIONS = {
# options set using PTRACE_SETOPTIONS
'TRACESYSGOOD' => 0x01, 'TRACEFORK' => 0x02,
'TRACEVFORK' => 0x04, 'TRACECLONE' => 0x08,
'TRACEEXEC' => 0x10, 'TRACEVFORKDONE'=> 0x20,
'TRACEEXIT' => 0x40, 'TRACESECCOMP' => 0x80,
}
WAIT_EXTENDEDRESULT = {
# Wait extended result codes for the above trace options.
'EVENT_FORK' => 1, 'EVENT_VFORK' => 2,
'EVENT_CLONE' => 3, 'EVENT_EXEC' => 4,
'EVENT_VFORK_DONE' => 5, 'EVENT_EXIT' => 6,
'EVENT_SECCOMP' => 7,
}
WAIT_EXTENDEDRESULT.update WAIT_EXTENDEDRESULT.invert
# block trace
BTS_O = { 'TRACE' => 1, 'SCHED' => 2, 'SIGNAL' => 4, 'ALLOC' => 8 }
BTS = { 'CONFIG' => 40, 'STATUS' => 41, 'SIZE' => 42,
'GET' => 43, 'CLEAR' => 44, 'DRAIN' => 45 }
REGS_I386 = {
'EBX' => 0, 'ECX' => 1, 'EDX' => 2, 'ESI' => 3,
'EDI' => 4, 'EBP' => 5, 'EAX' => 6, 'DS' => 7,
'ES' => 8, 'FS' => 9, 'GS' => 10, 'ORIG_EAX' => 11,
'EIP' => 12, 'CS' => 13, 'EFL' => 14, 'UESP'=> 15,
'EFLAGS' => 14, 'ESP' => 15,
'SS' => 16,
# from ptrace.c in kernel source & asm-i386/user.h
'DR0' => 63, 'DR1' => 64, 'DR2' => 65, 'DR3' => 66,
'DR4' => 67, 'DR5' => 68, 'DR6' => 69, 'DR7' => 70
}
REGS_X86_64 = {
'R15' => 0, 'R14' => 1, 'R13' => 2, 'R12' => 3,
'RBP' => 4, 'RBX' => 5, 'R11' => 6, 'R10' => 7,
'R9' => 8, 'R8' => 9, 'RAX' => 10, 'RCX' => 11,
'RDX' => 12, 'RSI' => 13, 'RDI' => 14, 'ORIG_RAX' => 15,
'RIP' => 16, 'CS' => 17, 'RFLAGS' => 18, 'RSP' => 19,
'SS' => 20, 'FS_BASE' => 21, 'GS_BASE' => 22, 'DS' => 23,
'ES' => 24, 'FS' => 25, 'GS' => 26,
#'FP_VALID' => 27,
#'387_XWD' => 28, '387_RIP' => 29, '387_RDP' => 30, '387_MXCSR' => 31,
#'FP0' => 32, 'FP1' => 34, 'FP2' => 36, 'FP3' => 38,
#'FP4' => 40, 'FP5' => 42, 'FP6' => 44, 'FP7' => 46,
#'XMM0' => 48, 'XMM1' => 52, 'XMM2' => 56, 'XMM3' => 60,
#'XMM4' => 64, 'XMM5' => 68, 'XMM6' => 72, 'XMM7' => 76,
#'FPAD0' => 80, 'FPAD11' => 91,
#'TSZ' => 92, 'DSZ' => 93, 'SSZ' => 94, 'CODE' => 95,
#'STK' => 96, 'SIG' => 97, 'PAD' => 98, 'U_AR0' => 99,
#'FPPTR' => 100, 'MAGIC' => 101, 'COMM0' => 102, 'COMM1' => 103,
#'COMM2' => 104, 'COMM3' => 105,
'DR0' => 106, 'DR1' => 107, 'DR2' => 108, 'DR3' => 109,
'DR4' => 110, 'DR5' => 111, 'DR6' => 112, 'DR7' => 113,
#'ERROR_CODE' => 114, 'FAULT_ADDR' => 115
}
SYSCALLNR_I386 = %w[restart_syscall exit fork read write open close waitpid creat link unlink execve chdir time
mknod chmod lchown break oldstat lseek getpid mount umount setuid getuid stime ptrace alarm oldfstat
pause utime stty gtty access nice ftime sync kill rename mkdir rmdir dup pipe times prof brk setgid
getgid signal geteuid getegid acct umount2 lock ioctl fcntl mpx setpgid ulimit oldolduname umask
chroot ustat dup2 getppid getpgrp setsid sigaction sgetmask ssetmask setreuid setregid sigsuspend
sigpending sethostname setrlimit getrlimit getrusage gettimeofday settimeofday getgroups setgroups
select symlink oldlstat readlink uselib swapon reboot readdir mmap munmap truncate ftruncate fchmod
fchown getpriority setpriority profil statfs fstatfs ioperm socketcall syslog setitimer getitimer
stat lstat fstat olduname iopl vhangup idle vm86old wait4 swapoff sysinfo ipc fsync sigreturn
clone setdomainname uname modify_ldt adjtimex mprotect sigprocmask create_module init_module
delete_module get_kernel_syms quotactl getpgid fchdir bdflush sysfs personality afs_syscall setfsuid
setfsgid _llseek getdents _newselect flock msync readv writev getsid fdatasync _sysctl mlock
munlock mlockall munlockall sched_setparam sched_getparam sched_setscheduler sched_getscheduler
sched_yield sched_get_priority_max sched_get_priority_min sched_rr_get_interval nanosleep mremap
setresuid getresuid vm86 query_module poll nfsservctl setresgid getresgid prctl rt_sigreturn
rt_sigaction rt_sigprocmask rt_sigpending rt_sigtimedwait rt_sigqueueinfo rt_sigsuspend pread64
pwrite64 chown getcwd capget capset sigaltstack sendfile getpmsg putpmsg vfork ugetrlimit mmap2
truncate64 ftruncate64 stat64 lstat64 fstat64 lchown32 getuid32 getgid32 geteuid32 getegid32
setreuid32 setregid32 getgroups32 setgroups32 fchown32 setresuid32 getresuid32 setresgid32
getresgid32 chown32 setuid32 setgid32 setfsuid32 setfsgid32 pivot_root mincore madvise getdents64
fcntl64 sys_222 sys_223 gettid readahead setxattr lsetxattr fsetxattr getxattr lgetxattr fgetxattr
listxattr llistxattr flistxattr removexattr lremovexattr fremovexattr tkill sendfile64 futex
sched_setaffinity sched_getaffinity set_thread_area get_thread_area io_setup io_destroy io_getevents
io_submit io_cancel fadvise64 sys_251 exit_group lookup_dcookie epoll_create epoll_ctl epoll_wait
remap_file_pages set_tid_address timer_create timer_settime timer_gettime timer_getoverrun
timer_delete clock_settime clock_gettime clock_getres clock_nanosleep statfs64 fstatfs64 tgkill
utimes fadvise64_64 vserver mbind get_mempolicy set_mempolicy mq_open mq_unlink mq_timedsend
mq_timedreceive mq_notify mq_getsetattr kexec_load waitid sys_setaltroot add_key request_key keyctl
ioprio_set ioprio_get inotify_init inotify_add_watch inotify_rm_watch migrate_pages openat mkdirat
mknodat fchownat futimesat fstatat64 unlinkat renameat linkat symlinkat readlinkat fchmodat
faccessat pselect6 ppoll unshare set_robust_list get_robust_list splice sync_file_range tee vmsplice
move_pages getcpu epoll_pwait utimensat signalfd timerfd eventfd fallocate timerfd_settime
timerfd_gettime signalfd4 eventfd2 epoll_create1 dup3 pipe2 inotify_init1 preadv pwritev
rt_tg_sigqueueinfo perf_counter_open].inject({}) { |h, sc| h.update sc => h.length }
SYSCALLNR_I386.update SYSCALLNR_I386.invert
SYSCALLNR_X86_64 = %w[read write open close stat fstat lstat poll lseek mmap mprotect munmap brk rt_sigaction
rt_sigprocmask rt_sigreturn ioctl pread64 pwrite64 readv writev access pipe select sched_yield
mremap msync mincore madvise shmget shmat shmctl dup dup2 pause nanosleep getitimer alarm
setitimer getpid sendfile socket connect accept sendto recvfrom sendmsg recvmsg shutdown
bind listen getsockname getpeername socketpair setsockopt getsockopt clone fork vfork execve
exit wait4 kill uname semget semop semctl shmdt msgget msgsnd msgrcv msgctl fcntl flock
fsync fdatasync truncate ftruncate getdents getcwd chdir fchdir rename mkdir rmdir creat
link unlink symlink readlink chmod fchmod chown fchown lchown umask gettimeofday getrlimit
getrusage sysinfo times ptrace getuid syslog getgid setuid setgid geteuid getegid setpgid
getppid getpgrp setsid setreuid setregid getgroups setgroups setresuid getresuid setresgid
getresgid getpgid setfsuid setfsgid getsid capget capset rt_sigpending rt_sigtimedwait
rt_sigqueueinfo rt_sigsuspend sigaltstack utime mknod uselib personality ustat statfs fstatfs
sysfs getpriority setpriority sched_setparam sched_getparam sched_setscheduler sched_getscheduler
sched_get_priority_max sched_get_priority_min sched_rr_get_interval mlock munlock mlockall
munlockall vhangup modify_ldt pivot_root _sysctl prctl arch_prctl adjtimex setrlimit chroot sync
acct settimeofday mount umount2 swapon swapoff reboot sethostname setdomainname iopl ioperm
create_module init_module delete_module get_kernel_syms query_module quotactl nfsservctl getpmsg
putpmsg afs_syscall tuxcall security gettid readahead setxattr lsetxattr fsetxattr getxattr
lgetxattr fgetxattr listxattr llistxattr flistxattr removexattr lremovexattr fremovexattr tkill time
futex sched_setaffinity sched_getaffinity set_thread_area io_setup io_destroy io_getevents io_submit
io_cancel get_thread_area lookup_dcookie epoll_create epoll_ctl_old epoll_wait_old remap_file_pages
getdents64 set_tid_address restart_syscall semtimedop fadvise64 timer_create timer_settime
timer_gettime timer_getoverrun timer_delete clock_settime clock_gettime clock_getres clock_nanosleep
exit_group epoll_wait epoll_ctl tgkill utimes vserver mbind set_mempolicy get_mempolicy mq_open
mq_unlink mq_timedsend mq_timedreceive mq_notify mq_getsetattr kexec_load waitid add_key request_key
keyctl ioprio_set ioprio_get inotify_init inotify_add_watch inotify_rm_watch migrate_pages openat
mkdirat mknodat fchownat futimesat newfstatat unlinkat renameat linkat symlinkat readlinkat
fchmodat faccessat pselect6 ppoll unshare set_robust_list get_robust_list splice tee sync_file_range
vmsplice move_pages utimensat epoll_pwait signalfd timerfd_create eventfd fallocate timerfd_settime
timerfd_gettime accept4 signalfd4 eventfd2 epoll_create1 dup3 pipe2 inotify_init1 preadv pwritev
rt_tgsigqueueinfo perf_counter_open].inject({}) { |h, sc| h.update sc => h.length }
SYSCALLNR_X86_64.update SYSCALLNR_X86_64.invert
SIGNAL = Signal.list.dup
SIGNAL.delete SIGNAL.index(0)
SIGNAL['TRAP'] ||= 5 # windows+gdbremote
SIGNAL.update SIGNAL.invert
# include/asm-generic/errno-base.h
ERRNO = %w[ERR0 EPERM ENOENT ESRCH EINTR EIO ENXIO E2BIG ENOEXEC EBADF ECHILD EAGAIN ENOMEM EACCES EFAULT
ENOTBLK EBUSY EEXIST EXDEV ENODEV ENOTDIR EISDIR EINVAL ENFILE EMFILE ENOTTY ETXTBSY EFBIG ENOSPC
ESPIPE EROFS EMLINK EPIPE EDOM ERANGE].inject({}) { |h, e| h.update e => h.length }
ERRNO.update ERRNO.invert
SIGINFO = {
# user-generated signal
'DETHREAD' => -7, # execve killing threads
'TKILL' => -6, 'SIGIO' => -5, 'ASYNCIO' => -4, 'MESGQ' => -3,
'TIMER' => -2, 'QUEUE' => -1, 'USER' => 0, 'KERNEL' => 0x80,
# ILL
'ILLOPC' => 1, 'ILLOPN' => 2, 'ILLADR' => 3, 'ILLTRP' => 4,
'PRVOPC' => 5, 'PRVREG' => 6, 'COPROC' => 7, 'BADSTK' => 8,
# FPE
'INTDIV' => 1, 'INTOVF' => 2, 'FLTDIV' => 3, 'FLTOVF' => 4,
'FLTUND' => 5, 'FLTRES' => 6, 'FLTINV' => 7, 'FLTSUB' => 8,
# SEGV
'MAPERR' => 1, 'ACCERR' => 2,
# BUS
'ADRALN' => 1, 'ADRERR' => 2, 'OBJERR' => 3, 'MCEERR_AR' => 4,
'MCEERR_AO' => 5,
# TRAP
'BRKPT' => 1, 'TRACE' => 2, 'BRANCH' => 3, 'HWBKPT' => 4,
# CHLD
'EXITED' => 1, 'KILLED' => 2, 'DUMPED' => 3, 'TRAPPED' => 4,
'STOPPED' => 5, 'CONTINUED' => 6,
# POLL
'POLL_IN' => 1, 'POLL_OUT' => 2, 'POLL_MSG' => 3, 'POLL_ERR' => 4,
'POLL_PRI' => 5, 'POLL_HUP' => 6
}
SIGINFO_C = <<EOS
typedef __int32 __pid_t;
typedef unsigned __int32 __uid_t;
typedef uintptr_t sigval_t;
typedef long __clock_t;
struct siginfo {
int si_signo;
int si_errno;
int si_code;
// int pad64;
union {
int _pad[128/4-3]; /* total >= 128b */
struct { /* kill(). */
__pid_t si_pid; /* Sending process ID. */
__uid_t si_uid; /* Real user ID of sending process. */
} _kill;
struct { /* POSIX.1b timers. */
int si_tid; /* Timer ID. */
int si_overrun; /* Overrun count. */
sigval_t si_sigval; /* Signal value. */
} _timer;
struct { /* POSIX.1b signals. */
__pid_t si_pid; /* Sending process ID. */
__uid_t si_uid; /* Real user ID of sending process. */
sigval_t si_sigval; /* Signal value. */
} _rt;
struct { /* SIGCHLD. */
__pid_t si_pid; /* Which child. */
__uid_t si_uid; /* Real user ID of sending process. */
int si_status; /* Exit value or signal. */
__clock_t si_utime;
__clock_t si_stime;
} _sigchld;
struct { /* SIGILL, SIGFPE, SIGSEGV, SIGBUS. */
uintptr_t si_addr; /* Faulting insn/memory ref. */
} _sigfault;
struct { /* SIGPOLL. */
long int si_band; /* Band event for SIGPOLL. */
int si_fd;
} _sigpoll;
struct { /* SIGSYS under SECCOMP */
uintptr_t si_calladdr; /* calling insn address */
int si_syscall; /* triggering syscall nr */
int si_arch; /* AUDIT_ARCH_* for syscall */
} _sigsys;
};
};
EOS
def sys_ptrace(req, pid, addr, data)
 # invoke the native ptrace stub; return values in [-255, -1] are
 # kernel errnos, raised as the matching ruby exception
 ret = @sys_ptrace.ptrace(req, pid, addr, data)
 if ret < 0 and ret > -256
  raise SystemCallError.new("ptrace #{COMMAND.index(req) || req}", -ret)
 end
 ret
end
def traceme
 sys_ptrace(COMMAND[:TRACEME], 0, 0, 0)
end
def peektext(addr)
 # read one word of code into @buf
 sys_ptrace(COMMAND[:PEEKTEXT], @pid, addr, @buf)
 @buf
end
def peekdata(addr)
 # read one word of data into @buf
 sys_ptrace(COMMAND[:PEEKDATA], @pid, addr, @buf)
 @buf
end
def peekusr(addr)
 # read one slot of the user area (addr is a slot index, not a byte offset)
 sys_ptrace(COMMAND[:PEEKUSR], @pid, @host_intsize*addr, @buf)
 # mask the value to the target register size
 @peekmask ||= (1 << ([@host_intsize, @intsize].min*8)) - 1
 bufval & @peekmask
end
def poketext(addr, data)
 sys_ptrace(COMMAND[:POKETEXT], @pid, addr, data.unpack(@packint).first)
end
def pokedata(addr, data)
 sys_ptrace(COMMAND[:POKEDATA], @pid, addr, data.unpack(@packint).first)
end
def pokeusr(addr, data)
 sys_ptrace(COMMAND[:POKEUSR], @pid, @host_intsize*addr, data)
end
# raw register-block accessors; buf may be a String or an AllocCStruct
def getregs(buf=nil)
 buf = buf.str if buf.respond_to?(:str) # AllocCStruct
 buf ||= [0].pack('C')*512
 sys_ptrace(COMMAND[:GETREGS], @pid, 0, buf)
 buf
end
def setregs(buf)
 buf = buf.str if buf.respond_to?(:str)
 sys_ptrace(COMMAND[:SETREGS], @pid, 0, buf)
end
def getfpregs(buf=nil)
 buf = buf.str if buf.respond_to?(:str)
 buf ||= [0].pack('C')*1024
 sys_ptrace(COMMAND[:GETFPREGS], @pid, 0, buf)
 buf
end
def setfpregs(buf)
 buf = buf.str if buf.respond_to?(:str)
 sys_ptrace(COMMAND[:SETFPREGS], @pid, 0, buf)
end
def getfpxregs(buf=nil)
 buf = buf.str if buf.respond_to?(:str)
 buf ||= [0].pack('C')*512
 sys_ptrace(COMMAND[:GETFPXREGS], @pid, 0, buf)
 buf
end
def setfpxregs(buf)
 buf = buf.str if buf.respond_to?(:str)
 sys_ptrace(COMMAND[:SETFPXREGS], @pid, 0, buf)
end
def get_thread_area(addr)
 sys_ptrace(COMMAND[:GET_THREAD_AREA], @pid, addr, @buf)
 bufval
end
def set_thread_area(addr, data)
 sys_ptrace(COMMAND[:SET_THREAD_AREA], @pid, addr, data)
end
def prctl(addr, data)
 sys_ptrace(COMMAND[:ARCH_PRCTL], @pid, addr, data)
end
# execution-control commands; sig is an optional signal to deliver
def cont(sig = nil)
 sig ||= 0
 sys_ptrace(COMMAND[:CONT], @pid, 0, sig)
end
def kill
 sys_ptrace(COMMAND[:KILL], @pid, 0, 0)
end
def singlestep(sig = nil)
 sig ||= 0
 sys_ptrace(COMMAND[:SINGLESTEP], @pid, 0, sig)
end
def singleblock(sig = nil)
 sig ||= 0
 sys_ptrace(COMMAND[:SINGLEBLOCK], @pid, 0, sig)
end
def syscall(sig = nil)
 # resume until the next syscall entry/exit
 sig ||= 0
 sys_ptrace(COMMAND[:SYSCALL], @pid, 0, sig)
end
def attach
 sys_ptrace(COMMAND[:ATTACH], @pid, 0, 0)
end
def detach
 sys_ptrace(COMMAND[:DETACH], @pid, 0, 0)
end
def setoptions(*opt)
 # opt: list of OPTIONS keys and/or raw integer flags, or'ed together
 opt = opt.inject(0) { |b, o| b |= o.kind_of?(Integer) ? o : OPTIONS[o] }
 sys_ptrace(COMMAND[:SETOPTIONS], @pid, 0, opt)
end
# retrieve pid of cld for EVENT_CLONE/FORK, exitcode for EVENT_EXIT
def geteventmsg
 sys_ptrace(COMMAND[:GETEVENTMSG], @pid, 0, @buf)
 bufval
end
def cp
 # lazy C parser for the target cpu (used to decode struct siginfo)
 @cp ||= @tgcpu.new_cparser
end
def siginfo
 # cached C siginfo struct buffer
 @siginfo ||= (
  cp.parse SIGINFO_C if not cp.toplevel.struct['siginfo']
  cp.alloc_c_struct('siginfo')
 )
end
def getsiginfo
 # details on the last signal received by the target
 sys_ptrace(COMMAND[:GETSIGINFO], @pid, 0, siginfo.str)
 siginfo
end
def setsiginfo(si=siginfo)
 si = si.str if si.respond_to?(:str)
 sys_ptrace(COMMAND[:SETSIGINFO], @pid, 0, si)
end
end
class LinOS < OS
class Process < OS::Process
# returns/create a LinuxRemoteString
def memory
 @memory ||= LinuxRemoteString.new(pid)
end
attr_writer :memory
def debugger
 # lazy LinDebugger attached to this process
 @debugger ||= LinDebugger.new(@pid)
end
attr_writer :debugger
# returns the list of loaded Modules, incl start address & path
# read from /proc/pid/maps
# returns the list of loaded Modules, incl start address & path
# read from /proc/pid/maps
def modules
 list = []
 seen = {}
 File.readlines("/proc/#{pid}/maps").each { |l|
  # 08048000-08064000 r-xp 000000 08:01 4234 /usr/bin/true
  # split with a field limit so a path containing spaces is kept whole
  # (a plain split used to report only the last word, eg "(deleted)")
  f = l.split(' ', 6)
  next if f.length < 6
  path = f[5].strip
  next if path.empty? or seen[path]
  seen[path] = true
  m = Module.new
  m.addr = f[0].to_i(16)
  m.path = path
  list << m
 }
 list
rescue
 []
end
# return a list of [addr_start, length, perms, file]
# return a list of [addr_start, length, perms, file]
def mappings
 list = []
 File.readlines("/proc/#{pid}/maps").each { |l|
  # split with a field limit so a path containing spaces is kept whole;
  # anonymous mappings have no 6th field and keep a nil file entry
  f = l.split(' ', 6)
  addrstart, addrend = f[0].split('-').map { |i| i.to_i 16 }
  list << [addrstart, addrend-addrstart, f[1], f[5] ? f[5].strip : nil]
 }
 list
rescue
 []
end
# returns a list of threads sharing this process address space
# read from /proc/pid/task/
def threads
 Dir.entries("/proc/#{pid}/task/").grep(/^\d+$/).map { |tid| tid.to_i }
rescue
 # TODO handle pthread stuff (eg 2.4 kernels)
 [pid]
end
# return the invocation commandline, from /proc/pid/cmdline
# this is manipulable by the target itself
def cmdline
 @cmdline ||= File.read("/proc/#{pid}/cmdline") rescue ''
end
attr_writer :cmdline
def path
 # first NUL-separated element of the cmdline
 cmdline.split(0.chr)[0]
end
# returns the address size of the process, based on its #cpu
def addrsz
 cpu.size
end
# returns the CPU for the process, by reading /proc/pid/exe
def cpu
 e = ELF.load_file("/proc/#{pid}/exe")
 # dont decode shdr/phdr, this is 2x faster for repeated debugger spawn
 e.decode_header(0, false, false)
 e.cpu
end
def terminate
 kill
end
def kill(signr=9)
 # send signal signr (default SIGKILL) to the process
 ::Process.kill(signr, @pid)
end
end
class << self
 # returns an array of Processes, with pid/module listing
 def list_processes
  Dir.entries('/proc').grep(/^\d+$/).map { |pid| Process.new(pid.to_i) }
 end
 # return a Process for the specified pid if it exists in /proc
 def open_process(pid)
  Process.new(pid) if check_process(pid)
 end
 def check_process(pid)
  # a pid is alive iff it has an entry under /proc
  File.directory?("/proc/#{pid}")
 end
 # create a LinDebugger on the target pid/binary
 def create_debugger(path)
  LinDebugger.new(path)
 end
end # class << self
end
class LinuxRemoteString < VirtualString
attr_accessor :pid, :readfd
attr_accessor :dbg
# returns a virtual string proxying the specified process memory range
# reads are cached (4096 aligned bytes read at once), from /proc/pid/mem
# writes are done directly by ptrace
def initialize(pid, addr_start=0, length=nil, dbg=nil)
 @pid = pid
 # default length: the whole address space of the target
 length ||= 1 << (dbg ? dbg.cpu.size : (LinOS.open_process(@pid).addrsz rescue 32))
 @readfd = File.open("/proc/#@pid/mem", 'rb') rescue nil
 @dbg = dbg if dbg
 super(addr_start, length)
end
def dup(addr = @addr_start, len = @length)
 self.class.new(@pid, addr, len, dbg)
end
def do_ptrace(needproc)
 # yield a ready-to-use PTrace for @pid: reuse the debugger's when one
 # is attached (target must be stopped), else attach temporarily
 if dbg
  dbg.switch_context(@pid) {
   st = dbg.state
   next if st != :stopped
   if needproc
    # we will try to access /proc/pid/mem
    # if the main thread is still running, fallback to ptrace.readmem instead
    pst = (dbg.tid == @pid ? st : dbg.tid_stuff[@pid][:state])
    if pst != :stopped
     savedreadfd = @readfd
     @readfd = nil
     begin
      yield dbg.ptrace
     ensure
      @readfd = savedreadfd
     end
    else
     yield dbg.ptrace
    end
   else
    yield dbg.ptrace
   end
  }
 else
  PTrace.open(@pid) { |ptrace| yield ptrace }
 end
end
def rewrite_at(addr, data)
 # target must be stopped
 wr = do_ptrace(false) { |ptrace| ptrace.writemem(addr, data) }
 raise "couldn't ptrace_write at #{'%x' % addr}" if not wr
end
def get_page(addr, len=@pagelength)
 # read one cache page from the target, preferring /proc/pid/mem
 do_ptrace(true) { |ptrace|
  begin
   if readfd and addr < (1<<63)
    # 1<<63: ruby seek = 'too big to fit longlong', linux read = EINVAL
    @readfd.pos = addr
    @readfd.read len
   elsif addr < (1<<(ptrace.host_intsize*8))
    # can reach 1<<64 with peek_data only if ptrace accepts 64bit args
    ptrace.readmem(addr, len)
   end
  rescue Errno::EIO, Errno::ESRCH
   # unmapped page, or the process is gone
   nil
  end
 }
end
end
class PTraceContext_Ia32 < PTrace
C_STRUCT = <<EOS
struct user_regs_struct_ia32 {
unsigned __int32 ebx;
unsigned __int32 ecx;
unsigned __int32 edx;
unsigned __int32 esi;
unsigned __int32 edi;
unsigned __int32 ebp;
unsigned __int32 eax;
unsigned __int32 ds;
unsigned __int32 es;
unsigned __int32 fs;
unsigned __int32 gs;
unsigned __int32 orig_eax;
unsigned __int32 eip;
unsigned __int32 cs;
unsigned __int32 eflags;
unsigned __int32 esp;
unsigned __int32 ss;
};
struct user_fxsr_struct_ia32 {
unsigned __int16 cwd;
unsigned __int16 swd;
unsigned __int16 twd;
unsigned __int16 fop;
unsigned __int32 fip;
unsigned __int32 fcs;
unsigned __int32 foo;
unsigned __int32 fos;
unsigned __int32 mxcsr;
unsigned __int32 reserved;
unsigned __int32 st_space[32]; /* 8*16 bytes for each FP-reg = 128 bytes */
unsigned __int32 xmm_space[32]; /* 8*16 bytes for each XMM-reg = 128 bytes */
unsigned __int32 padding[56];
};
EOS
def initialize(ptrace, pid=ptrace.pid)
 # wrap an existing PTrace to access the registers of one thread (pid)
 super(ptrace, :dup)
 @pid = pid
 @cp = ptrace.cp
 init
end
def init
 # lookup tables: plain GPRs (via GETREGS), debug regs (via PEEKUSR),
 # sub-registers (al/ax/ah...), and fpu/sse regs (via GETFPXREGS)
 @gpr = @@gpr_ia32 ||= [:ebx, :ecx, :edx, :esi, :edi, :ebp, :eax,
  :ds, :es, :fs, :gs, :orig_eax, :eip, :cs, :eflags,
  :esp, :ss].inject({}) { |h, r| h.update r => true }
 @gpr_peek = @@gpr_peek_ia32 ||= (0..7).inject({}) { |h, i|
  h.update "dr#{i}".to_sym => REGS_I386["DR#{i}"] }
 @gpr_sub = @@gpr_sub_ia32 ||= gpr_sub_init
 @xmm = @@xmm_ia32 ||= [:cwd, :swd, :twd, :fop, :fip, :fcs, :foo,
  :fos, :mxcsr].inject({}) { |h, r| h.update r => true }
 @cp.parse C_STRUCT if not @cp.toplevel.struct['user_regs_struct_ia32']
 # register caches, lazily filled by get_reg/set_reg
 @gpr_st = @xmm_st = nil
end
# :bh => [:ebx, 0xff, 8]
# XXX similar to Reg.symbolic... DRY
# Build the map of 16/8-bit x86 sub-registers to their parent 32-bit
# register, as { sub_sym => [parent_sym, mask, optional_shift] },
# eg :bh => [:ebx, 0xff, 8].
# XXX similar to Reg.symbolic... DRY
def gpr_sub_init
 map = {}
 %w[a b c d].each { |letter|
  parent = "e#{letter}x".to_sym
  map["#{letter}x".to_sym] = [parent, 0xffff]
  map["#{letter}l".to_sym] = [parent, 0xff]
  map["#{letter}h".to_sym] = [parent, 0xff, 8]
 }
 %w[sp bp si di].each { |reg|
  map[reg.to_sym] = [('e' + reg).to_sym, 0xffff]
 }
 # alias so x64-style names keep working on ia32
 map[:orig_rax] = [:orig_eax, 0xffff_ffff]
 map
end
def do_getregs
 # fetch all GPRs from the target into a fresh C struct
 st = cp.alloc_c_struct('user_regs_struct_ia32')
 getregs(st)
 st
end
def do_setregs(st=@gpr_st)
 setregs(st)
end
def do_getxmm
 # fetch the fpu/sse state (FXSAVE layout)
 st = cp.alloc_c_struct('user_fxsr_struct_ia32')
 getfpxregs(st)
 st
end
def do_setxmm(st=@xmm_st)
 setfpxregs(st)
end
def get_reg(r)
 # read any register by name: gpr, dr0-7, sub-register, fpu/sse
 r = r.downcase if r == 'ORIG_EAX' or r == 'ORIG_RAX'
 rs = r.to_sym
 if @gpr[rs]
  @gpr_st ||= do_getregs
  @gpr_st[rs]
 elsif o = @gpr_peek[rs]
  # debug registers are read through PEEKUSR
  peekusr(o)
 elsif o = @gpr_sub[rs]
  # sub-register: shift/mask the parent register value
  v = get_reg(o[0])
  v >>= o[2] if o[2]
  v &= o[1]
 elsif @xmm[rs]
  @xmm_st ||= do_getxmm
  @xmm_st[rs]
 else
  case r.to_s
  when /^st(\d?)$/i
   i = $1.to_i
   @xmm_st ||= do_getxmm
   fu = @xmm_st.st_space
   [fu[4*i], fu[4*i+1], fu[4*i+2]].pack('L*').unpack('D').first # XXX
  when /^mmx?(\d)$/i
   i = $1.to_i
   @xmm_st ||= do_getxmm
   fu = @xmm_st.st_space
   fu[4*i] | (fu[4*i + 1] << 32)
  when /^xmm(\d+)$/i
   i = $1.to_i
   @xmm_st ||= do_getxmm
   fu = @xmm_st.xmm_space
   fu[4*i] | (fu[4*i + 1] << 32) | (fu[4*i + 2] << 64) | (fu[4*i + 3] << 96)
  # TODO when /^ymm(\d+)$/i
  else raise "unknown register name #{r}"
  end
 end
end
def set_reg(r, v)
 # write any register by name; read-modify-write for sub-registers
 r = r.downcase if r == 'ORIG_EAX' or r == 'ORIG_RAX'
 rs = r.to_sym
 if @gpr[rs]
  @gpr_st ||= do_getregs
  @gpr_st[rs] = v
  do_setregs
 elsif o = @gpr_peek[rs]
  # debug registers are written through POKEUSR
  pokeusr(o, v)
 elsif o = @gpr_sub[rs]
  # merge v into the parent register under the mask/shift
  vo = get_reg(o[0])
  msk = o[1]
  v &= o[1]
  if o[2]
   msk <<= o[2]
   v <<= o[2]
  end
  v |= vo & ~msk
  set_reg(o[0], v)
 elsif @xmm[rs]
  @xmm_st ||= do_getxmm
  @xmm_st[rs] = v
  do_setxmm
 else
  case r.to_s
  when /^st(\d?)$/i
   i = $1.to_i
   @xmm_st ||= do_getxmm
   fu = @xmm_st.st_space
   fu[4*i], fu[4*i+1], fu[4*i+2] = [v, -1].pack('DL').unpack('L*') # XXX
   do_setxmm
  when /^mmx?(\d)$/i
   i = $1.to_i
   @xmm_st ||= do_getxmm
   fu = @xmm_st.st_space
   fu[4*i] = v & 0xffff_ffff
   fu[4*i + 1] = (v >> 32) & 0xffff_ffff
   do_setxmm
  when /^xmm(\d+)$/i
   i = $1.to_i
   @xmm_st ||= do_getxmm
   fu = @xmm_st.xmm_space
   fu[4*i] = v & 0xffff_ffff
   fu[4*i + 1] = (v >> 32) & 0xffff_ffff
   fu[4*i + 2] = (v >> 64) & 0xffff_ffff
   fu[4*i + 3] = (v >> 96) & 0xffff_ffff
   do_setxmm
  # TODO when /^ymm(\d+)$/i
  else raise "unknown register name #{r}"
  end
 end
end
end
# Register-access context for a 64bit (x64) ptrace debuggee.
# Inherits get_reg/set_reg from PTraceContext_Ia32, which look registers up
# in the @gpr/@gpr_peek/@gpr_sub/@xmm tables built here.
class PTraceContext_X64 < PTraceContext_Ia32
  C_STRUCT = <<EOS
struct user_regs_struct_x64 {
  unsigned __int64 r15;
  unsigned __int64 r14;
  unsigned __int64 r13;
  unsigned __int64 r12;
  unsigned __int64 rbp;
  unsigned __int64 rbx;
  unsigned __int64 r11;
  unsigned __int64 r10;
  unsigned __int64 r9;
  unsigned __int64 r8;
  unsigned __int64 rax;
  unsigned __int64 rcx;
  unsigned __int64 rdx;
  unsigned __int64 rsi;
  unsigned __int64 rdi;
  unsigned __int64 orig_rax;
  unsigned __int64 rip;
  unsigned __int64 cs;
  unsigned __int64 rflags;
  unsigned __int64 rsp;
  unsigned __int64 ss;
  unsigned __int64 fs_base;
  unsigned __int64 gs_base;
  unsigned __int64 ds;
  unsigned __int64 es;
  unsigned __int64 fs;
  unsigned __int64 gs;
};
struct user_i387_struct_x64 {
  unsigned __int16 cwd;
  unsigned __int16 swd;
  unsigned __int16 twd; /* Note this is not the same as the 32bit/x87/FSAVE twd */
  unsigned __int16 fop;
  unsigned __int64 rip;
  unsigned __int64 rdp;
  unsigned __int32 mxcsr;
  unsigned __int32 mxcsr_mask;
  unsigned __int32 st_space[32]; /* 8*16 bytes for each FP-reg = 128 bytes */
  unsigned __int32 xmm_space[64]; /* 16*16 bytes for each XMM-reg = 256 bytes */
  unsigned __int32 padding[24];
  // YMM ?
};
EOS

  # Build the (per-class, shared) register lookup tables and make sure the
  # C structs above are known to the C parser.
  def init
    @gpr = @@gpr_x64 ||= [:r15, :r14, :r13, :r12, :rbp, :rbx, :r11,
      :r10, :r9, :r8, :rax, :rcx, :rdx, :rsi, :rdi, :orig_rax,
      :rip, :cs, :rflags, :rsp, :ss, :fs_base, :gs_base, :ds,
      :es, :fs, :gs].inject({}) { |h, r| h.update r => true }
    @gpr_peek = @@gpr_peek_x64 ||= (0..7).inject({}) { |h, i|
      h.update "dr#{i}".to_sym => REGS_X86_64["DR#{i}"] }
    @gpr_sub = @@gpr_sub_x64 ||= gpr_sub_init
    @xmm = @@xmm_x64 ||= [:cwd, :swd, :twd, :fop, :rip, :rdp, :mxcsr,
      :mxcsr_mask].inject({}) { |h, r| h.update r => true }
    @cp.parse C_STRUCT if not @cp.toplevel.struct['user_regs_struct_x64']
    @gpr_st = @xmm_st = nil
  end

  # Build the 64bit sub-register description map.
  # All keys MUST be Symbols: get_reg/set_reg look sub-registers up with
  # @gpr_sub[r.to_sym].
  def gpr_sub_init
    ret = {}
    %w[a b c d].each { |r|
      b = "r#{r}x".to_sym
      ret["e#{r}x".to_sym] = [b, 0xffff_ffff]
      ret[ "#{r}x".to_sym] = [b, 0xffff]
      ret[ "#{r}l".to_sym] = [b, 0xff]
      ret[ "#{r}h".to_sym] = [b, 0xff, 8]
    }
    %w[sp bp si di].each { |r|
      b = "r#{r}".to_sym
      ret["e#{r}".to_sym] = [b, 0xffff_ffff]
      ret[ "#{r}".to_sym] = [b, 0xffff]
      ret["#{r}l".to_sym] = [b, 0xff]
    }
    (8..15).each { |i|
      b = "r#{i}".to_sym
      # BUGFIX: these keys were Strings ("r8d" etc.), so the Symbol lookup in
      # get_reg/set_reg could never match them and r8d..r15b always raised
      # "unknown register name"
      ret["r#{i}d".to_sym] = [b, 0xffff_ffff]
      ret["r#{i}w".to_sym] = [b, 0xffff]
      ret["r#{i}b".to_sym] = [b, 0xff]
    }
    ret[:eip] = [:rip, 0xffff_ffff]
    ret[:eflags] = [:rflags, 0xffff_ffff]
    ret[:orig_eax] = [:orig_rax, 0xffff_ffff]
    ret
  end

  # Fetch the target GPRs (ptrace GETREGS) into a fresh user_regs_struct_x64.
  def do_getregs
    st = cp.alloc_c_struct('user_regs_struct_x64')
    getregs(st)
    st
  end

  # Write a GPR struct back to the target (defaults to the cached struct).
  def do_setregs(st=@gpr_st)
    setregs(st)
  end

  # Fetch the FPU/SSE state (ptrace GETFPREGS) into a fresh user_i387_struct_x64.
  def do_getxmm
    st = cp.alloc_c_struct('user_i387_struct_x64')
    getfpregs(st)
    st
  end

  # Write the FPU/SSE state back to the target (defaults to the cached struct).
  def do_setxmm(st=@xmm_st)
    setfpregs(st)
  end
end
# waitpid flags missing from some ruby versions: wait for any child,
# including clone()d threads (see waitpid(2)).
module ::Process
  WALL   = 0x40000000 unless defined? WALL
  WCLONE = 0x80000000 unless defined? WCLONE
end
# this class implements a high-level API over the ptrace debugging primitives
class LinDebugger < Debugger
  # ptrace is per-process or per-thread ?
  attr_accessor :ptrace, :continuesignal, :has_pax_mprotect, :target_syscall, :cached_waitpid
  attr_accessor :callback_syscall, :callback_branch, :callback_exec

  # pidpath: a pid (Integer or numeric String) to attach to, or the path of
  # a binary to spawn; nil creates an idle debugger.
  def initialize(pidpath=nil, &b)
    super()
    @pid_stuff_list << :has_pax_mprotect << :ptrace << :breaking << :os_process
    @tid_stuff_list << :continuesignal << :saved_csig << :ctx << :target_syscall
    # by default, break on all signals except SIGWINCH (terminal resize notification)
    @pass_all_exceptions = lambda { |e| e[:signal] == 'WINCH' }
    @callback_syscall = lambda { |i| log "syscall #{i[:syscall]}" }
    @callback_exec = lambda { |i| log "execve #{os_process.path}" }
    @cached_waitpid = []
    return if not pidpath
    t = begin; Integer(pidpath)
        rescue ArgumentError, TypeError
        end
    t ? attach(t) : create_process(pidpath, &b)
  end

  def shortname; 'lindbg'; end

  # attach to a running process and all its threads
  def attach(pid, do_attach=:attach)
    pt = PTrace.new(pid, do_attach)
    set_context(pt.pid, pt.pid) # swapout+init_newpid
    log "attached #@pid"
    list_threads.each { |tid| attach_thread(tid) if tid != @pid }
    set_tid @pid
  end

  # create a process and debug it
  # if given a block, the block is run in the context of the ruby subprocess
  # after the fork() and before exec()ing the target binary
  # you can use it to eg tweak file descriptors:
  #  tg_stdin_r, tg_stdin_w = IO.pipe
  #  create_process('/bin/cat') { tg_stdin_w.close ; $stdin.reopen(tg_stdin_r) }
  #  tg_stdin_w.write 'lol'
  def create_process(path, &b)
    pt = PTrace.new(path, :create, &b)
    # TODO save path, allow restart etc
    set_context(pt.pid, pt.pid) # swapout+init_newpid
    log "attached #@pid"
  end

  # pick the target cpu, set up the ptrace interface, and handle the
  # 32bit-debugger-on-64bit-target special case
  def initialize_cpu
    @cpu = os_process.cpu
    # need to init @ptrace here, before init_dasm calls gui.swapin  XXX this stinks
    @ptrace = PTrace.new(@pid, false)
    if @cpu.size == 64 and @ptrace.reg_off['EAX']
      hack_x64_32
    end
    set_tid @pid
    set_thread_options
  end

  # expose the target's memory as a string-like object
  def initialize_memory
    @memory = os_process.memory = LinuxRemoteString.new(@pid, 0, nil, self)
  end

  # cached LinOS handle on the debuggee process
  def os_process
    @os_process ||= LinOS.open_process(@pid)
  end

  def list_threads
    os_process.threads
  end

  def list_processes
    LinOS.list_processes
  end

  def check_pid(pid)
    LinOS.check_process(pid)
  end

  def mappings
    os_process.mappings
  end

  def modules
    os_process.modules
  end

  # We're a 32bit process debugging a 64bit target
  # the ptrace kernel interface we use only allow us a 32bit-like target access
  # With this we advertize the cpu as having eax..edi registers (the only one we
  # can access), while still decoding x64 instructions (whose addr < 4G)
  def hack_x64_32
    log "WARNING: debugging a 64bit process from a 32bit debugger is a very bad idea !"
    ia32 = Ia32.new
    @cpu.instance_variable_set('@dbg_register_pc', ia32.dbg_register_pc)
    @cpu.instance_variable_set('@dbg_register_sp', ia32.dbg_register_sp)
    @cpu.instance_variable_set('@dbg_register_flags', ia32.dbg_register_flags)
    @cpu.instance_variable_set('@dbg_register_list', ia32.dbg_register_list)
    @cpu.instance_variable_set('@dbg_register_size', ia32.dbg_register_size)
  end

  # attach a thread of the current process
  def attach_thread(tid)
    set_tid tid
    @ptrace.pid = tid
    @ptrace.attach
    @state = :stopped
    # store this waitpid so that we can return it in a future check_target
    ::Process.waitpid(tid, ::Process::WALL)
    # XXX can $? be safely stored?
    @cached_waitpid << [tid, $?.dup]
    log "attached thread #{tid}"
    set_thread_options
  rescue Errno::ESRCH
    # raced, thread quitted already
    del_tid
  end

  # set the debugee ptrace options (notify clone/exec/exit, and fork/vfork depending on @trace_children)
  def set_thread_options
    opts = %w[TRACESYSGOOD TRACECLONE TRACEEXEC TRACEEXIT]
    opts += %w[TRACEFORK TRACEVFORK TRACEVFORKDONE] if trace_children
    @ptrace.pid = @tid
    @ptrace.setoptions(*opts)
  end

  # update the current pid relative to tracing children (@trace_children only effects newly traced pid/tid)
  def do_trace_children
    each_tid { set_thread_options }
  end

  # drop the cached register context on top of the superclass invalidation
  def invalidate
    @ctx = nil
    super()
  end

  # current thread register values accessor
  def ctx
    @ctx ||= case @ptrace.host_csn
             when 'ia32'; PTraceContext_Ia32.new(@ptrace, @tid)
             when 'x64'; PTraceContext_X64.new(@ptrace, @tid)
             else raise '8==D'
             end
  end

  # read a register of the current thread; 0 when the target is running or gone
  def get_reg_value(r)
    return 0 if @state != :stopped
    ctx.get_reg(r)
  rescue Errno::ESRCH
    0
  end

  def set_reg_value(r, v)
    ctx.set_reg(r, v)
  end

  # decode a waitpid status for the current tid and dispatch to the
  # relevant evt_* handler (exit, signal, syscall, ptrace event, bpx, ...)
  def update_waitpid(status)
    invalidate
    @continuesignal = 0
    @state = :stopped # allow get_reg (for eg pt_syscall)
    info = { :status => status }
    if status.exited?
      info.update :exitcode => status.exitstatus
      if @tid == @pid # XXX
        evt_endprocess info
      else
        evt_endthread info
      end
    elsif status.signaled?
      info.update :signal => (PTrace::SIGNAL[status.termsig] || status.termsig)
      if @tid == @pid
        evt_endprocess info
      else
        evt_endthread info
      end
    elsif status.stopped?
      sig = status.stopsig & 0x7f
      signame = PTrace::SIGNAL[sig]
      if signame == 'TRAP'
        if status.stopsig & 0x80 > 0
          # TRACESYSGOOD marks syscall stops by setting bit 0x80
          # XXX int80 in x64 => syscallnr32 ?
          evt_syscall info.update(:syscall => @ptrace.syscallnr[get_reg_value(@ptrace.syscallreg)])
        elsif (status >> 16) > 0
          # ptrace extended event (set up by set_thread_options)
          case PTrace::WAIT_EXTENDEDRESULT[status >> 16]
          when 'EVENT_FORK', 'EVENT_VFORK'
            # parent notification of a fork
            # child receives STOP (may have already happened)
            #cld = @ptrace.geteventmsg
            resume_badbreak
          when 'EVENT_CLONE'
            #cld = @ptrace.geteventmsg
            resume_badbreak
          when 'EVENT_EXIT'
            @ptrace.pid = @tid
            info.update :exitcode => @ptrace.geteventmsg
            if @tid == @pid
              evt_endprocess info
            else
              evt_endthread info
            end
          when 'EVENT_VFORKDONE'
            resume_badbreak
          when 'EVENT_EXEC'
            evt_exec info
          end
        else
          # plain SIGTRAP: disambiguate with siginfo
          @ptrace.pid = @tid
          si = @ptrace.getsiginfo
          case si.si_code
          when PTrace::SIGINFO['BRKPT'],
               PTrace::SIGINFO['KERNEL'] # \xCC prefer KERNEL to BRKPT
            evt_bpx
          when PTrace::SIGINFO['TRACE']
            evt_singlestep # singlestep/singleblock
          when PTrace::SIGINFO['BRANCH']
            evt_branch # XXX BTS?
          when PTrace::SIGINFO['HWBKPT']
            evt_hwbp
          else
            @saved_csig = @continuesignal = sig
            info.update :signal => signame, :type => "SIG#{signame}"
            evt_exception info
          end
        end
      elsif signame == 'STOP' and @info == 'new'
        # new thread break on creation (eg after fork + TRACEFORK)
        if @pid == @tid
          attach(@pid, false)
          evt_newprocess info
        else
          evt_newthread info
        end
      elsif signame == 'STOP' and @breaking
        # STOP we sent ourselves from #break
        @state = :stopped
        @info = 'break'
        @breaking.call if @breaking.kind_of? Proc
        @breaking = nil
      else
        @saved_csig = @continuesignal = sig
        info.update :signal => signame, :type => "SIG#{signame}"
        if signame == 'SEGV'
          # need more data on access violation (for bpm)
          info.update :type => 'access violation'
          @ptrace.pid = @tid
          si = @ptrace.getsiginfo
          access = case si.si_code
                   when PTrace::SIGINFO['MAPERR']; :r # XXX write access to unmapped => ?
                   when PTrace::SIGINFO['ACCERR']; :w
                   end
          info.update :fault_addr => si.si_addr, :fault_access => access
        end
        evt_exception info
      end
    else
      log "unknown wait status #{status.inspect}"
      evt_exception info.update(:type => "unknown wait #{status.inspect}")
    end
  end

  # switch to tid, finding (and switching to) its owner pid if needed
  def set_tid_findpid(tid)
    return if tid == @tid
    if tid != @pid and !@tid_stuff[tid]
      if kv = @pid_stuff.find { |k, v| v[:tid_stuff] and v[:tid_stuff][tid] }
        set_pid kv[0]
      elsif pr = list_processes.find { |p| p.threads.include?(tid) }
        set_pid pr.pid
      end
    end
    set_tid tid
  end

  # non-blocking check for a pending debug event; true when one was handled
  def do_check_target
    if @cached_waitpid.empty?
      t = ::Process.waitpid(-1, ::Process::WNOHANG | ::Process::WALL)
      st = $?
    else
      t, st = @cached_waitpid.shift
    end
    return if not t
    set_tid_findpid t
    update_waitpid st
    true
  rescue ::Errno::ECHILD
  end

  # blocking wait for the next debug event
  def do_wait_target
    if @cached_waitpid.empty?
      t = ::Process.waitpid(-1, ::Process::WALL)
      st = $?
    else
      t, st = @cached_waitpid.shift
    end
    set_tid_findpid t
    update_waitpid st
  rescue ::Errno::ECHILD
  end

  def do_continue
    @state = :running
    @ptrace.pid = tid
    @ptrace.cont(@continuesignal)
  end

  def do_singlestep(*a)
    @state = :running
    @ptrace.pid = tid
    @ptrace.singlestep(@continuesignal)
  end

  # use the PT_SYSCALL to break on next syscall
  # regexp allowed to wait a specific syscall
  def syscall(arg=nil)
    arg = nil if arg and arg.strip == ''
    if b = check_breakpoint_cause and b.hash_shared.find { |bb| bb.state == :active }
      singlestep_bp(b) {
        next if not check_pre_run(:syscall, arg)
        @target_syscall = arg
        @state = :running
        @ptrace.pid = @tid
        @ptrace.syscall(@continuesignal)
      }
    else
      return if not check_pre_run(:syscall, arg)
      @target_syscall = arg
      @state = :running
      @ptrace.pid = @tid
      @ptrace.syscall(@continuesignal)
    end
  end

  def syscall_wait(*a, &b)
    syscall(*a, &b)
    wait_target
  end

  # use the PT_SINGLEBLOCK to execute until the next branch
  def singleblock
    # record as singlestep to avoid evt_singlestep -> evt_exception
    # step or block doesn't matter much here anyway
    if b = check_breakpoint_cause and b.hash_shared.find { |bb| bb.state == :active }
      singlestep_bp(b) {
        next if not check_pre_run(:singlestep)
        @state = :running
        @ptrace.pid = @tid
        @ptrace.singleblock(@continuesignal)
      }
    else
      return if not check_pre_run(:singlestep)
      @state = :running
      @ptrace.pid = @tid
      @ptrace.singleblock(@continuesignal)
    end
  end

  def singleblock_wait(*a, &b)
    singleblock(*a, &b)
    wait_target
  end

  # woke up from a PT_SYSCALL
  def evt_syscall(info={})
    @state = :stopped
    @info = "syscall #{info[:syscall]}"
    callback_syscall[info] if callback_syscall
    if @target_syscall and info[:syscall] !~ /^#@target_syscall$/i
      # not the syscall we were waiting for: transparently resume
      resume_badbreak
    else
      @target_syscall = nil
    end
  end

  # SIGTRAP + SIGINFO_TRAP_BRANCH = ?
  def evt_branch(info={})
    @state = :stopped
    @info = "branch"
    callback_branch[info] if callback_branch
  end

  # called during sys_execve in the new process
  def evt_exec(info={})
    @state = :stopped
    @info = "#{info[:exec]} execve"
    initialize_newpid
    # XXX will receive a SIGTRAP, could hide it..
    callback_exec[info] if callback_exec
    # calling continue() here will loop back to TRAP+INFO_EXEC
  end

  # interrupt the running target with a SIGSTOP; the optional block is run
  # once the stop is reported by waitpid
  def break(&b)
    @breaking = b || true
    kill 'STOP'
  end

  # send a signal to the current thread (defaults to 9/KILL)
  def kill(sig=nil)
    return if not tid
    # XXX tkill ?
    ::Process.kill(sig2signr(sig), tid)
  rescue Errno::ESRCH
  end

  # re-deliver (or swallow) the signal that caused the current stop
  def pass_current_exception(bool=true)
    if bool
      @continuesignal = @saved_csig
    else
      @continuesignal = 0
    end
  end

  # convert a signal name/number ('KILL', 'SIGSTOP', 9, nil) to its number
  def sig2signr(sig)
    case sig
    when nil, ''; 9
    when Integer; sig
    when String
      sig = sig.upcase.sub(/^SIG_?/, '')
      PTrace::SIGNAL[sig] || Integer(sig)
    else raise "unhandled signal #{sig.inspect}"
    end
  end

  # stop debugging the current process
  def detach
    if @state == :running
      # must be stopped so we can rm bps
      self.break { detach }
      mypid = @pid
      wait_target
      # after syscall(), wait will return once for interrupted syscall,
      # and we need to wait more for the break callback to kick in
      if @pid == mypid and @state == :stopped and @info =~ /syscall/
        do_continue
        check_target
      end
      return
    end
    del_all_breakpoints
    each_tid {
      @ptrace.pid = @tid
      @ptrace.detach rescue nil
      @delete_thread = true
    }
    del_pid
  end

  # software breakpoint; transparently falls back to hwbp on PaX targets
  def bpx(addr, *a, &b)
    return hwbp(addr, :x, 1, *a, &b) if @has_pax_mprotect
    super(addr, *a, &b)
  end

  # handles exceptions from PaX-style mprotect restrictions on bpx,
  # transmute them to hwbp on the fly
  def do_enable_bp(b)
    super(b)
  rescue ::Errno::EIO
    if b.type == :bpx
      @memory[b.address, 1] # check if we can read
      # didn't raise: it's a PaX-style config
      @has_pax_mprotect = true
      b.del
      hwbp(b.address, :x, 1, b.oneshot, b.condition, &b.action)
      log 'PaX: bpx->hwbp'
    else raise
    end
  end

  # register linux-specific commands/shortcuts on the debugger UI
  def ui_command_setup(ui)
    ui.new_command('syscall', 'waits for the target to do a syscall using PT_SYSCALL') { |arg| ui.wrap_run { syscall arg } }
    ui.keyboard_callback[:f6] = lambda { ui.wrap_run { syscall } }
    ui.new_command('signal_cont', 'set/get the continue signal (0 == unset)') { |arg|
      case arg.to_s.strip
      when ''; log "#{@continuesignal} (#{PTrace::SIGNAL[@continuesignal]})"
      else @continuesignal = sig2signr(arg)
      end
    }
  end
end
end
|
# Our CLI Controller
class LocalSkiReport::CLI
  # Interactive controller: walks the user region -> state -> resort and
  # prints ski reports produced by LocalSkiReport::Scraper.
  attr_accessor :resort

  # Regions (hash key) mapped to the states in that region that have resorts.
  STATES_WITH_RESORTS = [
    { :midwest => ["Illinois", "Indiana", "Iowa", "Kansas", "Michigan", "Minnesota", "Missouri", "Ohio","Wisconsin"] },
    { :northeast => ["Connecticut", "Maine", "Massachusetts", "New Hampshire", "New Jersey", "New York", "Pennsylvania", "Rhode Island", "Vermont"] },
    { :northwest => ["Alaska", "Idaho", "Oregon", "Washington"] },
    { :rockies => ["Colorado", "Montana", "New Mexico", "Utah", "Wyoming"] },
    { :southeast => ["Alabama", "Georgia", "Maryland", "North Carolina", "Tennessee", "Virginia", "West Virginia"] },
    { :west_coast => ["Arizona", "California", "Nevada"] }
  ]

  # Entry point: greet, run the menu loop, print the goodbye message.
  def call
    greeting
    menu
    exit_msg
  end

  # Main interactive flow, looping on user commands until 'exit'.
  def menu
    region_num = self.select_region
    separator(50)
    list_states(region_num)
    state_num = select_state
    separator(50)
    user_region = get_key(region_num)
    user_state = get_state(region_num, user_region, state_num) #gets state "string"
    resort_list = list_resorts(user_state)
    select_resort(resort_list)
    display_table
    input = nil
    while input != "exit"
      puts "Type: 'more' for detailed report, 'new' for new search, 'exit' to Quit."
      input = gets.strip.downcase
      case input
      # BUGFIX: the prompt advertises 'new' but only 'resort' was handled;
      # accept both (keep 'resort' for backward compatibility)
      when "new", "resort"
        menu
      when "more"
        more_info
      end
    end
  end

  def greeting
    separator(40)
    puts "Welcome to Local Ski Report gem"
    separator(40)
    puts "Let's Get Your Local Ski Report"
    puts " "
  end

  # Region symbol (eg :midwest) for the given region index.
  def get_key(num)
    STATES_WITH_RESORTS[num].keys[0]
  end

  # URL-friendly state slug ("New York" -> "new-york") for the chosen state.
  def get_state(reg_num, user_reg, st_num)
    STATES_WITH_RESORTS[reg_num][user_reg][st_num].downcase.gsub(" ", "-") #Get users choosen State Regions Array
  end

  # Scrape, print and return the resorts of a state.
  def list_resorts(state)
    resorts = LocalSkiReport::Scraper.scrap_resorts_page(state)
    resorts.each_with_index { |r,i| puts "#{i+1}. #{r.name}" }
    resorts
  end

  def list_regions
    i = 1
    STATES_WITH_RESORTS.each do |region|
      region.each_key do |k|
        puts "#{i}. #{k.to_s.gsub("_", " ").upcase}"
        i += 1
      end
    end
  end

  def list_states(user_request)
    STATES_WITH_RESORTS[user_request].each_value do |states|
      states.each_with_index { |state, i| puts "#{i+1}. #{state}" }
    end
  end

  # Prompt for a region; returns its zero-based index.
  def select_region
    list_regions
    separator(50)
    puts "Which region would you like to check? type number: "
    gets.chomp.to_i - 1
  end

  # Prompt for a resort and remember the user's pick in @resort.
  def select_resort(resorts_arr)
    separator(50)
    puts "Select a Resort or Ski-Area for the latest Ski Report: "
    x = gets.chomp.to_i - 1
    @resort = resorts_arr[x]
  end

  # Prompt for a state; returns its zero-based index.
  def select_state
    separator(50)
    puts "Which State would you like to check? type number: "
    gets.chomp.to_i - 1
  end

  def separator(num)
    puts "-" * num
  end

  # Scrape and print the extended (detailed) report for the chosen resort.
  def more_info
    url = @resort.url
    report = @resort.reports.first
    LocalSkiReport::Scraper.scrap_report_page(report, url)
    table = report.xt_report
    puts table
  end

  # Print the basic report table for the chosen resort.
  def display_table
    table = @resort.reports[0].report
    puts table
  end

  def exit_msg
    separator(60)
    puts "Check back later for the latest Ski reports."
    puts "Thanks for using Local Ski Reports gem!"
    separator(60)
  end
end
renamed #display_table to #display_report, #more_info is now #display_xt_report
# Our CLI Controller
class LocalSkiReport::CLI
  # Interactive controller: walks the user region -> state -> resort and
  # prints ski reports produced by LocalSkiReport::Scraper.
  attr_accessor :resort

  # Regions (hash key) mapped to the states in that region that have resorts.
  STATES_WITH_RESORTS = [
    { :midwest => ["Illinois", "Indiana", "Iowa", "Kansas", "Michigan", "Minnesota", "Missouri", "Ohio","Wisconsin"] },
    { :northeast => ["Connecticut", "Maine", "Massachusetts", "New Hampshire", "New Jersey", "New York", "Pennsylvania", "Rhode Island", "Vermont"] },
    { :northwest => ["Alaska", "Idaho", "Oregon", "Washington"] },
    { :rockies => ["Colorado", "Montana", "New Mexico", "Utah", "Wyoming"] },
    { :southeast => ["Alabama", "Georgia", "Maryland", "North Carolina", "Tennessee", "Virginia", "West Virginia"] },
    { :west_coast => ["Arizona", "California", "Nevada"] }
  ]

  # Entry point: greet, run the menu loop, print the goodbye message.
  def call
    greeting
    menu
    exit_msg
  end

  # Main interactive flow, looping on user commands until 'exit'.
  def menu
    region_num = self.select_region
    separator(50)
    list_states(region_num)
    state_num = select_state
    separator(50)
    user_region = get_key(region_num)
    user_state = get_state(region_num, user_region, state_num) #gets state "string"
    resort_list = list_resorts(user_state)
    select_resort(resort_list)
    display_report
    input = nil
    while input != "exit"
      puts "Type: 'more' for detailed report, 'new' for new search, 'exit' to Quit."
      input = gets.strip.downcase
      case input
      # BUGFIX: the prompt advertises 'new' but only 'resort' was handled;
      # accept both (keep 'resort' for backward compatibility)
      when "new", "resort"
        menu
      when "more"
        display_xt_report
      end
    end
  end

  def greeting
    separator(40)
    puts "Welcome to Local Ski Report gem"
    separator(40)
    puts "Let's Get Your Local Ski Report"
    puts " "
  end

  # Region symbol (eg :midwest) for the given region index.
  def get_key(num)
    STATES_WITH_RESORTS[num].keys[0]
  end

  # URL-friendly state slug ("New York" -> "new-york") for the chosen state.
  def get_state(reg_num, user_reg, st_num)
    STATES_WITH_RESORTS[reg_num][user_reg][st_num].downcase.gsub(" ", "-") #Get users choosen State Regions Array
  end

  # Scrape, print and return the resorts of a state.
  def list_resorts(state)
    resorts = LocalSkiReport::Scraper.scrap_resorts_page(state)
    resorts.each_with_index { |r,i| puts "#{i+1}. #{r.name}" }
    resorts
  end

  def list_regions
    i = 1
    STATES_WITH_RESORTS.each do |region|
      region.each_key do |k|
        puts "#{i}. #{k.to_s.gsub("_", " ").upcase}"
        i += 1
      end
    end
  end

  def list_states(user_request)
    STATES_WITH_RESORTS[user_request].each_value do |states|
      states.each_with_index { |state, i| puts "#{i+1}. #{state}" }
    end
  end

  # Prompt for a region; returns its zero-based index.
  def select_region
    list_regions
    separator(50)
    puts "Which region would you like to check? type number: "
    gets.chomp.to_i - 1
  end

  # Prompt for a resort and remember the user's pick in @resort.
  def select_resort(resorts_arr)
    separator(50)
    puts "Select a Resort or Ski-Area for the latest Ski Report: "
    x = gets.chomp.to_i - 1
    @resort = resorts_arr[x]
  end

  # Prompt for a state; returns its zero-based index.
  def select_state
    separator(50)
    puts "Which State would you like to check? type number: "
    gets.chomp.to_i - 1
  end

  def separator(num)
    puts "-" * num
  end

  # Scrape and print the extended (detailed) report for the chosen resort.
  def display_xt_report
    url = @resort.url
    report = @resort.reports.first
    LocalSkiReport::Scraper.scrap_report_page(report, url)
    table = report.xt_report
    puts table
  end

  # Print the basic report table for the chosen resort.
  def display_report
    table = @resort.reports[0].report
    puts table
  end

  def exit_msg
    separator(60)
    puts "Check back later for the latest Ski reports."
    puts "Thanks for using Local Ski Reports gem!"
    separator(60)
  end
end
#!/usr/bin/env ruby
module LogLineParser
  # Helpers to render parsed log records as LTSV
  # (Labeled Tab-Separated Values) lines.
  module Ltsv
    LABEL_SEPARATOR = ":"
    TAB = "\t"

    ##
    # Label names are borrowed from
    # http://ltsv.org/
    FORMAT_STRING_LABEL_TABLE = {
      "%t" => "time",
      "%h" => "host",
      "%{X-Forwarded-For}i" => "forwardedfor",
      "%l" => "ident",
      "%u" => "user",
      "%r" => "req",
      "%m" => "method",
      "%U%q" => "uri",
      "%H" => "protocol",
      "%>s" => "status",
      "%B" => "size",
      "%b" => "size",
      "%I" => "reqsize",
      "%{Referer}i" => "referer",
      "%{User-agent}i" => "ua",
      "%{Host}i" => "vhost",
      "%D" => "reqtime_microsec",
      "%T" => "reqtime",
      "%{X-Cache}o" => "cache",
      "%{X-Runtime}o" => "runtime",
      # "-" => "apptime",
    }

    # Translate Apache LogFormat directives to LTSV label names;
    # unknown directives pass through unchanged.
    def self.format_strings_to_labels(format_strings)
      format_strings.map {|directive| FORMAT_STRING_LABEL_TABLE[directive] || directive }
    end

    # Pair each label with its value and join everything into one LTSV record.
    def self.to_ltsv(labels, values)
      labels.zip(values).map {|label, value| [label, value].join(LABEL_SEPARATOR) }.join(TAB)
    end
  end
end
Removed a blank line to keep the coding style consistent.
#!/usr/bin/env ruby
module LogLineParser
  # Serializes parsed log fields into LTSV (http://ltsv.org/) records.
  module Ltsv
    LABEL_SEPARATOR = ":"
    TAB = "\t"

    ##
    # Label names are borrowed from
    # http://ltsv.org/
    FORMAT_STRING_LABEL_TABLE = {
      "%t" => "time",
      "%h" => "host",
      "%{X-Forwarded-For}i" => "forwardedfor",
      "%l" => "ident",
      "%u" => "user",
      "%r" => "req",
      "%m" => "method",
      "%U%q" => "uri",
      "%H" => "protocol",
      "%>s" => "status",
      "%B" => "size",
      "%b" => "size",
      "%I" => "reqsize",
      "%{Referer}i" => "referer",
      "%{User-agent}i" => "ua",
      "%{Host}i" => "vhost",
      "%D" => "reqtime_microsec",
      "%T" => "reqtime",
      "%{X-Cache}o" => "cache",
      "%{X-Runtime}o" => "runtime",
      # "-" => "apptime",
    }

    # Map each LogFormat directive to its LTSV label, keeping directives
    # that have no known label as-is.
    def self.format_strings_to_labels(format_strings)
      format_strings.map do |fmt|
        FORMAT_STRING_LABEL_TABLE.fetch(fmt, fmt)
      end
    end

    # Build a single LTSV line: "label:value" fields joined by tabs.
    def self.to_ltsv(labels, values)
      pairs = labels.zip(values)
      pairs.map {|l, v| "#{l}#{LABEL_SEPARATOR}#{v}" }.join(TAB)
    end
  end
end
|
require "logstash/inputs/base"
require "logstash/namespace"
# This input allows you to receive events over XMPP/Jabber.
#
# This plugin can be used for accepting events from humans or applications
# XMPP, or you can use it for PubSub or general message passing for logstash to
# logstash.
class LogStash::Inputs::Xmpp < LogStash::Inputs::Base
  config_name "xmpp"
  plugin_status "beta"

  # The user or resource ID, like foo@example.com.
  config :user, :validate => :string, :required => :true

  # The xmpp password for the user/identity.
  config :password, :validate => :password, :required => :true

  # if muc/multi-user-chat required, give the name of the room that
  # you want to join: room@conference.domain/nick
  config :rooms, :validate => :array

  # The xmpp server to connect to. This is optional. If you omit this setting,
  # the host on the user/identity is used. (foo.com for user@foo.com)
  config :host, :validate => :string

  # Set to true to enable greater debugging in XMPP. Useful for debugging
  # network/authentication errors.
  config :debug, :validate => :boolean, :default => false

  public
  # Connect and authenticate against the XMPP server and announce presence.
  def register
    require 'xmpp4r' # xmpp4r gem
    Jabber::debug = true if @debug
    @client = Jabber::Client.new(Jabber::JID.new(@user))
    @client.connect(@host) # it is ok if host is nil
    @client.auth(@password.value)
    @client.send(Jabber::Presence.new.set_type(:available))
    # load the MUC Client if we are joining rooms.
    require 'xmpp4r/muc/helper/simplemucclient' if @rooms && !@rooms.empty?
  end # def register

  public
  # Install message callbacks that turn incoming chat/MUC messages into events.
  def run(queue)
    if @rooms
      @rooms.each do |room| # handle muc messages in different rooms
        @muc = Jabber::MUC::SimpleMUCClient.new(@client)
        @muc.join(room)
        @muc.on_message do |time,from,body|
          e = to_event(body, "#{room}/#{from}")
          queue << e if e
        end # @muc.on_message
      end # @rooms.each
    end # if @rooms

    @client.add_message_callback do |msg| # handle direct/private messages
      source = "xmpp://#{msg.from.node}@#{msg.from.domain}/#{msg.from.resource}"
      # accept normal msgs (skip presence updates, etc)
      if msg.body != nil
        e = to_event(msg.body, source)
        # BUGFIX: guard against to_event returning nil, as the MUC path does
        queue << e if e
      end
    end # @client.add_message_callback
  end # def run
end # def class LogStash:Inputs::Xmpp
Block the 'run' method (with sleep) until xmpp4r provides a way to reliably block until the client dies.
require "logstash/inputs/base"
require "logstash/namespace"
# This input allows you to receive events over XMPP/Jabber.
#
# This plugin can be used for accepting events from humans or applications
# XMPP, or you can use it for PubSub or general message passing for logstash to
# logstash.
class LogStash::Inputs::Xmpp < LogStash::Inputs::Base
  config_name "xmpp"
  plugin_status "beta"

  # The user or resource ID, like foo@example.com.
  config :user, :validate => :string, :required => :true

  # The xmpp password for the user/identity.
  config :password, :validate => :password, :required => :true

  # if muc/multi-user-chat required, give the name of the room that
  # you want to join: room@conference.domain/nick
  config :rooms, :validate => :array

  # The xmpp server to connect to. This is optional. If you omit this setting,
  # the host on the user/identity is used. (foo.com for user@foo.com)
  config :host, :validate => :string

  # Set to true to enable greater debugging in XMPP. Useful for debugging
  # network/authentication errors.
  config :debug, :validate => :boolean, :default => false

  public
  # Connect and authenticate against the XMPP server and announce presence.
  def register
    require 'xmpp4r' # xmpp4r gem
    Jabber::debug = true if @debug
    @client = Jabber::Client.new(Jabber::JID.new(@user))
    @client.connect(@host) # it is ok if host is nil
    @client.auth(@password.value)
    @client.send(Jabber::Presence.new.set_type(:available))
    # load the MUC Client if we are joining rooms.
    require 'xmpp4r/muc/helper/simplemucclient' if @rooms && !@rooms.empty?
  end # def register

  public
  # Install message callbacks that turn incoming chat/MUC messages into
  # events, then block forever (xmpp4r delivers via its own threads).
  def run(queue)
    if @rooms
      @rooms.each do |room| # handle muc messages in different rooms
        @muc = Jabber::MUC::SimpleMUCClient.new(@client)
        @muc.join(room)
        @muc.on_message do |time,from,body|
          e = to_event(body, "#{room}/#{from}")
          queue << e if e
        end # @muc.on_message
      end # @rooms.each
    end # if @rooms

    @client.add_message_callback do |msg| # handle direct/private messages
      source = "xmpp://#{msg.from.node}@#{msg.from.domain}/#{msg.from.resource}"
      # accept normal msgs (skip presence updates, etc)
      if msg.body != nil
        e = to_event(msg.body, source)
        # BUGFIX: guard against to_event returning nil, as the MUC path does
        queue << e if e
      end
    end # @client.add_message_callback

    sleep
  end # def run
end # def class LogStash:Inputs::Xmpp
|
require 'json'
require 'socket'
require 'blather/client'
module Lurch
  module Clients
    # XMPP front-end: relays incoming chat messages to the Lurch daemon over
    # a local TCP socket (as JSON events) and forwards daemon output back
    # over XMPP.
    class Jabber < Blather::Client
      def initialize
        super
        # daemon connection; port 2013 is hard-coded
        @socket = TCPSocket.new 'localhost', 2013
        register_handler :message, :chat? do |message|
          on_message(message)
        end
        register_handler :subscription, :request? do |request|
          # auto-approve incoming subscription requests
          write request.approve!
        end
        listen
      end

      # Forward a non-empty chat message to the daemon as a JSON event.
      def on_message(message)
        if message.body && message.body != ''
          # NOTE(review): Blather's JID#strip! drops the resource part of the
          # sender JID (and mutates message.from) — confirm this is the
          # intended user identifier
          event = {:service => :command, :user => message.from.strip!.to_s, :message => message.body}
          @socket.sendmsg event.to_json
        end
      end

      # Background loop: read lines from the daemon socket and send them to
      # the recipient JID (hard-coded below) while connected.
      def listen
        jid = 'samueljamesbell@gmail.com'
        Thread.new do
          loop do
            message = @socket.gets
            write Blather::Stanza::Message.new(jid, message) if connected? && message
          end
        end
      end
    end
  end
end
Changed Jabber socket connection to Lurch to 0.0.0.0 instead of localhost
require 'json'
require 'socket'
require 'blather/client'
module Lurch
  module Clients
    # XMPP front-end: relays incoming chat messages to the Lurch daemon over
    # TCP (as JSON events) and forwards daemon output back over XMPP.
    class Jabber < Blather::Client
      def initialize
        super
        # daemon connection; address/port 0.0.0.0:2013 is hard-coded
        @socket = TCPSocket.new '0.0.0.0', 2013
        register_handler :message, :chat? do |message|
          on_message(message)
        end
        register_handler :subscription, :request? do |request|
          # auto-approve incoming subscription requests
          write request.approve!
        end
        listen
      end

      # Forward a non-empty chat message to the daemon as a JSON event.
      def on_message(message)
        if message.body && message.body != ''
          # NOTE(review): Blather's JID#strip! drops the resource part of the
          # sender JID (and mutates message.from) — confirm this is the
          # intended user identifier
          event = {:service => :command, :user => message.from.strip!.to_s, :message => message.body}
          @socket.sendmsg event.to_json
        end
      end

      # Background loop: read lines from the daemon socket and send them to
      # the recipient JID (hard-coded below) while connected.
      def listen
        jid = 'samueljamesbell@gmail.com'
        Thread.new do
          loop do
            message = @socket.gets
            write Blather::Stanza::Message.new(jid, message) if connected? && message
          end
        end
      end
    end
  end
end
|
module MaZMQ
  # Round-robin load balancer over several MaZMQ::Request (REQ) sockets.
  class LoadBalancer
    # Use the timeout window to keep sending to the remaining sockets
    def initialize(use_em=true)
      # [] rr.connect('tcp://127.0.0.1')
      # only REQ / REP pattern
      @current_message = nil
      @use_em = use_em
      #@handler = MaZMQ::HandlerPool.new(ports, @use_em)
      @sockets = []
      #ports.each do |port|
      #  socket = MaZMQ::Request.new(use_em) # TODO debugging only
      #end
      @current = available
      @timeout = nil # TODO individual timeouts for different sockets
      @state = :idle
      # @max_timeouts = 5 # TODO
      # @max_timeouted = 1
      # @max_retries
    end

    # Open a new REQ socket to the given endpoint and add it to the pool.
    def connect(protocol, address, port)
      # validate as in SocketHandler
      request = MaZMQ::Request.new(@use_em)
      request.connect(protocol, address, port)
      @sockets << request
      @current ||= available
    end

    # Apply the same timeout (in seconds) to every pooled socket.
    def timeout(secs)
      @timeout = secs
      #@handler.timeout(secs)
      @sockets.each do |s|
        s.timeout secs
      end
    end

    # Send msg on the current socket; returns false while a send is
    # already in flight. :retry re-sends the pending message.
    def send_string(msg)
      case @state
      when :idle
        @current_message = msg
        @state = :sending
        @current.send_string(@current_message)
      when :retry
        @state = :sending
        @current.send_string(@current_message)
      when :sending
        return false
      end
    end

    # Poll the current socket for a reply; on timeout, rotate to the next
    # socket and re-send the pending message (returns false meanwhile).
    def recv_string
      msg = case @current.state
            when :sending then @current.recv_string
            when :idle, :timeout then false
            end
      if @timeout and @current.state == :timeout
        rotate!
        @state = :retry
        @current.send_string @current_message
        return false
      end
      return msg
    end

    # EM only: on a socket timeout, rotate, re-send the pending message and
    # invoke the user callback.
    def on_timeout(&block)
      return false if not @use_em
      @sockets.each do |socket|
        socket.on_timeout {
          self.rotate!
          @state = :retry
          self.send_string @current_message
          block.call
        }
      end
    end

    # EM only: on a reply, rotate and hand the message to the user callback.
    def on_read(&block)
      return false if not @use_em
      @sockets.each do |socket|
        socket.on_read { |msg|
          self.rotate!
          @state = :idle
          block.call(msg)
        }
      end
    end

    # First pooled socket that is currently idle, or nil.
    def available
      @sockets.select{|s| s.state == :idle}.first
    end

    # Move the head socket to the back of the pool; when timeout is true the
    # (failing) socket is dropped from the pool instead.
    def rotate!(timeout=false)
      #@sockets.delete_at(0)
      @sockets.push(@sockets.shift)
      if timeout
        @sockets.delete_at(-1)
      end
      @current = available
      #@state = :idle
    end
  end
end
Unified working LoadBalancer that works on EM.
module MaZMQ
  # Round-robin load balancer over several MaZMQ::Request (REQ) sockets.
  # When the current socket times out, rotate to the next one and retry the
  # pending message there.
  class LoadBalancer
    def initialize(use_em=true)
      @current_message = nil
      @use_em = use_em
      @sockets = []
      @current = available
      @timeout = nil # TODO individual timeouts for different sockets
      @state = :idle
      # @max_timeouts = 5 # TODO
      # @max_timeouted = 1
      # @max_retries
    end

    # Open a new REQ socket to the given endpoint and add it to the pool.
    def connect(protocol, address, port)
      # validate as in SocketHandler
      sock = MaZMQ::Request.new(@use_em)
      sock.connect(protocol, address, port)
      @sockets << sock
      @current ||= available
    end

    # Apply the same timeout (in seconds) to every pooled socket.
    def timeout(secs)
      @timeout = secs
      @sockets.each { |sock| sock.timeout @timeout }
    end

    # Send msg on the current socket. While a send is already in flight,
    # return false; in :retry the pending message is re-sent.
    def send_string(msg)
      return false if @state == :sending
      @current_message = msg if @state == :idle
      @state = :sending
      @current.send_string(@current_message)
    end

    # Poll the current socket for a reply; false while idle or timed out.
    def recv_string
      case @current.state
      when :sending then @current.recv_string
      when :idle, :timeout then false
      end
    end

    # EM only: when a socket times out, drop it from the pool, move on and
    # re-send the pending message, then run the user callback.
    def on_timeout(&block)
      return false unless @use_em
      @sockets.each do |sock|
        sock.on_timeout {
          self.rotate!(true)
          @state = :retry
          self.send_string @current_message
          block.call
        }
      end
    end

    # EM only: when a reply arrives, rotate and pass it to the user callback.
    def on_read(&block)
      return false unless @use_em
      @sockets.each do |sock|
        sock.on_read { |msg|
          self.rotate!
          @state = :idle
          block.call(msg)
        }
      end
    end

    # First pooled socket that is currently idle, or nil.
    def available
      @sockets.find { |sock| sock.state == :idle }
    end

    # Move the head socket to the back of the pool; when timeout is true the
    # (failing) socket is removed from the pool instead.
    def rotate!(timeout=false)
      @sockets.push(@sockets.shift)
      @sockets.delete_at(-1) if timeout
      @current = available
    end
  end
end
|
module MailAddress
  # Current gem version string.
  VERSION = "1.2.2"
end
v1.2.4
module MailAddress
  # Current gem version string.
  VERSION = "1.2.4"
end
|
require 'mass_can_can/mock_rule'
require 'mass_can_can/authorizer'
module MassCanCan
  # CanCan::Ability extension that additionally records which attributes a
  # rule allows to be mass-assigned, and patches every ActiveRecord model's
  # active_authorizer to use that per-model whitelist.
  module Ability
    extend ActiveSupport::Concern
    include CanCan::Ability
    included do
      # load all the models
      Dir.glob(Rails.root + 'app/models/*.rb').each { |file| require file }
      # and define the default security
      ActiveRecord::Base.descendants.each do |c|
        c.class_eval do
          define_singleton_method(:active_authorizer) do
            # "" keeps the list non-empty; union it with whatever attributes
            # the rules whitelisted for this model (keyed by table name).
            wl = [""] | Authorizer.instance.whitelist[c.name.tableize.to_sym].to_a
            {:default => ActiveModel::MassAssignmentSecurity::WhiteList.new(wl)}
          end
        end
      end
    end

    # Same contract as CanCan's #can; additionally whitelists the rule's
    # attributes for mass assignment when the rule covers :update.
    def can(*args, &block)
      super
      rule = MockRule.new(true, *args, &block)
      if rule.attributes && rule.actions.include?(:update)
        rule.subjects.each do |subject|
          Authorizer.instance.whitelist[subject] ||= Set[]
          Authorizer.instance.whitelist[subject] |= rule.attributes.map(&:to_s).to_set
        end
      end
    end

    # Same contract as CanCan's #cannot; removes the rule's attributes from
    # the mass-assignment whitelist when the rule covers :update.
    # NOTE(review): passes true to MockRule.new exactly like #can — confirm
    # the first argument is not meant to be false (base_behavior) here.
    def cannot(*args, &block)
      super
      rule = MockRule.new(true, *args, &block)
      if rule.attributes && rule.actions.include?(:update)
        rule.subjects.each do |subject|
          Authorizer.instance.whitelist[subject] ||= Set[]
          Authorizer.instance.whitelist[subject].reject! {|a| rule.attributes.map(&:to_s).include? a}
        end
      end
    end
  end
end
Consistent (and prettier) use of space around { and }
require 'mass_can_can/mock_rule'
require 'mass_can_can/authorizer'
module MassCanCan
  # CanCan::Ability extension that additionally records which attributes a
  # rule allows to be mass-assigned, and patches every ActiveRecord model's
  # active_authorizer to use that per-model whitelist.
  module Ability
    extend ActiveSupport::Concern
    include CanCan::Ability
    included do
      # load all the models
      Dir.glob(Rails.root + 'app/models/*.rb').each { |file| require file }
      # and define the default security
      ActiveRecord::Base.descendants.each do |c|
        c.class_eval do
          define_singleton_method(:active_authorizer) do
            # "" keeps the list non-empty; union it with whatever attributes
            # the rules whitelisted for this model (keyed by table name).
            wl = [""] | Authorizer.instance.whitelist[c.name.tableize.to_sym].to_a
            { default: ActiveModel::MassAssignmentSecurity::WhiteList.new(wl) }
          end
        end
      end
    end

    # Same contract as CanCan's #can; additionally whitelists the rule's
    # attributes for mass assignment when the rule covers :update.
    def can(*args, &block)
      super
      rule = MockRule.new(true, *args, &block)
      if rule.attributes && rule.actions.include?(:update)
        rule.subjects.each do |subject|
          Authorizer.instance.whitelist[subject] ||= Set[]
          Authorizer.instance.whitelist[subject] |= rule.attributes.map(&:to_s).to_set
        end
      end
    end

    # Same contract as CanCan's #cannot; removes the rule's attributes from
    # the mass-assignment whitelist when the rule covers :update.
    # NOTE(review): passes true to MockRule.new exactly like #can — confirm
    # the first argument is not meant to be false (base_behavior) here.
    def cannot(*args, &block)
      super
      rule = MockRule.new(true, *args, &block)
      if rule.attributes && rule.actions.include?(:update)
        rule.subjects.each do |subject|
          Authorizer.instance.whitelist[subject] ||= Set[]
          Authorizer.instance.whitelist[subject].reject! { |a| rule.attributes.map(&:to_s).include?(a) }
        end
      end
    end
  end
end
|
Create a helper for materialize will paginate gem
# will_paginate link renderer producing markup styled for Materialize CSS.
class MaterializeRenderer < MaterializePagination::Rails
  # Renders one page-number item.
  # Materialize expects 'waves-effect' on every page link and 'active'
  # only on the current page.
  #
  # @param page [Integer] page number being rendered
  # @return [String] rendered <li> pagination link
  def page_number(page)
    classes = ['waves-effect', ('active' if page == current_page)].join(' ')
    # Fixed inconsistency: use Ruby 1.9 hash syntax throughout instead of
    # mixing `:rel =>` with `class:` in the same call.
    tag :li, link(page, page, rel: rel_value(page)), class: classes
  end
end
|
module MelissaData
  # Current gem version string.
  VERSION = "0.1.0"
end
Update version to 0.1.1 for release
module MelissaData
  # Current gem version string.
  VERSION = "0.1.1"
end
|
require "mina/bundler"
# Mina tasks for managing a clockworkd daemon on the deploy target.

# Paths used by clockworkd, derived from Mina's deploy settings.
set :clockwork_dir, -> { fetch(:current_path) }
set :clockwork_file, -> { "#{fetch(:current_path)}/clock.rb" }
# Daemon identifier derived from the clock file name (e.g. "clock").
# Fixed: read the setting via fetch(:clockwork_file) — every other setting
# here is read with fetch, and a bare `clockwork_file` method is not
# defined in this context.
set :clockwork_identifier, -> { File.basename(fetch(:clockwork_file), ".rb") }
set :clockwork_pid_dir, -> { "#{fetch(:shared_path)}/tmp/pids" }
set :clockwork_log_dir, -> { "#{fetch(:shared_path)}/log" }

# clockworkd invocation, honoring an optional bundler prefix when set.
set :clockworkd_cmd, -> {
  if set?(:bundle_prefix)
    "#{fetch(:bundle_prefix)} clockworkd"
  elsif set?(:bundle_bin)
    "#{fetch(:bundle_bin)} exec clockworkd"
  else
    "clockworkd"
  end
}

# Full shell command: cd into the release and pass all daemon options.
set :clockworkd_full_cmd, -> {
  %[cd #{fetch(:current_path)} && #{fetch(:clockworkd_cmd)} \\
  -c '#{fetch(:clockwork_file)}' \\
  -i '#{fetch(:clockwork_identifier)}' \\
  -d '#{fetch(:clockwork_dir)}' \\
  --pid-dir '#{fetch(:clockwork_pid_dir)}' \\
  --log --log-dir '#{fetch(:clockwork_log_dir)}']
}

namespace :clockwork do
  # mina clockwork:start
  desc "Start clockwork daemon"
  task start: :environment do
    comment "Starting clockwork daemon"
    command "#{fetch(:clockworkd_full_cmd)} start"
  end

  # mina clockwork:stop
  desc "Stop clockwork daemon"
  task stop: :environment do
    comment "Stopping clockwork daemon"
    command "#{fetch(:clockworkd_full_cmd)} stop"
  end

  # mina clockwork:restart
  desc "Restart clockwork daemon"
  task restart: :environment do
    comment "Restarting clockwork daemon"
    command "#{fetch(:clockworkd_full_cmd)} restart"
  end
end
Remove 'mina/bundler' require stmt
# Mina tasks for managing a clockworkd daemon on the deploy target.

# Paths used by clockworkd, derived from Mina's deploy settings.
set :clockwork_dir, -> { fetch(:current_path) }
set :clockwork_file, -> { "#{fetch(:current_path)}/clock.rb" }
# Daemon identifier derived from the clock file name (e.g. "clock").
# Fixed: read the setting via fetch(:clockwork_file) — every other setting
# here is read with fetch, and a bare `clockwork_file` method is not
# defined in this context.
set :clockwork_identifier, -> { File.basename(fetch(:clockwork_file), ".rb") }
set :clockwork_pid_dir, -> { "#{fetch(:shared_path)}/tmp/pids" }
set :clockwork_log_dir, -> { "#{fetch(:shared_path)}/log" }

# clockworkd invocation, honoring an optional bundler prefix when set.
set :clockworkd_cmd, -> {
  if set?(:bundle_prefix)
    "#{fetch(:bundle_prefix)} clockworkd"
  elsif set?(:bundle_bin)
    "#{fetch(:bundle_bin)} exec clockworkd"
  else
    "clockworkd"
  end
}

# Full shell command: cd into the release and pass all daemon options.
set :clockworkd_full_cmd, -> {
  %[cd #{fetch(:current_path)} && #{fetch(:clockworkd_cmd)} \\
  -c '#{fetch(:clockwork_file)}' \\
  -i '#{fetch(:clockwork_identifier)}' \\
  -d '#{fetch(:clockwork_dir)}' \\
  --pid-dir '#{fetch(:clockwork_pid_dir)}' \\
  --log --log-dir '#{fetch(:clockwork_log_dir)}']
}

namespace :clockwork do
  # mina clockwork:start
  desc "Start clockwork daemon"
  task start: :environment do
    comment "Starting clockwork daemon"
    command "#{fetch(:clockworkd_full_cmd)} start"
  end

  # mina clockwork:stop
  desc "Stop clockwork daemon"
  task stop: :environment do
    comment "Stopping clockwork daemon"
    command "#{fetch(:clockworkd_full_cmd)} stop"
  end

  # mina clockwork:restart
  desc "Restart clockwork daemon"
  task restart: :environment do
    comment "Restarting clockwork daemon"
    command "#{fetch(:clockworkd_full_cmd)} restart"
  end
end
|
module Mina
  module Unicorn
    # Current gem version string.
    VERSION = "0.0.1"
  end
end
Increment version to 0.0.2
module Mina
  module Unicorn
    # Current gem version string.
    VERSION = "0.0.2"
  end
end
|
# Utility helpers for cleaning up and formatting Rails request params.
module ParamsUtils
  # Removes the parameters Rails adds to every request.
  def self.strip_rails_defaults(params)
    params.except(:controller, :action, :utf8)
  end

  # Removes ordering parameters (plus the Rails defaults).
  def self.strip_ordering_params(params)
    params = strip_rails_defaults(params)
    params.except(:sort, :dir)
  end

  # Mutates params in place: drops empty values and date_select triples
  # that were left completely blank. Returns the mutated hash.
  def self.strip_empty(params)
    params.delete_if { |_, v| v.empty? }
    params.delete_if { |_, v| v == {"(1i)"=>"", "(2i)"=>"", "(3i)"=>""} }
  end

  # True when all three parts of a date_select triple are filled in.
  def self.is_set_date?(date)
    # Simplified: the && chain of present? checks already evaluates to
    # true or false, so the redundant if/else was removed.
    date["(1i)"].present? && date["(2i)"].present? && date["(3i)"].present?
  end

  # Strips Rails defaults, ordering params and empty values, leaving only
  # the caller's search parameters.
  def self.strip_to_search_params(params)
    params = strip_rails_defaults(params)
    params = strip_ordering_params(params)
    strip_empty(params)
  end

  # Turns a date_select params triple into a "dd Mon YYYY" string.
  def self.to_formatted_date(date_params)
    # Turns a date_select tag to a date object
    d = Date.new(
      date_params['(1i)'].to_i,
      date_params['(2i)'].to_i,
      date_params['(3i)'].to_i
    )
    d.strftime("%d %b %Y")
  end
end
Date formatting
# Utility helpers for cleaning up and formatting Rails request params.
module ParamsUtils
  # Removes the parameters Rails adds to every request.
  def self.strip_rails_defaults(params)
    params.except(:controller, :action, :utf8)
  end

  # Removes ordering parameters (plus the Rails defaults).
  def self.strip_ordering_params(params)
    params = strip_rails_defaults(params)
    params.except(:sort, :dir)
  end

  # Mutates params in place: drops empty values and date_select triples
  # that were left completely blank. Returns the mutated hash.
  def self.strip_empty(params)
    params.delete_if { |_, v| v.empty? }
    params.delete_if { |_, v| v == {"(1i)"=>"", "(2i)"=>"", "(3i)"=>""} }
  end

  # True when all three parts of a date_select triple are filled in.
  def self.is_set_date?(date)
    # Simplified: the && chain of present? checks already evaluates to
    # true or false, so the redundant if/else was removed.
    date["(1i)"].present? && date["(2i)"].present? && date["(3i)"].present?
  end

  # Strips Rails defaults, ordering params and empty values, leaving only
  # the caller's search parameters.
  def self.strip_to_search_params(params)
    params = strip_rails_defaults(params)
    params = strip_ordering_params(params)
    strip_empty(params)
  end

  # Turns a date_select params triple into a "dd/mm/yyyy" string.
  def self.to_formatted_date(date_params)
    # Turns a date_select tag to a date object
    d = Date.new(
      date_params['(1i)'].to_i,
      date_params['(2i)'].to_i,
      date_params['(3i)'].to_i
    )
    d.strftime("%d/%m/%Y")
  end
end
|
require 'bigdecimal'
module Mondrian
module OLAP
class Result
# @param connection [Object] owning OLAP connection (stored, not used here)
# @param raw_cell_set [Object] raw olap4j CellSet backing this result
def initialize(connection, raw_cell_set)
  @connection = connection
  @raw_cell_set = raw_cell_set
end

# Raw underlying olap4j CellSet.
attr_reader :raw_cell_set
# Number of axes (columns, rows, pages, ...) in this result.
def axes_count
  axes.length
end

# Member captions per axis (array of arrays; memoized).
def axis_names
  @axis_names ||= axis_positions(:getName)
end

# Fully qualified member names per axis (memoized).
def axis_full_names
  @axis_full_names ||= axis_positions(:getUniqueName)
end

# Wrapped Member objects per axis (memoized).
def axis_members
  @axis_members ||= axis_positions(:to_member)
end

AXIS_SYMBOLS = [:column, :row, :page, :section, :chapter]

# Define per-axis shortcut readers, e.g. column_names, row_full_names,
# page_members — each delegating to the corresponding axis index above.
AXIS_SYMBOLS.each_with_index do |axis, i|
  define_method :"#{axis}_names" do
    axis_names[i]
  end
  define_method :"#{axis}_full_names" do
    axis_full_names[i]
  end
  define_method :"#{axis}_members" do
    axis_members[i]
  end
end
# Raw cell values nested per axis; axes_sequence optionally reorders the
# axis traversal (axis indexes or symbols from AXIS_SYMBOL_TO_NUMBER).
def values(*axes_sequence)
  values_using(:getValue, axes_sequence)
end

# Formatted (display) cell values in the same nesting as #values.
def formatted_values(*axes_sequence)
  values_using(:getFormattedValue, axes_sequence)
end

# Shared implementation for #values / #formatted_values.
# By default axes are traversed in reverse order (rows before columns).
def values_using(values_method, axes_sequence = [])
  if axes_sequence.empty?
    axes_sequence = (0...axes_count).to_a.reverse
  elsif axes_sequence.size != axes_count
    raise ArgumentError, "axes sequence size is not equal to result axes count"
  end
  recursive_values(values_method, axes_sequence, 0)
end
# format results in simple HTML table
# Only 1-axis (columns) and 2-axis (columns x rows) results are supported;
# pass :formatted => true to render formatted cell values instead of raw.
def to_html(options = {})
  case axes_count
  when 1
    builder = Nokogiri::XML::Builder.new(:encoding => 'UTF-8') do |doc|
      doc.table do
        doc.tr do
          column_full_names.each do |column_full_name|
            # Multi-member positions arrive as arrays; join for display.
            column_full_name = column_full_name.join(',') if column_full_name.is_a?(Array)
            doc.th column_full_name, :align => 'right'
          end
        end
        doc.tr do
          (options[:formatted] ? formatted_values : values).each do |value|
            doc.td value, :align => 'right'
          end
        end
      end
    end
    builder.doc.to_html
  when 2
    builder = Nokogiri::XML::Builder.new(:encoding => 'UTF-8') do |doc|
      doc.table do
        doc.tr do
          # Empty corner cell above the row header column.
          doc.th
          column_full_names.each do |column_full_name|
            column_full_name = column_full_name.join(',') if column_full_name.is_a?(Array)
            doc.th column_full_name, :align => 'right'
          end
        end
        (options[:formatted] ? formatted_values : values).each_with_index do |row, i|
          doc.tr do
            row_full_name = row_full_names[i].is_a?(Array) ? row_full_names[i].join(',') : row_full_names[i]
            doc.th row_full_name, :align => 'left'
            row.each do |cell|
              doc.td cell, :align => 'right'
            end
          end
        end
      end
    end
    builder.doc.to_html
  else
    raise ArgumentError, "just columns and rows axes are supported"
  end
end
# Specify drill through cell position, for example, as
# :row => 0, :cell => 1
# Specify max returned rows with :max_rows parameter
# Specify returned fields (as list of MDX levels and measures) with :return parameter
# Specify measures which at least one should not be empty (NULL) with :nonempty parameter
#
# Returns a DrillThrough instance (or nil when the cell cannot be drilled
# through); native Mondrian exceptions are wrapped by Error.
def drill_through(params = {})
  Error.wrap_native_exception do
    cell_params = []
    # Translate per-axis position params into olap4j cell coordinates.
    axes_count.times do |i|
      axis_symbol = AXIS_SYMBOLS[i]
      raise ArgumentError, "missing position #{axis_symbol.inspect}" unless axis_position = params[axis_symbol]
      cell_params << Java::JavaLang::Integer.new(axis_position)
    end
    raw_cell = @raw_cell_set.getCell(cell_params)
    DrillThrough.from_raw_cell(raw_cell, params)
  end
end
class DrillThrough
# Builds a DrillThrough for a raw olap4j cell, or returns nil when the
# cell cannot be drilled through and no :return fields were requested.
#
# @param raw_cell [Object] olap4j Cell wrapping a Mondrian RolapCell
# @param params [Hash] drill-through options (see Result#drill_through)
def self.from_raw_cell(raw_cell, params = {})
  # Removed unused local: max_rows was assigned here but never used —
  # drill_through_internal reads params[:max_rows] itself.
  # workaround to avoid calling raw_cell.drillThroughInternal private method
  # which fails when running inside TorqueBox
  cell_field = raw_cell.java_class.declared_field('cell')
  cell_field.accessible = true
  rolap_cell = cell_field.value(raw_cell)
  if params[:return] || rolap_cell.canDrillThrough
    sql_statement = drill_through_internal(rolap_cell, params)
    raw_result_set = sql_statement.getWrappedResultSet
    new(raw_result_set)
  end
end
# @param raw_result_set [Object] JDBC ResultSet holding drill-through rows
def initialize(raw_result_set)
  @raw_result_set = raw_result_set
end
# JDBC column type names as symbols (memoized).
def column_types
  @column_types ||= (1..metadata.getColumnCount).map{|i| metadata.getColumnTypeName(i).to_sym}
end

# Database column names (memoized).
def column_names
  @column_names ||= begin
    # if PostgreSQL then use getBaseColumnName as getColumnName returns empty string
    if metadata.respond_to?(:getBaseColumnName)
      (1..metadata.getColumnCount).map{|i| metadata.getBaseColumnName(i)}
    else
      (1..metadata.getColumnCount).map{|i| metadata.getColumnName(i)}
    end
  end
end

# Source table name per column (memoized).
def table_names
  @table_names ||= begin
    # if PostgreSQL then use getBaseTableName as getTableName returns empty string
    if metadata.respond_to?(:getBaseTableName)
      (1..metadata.getColumnCount).map{|i| metadata.getBaseTableName(i)}
    else
      (1..metadata.getColumnCount).map{|i| metadata.getTableName(i)}
    end
  end
end

# Column labels as aliased by the drill-through SQL (memoized).
def column_labels
  @column_labels ||= (1..metadata.getColumnCount).map{|i| metadata.getColumnLabel(i)}
end
# Fetches the next row as an array of Ruby values, or nil when the result
# set is exhausted (the underlying JDBC result set is closed then).
def fetch
  if @raw_result_set.next
    row_values = []
    column_types.each_with_index do |column_type, i|
      # JDBC column indexes are 1-based.
      row_values << Result.java_to_ruby_value(@raw_result_set.getObject(i+1), column_type)
    end
    row_values
  else
    @raw_result_set.close
    nil
  end
end

# All remaining rows (memoized); consumes and closes the result set.
def rows
  @rows ||= begin
    rows_values = []
    while row_values = fetch
      rows_values << row_values
    end
    rows_values
  end
end
private

# JDBC ResultSet metadata (memoized).
# NOTE: the bare `private` above does not hide the `def self.` method
# below — Ruby's `private` only affects instance methods.
def metadata
  @metadata ||= @raw_result_set.getMetaData
end

# modified RolapCell drillThroughInternal method
# Generates the drill-through SQL and executes it through Mondrian's
# RolapUtil, honoring params[:max_rows] (-1 = unlimited).
def self.drill_through_internal(rolap_cell, params)
  max_rows = params[:max_rows] || -1
  # Read the private 'result' field of the RolapCell via reflection.
  result_field = rolap_cell.java_class.declared_field('result')
  result_field.accessible = true
  result = result_field.value(rolap_cell)
  sql = generate_drill_through_sql(rolap_cell, result, params)
  # Choose the appropriate scrollability. If we need to start from an
  # offset row, it is useful that the cursor is scrollable, but not
  # essential.
  statement = result.getExecution.getMondrianStatement
  execution = Java::MondrianServer::Execution.new(statement, 0)
  connection = statement.getMondrianConnection
  result_set_type = Java::JavaSql::ResultSet::TYPE_FORWARD_ONLY
  result_set_concurrency = Java::JavaSql::ResultSet::CONCUR_READ_ONLY
  # NOTE(review): schema and dialect below appear unused in this method
  # (generate_drill_through_sql obtains its own dialect) — candidates for
  # removal if getSchema/getDialect have no needed side effects.
  schema = statement.getSchema
  dialect = schema.getDialect
  Java::MondrianRolap::RolapUtil.executeQuery(
    connection.getDataSource,
    sql,
    nil,
    max_rows,
    -1, # firstRowOrdinal
    Java::MondrianRolap::SqlStatement::StatementLocus.new(
      execution,
      "RolapCell.drillThrough",
      "Error in drill through",
      Java::MondrianServerMonitor::SqlStatementEvent::Purpose::DRILL_THROUGH, 0
    ),
    result_set_type,
    result_set_concurrency,
    nil
  )
end
# Builds the final drill-through SQL from Mondrian's generated statements:
# parses both the non-extended and extended SQL, substitutes the requested
# :return fields into the select list, rewrites the extra dimension joins
# as left outer joins, and appends optional GROUP BY / non-empty clauses.
#
# Fixed: all parsing regexps now carry the /m flag so that generated SQL
# containing newlines (e.g. multi-line level key expressions from custom
# hierarchies) still matches instead of raising "cannot parse".
def self.generate_drill_through_sql(rolap_cell, result, params)
  nonempty_columns, return_fields = parse_return_fields(result, params)
  return_expressions = return_fields.map{|field| field[:member]}
  sql_non_extended = rolap_cell.getDrillThroughSQL(return_expressions, false)
  sql_extended = rolap_cell.getDrillThroughSQL(return_expressions, true)
  if sql_non_extended =~ /\Aselect (.*) from (.*) where (.*) order by (.*)\Z/m
    non_extended_from = $2
    non_extended_where = $3
  # the latest Mondrian version sometimes returns sql_non_extended without order by
  elsif sql_non_extended =~ /\Aselect (.*) from (.*) where (.*)\Z/m
    non_extended_from = $2
    non_extended_where = $3
  # if drill through total measure with just all members selection
  elsif sql_non_extended =~ /\Aselect (.*) from (.*)\Z/m
    non_extended_from = $2
    non_extended_where = "1 = 1" # dummy true condition
  else
    raise ArgumentError, "cannot parse drill through SQL: #{sql_non_extended}"
  end
  if sql_extended =~ /\Aselect (.*) from (.*) where (.*) order by (.*)\Z/m
    extended_select = $1
    extended_from = $2
    extended_where = $3
    extended_order_by = $4
  # if only measures are selected then there will be no order by
  elsif sql_extended =~ /\Aselect (.*) from (.*) where (.*)\Z/m
    extended_select = $1
    extended_from = $2
    extended_where = $3
    extended_order_by = ''
  else
    raise ArgumentError, "cannot parse drill through SQL: #{sql_extended}"
  end
  if return_fields.present?
    new_select_columns = []
    new_order_by_columns = []
    new_group_by_columns = []
    group_by = params[:group_by]
    return_fields.size.times do |i|
      column_alias = return_fields[i][:column_alias]
      new_select_columns << if column_expression = return_fields[i][:column_expression]
        new_order_by_columns << column_expression
        # measures are aggregated, not grouped
        new_group_by_columns << column_expression if group_by && return_fields[i][:type] != :measure
        "#{column_expression} AS #{column_alias}"
      else
        "'' AS #{column_alias}"
      end
    end
    new_select = new_select_columns.join(', ')
    new_order_by = new_order_by_columns.join(', ')
    new_group_by = new_group_by_columns.join(', ')
  else
    new_select = extended_select
    new_order_by = extended_order_by
    new_group_by = ''
  end
  new_from_parts = non_extended_from.split(/,\s*/)
  outer_join_from_parts = extended_from.split(/,\s*/) - new_from_parts
  where_parts = extended_where.split(' and ')
  # reverse outer_join_from_parts to support dimensions with several table joins
  # where join with detailed level table should be constructed first
  outer_join_from_parts.reverse.each do |part|
    part_elements = part.split(/\s+/)
    # first is original table, then optional 'as' and the last is alias
    # (removed unused local that held the table name)
    table_alias = part_elements.last
    join_conditions = where_parts.select do |where_part|
      where_part.include?(" = #{table_alias}.")
    end
    outer_join = " left outer join #{part} on (#{join_conditions.join(' and ')})"
    left_table_alias = join_conditions.first.split('.').first
    if left_table_from_part = new_from_parts.detect{|from_part| from_part.include?(left_table_alias)}
      left_table_from_part << outer_join
    else
      raise ArgumentError, "cannot extract outer join left table #{left_table_alias} in drill through SQL: #{sql_extended}"
    end
  end
  new_from = new_from_parts.join(', ')
  new_where = non_extended_where
  if nonempty_columns && !nonempty_columns.empty?
    not_null_condition = nonempty_columns.map{|c| "(#{c}) IS NOT NULL"}.join(' OR ')
    new_where += " AND (#{not_null_condition})"
  end
  sql = "select #{new_select} from #{new_from} where #{new_where}"
  sql << " group by #{new_group_by}" unless new_group_by.empty?
  sql << " order by #{new_order_by}" unless new_order_by.empty?
  sql
end
# Resolves params[:return] and params[:nonempty] into
# [nonempty_columns, return_fields]:
# * return_fields — hashes with :member_full_name, :type (:name, :property,
#   :key or :measure), :name, :member, :column_expression and :column_alias
#   used to build the drill-through SELECT list
# * nonempty_columns — SQL expressions for measures of which at least one
#   must be non-NULL
def self.parse_return_fields(result, params)
  nonempty_columns = []
  return_fields = []
  if params[:return] || params[:nonempty]
    rolap_cube = result.getCube
    schema_reader = rolap_cube.getSchemaReader
    dialect = result.getCube.getSchema.getDialect
    sql_query = Java::mondrian.rolap.sql.SqlQuery.new(dialect)
    if fields = params[:return]
      fields = fields.split(/,\s*/) if fields.is_a? String
      # Recognize Name(<level>) and Property(<level>, '<name>') wrappers.
      fields.each do |field|
        return_fields << case field
        when /\AName\((.*)\)\z/i then
          { member_full_name: $1, type: :name }
        when /\AProperty\((.*)\s*,\s*'(.*)'\)\z/i then
          { member_full_name: $1, type: :property, name: $2 }
        else
          { member_full_name: field }
        end
      end
      return_fields.size.times do | i |
        member_full_name = return_fields[i][:member_full_name]
        begin
          segment_list = Java::MondrianOlap::Util.parseIdentifier(member_full_name)
        rescue Java::JavaLang::IllegalArgumentException
          raise ArgumentError, "invalid return field #{member_full_name}"
        end
        # if this is property field then the name is initilized already
        return_fields[i][:name] ||= segment_list.to_a.last.name
        level_or_member = schema_reader.lookupCompound rolap_cube, segment_list, false, 0
        return_fields[i][:member] = level_or_member
        if level_or_member.is_a? Java::MondrianOlap::Member
          raise ArgumentError, "cannot use calculated member #{member_full_name} as return field" if level_or_member.isCalculated
        elsif !level_or_member.is_a? Java::MondrianOlap::Level
          raise ArgumentError, "return field #{member_full_name} should be level or measure"
        end
        # Build the SQL column expression depending on the field type.
        return_fields[i][:column_expression] = case return_fields[i][:type]
        when :name
          if level_or_member.respond_to? :getNameExp
            level_or_member.getNameExp.getExpression sql_query
          end
        when :property
          if property = level_or_member.getProperties.to_a.detect{|p| p.getName == return_fields[i][:name]}
            # property.getExp is a protected method therefore
            # use a workaround to get the value from the field
            f = property.java_class.declared_field("exp")
            f.accessible = true
            if column = f.value(property)
              column.getExpression sql_query
            end
          end
        else
          # No wrapper: a level uses its key expression, otherwise it is
          # treated as a measure (wrapped in its aggregator when grouping).
          if level_or_member.respond_to? :getKeyExp
            return_fields[i][:type] = :key
            level_or_member.getKeyExp.getExpression sql_query
          else
            return_fields[i][:type] = :measure
            column_expression = level_or_member.getMondrianDefExpression.getExpression sql_query
            if params[:group_by]
              level_or_member.getAggregator.getExpression column_expression
            else
              column_expression
            end
          end
        end
        column_alias = if return_fields[i][:type] == :key
          "#{return_fields[i][:name]} (Key)"
        else
          return_fields[i][:name]
        end
        return_fields[i][:column_alias] = dialect.quoteIdentifier(column_alias)
      end
    end
    if nonempty_fields = params[:nonempty]
      nonempty_fields = nonempty_fields.split(/,\s*/) if nonempty_fields.is_a?(String)
      nonempty_columns = nonempty_fields.map do |nonempty_field|
        begin
          segment_list = Java::MondrianOlap::Util.parseIdentifier(nonempty_field)
        rescue Java::JavaLang::IllegalArgumentException
          raise ArgumentError, "invalid return field #{nonempty_field}"
        end
        member = schema_reader.lookupCompound rolap_cube, segment_list, false, 0
        if member.is_a? Java::MondrianOlap::Member
          raise ArgumentError, "cannot use calculated member #{nonempty_field} as nonempty field" if member.isCalculated
          sql_query = member.getStarMeasure.getSqlQuery
          member.getStarMeasure.generateExprString(sql_query)
        else
          raise ArgumentError, "nonempty field #{nonempty_field} should be measure"
        end
      end
    end
  end
  [nonempty_columns, return_fields]
end
end
# Converts a JDBC/olap4j cell value into a plain Ruby object: numbers and
# strings pass through, Java BigDecimal becomes a Ruby BigDecimal, CLOBs
# are read into a String, and anything else is returned unchanged.
# column_type is currently unused but kept for interface compatibility.
def self.java_to_ruby_value(value, column_type = nil)
  if value.is_a?(Numeric) || value.is_a?(String)
    value
  elsif value.is_a?(Java::JavaMath::BigDecimal)
    BigDecimal(value.to_s)
  elsif value.is_a?(Java::JavaSql::Clob)
    clob_to_string(value)
  else
    value
  end
end
private

# Reads a Java CLOB into a newline-joined Ruby String (nil when the CLOB
# has no character stream). The ensure block always closes whichever
# reader was opened.
# NOTE: the bare `private` above does not hide this `def self.` method —
# Ruby's `private` only affects instance methods.
def self.clob_to_string(value)
  if reader = value.getCharacterStream
    buffered_reader = Java::JavaIo::BufferedReader.new(reader)
    result = []
    while str = buffered_reader.readLine
      result << str
    end
    result.join("\n")
  end
ensure
  if buffered_reader
    buffered_reader.close
  elsif reader
    reader.close
  end
end
# Axes of the raw cell set (memoized).
def axes
  @axes ||= @raw_cell_set.getAxes
end

# Maps every axis position through map_method (:getName, :getUniqueName,
# or :to_member to wrap members in Member objects). Single-member
# positions collapse to a scalar; multi-member positions stay arrays
# unless join_with is a string to join them with.
def axis_positions(map_method, join_with=false)
  axes.map do |axis|
    axis.getPositions.map do |position|
      names = position.getMembers.map do |member|
        if map_method == :to_member
          Member.new(member)
        else
          member.send(map_method)
        end
      end
      if names.size == 1
        names[0]
      elsif join_with
        names.join(join_with)
      else
        names
      end
    end
  end
end
# Maps the axis symbols accepted by #values(*axes_sequence) to axis indexes.
AXIS_SYMBOL_TO_NUMBER = {
  :columns => 0,
  :rows => 1,
  :pages => 2,
  :sections => 3,
  :chapters => 4
}.freeze
# Walks the axes in axes_sequence order, building nested arrays of cell
# values; at the deepest level reads the single cell at cell_params and
# converts it to a Ruby value.
def recursive_values(value_method, axes_sequence, current_index, cell_params=[])
  if axis_number = axes_sequence[current_index]
    # Axis may be given as a symbol (:columns, :rows, ...) or an index.
    axis_number = AXIS_SYMBOL_TO_NUMBER[axis_number] if axis_number.is_a?(Symbol)
    positions_size = axes[axis_number].getPositions.size
    (0...positions_size).map do |i|
      cell_params[axis_number] = Java::JavaLang::Integer.new(i)
      recursive_values(value_method, axes_sequence, current_index + 1, cell_params)
    end
  else
    self.class.java_to_ruby_value(@raw_cell_set.getCell(cell_params).send(value_method))
  end
end
end
end
end
Parse the drill-through query correctly even when it contains newlines
(with custom hierarchies, a level key expression may contain newlines)
require 'bigdecimal'
module Mondrian
module OLAP
class Result
def initialize(connection, raw_cell_set)
@connection = connection
@raw_cell_set = raw_cell_set
end
attr_reader :raw_cell_set
def axes_count
axes.length
end
def axis_names
@axis_names ||= axis_positions(:getName)
end
def axis_full_names
@axis_full_names ||= axis_positions(:getUniqueName)
end
def axis_members
@axis_members ||= axis_positions(:to_member)
end
AXIS_SYMBOLS = [:column, :row, :page, :section, :chapter]
AXIS_SYMBOLS.each_with_index do |axis, i|
define_method :"#{axis}_names" do
axis_names[i]
end
define_method :"#{axis}_full_names" do
axis_full_names[i]
end
define_method :"#{axis}_members" do
axis_members[i]
end
end
def values(*axes_sequence)
values_using(:getValue, axes_sequence)
end
def formatted_values(*axes_sequence)
values_using(:getFormattedValue, axes_sequence)
end
def values_using(values_method, axes_sequence = [])
if axes_sequence.empty?
axes_sequence = (0...axes_count).to_a.reverse
elsif axes_sequence.size != axes_count
raise ArgumentError, "axes sequence size is not equal to result axes count"
end
recursive_values(values_method, axes_sequence, 0)
end
# format results in simple HTML table
def to_html(options = {})
case axes_count
when 1
builder = Nokogiri::XML::Builder.new(:encoding => 'UTF-8') do |doc|
doc.table do
doc.tr do
column_full_names.each do |column_full_name|
column_full_name = column_full_name.join(',') if column_full_name.is_a?(Array)
doc.th column_full_name, :align => 'right'
end
end
doc.tr do
(options[:formatted] ? formatted_values : values).each do |value|
doc.td value, :align => 'right'
end
end
end
end
builder.doc.to_html
when 2
builder = Nokogiri::XML::Builder.new(:encoding => 'UTF-8') do |doc|
doc.table do
doc.tr do
doc.th
column_full_names.each do |column_full_name|
column_full_name = column_full_name.join(',') if column_full_name.is_a?(Array)
doc.th column_full_name, :align => 'right'
end
end
(options[:formatted] ? formatted_values : values).each_with_index do |row, i|
doc.tr do
row_full_name = row_full_names[i].is_a?(Array) ? row_full_names[i].join(',') : row_full_names[i]
doc.th row_full_name, :align => 'left'
row.each do |cell|
doc.td cell, :align => 'right'
end
end
end
end
end
builder.doc.to_html
else
raise ArgumentError, "just columns and rows axes are supported"
end
end
# Specify drill through cell position, for example, as
# :row => 0, :cell => 1
# Specify max returned rows with :max_rows parameter
# Specify returned fields (as list of MDX levels and measures) with :return parameter
# Specify measures which at least one should not be empty (NULL) with :nonempty parameter
def drill_through(params = {})
Error.wrap_native_exception do
cell_params = []
axes_count.times do |i|
axis_symbol = AXIS_SYMBOLS[i]
raise ArgumentError, "missing position #{axis_symbol.inspect}" unless axis_position = params[axis_symbol]
cell_params << Java::JavaLang::Integer.new(axis_position)
end
raw_cell = @raw_cell_set.getCell(cell_params)
DrillThrough.from_raw_cell(raw_cell, params)
end
end
class DrillThrough
def self.from_raw_cell(raw_cell, params = {})
max_rows = params[:max_rows] || -1
# workaround to avoid calling raw_cell.drillThroughInternal private method
# which fails when running inside TorqueBox
cell_field = raw_cell.java_class.declared_field('cell')
cell_field.accessible = true
rolap_cell = cell_field.value(raw_cell)
if params[:return] || rolap_cell.canDrillThrough
sql_statement = drill_through_internal(rolap_cell, params)
raw_result_set = sql_statement.getWrappedResultSet
new(raw_result_set)
end
end
def initialize(raw_result_set)
@raw_result_set = raw_result_set
end
def column_types
@column_types ||= (1..metadata.getColumnCount).map{|i| metadata.getColumnTypeName(i).to_sym}
end
def column_names
@column_names ||= begin
# if PostgreSQL then use getBaseColumnName as getColumnName returns empty string
if metadata.respond_to?(:getBaseColumnName)
(1..metadata.getColumnCount).map{|i| metadata.getBaseColumnName(i)}
else
(1..metadata.getColumnCount).map{|i| metadata.getColumnName(i)}
end
end
end
def table_names
@table_names ||= begin
# if PostgreSQL then use getBaseTableName as getTableName returns empty string
if metadata.respond_to?(:getBaseTableName)
(1..metadata.getColumnCount).map{|i| metadata.getBaseTableName(i)}
else
(1..metadata.getColumnCount).map{|i| metadata.getTableName(i)}
end
end
end
def column_labels
@column_labels ||= (1..metadata.getColumnCount).map{|i| metadata.getColumnLabel(i)}
end
def fetch
if @raw_result_set.next
row_values = []
column_types.each_with_index do |column_type, i|
row_values << Result.java_to_ruby_value(@raw_result_set.getObject(i+1), column_type)
end
row_values
else
@raw_result_set.close
nil
end
end
def rows
@rows ||= begin
rows_values = []
while row_values = fetch
rows_values << row_values
end
rows_values
end
end
private
def metadata
@metadata ||= @raw_result_set.getMetaData
end
# modified RolapCell drillThroughInternal method
def self.drill_through_internal(rolap_cell, params)
max_rows = params[:max_rows] || -1
result_field = rolap_cell.java_class.declared_field('result')
result_field.accessible = true
result = result_field.value(rolap_cell)
sql = generate_drill_through_sql(rolap_cell, result, params)
# Choose the appropriate scrollability. If we need to start from an
# offset row, it is useful that the cursor is scrollable, but not
# essential.
statement = result.getExecution.getMondrianStatement
execution = Java::MondrianServer::Execution.new(statement, 0)
connection = statement.getMondrianConnection
result_set_type = Java::JavaSql::ResultSet::TYPE_FORWARD_ONLY
result_set_concurrency = Java::JavaSql::ResultSet::CONCUR_READ_ONLY
schema = statement.getSchema
dialect = schema.getDialect
Java::MondrianRolap::RolapUtil.executeQuery(
connection.getDataSource,
sql,
nil,
max_rows,
-1, # firstRowOrdinal
Java::MondrianRolap::SqlStatement::StatementLocus.new(
execution,
"RolapCell.drillThrough",
"Error in drill through",
Java::MondrianServerMonitor::SqlStatementEvent::Purpose::DRILL_THROUGH, 0
),
result_set_type,
result_set_concurrency,
nil
)
end
def self.generate_drill_through_sql(rolap_cell, result, params)
nonempty_columns, return_fields = parse_return_fields(result, params)
return_expressions = return_fields.map{|field| field[:member]}
sql_non_extended = rolap_cell.getDrillThroughSQL(return_expressions, false)
sql_extended = rolap_cell.getDrillThroughSQL(return_expressions, true)
if sql_non_extended =~ /\Aselect (.*) from (.*) where (.*) order by (.*)\Z/m
non_extended_from = $2
non_extended_where = $3
# the latest Mondrian version sometimes returns sql_non_extended without order by
elsif sql_non_extended =~ /\Aselect (.*) from (.*) where (.*)\Z/m
non_extended_from = $2
non_extended_where = $3
# if drill through total measure with just all members selection
elsif sql_non_extended =~ /\Aselect (.*) from (.*)\Z/m
non_extended_from = $2
non_extended_where = "1 = 1" # dummy true condition
else
raise ArgumentError, "cannot parse drill through SQL: #{sql_non_extended}"
end
if sql_extended =~ /\Aselect (.*) from (.*) where (.*) order by (.*)\Z/m
extended_select = $1
extended_from = $2
extended_where = $3
extended_order_by = $4
# if only measures are selected then there will be no order by
elsif sql_extended =~ /\Aselect (.*) from (.*) where (.*)\Z/m
extended_select = $1
extended_from = $2
extended_where = $3
extended_order_by = ''
else
raise ArgumentError, "cannot parse drill through SQL: #{sql_extended}"
end
if return_fields.present?
new_select_columns = []
new_order_by_columns = []
new_group_by_columns = []
group_by = params[:group_by]
return_fields.size.times do |i|
column_alias = return_fields[i][:column_alias]
new_select_columns << if column_expression = return_fields[i][:column_expression]
new_order_by_columns << column_expression
new_group_by_columns << column_expression if group_by && return_fields[i][:type] != :measure
"#{column_expression} AS #{column_alias}"
else
"'' AS #{column_alias}"
end
end
new_select = new_select_columns.join(', ')
new_order_by = new_order_by_columns.join(', ')
new_group_by = new_group_by_columns.join(', ')
else
new_select = extended_select
new_order_by = extended_order_by
new_group_by = ''
end
new_from_parts = non_extended_from.split(/,\s*/)
outer_join_from_parts = extended_from.split(/,\s*/) - new_from_parts
where_parts = extended_where.split(' and ')
# reverse outer_join_from_parts to support dimensions with several table joins
# where join with detailed level table should be constructed first
outer_join_from_parts.reverse.each do |part|
part_elements = part.split(/\s+/)
# first is original table, then optional 'as' and the last is alias
table_name = part_elements.first
table_alias = part_elements.last
join_conditions = where_parts.select do |where_part|
where_part.include?(" = #{table_alias}.")
end
outer_join = " left outer join #{part} on (#{join_conditions.join(' and ')})"
left_table_alias = join_conditions.first.split('.').first
if left_table_from_part = new_from_parts.detect{|from_part| from_part.include?(left_table_alias)}
left_table_from_part << outer_join
else
raise ArgumentError, "cannot extract outer join left table #{left_table_alias} in drill through SQL: #{sql_extended}"
end
end
new_from = new_from_parts.join(', ')
new_where = non_extended_where
if nonempty_columns && !nonempty_columns.empty?
not_null_condition = nonempty_columns.map{|c| "(#{c}) IS NOT NULL"}.join(' OR ')
new_where += " AND (#{not_null_condition})"
end
sql = "select #{new_select} from #{new_from} where #{new_where}"
sql << " group by #{new_group_by}" unless new_group_by.empty?
sql << " order by #{new_order_by}" unless new_order_by.empty?
sql
end
# Parses the drill-through :return and :nonempty params into SQL column metadata.
#
# result - Mondrian Result (Java object); used to reach the cube, its
#          schema reader and the SQL dialect.
# params - options hash; honours :return (fields to select), :nonempty
#          (measure columns that must not all be NULL) and :group_by.
#
# Returns [nonempty_columns, return_fields] where nonempty_columns is an
# array of SQL expressions and return_fields is an array of hashes with
# :member_full_name, :type (:name, :property, :key or :measure), :name,
# :member, :column_expression and :column_alias keys.
def self.parse_return_fields(result, params)
  nonempty_columns = []
  return_fields = []
  if params[:return] || params[:nonempty]
    rolap_cube = result.getCube
    schema_reader = rolap_cube.getSchemaReader
    dialect = result.getCube.getSchema.getDialect
    sql_query = Java::mondrian.rolap.sql.SqlQuery.new(dialect)
    if fields = params[:return]
      fields = fields.split(/,\s*/) if fields.is_a? String
      # Classify each requested field by its syntax:
      #   Name(<member>)               -> member name column
      #   Property(<member>, '<prop>') -> member property column
      #   anything else                -> plain level or measure reference
      fields.each do |field|
        return_fields << case field
        when /\AName\((.*)\)\z/i then
          { member_full_name: $1, type: :name }
        when /\AProperty\((.*)\s*,\s*'(.*)'\)\z/i then
          { member_full_name: $1, type: :property, name: $2 }
        else
          { member_full_name: field }
        end
      end
      return_fields.size.times do | i |
        member_full_name = return_fields[i][:member_full_name]
        begin
          segment_list = Java::MondrianOlap::Util.parseIdentifier(member_full_name)
        rescue Java::JavaLang::IllegalArgumentException
          raise ArgumentError, "invalid return field #{member_full_name}"
        end
        # if this is a property field then the name is initialized already
        return_fields[i][:name] ||= segment_list.to_a.last.name
        level_or_member = schema_reader.lookupCompound rolap_cube, segment_list, false, 0
        return_fields[i][:member] = level_or_member
        # Only plain (non-calculated) members and levels map to physical
        # SQL columns; anything else cannot be drilled through.
        if level_or_member.is_a? Java::MondrianOlap::Member
          raise ArgumentError, "cannot use calculated member #{member_full_name} as return field" if level_or_member.isCalculated
        elsif !level_or_member.is_a? Java::MondrianOlap::Level
          raise ArgumentError, "return field #{member_full_name} should be level or measure"
        end
        return_fields[i][:column_expression] = case return_fields[i][:type]
        when :name
          # Level name column (only when the level defines a separate name expression).
          if level_or_member.respond_to? :getNameExp
            level_or_member.getNameExp.getExpression sql_query
          end
        when :property
          if property = level_or_member.getProperties.to_a.detect{|p| p.getName == return_fields[i][:name]}
            # property.getExp is a protected method therefore
            # use a workaround to get the value from the field
            f = property.java_class.declared_field("exp")
            f.accessible = true
            if column = f.value(property)
              column.getExpression sql_query
            end
          end
        else
          if level_or_member.respond_to? :getKeyExp
            # Level without an explicit type: select its key expression.
            return_fields[i][:type] = :key
            level_or_member.getKeyExp.getExpression sql_query
          else
            # Otherwise treat as a measure; wrap in its aggregate function
            # when the caller asked for a grouped query.
            return_fields[i][:type] = :measure
            column_expression = level_or_member.getMondrianDefExpression.getExpression sql_query
            if params[:group_by]
              level_or_member.getAggregator.getExpression column_expression
            else
              column_expression
            end
          end
        end
        # Key columns get a " (Key)" suffix to distinguish them from name columns.
        column_alias = if return_fields[i][:type] == :key
          "#{return_fields[i][:name]} (Key)"
        else
          return_fields[i][:name]
        end
        return_fields[i][:column_alias] = dialect.quoteIdentifier(column_alias)
      end
    end
    if nonempty_fields = params[:nonempty]
      nonempty_fields = nonempty_fields.split(/,\s*/) if nonempty_fields.is_a?(String)
      nonempty_columns = nonempty_fields.map do |nonempty_field|
        begin
          segment_list = Java::MondrianOlap::Util.parseIdentifier(nonempty_field)
        rescue Java::JavaLang::IllegalArgumentException
          raise ArgumentError, "invalid return field #{nonempty_field}"
        end
        member = schema_reader.lookupCompound rolap_cube, segment_list, false, 0
        # Only stored measures are valid nonempty fields; they map to their
        # star measure SQL expression.
        if member.is_a? Java::MondrianOlap::Member
          raise ArgumentError, "cannot use calculated member #{nonempty_field} as nonempty field" if member.isCalculated
          sql_query = member.getStarMeasure.getSqlQuery
          member.getStarMeasure.generateExprString(sql_query)
        else
          raise ArgumentError, "nonempty field #{nonempty_field} should be measure"
        end
      end
    end
  end
  [nonempty_columns, return_fields]
end
end
# Converts a value returned by the Java OLAP layer into a plain Ruby value.
# Ruby numerics and strings pass through untouched; java.math.BigDecimal is
# converted to Ruby BigDecimal, java.sql.Clob is read into a String, and
# anything else is returned as-is.
def self.java_to_ruby_value(value, column_type = nil)
  return value if value.is_a?(Numeric) || value.is_a?(String)
  if value.is_a?(Java::JavaMath::BigDecimal)
    BigDecimal(value.to_s)
  elsif value.is_a?(Java::JavaSql::Clob)
    clob_to_string(value)
  else
    value
  end
end
private
# Reads the full contents of a java.sql.Clob into a Ruby String.
# Returns nil when the CLOB yields no character stream.
# Lines are re-joined with "\n", so original line separators are normalized.
def self.clob_to_string(value)
  if reader = value.getCharacterStream
    buffered_reader = Java::JavaIo::BufferedReader.new(reader)
    result = []
    while str = buffered_reader.readLine
      result << str
    end
    result.join("\n")
  end
ensure
  # Close the outermost stream that was actually opened; closing the
  # BufferedReader also closes the wrapped reader.
  if buffered_reader
    buffered_reader.close
  elsif reader
    reader.close
  end
end
# Memoized axes of the underlying olap4j cell set.
def axes
  return @axes if @axes
  @axes = @raw_cell_set.getAxes
end
# Maps every position on every axis through map_method.
# :to_member wraps each raw member in a Member object; any other symbol is
# sent to the member directly. Single-member positions are unwrapped; when
# join_with is given, multi-member positions are joined into one string.
def axis_positions(map_method, join_with = false)
  convert = lambda do |member|
    map_method == :to_member ? Member.new(member) : member.send(map_method)
  end
  axes.map do |axis|
    axis.getPositions.map do |position|
      values = position.getMembers.map { |m| convert.call(m) }
      next values[0] if values.size == 1
      join_with ? values.join(join_with) : values
    end
  end
end
# Maps the symbolic axis names accepted by the public API to the
# zero-based axis numbers used by the underlying cell set
# (see recursive_values).
AXIS_SYMBOL_TO_NUMBER = {
  :columns => 0,
  :rows => 1,
  :pages => 2,
  :sections => 3,
  :chapters => 4
}.freeze
# Walks the axes listed in axes_sequence depth-first and collects cell
# values into nested arrays (one nesting level per axis).
#
# value_method  - method invoked on each cell (the value accessor).
# axes_sequence - axis numbers or symbols (see AXIS_SYMBOL_TO_NUMBER).
# current_index - position within axes_sequence handled by this call.
# cell_params   - coordinate accumulator; NOTE: the same array instance is
#                 mutated and reused across the whole recursion.
def recursive_values(value_method, axes_sequence, current_index, cell_params=[])
  if axis_number = axes_sequence[current_index]
    axis_number = AXIS_SYMBOL_TO_NUMBER[axis_number] if axis_number.is_a?(Symbol)
    positions_size = axes[axis_number].getPositions.size
    (0...positions_size).map do |i|
      # getCell expects java.lang.Integer coordinates, not Ruby integers.
      cell_params[axis_number] = Java::JavaLang::Integer.new(i)
      recursive_values(value_method, axes_sequence, current_index + 1, cell_params)
    end
  else
    # All coordinates assigned - fetch the cell and convert its value.
    self.class.java_to_ruby_value(@raw_cell_set.getCell(cell_params).send(value_method))
  end
end
end
end
end
|
require 'sdbm'
module Moneta
module Adapters
# SDBM backend
# @api public
# SDBM backend
# @api public
class SDBM
  include Defaults
  include DBMAdapter
  include IncrementSupport
  include CreateSupport
  include EachKeySupport

  # @param [Hash] options
  # @option options [String] :file Database file
  # @option options [::SDBM] :backend Use existing backend instance
  def initialize(options = {})
    backend = options[:backend]
    unless backend
      raise ArgumentError, 'Option :file is required' unless options[:file]
      backend = ::SDBM.new(options[:file])
    end
    @backend = backend
  end
end
end
end
SDBM: inherit Adapter
require 'sdbm'
module Moneta
module Adapters
# SDBM backend
# @api public
class SDBM < Adapter
  include DBMAdapter
  include IncrementSupport
  include CreateSupport
  include EachKeySupport

  # @!method initialize(options = {})
  #   @param [Hash] options
  #   @option options [String] :file Database file
  #   @option options [::SDBM] :backend Use existing backend instance
  # NOTE(review): `backend` is a class-level DSL macro inherited from
  # Adapter; it presumably declares how to build the default backend from
  # the required :file option when no :backend instance is supplied —
  # confirm against Adapter's implementation.
  backend { |file:| ::SDBM.new(file) }
end
end
end
|
# Copyright (C) 2014-2020 MongoDB Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Mongo
module Grid
# Represents a view of the GridFS in the database.
#
# @since 2.0.0
class FSBucket
extend Forwardable
# The default root prefix.
#
# @since 2.0.0
DEFAULT_ROOT = 'fs'.freeze
# The specification for the chunks collection index.
#
# @since 2.0.0
CHUNKS_INDEX = { :files_id => 1, :n => 1 }.freeze
# The specification for the files collection index.
#
# @since 2.1.0
FILES_INDEX = { filename: 1, uploadDate: 1 }.freeze
# @return [ Collection ] chunks_collection The chunks collection.
#
# @since 2.0.0
attr_reader :chunks_collection
# @return [ Database ] database The database.
#
# @since 2.0.0
attr_reader :database
# @return [ Collection ] files_collection The files collection.
#
# @since 2.0.0
attr_reader :files_collection
# @return [ Hash ] options The FSBucket options.
#
# @since 2.1.0
attr_reader :options
# Get client from the database.
#
# @since 2.1.0
def_delegators :database,
:client
# Find files collection documents matching a given selector.
#
# @example Find files collection documents by a filename.
# fs.find(filename: 'file.txt')
#
# @param [ Hash ] selector The selector to use in the find.
# @param [ Hash ] options The options for the find.
#
# @option options [ Integer ] :batch_size The number of documents returned in each batch
# of results from MongoDB.
# @option options [ Integer ] :limit The max number of docs to return from the query.
# @option options [ true, false ] :no_cursor_timeout The server normally times out idle
# cursors after an inactivity period (10 minutes) to prevent excess memory use.
# Set this option to prevent that.
# @option options [ Integer ] :skip The number of docs to skip before returning results.
# @option options [ Hash ] :sort The key and direction pairs by which the result set
# will be sorted.
#
# @return [ CollectionView ] The collection view.
#
# @since 2.1.0
# Runs a query against the files collection, propagating the bucket's
# read preference (when one is configured) into the find options.
def find(selector = nil, options = {})
  if read_preference
    files_collection.find(selector, options.merge(read: read_preference))
  else
    files_collection.find(selector, options)
  end
end
# Find a file in the GridFS.
#
# @example Find a file by its id.
# fs.find_one(_id: id)
#
# @example Find a file by its filename.
# fs.find_one(filename: 'test.txt')
#
# @param [ Hash ] selector The selector.
#
# @return [ Grid::File ] The file.
#
# @since 2.0.0
#
# @deprecated Please use #find instead with a limit of -1.
# Will be removed in version 3.0.
def find_one(selector = nil)
  # Fetch the file information document first; bail out when nothing matches.
  file_info = files_collection.find(selector).first
  return nil unless file_info
  # Load the chunks in sequence order (:n) and rebuild the Grid::File with
  # its info keys mapped back to driver option names.
  chunks = chunks_collection.find(:files_id => file_info[:_id]).sort(:n => 1)
  Grid::File.new(chunks.to_a, Options::Mapper.transform(file_info, Grid::File::Info::MAPPINGS.invert))
end
# Insert a single file into the GridFS.
#
# @example Insert a single file.
# fs.insert_one(file)
#
# @param [ Grid::File ] file The file to insert.
#
# @return [ BSON::ObjectId ] The file id.
#
# @since 2.0.0
#
# @deprecated Please use #upload_from_stream or #open_upload_stream instead.
# Will be removed in version 3.0.
def insert_one(file)
  # NOTE(review): ensure_indexes! returns nil when the files collection is
  # non-empty, so @indexes stays nil and the check re-runs on every insert.
  @indexes ||= ensure_indexes!
  # Chunks are written before the files document so the file only becomes
  # visible once its data is in place.
  chunks_collection.insert_many(file.chunks)
  files_collection.insert_one(file.info)
  file.id
end
# Create the GridFS.
#
# @example Create the GridFS.
# Grid::FSBucket.new(database)
#
# @param [ Database ] database The database the files reside in.
# @param [ Hash ] options The GridFS options.
#
# @option options [ String ] :fs_name The prefix for the files and chunks
# collections.
# @option options [ String ] :bucket_name The prefix for the files and chunks
# collections.
# @option options [ Integer ] :chunk_size Override the default chunk
# size.
# @option options [ String ] :read The read preference.
# @option options [ Session ] :session The session to use.
# @option options [ Hash ] :write Deprecated. Equivalent to :write_concern
# option.
# @option options [ Hash ] :write_concern The write concern options.
# Can be :w => Integer|String, :fsync => Boolean, :j => Boolean.
#
# @since 2.0.0
def initialize(database, options = {})
@database = database
@options = options.dup
=begin WriteConcern object support
if @options[:write_concern].is_a?(WriteConcern::Base)
# Cache the instance so that we do not needlessly reconstruct it.
@write_concern = @options[:write_concern]
@options[:write_concern] = @write_concern.options
end
=end
@options.freeze
@chunks_collection = database[chunks_name]
@files_collection = database[files_name]
end
# Get the prefix for the GridFS
#
# @example Get the prefix.
# fs.prefix
#
# @return [ String ] The GridFS prefix.
#
# @since 2.0.0
# Resolve the collection-name prefix: an explicit :fs_name wins, then
# :bucket_name, then the GridFS default root.
def prefix
  configured = @options[:fs_name] || @options[:bucket_name]
  configured || DEFAULT_ROOT
end
# Remove a single file from the GridFS.
#
# @example Remove a file from the GridFS.
# fs.delete_one(file)
#
# @param [ Grid::File ] file The file to remove.
#
# @return [ Result ] The result of the remove.
#
# @since 2.0.0
# Convenience wrapper: delegates to #delete using the file's id.
def delete_one(file)
  delete(file.id)
end
# Remove a single file, identified by its id from the GridFS.
#
# @example Remove a file from the GridFS.
# fs.delete(id)
#
# @param [ BSON::ObjectId, Object ] id The id of the file to remove.
#
# @return [ Result ] The result of the remove.
#
# @raise [ Error::FileNotFound ] If the file is not found.
#
# @since 2.1.0
def delete(id)
  # Delete the files document first, then the chunks. Chunks are removed
  # even when no files document matched, cleaning up any orphans for this id.
  result = files_collection.find({ :_id => id }, @options).delete_one
  chunks_collection.find({ :files_id => id }, @options).delete_many
  # Raise only after the cleanup so orphaned chunks are still removed.
  raise Error::FileNotFound.new(id, :id) if result.n == 0
  result
end
# Opens a stream from which a file can be downloaded, specified by id.
#
# @example Open a stream from which a file can be downloaded.
# fs.open_download_stream(id)
#
# @param [ BSON::ObjectId, Object ] id The id of the file to read.
# @param [ Hash ] options The options.
#
# @option options [ BSON::Document ] :file_info_doc For internal
# driver use only. A BSON document to use as file information.
#
# @return [ Stream::Read ] The stream to read from.
#
# @yieldparam [ Hash ] The read stream.
#
# @since 2.1.0
# Build the read stream; when a block is given, hand the stream to it and
# guarantee the stream is closed afterwards. Always returns the stream.
def open_download_stream(id, options = nil)
  stream = read_stream(id, options)
  if block_given?
    begin
      yield stream
    ensure
      stream.close
    end
  end
  stream
end
# Downloads the contents of the file specified by id and writes them to
# the destination io object.
#
# @example Download the file and write it to the io object.
# fs.download_to_stream(id, io)
#
# @param [ BSON::ObjectId, Object ] id The id of the file to read.
# @param [ IO ] io The io object to write to.
#
# @since 2.1.0
# Streams every chunk of the stored file into the destination io object,
# closing the download stream when done.
def download_to_stream(id, io)
  open_download_stream(id) do |stream|
    stream.each { |chunk| io << chunk }
  end
end
# Opens a stream from which the application can read the contents of the stored file
# specified by filename and the revision in options.
#
# Revision numbers are defined as follows:
# 0 = the original stored file
# 1 = the first revision
# 2 = the second revision
# etc…
# -2 = the second most recent revision
# -1 = the most recent revision
#
# @example Open a stream to download the most recent revision.
# fs.open_download_stream_by_name('some-file.txt')
#
# # @example Open a stream to download the original file.
# fs.open_download_stream_by_name('some-file.txt', revision: 0)
#
# @example Open a stream to download the second revision of the stored file.
# fs.open_download_stream_by_name('some-file.txt', revision: 2)
#
# @param [ String ] filename The file's name.
# @param [ Hash ] opts Options for the download.
#
# @option opts [ Integer ] :revision The revision number of the file to download.
# Defaults to -1, the most recent version.
#
# @return [ Stream::Read ] The stream to read from.
#
# @raise [ Error::FileNotFound ] If the file is not found.
# @raise [ Error::InvalidFileRevision ] If the requested revision is not found for the file.
#
# @yieldparam [ Hash ] The read stream.
#
# @since 2.1.0
def open_download_stream_by_name(filename, opts = {}, &block)
  revision = opts.fetch(:revision, -1)
  # Negative revisions count back from the newest upload (-1 == newest);
  # non-negative revisions count forward from the original file (0 == first).
  if revision < 0
    skip = revision.abs - 1
    sort = { 'uploadDate' => Mongo::Index::DESCENDING }
  else
    skip = revision
    sort = { 'uploadDate' => Mongo::Index::ASCENDING }
  end
  file_info_doc = files_collection.find({ filename: filename} ,
    sort: sort,
    skip: skip,
    limit: -1).first
  unless file_info_doc
    # Distinguish "no such file" (no revision requested) from
    # "file exists but not at that revision".
    raise Error::FileNotFound.new(filename, :filename) unless opts[:revision]
    raise Error::InvalidFileRevision.new(filename, opts[:revision])
  end
  open_download_stream(file_info_doc[:_id], file_info_doc: file_info_doc, &block)
end
# Downloads the contents of the stored file specified by filename and by the
# revision in options and writes the contents to the destination io object.
#
# Revision numbers are defined as follows:
# 0 = the original stored file
# 1 = the first revision
# 2 = the second revision
# etc…
# -2 = the second most recent revision
# -1 = the most recent revision
#
# @example Download the most recent revision.
# fs.download_to_stream_by_name('some-file.txt', io)
#
# # @example Download the original file.
# fs.download_to_stream_by_name('some-file.txt', io, revision: 0)
#
# @example Download the second revision of the stored file.
# fs.download_to_stream_by_name('some-file.txt', io, revision: 2)
#
# @param [ String ] filename The file's name.
# @param [ IO ] io The io object to write to.
# @param [ Hash ] opts Options for the download.
#
# @option opts [ Integer ] :revision The revision number of the file to download.
# Defaults to -1, the most recent version.
#
# @raise [ Error::FileNotFound ] If the file is not found.
# @raise [ Error::InvalidFileRevision ] If the requested revision is not found for the file.
#
# @since 2.1.0
def download_to_stream_by_name(filename, io, opts = {})
  # Resolve the requested revision to its file id, then stream it to io.
  # NOTE(review): the lookup stream returned by open_download_stream_by_name
  # is not explicitly closed here — confirm Stream::Read holds no server
  # resources before the first read.
  download_to_stream(open_download_stream_by_name(filename, opts).file_id, io)
end
# Opens an upload stream to GridFS to which the contents of a user file came be written.
#
# @example Open a stream to which the contents of a file came be written.
# fs.open_upload_stream('a-file.txt')
#
# @param [ String ] filename The filename of the file to upload.
# @param [ Hash ] opts The options for the write stream.
#
# @option opts [ Object ] :file_id An optional unique file id. An ObjectId is generated otherwise.
# @option opts [ Integer ] :chunk_size Override the default chunk size.
# @option opts [ Hash ] :metadata User data for the 'metadata' field of the files
# collection document.
# @option opts [ String ] :content_type The content type of the file.
# Deprecated, please use the metadata document instead.
# @option opts [ Array<String> ] :aliases A list of aliases.
# Deprecated, please use the metadata document instead.
# @option options [ Hash ] :write Deprecated. Equivalent to :write_concern
# option.
# @option options [ Hash ] :write_concern The write concern options.
# Can be :w => Integer|String, :fsync => Boolean, :j => Boolean.
#
# @return [ Stream::Write ] The write stream.
#
# @yieldparam [ Hash ] The write stream.
#
# @since 2.1.0
# Build the write stream; when a block is given, hand the stream to it and
# guarantee the stream is closed (finalizing the upload) afterwards.
# Always returns the stream.
def open_upload_stream(filename, opts = {})
  stream = write_stream(filename, opts)
  if block_given?
    begin
      yield stream
    ensure
      stream.close
    end
  end
  stream
end
# Uploads a user file to a GridFS bucket.
# Reads the contents of the user file from the source stream and uploads it as chunks in the
# chunks collection. After all the chunks have been uploaded, it creates a files collection
# document for the filename in the files collection.
#
# @example Upload a file to the GridFS bucket.
# fs.upload_from_stream('a-file.txt', file)
#
# @param [ String ] filename The filename of the file to upload.
# @param [ IO ] io The source io stream to upload from.
# @param [ Hash ] opts The options for the write stream.
#
# @option opts [ Object ] :file_id An optional unique file id. An ObjectId is generated otherwise.
# @option opts [ Integer ] :chunk_size Override the default chunk size.
# @option opts [ Hash ] :metadata User data for the 'metadata' field of the files
# collection document.
# @option opts [ String ] :content_type The content type of the file. Deprecated, please
# use the metadata document instead.
# @option opts [ Array<String> ] :aliases A list of aliases. Deprecated, please use the
# metadata document instead.
# @option options [ Hash ] :write Deprecated. Equivalent to :write_concern
# option.
# @option options [ Hash ] :write_concern The write concern options.
# Can be :w => Integer|String, :fsync => Boolean, :j => Boolean.
#
# @return [ BSON::ObjectId ] The ObjectId file id.
#
# @since 2.1.0
# Uploads the contents of io as chunks, aborting the upload (removing any
# chunks already written) when reading from io or writing to MongoDB fails.
# Returns the file id of the completed upload.
def upload_from_stream(filename, io, opts = {})
  open_upload_stream(filename, opts) do |stream|
    begin
      stream.write(io)
    # IOError and SystemCallError are for errors reading the io.
    # Error::SocketError and Error::SocketTimeoutError are for
    # writing to MongoDB. Previously only IOError was rescued, so other
    # failures left orphaned chunks behind without calling stream.abort.
    rescue IOError, SystemCallError, Error::SocketError, Error::SocketTimeoutError
      # Best-effort cleanup; ignore abort failures so the original
      # error is the one that propagates to the caller.
      begin
        stream.abort
      rescue Error::OperationFailure
      end
      raise
    end
  end.file_id
end
# Get the read preference.
#
# @example Get the read preference.
# fs.read_preference
#
# @return [ Mongo::ServerSelector ] The read preference.
#
# @since 2.1.0
# The read preference for this bucket: the :read option when given,
# otherwise the database's read preference.
# NOTE(review): memoized with ||=, so a falsy result would be recomputed
# on every call.
def read_preference
  @read_preference ||= options[:read] || database.read_preference
end
# Get the write concern.
#
# @example Get the write concern.
# stream.write_concern
#
# @return [ Mongo::WriteConcern ] The write concern.
#
# @since 2.1.0
# The write concern for this bucket, memoized: built from the
# :write_concern option (or the deprecated :write option) when present,
# otherwise inherited from the database.
def write_concern
  return @write_concern if @write_concern
  wco = @options[:write_concern] || @options[:write]
  @write_concern = wco ? WriteConcern.get(wco) : database.write_concern
end
private
# @param [ Hash ] opts The options.
#
# @option opts [ BSON::Document ] :file_info_doc For internal
# driver use only. A BSON document to use as file information.
# Build a Stream::Read for the given file id. Bucket options are applied
# first, then per-call opts (e.g. :file_info_doc) override them.
def read_stream(id, opts = nil)
  Stream.get(self, Stream::READ_MODE, { file_id: id }.update(options).update(opts || {}))
end
# Build a Stream::Write for the given filename, merging bucket options
# with per-call opts (opts take precedence).
def write_stream(filename, opts)
  Stream.get(self, Stream::WRITE_MODE, { filename: filename }.merge!(options).merge!(opts))
end
# Name of the chunks collection, e.g. "fs.chunks".
def chunks_name
  "#{prefix}.#{Grid::File::Chunk::COLLECTION}"
end
# Name of the files collection, e.g. "fs.files".
def files_name
  "#{prefix}.#{Grid::File::Info::COLLECTION}"
end
# Create the GridFS indexes when the files collection looks empty.
# An empty files collection is used as a cheap proxy for "bucket not yet
# initialized"; when documents already exist the indexes are assumed to
# be in place.
def ensure_indexes!
  if files_collection.find({}, limit: 1, projection: { _id: 1 }).first.nil?
    chunks_collection.indexes.create_one(FSBucket::CHUNKS_INDEX, :unique => true)
    files_collection.indexes.create_one(FSBucket::FILES_INDEX)
  end
end
end
end
end
RUBY-2230 Rescue write errors in GridFS for cleanup purposes in addition to read errors (#1882)
Co-authored-by: Oleg Pudeyev <20b70f0af00562e63758b9ee42012ecc96c58590@bsdpower.com>
# Copyright (C) 2014-2020 MongoDB Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Mongo
module Grid
# Represents a view of the GridFS in the database.
#
# @since 2.0.0
class FSBucket
extend Forwardable
# The default root prefix.
#
# @since 2.0.0
DEFAULT_ROOT = 'fs'.freeze
# The specification for the chunks collection index.
#
# @since 2.0.0
CHUNKS_INDEX = { :files_id => 1, :n => 1 }.freeze
# The specification for the files collection index.
#
# @since 2.1.0
FILES_INDEX = { filename: 1, uploadDate: 1 }.freeze
# @return [ Collection ] chunks_collection The chunks collection.
#
# @since 2.0.0
attr_reader :chunks_collection
# @return [ Database ] database The database.
#
# @since 2.0.0
attr_reader :database
# @return [ Collection ] files_collection The files collection.
#
# @since 2.0.0
attr_reader :files_collection
# @return [ Hash ] options The FSBucket options.
#
# @since 2.1.0
attr_reader :options
# Get client from the database.
#
# @since 2.1.0
def_delegators :database,
:client
# Find files collection documents matching a given selector.
#
# @example Find files collection documents by a filename.
# fs.find(filename: 'file.txt')
#
# @param [ Hash ] selector The selector to use in the find.
# @param [ Hash ] options The options for the find.
#
# @option options [ Integer ] :batch_size The number of documents returned in each batch
# of results from MongoDB.
# @option options [ Integer ] :limit The max number of docs to return from the query.
# @option options [ true, false ] :no_cursor_timeout The server normally times out idle
# cursors after an inactivity period (10 minutes) to prevent excess memory use.
# Set this option to prevent that.
# @option options [ Integer ] :skip The number of docs to skip before returning results.
# @option options [ Hash ] :sort The key and direction pairs by which the result set
# will be sorted.
#
# @return [ CollectionView ] The collection view.
#
# @since 2.1.0
# Runs a query against the files collection, propagating the bucket's
# read preference (when one is configured) into the find options.
def find(selector = nil, options = {})
  if read_preference
    files_collection.find(selector, options.merge(read: read_preference))
  else
    files_collection.find(selector, options)
  end
end
# Find a file in the GridFS.
#
# @example Find a file by its id.
# fs.find_one(_id: id)
#
# @example Find a file by its filename.
# fs.find_one(filename: 'test.txt')
#
# @param [ Hash ] selector The selector.
#
# @return [ Grid::File ] The file.
#
# @since 2.0.0
#
# @deprecated Please use #find instead with a limit of -1.
# Will be removed in version 3.0.
def find_one(selector = nil)
  # Fetch the file information document first; bail out when nothing matches.
  file_info = files_collection.find(selector).first
  return nil unless file_info
  # Load the chunks in sequence order (:n) and rebuild the Grid::File.
  chunks = chunks_collection.find(:files_id => file_info[:_id]).sort(:n => 1)
  Grid::File.new(chunks.to_a, Options::Mapper.transform(file_info, Grid::File::Info::MAPPINGS.invert))
end
# Insert a single file into the GridFS.
#
# @example Insert a single file.
# fs.insert_one(file)
#
# @param [ Grid::File ] file The file to insert.
#
# @return [ BSON::ObjectId ] The file id.
#
# @since 2.0.0
#
# @deprecated Please use #upload_from_stream or #open_upload_stream instead.
# Will be removed in version 3.0.
def insert_one(file)
  # NOTE(review): ensure_indexes! returns nil when the files collection is
  # non-empty, so @indexes stays nil and the check re-runs on every insert.
  @indexes ||= ensure_indexes!
  # Chunks are written before the files document.
  chunks_collection.insert_many(file.chunks)
  files_collection.insert_one(file.info)
  file.id
end
# Create the GridFS.
#
# @example Create the GridFS.
# Grid::FSBucket.new(database)
#
# @param [ Database ] database The database the files reside in.
# @param [ Hash ] options The GridFS options.
#
# @option options [ String ] :fs_name The prefix for the files and chunks
# collections.
# @option options [ String ] :bucket_name The prefix for the files and chunks
# collections.
# @option options [ Integer ] :chunk_size Override the default chunk
# size.
# @option options [ String ] :read The read preference.
# @option options [ Session ] :session The session to use.
# @option options [ Hash ] :write Deprecated. Equivalent to :write_concern
# option.
# @option options [ Hash ] :write_concern The write concern options.
# Can be :w => Integer|String, :fsync => Boolean, :j => Boolean.
#
# @since 2.0.0
def initialize(database, options = {})
@database = database
@options = options.dup
=begin WriteConcern object support
if @options[:write_concern].is_a?(WriteConcern::Base)
# Cache the instance so that we do not needlessly reconstruct it.
@write_concern = @options[:write_concern]
@options[:write_concern] = @write_concern.options
end
=end
@options.freeze
@chunks_collection = database[chunks_name]
@files_collection = database[files_name]
end
# Get the prefix for the GridFS
#
# @example Get the prefix.
# fs.prefix
#
# @return [ String ] The GridFS prefix.
#
# @since 2.0.0
# Resolve the collection-name prefix: an explicit :fs_name wins, then
# :bucket_name, then the GridFS default root.
def prefix
  configured = @options[:fs_name] || @options[:bucket_name]
  configured || DEFAULT_ROOT
end
# Remove a single file from the GridFS.
#
# @example Remove a file from the GridFS.
# fs.delete_one(file)
#
# @param [ Grid::File ] file The file to remove.
#
# @return [ Result ] The result of the remove.
#
# @since 2.0.0
# Convenience wrapper: delegates to #delete using the file's id.
def delete_one(file)
  delete(file.id)
end
# Remove a single file, identified by its id from the GridFS.
#
# @example Remove a file from the GridFS.
# fs.delete(id)
#
# @param [ BSON::ObjectId, Object ] id The id of the file to remove.
#
# @return [ Result ] The result of the remove.
#
# @raise [ Error::FileNotFound ] If the file is not found.
#
# @since 2.1.0
def delete(id)
  # Delete the files document first, then the chunks. Chunks are removed
  # even when no files document matched, cleaning up any orphans for this id.
  result = files_collection.find({ :_id => id }, @options).delete_one
  chunks_collection.find({ :files_id => id }, @options).delete_many
  # Raise only after the cleanup so orphaned chunks are still removed.
  raise Error::FileNotFound.new(id, :id) if result.n == 0
  result
end
# Opens a stream from which a file can be downloaded, specified by id.
#
# @example Open a stream from which a file can be downloaded.
# fs.open_download_stream(id)
#
# @param [ BSON::ObjectId, Object ] id The id of the file to read.
# @param [ Hash ] options The options.
#
# @option options [ BSON::Document ] :file_info_doc For internal
# driver use only. A BSON document to use as file information.
#
# @return [ Stream::Read ] The stream to read from.
#
# @yieldparam [ Hash ] The read stream.
#
# @since 2.1.0
# Build the read stream; when a block is given, hand the stream to it and
# guarantee the stream is closed afterwards. Always returns the stream.
def open_download_stream(id, options = nil)
  stream = read_stream(id, options)
  if block_given?
    begin
      yield stream
    ensure
      stream.close
    end
  end
  stream
end
# Downloads the contents of the file specified by id and writes them to
# the destination io object.
#
# @example Download the file and write it to the io object.
# fs.download_to_stream(id, io)
#
# @param [ BSON::ObjectId, Object ] id The id of the file to read.
# @param [ IO ] io The io object to write to.
#
# @since 2.1.0
# Streams every chunk of the stored file into the destination io object,
# closing the download stream when done.
def download_to_stream(id, io)
  open_download_stream(id) do |stream|
    stream.each { |chunk| io << chunk }
  end
end
# Opens a stream from which the application can read the contents of the stored file
# specified by filename and the revision in options.
#
# Revision numbers are defined as follows:
# 0 = the original stored file
# 1 = the first revision
# 2 = the second revision
# etc…
# -2 = the second most recent revision
# -1 = the most recent revision
#
# @example Open a stream to download the most recent revision.
# fs.open_download_stream_by_name('some-file.txt')
#
# # @example Open a stream to download the original file.
# fs.open_download_stream_by_name('some-file.txt', revision: 0)
#
# @example Open a stream to download the second revision of the stored file.
# fs.open_download_stream_by_name('some-file.txt', revision: 2)
#
# @param [ String ] filename The file's name.
# @param [ Hash ] opts Options for the download.
#
# @option opts [ Integer ] :revision The revision number of the file to download.
# Defaults to -1, the most recent version.
#
# @return [ Stream::Read ] The stream to read from.
#
# @raise [ Error::FileNotFound ] If the file is not found.
# @raise [ Error::InvalidFileRevision ] If the requested revision is not found for the file.
#
# @yieldparam [ Hash ] The read stream.
#
# @since 2.1.0
def open_download_stream_by_name(filename, opts = {}, &block)
  revision = opts.fetch(:revision, -1)
  # Negative revisions count back from the newest upload (-1 == newest);
  # non-negative revisions count forward from the original file (0 == first).
  if revision < 0
    skip = revision.abs - 1
    sort = { 'uploadDate' => Mongo::Index::DESCENDING }
  else
    skip = revision
    sort = { 'uploadDate' => Mongo::Index::ASCENDING }
  end
  file_info_doc = files_collection.find({ filename: filename} ,
    sort: sort,
    skip: skip,
    limit: -1).first
  unless file_info_doc
    # Distinguish "no such file" (no revision requested) from
    # "file exists but not at that revision".
    raise Error::FileNotFound.new(filename, :filename) unless opts[:revision]
    raise Error::InvalidFileRevision.new(filename, opts[:revision])
  end
  open_download_stream(file_info_doc[:_id], file_info_doc: file_info_doc, &block)
end
# Downloads the contents of the stored file specified by filename and by the
# revision in options and writes the contents to the destination io object.
#
# Revision numbers are defined as follows:
# 0 = the original stored file
# 1 = the first revision
# 2 = the second revision
# etc…
# -2 = the second most recent revision
# -1 = the most recent revision
#
# @example Download the most recent revision.
# fs.download_to_stream_by_name('some-file.txt', io)
#
# # @example Download the original file.
# fs.download_to_stream_by_name('some-file.txt', io, revision: 0)
#
# @example Download the second revision of the stored file.
# fs.download_to_stream_by_name('some-file.txt', io, revision: 2)
#
# @param [ String ] filename The file's name.
# @param [ IO ] io The io object to write to.
# @param [ Hash ] opts Options for the download.
#
# @option opts [ Integer ] :revision The revision number of the file to download.
# Defaults to -1, the most recent version.
#
# @raise [ Error::FileNotFound ] If the file is not found.
# @raise [ Error::InvalidFileRevision ] If the requested revision is not found for the file.
#
# @since 2.1.0
def download_to_stream_by_name(filename, io, opts = {})
  # Resolve the requested revision to its file id, then stream it to io.
  # NOTE(review): the lookup stream returned by open_download_stream_by_name
  # is not explicitly closed here — confirm Stream::Read holds no server
  # resources before the first read.
  download_to_stream(open_download_stream_by_name(filename, opts).file_id, io)
end
# Opens an upload stream to GridFS to which the contents of a user file came be written.
#
# @example Open a stream to which the contents of a file came be written.
# fs.open_upload_stream('a-file.txt')
#
# @param [ String ] filename The filename of the file to upload.
# @param [ Hash ] opts The options for the write stream.
#
# @option opts [ Object ] :file_id An optional unique file id. An ObjectId is generated otherwise.
# @option opts [ Integer ] :chunk_size Override the default chunk size.
# @option opts [ Hash ] :metadata User data for the 'metadata' field of the files
# collection document.
# @option opts [ String ] :content_type The content type of the file.
# Deprecated, please use the metadata document instead.
# @option opts [ Array<String> ] :aliases A list of aliases.
# Deprecated, please use the metadata document instead.
# @option options [ Hash ] :write Deprecated. Equivalent to :write_concern
# option.
# @option options [ Hash ] :write_concern The write concern options.
# Can be :w => Integer|String, :fsync => Boolean, :j => Boolean.
#
# @return [ Stream::Write ] The write stream.
#
# @yieldparam [ Hash ] The write stream.
#
# @since 2.1.0
def open_upload_stream(filename, opts = {})
  stream = write_stream(filename, opts)
  # When given a block, yield the stream and guarantee it is closed even if
  # the block raises; the stream itself is still the return value.
  if block_given?
    begin
      yield stream
    ensure
      stream.close
    end
  end
  stream
end
# Uploads a user file to a GridFS bucket.
# Reads the contents of the user file from the source stream and uploads it as chunks in the
# chunks collection. After all the chunks have been uploaded, it creates a files collection
# document for the filename in the files collection.
#
# @example Upload a file to the GridFS bucket.
# fs.upload_from_stream('a-file.txt', file)
#
# @param [ String ] filename The filename of the file to upload.
# @param [ IO ] io The source io stream to upload from.
# @param [ Hash ] opts The options for the write stream.
#
# @option opts [ Object ] :file_id An optional unique file id. An ObjectId is generated otherwise.
# @option opts [ Integer ] :chunk_size Override the default chunk size.
# @option opts [ Hash ] :metadata User data for the 'metadata' field of the files
# collection document.
# @option opts [ String ] :content_type The content type of the file. Deprecated, please
# use the metadata document instead.
# @option opts [ Array<String> ] :aliases A list of aliases. Deprecated, please use the
# metadata document instead.
# @option options [ Hash ] :write Deprecated. Equivalent to :write_concern
# option.
# @option options [ Hash ] :write_concern The write concern options.
# Can be :w => Integer|String, :fsync => Boolean, :j => Boolean.
#
# @return [ BSON::ObjectId ] The ObjectId file id.
#
# @since 2.1.0
def upload_from_stream(filename, io, opts = {})
  # Pipe the whole source io through a fresh upload stream; the stream's
  # file_id (returned below) identifies the new files-collection document.
  open_upload_stream(filename, opts) do |stream|
    begin
      stream.write(io)
      # IOError and SystemCallError are for errors reading the io.
      # Error::SocketError and Error::SocketTimeoutError are for
      # writing to MongoDB.
    rescue IOError, SystemCallError, Error::SocketError, Error::SocketTimeoutError
      # Best-effort cleanup of any chunks already written; a failed abort
      # must not mask the original error, so OperationFailure is swallowed.
      begin
        stream.abort
      rescue Error::OperationFailure
      end
      # Re-raise the original read/write exception to the caller.
      raise
    end
  end.file_id
end
# Get the read preference.
#
# @example Get the read preference.
# fs.read_preference
#
# @return [ Mongo::ServerSelector ] The read preference.
#
# @since 2.1.0
def read_preference
  # Memoized: prefer the bucket's :read option, otherwise fall back to the
  # database-level read preference.
  return @read_preference if @read_preference
  @read_preference = options[:read] || database.read_preference
end
# Get the write concern.
#
# @example Get the write concern.
# stream.write_concern
#
# @return [ Mongo::WriteConcern ] The write concern.
#
# @since 2.1.0
def write_concern
  # Memoized: an explicit :write_concern (or legacy :write) option wins;
  # otherwise inherit the database's write concern.
  @write_concern ||= begin
    wco = @options[:write_concern] || @options[:write]
    wco ? WriteConcern.get(wco) : database.write_concern
  end
end
private
# @param [ Hash ] opts The options.
#
# @option opts [ BSON::Document ] :file_info_doc For internal
# driver use only. A BSON document to use as file information.
def read_stream(id, opts = nil)
  # Layer bucket-level options, then caller options, on top of the file id
  # selector; later merges take precedence.
  stream_opts = { file_id: id }.merge(options).merge(opts || {})
  Stream.get(self, Stream::READ_MODE, stream_opts)
end
def write_stream(filename, opts)
  # Caller-supplied opts override the bucket-level options.
  stream_opts = { filename: filename }.merge(options).merge(opts)
  Stream.get(self, Stream::WRITE_MODE, stream_opts)
end
# Namespaced chunks collection name, e.g. "fs.chunks" for the default prefix.
def chunks_name
  [prefix, Grid::File::Chunk::COLLECTION].join('.')
end
# Namespaced files collection name, e.g. "fs.files" for the default prefix.
def files_name
  [prefix, Grid::File::Info::COLLECTION].join('.')
end
def ensure_indexes!
  # Only create the GridFS indexes when the files collection is still empty
  # (cheap probe: fetch at most one _id).
  probe = files_collection.find({}, limit: 1, projection: { _id: 1 }).first
  return unless probe.nil?
  chunks_collection.indexes.create_one(FSBucket::CHUNKS_INDEX, unique: true)
  files_collection.indexes.create_one(FSBucket::FILES_INDEX)
end
end
end
end
|
# Command-line tool for inspecting MOOC participant data from tmc.mooc.fi.
# Fetches participant and weekly point data over HTTP (with basic auth),
# caches it as JSON, and prints per-user details or a participant listing.
module MoocDataParser
  require 'optparse'
  require 'ostruct'
  require 'httparty'
  require 'json'
  require 'io/console'

  class App
    # Entry point: set up state, parse CLI options, perform the selected
    # action and persist the fetched data to the cache.
    def run(args)
      init_variables()
      parse_options(args)
      decide_what_to_do(maybe_fetch_json())
      $cache.write_file_to_cache('data.json', @notes.to_json)
    end

    # Dispatch to the action selected on the command line. Without any
    # action option, save the cache, print usage and abort.
    def decide_what_to_do(json)
      if @options.user
        show_info_about(@options.user, 'username', json)
      elsif @options.user_email
        show_info_about(@options.user_email, 'email', json)
      elsif @options.user_tmc_username
        show_info_about(@options.user_tmc_username, 'username', json)
      elsif @options.list
        list_and_filter_participants(json)
      else
        $cache.write_file_to_cache('data.json', @notes.to_json)
        puts @opt
        abort
      end
    end

    # Initialize the cache backend and load previously cached data
    # (an empty hash when the cache is missing or unparseable).
    def init_variables
      $cache ||= MoocDataParser::DummyCacher.new
      @notes = begin JSON.parse($cache.read_file_from_cache('data.json')) rescue {} end
    end

    # Build the OptionParser and populate @options from +args+.
    def parse_options(args)
      @options = OpenStruct.new
      @opt = OptionParser.new do |opts|
        opts.banner = "Usage: show-mooc-details.rb [options]"
        opts.on("-f", "--force", "Reload data from server") do |v|
          @options.reload = true
        end
        opts.on("-u", "--user username", "Show details for user") do |v|
          @options.user = v
        end
        opts.on("-m", "--missing-points", "Show missing compulsary points") do |v|
          @options.show_missing_compulsory_points = true
        end
        opts.on("-c", "--completion-precentige", "Show completition percentige") do |v|
          @options.show_completion_percentige = true
        end
        opts.on("-e", "--email emailaddress", "Show details for user") do |v|
          @options.user_email = v
        end
        opts.on("-t", "--tmc-account tmc-account", "Show details for user") do |v|
          @options.user_tmc_username = v
        end
        opts.on("-l", "--list", "Show the basic list") do |v|
          @options.list = true
        end
        opts.on_tail("-h", "--help", "Show this message") do
          puts opts
          exit
        end
      end
      @opt.parse!(args)
    end

    # Prompt for TMC credentials; the password is read without echo.
    def get_auth
      print 'username: '
      username = $stdin.gets.strip
      print 'password: '
      password = $stdin.noecho(&:gets).strip
      puts
      {username: username, password: password}
    end

    # Fetch participant and weekly point data from the server unless a
    # cached copy exists (or --force was given). Returns a hash with
    # :participants and :week_data keys.
    def maybe_fetch_json()
      if @options.reload or @notes['user_info'].nil? or @notes['week_data'].nil?
        # Progress indicator: print a dot every half second while downloading.
        # (A stray `puts` after the infinite loop was unreachable and removed.)
        t = -> do
          loop do
            print '.'
            sleep 0.5
          end
        end
        auth = get_auth()
        th = Thread.new(&t)
        url = "http://tmc.mooc.fi/mooc/participants.json?api_version=7&utf8=%E2%9C%93&filter_koko_nimi=&column_username=1&column_email=1&column_koko_nimi=1&column_hakee_yliopistoon_2014=1&group_completion_course_id=18"
        user_info = JSON.parse(HTTParty.get(url, basic_auth: auth).body)['participants']
        week_data = fetch_week_datas(auth)
        @notes['user_info'] = user_info.clone
        @notes['week_data'] = week_data.clone
        th.kill
        puts
        {participants: user_info, week_data: week_data}
      else
        {participants: @notes['user_info'].clone, week_data: @notes['week_data'].clone}
      end
    end

    # Print the details of a single user, looked up by +user_field+
    # ('username' or 'email').
    def show_info_about(user, user_field = 'username', json)
      participants = json[:participants]
      week_data = json[:week_data]
      my_user = participants.find{|a| a[user_field] == user }
      if my_user.nil?
        abort "User not found"
      end
      # BUGFIX: pass the found user (and the data each helper actually uses)
      # explicitly -- the helpers previously referenced locals that were
      # never in their scope and raised NameError.
      show_user_print_basic_info(my_user)
      show_user_print_completion_percentage(my_user, participants) if @options.show_completion_percentige
      show_user_print_missing_points(my_user, week_data) if @options.show_missing_compulsory_points
    end

    # Print the basic profile fields of +my_user+.
    def show_user_print_basic_info(my_user)
      formatted_print_user_details ["Username", my_user['username']]
      formatted_print_user_details ["Email", my_user['email']]
      formatted_print_user_details ["Hakee yliopistoon", my_user['hakee_yliopistoon_2014']]
      formatted_print_user_details ["Koko nimi", my_user['koko_nimi']]
    end

    # Print the compulsory exercise points the user is still missing, per week.
    def show_user_print_missing_points(my_user, week_data)
      formatted_print_user_details ["Compulsory points"]
      get_points_info_for_user(my_user, week_data).each do |k,v|
        formatted_print_user_details [k, v.join(", ")]
      end
    end

    # Print the user's per-week completion percentage. Takes the full
    # participants array because done_exercise_percents looks the user up in it.
    def show_user_print_completion_percentage(my_user, participants)
      formatted_print_user_details ["Points per week"]
      done_exercise_percents(my_user, participants).each do |k|
        begin
          k = k.first
          formatted_print_user_details [k[0], k[1]]
        rescue
          # nil entries (weeks without data) simply produce no output row.
          nil
        end
      end
    end

    # Print a one- or two-column detail row with aligned labels.
    def formatted_print_user_details(details)
      case details.size
      when 1
        puts "%18s" % details
      when 2
        puts "%18s: %-20s" % details
      end
    end

    # Download the users_to_points mapping for each of the 12 course weeks.
    def fetch_week_datas(auth)
      base_url = "http://tmc.mooc.fi/mooc/courses/18/points/"
      weeks = %w(1 2 3 4 5 6 7 8 9 10 11 12)
      rest = ".json?api_version=7"
      week_data = {}
      weeks.each do |week|
        week_data[week] = JSON.parse(HTTParty.get(base_url + week + rest, basic_auth: auth).body)['users_to_points']
      end
      week_data
    end

    # Print a table of applying participants, optionally with completion
    # percentages and missing compulsory points, followed by summary stats.
    def list_and_filter_participants(json)
      wanted_fields = %w(username email koko_nimi)
      participants = json[:participants]
      week_data = json[:week_data]
      everyone_in_course = participants.size
      only_applying!(participants)
      hakee_yliopistoon = participants.size
      puts "%-20s %-35s %-25s %-120s" % ["Username", "Email", "Real name", "Missing points"]
      puts '-'*200
      participants.each do |participant|
        nice_string_in_array = wanted_fields.map do |key|
          participant[key]
        end
        if @options.show_completion_percentige
          nice_string_in_array << format_done_exercises_percents(done_exercise_percents(participant, participants))
        end
        if @options.show_missing_compulsory_points
          nice_string_in_array << missing_points_to_list_string(get_points_info_for_user(participant, week_data))
        end
        to_be_printed = "%-20s %-35s %-25s "
        to_be_printed << "%-180s " if @options.show_completion_percentige
        to_be_printed << "%-120s" if @options.show_missing_compulsory_points
        puts to_be_printed % nice_string_in_array
      end
      puts
      puts
      puts "Stats: "
      puts "%25s: %4d" % ["Kaikenkaikkiaan kurssilla", everyone_in_course]
      puts "%25s: %4d" % ["Hakee yliopistoon", hakee_yliopistoon]
    end

    # Render week => percent hashes as "week_number: percent", comma-joined.
    def format_done_exercises_percents(hash)
      hash.map do |k|
        begin
          k = k.first
          "#{k[0].scan(/\d+/).first}: #{k[1]}"
        rescue
          nil
        end
      end.compact.join(", ")
    end

    # For each course week, the participant's share of available points as a
    # formatted percentage ({"viikko1" => "85.0%"}); nil entries for weeks
    # without data.
    def done_exercise_percents(participant, participants_data)
      user_info = participants_data.find{ |p| p['username'] == participant['username'] }
      exercise_weeks = user_info['groups']
      week_keys = (1..12).map{|i| "viikko#{i}"}
      week_keys.map do |week|
        details = exercise_weeks[week]
        unless details.nil?
          # BUGFIX: a trailing single "%" is an incomplete format specifier and
          # raises ArgumentError; "%%" emits a literal percent sign.
          {week => ("%3.1f%%" % [(details['points'].to_f / details['total'].to_f) * 100])}
        end
      end
    end

    # Flatten the missing-points hash into a "week: p1,p2 " string.
    def missing_points_to_list_string(missing_by_week)
      str = ""
      missing_by_week.keys.each do |week|
        missing = missing_by_week[week]
        unless missing.nil? or missing.length == 0
          str << week
          str << ": "
          str << missing.join(",")
          str << " "
        end
      end
      str
    end

    # For each week, the compulsory exercises the participant has not yet
    # earned points for.
    def get_points_info_for_user(participant, week_data)
      # TODO: fill in the data for week 12
      compulsory_exercises = {'6' => %w(102.1 102.2 102.3 103.1 103.2 103.3), '7' => %w(116.1 116.2 116.3), '8' => %w(124.1 124.2 124.3 124.4),
        '9' => %w(134.1 134.2 134.3 134.4 134.5), '10' => %w(141.1 141.2 141.3 141.4), '11' => %w(151.1 151.2 151.3 151.4), '12' => %w()}
      points_by_week = {}
      week_data.keys.each do |week|
        points_by_week[week] = week_data[week][participant['username']]
      end
      missing_by_week = {}
      points_by_week.keys.each do |week|
        weeks_points = points_by_week[week] || [] # always an array
        weeks_compulsory_points = compulsory_exercises[week] || []
        missing_by_week[week] = weeks_compulsory_points - weeks_points
      end
      missing_by_week
    end

    # Keep only participants applying to university (destructive).
    def only_applying!(participants)
      participants.select! do |participant|
        participant['hakee_yliopistoon_2014']
      end
    end
  end
end
Refactoring
# Command-line tool for inspecting MOOC participant data from tmc.mooc.fi.
# Fetches participant and weekly point data over HTTP (with basic auth),
# caches it as JSON, and prints per-user details or a participant listing.
module MoocDataParser
  require 'optparse'
  require 'ostruct'
  require 'httparty'
  require 'json'
  require 'io/console'

  class App
    # Entry point: set up state, parse CLI options, perform the selected
    # action and persist the fetched data to the cache.
    def run(args)
      init_variables()
      parse_options(args)
      decide_what_to_do(maybe_fetch_json())
      $cache.write_file_to_cache('data.json', @notes.to_json)
    end

    # Dispatch to the action selected on the command line. Without any
    # action option, save the cache, print usage and abort.
    def decide_what_to_do(json)
      if @options.user
        show_info_about(@options.user, 'username', json)
      elsif @options.user_email
        show_info_about(@options.user_email, 'email', json)
      elsif @options.user_tmc_username
        show_info_about(@options.user_tmc_username, 'username', json)
      elsif @options.list
        list_and_filter_participants(json)
      else
        $cache.write_file_to_cache('data.json', @notes.to_json)
        puts @opt
        abort
      end
    end

    # Initialize the cache backend and load previously cached data
    # (an empty hash when the cache is missing or unparseable).
    def init_variables
      $cache ||= MoocDataParser::DummyCacher.new
      @notes = begin JSON.parse($cache.read_file_from_cache('data.json')) rescue {} end
    end

    # Build the OptionParser and populate @options from +args+.
    def parse_options(args)
      @options = OpenStruct.new
      @opt = OptionParser.new do |opts|
        opts.banner = "Usage: show-mooc-details.rb [options]"
        opts.on("-f", "--force", "Reload data from server") do |v|
          @options.reload = true
        end
        opts.on("-u", "--user username", "Show details for user") do |v|
          @options.user = v
        end
        opts.on("-m", "--missing-points", "Show missing compulsary points") do |v|
          @options.show_missing_compulsory_points = true
        end
        opts.on("-c", "--completion-precentige", "Show completition percentige") do |v|
          @options.show_completion_percentige = true
        end
        opts.on("-e", "--email emailaddress", "Show details for user") do |v|
          @options.user_email = v
        end
        opts.on("-t", "--tmc-account tmc-account", "Show details for user") do |v|
          @options.user_tmc_username = v
        end
        opts.on("-l", "--list", "Show the basic list") do |v|
          @options.list = true
        end
        opts.on_tail("-h", "--help", "Show this message") do
          puts opts
          exit
        end
      end
      @opt.parse!(args)
    end

    # Prompt for TMC credentials; the password is read without echo.
    def get_auth
      print 'username: '
      username = $stdin.gets.strip
      print 'password: '
      password = $stdin.noecho(&:gets).strip
      puts
      {username: username, password: password}
    end

    # Background thread printing a progress dot every half second.
    # (A stray `puts` after the infinite loop was unreachable and removed.)
    def get_process_thread
      t = -> do
        loop do
          print '.'
          sleep 0.5
        end
      end
      Thread.new(&t)
    end

    # Fetch fresh data from the server unless a cached copy exists
    # (or --force was given). Returns {participants:, week_data:}.
    def maybe_fetch_json()
      if @options.reload or @notes['user_info'].nil? or @notes['week_data'].nil?
        download_data()
      else
        {participants: @notes['user_info'].clone, week_data: @notes['week_data'].clone}
      end
    end

    # Download participant and weekly point data, store copies in @notes for
    # caching, and return {participants:, week_data:}.
    def download_data
      auth = get_auth()
      thread = get_process_thread()
      url = "http://tmc.mooc.fi/mooc/participants.json?api_version=7&utf8=%E2%9C%93&filter_koko_nimi=&column_username=1&column_email=1&column_koko_nimi=1&column_hakee_yliopistoon_2014=1&group_completion_course_id=18"
      user_info = JSON.parse(HTTParty.get(url, basic_auth: auth).body)['participants']
      week_data = fetch_week_datas(auth)
      @notes['user_info'] = user_info.clone
      @notes['week_data'] = week_data.clone
      thread.kill
      puts
      {participants: user_info, week_data: week_data}
    end

    # Print the details of a single user, looked up by +user_field+
    # ('username' or 'email').
    def show_info_about(user, user_field = 'username', json)
      participants = json[:participants]
      week_data = json[:week_data]
      my_user = participants.find{|a| a[user_field] == user }
      if my_user.nil?
        abort "User not found"
      end
      # BUGFIX: pass the found user (and the data each helper actually uses)
      # explicitly -- the helpers previously referenced locals that were
      # never in their scope and raised NameError.
      show_user_print_basic_info(my_user)
      show_user_print_completion_percentage(my_user, participants) if @options.show_completion_percentige
      show_user_print_missing_points(my_user, week_data) if @options.show_missing_compulsory_points
    end

    # Print the basic profile fields of +my_user+.
    def show_user_print_basic_info(my_user)
      formatted_print_user_details ["Username", my_user['username']]
      formatted_print_user_details ["Email", my_user['email']]
      formatted_print_user_details ["Hakee yliopistoon", my_user['hakee_yliopistoon_2014']]
      formatted_print_user_details ["Koko nimi", my_user['koko_nimi']]
    end

    # Print the compulsory exercise points the user is still missing, per week.
    def show_user_print_missing_points(my_user, week_data)
      formatted_print_user_details ["Compulsory points"]
      get_points_info_for_user(my_user, week_data).each do |k,v|
        formatted_print_user_details [k, v.join(", ")]
      end
    end

    # Print the user's per-week completion percentage. Takes the full
    # participants array because done_exercise_percents looks the user up in it.
    def show_user_print_completion_percentage(my_user, participants)
      formatted_print_user_details ["Points per week"]
      done_exercise_percents(my_user, participants).each do |k|
        begin
          k = k.first
          formatted_print_user_details [k[0], k[1]]
        rescue
          # nil entries (weeks without data) simply produce no output row.
          nil
        end
      end
    end

    # Print a one- or two-column detail row with aligned labels.
    def formatted_print_user_details(details)
      case details.size
      when 1
        puts "%18s" % details
      when 2
        puts "%18s: %-20s" % details
      end
    end

    # Download the users_to_points mapping for each of the 12 course weeks.
    def fetch_week_datas(auth)
      base_url = "http://tmc.mooc.fi/mooc/courses/18/points/"
      weeks = %w(1 2 3 4 5 6 7 8 9 10 11 12)
      rest = ".json?api_version=7"
      week_data = {}
      weeks.each do |week|
        week_data[week] = JSON.parse(HTTParty.get(base_url + week + rest, basic_auth: auth).body)['users_to_points']
      end
      week_data
    end

    # Participant fields shown in the listing, in column order.
    def wanted_fields
      %w(username email koko_nimi)
    end

    # Print a table of applying participants, optionally with completion
    # percentages and missing compulsory points, followed by summary stats.
    def list_and_filter_participants(json)
      participants = json[:participants]
      week_data = json[:week_data]
      everyone_in_course = participants.size
      only_applying!(participants)
      hakee_yliopistoon = participants.size
      print_headers()
      process_participants(participants, week_data)
      print_list_stats(everyone_in_course, hakee_yliopistoon)
    end

    # Print the listing's column headers and a separator rule.
    def print_headers
      puts "%-20s %-35s %-25s %-120s" % ["Username", "Email", "Real name", "Missing points"]
      puts '-'*200
    end

    # Print the summary counts shown below the listing.
    def print_list_stats(everyone_in_course, hakee_yliopistoon)
      puts "\n"
      puts "Stats: "
      puts "%25s: %4d" % ["Kaikenkaikkiaan kurssilla", everyone_in_course]
      puts "%25s: %4d" % ["Hakee yliopistoon", hakee_yliopistoon]
    end

    # Print one formatted row per participant, appending the optional
    # percentage and missing-points columns as requested.
    def process_participants(participants, week_data)
      participants.each do |participant|
        nice_string_in_array = wanted_fields.map do |key|
          participant[key]
        end
        if @options.show_completion_percentige
          nice_string_in_array << format_done_exercises_percents(done_exercise_percents(participant, participants))
        end
        if @options.show_missing_compulsory_points
          nice_string_in_array << missing_points_to_list_string(get_points_info_for_user(participant, week_data))
        end
        to_be_printed = "%-20s %-35s %-25s "
        to_be_printed << "%-180s " if @options.show_completion_percentige
        to_be_printed << "%-120s" if @options.show_missing_compulsory_points
        puts to_be_printed % nice_string_in_array
      end
    end

    # Render week => percent hashes as "week_number: percent", comma-joined.
    def format_done_exercises_percents(hash)
      hash.map do |k|
        begin
          k = k.first
          "#{k[0].scan(/\d+/).first}: #{k[1]}"
        rescue
          nil
        end
      end.compact.join(", ")
    end

    # For each course week, the participant's share of available points as a
    # formatted percentage ({"viikko1" => "85.0%"}); nil entries for weeks
    # without data.
    def done_exercise_percents(participant, participants_data)
      user_info = participants_data.find{ |p| p['username'] == participant['username'] }
      exercise_weeks = user_info['groups']
      week_keys = (1..12).map{|i| "viikko#{i}"}
      week_keys.map do |week|
        details = exercise_weeks[week]
        unless details.nil?
          # BUGFIX: a trailing single "%" is an incomplete format specifier and
          # raises ArgumentError; "%%" emits a literal percent sign.
          {week => ("%3.1f%%" % [(details['points'].to_f / details['total'].to_f) * 100])}
        end
      end
    end

    # Flatten the missing-points hash into a "week: p1,p2 " string.
    def missing_points_to_list_string(missing_by_week)
      str = ""
      missing_by_week.keys.each do |week|
        missing = missing_by_week[week]
        unless missing.nil? or missing.length == 0
          str << week
          str << ": "
          str << missing.join(",")
          str << " "
        end
      end
      str
    end

    # For each week, the compulsory exercises the participant has not yet
    # earned points for.
    def get_points_info_for_user(participant, week_data)
      # TODO: fill in the data for week 12
      compulsory_exercises = {'6' => %w(102.1 102.2 102.3 103.1 103.2 103.3), '7' => %w(116.1 116.2 116.3), '8' => %w(124.1 124.2 124.3 124.4),
        '9' => %w(134.1 134.2 134.3 134.4 134.5), '10' => %w(141.1 141.2 141.3 141.4), '11' => %w(151.1 151.2 151.3 151.4), '12' => %w()}
      points_by_week = {}
      week_data.keys.each do |week|
        points_by_week[week] = week_data[week][participant['username']]
      end
      missing_by_week = {}
      points_by_week.keys.each do |week|
        weeks_points = points_by_week[week] || [] # always an array
        weeks_compulsory_points = compulsory_exercises[week] || []
        missing_by_week[week] = weeks_compulsory_points - weeks_points
      end
      missing_by_week
    end

    # Keep only participants applying to university (destructive).
    def only_applying!(participants)
      participants.select! do |participant|
        participant['hakee_yliopistoon_2014']
      end
    end
  end
end
|
# Gem version constant.
module MultipleMan
  VERSION = '0.8.0'
end
version bump
# Gem version constant.
module MultipleMan
  VERSION = '0.8.1'
end
|
module Munge
  module Attribute
    # Value object describing a source file's path, split into the pieces
    # the rest of the system needs: relative and absolute paths, basename
    # (before the first dot) and the list of extensions.
    class Path
      attr_reader :relative, :absolute, :basename, :extnames

      def initialize(source_path, file_path)
        @relative = resolve_relative(source_path, file_path).freeze
        @absolute = file_path.dup.freeze
        @basename = resolve_basename(file_path).freeze
        @extnames = resolve_extnames(file_path).freeze
      end

      private

      # Path of +file_path+ relative to +source_path+, as a String.
      def resolve_relative(source_path, file_path)
        Pathname.new(file_path).relative_path_from(Pathname.new(source_path)).to_s
      end

      # Portion of the file name before the first dot.
      def resolve_basename(file_path)
        split_name(file_path).first
      end

      # Every dot-separated segment after the first (e.g. ["html", "erb"]).
      def resolve_extnames(file_path)
        split_name(file_path)[1..-1]
      end

      # Dot-separated segments of the file's base name.
      def split_name(file_path)
        File.basename(file_path).split(".")
      end
    end
  end
end
add path.dirname
module Munge
  module Attribute
    # Value object describing a source file's path, split into the pieces
    # the rest of the system needs: relative and absolute paths, basename,
    # extension list and the relative directory name.
    class Path
      attr_reader :relative, :absolute, :basename, :extnames, :dirname

      def initialize(source_path, file_path)
        @relative = resolve_relative(source_path, file_path).freeze
        @absolute = file_path.dup.freeze
        @basename = resolve_basename(file_path).freeze
        @extnames = resolve_extnames(file_path).freeze
        @dirname = resolve_dirname(@relative).freeze
      end

      private

      # Path of +file_path+ relative to +source_path+, as a String.
      def resolve_relative(source_path, file_path)
        Pathname.new(file_path).relative_path_from(Pathname.new(source_path)).to_s
      end

      # Portion of the file name before the first dot.
      def resolve_basename(file_path)
        split_name(file_path).first
      end

      # Every dot-separated segment after the first (e.g. ["html", "erb"]).
      def resolve_extnames(file_path)
        split_name(file_path)[1..-1]
      end

      # Relative directory of the file with a trailing slash ("a/b/"),
      # or "/" for files at the top level of the source tree.
      def resolve_dirname(relpath)
        anchored = File.join("/", relpath)
        parent_sans_slash = File.dirname(anchored)[1..-1]
        File.join(parent_sans_slash, "/")
      end

      # Dot-separated segments of the file's base name.
      def split_name(file_path)
        File.basename(file_path).split(".")
      end
    end
  end
end
|
# Imports a MyAnimeList (MAL) XML export into a user's library, then posts a
# profile comment summarizing the import.
class MyAnimeListImport
  # MAL status codes and labels mapped to our anime library statuses.
  ANIME_STATUS_MAP = {
    "1" => "Currently Watching",
    "watching" => "Currently Watching",
    "Watching" => "Currently Watching",
    "2" => "Completed",
    "completed" => "Completed",
    "Completed" => "Completed",
    "3" => "On Hold",
    "onhold" => "On Hold",
    "On-Hold" => "On Hold",
    "4" => "Dropped",
    "dropped" => "Dropped",
    "Dropped" => "Dropped",
    "6" => "Plan to Watch",
    "plantowatch" => "Plan to Watch",
    "Plan to Watch" => "Plan to Watch"
  }
  # MAL status codes and labels mapped to our manga library statuses.
  MANGA_STATUS_MAP = {
    "1" => "Currently Reading",
    "reading" => "Currently Reading",
    "Reading" => "Currently Reading",
    "2" => "Completed",
    "completed" => "Completed",
    "Completed" => "Completed",
    "3" => "On Hold",
    "onhold" => "On Hold",
    "On-Hold" => "On Hold",
    "4" => "Dropped",
    "dropped" => "Dropped",
    "Dropped" => "Dropped",
    "6" => "Plan to Read",
    "plantoread" => "Plan to Read",
    "Plan to Read" => "Plan to Read"
  }

  # user - the importing User; xml - raw MAL export XML string.
  def initialize(user, xml)
    @user = user
    @xml = xml
    @data = nil
    # NOTE(review): @list appears unused -- #list derives the list type from
    # the XML instead. Kept for compatibility; confirm before removing.
    @list = "anime"
  end

  # Entries parsed from the XML, normalized into hashes (memoized).
  # (Cleanup: dropped a dead `@data = []` that was immediately overwritten.)
  def data
    if @data.nil?
      @data = hashdata[list].map do |indv|
        row = {
          rating: indv["my_score"].to_i,
          notes: indv["my_comments"].blank? ? indv["my_tags"] : indv["my_comments"]
        }
        if list == "manga"
          row.merge!({
            mal_id: indv["manga_mangadb_id"].to_i,
            title: indv["manga_title"],
            status: MANGA_STATUS_MAP[indv["my_status"]] || "Currently Reading",
            volumes_read: indv["my_read_volumes"].to_i,
            chapters_read: indv["my_read_chapters"].to_i,
            reread_count: indv["my_times_read"].to_i
          })
        elsif list == "anime"
          row.merge!({
            mal_id: indv["series_animedb_id"].to_i,
            title: indv["series_title"],
            status: ANIME_STATUS_MAP[indv["my_status"]] || "Currently Watching",
            episodes_watched: indv["my_watched_episodes"].to_i,
            rewatch_count: indv["my_times_watched"].to_i,
            rewatching: indv["my_rewatching"] == "1"
          })
        end
        row
      end
    end
    @data
  end

  # Apply the parsed entries to the user's library, then broadcast a
  # profile comment with the results (import count plus any failures).
  def apply!
    table = (list == "manga") ? Manga : Anime
    animangoes = table.where(mal_id: data.map {|x| x[:mal_id] }).index_by(&:mal_id)
    failures = []
    count = 0
    data.each do |mal_entry|
      animanga = animangoes[mal_entry[:mal_id]]
      if animanga.nil?
        # Title is not in our database; record it for the failure report.
        failures << mal_entry[:title]
      else
        entry = nil
        if list == "manga"
          entry = MangaLibraryEntry.where(user_id: @user.id, manga_id: animanga.id).first_or_initialize
          entry.chapters_read = restrict_range(mal_entry[:chapters_read], animanga.chapter_count)
          entry.volumes_read = restrict_range(mal_entry[:volumes_read], animanga.volume_count)
          entry.reread_count = mal_entry[:reread_count]
        else
          entry = LibraryEntry.where(user_id: @user.id, anime_id: animanga.id).first_or_initialize
          entry.episodes_watched = restrict_range(mal_entry[:episodes_watched], animanga.episode_count)
          # Guard against implausible rewatch counts from the export.
          entry.rewatch_count = mal_entry[:rewatch_count] < 255 ? mal_entry[:rewatch_count] : 0
          entry.rewatching = mal_entry[:rewatching]
        end
        entry.status = mal_entry[:status]
        entry.updated_at = Time.now
        entry.notes = mal_entry[:notes]
        entry.imported = true
        # MAL rates on a 10-point scale; halve it, and treat 0 as "unrated".
        entry.rating = mal_entry[:rating].to_f / 2
        entry.rating = nil if entry.rating == 0
        entry.save!
        count += 1
      end
    end
    comment = "Hey, we just finished importing #{count} titles from your MAL account."
    # If the user account was created in the last 24 hours add a welcome message.
    if @user.created_at >= 1.day.ago
      comment << " Welcome to Hummingbird!"
    end
    if failures.length > 0
      comment << "\n\nThe following were not imported:\n * "
      comment << failures.join("\n * ")
    end
    Action.broadcast(
      action_type: "created_profile_comment",
      user: @user,
      poster: User.find(1),
      comment: comment
    )
  end

  private

  # Clamp +num+ to +max+ when a meaningful maximum is known.
  def restrict_range(num, max)
    # FIXME: Hack because we use 0 instead of nil
    return num if max.nil? || max == 0
    [num, max].min
  end

  # Parsed XML as a Hash, rooted at the "myanimelist" element (memoized).
  def hashdata
    @hashdata ||= Hash.from_xml(@xml)["myanimelist"]
  end

  # "manga" or "anime", derived from which key the XML export contains.
  def list
    if hashdata.include?("manga")
      "manga"
    elsif hashdata.include?("anime")
      "anime"
    else
      raise "Unknown list type"
    end
  end
end
Try to import shows from MAL while importing list.
# Imports a MyAnimeList (MAL) XML export into a user's library, attempting to
# import unknown titles from MAL on the fly, then posts a profile comment
# summarizing the import.
class MyAnimeListImport
  # MAL status codes and labels mapped to our anime library statuses.
  ANIME_STATUS_MAP = {
    "1" => "Currently Watching",
    "watching" => "Currently Watching",
    "Watching" => "Currently Watching",
    "2" => "Completed",
    "completed" => "Completed",
    "Completed" => "Completed",
    "3" => "On Hold",
    "onhold" => "On Hold",
    "On-Hold" => "On Hold",
    "4" => "Dropped",
    "dropped" => "Dropped",
    "Dropped" => "Dropped",
    "6" => "Plan to Watch",
    "plantowatch" => "Plan to Watch",
    "Plan to Watch" => "Plan to Watch"
  }
  # MAL status codes and labels mapped to our manga library statuses.
  MANGA_STATUS_MAP = {
    "1" => "Currently Reading",
    "reading" => "Currently Reading",
    "Reading" => "Currently Reading",
    "2" => "Completed",
    "completed" => "Completed",
    "Completed" => "Completed",
    "3" => "On Hold",
    "onhold" => "On Hold",
    "On-Hold" => "On Hold",
    "4" => "Dropped",
    "dropped" => "Dropped",
    "Dropped" => "Dropped",
    "6" => "Plan to Read",
    "plantoread" => "Plan to Read",
    "Plan to Read" => "Plan to Read"
  }

  # user - the importing User; xml - raw MAL export XML string.
  def initialize(user, xml)
    @user = user
    @xml = xml
    @data = nil
    # NOTE(review): @list appears unused -- #list derives the list type from
    # the XML instead. Kept for compatibility; confirm before removing.
    @list = "anime"
  end

  # Entries parsed from the XML, normalized into hashes (memoized).
  # (Cleanup: dropped a dead `@data = []` that was immediately overwritten.)
  def data
    if @data.nil?
      @data = hashdata[list].map do |indv|
        row = {
          rating: indv["my_score"].to_i,
          notes: indv["my_comments"].blank? ? indv["my_tags"] : indv["my_comments"]
        }
        if list == "manga"
          row.merge!({
            mal_id: indv["manga_mangadb_id"].to_i,
            title: indv["manga_title"],
            status: MANGA_STATUS_MAP[indv["my_status"]] || "Currently Reading",
            volumes_read: indv["my_read_volumes"].to_i,
            chapters_read: indv["my_read_chapters"].to_i,
            reread_count: indv["my_times_read"].to_i
          })
        elsif list == "anime"
          row.merge!({
            mal_id: indv["series_animedb_id"].to_i,
            title: indv["series_title"],
            status: ANIME_STATUS_MAP[indv["my_status"]] || "Currently Watching",
            episodes_watched: indv["my_watched_episodes"].to_i,
            rewatch_count: indv["my_times_watched"].to_i,
            rewatching: indv["my_rewatching"] == "1"
          })
        end
        row
      end
    end
    @data
  end

  # Apply the parsed entries to the user's library. Titles missing from our
  # database are imported from MAL first; only titles that still cannot be
  # resolved are reported as failures in the broadcast profile comment.
  def apply!
    table = (list == "manga") ? Manga : Anime
    media_list = table.where(mal_id: data.map {|x| x[:mal_id] }).index_by(&:mal_id)
    failures = []
    count = 0
    data.each do |mal_entry|
      media = media_list[mal_entry[:mal_id]]
      if media.nil?
        # Unknown title: try to import it from MAL on the fly.
        begin
          media = table.create_or_update_from_hash(
            MALImport.new(list.to_sym, mal_entry[:mal_id]).to_h
          )
        rescue
          media = nil
        end
      end
      if media.nil?
        failures << mal_entry[:title]
        next
      end
      # BUGFIX: previously a freshly-imported title fell through without a
      # library entry being created; build the entry for all resolved media.
      entry = nil
      if list == "manga"
        entry = MangaLibraryEntry.where(user_id: @user.id, manga_id: media.id).first_or_initialize
        entry.chapters_read = restrict_range(mal_entry[:chapters_read], media.chapter_count)
        entry.volumes_read = restrict_range(mal_entry[:volumes_read], media.volume_count)
        entry.reread_count = mal_entry[:reread_count]
      else
        entry = LibraryEntry.where(user_id: @user.id, anime_id: media.id).first_or_initialize
        entry.episodes_watched = restrict_range(mal_entry[:episodes_watched], media.episode_count)
        # Guard against implausible rewatch counts from the export.
        entry.rewatch_count = mal_entry[:rewatch_count] < 255 ? mal_entry[:rewatch_count] : 0
        entry.rewatching = mal_entry[:rewatching]
      end
      entry.status = mal_entry[:status]
      entry.updated_at = Time.now
      entry.notes = mal_entry[:notes]
      entry.imported = true
      # MAL rates on a 10-point scale; halve it, and treat 0 as "unrated".
      entry.rating = mal_entry[:rating].to_f / 2
      entry.rating = nil if entry.rating == 0
      entry.save!
      count += 1
    end
    comment = "Hey, we just finished importing #{count} titles from your MAL account."
    # If the user account was created in the last 24 hours add a welcome message.
    if @user.created_at >= 1.day.ago
      comment << " Welcome to Hummingbird!"
    end
    if failures.length > 0
      comment << "\n\nThe following could not be imported:\n * "
      comment << failures.join("\n * ")
    end
    Action.broadcast(
      action_type: "created_profile_comment",
      user: @user,
      poster: User.find(1),
      comment: comment
    )
  end

  private

  # Clamp +num+ to +max+ when a meaningful maximum is known.
  def restrict_range(num, max)
    # FIXME: Hack because we use 0 instead of nil
    return num if max.nil? || max == 0
    [num, max].min
  end

  # Parsed XML as a Hash, rooted at the "myanimelist" element (memoized).
  def hashdata
    @hashdata ||= Hash.from_xml(@xml)["myanimelist"]
  end

  # "manga" or "anime", derived from which key the XML export contains.
  def list
    if hashdata.include?("manga")
      "manga"
    elsif hashdata.include?("anime")
      "anime"
    else
      raise "Unknown list type"
    end
  end
end
module Nanite
  class Agent
    # Supervises a running agent process: pid-file handling, optional
    # daemonization, signal traps and graceful shutdown.
    class Monitor
      include DaemonizeHelper

      attr_reader :agent, :options, :shutting_down, :pid_file

      def initialize(agent, options = {})
        @agent = agent
        @options = options
        setup_pid_file
        daemonize_agent if options[:daemonize]
        setup_traps
      end

      # Create and sanity-check the pid file for this agent identity.
      def setup_pid_file
        @pid_file = PidFile.new(agent.identity, options)
        @pid_file.check
      end

      # Detach into the background and record the daemon's pid.
      def daemonize_agent
        daemonize(agent.identity, options)
        pid_file.write
      end

      # INT/TERM trigger a graceful shutdown (skipped under $TESTING);
      # USR1 dumps the currently running jobs to the log.
      def setup_traps
        ['INT', 'TERM'].each do |signal|
          trap signal do
            graceful_shutdown
          end
        end unless $TESTING
        trap 'USR1' do
          # BUGFIX: running_jobs can be nil before any job has been worked
          # through; default to an empty array so the dump cannot raise.
          Nanite::Log.info("#{(Nanite::Actor.running_jobs || []).size} running jobs")
          Nanite::Log.info("Job list:\n#{(Nanite::Actor.running_jobs || []).collect{|job| "#{job.type}: #{job.payload[0..50]}"}}")
        end
      end

      # First signal starts the shutdown sequence; a second one exits hard.
      def graceful_shutdown
        exit if shutting_down
        @shutting_down = true
        begin
          initiate_shutdown
        rescue
          Nanite::Log.error("Error during graceful shutdown: #{$!.message}\n#{$!.backtrace.join("\n")}")
          exit
        end
      end

      # Remove the pid file when we wrote one.
      def cleanup
        pid_file.remove if options[:daemonize]
      end

      # Unhook the agent from the broker, then shut down once running
      # actors (if any) have drained.
      def initiate_shutdown
        cleanup
        agent.unsubscribe
        agent.un_register
        wait_for_running_actors do
          shutdown
        end
      end

      # Disconnect, stop the reactor shortly after, and exit the process.
      def shutdown
        agent.disconnect
        EM.add_timer(0.5) do
          EM.stop
          exit
        end
      end

      # With :graceful, poll once a second until running jobs finish before
      # yielding; otherwise yield immediately.
      def wait_for_running_actors(&blk)
        if options[:graceful] and Nanite::Actor.running_jobs?
          Nanite::Log.info("Waiting for running jobs to finish")
          timer = EM.add_periodic_timer(1) do
            unless Nanite::Actor.running_jobs?
              timer.cancel
              blk.call
            end
          end
        else
          blk.call
        end
      end
    end
  end
end
Ensure running job dump doesn't fail when no jobs have been worked through.
module Nanite
  class Agent
    # Supervises a running agent process: pid-file handling, optional
    # daemonization, signal traps and graceful shutdown.
    class Monitor
      include DaemonizeHelper

      attr_reader :agent, :options, :shutting_down, :pid_file

      def initialize(agent, options = {})
        @agent = agent
        @options = options
        setup_pid_file
        daemonize_agent if options[:daemonize]
        setup_traps
      end

      # Create and sanity-check the pid file for this agent identity.
      def setup_pid_file
        @pid_file = PidFile.new(agent.identity, options)
        @pid_file.check
      end

      # Detach into the background and record the daemon's pid.
      def daemonize_agent
        daemonize(agent.identity, options)
        pid_file.write
      end

      # INT/TERM trigger a graceful shutdown (skipped under $TESTING);
      # USR1 dumps the currently running jobs to the log. running_jobs is
      # guarded with `|| []` because it may be nil before any job has run.
      def setup_traps
        ['INT', 'TERM'].each do |signal|
          trap signal do
            graceful_shutdown
          end
        end unless $TESTING
        trap 'USR1' do
          Nanite::Log.info("#{(Nanite::Actor.running_jobs || []).size} running jobs")
          Nanite::Log.info("Job list:\n#{(Nanite::Actor.running_jobs || []).collect{|job| "#{job.type}: #{job.payload[0..50]}"}}")
        end
      end

      # First signal starts the shutdown sequence; a second one exits hard.
      def graceful_shutdown
        exit if shutting_down
        @shutting_down = true
        begin
          initiate_shutdown
        rescue
          Nanite::Log.error("Error during graceful shutdown: #{$!.message}\n#{$!.backtrace.join("\n")}")
          exit
        end
      end

      # Remove the pid file when we wrote one.
      def cleanup
        pid_file.remove if options[:daemonize]
      end

      # Unhook the agent from the broker, then shut down once running
      # actors (if any) have drained.
      def initiate_shutdown
        cleanup
        agent.unsubscribe
        agent.un_register
        wait_for_running_actors do
          shutdown
        end
      end

      # Disconnect, stop the reactor shortly after, and exit the process.
      def shutdown
        agent.disconnect
        EM.add_timer(0.5) do
          EM.stop
          exit
        end
      end

      # With :graceful, poll once a second until running jobs finish before
      # yielding; otherwise yield immediately.
      def wait_for_running_actors(&blk)
        if options[:graceful] and Nanite::Actor.running_jobs?
          Nanite::Log.info("Waiting for running jobs to finish")
          timer = EM.add_periodic_timer(1) do
            unless Nanite::Actor.running_jobs?
              timer.cancel
              blk.call
            end
          end
        else
          blk.call
        end
      end
    end
  end
end
module Octopress
  module Deploy
    # Deploys a generated static site by committing its files into a local
    # staging clone (.deploy by default) and pushing that commit to a
    # configured git remote (e.g. a GitHub Pages branch or Heroku).
    class Git
      # Build a deployer from an options hash.
      #
      # Recognized keys:
      #   :git_url     - remote repository URL (required; aborts if missing)
      #   :git_branch  - branch deployed to (default 'master')
      #   :remote      - name given to the git remote (default 'deploy')
      #   :remote_path - subdirectory of the branch to publish into (default '')
      #   :site_dir    - directory holding the generated site (default '_site')
      #   :deploy_dir  - local staging repository (default '.deploy')
      #   :dir         - destination directory used by #pull
      #   :config_file - only used for the abort message below
      def initialize(options={})
        @options = options
        @repo = @options[:git_url]
        @branch = @options[:git_branch] || 'master'
        @remote = @options[:remote] || 'deploy'
        @remote_path = @options[:remote_path] || ''
        @remote_path = @remote_path.sub(/^\//,'') # remove leading slash
        @site_dir = File.expand_path(@options[:site_dir] || '_site')
        @deploy_dir = File.expand_path(@options[:deploy_dir] || '.deploy')
        @pull_dir = @options[:dir]
        abort "Deploy Failed: Configure a git_url in #{@options[:config_file]} before deploying.".red if @repo.nil?
      end

      # Initialize, pull, copy and deploy.
      #
      def push
        check_branch
        init_repo
        puts "Syncing #{@site_dir.sub(Dir.pwd.strip+'/', '')} files to #{@repo}."
        FileUtils.cd @deploy_dir do
          git_pull     # sync with the remote before replacing contents
          clean_deploy # wipe staging so deleted site files are removed too
          copy_site    # copy the generated site and commit it
          git_push     # push the commit created by copy_site
        end
      end

      # Clone the deploy branch into @pull_dir (recovers a deployed site).
      def pull
        `git clone -b #{@branch} #{@repo} #{@pull_dir}`
      end

      # Ensure that the deploy branch is not that same as the current working branch
      #
      # Aborts if the working tree's current branch AND remote both match the
      # deploy configuration, since pushing would overwrite the site's source.
      # NOTE(review): `git branch -a` marks the checked-out branch with '*';
      # a branch whose name merely contains @branch could false-positive.
      def check_branch
        same_branch = `git branch -a` =~ /\* #{@branch}/
        if current_remote = `git remote -v`.match(/\s\S+/)
          same_remote = current_remote[0].match(/#{@repo}/)
        end
        if same_remote && same_branch
          puts "Deploy to #{@branch} canceled:".red
          puts "You cannot deploy to the same branch you are working in. This will overwrite the source for your site.\n"
          puts "First, back up your site's source to a branch:"
          puts "\n git checkout -b source".yellow
          puts " git push origin source".yellow
          puts "\nWith that, you'll work in the #{"source".bold} branch and deploy to the #{@branch.bold} branch."
          abort
        end
      end

      # Check to see if local deployment dir is configured to deploy.
      #
      # @return [Boolean, nil] true when @deploy_dir exists and already has
      #   @repo among its remotes; nil when the directory does not exist.
      def check_deploy_dir
        if Dir.exist? @deploy_dir
          FileUtils.cd @deploy_dir do
            return `git remote -v`.include? @repo
          end
        end
      end

      # Render a default git deployment configuration snippet, seeded with
      # any values already present in +options+.
      def self.default_config(options={})
        config = <<-CONFIG
#{"git_url: #{options[:git_url]}".ljust(40)} # remote repository url, e.g. git@github.com:username/repo_name
# Note on git_branch:
# If using GitHub project pages, set the branch to 'gh-pages'.
# For GitHub user/organization pages or Heroku, set the branch to 'master'.
#
#{"git_branch: #{options[:git_branch] || 'master'}".ljust(40)} # Git branch where static site files are commited
        CONFIG
        # comment the remote_path line out when no path was configured
        config << "\n# " unless options[:remote_path]
        config << "#{"remote_path: #{options[:remote_path]}".ljust(38)} # Destination directory"
      end

      # If necessary create deploy directory and initialize it with deployment remote.
      #
      def init_repo
        return if check_deploy_dir
        FileUtils.mkdir_p @deploy_dir
        FileUtils.cd @deploy_dir do
          if Dir[@deploy_dir+'/*'].empty?
            # initialize the repository and add the remote.
            #
            `git init`
            `git remote add #{@remote} #{@repo}`
            # Attempt to pull from the remote.
            #
            if git_pull
              `git branch -m #{@branch}`
            # If no branch exists on remote, create one locally.
            else
              # seed the branch with a throwaway file, then remove it again
              `echo "initialize deploy repo" > _`
              `git add .`
              `git commit -m \"initial commit\"`
              `git branch -m #{@branch}`
              `git rm _`
              `git add -u`
              `git commit -m 'cleanup'`
            end
          end
        end
      end

      # Push the deploy branch to the configured remote.
      def git_push
        `git push #{@remote} #{@branch}`
      end

      # Attempt to pull from the remote branch
      #
      # Pulls only when the branch is known locally as a remote-tracking
      # branch or exists on the remote; otherwise returns nil.
      def git_pull
        if `git branch -a` =~ /remotes\/#{@remote}\/#{@branch}/ ||
          `git ls-remote #{@remote}` =~ /refs\/heads\/#{@branch}/
          `git pull #{@remote} #{@branch}`
        end
      end

      # Remove files in deploy dir, ensuring a 1:1 site files deployment.
      #
      def clean_deploy
        FileUtils.rm_rf(Dir.glob('*'), secure: true)
      end

      # Copy site files into deploy dir.
      #
      # Copies into @remote_path (if configured) and commits the result.
      def copy_site
        target_dir = File.join(@deploy_dir, @remote_path).sub(/\/$/,'')
        FileUtils.cp_r @site_dir + '/.', target_dir
        message = "Site updated at: #{Time.now.utc}"
        `git add --all .`
        `git commit -m \"#{message}\"`
      end
    end
  end
end
# VCS commit note (not code): Ensure repo for deployment matches configuration. Fixes #56
module Octopress
module Deploy
class Git
def initialize(options={})
@options = options
@repo = @options[:git_url]
@branch = @options[:git_branch] || 'master'
@remote = @options[:remote] || 'deploy'
@remote_path = @options[:remote_path] || ''
@remote_path = @remote_path.sub(/^\//,'') #remove leading slash
@site_dir = File.expand_path(@options[:site_dir] || '_site')
@deploy_dir = File.expand_path(@options[:deploy_dir] || '.deploy')
@pull_dir = @options[:dir]
abort "Deploy Failed: Configure a git_url in #{@options[:config_file]} before deploying.".red if @repo.nil?
end
# Initialize, pull, copy and deploy.
#
def push
check_branch
init_repo
puts "Syncing #{@site_dir.sub(Dir.pwd.strip+'/', '')} files to #{@repo}."
FileUtils.cd @deploy_dir do
git_pull
clean_deploy
copy_site
git_push
end
end
def pull
`git clone -b #{@branch} #{@repo} #{@pull_dir}`
end
# Ensure that the deploy branch is not that same as the current working branch
#
def check_branch
same_branch = `git branch -a` =~ /\* #{@branch}/
if current_remote = `git remote -v`.match(/\s\S+/)
same_remote = current_remote[0].match(/#{@repo}/)
end
if same_remote && same_branch
puts "Deploy to #{@branch} canceled:".red
puts "You cannot deploy to the same branch you are working in. This will overwrite the source for your site.\n"
puts "First, back up your site's source to a branch:"
puts "\n git checkout -b source".yellow
puts " git push origin source".yellow
puts "\nWith that, you'll work in the #{"source".bold} branch and deploy to the #{@branch.bold} branch."
abort
end
end
# Check to see if local deployment dir is configured to deploy.
#
def check_deploy_dir
if Dir.exist? @deploy_dir
FileUtils.cd @deploy_dir do
return `git remote -v`.include? @repo
end
end
end
def self.default_config(options={})
config = <<-CONFIG
#{"git_url: #{options[:git_url]}".ljust(40)} # remote repository url, e.g. git@github.com:username/repo_name
# Note on git_branch:
# If using GitHub project pages, set the branch to 'gh-pages'.
# For GitHub user/organization pages or Heroku, set the branch to 'master'.
#
#{"git_branch: #{options[:git_branch] || 'master'}".ljust(40)} # Git branch where static site files are commited
CONFIG
config << "\n# " unless options[:remote_path]
config << "#{"remote_path: #{options[:remote_path]}".ljust(38)} # Destination directory"
end
# If necessary create deploy directory and initialize it with deployment remote.
#
def init_repo
return if check_deploy_dir
FileUtils.mkdir_p @deploy_dir
FileUtils.cd @deploy_dir do
if Dir[@deploy_dir+'/*'].empty?
# initialize the repository and add the remote.
#
`git init`
`git remote add #{@remote} #{@repo}`
# Attempt to pull from the remote.
#
if git_pull
`git branch -m #{@branch}`
# If no branch exists on remote, create one locally.
else
`echo "initialize deploy repo" > _`
`git add .`
`git commit -m \"initial commit\"`
`git branch -m #{@branch}`
`git rm _`
`git add -u`
`git commit -m 'cleanup'`
end
end
end
end
# Push the deploy branch to the configured remote.
#
# Before pushing, verify that the remote named @remote actually points at
# the configured repository URL (@repo). If the deploy directory's remote
# has drifted from the configuration, abort with instructions for
# resetting the local deployment instead of pushing somewhere unexpected.
def git_push
  if `git remote -v` =~ /#{@remote}\s+#{@repo}.+\(push\)/
    `git push #{@remote} #{@branch}`
  else
    remotes = `git remote -v`
    # Look up where the configured deployment remote (not "origin") is
    # actually pointing, so the error can report the mismatched URL.
    push_remote = remotes.match(/^#{@remote}\s+(.+)\s+\(push\)/)
    if push_remote
      abort %Q{Deployment remote #{@remote} is pointing to "#{push_remote[1]}" but configuration points to #{@repo}
To reset your deployment, run:
rm -rf #{@deploy_dir}
octopress deploy}
    else
      abort %Q{Deployment remote configured improperly. To reset your deployment run:
rm -rf #{@deploy_dir}
octopress deploy}
    end
  end
end
# Attempt to pull from the remote branch
#
def git_pull
if `git branch -a` =~ /remotes\/#{@remote}\/#{@branch}/ ||
`git ls-remote #{@remote}` =~ /refs\/heads\/#{@branch}/
`git pull #{@remote} #{@branch}`
end
end
# Remove files in deploy dir, ensuring a 1:1 site files deployment.
#
def clean_deploy
FileUtils.rm_rf(Dir.glob('*'), secure: true)
end
# Copy site files into deploy dir.
#
def copy_site
target_dir = File.join(@deploy_dir, @remote_path).sub(/\/$/,'')
FileUtils.cp_r @site_dir + '/.', target_dir
message = "Site updated at: #{Time.now.utc}"
`git add --all .`
`git commit -m \"#{message}\"`
end
end
end
end
|
# -*- encoding: utf-8 -*-
# Gem specification for jquery_datetimepick: Rails form helpers that extend
# the jquery_datepick gem with datetime-picker styles and javascripts.

# Make ../lib loadable so the version constant can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'jquery_datetimepick/version'

Gem::Specification.new do |gem|
  gem.name          = "jquery_datetimepick"
  gem.version       = JqueryDatetimepick::VERSION
  gem.authors       = ["Herman verschooten"]
  gem.email         = ["Herman@verschooten.net"]
  gem.description   = %q{Rails form helpers for jQuery DateTime picker}
  gem.summary       = %q{This gem requires the jquery_datepick gem and adds the necessary styles and javascripts to make it a datetimepicker}
  gem.homepage      = "http://github.com/Hermanverschooten/jquery_datetimepick"
  # Package every git-tracked file; executables come from bin/.
  gem.files         = `git ls-files`.split($/)
  gem.executables   = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ["lib"]
  gem.add_dependency "jquery_datepick"
  gem.license       = 'MIT'
end
# VCS commit note (not code): add dev dependency on railties
# -*- encoding: utf-8 -*-
# Gem specification for jquery_datetimepick.

# Put ../lib on the load path so the version constant resolves.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'jquery_datetimepick/version'

Gem::Specification.new do |spec|
  spec.name          = "jquery_datetimepick"
  spec.version       = JqueryDatetimepick::VERSION
  spec.authors       = ["Herman verschooten"]
  spec.email         = ["Herman@verschooten.net"]
  spec.description   = "Rails form helpers for jQuery DateTime picker"
  spec.summary       = "This gem requires the jquery_datepick gem and adds the necessary styles and javascripts to make it a datetimepicker"
  spec.homepage      = "http://github.com/Hermanverschooten/jquery_datetimepick"
  spec.license       = 'MIT'

  # Ship all git-tracked files; derive executables and test files from them.
  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}).map { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_dependency "jquery_datepick"
  spec.add_development_dependency "railties"
end
|
# Copyright:: Copyright (c) Chef Software Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "omnibus/sugarable"
require "omnibus/whitelist"
begin
require "pedump"
rescue LoadError
STDERR.puts "pedump not found - windows health checks disabled"
end
module Omnibus
class HealthCheck
include Instrumentation
include Logging
include Util
include Sugarable
class << self
# @see (HealthCheck#new)
def run!(project)
new(project).run!
end
end
#
# The project to healthcheck.
#
# @return [Project]
#
attr_reader :project
#
# Run the healthchecks against the given project. It is assumed that the
# project has already been built.
#
# @param [Project] project
# the project to health check
#
def initialize(project)
@project = project
end
#
# Run the given health check. Healthcheks are skipped on Windows.
#
# @raise [HealthCheckFailed]
# if the health check fails
#
# @return [true]
# if the healthchecks pass
#
def run!
measure("Health check time") do
log.info(log_key) { "Running health on #{project.name}" }
bad_libs, good_libs = case Ohai["platform"]
when "mac_os_x"
health_check_otool
when "aix"
health_check_aix
when "windows"
# TODO: objdump -p will provided a very limited check of
# explicit dependencies on windows. Most dependencies are
# implicit and hence not detected.
log.warn(log_key) { "Skipping dependency health checks on Windows." }
[{}, {}]
when "solaris2"
health_check_solaris
else
health_check_ldd
end
unresolved = []
unreliable = []
detail = []
if bad_libs.keys.length > 0
bad_libs.each do |name, lib_hash|
lib_hash.each do |lib, linked_libs|
linked_libs.each do |linked, count|
if linked =~ /not found/
unresolved << lib unless unresolved.include? lib
else
unreliable << linked unless unreliable.include? linked
end
detail << "#{name}|#{lib}|#{linked}|#{count}"
end
end
end
log.error(log_key) { "Failed!" }
bad_omnibus_libs, bad_omnibus_bins = bad_libs.keys.partition { |k| k.include? "embedded/lib" }
log.error(log_key) do
out = "The following libraries have unsafe or unmet dependencies:\n"
bad_omnibus_libs.each do |lib|
out << " --> #{lib}\n"
end
out
end
log.error(log_key) do
out = "The following binaries have unsafe or unmet dependencies:\n"
bad_omnibus_bins.each do |bin|
out << " --> #{bin}\n"
end
out
end
if unresolved.length > 0
log.error(log_key) do
out = "The following requirements could not be resolved:\n"
unresolved.each do |lib|
out << " --> #{lib}\n"
end
out
end
end
if unreliable.length > 0
log.error(log_key) do
out = "The following libraries cannot be guaranteed to be on "
out << "target systems:\n"
unreliable.each do |lib|
out << " --> #{lib}\n"
end
out
end
end
log.error(log_key) do
out = "The precise failures were:\n"
detail.each do |line|
item, dependency, location, count = line.split("|")
reason = location =~ /not found/ ? "Unresolved dependency" : "Unsafe dependency"
out << " --> #{item}\n"
out << " DEPENDS ON: #{dependency}\n"
out << " COUNT: #{count}\n"
out << " PROVIDED BY: #{location}\n"
out << " FAILED BECAUSE: #{reason}\n"
end
out
end
raise HealthCheckFailed
end
if good_libs.keys.length == 0 && !windows?
raise "Internal error: no good libraries were found"
end
conflict_map = {}
conflict_map = relocation_check if relocation_checkable?
if conflict_map.keys.length > 0
log.warn(log_key) { "Multiple dlls with overlapping images detected" }
conflict_map.each do |lib_name, data|
base = data[:base]
size = data[:size]
next_valid_base = data[:base] + data[:size]
log.warn(log_key) do
out = "Overlapping dll detected:\n"
out << " #{lib_name} :\n"
out << " IMAGE BASE: #{hex}\n" % base
out << " IMAGE SIZE: #{hex} (#{size} bytes)\n" % size
out << " NEXT VALID BASE: #{hex}\n" % next_valid_base
out << " CONFLICTS:\n"
data[:conflicts].each do |conflict_name|
cbase = conflict_map[conflict_name][:base]
csize = conflict_map[conflict_name][:size]
out << " - #{conflict_name} #{hex} + #{hex}\n" % [cbase, csize]
end
out
end
end
# Don't raise an error yet. This is only bad for FIPS mode.
end
true
end
end
# Whether the DLL relocation check can be performed at all: we must be
# on Windows and the optional pedump gem must be loadable.
#
# @return [Boolean]
#
def relocation_checkable?
  return false unless windows?

  require "pedump"
  true
rescue LoadError
  false
end
# Check dll image location overlap/conflicts on windows.
#
# @return [Hash<String, Hash<Symbol, ...>>]
# library_name ->
# :base -> base address
# :size -> the total image size in bytes
# :conflicts -> array of library names that overlap
#
def relocation_check
conflict_map = {}
embedded_bin = "#{project.install_dir}/embedded/bin"
Dir.glob("#{embedded_bin}/*.dll") do |lib_path|
log.debug(log_key) { "Analyzing dependencies for #{lib_path}" }
File.open(lib_path, "rb") do |f|
dump = PEdump.new(lib_path)
pe = dump.pe f
# Don't scan dlls for a different architecture.
next if windows_arch_i386? == pe.x64?
lib_name = File.basename(lib_path)
base = pe.ioh.ImageBase
size = pe.ioh.SizeOfImage
conflicts = []
# This can be done more smartly but O(n^2) is just fine for n = small
conflict_map.each do |candidate_name, details|
unless details[:base] >= base + size ||
details[:base] + details[:size] <= base
details[:conflicts] << lib_name
conflicts << candidate_name
end
end
conflict_map[lib_name] = {
base: base,
size: size,
conflicts: conflicts,
}
log.debug(log_key) { "Discovered #{lib_name} at #{hex} + #{hex}" % [ base, size ] }
end
end
# Filter out non-conflicting entries.
conflict_map.delete_if do |lib_name, details|
details[:conflicts].empty?
end
end
#
# Run healthchecks against otool.
#
# @return [Hash<String, Hash<String, Hash<String, Int>>>]
# the bad libraries (library_name -> dependency_name -> satisfied_lib_path -> count)
#
def health_check_otool
current_library = nil
bad_libs = {}
good_libs = {}
read_shared_libs("find #{project.install_dir}/ -type f | egrep '\.(dylib|bundle)$'", "xargs otool -L") do |line|
case line
when /^(.+):$/
current_library = Regexp.last_match[1]
when /^\s+(.+) \(.+\)$/
linked = Regexp.last_match[1]
name = File.basename(linked)
bad_libs, good_libs = check_for_bad_library(bad_libs, good_libs, current_library, name, linked)
end
end
[bad_libs, good_libs]
end
#
# Run healthchecks against aix.
#
# @return [Hash<String, Hash<String, Hash<String, Int>>>]
# the bad libraries (library_name -> dependency_name -> satisfied_lib_path -> count)
#
def health_check_aix
current_library = nil
bad_libs = {}
good_libs = {}
read_shared_libs("find #{project.install_dir}/ -type f | xargs file | grep \"XCOFF\" | awk -F: '{print $1}'", "xargs -n 1 ldd") do |line|
case line
when /^(.+) needs:$/
current_library = Regexp.last_match[1]
log.debug(log_key) { "Analyzing dependencies for #{current_library}" }
when /^\s+(.+)$/
name = Regexp.last_match[1]
linked = Regexp.last_match[1]
( bad_libs, good_libs ) = check_for_bad_library(bad_libs, good_libs, current_library, name, linked)
when /File is not an executable XCOFF file/ # ignore non-executable files
else
log.warn(log_key) { "Line did not match for #{current_library}\n#{line}" }
end
end
[bad_libs, good_libs]
end
#
# Run healthchecks on Solaris.
#
# @return [Hash<String, Hash<String, Hash<String, Int>>>]
# the bad libraries (library_name -> dependency_name -> satisfied_lib_path -> count)
#
def health_check_solaris
current_library = nil
bad_libs = {}
good_libs = {}
read_shared_libs("find #{project.install_dir}/ -type f | xargs file | grep \"ELF\" | awk -F: '{print $1}' | sed -e 's/:$//'", "xargs -n 1 ldd") do |line|
case line
when /^(.+):$/
current_library = Regexp.last_match[1]
log.debug(log_key) { "Analyzing dependencies for #{current_library}" }
when /^\s+(.+) \=\>\s+(.+)( \(.+\))?$/
name = Regexp.last_match[1]
linked = Regexp.last_match[2]
( bad_libs, good_libs ) = check_for_bad_library(bad_libs, good_libs, current_library, name, linked)
when /^\s+(.+) \(.+\)$/
next
when /^\s+statically linked$/
next
when /^\s+not a dynamic executable$/ # ignore non-executable files
else
log.warn(log_key) do
"Line did not match for #{current_library}\n#{line}"
end
end
end
[bad_libs, good_libs]
end
#
# Run healthchecks against ldd.
#
# @return [Hash<String, Hash<String, Hash<String, Int>>>]
# the bad libraries (library_name -> dependency_name -> satisfied_lib_path -> count)
#
def health_check_ldd
current_library = nil
bad_libs = {}
good_libs = {}
# This algorithm runs on both Linux and FreeBSD and needs to be MANUALLY tested on both
read_shared_libs("find #{project.install_dir}/ -type f", "xargs ldd") do |line|
case line
when /^(.+):$/
current_library = Regexp.last_match[1]
log.debug(log_key) { "Analyzing dependencies for #{current_library}" }
when /^\s+(.+) \=\>\s+(.+)( \(.+\))?$/
name = Regexp.last_match[1]
linked = Regexp.last_match[2]
( bad_libs, good_libs ) = check_for_bad_library(bad_libs, good_libs, current_library, name, linked)
when /^\s+(.+) \(.+\)$/
next
when /^\s+statically linked$/
next
when /^\s+libjvm.so/ # FIXME: should remove if it doesn't blow up server
next
when /^\s+libjava.so/ # FIXME: should remove if it doesn't blow up server
next
when /^\s+libmawt.so/ # FIXME: should remove if it doesn't blow up server
next
when /^\s+not a dynamic executable$/ # ignore non-executable files
else
log.warn(log_key) do
"Line did not match for #{current_library}\n#{line}"
end
end
end
[bad_libs, good_libs]
end
private
#
# printf-style format string for rendering a pointer/size_t value,
# sized to the build platform's word width.
#
# @return [String]
#
def hex
  if windows_arch_i386?
    "0x%08x"
  else
    "0x%016x"
  end
end
#
# All whitelisted (ignored) file patterns declared across the project's
# software components, concatenated into a single list.
#
# @return [Array<String, Regexp>]
#
def whitelist_files
  project.library.components.flat_map(&:whitelist_files)
end
#
# Collect candidate files with +find_command+, filter out ignored entries,
# feed the remaining list to the platform's ldd-style +ldd_command+ on its
# stdin, and yield each line of that command's output for classification.
#
# @param find_command [String]
#   shell command producing one file path per line
# @param ldd_command [String]
#   command (typically via xargs) that lists shared-library dependencies
# @yield [String]
#   each line of the dependency listing
# @raise [RuntimeError]
#   if the find output is empty or contains paths outside install_dir
#
def read_shared_libs(find_command, ldd_command, &output_proc)
  #
  # construct the list of files to check
  #
  find_output = shellout!(find_command).stdout.lines
  # drop files whose suffix or path marks them as not health-checkable
  find_output.reject! { |file| IGNORED_ENDINGS.any? { |ending| file.end_with?("#{ending}\n") } }
  find_output.reject! { |file| IGNORED_SUBSTRINGS.any? { |substr| file.include?(substr) } }
  if find_output.empty?
    # probably the find_command is busted, it should never be empty or why are you using omnibus?
    raise "Internal Error: Health Check found no lines"
  end
  if find_output.any? { |file| file !~ Regexp.new(project.install_dir) }
    # every file in the find output should be within the install_dir
    raise "Internal Error: Health Check lines not matching the install_dir"
  end
  #
  # feed the list of files to the "ldd" command
  #
  # this command will typically fail if the last file isn't a valid lib/binary which happens often
  ldd_output = shellout(ldd_command, input: find_output.join).stdout
  #
  # do the output process to determine if the files are good or bad
  #
  ldd_output.each_line do |line|
    output_proc.call(line)
  end
end
#
# Classify one linked-library dependency of +current_library+ as safe or bad.
#
# A dependency counts as safe when any of these hold:
#   * its name matches the per-platform system-library whitelist,
#   * the binary being analyzed matches a project/software whitelist pattern,
#   * the resolved path lives inside the project's install_dir.
# Anything else is recorded in bad_libs with an occurrence count.
#
# @param [Hash<String, Hash<String, Hash<String, Int>>>] bad_libs
#   the bad libraries (library_name -> dependency_name -> satisfied_lib_path -> count)
# @param [Hash] good_libs
#   library_name -> true for binaries whose dependencies checked out
# @param [String] current_library
#   the library being analyzed
# @param [String] name
#   dependency library name
# @param [String] linked
#   actual path of library satisfying the dependency
#
# @return [Array(Hash, Hash)] the (possibly updated) [bad_libs, good_libs]
#
def check_for_bad_library(bad_libs, good_libs, current_library, name, linked)
  safe = nil
  # choose the system-library whitelist for the build platform
  whitelist_libs = case Ohai["platform"]
                   when "arch"
                     ARCH_WHITELIST_LIBS
                   when "mac_os_x"
                     MAC_WHITELIST_LIBS
                   when "omnios"
                     OMNIOS_WHITELIST_LIBS
                   when "solaris2"
                     SOLARIS_WHITELIST_LIBS
                   when "smartos"
                     SMARTOS_WHITELIST_LIBS
                   when "freebsd"
                     FREEBSD_WHITELIST_LIBS
                   when "aix"
                     AIX_WHITELIST_LIBS
                   else
                     WHITELIST_LIBS
                   end
  # safe stays nil unless some whitelist regex matches
  whitelist_libs.each do |reg|
    safe ||= true if reg.match(name)
  end
  whitelist_files.each do |reg|
    safe ||= true if reg.match(current_library)
  end
  log.debug(log_key) { "    --> Dependency: #{name}" }
  log.debug(log_key) { "    --> Provided by: #{linked}" }
  if !safe && linked !~ Regexp.new(project.install_dir)
    log.debug(log_key) { "    -> FAILED: #{current_library} has unsafe dependencies" }
    bad_libs[current_library] ||= {}
    bad_libs[current_library][name] ||= {}
    if bad_libs[current_library][name].key?(linked)
      bad_libs[current_library][name][linked] += 1
    else
      bad_libs[current_library][name][linked] = 1
    end
  else
    good_libs[current_library] = true
    log.debug(log_key) { "    -> PASSED: #{name} is either whitelisted or safely provided." }
  end
  [bad_libs, good_libs]
end
end
end
# VCS commit note (not code): extract freebsd method and fix it
# Signed-off-by: Lamont Granquist <0ab8dc438f73addc98d9ad5925ec8f2b97991703@scriptkiddie.org>
# Copyright:: Copyright (c) Chef Software Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "omnibus/sugarable"
require "omnibus/whitelist"
begin
require "pedump"
rescue LoadError
STDERR.puts "pedump not found - windows health checks disabled"
end
module Omnibus
class HealthCheck
include Instrumentation
include Logging
include Util
include Sugarable
class << self
# @see (HealthCheck#new)
def run!(project)
new(project).run!
end
end
#
# The project to healthcheck.
#
# @return [Project]
#
attr_reader :project
#
# Run the healthchecks against the given project. It is assumed that the
# project has already been built.
#
# @param [Project] project
# the project to health check
#
def initialize(project)
@project = project
end
#
# Run the given health check. Healthcheks are skipped on Windows.
#
# @raise [HealthCheckFailed]
# if the health check fails
#
# @return [true]
# if the healthchecks pass
#
def run!
measure("Health check time") do
log.info(log_key) { "Running health on #{project.name}" }
bad_libs, good_libs =
case Ohai["platform"]
when "mac_os_x"
health_check_otool
when "aix"
health_check_aix
when "windows"
# TODO: objdump -p will provided a very limited check of
# explicit dependencies on windows. Most dependencies are
# implicit and hence not detected.
log.warn(log_key) { "Skipping dependency health checks on Windows." }
[{}, {}]
when "solaris2"
health_check_solaris
when "freebsd", "openbsd", "netbsd"
health_check_freebsd
else
health_check_linux
end
unresolved = []
unreliable = []
detail = []
if bad_libs.keys.length > 0
bad_libs.each do |name, lib_hash|
lib_hash.each do |lib, linked_libs|
linked_libs.each do |linked, count|
if linked =~ /not found/
unresolved << lib unless unresolved.include? lib
else
unreliable << linked unless unreliable.include? linked
end
detail << "#{name}|#{lib}|#{linked}|#{count}"
end
end
end
log.error(log_key) { "Failed!" }
bad_omnibus_libs, bad_omnibus_bins = bad_libs.keys.partition { |k| k.include? "embedded/lib" }
log.error(log_key) do
out = "The following libraries have unsafe or unmet dependencies:\n"
bad_omnibus_libs.each do |lib|
out << " --> #{lib}\n"
end
out
end
log.error(log_key) do
out = "The following binaries have unsafe or unmet dependencies:\n"
bad_omnibus_bins.each do |bin|
out << " --> #{bin}\n"
end
out
end
if unresolved.length > 0
log.error(log_key) do
out = "The following requirements could not be resolved:\n"
unresolved.each do |lib|
out << " --> #{lib}\n"
end
out
end
end
if unreliable.length > 0
log.error(log_key) do
out = "The following libraries cannot be guaranteed to be on "
out << "target systems:\n"
unreliable.each do |lib|
out << " --> #{lib}\n"
end
out
end
end
log.error(log_key) do
out = "The precise failures were:\n"
detail.each do |line|
item, dependency, location, count = line.split("|")
reason = location =~ /not found/ ? "Unresolved dependency" : "Unsafe dependency"
out << " --> #{item}\n"
out << " DEPENDS ON: #{dependency}\n"
out << " COUNT: #{count}\n"
out << " PROVIDED BY: #{location}\n"
out << " FAILED BECAUSE: #{reason}\n"
end
out
end
raise HealthCheckFailed
end
if good_libs.keys.length == 0 && !windows?
raise "Internal error: no good libraries were found"
end
conflict_map = {}
conflict_map = relocation_check if relocation_checkable?
if conflict_map.keys.length > 0
log.warn(log_key) { "Multiple dlls with overlapping images detected" }
conflict_map.each do |lib_name, data|
base = data[:base]
size = data[:size]
next_valid_base = data[:base] + data[:size]
log.warn(log_key) do
out = "Overlapping dll detected:\n"
out << " #{lib_name} :\n"
out << " IMAGE BASE: #{hex}\n" % base
out << " IMAGE SIZE: #{hex} (#{size} bytes)\n" % size
out << " NEXT VALID BASE: #{hex}\n" % next_valid_base
out << " CONFLICTS:\n"
data[:conflicts].each do |conflict_name|
cbase = conflict_map[conflict_name][:base]
csize = conflict_map[conflict_name][:size]
out << " - #{conflict_name} #{hex} + #{hex}\n" % [cbase, csize]
end
out
end
end
# Don't raise an error yet. This is only bad for FIPS mode.
end
true
end
end
# Ensure the method relocation_check is able to run
#
# @return [Boolean]
#
def relocation_checkable?
return false unless windows?
begin
require "pedump"
true
rescue LoadError
false
end
end
# Check dll image location overlap/conflicts on windows.
#
# @return [Hash<String, Hash<Symbol, ...>>]
# library_name ->
# :base -> base address
# :size -> the total image size in bytes
# :conflicts -> array of library names that overlap
#
def relocation_check
conflict_map = {}
embedded_bin = "#{project.install_dir}/embedded/bin"
Dir.glob("#{embedded_bin}/*.dll") do |lib_path|
log.debug(log_key) { "Analyzing dependencies for #{lib_path}" }
File.open(lib_path, "rb") do |f|
dump = PEdump.new(lib_path)
pe = dump.pe f
# Don't scan dlls for a different architecture.
next if windows_arch_i386? == pe.x64?
lib_name = File.basename(lib_path)
base = pe.ioh.ImageBase
size = pe.ioh.SizeOfImage
conflicts = []
# This can be done more smartly but O(n^2) is just fine for n = small
conflict_map.each do |candidate_name, details|
unless details[:base] >= base + size ||
details[:base] + details[:size] <= base
details[:conflicts] << lib_name
conflicts << candidate_name
end
end
conflict_map[lib_name] = {
base: base,
size: size,
conflicts: conflicts,
}
log.debug(log_key) { "Discovered #{lib_name} at #{hex} + #{hex}" % [ base, size ] }
end
end
# Filter out non-conflicting entries.
conflict_map.delete_if do |lib_name, details|
details[:conflicts].empty?
end
end
#
# Run healthchecks against otool.
#
# @return [Hash<String, Hash<String, Hash<String, Int>>>]
# the bad libraries (library_name -> dependency_name -> satisfied_lib_path -> count)
#
def health_check_otool
current_library = nil
bad_libs = {}
good_libs = {}
read_shared_libs("find #{project.install_dir}/ -type f | egrep '\.(dylib|bundle)$'", "xargs otool -L") do |line|
case line
when /^(.+):$/
current_library = Regexp.last_match[1]
when /^\s+(.+) \(.+\)$/
linked = Regexp.last_match[1]
name = File.basename(linked)
bad_libs, good_libs = check_for_bad_library(bad_libs, good_libs, current_library, name, linked)
end
end
[bad_libs, good_libs]
end
#
# Run healthchecks against aix.
#
# @return [Hash<String, Hash<String, Hash<String, Int>>>]
# the bad libraries (library_name -> dependency_name -> satisfied_lib_path -> count)
#
def health_check_aix
current_library = nil
bad_libs = {}
good_libs = {}
read_shared_libs("find #{project.install_dir}/ -type f | xargs file | grep \"XCOFF\" | awk -F: '{print $1}'", "xargs -n 1 ldd") do |line|
case line
when /^(.+) needs:$/
current_library = Regexp.last_match[1]
log.debug(log_key) { "Analyzing dependencies for #{current_library}" }
when /^\s+(.+)$/
name = Regexp.last_match[1]
linked = Regexp.last_match[1]
( bad_libs, good_libs ) = check_for_bad_library(bad_libs, good_libs, current_library, name, linked)
when /File is not an executable XCOFF file/ # ignore non-executable files
else
log.warn(log_key) { "Line did not match for #{current_library}\n#{line}" }
end
end
[bad_libs, good_libs]
end
#
# Run healthchecks on Solaris.
#
# @return [Hash<String, Hash<String, Hash<String, Int>>>]
# the bad libraries (library_name -> dependency_name -> satisfied_lib_path -> count)
#
def health_check_solaris
current_library = nil
bad_libs = {}
good_libs = {}
read_shared_libs("find #{project.install_dir}/ -type f | xargs file | grep \"ELF\" | awk -F: '{print $1}' | sed -e 's/:$//'", "xargs -n 1 ldd") do |line|
case line
when /^(.+):$/
current_library = Regexp.last_match[1]
log.debug(log_key) { "Analyzing dependencies for #{current_library}" }
when /^\s+(.+) \=\>\s+(.+)( \(.+\))?$/
name = Regexp.last_match[1]
linked = Regexp.last_match[2]
( bad_libs, good_libs ) = check_for_bad_library(bad_libs, good_libs, current_library, name, linked)
when /^\s+(.+) \(.+\)$/
next
when /^\s+statically linked$/
next
when /^\s+not a dynamic executable$/ # ignore non-executable files
else
log.warn(log_key) do
"Line did not match for #{current_library}\n#{line}"
end
end
end
[bad_libs, good_libs]
end
#
# Run healthchecks on FreeBSD (and the other BSDs routed here by run!).
#
# Uses `file`/`grep "ELF"` to restrict the install tree to ELF objects,
# then parses plain `xargs ldd` output (no -n 1, unlike the Solaris check).
#
# @return [Hash<String, Hash<String, Hash<String, Int>>>]
#   the bad libraries (library_name -> dependency_name -> satisfied_lib_path -> count)
#
def health_check_freebsd
  current_library = nil
  bad_libs = {}
  good_libs = {}
  read_shared_libs("find #{project.install_dir}/ -type f | xargs file | grep \"ELF\" | awk -F: '{print $1}' | sed -e 's/:$//'", "xargs ldd") do |line|
    case line
    when /^(.+):$/
      # a new binary's section begins
      current_library = Regexp.last_match[1]
      log.debug(log_key) { "Analyzing dependencies for #{current_library}" }
    when /^\s+(.+) \=\>\s+(.+)( \(.+\))?$/
      # "name => path" dependency line
      name = Regexp.last_match[1]
      linked = Regexp.last_match[2]
      ( bad_libs, good_libs ) = check_for_bad_library(bad_libs, good_libs, current_library, name, linked)
    when /^\s+(.+) \(.+\)$/
      next
    when /^\s+statically linked$/
      next
    when /^\s+not a dynamic executable$/ # ignore non-executable files
    else
      log.warn(log_key) do
        "Line did not match for #{current_library}\n#{line}"
      end
    end
  end
  [bad_libs, good_libs]
end
#
# Run healthchecks against ldd.
#
# @return [Hash<String, Hash<String, Hash<String, Int>>>]
# the bad libraries (library_name -> dependency_name -> satisfied_lib_path -> count)
#
def health_check_linux
current_library = nil
bad_libs = {}
good_libs = {}
read_shared_libs("find #{project.install_dir}/ -type f", "xargs ldd") do |line|
case line
when /^(.+):$/
current_library = Regexp.last_match[1]
log.debug(log_key) { "Analyzing dependencies for #{current_library}" }
when /^\s+(.+) \=\>\s+(.+)( \(.+\))?$/
name = Regexp.last_match[1]
linked = Regexp.last_match[2]
( bad_libs, good_libs ) = check_for_bad_library(bad_libs, good_libs, current_library, name, linked)
when /^\s+(.+) \(.+\)$/
next
when /^\s+statically linked$/
next
when /^\s+libjvm.so/ # FIXME: should remove if it doesn't blow up server
next
when /^\s+libjava.so/ # FIXME: should remove if it doesn't blow up server
next
when /^\s+libmawt.so/ # FIXME: should remove if it doesn't blow up server
next
when /^\s+not a dynamic executable$/ # ignore non-executable files
else
log.warn(log_key) do
"Line did not match for #{current_library}\n#{line}"
end
end
end
[bad_libs, good_libs]
end
private
#
# This is the printf style format string to render a pointer/size_t on the
# current platform.
#
# @return [String]
#
def hex
windows_arch_i386? ? "0x%08x" : "0x%016x"
end
#
# The list of whitelisted (ignored) files from the project and softwares.
#
# @return [Array<String, Regexp>]
#
def whitelist_files
  # flat_map concatenates every component's whitelist in one pass,
  # replacing the original inject-with-reassignment accumulator.
  project.library.components.flat_map(&:whitelist_files)
end
#
# Execute the find command to collect the files to check, then pipe that
# file list into the ldd command, yielding each line of ldd's output.
#
# @param [String] find_command
#   command that lists the candidate files (one per line)
# @param [String] ldd_command
#   command the file list is piped into (e.g. "xargs ldd")
# @yield [String]
#   each line of ldd output
#
def read_shared_libs(find_command, ldd_command, &output_proc)
#
# construct the list of files to check
#
find_output = shellout!(find_command).stdout.lines
find_output.reject! { |file| IGNORED_ENDINGS.any? { |ending| file.end_with?("#{ending}\n") } }
find_output.reject! { |file| IGNORED_SUBSTRINGS.any? { |substr| file.include?(substr) } }
if find_output.empty?
# probably the find_command is busted, it should never be empty or why are you using omnibus?
raise "Internal Error: Health Check found no lines"
end
# NOTE(review): install_dir is used as an unescaped regex pattern here;
# regex metacharacters in the path would change the match — confirm paths
# are always plain.
if find_output.any? { |file| file !~ Regexp.new(project.install_dir) }
# every file in the find output should be within the install_dir
raise "Internal Error: Health Check lines not matching the install_dir"
end
#
# feed the list of files to the "ldd" command
#
# this command will typically fail if the last file isn't a valid lib/binary which happens often
ldd_output = shellout(ldd_command, input: find_output.join).stdout
#
# do the output process to determine if the files are good or bad
#
ldd_output.each_line do |line|
output_proc.call(line)
end
end
#
# Check the given dependency of a library against the platform whitelist.
#
# @param [Hash<String, Hash<String, Hash<String, Int>>>] bad_libs
#   the bad libraries (library_name -> dependency_name -> satisfied_lib_path -> count)
# @param [Hash<String, true>] good_libs
#   the libraries that have passed the check so far
# @param [String] current_library
#   the library being analyzed
# @param [String] name
#   dependency library name
# @param [String] linked
#   actual path of library satisfying the dependency
#
# @return [Array(Hash, Hash)] the modified [bad_libs, good_libs] pair
#
def check_for_bad_library(bad_libs, good_libs, current_library, name, linked)
  # Pick the whitelist for the current platform.
  whitelist_libs = case Ohai["platform"]
                   when "arch"
                     ARCH_WHITELIST_LIBS
                   when "mac_os_x"
                     MAC_WHITELIST_LIBS
                   when "omnios"
                     OMNIOS_WHITELIST_LIBS
                   when "solaris2"
                     SOLARIS_WHITELIST_LIBS
                   when "smartos"
                     SMARTOS_WHITELIST_LIBS
                   when "freebsd"
                     FREEBSD_WHITELIST_LIBS
                   when "aix"
                     AIX_WHITELIST_LIBS
                   else
                     WHITELIST_LIBS
                   end
  # A dependency is safe when either the dependency name matches a
  # whitelisted library pattern or the library under analysis matches a
  # whitelisted file pattern.
  safe = whitelist_libs.any? { |reg| reg.match(name) } ||
         whitelist_files.any? { |reg| reg.match(current_library) }
  log.debug(log_key) { " --> Dependency: #{name}" }
  log.debug(log_key) { " --> Provided by: #{linked}" }
  if !safe && linked !~ Regexp.new(project.install_dir)
    log.debug(log_key) { " -> FAILED: #{current_library} has unsafe dependencies" }
    bad_libs[current_library] ||= {}
    deps = (bad_libs[current_library][name] ||= {})
    # Count how many times this exact resolution was seen.
    deps[linked] = deps.fetch(linked, 0) + 1
  else
    good_libs[current_library] = true
    log.debug(log_key) { " -> PASSED: #{name} is either whitelisted or safely provided." }
  end
  [bad_libs, good_libs]
end
end
end
|
module Onesky
module Rails
# Gem version string (semantic versioning).
VERSION = "1.3.0"
end
end
Bump version from 1.3.0 to 1.3.1.
module Onesky
module Rails
# Gem version string (semantic versioning).
VERSION = "1.3.1"
end
end
|
module Onesky
module Rails
# Gem version string (semantic versioning).
VERSION = "1.1.0"
end
end
Bump version from 1.1.0 to 1.2.0.
module Onesky
module Rails
# Gem version string (semantic versioning).
VERSION = "1.2.0"
end
end
|
require "openxml/package"
require "open-uri"
module OpenXml
module Docx
# Word-document (.docx) flavor of OpenXml::Package.
#
# On construction it creates the core document parts (document, settings,
# styles, fonts, numbering), wires up their relationships and registers
# them under /word/*. Headers, footers, fonts and images can then be
# embedded on demand.
class Package < OpenXml::Package
  attr_reader :document,
              :settings,
              :headers,
              :footers,
              :styles,
              :fonts,
              :numbering,
              :image_names

  # Default and override MIME types for the parts every package contains.
  content_types do
    default "xml", TYPE_XML
    default "odttf", TYPE_OBSCURED_FONT
    default "jpeg", TYPE_IMAGE[:jpeg]
    default "png", TYPE_IMAGE[:png]
    default "gif", TYPE_IMAGE[:gif]
    default "bmp", TYPE_IMAGE[:bmp]
    default "tiff", TYPE_IMAGE[:tiff]
    override "/word/styles.xml", TYPE_STYLES
    override "/word/settings.xml", TYPE_SETTINGS
    override "/word/fontTable.xml", TYPE_FONT_TABLE
    override "/word/numbering.xml", TYPE_NUMBERING
  end

  def initialize
    super
    rels.add_relationship REL_DOCUMENT, "/word/document.xml"
    @settings = OpenXml::Docx::Parts::Settings.new
    @styles = OpenXml::Docx::Parts::Styles.new
    @fonts = OpenXml::Docx::Parts::Fonts.new
    @numbering = OpenXml::Docx::Parts::Numbering.new
    @document = OpenXml::Docx::Parts::Document.new
    @headers = []
    @footers = []
    @image_names = []
    document.relationships.add_relationship REL_STYLES, "styles.xml"
    document.relationships.add_relationship REL_SETTINGS, "settings.xml"
    document.relationships.add_relationship REL_FONT_TABLE, "fontTable.xml"
    document.relationships.add_relationship REL_NUMBERING, "numbering.xml"
    add_part "word/_rels/document.xml.rels", document.relationships
    add_part "word/_rels/fontTable.xml.rels", fonts.relationships
    add_part "word/document.xml", document
    add_part "word/settings.xml", settings
    add_part "word/styles.xml", styles
    add_part "word/fontTable.xml", fonts
    add_part "word/numbering.xml", numbering
  end

  # Obfuscates (per the OOXML spec) and embeds the TrueType font at +path+,
  # registering it in the font table under +name+.
  # NOTE(review): Kernel#open accepts open-uri URLs and command pipes
  # ("|cmd") — ensure +path+ is trusted.
  def embed_truetype_font(path: nil, name: nil)
    open(path, "rb") do |source_font|
      obfuscation_data = obfuscate_font source_font
      data = obfuscation_data[:bytes] << source_font.read
      destination_font_name = "font#{fonts.fonts.count + 1}.odttf"
      add_part "word/fonts/#{destination_font_name}", OpenXml::Parts::UnparsedPart.new(data)
      font_relationship = fonts.relationships.add_relationship REL_FONT, "fonts/#{destination_font_name}"
      font_description = OpenXml::Docx::Elements::Font.new
      font_description.font_name = name
      embed_tag = OpenXml::Docx::Elements::EmbedRegular.new
      embed_tag.font_key = "{#{obfuscation_data[:key]}}"
      embed_tag.relationship_id = font_relationship.id
      font_description << embed_tag
      fonts << font_description
    end
  end

  # Embeds the image at +path+ (file path or URL), inferring the content
  # type from the extension when not given. Returns the relationship id,
  # or nil when path/content type cannot be determined.
  def embed_image(path: nil, content_type: nil, into_part: nil)
    return if path.nil?
    extension_match = path.match(/\.(?<extension>[^\.]+?)(?:\?.+)?$/)
    content_type ||= extension_match[:extension] if extension_match
    return if content_type.nil?
    open(path, "rb") do |source_image|
      embed_image_data(data: source_image.read, content_type: content_type, into_part: into_part)
    end
  end

  # Stores raw image bytes as the next media part and relates it to
  # +into_part+ (defaults to the document). Returns the relationship id.
  def embed_image_data(data: nil, content_type: nil, into_part: nil)
    return if data.nil? || content_type.nil?
    into_part = document unless into_part.respond_to?(:relationships)
    content_type = "jpeg" if content_type == "jpg"
    content_type = content_type.to_sym
    destination_image_name = "image#{image_names.count + 1}.#{content_type}"
    add_part "word/media/#{destination_image_name}", OpenXml::Parts::UnparsedPart.new(data)
    image_names << destination_image_name
    image_relationship = into_part.relationships.add_relationship REL_IMAGE, "media/#{destination_image_name}"
    image_relationship.id
  end

  # Adds +header+ as the next headerN.xml part and returns the id of its
  # relationship from the document.
  def add_header(header)
    headers << header
    header_name = "header#{headers.count}.xml"
    # Register the override on THIS package's content types. The previous
    # `Package.content_types { override ... }` re-ran the class-level DSL,
    # building a fresh collection that never reached the serialized
    # [Content_Types].xml (issue #73).
    content_types.add_override "/word/#{header_name}", TYPE_HEADER
    add_part "word/#{header_name}", header
    add_part "word/_rels/#{header_name}.rels", header.relationships
    relationship = document.relationships.add_relationship REL_HEADER, header_name
    relationship.id
  end

  # Adds +footer+ as the next footerN.xml part and returns the id of its
  # relationship from the document.
  def add_footer(footer)
    footers << footer
    footer_name = "footer#{footers.count}.xml"
    # See add_header: mutate the instance's content types, not the class DSL.
    content_types.add_override "/word/#{footer_name}", TYPE_FOOTER
    add_part "word/#{footer_name}", footer
    add_part "word/_rels/#{footer_name}.rels", footer.relationships
    relationship = document.relationships.add_relationship REL_FOOTER, footer_name
    relationship.id
  end

  private

  def obfuscate_font(font)
    # From the OpenXml spec, section 17.8.1, the algorithm for obfuscating a font:
    # - Generate a GUID, which is used and stored as the obfuscation key
    # - Reverse the order of the bytes in the GUID (i.e. Big Endian ordering)
    # - XOR the value with the first 32 bytes of the binary: once against 0-15, once against 16-31
    # - Store the resulting file in the document, and store the obfuscation key in the fontKey attribute
    key = SecureRandom::uuid.upcase # Spec requires hex characters be uppercase
    raw_key = key.gsub("-", "")
    big_endian_key = [raw_key].pack("H*").bytes.reverse
    obfuscated_bytes = []
    2.times do
      bytes = font.read(16).bytes
      (0...16).each do |index|
        obfuscated_bytes << (bytes[index] ^ big_endian_key[index])
      end
    end
    { key: key, bytes: obfuscated_bytes.pack("C*") }
  end
end
end
end
Add header/footer mime type overrides to existing ContentTypes
Fixes #73
require "openxml/package"
require "open-uri"
module OpenXml
module Docx
# Word-document (.docx) flavor of OpenXml::Package.
#
# On construction it creates the core document parts (document, settings,
# styles, fonts, numbering), wires up their relationships and registers
# them under /word/*. Headers, footers, fonts and images can then be
# embedded on demand.
class Package < OpenXml::Package
attr_reader :document,
:settings,
:headers,
:footers,
:styles,
:fonts,
:numbering,
:image_names
# Default and override MIME types for the parts every package contains.
content_types do
default "xml", TYPE_XML
default "odttf", TYPE_OBSCURED_FONT
default "jpeg", TYPE_IMAGE[:jpeg]
default "png", TYPE_IMAGE[:png]
default "gif", TYPE_IMAGE[:gif]
default "bmp", TYPE_IMAGE[:bmp]
default "tiff", TYPE_IMAGE[:tiff]
override "/word/styles.xml", TYPE_STYLES
override "/word/settings.xml", TYPE_SETTINGS
override "/word/fontTable.xml", TYPE_FONT_TABLE
override "/word/numbering.xml", TYPE_NUMBERING
end
def initialize
super
rels.add_relationship REL_DOCUMENT, "/word/document.xml"
@settings = OpenXml::Docx::Parts::Settings.new
@styles = OpenXml::Docx::Parts::Styles.new
@fonts = OpenXml::Docx::Parts::Fonts.new
@numbering = OpenXml::Docx::Parts::Numbering.new
@document = OpenXml::Docx::Parts::Document.new
@headers = []
@footers = []
@image_names = []
document.relationships.add_relationship REL_STYLES, "styles.xml"
document.relationships.add_relationship REL_SETTINGS, "settings.xml"
document.relationships.add_relationship REL_FONT_TABLE, "fontTable.xml"
document.relationships.add_relationship REL_NUMBERING, "numbering.xml"
add_part "word/_rels/document.xml.rels", document.relationships
add_part "word/_rels/fontTable.xml.rels", fonts.relationships
add_part "word/document.xml", document
add_part "word/settings.xml", settings
add_part "word/styles.xml", styles
add_part "word/fontTable.xml", fonts
add_part "word/numbering.xml", numbering
end
# Obfuscates (per the OOXML spec) and embeds the TrueType font at +path+,
# registering it in the font table under +name+.
# NOTE(review): Kernel#open accepts open-uri URLs and command pipes
# ("|cmd") — ensure +path+ is trusted.
def embed_truetype_font(path: nil, name: nil)
open(path, "rb") do |source_font|
obfuscation_data = obfuscate_font source_font
data = obfuscation_data[:bytes] << source_font.read
destination_font_name = "font#{fonts.fonts.count + 1}.odttf"
add_part "word/fonts/#{destination_font_name}", OpenXml::Parts::UnparsedPart.new(data)
font_relationship = fonts.relationships.add_relationship REL_FONT, "fonts/#{destination_font_name}"
font_description = OpenXml::Docx::Elements::Font.new
font_description.font_name = name
embed_tag = OpenXml::Docx::Elements::EmbedRegular.new
embed_tag.font_key = "{#{obfuscation_data[:key]}}"
embed_tag.relationship_id = font_relationship.id
font_description << embed_tag
fonts << font_description
end
end
# Embeds the image at +path+ (file path or URL), inferring the content
# type from the file extension when not given explicitly.
def embed_image(path: nil, content_type: nil, into_part: nil)
return if path.nil?
extension_match = path.match(/\.(?<extension>[^\.]+?)(?:\?.+)?$/)
content_type ||= extension_match[:extension] if extension_match
return if content_type.nil?
open(path, "rb") do |source_image|
embed_image_data(data: source_image.read, content_type: content_type, into_part: into_part)
end
end
# Stores raw image bytes as the next media part and relates it to
# +into_part+ (defaults to the document). Returns the relationship id.
def embed_image_data(data: nil, content_type: nil, into_part: nil)
return if data.nil? || content_type.nil?
into_part = document unless into_part.respond_to?(:relationships)
content_type = "jpeg" if content_type == "jpg"
content_type = content_type.to_sym
destination_image_name = "image#{image_names.count + 1}.#{content_type}"
add_part "word/media/#{destination_image_name}", OpenXml::Parts::UnparsedPart.new(data)
image_names << destination_image_name
image_relationship = into_part.relationships.add_relationship REL_IMAGE, "media/#{destination_image_name}"
image_relationship.id
end
# Adds +header+ as the next headerN.xml part, declares its MIME type on
# this package's content types, and returns the document relationship id.
def add_header(header)
headers << header
header_name = "header#{headers.count}.xml"
content_types.add_override "/word/#{header_name}", TYPE_HEADER
add_part "word/#{header_name}", header
add_part "word/_rels/#{header_name}.rels", header.relationships
relationship = document.relationships.add_relationship REL_HEADER, header_name
relationship.id
end
# Adds +footer+ as the next footerN.xml part, declares its MIME type on
# this package's content types, and returns the document relationship id.
def add_footer(footer)
footers << footer
footer_name = "footer#{footers.count}.xml"
content_types.add_override "/word/#{footer_name}", TYPE_FOOTER
add_part "word/#{footer_name}", footer
add_part "word/_rels/#{footer_name}.rels", footer.relationships
relationship = document.relationships.add_relationship REL_FOOTER, footer_name
relationship.id
end
private
def obfuscate_font(font)
# From the OpenXml spec, section 17.8.1, the algorithm for obfuscating a font:
# - Generate a GUID, which is used and stored as the obfuscation key
# - Reverse the order of the bytes in the GUID (i.e. Big Endian ordering)
# - XOR the value with the first 32 bytes of the binary: once against 0-15, once against 16-31
# - Store the resulting file in the document, and store the obfuscation key in the fontKey attribute
key = SecureRandom::uuid.upcase # Spec requires hex characters be uppercase
raw_key = key.gsub("-", "")
big_endian_key = [raw_key].pack("H*").bytes.reverse
obfuscated_bytes = []
2.times do
bytes = font.read(16).bytes
(0...16).each do |index|
obfuscated_bytes << (bytes[index] ^ big_endian_key[index])
end
end
{ key: key, bytes: obfuscated_bytes.pack("C*") }
end
end
end
end
|
module OrdinaryCms
# Gem version string.
VERSION = "0.0.1"
end
Bump version from 0.0.1 to 0.1.1.
module OrdinaryCms
# Gem version string.
VERSION = "0.1.1"
end
|
require 'json'
module Origen
module Registers
# The register class can be used to represent not only hardware registers,
# but really any entity which has an address and data component, such as a specific RAM location.<br>
# Any registers instantiated through Origen::Registers#add_reg are instances of this class.
#
# All methods in BitCollection can also be called on a Reg object.
class Reg
include Origen::SubBlocks::Path
include Origen::SubBlocks::Domains
# These attributes can be defined on a register at definition time and will get applied
# to all of its contained bits unless a specific bit has its own definition of the same
# attribute. The leading underscore distinguishes register-level attributes
# arriving from a Placeholder; aliases are the user-facing option names.
REG_LEVEL_ATTRIBUTES = {
_feature: {},
_reset: { aliases: [:res] },
_memory: {},
_path: { aliases: [:hdl_path] },
_abs_path: { aliases: [:absolute_path] },
_access: {},
_bit_order: {}
}
# Returns the object that own the register.
# ==== Example
# $soc.reg(:blah).owner # Returns the $soc object
attr_reader :owner
alias_method :parent, :owner
# The base address of the register, this will be set dynamically
# by Origen based on the parent's base address
attr_accessor :base_address
attr_writer :address # :nodoc:
# Returns an integer representing the number of bits in the register
attr_reader :size
# The register name
attr_accessor :name
# Any feature associated with the register
attr_accessor :feature
attr_accessor :grows_backwards # :nodoc:
attr_accessor :lookup # :nodoc:
# Returns a full path to the file in which the register was defined
attr_reader :define_file
# Returns any application-specific meta-data attached to the given register
attr_accessor :meta
alias_method :meta_data, :meta
alias_method :metadata, :meta
# If the given register's reset data is backed by memory, the memory address can
# be recorded in this attribute
attr_accessor :memory
# Normally shouldn't be called directly, instantiate through add_reg
# Upon initialization bits are stored as follows:
# @bits -
# An array of bit objects in position order, @bits[5] corresponds
# to the bit at position 5
# @lookup -
# A Hash lookup table for quickly accessing bit objects by name
# @lookup = { :bit_or_bus_name => {:pos => 3, :bits => 4} }
def initialize(owner, address, size, name, options = {}) # :nodoc:
@owner = owner
@address = address
@size = size
@bits = []
@lookup = {}
@name = name
@init_as_writable = options.delete(:init_as_writable)
@define_file = options.delete(:define_file)
@from_placeholder = options.delete(:from_placeholder) || false
# Pull the register-level attributes out of the options; placeholder-sourced
# options carry the underscore prefix, direct Reg.new options may not.
REG_LEVEL_ATTRIBUTES.each do |attribute, _meta|
if @from_placeholder
instance_variable_set("@#{attribute[1..-1]}", options.delete(attribute))
else
# If register creation is coming directly from Reg.new, instead of Placeholder,
# it may not have attributes with '_' prefix
instance_variable_set("@#{attribute[1..-1]}", options.delete(attribute[1..-1].to_sym))
end
end
@description_from_api = {}
description = options.delete(:description)
if description
@description_from_api[:_reg] = description.split(/\r?\n/)
end
@meta = default_reg_metadata.merge(options.delete(:meta) || {})
# Initialize with unwritable bits that read back as zero, can override this
# to make all writable by default by setting the :init_writable option to true
@size.times do |n|
@bits << Bit.new(self, n, writable: @init_as_writable, undefined: true)
end
add_bits_from_options(options)
end
# The bit order of the register, either :msb0 or :lsb0. When no explicit
# value was given for this register the parent's bit order is inherited,
# falling back to :lsb0 at the top level. The answer is memoized.
def bit_order
  unless @bit_order
    @bit_order = if parent.respond_to?(:bit_order)
                   parent.bit_order
                 else
                   :lsb0
                 end
  end
  @bit_order
end
# Freeze the register and all of its contained bits.
def freeze
bits.each(&:freeze)
# Call any methods which cache results to generate the instance variables
# before they are frozen
address
super
end
# Binds the given bit name to a live-updating parameter so that the bit's
# value tracks the parameter. Raises unless the object identifies itself
# as a live parameter.
def bind(bitname, live_parameter)
  live = live_parameter.respond_to?(:is_a_live_parameter?) && live_parameter.is_a_live_parameter?
  fail 'Only live updating parameters should be bound, make sure you have not missed .live in the path to the parameter!' unless live
  @parameter_bound_bits ||= {}
  @parameter_bound_bits[bitname] = live_parameter
end
# True when at least one bit of this register is bound to a live parameter.
def has_parameter_bound_bits?
  bound = @parameter_bound_bits
  bound && !bound.empty?
end
# Re-writes every parameter-bound bit collection from its bound live
# parameter value; @updating_bound_bits flags that the writes are in
# progress (see #updating_bound_bits?).
# NOTE(review): the flag is not cleared in an ensure block, so an exception
# during a write leaves it stuck true — confirm acceptable.
def update_bound_bits
@updating_bound_bits = true
@parameter_bound_bits.each do |name, val|
bits(name).write(val)
end
@updating_bound_bits = false
end
# Returns true while #update_bound_bits is writing the bound bits.
def updating_bound_bits?
@updating_bound_bits
end
# Renders the register as an ASCII/Unicode table showing, per byte row:
# the bit indexes, the bit/bus names and the current bit states (hex
# value, or X for undefined / M for memory-backed resets, plus any state
# flags from _state_desc). Handles both :lsb0 and :msb0 bit orders and
# registers whose size is not a whole number of bytes.
def inspect(options = {})
# This fancy_output option is passed in via option hash
# Even better, the output could auto-detect 7-bit vs 8-bit terminal output and adjust the parameter, but that's for another day
fancy_output = options[:fancy_output].nil? ? true : options[:fancy_output]
# Select the box-drawing glyph set: Unicode box characters when fancy,
# plain ASCII approximations otherwise.
if fancy_output
horiz_double_line = '═'
horiz_double_tee_down = '╤'
horiz_double_tee_up = '╧'
corner_double_up_left = '╒'
corner_double_up_right = '╕'
horiz_single_line = '─'
horiz_single_tee_down = '┬'
horiz_single_tee_up = '┴'
horiz_single_cross = '┼'
horiz_double_cross = '╪'
corner_single_down_left = '└'
corner_single_down_right = '┘'
vert_single_line = '│'
vert_single_tee_left = '┤'
vert_single_tee_right = '├'
else
horiz_double_line = '='
horiz_double_tee_down = '='
horiz_double_tee_up = '='
corner_double_up_left = '.'
corner_double_up_right = '.'
horiz_single_line = '-'
horiz_single_tee_down = '-'
horiz_single_tee_up = '-'
horiz_single_cross = '+'
horiz_double_cross = '='
corner_single_down_left = '`'
corner_single_down_right = '\''
vert_single_line = '|'
vert_single_tee_left = '<'
vert_single_tee_right = '>'
end
bit_width = 13
desc = ["\n0x%X - :#{name}" % address]
# Top border: full width when the size is a whole number of bytes (or
# msb0 with more than a byte), otherwise indented/truncated to the
# partial top row.
r = size % 8
if r == 0 || (size > 8 && bit_order == :msb0)
desc << (' ' + corner_double_up_left + ((horiz_double_line * bit_width + horiz_double_tee_down) * 8)).chop + corner_double_up_right
else
if bit_order == :lsb0
desc << (' ' + (' ' * (bit_width + 1) * (8 - r)) + corner_double_up_left + ((horiz_double_line * bit_width + horiz_double_tee_down) * r)).chop + corner_double_up_right
else
desc << (' ' + corner_double_up_left + ((horiz_double_line * bit_width + horiz_double_tee_down) * r)).chop + corner_double_up_right
end
end
# "<#{self.class}: #{self.name}>"
num_bytes = (size / 8.0).ceil
num_bytes.times do |byte_index|
# Need to add support for little endian regs here?
byte_number = num_bytes - byte_index
if bit_order == :lsb0
max_bit = (byte_number * 8) - 1
min_bit = max_bit - 8 + 1
else
min_bit = (byte_index * 8)
max_bit = min_bit + 7
end
# BIT INDEX ROW
line = '  '
line_complete = false
8.times do |i|
if bit_order == :lsb0
bit_num = (byte_number * 8) - i - 1
else
bit_num = (byte_index * 8) + i
end
if bit_num > size - 1
if bit_order == :msb0 && bit_num == size
line += vert_single_line
line_complete = true
else
line << ' ' + ''.center(bit_width) unless line_complete
end
else
line << vert_single_line + "#{bit_num}".center(bit_width)
end
end
line += vert_single_line unless line_complete
desc << line
# BIT NAME ROW
line = '  '
first_done = false
line_complete = false
named_bits include_spacers: true do |name, bit, bitcounter|
if _bit_in_range?(bit, max_bit, min_bit)
if bit_order == :lsb0
if max_bit > (size - 1) && !first_done
(max_bit - (size - 1)).times do
line << ' ' * (bit_width + 1)
end
end
end
if bit.size > 1
if name
if bitcounter.nil?
if bit_order == :lsb0
bit_name = "#{name}[#{_max_bit_in_range(bit, max_bit, min_bit)}:#{_min_bit_in_range(bit, max_bit, min_bit)}]"
else
bit_name = "#{name}[#{_min_bit_in_range(bit, max_bit, min_bit)}:#{_max_bit_in_range(bit, max_bit, min_bit)}]"
end
bit_span = _num_bits_in_range(bit, max_bit, min_bit)
else
upper = _max_bit_in_range(bit, max_bit, min_bit) + bitcounter - bit.size
lower = _min_bit_in_range(bit, max_bit, min_bit) + bitcounter - bit.size
if bit_order == :lsb0
bit_name = "#{name}[#{upper}:#{lower}]"
else
bit_name = "#{name}[#{upper}:#{lower}]"
end
bit_span = upper - lower + 1
end
width = (bit_width * bit_span) + bit_span - 1
if bit_name.length > width
line << vert_single_line + "#{bit_name[0..width - 2]}*"
else
line << vert_single_line + bit_name.center(width)
end
else
bit.shift_out_left do |bit|
if _index_in_range?(bit.position, max_bit, min_bit)
line << vert_single_line + ''.center(bit_width)
end
end
end
else
if name
bit_name = "#{name}"
if bit_name.length > bit_width
txt = "#{bit_name[0..bit_width - 2]}*"
else
txt = bit_name
end
else
txt = ''
end
line << vert_single_line + txt.center(bit_width)
end
end
first_done = true
end
line += vert_single_line
desc << line
# BIT STATE ROW
line = '  '
first_done = false
named_bits include_spacers: true do |name, bit, _bitcounter|
if _bit_in_range?(bit, max_bit, min_bit)
if bit_order == :lsb0
if max_bit > (size - 1) && !first_done
(max_bit - (size - 1)).times do
line << ' ' * (bit_width + 1)
end
end
end
if bit.size > 1
if name
if bit.has_known_value?
value = '0x%X' % bit.val[_max_bit_in_range(bit, max_bit, min_bit).._min_bit_in_range(bit, max_bit, min_bit)]
else
if bit.reset_val == :undefined
value = 'X'
else
value = 'M'
end
end
value += _state_desc(bit)
bit_span = _num_bits_in_range(bit, max_bit, min_bit)
width = bit_width * bit_span
line << vert_single_line + value.center(width + bit_span - 1)
else
bit.shift_out_left do |bit|
if _index_in_range?(bit.position, max_bit, min_bit)
line << vert_single_line + ''.center(bit_width)
end
end
end
else
if name
if bit.has_known_value?
val = bit.val
else
if bit.reset_val == :undefined
val = 'X'
else
val = 'M'
end
end
value = "#{val}" + _state_desc(bit)
line << vert_single_line + value.center(bit_width)
else
line << vert_single_line + ''.center(bit_width)
end
end
end
first_done = true
end
line += vert_single_line
desc << line
# Row separator / bottom border, accounting for a partial final byte in
# either bit order.
if size >= 8
r = size % 8
if byte_index == 0 && r != 0 && bit_order == :lsb0
desc << (' ' + corner_double_up_left + ((horiz_double_line * bit_width + horiz_double_tee_down) * (8 - r)).chop + horiz_double_cross + (horiz_single_line * (bit_width + 1) * r)).chop + vert_single_tee_left
elsif (byte_index == num_bytes - 1) && r != 0 && bit_order == :msb0
desc << (' ' + corner_single_down_left + ((horiz_single_line * bit_width + horiz_single_tee_up) * r)).chop + corner_single_down_right
elsif (byte_index == num_bytes - 2) && r != 0 && bit_order == :msb0
desc << ' ' + vert_single_tee_right + ((horiz_single_line * bit_width + horiz_single_cross) * r) + ((horiz_single_line * bit_width + horiz_single_tee_up) * (8 - r)).chop + corner_single_down_right
else
if byte_index == num_bytes - 1
desc << (' ' + corner_single_down_left + ((horiz_single_line * bit_width + horiz_single_tee_up) * 8)).chop + corner_single_down_right
else
desc << (' ' + vert_single_tee_right + ((horiz_single_line * bit_width + horiz_single_cross) * 8)).chop + vert_single_tee_left
end
end
else
if bit_order == :lsb0
desc << (' ' + (' ' * (bit_width + 1) * (8 - size)) + corner_single_down_left + ((horiz_single_line * bit_width + horiz_single_tee_up) * size)).chop + corner_single_down_right
else
desc << (' ' + corner_single_down_left + ((horiz_single_line * bit_width + horiz_single_tee_up) * size)).chop + corner_single_down_right
end
end
end
desc.join("\n")
end
# Returns a hash containing all register descriptions that have been parsed so far.
#
# NOTE(review): @@description_lookup is a class variable, so this cache is
# shared across Reg and any subclasses — confirm the sharing is intended.
#
# @api private
def description_lookup
@@description_lookup ||= {}
end
# Returns any application specific metadata that has been inherited by the
# given register (global defaults merged with any defaults registered for
# the owner's class).
# This does not account for any overriding that may have been applied to
# this register specifically however, use the meta method to get that.
def default_reg_metadata
Origen::Registers.default_reg_metadata.merge(
Origen::Registers.reg_metadata[owner.class] || {})
end
# Returns a hash mapping bit values to their descriptions, parsed from
# "<value> | <description>" lines in the named bit's description. The
# value column is interpreted in the base given by :format (default
# :binary).
def bit_value_descriptions(bitname, options = {})
  options = {
    format: :binary
  }.merge(options)
  bases = {
    bin: 2, binary: 2,
    hex: 16, hexadecimal: 16,
    dec: 10, decimal: 10
  }
  base = bases[options[:format]]
  fail "Unknown integer format: #{options[:format]}" unless base
  description(bitname).each_with_object({}) do |line, desc|
    if line =~ /^\s*(\d+)\s+\|\s+(.+)/
      desc[Regexp.last_match[1].to_i(base)] = Regexp.last_match[2]
    end
  end
end
# Returns the full name of the register when this has been specified in the register
# description like this:
#
# # ** This is the Register Full Name **
# # This register blah blah
#
# This method will also be called by bit collections to look up the name when
# defined in a similar manner in the bit description.
#
# If no name has been specified this will return nil.
def full_name(bitname = :_reg, _options = {})
# Supports calling with just an options hash, e.g. full_name(some: :opts);
# the captured options local is currently unused.
bitname, options = :_reg, bitname if bitname.is_a?(Hash)
desc = description(bitname).first
# Capture something like this:
# ** This is the full name ** - This bit blah blah
# NOTE(review): the character class [^\*.] also excludes '.', so a full
# name containing a period will not match (nil returned) — confirm intended.
if desc && desc =~ /\s*\*\*\s*([^\*.]*)\s*\*\*/
Regexp.last_match[1].strip
end
end
# Escapes brackets and parentheses so the string can be embedded in a
# regex. Helper for the description method. Returns nil when given nil.
def escape_special_char(str)
  # Single-pass gsub over all four characters, replacing the original
  # chain of four separate gsub calls.
  str.gsub(/[\[\]()]/) { |char| "\\#{char}" } if str
end
# Returns the description of this register if any, if none then an empty array
# will be returned
#
# **Note** Adding a description field will override any comment-driven documentation
# of a register (ie markdown style comments)
#
# Options:
#   include_name       - keep the "** Full Name **" prefix on the first line (default true)
#   include_bit_values - keep "<value> | <desc>" lines for bit lookups (default true)
def description(bitname = :_reg, options = {})
# Supports calling with just an options hash, e.g. description(include_name: false)
bitname, options = :_reg, bitname if bitname.is_a?(Hash)
options = {
include_name: true,
include_bit_values: true
}.merge(options)
# API-supplied descriptions take precedence over comment-parsed ones
if @description_from_api[bitname]
desc = @description_from_api[bitname]
else
parse_descriptions unless description_lookup[define_file]
begin
desc = description_lookup[define_file][name][bitname] || []
rescue
desc = []
end
end
# Optionally strip the "<value> | <desc>" bit-value lines
desc = desc.reject do |line|
if bitname != :_reg
unless options[:include_bit_values]
!!(line =~ /^\s*(\d+)\s+\|\s+(.+)/)
end
else
false
end
end
# Optionally strip the "** Full Name ** -" prefix from the first line
if desc.first
unless options[:include_name]
desc[0] = desc.first.sub(/\s*\*\*\s*#{escape_special_char(full_name(bitname))}\s*\*\*\s*-?\s*/, '')
end
end
# Trim leading and trailing blank lines
desc.shift while desc.first && desc.first.strip.empty?
desc.pop while desc.last && desc.last.strip.empty?
desc
end
alias_method :descriptions, :description
# Scans the file this register was defined in, accumulating comment lines
# and attaching them to the register (:_reg key) or named bit they
# immediately precede, populating description_lookup.
#
# @api private
def parse_descriptions
desc = []
File.readlines(define_file).each do |line|
if line =~ /^\s*#(.*)/
# Plain comment line: accumulate until a reg/bit definition is hit
desc << Regexp.last_match[1].strip
# http://rubular.com/r/D8lg2P5kK1 http://rubular.com/r/XP4ydPV8Fd
elsif line =~ /^\s*reg\(?\s*[:"'](\w+)["']?\s*,.*\sdo/ || line =~ /^\s*.*add_reg\(?\s*[:"'](\w+)["']?\s*,.*/
# reg/add_reg definition: the accumulated comments describe the register
@current_reg_name = Regexp.last_match[1].to_sym
description_lookup[define_file] ||= {}
description_lookup[define_file][@current_reg_name] ||= {}
description_lookup[define_file][@current_reg_name][:_reg] = desc.dup
desc = []
# http://www.rubular.com/r/7FidbC1JRA
elsif @current_reg_name && line =~ /^\s*(add_bit|bit|reg\.bit)s?\(?\s*\d+\.?\.?\d*\s*,\s*:(\w+)/
# bit definition within the current register: comments describe the bit
description_lookup[define_file][@current_reg_name][Regexp.last_match[2].to_sym] = desc.dup
desc = []
else
# Any other code line breaks the comment run
desc = []
end
end
end
# Always returns true; registers contain bits (shared API contract with
# other bit-containing objects).
def contains_bits?
true
end
# @api private
#
# Adds the requested named bits to the register from the remaining add_reg
# options, replacing the placeholder bits created in #initialize. Each
# entry is either a Hash (single bit or contiguous bus) or an Array (a bit
# group split across the register). Returns self.
#
# Note: the stray debug `puts bit_id` / `puts bit_params` calls that used
# to print every bit definition have been removed.
def add_bits_from_options(options = {}) # :nodoc:
  # options is now an array for split bit groups or a hash if single bit/range bits
  # Now add the requested bits to the register, removing the unwritable bits as required
  options.each do |bit_id, bit_params|
    if bit_params.is_a? Hash
      description = bit_params.delete(:description)
      if description
        @description_from_api[bit_id] = description.split(/\r?\n/)
      end
      bind(bit_id, bit_params.delete(:bind)) if bit_params[:bind]
      position = bit_params[:pos] || 0
      num_bits = bit_params[:bits] || 1
      if @_reset
        if @_reset.is_a?(Symbol)
          bit_params[:res] = @_reset
        else
          # Extract this field's slice of the register-level reset value.
          # NOTE(review): relies on Origen's Integer#[] bit-extraction
          # semantics for the (msb, lsb-ish) argument pair — confirm the
          # (num_bits + position - 1, position) form selects the intended range.
          bit_params[:res] = @_reset[(num_bits + position - 1), position]
        end
      end
      bit_params[:access] = @_access if bit_params[:access].nil?
      # :data and :reset are accepted as aliases for :res
      bit_params[:res] = bit_params[:data] if bit_params[:data]
      bit_params[:res] = bit_params[:reset] if bit_params[:reset]
      if num_bits == 1
        add_bit(bit_id, position, bit_params) # and add the new one
      else
        add_bus(bit_id, position, num_bits, bit_params)
      end
    elsif bit_params.is_a? Array
      description = bit_params.map { |h| h.delete(:description) }.compact.join("\n")
      unless description.empty?
        @description_from_api[bit_id] = description.split(/\r?\n/)
      end
      add_bus_scramble(bit_id, bit_params)
    end
  end
  self
end
# Called by clone/dup on the copy. Ruby copies the instance variables
# themselves but not the objects nested inside them, so the bit objects
# held in @bits (and the @lookup hash) are cloned explicitly here to
# avoid sharing mutable state with the original register.
def initialize_copy(orig) # :nodoc:
  @bits = orig.bits.map(&:clone)
  @lookup = orig.lookup.clone
  self
end
# Returns a dummy register object that can be used on the fly, this can sometimes
# be useful to configure an intricate read operation.
# The Reg class itself acts as the owner and the register is created with
# all bits writable.
# ==== Example
# # Read bit 5 of RAM address 0xFFFF1280
# dummy = Reg.dummy # Create a dummy reg to configure the read operation
# dummy.address = 0xFFFF1280 # Set the address
# dummy.bit(5).read!(1) # Read bit 5 expecting a 1
def self.dummy(size = 16)
Reg.new(self, 0, size, :dummy, init_as_writable: true)
end
# Returns each named bit collection contained in the register,
def named_bits(options = {})
options = {
include_spacers: false
}.merge(options)
result = []
# test if @lookup has any values stored as an array
# if so it means there is a split group of bits
# process that differently to a single bit or continuous range of bits
# which are typically stored in a hash
split_bits = false
@lookup.each { |_k, v| split_bits = true if v.is_a? Array }
if split_bits == false
if bit_order == :lsb0
current_pos = size
else
current_pos = 0
end
# Sort by position
@lookup.sort_by { |_name, details| bit_order == :lsb0 ? -details[:pos] : details[:pos] }.each do |name, details|
if bit_order == :lsb0
pos = details[:bits] + details[:pos]
else
pos = details[:pos]
end
if options[:include_spacers] && (pos != current_pos)
if bit_order == :lsb0
collection = BitCollection.dummy(self, nil, size: current_pos - pos, pos: pos)
else
collection = BitCollection.dummy(self, nil, size: pos - current_pos, pos: current_pos)
end
unless collection.size == 0
if block_given?
yield nil, collection
else
result << [nil, collection]
end
end
end
collection = BitCollection.new(self, name)
details[:bits].times do |i|
collection << @bits[details[:pos] + i]
end
unless collection.size == 0
if block_given?
yield name, collection
else
result << [name, collection]
end
end
if bit_order == :lsb0
current_pos = details[:pos]
else
current_pos = details[:bits] + details[:pos]
end
end
if options[:include_spacers] && ((bit_order == :lsb0 && current_pos != 0) ||
bit_order == :msb0 && current_pos != size)
if bit_order == :lsb0
collection = BitCollection.dummy(self, nil, size: current_pos, pos: 0)
else
collection = BitCollection.dummy(self, nil, size: size - current_pos, pos: current_pos)
end
unless collection.size == 0
if block_given?
yield nil, collection
else
result << [nil, collection]
end
end
end
elsif split_bits == true # if there are split bits, need to convert all register bit values to array elements to allow sorting
# if the register has bits split up across it, then store the bits in order of decreasing reg position
# but first, stuff all the bits in a simple array, as single bits, or ranges of bits
@lookup_splits = []
@lookup.each do |k, v|
tempbit = {}
bitcounter = {}
if v.is_a? Hash
# then this is already a single bit or a continuous range so just stuff it into the array
tempbit[k] = v
@lookup_splits << tempbit.clone
elsif v.is_a? Array
# if the bitgroup is split, then decompose into single bits and continuous ranges
v.each_with_index do |bitdetail, _i|
if bitcounter.key?(k)
bitcounter[k] = bitcounter[k] + bitdetail[:bits]
else
bitcounter[k] = bitdetail[:bits]
end
tempbit[k] = bitdetail
@lookup_splits << tempbit.clone
end
end
if v.is_a? Array
@lookup_splits.each_with_index do |_e, q|
groupname = @lookup_splits[q].to_a[0][0]
if groupname == k
@lookup_splits[q][groupname][:bitgrouppos] = bitcounter[groupname] if groupname == k
bitcounter[groupname] = bitcounter[groupname] - @lookup_splits[q][groupname][:bits]
end
end
end
end
# Now sort the array in descending order
# Does adding the bitgrouppos need to happen after the sort ?
@lookup_splits = @lookup_splits.sort do |a, b|
b.to_a[0][1][:pos] <=> a.to_a[0][1][:pos]
end
current_pos = size
countbits = {} # if countbits.method == nil
@master = {}
bitgroup = {}
bitinfo = {}
info = {}
@lookup_splits.each_with_index do |hash, _i|
name = hash.to_a[0][0]
details = hash.to_a[0][1]
bitcounter = hash.to_a[0][1][:bitgrouppos]
pos = details[:bits] + details[:pos]
if options[:include_spacers] && (pos != current_pos)
collection = BitCollection.dummy(self, nil, size: current_pos - pos, pos: pos)
unless collection.size == 0
if block_given?
yield nil, collection, bitcounter
else
result << [nil, collection, bitcounter]
end
end
end
collection = BitCollection.new(self, name)
details[:bits].times do |i|
collection << @bits[details[:pos] + i]
end
unless collection.size == 0
if block_given?
yield name, collection, bitcounter
else
result << [name, collection, bitcounter]
end
end
current_pos = details[:pos]
end
if options[:include_spacers] && current_pos != 0
collection = BitCollection.dummy(self, nil, size: current_pos, pos: 0)
unless collection.size == 0
if block_given?
yield nil, collection, bitcounter
else
result << [nil, collection, bitcounter]
end
end
end
end
unless block_given?
result
end
end
# Yields each named bit collection contained in self as [name, collection].
# NOTE(review): despite the name this replays #named_bits in the same order
# it was produced (the pairs are collected first, then yielded) — confirm intent
def reverse_named_bits(_options = {})
  collected = []
  named_bits { |bit_name, bit_coll| collected << [bit_name, bit_coll] }
  collected.each do |pair|
    yield pair[0], pair[1]
  end
end
# Returns a sorted array of all bit positions occupied by named bits.
# Single bits contribute their own position; multi-bit fields contribute
# every position they span.
# ==== Example
#   reg :fstat, @base + 0x0000, :size => 8 do
#     bit 7, :ccif
#     bit 0, :mgstat0
#   end
#   regs(:fstat).used_bits
#   # => [0, 7]
#
#   reg :aguahb2, @base + 0x2A, :size => 8 do
#     bit 5..2, :m0b_hbstrb, :reset => 0x0
#     bit 1..0, :m0b_htrans, :reset => 0x2
#   end
#   regs(:aguahb2).used_bits
#   # => [0, 1, 2, 3, 4, 5]
def used_bits(_options = {})
  positions = []
  named_bits do |_name, bit|
    # A field of size N starting at P occupies P..(P + N - 1)
    positions.concat((bit.position...(bit.position + bit.size)).to_a)
  end
  positions.sort
end
# Returns true if the register has any named (occupied) bit positions
def used_bits?(_options = {})
  !used_bits.empty?
end
# Returns an array of unoccupied bit positions (those not covered by any
# named bit)
# ==== Example
#   reg :fstat, @base + 0x0000, :size => 8 do
#     bit 7, :ccif
#     bit 6, :rdcolerr
#     bit 5, :accerr
#     bit 4, :pviol
#     bit 0, :mgstat0
#   end
#   regs(:fstat).empty_bits
#   # => [1, 2, 3]
def empty_bits(_options = {})
  (0...size).to_a - used_bits
end
# Returns true if the register has any unoccupied bit positions
def empty_bits?(_options = {})
  !empty_bits.empty?
end
# Proxies read/write requests from bit collections to the register owner:
# resolves the handler via #reader/#writer and dispatches the operation to it.
# When no handler is found, notifies the top-level (or owner) via
# read_register_missing!/write_register_missing!.
def request(operation, options = {}) # :nodoc:
  if operation == :read_register
    object = reader
    (Origen.top_level || owner).read_register_missing!(self) unless object
  else
    object = writer
    (Origen.top_level || owner).write_register_missing!(self) unless object
  end
  object.send(operation, self, options)
  self
end
# Returns (and memoizes) the object that will be responsible for writing
# the given register, as resolved by #lookup_operation_handler
def writer
  @writer = lookup_operation_handler(:write_register) unless @writer
  @writer
end
# Returns (and memoizes) the object that will be responsible for reading
# the given register, as resolved by #lookup_operation_handler
def reader
  @reader = lookup_operation_handler(:read_register) unless @reader
  @reader
end
# @api private
# Resolves the object that should handle the given operation
# (:read_register / :write_register). Resolution order: the owner's
# controller (if any), the owner itself, the owner's owner, then the
# top-level DUT. Returns nil when nobody responds to the operation.
def lookup_operation_handler(operation)
  # Could have made the controller be the owner when assigned above, but may have run
  # into problems with the reg meta data stuff
  handler = owner.respond_to?(:controller) && owner.controller ? owner.controller : owner
  return handler if handler.respond_to?(operation)
  if handler.respond_to?(:owner) && handler.owner.respond_to?(operation)
    return handler.owner
  end
  Origen.top_level if Origen.top_level && Origen.top_level.respond_to?(operation)
end
# Returns the relative address of the given register (i.e. without the base
# address applied), equivalent to calling reg.address(:relative => true)
def offset
  address(relative: true)
end
# Returns the register address added to its current base_address value (if any).
#
# @param [Hash] options
# @option options [Boolean] :relative (false) Return the address without adding the base address (if present)
def address(options = {})
  options = {
    relative: false
  }.merge(options)
  address = @address
  domain_option = options[:domains] || options[:domain]
  # Remember the domain used so the cached base address can be invalidated
  # later if a different domain is requested (skipped once frozen)
  @domain_option ||= domain_option unless frozen?
  # Blow the cache when the domain option changes
  @base_address_applied = nil unless @domain_option == domain_option
  unless @base_address_applied
    # Give highest priority to the original API which allowed the object
    # doing register read/write to define a base_address method
    if (writer && writer.methods.include?(:base_address) && writer.method(:base_address).arity != 0) ||
       (reader && reader.methods.include?(:base_address) && reader.method(:base_address).arity != 0)
      # This currently assumes that the base address is always the same
      # for reading and writing
      if writer && writer.respond_to?(:base_address) && writer.method(:base_address).arity != 0
        self.base_address = writer.base_address(self)
      elsif reader && reader.respond_to?(:base_address) && reader.method(:base_address).arity != 0
        self.base_address = reader.base_address(self)
      end
    else
      # Otherwise fall back to the owner's reg_base_address for the given domain
      o = owner.is_a?(Container) ? owner.owner : owner
      d = domain_option || domains
      if o && o.reg_base_address(domain: d)
        self.base_address = o.reg_base_address(domain: d)
      end
    end
    @base_address_applied = true
  end
  unless options[:relative]
    address += base_address if base_address
  end
  if options[:address_type]
    Origen.deprecate 'Specifying the address_type of a register address will be removed from Origen 3'
    case options[:address_type]
    when :byte
      address = address * 2
    when :word
      address = address
    when :longword
      address = address / 2
    else
      fail 'Unknown address type requested!'
    end
  end
  address
end
alias_method :addr, :address
# Returns true if the register owner matches the given name. A match is
# detected when the class name of the register's owner contains the given
# name (case-insensitive).
#
# Alternatively, if the register owner implements a method called
# reg_owner_alias then the value(s) it returns are also considered when
# checking for a match (it may return an array of names for multiple
# aliases). Aliases are useful for de-coupling the commonly used name,
# e.g. "NVM", from the actual class name.
#
# @example
#   class C90NVM
#     include Origen::Registers
#
#     def initialize
#       add_reg :clkdiv, 0x3, 16, :div => {:bits => 8}
#     end
#   end
#
#   reg = C90NVM.new.reg(:clkdiv)
#   reg.owned_by?(:nvm)      # => true
#   reg.owned_by?(:c90nvm)   # => true
#   reg.owned_by?(:flash)    # => false (true if reg_owner_alias returns "flash")
def owned_by?(name)
  pattern = /#{name}/i
  return true if owner.class.to_s =~ pattern
  if owner.respond_to?(:reg_owner_alias)
    [owner.reg_owner_alias].flatten.any? { |al| al.to_s =~ pattern }
  else
    false
  end
end
# Returns true if the register contains a named bit (or bus) with the given name
# ==== Example
#   add_reg :control, 0x55, :status => {:pos => 1}
#
#   reg(:control).has_bit?(:result) # => false
#   reg(:control).has_bit?(:status) # => true
def has_bit?(name)
  @lookup.key?(name)
end
alias_method :has_bits?, :has_bit?
alias_method :has_bit, :has_bit?
alias_method :has_bits, :has_bit?
# Add a single named bit to the register at the given position, should only
# be called internally. The pre-existing anonymous bit at that position is
# replaced.
def add_bit(id, position, options = {}) # :nodoc:
  # NOTE(review): :res defaults from the bit's current data value rather than
  # a reset attribute — confirm this is the intended default
  options = { data: @bits[position].data, # If undefined preserve any data/reset value that has
              res: @bits[position].data, # already been applied at reg level
            }.merge(options)
  @lookup[id] = { pos: position, bits: 1, feature: options[:feature] }
  @bits.delete_at(position) # Remove the initial bit from this position
  @bits.insert(position, Bit.new(self, position, options))
  self
end
# Add a multi-bit bus to the register at the given position/size, should only
# be called internally. Each covered position has its anonymous bit replaced
# by a named bit.
def add_bus(id, position, size, options = {}) # :nodoc:
  # Pack the current data of the covered positions so that any reg-level
  # data/reset values are preserved when not explicitly overridden
  default_data = 0
  size.times do |n|
    default_data |= @bits[position + n].data << n
  end
  options = { data: default_data, # If undefined preserve any data/reset value that has
              res: default_data, # already been applied at reg level
            }.merge(options)
  @lookup[id] = { pos: position, bits: size }
  size.times do |n|
    bit_options = options.dup
    # Integer#[] extracts bit n of the packed value (assumes :data/:res are
    # Integers unless :res is a Symbol)
    bit_options[:data] = options[:data][n]
    if options[:res].is_a?(Symbol)
      bit_options[:res] = options[:res]
    else
      bit_options[:res] = options[:res][n]
    end
    @bits.delete_at(position + n)
    @bits.insert(position + n, Bit.new(self, position + n, bit_options))
  end
  self
end
# Adds a split (non-contiguous) bus to the register. Each hash in
# array_of_hashes describes one piece of the group (:pos, :bits, optional
# :reset / :bind). Pieces of the same id are accumulated as an Array in
# @lookup, which is how split groups are distinguished elsewhere.
#
# Fixes vs previous version: removed the no-op `options[:data] = options[:data]`
# line, and the unused `num_bits` local is gone — its `|| 1` default is now
# applied to `size` (previously a piece without :bits crashed on nil.times).
# Also `self` is now returned from the method itself (it was previously the
# last expression of the each-block, which has no effect).
def add_bus_scramble(id, array_of_hashes = [])
  array_of_hashes.each do |options|
    bind(id, options.delete(:bind)) if options[:bind]
    position = options[:pos] || 0
    size = options[:bits] || 1
    options[:res] = options[:reset] if options[:reset]
    # Pack the current data of the covered positions so that any reg-level
    # data/reset values are preserved when not explicitly overridden
    default_data = 0
    size.times do |n|
      default_data |= @bits[position + n].data << n
    end
    options = { data: default_data, # If undefined preserve any data/reset value that has
                res: default_data, # already been applied at reg level
              }.merge(options)
    @lookup[id] = [] if @lookup[id].nil?
    @lookup[id] = @lookup[id].push(pos: position, bits: size)
    size.times do |n|
      bit_options = options.dup
      bit_options[:data] = options[:data][n]
      bit_options[:res] = options[:res][n]
      @bits.delete_at(position + n)
      @bits.insert(position + n, Bit.new(self, position + n, bit_options))
    end
  end
  self
end
# Deletes the bits in the given collection from the register: their names are
# removed from the lookup and each vacated position is re-filled with an
# anonymous bit (writability per @init_as_writable)
def delete_bit(collection)
  [collection.name].flatten.each do |bit_name|
    @lookup.delete(bit_name)
  end
  collection.each do |bit|
    pos = bit.position
    @bits.delete_at(pos) # Remove the bit
    @bits.insert(pos, Bit.new(self, pos, writable: @init_as_writable))
  end
  self
end
alias_method :delete_bits, :delete_bit
# @api private
# Yields every index covered by the given range; a descending range
# (e.g. 7..0) is first normalized to ascending (0..7) so iteration works
def expand_range(range)
  if range.first > range.last
    range = Range.new(range.last, range.first)
  end
  range.each do |i|
    yield i
  end
end
# Returns the bit object(s) responding to the given name, wrapped in a BitCollection.
# This method also accepts multiple name possibilities, if neither bit exists in
# the register it will raise an error, otherwise it will return the first match.
# If no args passed in, it will return a BitCollection containing all bits.
# If a number is passed in then the bits from those positions are returned.
# ==== Example
#   add_reg :control, 0x55, :status => {:pos => 1, :bits => 2},
#                           :fail   => {:pos => 0}
#
#   reg(:control).bit(:fail)              # => BitCollection containing the fail bit
#   reg(:control).bits(:status)           # => BitCollection containing the status bits
#   reg(:control).bit(:bist_fail, :fail)  # => BitCollection containing the fail bit
#   reg(:control).bit(0)                  # => BitCollection containing the fail bit
#   reg(:control).bit(1)                  # => BitCollection containing status bit
#   reg(:control).bit(1, 2)               # => BitCollection containing both status bits
def bit(*args)
  multi_bit_names = false
  # return get_bits_with_constraint(nil,:default) if args.size == 0
  # A trailing options hash, if present, supplies the feature constraint
  constraint = extract_feature_params(args)
  if constraint.nil?
    constraint = :default
  end
  collection = BitCollection.new(self, :unknown)
  if args.size == 0
    # No args - return all bits in the register
    collection.add_name(name)
    @bits.each do |bit|
      collection << get_bits_with_constraint(bit.position, constraint)
    end
  else
    args.flatten!
    args.sort!
    args.each do |arg_item|
      if arg_item.is_a?(Integer)
        b = get_bits_with_constraint(arg_item, constraint)
        collection << b if b
      elsif arg_item.is_a?(Range)
        expand_range(arg_item) do |bit_number|
          collection << get_bits_with_constraint(bit_number, constraint)
        end
      else
        multi_bit_names = args.size > 1
        # Reaches here if bit name is specified
        if @lookup.include?(arg_item)
          # Split bit groups are stored as Arrays in @lookup; their lookup
          # path differs from single/contiguous fields
          split_bits = false
          @lookup.each { |_k, v| split_bits = true if v.is_a? Array }
          coll = get_lookup_feature_bits(arg_item, constraint, split_bits)
          if coll
            coll.each do |b|
              collection.add_name(arg_item)
              collection << b
            end
          end
        end
      end
    end
  end
  if collection.size == 0
    # Originally Origen returned nil when asking for a bit via an index which does not
    # exist, e.g. reg[1000] => nil
    # The args numeric clause here is to maintain that behavior
    if Origen.config.strict_errors && !args.all? { |arg| arg.is_a?(Numeric) }
      puts "Register #{@name} does not have a bits(s) named :#{args.join(', :')} or it might not be enabled."
      puts 'This could also be a typo, these are the valid bit names:'
      puts @lookup.keys
      fail 'Missing bits error!'
    end
    nil
  else
    if multi_bit_names
      collection.sort_by!(&:position)
    end
    collection
  end
end
alias_method :bits, :bit
alias_method :[], :bit
# Returns the bit at the given position subject to the given feature
# constraint (:default / :none / :all / a feature name / an array of feature
# names). Returns nil when the position does not exist or when an
# array-constraint is not fully satisfied; returns a non-writable dummy bit
# when a single named feature constraint is not met (preserving the original
# fall-through behavior).
def get_bits_with_constraint(number, params)
  return nil unless @bits[number]
  if (params == :default || !params) && @bits[number].enabled?
    @bits[number]
  elsif params == :none && !@bits[number].has_feature_constraint?
    @bits[number]
  elsif params == :all
    @bits[number]
  elsif params.class == Array
    # Bug fix: previously `@bits[number]` was the last expression INSIDE the
    # each block, so the method returned `params.each` (the params array
    # itself) — callers then appended that array to a BitCollection. The bit
    # must be returned after all features have been verified.
    params.each do |param|
      return nil unless @bits[number].enabled_by_feature?(param)
    end
    @bits[number]
  elsif @bits[number].enabled_by_feature?(params)
    @bits[number]
  else
    Bit.new(self, number, writable: false)
  end
end
# Returns the BitCollection for the given bit name subject to the given
# feature constraint. split_group_reg indicates whether this register
# contains any split bit groups (Array entries in @lookup), which require a
# different lookup path than single bits / contiguous ranges (Hash entries).
# Returns [] when the name is unknown; returns a dummy (spacer) collection
# when the constraint is not satisfied.
def get_lookup_feature_bits(bit_name, params, split_group_reg)
  if split_group_reg == false # if this register has single bits and continuous ranges
    if @lookup.include?(bit_name)
      collection = BitCollection.new(self, bit_name)
      (@lookup[bit_name][:bits]).times do |i|
        collection << @bits[@lookup[bit_name][:pos] + i]
      end
      if !params || params == :default
        if collection.enabled?
          return collection
        end
      elsif params == :none
        unless collection.has_feature_constraint?
          return collection
        end
      elsif params == :all
        return collection
      elsif params.class == Array
        if params.all? { |param| collection.enabled_by_feature?(param) }
          return collection
        end
      else
        if collection.enabled_by_feature?(params)
          return collection
        end
      end
      return BitCollection.dummy(self, bit_name, size: collection.size, pos: @lookup[bit_name][:pos])
    else
      return []
    end
  elsif split_group_reg == true # if this register has split bits in its range
    if @lookup.is_a?(Hash) # && @lookup.include?(bit_name)
      collection = false
      @lookup.each do |k, v| # k is the bitname, v is the hash of bit data
        if k == bit_name
          collection ||= BitCollection.new(self, k)
          if v.is_a?(Array)
            v.reverse_each do |pb| # loop each piece of bit group data
              (pb[:bits]).times do |i|
                collection << @bits[pb[:pos] + i]
              end
            end
          else
            (v[:bits]).times do |i|
              collection << @bits[v[:pos] + i]
            end
          end
        end
      end
      if !params || params == :default
        if collection.enabled?
          return collection
        end
      elsif params == :none
        unless collection.has_feature_constraint?
          return collection
        end
      elsif params == :all
        return collection
      elsif params.class == Array
        if params.all? { |param| collection.enabled_by_feature?(param) }
          return collection
        end
      else
        if collection.enabled_by_feature?(params)
          return collection
        end
      end
      if @lookup[bit_name].is_a?(Array)
        return BitCollection.dummy(self, bit_name, size: collection.size, pos: @lookup[bit_name][0][:pos])
      else
        # Bug fix: this previously read `@lookup[bit_name[:pos]]`, indexing
        # the Symbol itself (TypeError at runtime); the position lives at
        # @lookup[bit_name][:pos]
        return BitCollection.dummy(self, bit_name, size: collection.size, pos: @lookup[bit_name][:pos])
      end
    else
      return []
    end
  end
end
# Pulls the feature constraint out of a bit/bits argument list: if an options
# Hash is present it is removed from args (mutating the caller's array) and
# its :enabled_features / :enabled_feature value is returned, otherwise nil
def extract_feature_params(args)
  index = args.find_index { |arg| arg.instance_of?(Hash) }
  return nil unless index
  params = args.delete_at(index)
  params[:enabled_features] || params[:enabled_feature]
end
# All other Reg methods are delegated to BitCollection, to named bit lookups,
# or to application meta-data accessors
def method_missing(method, *args, &block) # :nodoc:
  # Explicitly opt out of Ruby's implicit array/hash conversion protocols
  if method.to_sym == :to_ary || method.to_sym == :to_hash
    nil
  elsif meta_data_method?(method)
    extract_meta_data(method, *args)
  else
    if BitCollection.instance_methods.include?(method)
      # Delegate to a full-register BitCollection
      to_bit_collection.send(method, *args, &block)
    elsif has_bits?(method)
      # A named bit/bus can be accessed directly, e.g. reg.my_bit_name
      bits(method)
    else
      super
    end
  end
end
# Returns a BitCollection wrapping all of this register's bits
def to_bit_collection
  BitCollection.new(self, name, @bits)
end
# Recognize that Reg responds to all BitCollection methods, named bit
# accessors, and application-specific meta data properties (mirrors the
# dispatch performed in #method_missing)
def respond_to?(*args) # :nodoc:
  sym = args.first.to_sym
  meta_data_method?(sym) || has_bits?(sym) || super(sym) || BitCollection.instance_methods.include?(sym)
end
# Copy overlays from one reg object to another
# ==== Example
#   reg(:data_copy).has_overlay?            # => false
#   reg(:data).overlay("data_val")
#   reg(:data_copy).copy_overlays_from(reg(:data))
#   reg(:data_copy).has_overlay?            # => true
def copy_overlays_from(reg, options = {})
  size.times do |i|
    src = reg.bit[i]
    next unless src.has_overlay?
    ov = src.overlay_str
    # If an id has been supplied make sure any trailing ID in the source is
    # changed to the supplied identifier
    ov.gsub!(/_\d$/, "_#{options[:update_id]}") if options[:update_id]
    @bits[i].overlay(ov)
  end
  self
end
# Copies data from one reg object to another
# ==== Example
#   reg(:data_copy).data                        # => 0
#   reg(:data).write(0x1234)
#   reg(:data_copy).copy_data_from(reg(:data))
#   reg(:data_copy).data                        # => 0x1234
def copy_data_from(reg)
  (0...size).each do |i|
    @bits[i].write(reg.bit[i].data)
  end
  self
end
# Copies data and overlays from one reg object to another, it does not copy
# read or store flags
def copy(reg)
  (0...size).each do |i|
    src = reg.bit[i]
    @bits[i].overlay(src.overlay_str) if src.has_overlay?
    @bits[i].write(src.data)
  end
  self
end
# Returns the BITWISE AND of reg with another reg or a number, the state of
# both registers remains unchanged
# ==== Example
#   reg(:data).write(0x5555)
#   reg(:data2).write(0xFFFF)
#   reg(:data) & 0xFF00       # => 0x5500
#   reg(:data) & reg(:data2)  # => 0x5555
def &(val)
  # clean_value unwraps a reg argument to its data value
  data & Reg.clean_value(val)
end
# Returns the BITWISE OR of reg with another reg or a number, the state of
# both registers remains unchanged
def |(val)
  data | Reg.clean_value(val)
end
# Returns the SUM of reg with another reg or a number, the state of
# both registers remains unchanged
def +(val)
  data + Reg.clean_value(val)
end
# Returns the SUBTRACTION of reg with another reg or a number, the state of
# both registers remains unchanged
def -(val)
  data - Reg.clean_value(val)
end
# Returns the DIVISION of reg with another reg or a number, the state of
# both registers remains unchanged
def /(val)
  data / Reg.clean_value(val)
end
# Returns the PRODUCT of reg with another reg or a number, the state of
# both registers remains unchanged
def *(val)
  data * Reg.clean_value(val)
end
# Cleans an input value, which may be a register object or an explicit value,
# returning an explicit value in either case
def self.clean_value(value) # :nodoc:
  # A reg-like object exposes its data through #val
  value.respond_to?('val') ? value.val : value
end
# @api private
# Returns true if the given method name (optionally with trailing '?' or '=')
# corresponds to a default register meta-data attribute. For predicate-style
# names (ending in '?') only attributes whose default value is boolean qualify.
def meta_data_method?(method)
  # Strip any '?' / '=' suffix to recover the attribute name
  attr_name = method.to_s.gsub(/\??=?/, '').to_sym
  if default_reg_metadata.key?(attr_name)
    if method.to_s =~ /\?/
      [true, false].include?(default_reg_metadata[attr_name])
    else
      true
    end
  else
    false
  end
end
# Reads or writes an application meta-data attribute: setter-style names
# ('attr=') store the value in an instance variable, otherwise the instance
# variable is returned, falling back to the meta hash default
def extract_meta_data(method, *args)
  method = method.to_s.sub('?', '')
  if method =~ /=/
    instance_variable_set("@#{method.sub('=', '')}", args.first)
  else
    instance_variable_get("@#{method}") || meta[method.to_sym]
  end
end
# Returns true if the register is constrained by the given feature, or by
# any feature at all when no name is supplied
def enabled_by_feature?(name = nil)
  return !!feature unless name
  if feature.class == Array
    feature.each do |f|
      return true if f == name
    end
    false
  else
    feature == name
  end
end
alias_method :has_feature_constraint?, :enabled_by_feature?
# Query the owner hierarchy to see if this register is enabled: walks up the
# ownership chain looking for an object that reports has_feature? for the
# register's feature(s), finally falling back to the top-level DUT. With an
# Array of features, ALL of them must be found for the register to be enabled.
# A register with no feature constraint is always enabled.
def enabled?
  if feature
    value = false
    current_owner = self
    if feature.class == Array
      feature.each do |f|
        current_owner = self
        # Walk up the ownership chain until someone claims the feature or
        # the chain runs out
        loop do
          if current_owner.respond_to?(:owner)
            current_owner = current_owner.owner
            if current_owner.respond_to?(:has_feature?)
              if current_owner.has_feature?(f)
                value = true
                break
              end
            end
          else # if current owner does not have a owner
            value = false
            break
          end
        end # loop end
        unless value
          # Fall back to the top-level DUT for this feature
          if Origen.top_level && \
             Origen.top_level.respond_to?(:has_feature?) && \
             Origen.top_level.has_feature?(f)
            value = true
            # NOTE(review): this inner guard is dead code — value was just
            # set to true so the break can never execute
            unless value
              break
            end
          end
        end
        unless value
          break # break if feature not found and return false
        end
      end # iterated through all features in array
      return value
    else # if feature.class != Array
      loop do
        if current_owner.respond_to?(:owner)
          current_owner = current_owner.owner
          if current_owner.respond_to?(:has_feature?)
            if current_owner.has_feature?(feature)
              value = true
              break
            end
          end
        else # if current owner does not have a owner
          value = false
          break
        end
      end # loop end
      unless value
        # Fall back to the top-level DUT
        if Origen.top_level && \
           Origen.top_level.respond_to?(:has_feature?) && \
           Origen.top_level.has_feature?(feature)
          value = true
        end
      end
      return value
    end
  else
    return true
  end
end
# Returns true if any of the bits within this register carries a feature
# constraint (or is enabled by the given named feature)
def has_bits_enabled_by_feature?(name = nil)
  if name
    bits.any? { |bit| bit.enabled_by_feature?(name) }
  else
    bits.any?(&:has_feature_constraint?)
  end
end
# Serializes the register and its named bits (including bit value
# descriptions) to pretty-printed JSON
def to_json(*args)
  JSON.pretty_generate({
    name: name,
    full_name: full_name,
    address: address,
    offset: offset,
    size: size,
    path: path,
    reset_value: reset_value,
    description: description(include_name: false, include_bit_values: false),
    # NOTE: the block param `name` here shadows the register's #name method
    # within this block
    bits: named_bits.map do |name, bit|
      {
        name: name,
        full_name: bit.full_name,
        position: bit.position,
        size: bit.size,
        reset_value: bit.reset_value,
        access: bit.access,
        description: bit.description(include_name: false, include_bit_values: false),
        bit_values: bit.bit_value_descriptions.map do |val, desc|
          {
            value: val,
            description: desc
          }
        end
      }
    end
  }, *args)
end
private

# Builds the short state annotation for a bit collection used by #inspect,
# e.g. "(RO|Rd)"; returns '' when there is nothing to report
def _state_desc(bits)
  flags = []
  unless bits.readable? && bits.writable?
    flags << (bits.readable? ? 'RO' : 'WO')
  end
  flags << 'Rd' if bits.is_to_be_read?
  flags << 'Str' if bits.is_to_be_stored?
  flags << 'Ov' if bits.has_overlay?
  flags.empty? ? '' : "(#{flags.join('|')})"
end
# Returns the highest bit index of `bits` visible within the byte range,
# expressed relative to the field's own position
def _max_bit_in_range(bits, max, _min)
  top = bits.position + bits.size - 1
  [top, max].min - bits.position
end
# Returns the lowest bit index of `bits` visible within the byte range,
# expressed relative to the field's own position
def _min_bit_in_range(bits, _max, min)
  [bits.position, min].max - bits.position
end
# Returns true if some portion of the given bits falls
# within the given [min, max] position range
def _bit_in_range?(bits, max, min)
  lower = bits.position
  upper = lower + bits.size - 1
  lower <= max && upper >= min
end
# Returns the number of bits from the given bits that
# fall within the given [min, max] position range
def _num_bits_in_range(bits, max, min)
  lower = bits.position
  upper = lower + bits.size - 1
  [upper, max].min - [lower, min].max + 1
end
# Returns true if the given index lies within the
# given [min, max] range (inclusive)
def _index_in_range?(i, max, min)
  i <= max && i >= min
end
# Returns the access label for a bit collection: 'RW', 'RO', 'WO' or 'X'
# (neither readable nor writable)
def _bit_rw(bits)
  r = bits.readable?
  w = bits.writable?
  if r && w
    'RW'
  elsif r
    'RO'
  elsif w
    'WO'
  else
    'X'
  end
end
end
end
end
# TODO: remove debug comments
require 'json'
module Origen
module Registers
# The register class can be used to represent not only h/ware resgisters,
# but really any entity which has an address and data component, such as a specific RAM location.<br>
# Any registers instantiated through Origen::Registers#add_reg are instances of this class.
#
# All methods in BitCollection can also be called on a Reg object.
class Reg
include Origen::SubBlocks::Path
include Origen::SubBlocks::Domains
# These attributes can be defined on a register at definition time and will get applied
# to all of its contained bits unless a specific bit has its own definition of the same
# attribute. Keys carry a '_' prefix to match the instance variable naming used
# when registers are created via Placeholder (see #initialize).
# Frozen to prevent accidental mutation of a shared constant (the nested
# per-attribute hashes remain open for extension).
REG_LEVEL_ATTRIBUTES = {
  _feature: {},
  _reset: { aliases: [:res] },
  _memory: {},
  _path: { aliases: [:hdl_path] },
  _abs_path: { aliases: [:absolute_path] },
  _access: {},
  _bit_order: {}
}.freeze
# Returns the object that owns the register.
# ==== Example
#   $soc.reg(:blah).owner # Returns the $soc object
attr_reader :owner
alias_method :parent, :owner
# The base address of the register, this will be set dynamically
# by Origen based on the parent's base address
attr_accessor :base_address
attr_writer :address # :nodoc:
# Returns an integer representing the number of bits in the register
attr_reader :size
# The register name
attr_accessor :name
# Any feature associated with the register
attr_accessor :feature
attr_accessor :grows_backwards # :nodoc:
attr_accessor :lookup # :nodoc:
# Returns a full path to the file in which the register was defined
attr_reader :define_file
# Returns any application-specific meta-data attached to the given register
attr_accessor :meta
alias_method :meta_data, :meta
alias_method :metadata, :meta
# If the given register's reset data is backed by memory, the memory address can
# be recorded in this attribute
attr_accessor :memory
# Normally shouldn't be called directly, instantiate through add_reg
# Upon initialization bits are stored as follows:
# @bits -
#   An array of bit objects in position order, @bits[5] corresponds
#   to the bit at position 5
# @lookup -
#   A Hash lookup table for quickly accessing bit objects by name
#   @lookup = { :bit_or_bus_name => {:pos => 3, :bits => 4} }
def initialize(owner, address, size, name, options = {}) # :nodoc:
  @owner = owner
  @address = address
  @size = size
  @bits = []
  @lookup = {}
  @name = name
  @init_as_writable = options.delete(:init_as_writable)
  @define_file = options.delete(:define_file)
  @from_placeholder = options.delete(:from_placeholder) || false
  # Reg-level attribute options arrive with a '_' prefix when the register is
  # created via Placeholder, and without it when created directly via Reg.new
  REG_LEVEL_ATTRIBUTES.each do |attribute, _meta|
    if @from_placeholder
      instance_variable_set("@#{attribute[1..-1]}", options.delete(attribute))
    else
      # If register creation is coming directly from Reg.new, instead of Placeholder,
      # it may not have attributes with '_' prefix
      instance_variable_set("@#{attribute[1..-1]}", options.delete(attribute[1..-1].to_sym))
    end
  end
  @description_from_api = {}
  description = options.delete(:description)
  if description
    # Store the description split into lines, handling both LF and CRLF
    @description_from_api[:_reg] = description.split(/\r?\n/)
  end
  @meta = default_reg_metadata.merge(options.delete(:meta) || {})
  # Initialize with unwritable bits that read back as zero, can override this
  # to make all writable by default by setting the :init_writable option to true
  @size.times do |n|
    @bits << Bit.new(self, n, writable: @init_as_writable, undefined: true)
  end
  add_bits_from_options(options)
end
# Returns the bit order attribute of the register (either :msb0 or :lsb0).
# If not explicitly defined on this register it is inherited from the parent,
# defaulting to :lsb0 at the top level. The resolved value is memoized.
def bit_order
  return @bit_order if @bit_order
  @bit_order = parent.respond_to?(:bit_order) ? parent.bit_order : :lsb0
end
# Freezes the register and all of its bits
def freeze
  bits.each(&:freeze)
  # Call any methods which cache results to generate the instance variables
  # before they are frozen (e.g. the base-address cache built by #address)
  address
  super
end
# Binds the given named bit field to a live-updating parameter; the bound
# value is applied by #update_bound_bits. Only live parameters are accepted.
def bind(bitname, live_parameter)
  live = live_parameter.respond_to?(:is_a_live_parameter?) && live_parameter.is_a_live_parameter?
  unless live
    fail 'Only live updating parameters should be bound, make sure you have not missed .live in the path to the parameter!'
  end
  @parameter_bound_bits ||= {}
  @parameter_bound_bits[bitname] = live_parameter
end
# Returns true if any bit fields are currently bound to live parameters
def has_parameter_bound_bits?
  bound = @parameter_bound_bits
  bound && !bound.empty?
end
# Re-writes every parameter-bound bit field with the current value of its
# bound live parameter; @updating_bound_bits is flagged for the duration so
# the writes can be distinguished from user writes
def update_bound_bits
  @updating_bound_bits = true
  @parameter_bound_bits.each_pair do |bitname, param|
    bits(bitname).write(param)
  end
  @updating_bound_bits = false
end
# Returns true while #update_bound_bits is in the process of re-writing
# parameter-bound bits (nil/false otherwise)
def updating_bound_bits?
  @updating_bound_bits
end
# Renders the register as a printable multi-line String table: a header with
# the address and name, then for each byte a three-row band showing the bit
# numbers, the bit names (truncated with '*' when too long) and the current
# bit state ('X' for undefined reset, 'M' for unknown value, plus any
# read/store/overlay markers from _state_desc). Drawn with Unicode
# box-drawing characters by default, or plain ASCII when
# options[:fancy_output] is false.
def inspect(options = {})
  # This fancy_output option is passed in via option hash
  # Even better, the output could auto-detect 7-bit vs 8-bit terminal output and adjust the parameter, but that's for another day
  fancy_output = options[:fancy_output].nil? ? true : options[:fancy_output]
  if fancy_output
    horiz_double_line = '═'
    horiz_double_tee_down = '╤'
    horiz_double_tee_up = '╧'
    corner_double_up_left = '╒'
    corner_double_up_right = '╕'
    horiz_single_line = '─'
    horiz_single_tee_down = '┬'
    horiz_single_tee_up = '┴'
    horiz_single_cross = '┼'
    horiz_double_cross = '╪'
    corner_single_down_left = '└'
    corner_single_down_right = '┘'
    vert_single_line = '│'
    vert_single_tee_left = '┤'
    vert_single_tee_right = '├'
  else
    horiz_double_line = '='
    horiz_double_tee_down = '='
    horiz_double_tee_up = '='
    corner_double_up_left = '.'
    corner_double_up_right = '.'
    horiz_single_line = '-'
    horiz_single_tee_down = '-'
    horiz_single_tee_up = '-'
    horiz_single_cross = '+'
    horiz_double_cross = '='
    corner_single_down_left = '`'
    corner_single_down_right = '\''
    vert_single_line = '|'
    vert_single_tee_left = '<'
    vert_single_tee_right = '>'
  end
  # Column width (in characters) of each bit cell
  bit_width = 13
  desc = ["\n0x%X - :#{name}" % address]
  # r is the number of bits in the partial (non multiple-of-8) top/bottom row
  r = size % 8
  # Top border of the table
  if r == 0 || (size > 8 && bit_order == :msb0)
    desc << (' ' + corner_double_up_left + ((horiz_double_line * bit_width + horiz_double_tee_down) * 8)).chop + corner_double_up_right
  else
    if bit_order == :lsb0
      desc << (' ' + (' ' * (bit_width + 1) * (8 - r)) + corner_double_up_left + ((horiz_double_line * bit_width + horiz_double_tee_down) * r)).chop + corner_double_up_right
    else
      desc << (' ' + corner_double_up_left + ((horiz_double_line * bit_width + horiz_double_tee_down) * r)).chop + corner_double_up_right
    end
  end
  # "<#{self.class}: #{self.name}>"
  num_bytes = (size / 8.0).ceil
  num_bytes.times do |byte_index|
    # Need to add support for little endian regs here?
    byte_number = num_bytes - byte_index
    if bit_order == :lsb0
      max_bit = (byte_number * 8) - 1
      min_bit = max_bit - 8 + 1
    else
      min_bit = (byte_index * 8)
      max_bit = min_bit + 7
    end
    # BIT INDEX ROW
    line = ' '
    line_complete = false
    8.times do |i|
      if bit_order == :lsb0
        bit_num = (byte_number * 8) - i - 1
      else
        bit_num = (byte_index * 8) + i
      end
      if bit_num > size - 1
        if bit_order == :msb0 && bit_num == size
          line += vert_single_line
          line_complete = true
        else
          line << ' ' + ''.center(bit_width) unless line_complete
        end
      else
        line << vert_single_line + "#{bit_num}".center(bit_width)
      end
    end
    line += vert_single_line unless line_complete
    desc << line
    # BIT NAME ROW
    line = ' '
    first_done = false
    line_complete = false
    named_bits include_spacers: true do |name, bit, bitcounter|
      if _bit_in_range?(bit, max_bit, min_bit)
        if bit_order == :lsb0
          # Left-pad the row of a partial top byte so cells align to the border
          if max_bit > (size - 1) && !first_done
            (max_bit - (size - 1)).times do
              line << ' ' * (bit_width + 1)
            end
          end
        end
        if bit.size > 1
          if name
            if bitcounter.nil?
              if bit_order == :lsb0
                bit_name = "#{name}[#{_max_bit_in_range(bit, max_bit, min_bit)}:#{_min_bit_in_range(bit, max_bit, min_bit)}]"
              else
                bit_name = "#{name}[#{_min_bit_in_range(bit, max_bit, min_bit)}:#{_max_bit_in_range(bit, max_bit, min_bit)}]"
              end
              bit_span = _num_bits_in_range(bit, max_bit, min_bit)
            else
              # Split bit group: bitcounter carries the group-relative index
              upper = _max_bit_in_range(bit, max_bit, min_bit) + bitcounter - bit.size
              lower = _min_bit_in_range(bit, max_bit, min_bit) + bitcounter - bit.size
              if bit_order == :lsb0
                bit_name = "#{name}[#{upper}:#{lower}]"
              else
                bit_name = "#{name}[#{upper}:#{lower}]"
              end
              bit_span = upper - lower + 1
            end
            width = (bit_width * bit_span) + bit_span - 1
            if bit_name.length > width
              line << vert_single_line + "#{bit_name[0..width - 2]}*"
            else
              line << vert_single_line + bit_name.center(width)
            end
          else
            # Anonymous spacer collection: one empty cell per position in range
            bit.shift_out_left do |bit|
              if _index_in_range?(bit.position, max_bit, min_bit)
                line << vert_single_line + ''.center(bit_width)
              end
            end
          end
        else
          if name
            bit_name = "#{name}"
            if bit_name.length > bit_width
              txt = "#{bit_name[0..bit_width - 2]}*"
            else
              txt = bit_name
            end
          else
            txt = ''
          end
          line << vert_single_line + txt.center(bit_width)
        end
      end
      first_done = true
    end
    line += vert_single_line
    desc << line
    # BIT STATE ROW
    line = ' '
    first_done = false
    named_bits include_spacers: true do |name, bit, _bitcounter|
      if _bit_in_range?(bit, max_bit, min_bit)
        if bit_order == :lsb0
          if max_bit > (size - 1) && !first_done
            (max_bit - (size - 1)).times do
              line << ' ' * (bit_width + 1)
            end
          end
        end
        if bit.size > 1
          if name
            if bit.has_known_value?
              value = '0x%X' % bit.val[_max_bit_in_range(bit, max_bit, min_bit).._min_bit_in_range(bit, max_bit, min_bit)]
            else
              # X = undefined reset value, M = modelled-but-unknown value
              if bit.reset_val == :undefined
                value = 'X'
              else
                value = 'M'
              end
            end
            value += _state_desc(bit)
            bit_span = _num_bits_in_range(bit, max_bit, min_bit)
            width = bit_width * bit_span
            line << vert_single_line + value.center(width + bit_span - 1)
          else
            bit.shift_out_left do |bit|
              if _index_in_range?(bit.position, max_bit, min_bit)
                line << vert_single_line + ''.center(bit_width)
              end
            end
          end
        else
          if name
            if bit.has_known_value?
              val = bit.val
            else
              if bit.reset_val == :undefined
                val = 'X'
              else
                val = 'M'
              end
            end
            value = "#{val}" + _state_desc(bit)
            line << vert_single_line + value.center(bit_width)
          else
            line << vert_single_line + ''.center(bit_width)
          end
        end
      end
      first_done = true
    end
    line += vert_single_line
    desc << line
    # Row separator between bytes, or the bottom border for the final byte
    if size >= 8
      r = size % 8
      if byte_index == 0 && r != 0 && bit_order == :lsb0
        desc << (' ' + corner_double_up_left + ((horiz_double_line * bit_width + horiz_double_tee_down) * (8 - r)).chop + horiz_double_cross + (horiz_single_line * (bit_width + 1) * r)).chop + vert_single_tee_left
      elsif (byte_index == num_bytes - 1) && r != 0 && bit_order == :msb0
        desc << (' ' + corner_single_down_left + ((horiz_single_line * bit_width + horiz_single_tee_up) * r)).chop + corner_single_down_right
      elsif (byte_index == num_bytes - 2) && r != 0 && bit_order == :msb0
        desc << ' ' + vert_single_tee_right + ((horiz_single_line * bit_width + horiz_single_cross) * r) + ((horiz_single_line * bit_width + horiz_single_tee_up) * (8 - r)).chop + corner_single_down_right
      else
        if byte_index == num_bytes - 1
          desc << (' ' + corner_single_down_left + ((horiz_single_line * bit_width + horiz_single_tee_up) * 8)).chop + corner_single_down_right
        else
          desc << (' ' + vert_single_tee_right + ((horiz_single_line * bit_width + horiz_single_cross) * 8)).chop + vert_single_tee_left
        end
      end
    else
      if bit_order == :lsb0
        desc << (' ' + (' ' * (bit_width + 1) * (8 - size)) + corner_single_down_left + ((horiz_single_line * bit_width + horiz_single_tee_up) * size)).chop + corner_single_down_right
      else
        desc << (' ' + corner_single_down_left + ((horiz_single_line * bit_width + horiz_single_tee_up) * size)).chop + corner_single_down_right
      end
    end
  end
  desc.join("\n")
end
# Returns a hash containing all register descriptions that have been parsed so far.
#
# NOTE: this is backed by a class variable, so the parsed-description cache is
# shared process-wide by all registers (keyed by define file, then register
# name, then bit name).
#
# @api private
def description_lookup
  @@description_lookup ||= {}
end
# Returns any application specific metadata that has been inherited by the
# given register.
# This does not account for any overridding that may have been applied to
# this register specifically however, use the meta method to get that.
# Global defaults are merged with any metadata registered for the owner's
# class, the latter taking precedence.
def default_reg_metadata
  Origen::Registers.default_reg_metadata.merge(
    Origen::Registers.reg_metadata[owner.class] || {})
end
# Returns a hash mapping bit values to their descriptions, extracted from
# description lines of the form "<value> | <meaning>" for the given bits.
#
# @param bitname [Symbol] the bits whose value table should be returned
# @param options [Hash]
# @option options [Symbol] :format (:binary) how the values in the description
#   are written: :bin/:binary, :hex/:hexadecimal or :dec/:decimal
# @raise [RuntimeError] if an unknown format is supplied
def bit_value_descriptions(bitname, options = {})
  options = {
    format: :binary
  }.merge(options)
  base = case options[:format]
         when :bin, :binary
           2
         when :hex, :hexadecimal
           16
         when :dec, :decimal
           10
         else
           fail "Unknown integer format: #{options[:format]}"
         end
  # \d+ can never match the hex digits A-F, so hexadecimal value tables would
  # silently drop those entries - use a hex-aware capture when base is 16
  value_capture = base == 16 ? /^\s*([0-9a-fA-F]+)\s+\|\s+(.+)/ : /^\s*(\d+)\s+\|\s+(.+)/
  desc = {}
  description(bitname).each do |line|
    if line =~ value_capture
      desc[Regexp.last_match[1].to_i(base)] = Regexp.last_match[2]
    end
  end
  desc
end
# Returns the full name of the register when this has been specified in the register
# description like this:
#
#   # ** This is the Register Full Name **
#   # This register blah blah
#
# This method will also be called by bit collections to look up the name when
# defined in a similar manner in the bit description.
#
# If no name has been specified this will return nil.
def full_name(bitname = :_reg, _options = {})
  # Allows full_name(options) to be called with the default :_reg target
  # NOTE(review): the local assigned here is named options but the parameter is
  # _options, so the extracted hash is never used - confirm intent
  bitname, options = :_reg, bitname if bitname.is_a?(Hash)
  desc = description(bitname).first
  # Capture something like this:
  # ** This is the full name ** - This bit blah blah
  if desc && desc =~ /\s*\*\*\s*([^\*.]*)\s*\*\*/
    Regexp.last_match[1].strip
  end
end
# Escapes brackets and parenthesis so the string can be safely embedded in a
# regular expression. Helper for description method. Returns nil for nil input.
def escape_special_char(str)
  return unless str
  str.gsub(/[\[\]()]/) { |match| "\\#{match}" }
end
# Returns the description of this register if any, if none then an empty array
# will be returned
#
# **Note** Adding a description field will override any comment-driven documentation
# of a register (ie markdown style comments)
#
# @param bitname [Symbol] :_reg selects the register-level description,
#   otherwise the named bits' description (may also be passed an options hash
#   as the first arg, in which case :_reg is assumed)
# @option options [Boolean] :include_name (true) keep the leading "** Name **" marker
# @option options [Boolean] :include_bit_values (true) keep "<value> | <meaning>" lines
def description(bitname = :_reg, options = {})
  bitname, options = :_reg, bitname if bitname.is_a?(Hash)
  options = {
    include_name: true,
    include_bit_values: true
  }.merge(options)
  # API-supplied descriptions take priority over comment-parsed ones
  if @description_from_api[bitname]
    desc = @description_from_api[bitname]
  else
    # Lazily parse the define file the first time any description is requested
    parse_descriptions unless description_lookup[define_file]
    begin
      desc = description_lookup[define_file][name][bitname] || []
    rescue
      desc = []
    end
  end
  desc = desc.reject do |line|
    if bitname != :_reg
      unless options[:include_bit_values]
        !!(line =~ /^\s*(\d+)\s+\|\s+(.+)/)
      end
    else
      false
    end
  end
  if desc.first
    unless options[:include_name]
      desc[0] = desc.first.sub(/\s*\*\*\s*#{escape_special_char(full_name(bitname))}\s*\*\*\s*-?\s*/, '')
    end
  end
  # Trim leading/trailing blank lines
  desc.shift while desc.first && desc.first.strip.empty?
  desc.pop while desc.last && desc.last.strip.empty?
  desc
end
alias_method :descriptions, :description
# @api private
# Scans the register definition file (define_file) and records the comment
# lines immediately preceding each reg/add_reg call (stored under :_reg) and
# each bit/add_bit call (stored under the bit name) into description_lookup,
# keyed by file then register name. Comment runs not followed by a recognized
# definition line are discarded.
def parse_descriptions
  desc = []
  File.readlines(define_file).each do |line|
    if line =~ /^\s*#(.*)/
      desc << Regexp.last_match[1].strip
    # http://rubular.com/r/D8lg2P5kK1 http://rubular.com/r/XP4ydPV8Fd
    elsif line =~ /^\s*reg\(?\s*[:"'](\w+)["']?\s*,.*\sdo/ || line =~ /^\s*.*add_reg\(?\s*[:"'](\w+)["']?\s*,.*/
      @current_reg_name = Regexp.last_match[1].to_sym
      description_lookup[define_file] ||= {}
      description_lookup[define_file][@current_reg_name] ||= {}
      description_lookup[define_file][@current_reg_name][:_reg] = desc.dup
      desc = []
    # http://www.rubular.com/r/7FidbC1JRA
    elsif @current_reg_name && line =~ /^\s*(add_bit|bit|reg\.bit)s?\(?\s*\d+\.?\.?\d*\s*,\s*:(\w+)/
      description_lookup[define_file][@current_reg_name][Regexp.last_match[2].to_sym] = desc.dup
      desc = []
    else
      # Any other line breaks the comment run
      desc = []
    end
  end
end
# Returns true to advertise that this object contains bits (allows duck-typed
# callers to distinguish registers from other objects)
def contains_bits?
  true
end
# @api private
# Populates the register's named bits from the options captured at
# instantiation time. Hash values describe a single bit or a contiguous
# range; Array values describe a bit group split across the register.
def add_bits_from_options(options = {}) # :nodoc:
  # options is now an array for split bit groups or a hash if single bit/range bits
  # Now add the requested bits to the register, removing the unwritable bits as required
  options.each do |bit_id, bit_params|
    if bit_params.is_a? Hash
      description = bit_params.delete(:description)
      if description
        @description_from_api[bit_id] = description.split(/\r?\n/)
      end
      bind(bit_id, bit_params.delete(:bind)) if bit_params[:bind]
      position = bit_params[:pos] || 0
      num_bits = bit_params[:bits] || 1
      # Apply any register-level reset value/access to the individual bits
      if @_reset
        if @_reset.is_a?(Symbol)
          bit_params[:res] = @_reset
        else
          # NOTE(review): the slice [(num_bits + position - 1), position] looks
          # suspicious (position as the second index rather than as the lsb) -
          # confirm against the intended bit-slice semantics
          bit_params[:res] = @_reset[(num_bits + position - 1), position]
        end
      end
      bit_params[:access] = @_access if bit_params[:access].nil?
      # :data/:reset are accepted as aliases for :res
      bit_params[:res] = bit_params[:data] if bit_params[:data]
      bit_params[:res] = bit_params[:reset] if bit_params[:reset]
      if num_bits == 1
        add_bit(bit_id, position, bit_params) # and add the new one
      else
        add_bus(bit_id, position, num_bits, bit_params)
      end
    elsif bit_params.is_a? Array
      description = bit_params.map { |h| h.delete(:description) }.compact.join("\n")
      unless description.empty?
        @description_from_api[bit_id] = description.split(/\r?\n/)
      end
      add_bus_scramble(bit_id, bit_params)
    end
  end
  self
end
# This method is called whenever reg.clone is called to make a copy of a given register.
# Ruby will correctly copy all instance variables but it will not drill down to copy nested
# attributes, like the bits contained in @bits.
# This function will therefore correctly clone all bits contained in the register also.
def initialize_copy(orig) # :nodoc:
  @bits = orig.bits.map(&:clone)
  @lookup = orig.lookup.clone
  self
end
# Returns a dummy register object that can be used on the fly, this can sometimes
# be useful to configure an intricate read operation.
# ==== Example
#   # Read bit 5 of RAM address 0xFFFF1280
#   dummy = Reg.dummy            # Create a dummy reg to configure the read operation
#   dummy.address = 0xFFFF1280   # Set the address
#   dummy.bit(5).read!(1)        # Read bit 5 expecting a 1
#
# @param size [Integer] number of bits in the dummy register (default 16)
def self.dummy(size = 16)
  # self here is the Reg class itself, which acts as the owner of the dummy
  Reg.new(self, 0, size, :dummy, init_as_writable: true)
end
# Returns each named bit collection contained in the register,
# yielding (name, collection) pairs - or (name, collection, bitcounter)
# triples when the register contains split bit groups - working from the
# highest bit position downwards for :lsb0 registers. When no block is given
# an array of the same tuples is returned instead.
#
# @option options [Boolean] :include_spacers (false) also yield anonymous
#   dummy collections for the unnamed gaps between named bits
def named_bits(options = {})
  options = {
    include_spacers: false
  }.merge(options)
  result = []
  # test if @lookup has any values stored as an array
  # if so it means there is a split group of bits
  # process that differently to a single bit or continuous range of bits
  # which are typically stored in a hash
  split_bits = false
  @lookup.each { |_k, v| split_bits = true if v.is_a? Array }
  if split_bits == false
    if bit_order == :lsb0
      current_pos = size
    else
      current_pos = 0
    end
    # Sort by position
    @lookup.sort_by { |_name, details| bit_order == :lsb0 ? -details[:pos] : details[:pos] }.each do |name, details|
      if bit_order == :lsb0
        pos = details[:bits] + details[:pos]
      else
        pos = details[:pos]
      end
      # Yield a spacer for any gap between the previous bits and these bits
      if options[:include_spacers] && (pos != current_pos)
        if bit_order == :lsb0
          collection = BitCollection.dummy(self, nil, size: current_pos - pos, pos: pos)
        else
          collection = BitCollection.dummy(self, nil, size: pos - current_pos, pos: current_pos)
        end
        unless collection.size == 0
          if block_given?
            yield nil, collection
          else
            result << [nil, collection]
          end
        end
      end
      collection = BitCollection.new(self, name)
      details[:bits].times do |i|
        collection << @bits[details[:pos] + i]
      end
      unless collection.size == 0
        if block_given?
          yield name, collection
        else
          result << [name, collection]
        end
      end
      if bit_order == :lsb0
        current_pos = details[:pos]
      else
        current_pos = details[:bits] + details[:pos]
      end
    end
    # Trailing spacer covering any unnamed bits at the end of the register
    if options[:include_spacers] && ((bit_order == :lsb0 && current_pos != 0) ||
        bit_order == :msb0 && current_pos != size)
      if bit_order == :lsb0
        collection = BitCollection.dummy(self, nil, size: current_pos, pos: 0)
      else
        collection = BitCollection.dummy(self, nil, size: size - current_pos, pos: current_pos)
      end
      unless collection.size == 0
        if block_given?
          yield nil, collection
        else
          result << [nil, collection]
        end
      end
    end
  elsif split_bits == true # if there are split bits, need to convert all register bit values to array elements to allow sorting
    # if the register has bits split up across it, then store the bits in order of decreasing reg position
    # but first, stuff all the bits in a simple array, as single bits, or ranges of bits
    @lookup_splits = []
    @lookup.each do |k, v|
      tempbit = {}
      bitcounter = {}
      if v.is_a? Hash
        # then this is already a single bit or a continuous range so just stuff it into the array
        tempbit[k] = v
        @lookup_splits << tempbit.clone
      elsif v.is_a? Array
        # if the bitgroup is split, then decompose into single bits and continuous ranges
        v.each_with_index do |bitdetail, _i|
          if bitcounter.key?(k)
            bitcounter[k] = bitcounter[k] + bitdetail[:bits]
          else
            bitcounter[k] = bitdetail[:bits]
          end
          tempbit[k] = bitdetail
          @lookup_splits << tempbit.clone
        end
      end
      # Annotate each piece of a split group with its group-relative position
      if v.is_a? Array
        @lookup_splits.each_with_index do |_e, q|
          groupname = @lookup_splits[q].to_a[0][0]
          if groupname == k
            @lookup_splits[q][groupname][:bitgrouppos] = bitcounter[groupname] if groupname == k
            bitcounter[groupname] = bitcounter[groupname] - @lookup_splits[q][groupname][:bits]
          end
        end
      end
    end
    # Now sort the array in descending order
    # Does adding the bitgrouppos need to happen after the sort ?
    @lookup_splits = @lookup_splits.sort do |a, b|
      b.to_a[0][1][:pos] <=> a.to_a[0][1][:pos]
    end
    current_pos = size
    countbits = {} # if countbits.method == nil
    @master = {}
    bitgroup = {}
    bitinfo = {}
    info = {}
    @lookup_splits.each_with_index do |hash, _i|
      name = hash.to_a[0][0]
      details = hash.to_a[0][1]
      bitcounter = hash.to_a[0][1][:bitgrouppos]
      pos = details[:bits] + details[:pos]
      if options[:include_spacers] && (pos != current_pos)
        collection = BitCollection.dummy(self, nil, size: current_pos - pos, pos: pos)
        unless collection.size == 0
          if block_given?
            yield nil, collection, bitcounter
          else
            result << [nil, collection, bitcounter]
          end
        end
      end
      collection = BitCollection.new(self, name)
      details[:bits].times do |i|
        collection << @bits[details[:pos] + i]
      end
      unless collection.size == 0
        if block_given?
          yield name, collection, bitcounter
        else
          result << [name, collection, bitcounter]
        end
      end
      current_pos = details[:pos]
    end
    # NOTE(review): bitcounter below is only ever assigned inside the blocks
    # above, so it is block-local and this reference looks like it would raise
    # NameError if this trailing-spacer branch is ever hit - confirm
    if options[:include_spacers] && current_pos != 0
      collection = BitCollection.dummy(self, nil, size: current_pos, pos: 0)
      unless collection.size == 0
        if block_given?
          yield nil, collection, bitcounter
        else
          result << [nil, collection, bitcounter]
        end
      end
    end
  end
  unless block_given?
    result
  end
end
# Returns each named bit collection contained in self, yielding them in the
# opposite order to named_bits.
#
# BUG FIX: despite its name this method previously yielded the collections in
# exactly the same order as named_bits (it collected them and then iterated
# forwards) - it now iterates the collected pairs in reverse.
def reverse_named_bits(_options = {})
  collected = []
  named_bits { |name, bit| collected << [name, bit] }
  collected.reverse_each do |name, bit|
    yield name, bit
  end
end
# Returns a sorted array of the bit positions occupied by named bits
# ==== Example
#   reg :fstat, @base + 0x0000, :size => 8 do
#     bit 7, :ccif
#     bit 6, :rdcolerr
#     bit 5, :accerr
#     bit 4, :pviol
#     bit 0, :mgstat0
#   end
#   regs(:fstat).used_bits
#       => [0, 4, 5, 6, 7]
#
# ==== Example
#   reg :aguahb2, @base + 0x2A, :size => 8 do
#     bit 5..2, :m0b_hbstrb, :reset => 0x0
#     bit 1..0, :m0b_htrans, :reset => 0x2
#   end
#   regs(:aguahb2).used_bits
#       => [0, 1, 2, 3, 4, 5]
def used_bits(_options = {})
  positions = []
  named_bits do |_name, bit|
    positions.concat((bit.position..(bit.position + bit.size - 1)).to_a)
  end
  positions.sort
end
# Returns true if the register has any named bit positions, false when
# used_bits is an empty array
def used_bits?(_options = {})
  !used_bits.empty?
end
# Returns a sorted array of the bit positions not occupied by any named bit
# ==== Example
#   reg :fstat, @base + 0x0000, :size => 8 do
#     bit 7, :ccif
#     bit 6, :rdcolerr
#     bit 5, :accerr
#     bit 4, :pviol
#     bit 0, :mgstat0
#   end
#   regs(:fstat).empty_bits
#       => [1, 2, 3]
#
# ==== Example
#   reg :aguahb2, @base + 0x2A, :size => 8 do
#     bit 5..2, :m0b_hbstrb, :reset => 0x0
#     bit 1..0, :m0b_htrans, :reset => 0x2
#   end
#   regs(:aguahb2).empty_bits
#       => [6, 7]
def empty_bits(_options = {})
  (0...size).to_a - used_bits
end
# Returns true if the register has any unoccupied bit positions, false when
# empty_bits returns an empty array (i.e. the register is fully populated)
def empty_bits?(_options = {})
  empty_bits.size > 0
end
# Proxies requests from bit collections to the register owner
#
# Resolves the object responsible for the operation (:read_register goes to
# the reader, anything else to the writer) and dispatches to it; invokes the
# owner's (or top-level's) *_register_missing! hook when no handler is found.
def request(operation, options = {}) # :nodoc:
  if operation == :read_register
    object = reader
    (Origen.top_level || owner).read_register_missing!(self) unless object
  else
    object = writer
    (Origen.top_level || owner).write_register_missing!(self) unless object
  end
  object.send(operation, self, options)
  self
end
# Returns the object that will be responsible for writing the given register
# (cached after the first lookup)
def writer
  @writer ||= lookup_operation_handler(:write_register)
end
# Returns the object that will be responsible for reading the given register
# (cached after the first lookup)
def reader
  @reader ||= lookup_operation_handler(:read_register)
end
# @api private
# Finds the object implementing the given operation (e.g. :write_register),
# searching in priority order: the owner's controller (if any), the owner
# itself, the owner's owner, and finally the top-level DUT. Returns nil if
# nothing responds to the operation.
def lookup_operation_handler(operation)
  # Could have made the controller be the owner when assigned above, but may have run
  # into problems with the reg meta data stuff
  reg_owner = owner.respond_to?(:controller) && owner.controller ? owner.controller : owner
  if reg_owner.respond_to?(operation)
    reg_owner
  elsif reg_owner.respond_to?(:owner) && reg_owner.owner.respond_to?(operation)
    reg_owner.owner
  elsif Origen.top_level && Origen.top_level.respond_to?(operation)
    Origen.top_level
  end
end
# Returns the relative address of the given register, equivalent to calling
# reg.address(:relative => true)
def offset
  address(relative: true)
end
# Returns the register address added to its current base_address value (if any).
#
# @param [Hash] options
# @option options [Boolean] :relative (false) Return the address without adding the base address (if present)
# @option options [Symbol] :domain / :domains select the address domain used
#   when resolving the base address from the owner
def address(options = {})
  options = {
    relative: false
  }.merge(options)
  address = @address
  domain_option = options[:domains] || options[:domain]
  @domain_option ||= domain_option unless frozen?
  # Blow the cache when the domain option changes
  @base_address_applied = nil unless @domain_option == domain_option
  # The resolved base address is cached; it is only recomputed when the
  # domain option changes (see cache-blowing above)
  unless @base_address_applied
    # Give highest priority to the original API which allowed the object
    # doing register read/write to define a base_address method
    if (writer && writer.methods.include?(:base_address) && writer.method(:base_address).arity != 0) ||
       (reader && reader.methods.include?(:base_address) && reader.method(:base_address).arity != 0)
      # This currently assumes that the base address is always the same
      # for reading and writing
      if writer && writer.respond_to?(:base_address) && writer.method(:base_address).arity != 0
        self.base_address = writer.base_address(self)
      elsif reader && reader.respond_to?(:base_address) && reader.method(:base_address).arity != 0
        self.base_address = reader.base_address(self)
      end
    else
      # Otherwise fall back to the owner's reg_base_address for the domain
      o = owner.is_a?(Container) ? owner.owner : owner
      d = domain_option || domains
      if o && o.reg_base_address(domain: d)
        self.base_address = o.reg_base_address(domain: d)
      end
    end
    @base_address_applied = true
  end
  unless options[:relative]
    address += base_address if base_address
  end
  if options[:address_type]
    Origen.deprecate 'Specifying the address_type of a register address will be removed from Origen 3'
    case options[:address_type]
      when :byte
        address = address * 2
      when :word
        address = address
      when :longword
        address = address / 2
      else
        fail 'Unknown address type requested!'
    end
  end
  address
end
alias_method :addr, :address
# Returns true if the register owner matches the given name. A match will be detected
# if the class names of the register's owner contains the given name.
#
# Alternatively if the register owner implements a method called reg_owner_alias
# then the value that this returns instead will also be considered when checking if the given
# name matches. This method can return an array of names if multiple aliases are required.
#
# Aliases can be useful for de-coupling the commonly used name, e.g. "NVM" from the actual
# class name.
#
# @example
#   class C90NVM
#     include Origen::Registers
#
#     def initialize
#       add_reg :clkdiv, 0x3, 16, :div => {:bits => 8}
#     end
#
#   end
#
#   reg = C90NVM.new.reg(:clkdiv)
#   reg.owned_by?(:ram)      # => false
#   reg.owned_by?(:nvm)      # => true
#   reg.owned_by?(:c90nvm)   # => true
#   reg.owned_by?(:c40nvm)   # => false
#   reg.owned_by?(:flash)    # => false
#
# @example Using an alias
#   class C90NVM
#     include Origen::Registers
#
#     def initialize
#       add_reg :clkdiv, 0x3, 16, :div => {:bits => 8}
#     end
#
#     def reg_owner_alias
#       "flash"
#     end
#
#   end
#
#   reg = C90NVM.new.reg(:clkdiv)
#   reg.owned_by?(:ram)      # => false
#   reg.owned_by?(:nvm)      # => true
#   reg.owned_by?(:c90nvm)   # => true
#   reg.owned_by?(:c40nvm)   # => false
#   reg.owned_by?(:flash)    # => true
def owned_by?(name)
  pattern = /#{name}/i
  return true if owner.class.to_s =~ pattern
  return false unless owner.respond_to?(:reg_owner_alias)
  [owner.reg_owner_alias].flatten.any? do |al|
    al.to_s =~ pattern
  end
end
# Returns true if the register contains a bit(s) matching the given name
# ==== Example
#   add_reg :control, 0x55, :status  => {:pos => 1}
#
#   reg(:control).has_bit?(:result)    # => false
#   reg(:control).has_bit?(:status)    # => true
def has_bit?(name)
  @lookup.key?(name)
end
alias_method :has_bits?, :has_bit?
alias_method :has_bit, :has_bit?
alias_method :has_bits, :has_bit?
# Add a bit to the register, should only be called internally.
# Replaces the anonymous placeholder bit at the given position with a named
# Bit and records it in the lookup table.
def add_bit(id, position, options = {}) # :nodoc:
  options = { data: @bits[position].data, # If undefined preserve any data/reset value that has
              res: @bits[position].data, # already been applied at reg level
            }.merge(options)
  @lookup[id] = { pos: position, bits: 1, feature: options[:feature] }
  @bits.delete_at(position) # Remove the initial bit from this position
  @bits.insert(position, Bit.new(self, position, options))
  self
end
# Add a bus to the register, should only be called internally.
# Replaces the placeholder bits at positions position..(position + size - 1)
# with named Bits, distributing the supplied data/reset values bit by bit
# (a Symbol reset, e.g. :undefined, is applied to every bit as-is).
def add_bus(id, position, size, options = {}) # :nodoc:
  # Capture any data already applied at reg level so it can be preserved
  default_data = 0
  size.times do |n|
    default_data |= @bits[position + n].data << n
  end
  options = { data: default_data, # If undefined preserve any data/reset value that has
              res: default_data, # already been applied at reg level
            }.merge(options)
  @lookup[id] = { pos: position, bits: size }
  size.times do |n|
    bit_options = options.dup
    bit_options[:data] = options[:data][n]
    if options[:res].is_a?(Symbol)
      bit_options[:res] = options[:res]
    else
      bit_options[:res] = options[:res][n]
    end
    @bits.delete_at(position + n)
    @bits.insert(position + n, Bit.new(self, position + n, bit_options))
  end
  self
end
# Adds a split (scrambled) bit group to the register, should only be called
# internally. Each hash in the array describes one contiguous piece of the
# group (:pos/:bits plus optional :bind, :data and :reset values); the pieces
# are appended to an Array entry under the group name in @lookup.
#
# BUG FIXES vs the previous version:
# - `self` was the last expression inside the each block, so the method
#   returned array_of_hashes rather than self (now consistent with
#   add_bit/add_bus)
# - `size = options[:bits]` crashed with NoMethodError when :bits was not
#   given; it now defaults to 1 (matching the unused num_bits default that
#   showed the intent)
# - removed the no-op `options[:data] = options[:data] if options[:data]` line
def add_bus_scramble(id, array_of_hashes = [])
  array_of_hashes.each do |options|
    bind(id, options.delete(:bind)) if options[:bind]
    position = options[:pos] || 0
    size = options[:bits] || 1
    options[:res] = options[:reset] if options[:reset]
    # Capture any data already applied at reg level so it can be preserved
    default_data = 0
    size.times do |n|
      default_data |= @bits[position + n].data << n
    end
    options = { data: default_data, # If undefined preserve any data/reset value that has
                res: default_data, # already been applied at reg level
              }.merge(options)
    @lookup[id] = [] if @lookup[id].nil?
    @lookup[id] = @lookup[id].push(pos: position, bits: size)
    size.times do |n|
      bit_options = options.dup
      bit_options[:data] = options[:data][n]
      bit_options[:res] = options[:res][n]
      @bits.delete_at(position + n)
      @bits.insert(position + n, Bit.new(self, position + n, bit_options))
    end
  end
  self
end
# Delete the bits in the collection from the register.
# The named lookup entries are removed and each vacated position is refilled
# with an anonymous bit whose writability matches the register default.
def delete_bit(collection)
  [collection.name].flatten.each do |name|
    @lookup.delete(name)
  end
  collection.each do |bit|
    @bits.delete_at(bit.position) # Remove the bit
    @bits.insert(bit.position, Bit.new(self, bit.position, writable: @init_as_writable))
  end
  self
end
alias_method :delete_bits, :delete_bit
# @api private
# Yields every index covered by the given range, normalizing reversed ranges
# (e.g. 5..2 yields 2, 3, 4, 5). A forwards range is iterated as-is, so an
# exclusive end (e.g. 1...4) is respected.
def expand_range(range)
  range = (range.last..range.first) if range.first > range.last
  range.each { |i| yield i }
end
# Returns the bit object(s) responding to the given name, wrapped in a BitCollection.
# This method also accepts multiple name possibilities, if neither bit exists in
# the register it will raise an error, otherwise it will return the first match.
# If no args passed in, it will return a BitCollection containing all bits.
# If a number is passed in then the bits from those positions are returned.
# ==== Example
#   add_reg :control, 0x55, :status   => {:pos => 1, :bits => 2},
#                           :fail     => {:pos => 0}
#
#   reg(:control).bit(:fail)              # => Returns a BitCollection containing the fail bit
#   reg(:control).bits(:status)           # => Returns a BifCollection containing the status bits
#   reg(:control).bit(:bist_fail, :fail)  # => Returns a BitCollection containing the fail bit
#   reg(:control).bit(0)                  # => Returns a BitCollection containing the fail bit
#   reg(:control).bit(1)                  # => Returns a BitCollection containing status bit
#   reg(:control).bit(1,2)                # => Returns a BitCollection containing both status bits
def bit(*args)
  multi_bit_names = false
  # return get_bits_with_constraint(nil,:default) if args.size == 0
  # Any trailing options hash is treated as a feature constraint
  constraint = extract_feature_params(args)
  if constraint.nil?
    constraint = :default
  end
  collection = BitCollection.new(self, :unknown)
  if args.size == 0
    # No args: return all bits that satisfy the constraint
    collection.add_name(name)
    @bits.each do |bit|
      collection << get_bits_with_constraint(bit.position, constraint)
    end
  else
    args.flatten!
    args.sort!
    args.each do |arg_item|
      if arg_item.is_a?(Integer)
        b = get_bits_with_constraint(arg_item, constraint)
        collection << b if b
      elsif arg_item.is_a?(Range)
        expand_range(arg_item) do |bit_number|
          collection << get_bits_with_constraint(bit_number, constraint)
        end
      else
        multi_bit_names = args.size > 1
        # Reaches here if bit name is specified
        if @lookup.include?(arg_item)
          split_bits = false
          @lookup.each { |_k, v| split_bits = true if v.is_a? Array }
          coll = get_lookup_feature_bits(arg_item, constraint, split_bits)
          if coll
            coll.each do |b|
              collection.add_name(arg_item)
              collection << b
            end
          end
        end
      end
    end
  end
  if collection.size == 0
    # Originally Origen returned nil when asking for a bit via an index which does not
    # exist, e.g. reg[1000] => nil
    # The args numeric clause here is to maintain that behavior
    if Origen.config.strict_errors && !args.all? { |arg| arg.is_a?(Numeric) }
      puts "Register #{@name} does not have a bits(s) named :#{args.join(', :')} or it might not be enabled."
      puts 'This could also be a typo, these are the valid bit names:'
      puts @lookup.keys
      fail 'Missing bits error!'
    end
    nil
  else
    if multi_bit_names
      collection.sort_by!(&:position)
    end
    collection
  end
end
alias_method :bits, :bit
alias_method :[], :bit
# @api private
# Returns the bit at the given position, subject to the feature constraint in
# params: :default (must be enabled), :none (must have no feature constraint),
# :all (unconditional), an Array of features (all must be enabled) or a single
# feature name. Returns nil when the position does not exist or an Array
# constraint is not satisfied, and a non-writable dummy Bit when a single
# feature constraint is not satisfied.
def get_bits_with_constraint(number, params)
  bit = @bits[number]
  return nil unless bit
  if (params == :default || !params) && bit.enabled?
    bit
  elsif params == :none && !bit.has_feature_constraint?
    bit
  elsif params == :all
    bit
  elsif params.is_a?(Array)
    # BUG FIX: this branch previously used params.each and implicitly returned
    # the params array itself (an Array, not a Bit) when every feature was
    # enabled - now it returns the bit, or nil when any feature is disabled
    bit if params.all? { |param| bit.enabled_by_feature?(param) }
  elsif bit.enabled_by_feature?(params)
    bit
  else
    Bit.new(self, number, writable: false)
  end
end
# @api private
# Returns the named bits as a BitCollection, filtered by the feature
# constraint in params (:default / :none / :all / a single feature / an array
# of features). split_group_reg indicates whether @lookup contains any split
# bit groups (Array values). Returns a read-only dummy collection when the
# constraint disables the bits, or [] when the name is unknown.
def get_lookup_feature_bits(bit_name, params, split_group_reg)
  ##
  if split_group_reg == false # if this register has single bits and continuous ranges
    if @lookup.include?(bit_name)
      collection = BitCollection.new(self, bit_name)
      (@lookup[bit_name][:bits]).times do |i|
        collection << @bits[@lookup[bit_name][:pos] + i]
      end
      if !params || params == :default
        if collection.enabled?
          return collection
        end
      elsif params == :none
        unless collection.has_feature_constraint?
          return collection
        end
      elsif params == :all
        return collection
      elsif params.class == Array
        if params.all? { |param| collection.enabled_by_feature?(param) }
          return collection
        end
      else
        if collection.enabled_by_feature?(params)
          return collection
        end
      end
      return BitCollection.dummy(self, bit_name, size: collection.size, pos: @lookup[bit_name][:pos])
    else
      return []
    end
  elsif split_group_reg == true # if this registers has split bits in its range
    if @lookup.is_a?(Hash)
      collection = false
      @lookup.each do |k, v| # k is the bitname, v is the hash of bit data
        if k == bit_name
          collection ||= BitCollection.new(self, k)
          if v.is_a?(Array)
            v.reverse_each do |pb| # loop each piece of bit group data
              (pb[:bits]).times do |i|
                collection << @bits[pb[:pos] + i]
              end
            end
          else
            (v[:bits]).times do |i|
              collection << @bits[v[:pos] + i]
            end
          end
        end
      end
      # BUG FIX: guard against an unknown bit name, which previously left
      # collection == false and raised NoMethodError on the calls below
      return [] unless collection
      if !params || params == :default
        if collection.enabled?
          return collection
        end
      elsif params == :none
        unless collection.has_feature_constraint?
          return collection
        end
      elsif params == :all
        return collection
      elsif params.class == Array
        if params.all? { |param| collection.enabled_by_feature?(param) }
          return collection
        end
      else
        if collection.enabled_by_feature?(params)
          return collection
        end
      end
      if @lookup[bit_name].is_a?(Array)
        return BitCollection.dummy(self, bit_name, size: collection.size, pos: @lookup[bit_name][0][:pos])
      else
        # BUG FIX: was @lookup[bit_name[:pos]] (indexing the Symbol itself,
        # which raises TypeError) - the position lives at @lookup[bit_name][:pos]
        return BitCollection.dummy(self, bit_name, size: collection.size, pos: @lookup[bit_name][:pos])
      end
    else
      return []
    end
  end
end
# @api private
# Removes the first options Hash found in args (mutating args) and returns
# its :enabled_features (or :enabled_feature) value, or nil if no hash is
# present or it carries neither key.
def extract_feature_params(args)
  index = args.find_index { |arg| arg.instance_of?(Hash) }
  return nil unless index
  params = args.delete_at(index)
  params[:enabled_features] || params[:enabled_feature]
end
# All other Reg methods are delegated to BitCollection
def method_missing(method, *args, &block) # :nodoc:
  if method.to_sym == :to_ary || method.to_sym == :to_hash
    # Returning nil here tells Ruby the register does not support implicit
    # conversion to an Array/Hash (e.g. via splat or puts)
    nil
  elsif meta_data_method?(method)
    extract_meta_data(method, *args)
  else
    if BitCollection.instance_methods.include?(method)
      # Delegate to a collection wrapping all of this register's bits
      to_bit_collection.send(method, *args, &block)
    elsif has_bits?(method)
      # reg.my_bit_name is a shorthand for reg.bits(:my_bit_name)
      bits(method)
    else
      super
    end
  end
end
# Returns a BitCollection wrapping all bits of this register
def to_bit_collection
  BitCollection.new(self, name, @bits)
end
# Recognize that Reg responds to all BitCollection methods methods based on
# application-specific meta data properties
def respond_to?(*args) # :nodoc:
  sym = args.first.to_sym
  meta_data_method?(sym) || has_bits?(sym) || super(sym) || BitCollection.instance_methods.include?(sym)
end
# Copy overlays from one reg object to another
# ==== Example
# reg(:data_copy).has_overlay? # => false
# reg(:data).overlay("data_val")
# reg(:data_copy).copy_overlays_from(reg(:data))
# reg(:data_copy).has_overlay? # => true
def copy_overlays_from(reg, options = {})
size.times do |i|
source_bit = reg.bit[i]
if source_bit.has_overlay?
ov = source_bit.overlay_str
# If an id has been supplied make sure any trailing ID in the source is
# changed to supplied identifier
ov.gsub!(/_\d$/, "_#{options[:update_id]}") if options[:update_id]
@bits[i].overlay(ov)
end
end
self
end
# Copies data from one reg object to another
# ==== Example
# reg(:data_copy).data # => 0
# reg(:data).write(0x1234)
# reg(:data_copy).copy_data_from(reg(:data))
# reg(:data_copy).data # => 0x1234
def copy_data_from(reg)
size.times do |i|
@bits[i].write(reg.bit[i].data)
end
self
end
# Copies data and overlays from one reg object to another, it does not copy
# read or store flags
def copy(reg)
size.times do |i|
source_bit = reg.bit[i]
@bits[i].overlay(source_bit.overlay_str) if source_bit.has_overlay?
@bits[i].write(source_bit.data)
end
self
end
# Returns the BITWISE AND of reg with another reg or a number, the state of
# both registers remains unchanged
# ==== Example
# reg(:data).write(0x5555)
# reg(:data2).write(0xFFFF)
# reg(:data) & 0xFF00 # => 0x5500
# reg(:data) & reg(:data2) # => 0x5555
def &(val)
data & Reg.clean_value(val)
end
# Returns the BITWISE OR of reg with another reg or a number, the state of
# both registers remains unchanged
def |(val)
data | Reg.clean_value(val)
end
# Returns the SUM of reg with another reg or a number, the state of
# both registers remains unchanged
def +(val)
data + Reg.clean_value(val)
end
# Returns the SUBTRACTION of reg with another reg or a number, the state of
# both registers remains unchanged
def -(val)
data - Reg.clean_value(val)
end
# Returns the DIVISION of reg with another reg or a number, the state of
# both registers remains unchanged
def /(val)
data / Reg.clean_value(val)
end
# Returns the PRODUCT of reg with another reg or a number, the state of
# both registers remains unchanged
def *(val)
data * Reg.clean_value(val)
end
# Cleans an input value, in some cases it could be a register object, or an explicit value.
# This will return an explicit value in either case.
def self.clean_value(value) # :nodoc:
value = value.val if value.respond_to?('val') # Pull out the data value if a reg object has been passed in
value
end
# @api private
def meta_data_method?(method)
attr_name = method.to_s.gsub(/\??=?/, '').to_sym
if default_reg_metadata.key?(attr_name)
if method.to_s =~ /\?/
[true, false].include?(default_reg_metadata[attr_name])
else
true
end
else
false
end
end
def extract_meta_data(method, *args)
method = method.to_s.sub('?', '')
if method =~ /=/
instance_variable_set("@#{method.sub('=', '')}", args.first)
else
instance_variable_get("@#{method}") || meta[method.to_sym]
end
end
# Returns true if the register is constrained by the given/any feature
def enabled_by_feature?(name = nil)
if !name
!!feature
else
if feature.class == Array
feature.each do |f|
if f == name
return true
end
end
return false
else
return feature == name
end
end
end
alias_method :has_feature_constraint?, :enabled_by_feature?
# Query the owner heirarchy to see if this register is enabled
def enabled?
if feature
value = false
current_owner = self
if feature.class == Array
feature.each do |f|
current_owner = self
loop do
if current_owner.respond_to?(:owner)
current_owner = current_owner.owner
if current_owner.respond_to?(:has_feature?)
if current_owner.has_feature?(f)
value = true
break
end
end
else # if current owner does not have a owner
value = false
break
end
end # loop end
unless value
if Origen.top_level && \
Origen.top_level.respond_to?(:has_feature?) && \
Origen.top_level.has_feature?(f)
value = true
unless value
break
end
end
end
unless value
break # break if feature not found and return false
end
end # iterated through all features in array
return value
else # if feature.class != Array
loop do
if current_owner.respond_to?(:owner)
current_owner = current_owner.owner
if current_owner.respond_to?(:has_feature?)
if current_owner.has_feature?(feature)
value = true
break
end
end
else # if current owner does not have a owner
value = false
break
end
end # loop end
unless value
if Origen.top_level && \
Origen.top_level.respond_to?(:has_feature?) && \
Origen.top_level.has_feature?(feature)
value = true
end
end
return value
end
else
return true
end
end
# Returns true if any of the bits within this register has feature
# associated with it.
def has_bits_enabled_by_feature?(name = nil)
if !name
bits.any?(&:has_feature_constraint?)
else
bits.any? { |bit| bit.enabled_by_feature?(name) }
end
end
def to_json(*args)
JSON.pretty_generate({
name: name,
full_name: full_name,
address: address,
offset: offset,
size: size,
path: path,
reset_value: reset_value,
description: description(include_name: false, include_bit_values: false),
bits: named_bits.map do |name, bit|
{
name: name,
full_name: bit.full_name,
position: bit.position,
size: bit.size,
reset_value: bit.reset_value,
access: bit.access,
description: bit.description(include_name: false, include_bit_values: false),
bit_values: bit.bit_value_descriptions.map do |val, desc|
{
value: val,
description: desc
}
end
}
end
}, *args)
end
private
def _state_desc(bits)
state = []
unless bits.readable? && bits.writable?
if bits.readable?
state << 'RO'
else
state << 'WO'
end
end
state << 'Rd' if bits.is_to_be_read?
state << 'Str' if bits.is_to_be_stored?
state << 'Ov' if bits.has_overlay?
if state.empty?
''
else
"(#{state.join('|')})"
end
end
def _max_bit_in_range(bits, max, _min)
upper = bits.position + bits.size - 1
[upper, max].min - bits.position
end
def _min_bit_in_range(bits, _max, min)
lower = bits.position
[lower, min].max - bits.position
end
# Returns true if some portion of the given bits falls
# within the given range
def _bit_in_range?(bits, max, min)
upper = bits.position + bits.size - 1
lower = bits.position
!((lower > max) || (upper < min))
end
# Returns the number of bits from the given bits that
# fall within the given range
def _num_bits_in_range(bits, max, min)
upper = bits.position + bits.size - 1
lower = bits.position
[upper, max].min - [lower, min].max + 1
end
# Returns true if the given number is is the
# given range
def _index_in_range?(i, max, min)
!((i > max) || (i < min))
end
def _bit_rw(bits)
if bits.readable? && bits.writable?
'RW'
elsif bits.readable?
'RO'
elsif bits.writable?
'WO'
else
'X'
end
end
end
end
end
|
module Outbrain
  module Api
    # Gem version string (semver)
    VERSION = "0.2.10"
  end
end
Un-bump version (revert to 0.2.9)
module Outbrain
  module Api
    # Gem version string (semver)
    VERSION = "0.2.9"
  end
end
|
module PgSync
class TaskResolver
include Utils
attr_reader :args, :opts, :source, :destination, :config, :first_schema, :notes
def initialize(args:, opts:, source:, destination:, config:, first_schema:)
@args = args
@opts = opts
@source = source
@destination = destination
@config = config
@groups = config["groups"] || {}
@first_schema = first_schema
@notes = []
end
def tasks
tasks = []
# get lists from args
groups, tables = process_args
# expand groups into tasks
groups.each do |group|
tasks.concat(group_to_tasks(group))
end
# expand tables into tasks
tables.each do |table|
tasks.concat(table_to_tasks(table))
end
# get default if none given
if !opts[:groups] && !opts[:tables] && args.size == 0
tasks.concat(default_tasks)
end
# resolve any tables that need it
tasks.each do |task|
task[:table] = fully_resolve(task[:table])
end
tasks
end
def group?(group)
@groups.key?(group)
end
private
def group_to_tasks(value)
group, param = value.split(":", 2)
raise Error, "Group not found: #{group}" unless group?(group)
@groups[group].map do |table|
table_sql = nil
if table.is_a?(Array)
table, table_sql = table
end
{
table: to_table(table),
sql: expand_sql(table_sql, param)
}
end
end
def table_to_tasks(value)
raise Error, "Cannot use parameters with tables" if value.include?(":")
tables =
if value.include?("*")
regex = Regexp.new('\A' + Regexp.escape(value).gsub('\*','[^\.]*') + '\z')
shared_tables.select { |t| regex.match(t.full_name) || regex.match(t.name) }
else
[to_table(value)]
end
tables.map do |table|
{
table: table,
sql: sql_arg # doesn't support params
}
end
end
# treats identifiers as if they were quoted (Users == "Users")
# this is different from Postgres (Users == "users")
#
# TODO add support for quoted identifiers like "my.schema"."my.table"
# so it's possible to specify identifiers with "." in them
def to_table(value)
parts = value.split(".")
case parts.size
when 1
# unknown schema
Table.new(nil, parts[0])
when 2
Table.new(*parts)
else
raise Error, "Cannot resolve table: #{value}"
end
end
def default_tasks
shared_tables.map do |table|
{
table: table
}
end
end
# tables that exists in both source and destination
# used when no tables specified, or a wildcard
# removes excluded tables and filters by schema
def shared_tables
tables = source.tables
unless opts[:schema_only] || opts[:schema_first]
from_tables = tables
to_tables = destination.tables
extra_tables = to_tables - from_tables
notes << "Extra tables: #{extra_tables.map { |t| friendly_name(t) }.join(", ")}" if extra_tables.any?
missing_tables = from_tables - to_tables
notes << "Missing tables: #{missing_tables.map { |t| friendly_name(t) }.join(", ")}" if missing_tables.any?
tables &= to_tables
end
# could support wildcard schemas as well
if opts[:schemas]
schemas = Set.new(to_arr(opts[:schemas]))
tables.select! { |t| schemas.include?(t.schema) }
end
to_arr(opts[:exclude]).each do |value|
if value.include?("*")
regex = Regexp.new('\A' + Regexp.escape(value).gsub('\*','[^\.]*') + '\z')
tables.reject! { |t| regex.match(t.full_name) || regex.match(t.name) }
else
tables -= [fully_resolve(to_table(value))]
end
end
tables
end
def process_args
groups = to_arr(opts[:groups])
tables = to_arr(opts[:tables])
if args[0]
# could be a group, table, or mix
to_arr(args[0]).each do |value|
if group?(value.split(":", 2)[0])
groups << value
else
tables << value
end
end
end
[groups, tables]
end
def no_schema_tables
@no_schema_tables ||= begin
search_path_index = source.search_path.map.with_index.to_h
source.tables.group_by(&:name).map do |group, t2|
[group, t2.select { |t| search_path_index[t.schema] }.sort_by { |t| search_path_index[t.schema] }.first]
end.to_h
end
end
# for tables without a schema, find the table in the search path
def fully_resolve(table)
return table if table.schema
no_schema_tables[table.name] || (raise Error, "Table not found in source: #{table.name}")
end
# parse command line arguments and YAML
def to_arr(value)
if value.is_a?(Array)
value
else
# Split by commas, but don't use commas inside double quotes
# https://stackoverflow.com/questions/21105360/regex-find-comma-not-inside-quotes
value.to_s.split(/(?!\B"[^"]*),(?![^"]*"\B)/)
end
end
def sql_arg
args[1]
end
def expand_sql(sql, param)
# command line option takes precedence over group option
sql = sql_arg if sql_arg
return unless sql
# vars must match \w
missing_vars = sql.scan(/{\w+}/).map { |v| v[1..-2] }
vars = {}
if param
vars["id"] = cast(param)
vars["1"] = cast(param)
end
sql = sql.dup
vars.each do |k, v|
# only sub if in var list
sql.gsub!("{#{k}}", cast(v)) if missing_vars.delete(k)
end
raise Error, "Missing variables: #{missing_vars.uniq.join(", ")}" if missing_vars.any?
sql
end
# TODO quote vars in next major version
def cast(value)
value.to_s.gsub(/\A\"|\"\z/, '')
end
end
end
Filter tables before calculating notes
module PgSync
class TaskResolver
include Utils
attr_reader :args, :opts, :source, :destination, :config, :first_schema, :notes
def initialize(args:, opts:, source:, destination:, config:, first_schema:)
@args = args
@opts = opts
@source = source
@destination = destination
@config = config
@groups = config["groups"] || {}
@first_schema = first_schema
@notes = []
end
def tasks
tasks = []
# get lists from args
groups, tables = process_args
# expand groups into tasks
groups.each do |group|
tasks.concat(group_to_tasks(group))
end
# expand tables into tasks
tables.each do |table|
tasks.concat(table_to_tasks(table))
end
# get default if none given
if !opts[:groups] && !opts[:tables] && args.size == 0
tasks.concat(default_tasks)
end
# resolve any tables that need it
tasks.each do |task|
task[:table] = fully_resolve(task[:table])
end
tasks
end
def group?(group)
@groups.key?(group)
end
private
def group_to_tasks(value)
group, param = value.split(":", 2)
raise Error, "Group not found: #{group}" unless group?(group)
@groups[group].map do |table|
table_sql = nil
if table.is_a?(Array)
table, table_sql = table
end
{
table: to_table(table),
sql: expand_sql(table_sql, param)
}
end
end
def table_to_tasks(value)
raise Error, "Cannot use parameters with tables" if value.include?(":")
tables =
if value.include?("*")
regex = Regexp.new('\A' + Regexp.escape(value).gsub('\*','[^\.]*') + '\z')
shared_tables.select { |t| regex.match(t.full_name) || regex.match(t.name) }
else
[to_table(value)]
end
tables.map do |table|
{
table: table,
sql: sql_arg # doesn't support params
}
end
end
# treats identifiers as if they were quoted (Users == "Users")
# this is different from Postgres (Users == "users")
#
# TODO add support for quoted identifiers like "my.schema"."my.table"
# so it's possible to specify identifiers with "." in them
def to_table(value)
parts = value.split(".")
case parts.size
when 1
# unknown schema
Table.new(nil, parts[0])
when 2
Table.new(*parts)
else
raise Error, "Cannot resolve table: #{value}"
end
end
def default_tasks
shared_tables.map do |table|
{
table: table
}
end
end
# tables that exists in both source and destination
# used when no tables specified, or a wildcard
# removes excluded tables and filters by schema
def shared_tables
tables = filter_tables(source.tables)
unless opts[:schema_only] || opts[:schema_first]
from_tables = tables
to_tables = filter_tables(destination.tables)
extra_tables = to_tables - from_tables
notes << "Extra tables: #{extra_tables.map { |t| friendly_name(t) }.join(", ")}" if extra_tables.any?
missing_tables = from_tables - to_tables
notes << "Missing tables: #{missing_tables.map { |t| friendly_name(t) }.join(", ")}" if missing_tables.any?
tables &= to_tables
end
tables
end
def filter_tables(tables)
tables = tables.dup
# could support wildcard schemas as well
if opts[:schemas]
schemas = Set.new(to_arr(opts[:schemas]))
tables.select! { |t| schemas.include?(t.schema) }
end
to_arr(opts[:exclude]).each do |value|
if value.include?("*")
regex = Regexp.new('\A' + Regexp.escape(value).gsub('\*','[^\.]*') + '\z')
tables.reject! { |t| regex.match(t.full_name) || regex.match(t.name) }
else
tables -= [fully_resolve(to_table(value))]
end
end
tables
end
def process_args
groups = to_arr(opts[:groups])
tables = to_arr(opts[:tables])
if args[0]
# could be a group, table, or mix
to_arr(args[0]).each do |value|
if group?(value.split(":", 2)[0])
groups << value
else
tables << value
end
end
end
[groups, tables]
end
def no_schema_tables
@no_schema_tables ||= begin
search_path_index = source.search_path.map.with_index.to_h
source.tables.group_by(&:name).map do |group, t2|
[group, t2.select { |t| search_path_index[t.schema] }.sort_by { |t| search_path_index[t.schema] }.first]
end.to_h
end
end
# for tables without a schema, find the table in the search path
def fully_resolve(table)
return table if table.schema
no_schema_tables[table.name] || (raise Error, "Table not found in source: #{table.name}")
end
# parse command line arguments and YAML
def to_arr(value)
if value.is_a?(Array)
value
else
# Split by commas, but don't use commas inside double quotes
# https://stackoverflow.com/questions/21105360/regex-find-comma-not-inside-quotes
value.to_s.split(/(?!\B"[^"]*),(?![^"]*"\B)/)
end
end
def sql_arg
args[1]
end
def expand_sql(sql, param)
# command line option takes precedence over group option
sql = sql_arg if sql_arg
return unless sql
# vars must match \w
missing_vars = sql.scan(/{\w+}/).map { |v| v[1..-2] }
vars = {}
if param
vars["id"] = cast(param)
vars["1"] = cast(param)
end
sql = sql.dup
vars.each do |k, v|
# only sub if in var list
sql.gsub!("{#{k}}", cast(v)) if missing_vars.delete(k)
end
raise Error, "Missing variables: #{missing_vars.uniq.join(", ")}" if missing_vars.any?
sql
end
# TODO quote vars in next major version
def cast(value)
value.to_s.gsub(/\A\"|\"\z/, '')
end
end
end
|
# encoding: utf-8
require 'restclient'
require 'oj'
require 'nokogiri'
require 'phish_dot_net_client/version'
require 'phish_dot_net_client/set'
require 'phish_dot_net_client/setlist'
require 'phish_dot_net_client/song'
require 'phish_dot_net_client/song_transition'
# This module encapsulates interaction with the Phish.net API. It allows you to
# call any API method and will parse "setlistdata" fields in the JSON responses.
module PhishDotNetClient
extend self
# The possible API methods. Generated from +rake parse_method_docs+.
API_METHODS =
{
"pnet.api.authkey.get" => { :scope => "protected" },
"pnet.api.authorize" => { :scope => "protected" },
"pnet.api.authorized.check" => { :scope => "protected" },
"pnet.api.isAuthorized" => { :scope => "protected" },
"pnet.artists.get" => { :scope => "public" },
"pnet.blog.get" => { :scope => "public" },
"pnet.blog.item.get" => { :scope => "public" },
"pnet.collections.get" => { :scope => "protected" },
"pnet.collections.query" => { :scope => "protected" },
"pnet.forum.canpost" => { :scope => "protected" },
"pnet.forum.get" => { :scope => "public" },
"pnet.forum.thread.get" => { :scope => "protected" },
"pnet.forum.thread.new" => { :scope => "protected" },
"pnet.forum.thread.respond" => { :scope => "protected" },
"pnet.news.comments.get" => { :scope => "public" },
"pnet.news.get" => { :scope => "public" },
"pnet.reviews.query" => { :scope => "protected" },
"pnet.reviews.recent" => { :scope => "public" },
"pnet.shows.links.get" => { :scope => "protected" },
"pnet.shows.query" => { :scope => "protected" },
"pnet.shows.setlists.get" => { :scope => "protected" },
"pnet.shows.setlists.latest" => { :scope => "public" },
"pnet.shows.setlists.random" => { :scope => "public" },
"pnet.shows.setlists.recent" => { :scope => "public" },
"pnet.shows.setlists.tiph" => { :scope => "public" },
"pnet.shows.upcoming" => { :scope => "public" },
"pnet.user.get" => { :scope => "protected" },
"pnet.user.myshows.add" => { :scope => "protected" },
"pnet.user.myshows.get" => { :scope => "protected" },
"pnet.user.myshows.remove" => { :scope => "protected" },
"pnet.user.register" => { :scope => "protected" },
"pnet.user.shows.rate" => { :scope => "protected" },
"pnet.user.uid.get" => { :scope => "protected" },
"pnet.user.username.check" => { :scope => "protected" }
}
# The base URL for API calls
BASE_URL = "https://api.phish.net/api.js"
# "https://api.phish.net/api.js?api=2.0&method=pnet.shows.query&format=json&apikey=920FF765772E442F3E22&year=2011"
DEFAULT_PARAMS = { api: "2.0",
format: "json" }
# Set the apikey. The "private api key" from your Phish.net account should be
# used.
#
# @param private_api_key [String] the apikey
# @return [void]
def apikey=(private_api_key)
DEFAULT_PARAMS.merge!(:apikey => private_api_key)
end
# Calls pnet.api.authorize with the specified username and password, then stores
# the username and returned authkey if the call was successful. The password is
# not stored.
#
# @param username [String] the username
# @param password [String] the password
#
# @return [void]
def authorize(username, password)
resp = call_api_method("pnet.api.authorize", :username => username, :passwd => password)
if resp['success'] == 1 && resp.has_key?('authkey')
DEFAULT_PARAMS.merge!(:username => username, :authkey => resp['authkey'])
end
end
# Clears the stored API authentication parameters (apikey, username, authkey)
#
# @return [void]
def clear_auth
[:apikey, :username, :authkey].each { |key| DEFAULT_PARAMS.delete(key) }
end
# @api private
#
# Calls the specified Phish.net api method.
#
# @param api_method [String] the method to call
# @param params [Hash] the url parameters for the api call
#
# @raise [RuntimeError] if the http response status is a 2xx
#
# @return [Hash, Array] the parsed JSON of API response
def call_api_method(api_method, params={})
# method_data = API_METHODS[api_method]
# ensure_api_key if method_data[:scope] == "protected"
params.merge!(:method => api_method)
response = RestClient.get BASE_URL, { :params => DEFAULT_PARAMS.merge(params) }
if response.code < 200 || response.code > 299
raise "non 2xx reponse: status=#{response.code}"
end
parsed = Oj.load(response)
if parsed.is_a?(Array)
parsed.each do |obj|
obj["setlistdata"] = Setlist.new(obj["setlistdata"]) if obj.has_key?("setlistdata")
end
elsif parsed.is_a?(Hash)
parsed["setlistdata"] = Setlist.new(parsed["setlistdata"]) if parsed.has_key?("setlistdata")
end
return parsed
end
# Override method_missing to provide mapping of Ruby methods to API method names.
#
# @api private
def method_missing(name, *args)
api_method = get_api_method(name)
if api_method
call_api_method(api_method, *args)
else
super(name, *args)
end
end
# @api private
# @param rb_method_name [Symbol] the Ruby method name
# @return [String] the api method name
def get_api_method(rb_method_name)
api_method_name = rb_method_name.to_s.gsub("_", ".")
unless api_method_name.match(/\Apnet\./)
api_method_name = 'pnet.' + api_method_name
end
return api_method_name
# if API_METHODS.has_key?(api_method_name)
# return api_method_name
# else
# return nil
# end
end
# def ensure_api_key
# raise "api key is required" if DEFAULT_PARAMS[:apikey].nil?
# end
end
clean up comments
# encoding: utf-8
require 'restclient'
require 'oj'
require 'nokogiri'
require 'phish_dot_net_client/version'
require 'phish_dot_net_client/set'
require 'phish_dot_net_client/setlist'
require 'phish_dot_net_client/song'
require 'phish_dot_net_client/song_transition'
# This module encapsulates interaction with the Phish.net API. It allows you to
# call any API method and will parse "setlistdata" fields in the JSON responses.
module PhishDotNetClient
extend self
# The possible API methods. Generated from +rake parse_method_docs+.
API_METHODS =
{
"pnet.api.authkey.get" => { :scope => "protected" },
"pnet.api.authorize" => { :scope => "protected" },
"pnet.api.authorized.check" => { :scope => "protected" },
"pnet.api.isAuthorized" => { :scope => "protected" },
"pnet.artists.get" => { :scope => "public" },
"pnet.blog.get" => { :scope => "public" },
"pnet.blog.item.get" => { :scope => "public" },
"pnet.collections.get" => { :scope => "protected" },
"pnet.collections.query" => { :scope => "protected" },
"pnet.forum.canpost" => { :scope => "protected" },
"pnet.forum.get" => { :scope => "public" },
"pnet.forum.thread.get" => { :scope => "protected" },
"pnet.forum.thread.new" => { :scope => "protected" },
"pnet.forum.thread.respond" => { :scope => "protected" },
"pnet.news.comments.get" => { :scope => "public" },
"pnet.news.get" => { :scope => "public" },
"pnet.reviews.query" => { :scope => "protected" },
"pnet.reviews.recent" => { :scope => "public" },
"pnet.shows.links.get" => { :scope => "protected" },
"pnet.shows.query" => { :scope => "protected" },
"pnet.shows.setlists.get" => { :scope => "protected" },
"pnet.shows.setlists.latest" => { :scope => "public" },
"pnet.shows.setlists.random" => { :scope => "public" },
"pnet.shows.setlists.recent" => { :scope => "public" },
"pnet.shows.setlists.tiph" => { :scope => "public" },
"pnet.shows.upcoming" => { :scope => "public" },
"pnet.user.get" => { :scope => "protected" },
"pnet.user.myshows.add" => { :scope => "protected" },
"pnet.user.myshows.get" => { :scope => "protected" },
"pnet.user.myshows.remove" => { :scope => "protected" },
"pnet.user.register" => { :scope => "protected" },
"pnet.user.shows.rate" => { :scope => "protected" },
"pnet.user.uid.get" => { :scope => "protected" },
"pnet.user.username.check" => { :scope => "protected" }
}
# The base URL for API calls
BASE_URL = "https://api.phish.net/api.js"
# "https://api.phish.net/api.js?api=2.0&method=pnet.shows.query&format=json&apikey=XXX&year=2011"
# Default API parameters
DEFAULT_PARAMS = { api: "2.0",
format: "json" }
# Set the apikey. The "private api key" from your Phish.net account should be
# used.
#
# @param private_api_key [String] the apikey
# @return [void]
def apikey=(private_api_key)
DEFAULT_PARAMS.merge!(:apikey => private_api_key)
end
# Calls pnet.api.authorize with the specified username and password, then stores
# the username and returned authkey if the call was successful. The password is
# not stored.
#
# @param username [String] the username
# @param password [String] the password
#
# @return [void]
def authorize(username, password)
resp = call_api_method("pnet.api.authorize", :username => username, :passwd => password)
if resp['success'] == 1 && resp.has_key?('authkey')
DEFAULT_PARAMS.merge!(:username => username, :authkey => resp['authkey'])
end
end
# Clears the stored API authentication parameters (apikey, username, authkey)
#
# @return [void]
def clear_auth
[:apikey, :username, :authkey].each { |key| DEFAULT_PARAMS.delete(key) }
end
# @api private
#
# Calls the specified Phish.net api method.
#
# @param api_method [String] the method to call
# @param params [Hash] the url parameters for the api call
#
# @raise [RuntimeError] if the http response status is a 2xx
#
# @return [Hash, Array] the parsed JSON of API response
def call_api_method(api_method, params={})
# method_data = API_METHODS[api_method]
# ensure_api_key if method_data[:scope] == "protected"
params.merge!(:method => api_method)
response = RestClient.get BASE_URL, { :params => DEFAULT_PARAMS.merge(params) }
if response.code < 200 || response.code > 299
raise "non 2xx reponse: status=#{response.code}"
end
parsed = Oj.load(response)
if parsed.is_a?(Array)
parsed.each do |obj|
obj["setlistdata"] = Setlist.new(obj["setlistdata"]) if obj.has_key?("setlistdata")
end
elsif parsed.is_a?(Hash)
parsed["setlistdata"] = Setlist.new(parsed["setlistdata"]) if parsed.has_key?("setlistdata")
end
return parsed
end
# Override method_missing to provide mapping of Ruby methods to API method names.
#
# @api private
def method_missing(name, *args)
api_method = get_api_method(name)
if api_method
call_api_method(api_method, *args)
else
super(name, *args)
end
end
# @api private
# @param rb_method_name [Symbol] the Ruby method name
# @return [String] the api method name
def get_api_method(rb_method_name)
api_method_name = rb_method_name.to_s.gsub("_", ".")
unless api_method_name.match(/\Apnet\./)
api_method_name = 'pnet.' + api_method_name
end
return api_method_name
# if API_METHODS.has_key?(api_method_name)
# return api_method_name
# else
# return nil
# end
end
# def ensure_api_key
# raise "api key is required" if DEFAULT_PARAMS[:apikey].nil?
# end
end
|
require "yaml"
require "hashie"
require "diffy"
require "pathname"
module PipeFitter
class Pipeline
Diffy::Diff.default_options.merge!(diff: "-u", include_diff_info: true)
def self.create(definition_from_api, description_from_api)
new(PipelineObjects.create(definition_from_api[:pipeline_objects]),
ParameterObjects.create(definition_from_api[:parameter_objects]),
ParameterValues.create(definition_from_api[:parameter_values]),
PipelineDescription.create(description_from_api))
end
def self.load_yaml(filename)
filepath = Pathname.new(filename)
yml = YamlLoader.new.load(filepath)
new(PipelineObjects.new(yml["pipeline_objects"]),
ParameterObjects.new(yml["parameter_objects"]),
ParameterValues.new(yml["parameter_values"]),
PipelineDescription.new(yml["pipeline_description"]))
end
def initialize(pipeline_objects = nil, parameter_objects = nil,
parameter_values = nil, pipeline_description = nil)
@pipeline_objects = pipeline_objects
@parameter_objects = parameter_objects
@parameter_values = parameter_values
@pipeline_description = pipeline_description
end
def tags
@pipeline_description.tags
end
def to_yaml
{
"pipeline_description" => @pipeline_description.to_objs,
"pipeline_objects" => @pipeline_objects.to_objs,
"parameter_objects" => @parameter_objects.to_objs,
"parameter_values" => @parameter_values.to_objs,
}.to_yaml
end
def create_opts
@pipeline_description.to_api_opts
end
def put_definition_opts(pipeline_id)
{
pipeline_id: pipeline_id,
pipeline_objects: @pipeline_objects.to_api_opts,
parameter_objects: @parameter_objects.to_api_opts,
parameter_values: @parameter_values.to_api_opts,
}
end
def add_tags_opts(pipeline_id)
{ pipeline_id: pipeline_id, tags: @pipeline_description.tags_opts }
end
def remove_tags_opts(pipeline_id)
{ pipeline_id: pipeline_id, tag_keys: @pipeline_description.tag_keys }
end
def activate_opts(pipeline_id, start_timestamp)
opts = {
pipeline_id: pipeline_id,
start_timestamp: start_timestamp,
}
opts[:parameter_values] = @parameter_values.to_api_opts if @parameter_values
opts
end
def diff(other, format = nil)
Diffy::Diff.new(self.to_yaml, other.to_yaml).to_s(format)
end
class PipelineBaseObjects
def initialize(objs)
@objs = case objs
when Array then objs.map { |obj| symbolize_keys(obj) }
else symbolize_keys(objs) || {}
end
end
def to_objs
case @objs
when Array then @objs.map { |obj| stringify_keys(obj) }
else stringify_keys(@objs)
end
end
private
def stringify_keys(val)
modify_keys_recursively(val, __method__)
end
def symbolize_keys(val)
modify_keys_recursively(val, __method__)
end
def modify_keys_recursively(val, method)
return val unless val.is_a?(Hash)
h = Hashie.send(method, val.to_h)
h.each do |k, v|
case v
when Array then h[k].map! { |e| self.send(method, e) }
when Hash then h[k] = self.send(method, v)
end
end
h
end
private_class_method def self.update_hash(base, key, value)
if base.key?(key)
base[key] = [base[key]] unless base[key].is_a?(Array)
base[key] << value
else
base[key] = value
end
base
end
def split_object(obj, skip_keys)
res = []
obj.each do |k, v|
next if skip_keys.include?(k)
(v.is_a?(Array) ? v : [v]).each do |vv|
if vv.is_a?(Hash) && vv.key?(:ref)
res << { key: k, ref_value: vv[:ref] }
else
res << { key: k, string_value: vv }
end
end
end
res
end
end
class PipelineObjects < PipelineBaseObjects
def self.create(api_res)
objs = (api_res || []).map(&:to_h).sort_by { |obj| obj[:id] }.map do |obj|
base = { id: obj[:id], name: obj[:name] }
fields = obj[:fields].inject({}) do |a, e|
update_hash(a, e[:key].to_sym, e[:string_value] || { ref: e[:ref_value] })
end
base.merge(fields.sort_by { |k, _| k }.to_h)
end
new(objs)
end
def to_api_opts
@objs.map do |obj|
{ id: obj[:id], name: obj[:name], fields: split_object(obj, %i(id name)) }
end
end
end
class ParameterObjects < PipelineBaseObjects
def self.create(api_res)
objs = (api_res || []).map(&:to_h).sort_by { |obj| obj[:id] }.map do |obj|
base = { id: obj[:id] }
obj[:attributes].sort_by { |a| a[:key] }.inject(base) do |a, e|
update_hash(a, e[:key].to_sym, e[:string_value])
end
end
new(objs)
end
def to_api_opts
@objs.map do |obj|
{ id: obj[:id], attributes: split_object(obj, %i(id)) }
end
end
end
class ParameterValues < PipelineBaseObjects
def self.create(api_res)
objs = (api_res || []).sort_by { |obj| [obj[:id], obj[:string_value]] }.map do |obj|
{ obj[:id].to_sym => obj[:string_value] }
end
new(objs)
end
def to_api_opts
@objs.map do |e|
e.map do |k, v|
{ id: k, string_value: v }
end
end.flatten
end
end
# Pipeline metadata (name, description, tags, uniqueId) from describe_pipelines.
class PipelineDescription < PipelineBaseObjects
  # Build from the API's describe_pipelines entry: copy the scalar
  # attributes, normalize tags into single-entry hashes, and merge in every
  # field (string-valued or ref-valued).
  def self.create(api_res)
    objs = {
      pipeline_id: api_res[:pipeline_id],
      name: api_res[:name],
      description: api_res[:description],
      tags: (api_res[:tags] || []).map { |e| { e[:key].to_sym => e[:value] } },
    }
    (api_res[:fields] || []).inject(objs) do |a, e|
      a.update(e[:key].to_sym => (e[:string_value] || { ref: e[:ref_value] }))
    end
    new(objs)
  end

  # Keys exposed in YAML dumps and create_pipeline options.
  DESCRIPTION_KEYS = %i(name description tags uniqueId).freeze

  def to_objs
    stringify_keys(@objs.select { |k, _| DESCRIPTION_KEYS.include?(k) })
  end

  # Options for create_pipeline: uniqueId is renamed to unique_id and tags
  # are expanded into {key:, value:} hashes.
  def to_api_opts
    @objs.select { |k, _| DESCRIPTION_KEYS.include?(k) }.tap do |obj|
      obj[:unique_id] = obj.delete(:uniqueId)
      obj[:tags] = obj[:tags].map do |tag|
        tag.map { |k, v| { key: k, value: v } }
      end.flatten
    end
  end

  def tags
    @objs[:tags]
  end

  # Tag options for the add_tags API call.
  def tags_opts
    @objs[:tags].map { |e| e.map { |k, v| { key: k, value: v } } }.flatten
  end

  # Tag key names for the remove_tags API call.
  # BUG FIX: was `map(&keys)`, which raises NameError (undefined local
  # variable `keys`); the intended symbol-to-proc form is `map(&:keys)`.
  def tag_keys
    @objs[:tags].map(&:keys).flatten
  end
end
end
end
Fix bug in tag updating: `tag_keys` used `map(&keys)` instead of `map(&:keys)`
require "yaml"
require "hashie"
require "diffy"
require "pathname"
module PipeFitter
  # In-memory representation of an AWS Data Pipeline: its object
  # definitions, parameter objects/values, and description (name,
  # description, tags). Round-trips between AWS API option hashes and a
  # YAML file, and supports diffing two pipelines via Diffy.
  class Pipeline
    # Unified diff output with file headers for Pipeline#diff.
    Diffy::Diff.default_options.merge!(diff: "-u", include_diff_info: true)

    # Build a Pipeline from API responses (get_pipeline_definition result
    # plus the describe_pipelines entry).
    def self.create(definition_from_api, description_from_api)
      new(PipelineObjects.create(definition_from_api[:pipeline_objects]),
          ParameterObjects.create(definition_from_api[:parameter_objects]),
          ParameterValues.create(definition_from_api[:parameter_values]),
          PipelineDescription.create(description_from_api))
    end

    # Build a Pipeline from a YAML definition file (via YamlLoader, which
    # presumably handles ERB/includes — defined elsewhere in this gem).
    def self.load_yaml(filename)
      filepath = Pathname.new(filename)
      yml = YamlLoader.new.load(filepath)
      new(PipelineObjects.new(yml["pipeline_objects"]),
          ParameterObjects.new(yml["parameter_objects"]),
          ParameterValues.new(yml["parameter_values"]),
          PipelineDescription.new(yml["pipeline_description"]))
    end

    def initialize(pipeline_objects = nil, parameter_objects = nil,
                   parameter_values = nil, pipeline_description = nil)
      @pipeline_objects = pipeline_objects
      @parameter_objects = parameter_objects
      @parameter_values = parameter_values
      @pipeline_description = pipeline_description
    end

    def tags
      @pipeline_description.tags
    end

    # Canonical YAML serialization; also the basis for #diff.
    def to_yaml
      {
        "pipeline_description" => @pipeline_description.to_objs,
        "pipeline_objects" => @pipeline_objects.to_objs,
        "parameter_objects" => @parameter_objects.to_objs,
        "parameter_values" => @parameter_values.to_objs,
      }.to_yaml
    end

    # Options for the create_pipeline API call.
    def create_opts
      @pipeline_description.to_api_opts
    end

    # Options for the put_pipeline_definition API call.
    def put_definition_opts(pipeline_id)
      {
        pipeline_id: pipeline_id,
        pipeline_objects: @pipeline_objects.to_api_opts,
        parameter_objects: @parameter_objects.to_api_opts,
        parameter_values: @parameter_values.to_api_opts,
      }
    end

    # Options for the add_tags API call.
    def add_tags_opts(pipeline_id)
      { pipeline_id: pipeline_id, tags: @pipeline_description.tags_opts }
    end

    # Options for the remove_tags API call.
    def remove_tags_opts(pipeline_id)
      { pipeline_id: pipeline_id, tag_keys: @pipeline_description.tag_keys }
    end

    # Options for the activate_pipeline API call; parameter values are
    # included only when present.
    def activate_opts(pipeline_id, start_timestamp)
      opts = {
        pipeline_id: pipeline_id,
        start_timestamp: start_timestamp,
      }
      opts[:parameter_values] = @parameter_values.to_api_opts if @parameter_values
      opts
    end

    # Unified diff of two pipelines' YAML representations.
    def diff(other, format = nil)
      Diffy::Diff.new(self.to_yaml, other.to_yaml).to_s(format)
    end

    # Shared behavior for the typed collections below: recursive key
    # symbolization/stringification and API field (un)flattening.
    class PipelineBaseObjects
      def initialize(objs)
        @objs = case objs
                when Array then objs.map { |obj| symbolize_keys(obj) }
                else symbolize_keys(objs) || {}
                end
      end

      def to_objs
        case @objs
        when Array then @objs.map { |obj| stringify_keys(obj) }
        else stringify_keys(@objs)
        end
      end

      private

      def stringify_keys(val)
        modify_keys_recursively(val, __method__)
      end

      def symbolize_keys(val)
        modify_keys_recursively(val, __method__)
      end

      # Apply Hashie's stringify_keys/symbolize_keys to a hash and,
      # recursively, to nested hashes and arrays of hashes. Non-hash
      # values pass through untouched.
      def modify_keys_recursively(val, method)
        return val unless val.is_a?(Hash)
        h = Hashie.send(method, val.to_h)
        h.each do |k, v|
          case v
          when Array then h[k].map! { |e| self.send(method, e) }
          when Hash then h[k] = self.send(method, v)
          end
        end
        h
      end

      # Merge key/value into base; a repeated key collects its values into
      # an array instead of overwriting.
      private_class_method def self.update_hash(base, key, value)
        if base.key?(key)
          base[key] = [base[key]] unless base[key].is_a?(Array)
          base[key] << value
        else
          base[key] = value
        end
        base
      end

      # Inverse of update_hash: expand a normalized object into the API's
      # flat field list ({key:, ref_value:} for {ref: ...} hashes,
      # {key:, string_value:} otherwise), skipping the given keys.
      def split_object(obj, skip_keys)
        res = []
        obj.each do |k, v|
          next if skip_keys.include?(k)
          (v.is_a?(Array) ? v : [v]).each do |vv|
            if vv.is_a?(Hash) && vv.key?(:ref)
              res << { key: k, ref_value: vv[:ref] }
            else
              res << { key: k, string_value: vv }
            end
          end
        end
        res
      end
    end

    # Pipeline object definitions; see PipelineBaseObjects for shared logic.
    class PipelineObjects < PipelineBaseObjects
      # Sort objects by id and fold their field lists into hashes with
      # sorted keys for stable serialization.
      def self.create(api_res)
        objs = (api_res || []).map(&:to_h).sort_by { |obj| obj[:id] }.map do |obj|
          base = { id: obj[:id], name: obj[:name] }
          fields = obj[:fields].inject({}) do |a, e|
            update_hash(a, e[:key].to_sym, e[:string_value] || { ref: e[:ref_value] })
          end
          base.merge(fields.sort_by { |k, _| k }.to_h)
        end
        new(objs)
      end

      def to_api_opts
        @objs.map do |obj|
          { id: obj[:id], name: obj[:name], fields: split_object(obj, %i(id name)) }
        end
      end
    end

    # Parameter object definitions.
    class ParameterObjects < PipelineBaseObjects
      def self.create(api_res)
        objs = (api_res || []).map(&:to_h).sort_by { |obj| obj[:id] }.map do |obj|
          base = { id: obj[:id] }
          obj[:attributes].sort_by { |a| a[:key] }.inject(base) do |a, e|
            update_hash(a, e[:key].to_sym, e[:string_value])
          end
        end
        new(objs)
      end

      def to_api_opts
        @objs.map do |obj|
          { id: obj[:id], attributes: split_object(obj, %i(id)) }
        end
      end
    end

    # Parameter values; one single-entry hash per value so duplicate ids
    # are preserved.
    class ParameterValues < PipelineBaseObjects
      def self.create(api_res)
        objs = (api_res || []).sort_by { |obj| [obj[:id], obj[:string_value]] }.map do |obj|
          { obj[:id].to_sym => obj[:string_value] }
        end
        new(objs)
      end

      def to_api_opts
        @objs.map do |e|
          e.map do |k, v|
            { id: k, string_value: v }
          end
        end.flatten
      end
    end

    # Pipeline metadata (name, description, tags, uniqueId).
    class PipelineDescription < PipelineBaseObjects
      def self.create(api_res)
        objs = {
          pipeline_id: api_res[:pipeline_id],
          name: api_res[:name],
          description: api_res[:description],
          tags: (api_res[:tags] || []).map { |e| { e[:key].to_sym => e[:value] } },
        }
        (api_res[:fields] || []).inject(objs) do |a, e|
          a.update(e[:key].to_sym => (e[:string_value] || { ref: e[:ref_value] } ))
        end
        new(objs)
      end

      # Keys exposed in YAML dumps and create_pipeline options.
      DESCRIPTION_KEYS = %i(name description tags uniqueId).freeze

      def to_objs
        stringify_keys(@objs.select { |k, _| DESCRIPTION_KEYS.include?(k) })
      end

      # create_pipeline options: uniqueId renamed to unique_id, tags
      # expanded into {key:, value:} hashes.
      def to_api_opts
        @objs.select { |k, _| DESCRIPTION_KEYS.include?(k) }.tap do |obj|
          obj[:unique_id] = obj.delete(:uniqueId)
          obj[:tags] = obj[:tags].map do |tag|
            tag.map { |k, v| { key: k, value: v } }
          end.flatten
        end
      end

      def tags
        @objs[:tags]
      end

      # Tag options for the add_tags API call.
      def tags_opts
        @objs[:tags].map { |e| e.map { |k, v| { key: k, value: v } } }.flatten
      end

      # Tag key names for the remove_tags API call.
      def tag_keys
        @objs[:tags].map(&:keys).flatten
      end
    end
  end
end
|
# Gem namespace; holds only the release version constant.
module PodfileInfo
  # Current gem version (semver string).
  VERSION = '0.0.1'
end
Update to version 0.0.2
# Gem namespace; holds only the release version constant.
module PodfileInfo
  # Current gem version (semver string).
  VERSION = '0.0.2'
end
|
#
# Copyright 2015, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'kitchen'
require 'kitchen-sync'
module PoiseBoiler
  # Helpers for Test-Kitchen and .kitchen.yml configuration.
  #
  # @since 1.0.0
  module Kitchen
    extend self
    # Shorthand names for kitchen platforms.
    #
    # BUG FIX: the 'all' entry used %{unix windows}, which is a String
    # literal ("unix windows"), not a word array, so the alias could never
    # expand. It must be %w{unix windows} like every other entry.
    #
    # @see PoiseBoiler::Kitchen.kitchen
    PLATFORM_ALIASES = {
      'ubuntu' => %w{ubuntu-12.04 ubuntu-14.04},
      'rhel' => %w{centos-6 centos-7},
      'centos' => %w{rhel},
      'linux' => %w{ubuntu rhel},
      'unix' => %w{linux freebsd},
      'all' => %w{unix windows},
    }
    # Return a YAML string suitable for inclusion in a .kitchen.yml config. This
    # will include the standard Poise/Halite boilerplate and some default values.
    #
    # @param platforms [String, Array<String>] Name(s) of platforms to use by default.
    # @param root [String, nil] Directory containing the .kitchen.yml; inferred
    #   from the caller when nil.
    # @return [String] YAML fragment (leading document marker stripped).
    # @see PoiseBoiler::Kitchen::PLATFORM_ALIASES
    # @example .kitchen.yml
    #   #<% require 'poise_boiler' %>
    #   <%= PoiseBoiler.kitchen %>
    def kitchen(platforms: 'ubuntu-14.04', root: nil)
      # Figure out the directory that contains the kitchen.yml.
      root ||= if caller.find {|line| !line.start_with?(File.expand_path('../..', __FILE__)) } =~ /^(.*?):\d+:in/
        File.expand_path('..', $1)
      else
        # ¯\_(ツ)_/¯
        Dir.pwd
      end
      # SPEC_BLOCK_CI is used to force non-locking behavior inside tests.
      chef_version = ENV['CHEF_VERSION'] || if ENV['SPEC_BLOCK_CI'] != 'true'
        # If there isn't a specific override, lock TK to use the same version of Chef as the Gemfile.
        require 'chef/version'
        Chef::VERSION
      end
      install_arguments = if ENV['POISE_MASTER_BUILD']
        # Force it to use any version down below.
        chef_version = nil
        # Use today's date as an ignored param to force the layer to rebuild.
        # NOTE(review): Date.today assumes 'date' is already loaded (chef
        # pulls it in) — confirm if this path runs without chef required.
        " -n -- #{Date.today}"
      elsif chef_version
        " -v #{chef_version}"
      else
        ''
      end
      docker_enabled = File.exist?(File.expand_path('test/docker/docker.key', root))
      {
        'chef_versions' => %w{12},
        'driver' => {
          'name' => (docker_enabled ? 'docker' : ENV['TRAVIS'] == 'true' ? 'dummy' : 'vagrant'),
          'require_chef_omnibus' => chef_version || true,
          'dockerfile' => File.expand_path('../kitchen/Dockerfile.erb', __FILE__),
          # No password for securiteeeee.
          'password' => nil,
          # Our docker settings.
          'binary' => (ENV['TRAVIS'] == 'true' ? './' : '') + 'docker',
          'socket' => 'tcp://docker.poise.io:443',
          'tls_verify' => 'true',
          'tls_cacert' => 'test/docker/docker.ca',
          'tls_cert' => 'test/docker/docker.pem',
          'tls_key' => 'test/docker/docker.key',
          # Cache some stuff in the Docker image.
          'provision_command' => [
            # Run some installs at provision so they are cached in the image.
            # Install net-tools for netstat which is used by serverspec, and
            # iproute for ss (only used on EL7).
            "test ! -f /etc/debian_version || apt-get install -y net-tools",
            "test ! -f /etc/redhat-release || yum -y install net-tools iproute",
            # Make sure the hostname utilitiy is installed on CentOS 7. The
            # ||true is for EL6 which has no hostname package. Sigh.
            "test ! -f /etc/redhat-release || yum -y install hostname || true",
            # Install Chef (with the correct verison).
            "curl -L https://chef.io/chef/install.sh | bash -s --#{install_arguments}",
            # Install some kitchen-related gems. Normally installed during the verify step but that is idempotent.
            "env GEM_HOME=/tmp/verifier/gems GEM_PATH=/tmp/verifier/gems GEM_CACHE=/tmp/verifier/gems/cache /opt/chef/embedded/bin/gem install --no-rdoc --no-ri thor busser busser-serverspec serverspec bundler",
            # Fix directory permissions.
            "chown -R kitchen /tmp/verifier",
          ],
        },
        'transport' => {
          'name' => 'sftp',
          'ssh_key' => docker_enabled ? File.expand_path('.kitchen/docker_id_rsa', root) : nil,
        },
        'platforms' => expand_kitchen_platforms(platforms).map {|p| platform_definition(p) },
      }.to_yaml.gsub(/---[ \n]/, '')
    end

    private

    # Expand aliases from PLATFORM_ALIASES, repeating until a fixed point
    # so nested aliases (all -> unix -> linux -> ubuntu) fully resolve.
    def expand_kitchen_platforms(platforms)
      platforms = Array(platforms)
      last_platforms = []
      while platforms != last_platforms
        last_platforms = platforms
        platforms = platforms.map {|p| PLATFORM_ALIASES[p] || p}.flatten.uniq
      end
      platforms
    end

    # Full platform entry for the 'platforms' section of .kitchen.yml.
    def platform_definition(name)
      {
        'name' => name,
        'run_list' => platform_run_list(name),
        'driver_config' => platform_driver(name),
      }
    end

    # Return the platform-level run list for a given platform.
    #
    # @param platform [String] Platform name.
    # @return [Array<String>]
    def platform_run_list(platform)
      if platform.start_with?('debian') || platform.start_with?('ubuntu')
        %w{apt}
      else
        []
      end
    end

    # Per-platform driver overrides; FreeBSD uses a dedicated docker host.
    def platform_driver(platform)
      if platform.start_with?('freebsd')
        {
          'binary' => (ENV['TRAVIS'] == 'true' ? './' : '') + 'docker-1.7.1',
          'image' => 'lexaguskov/freebsd',
          'socket' => ENV['POISE_DOCKER_FREEBSD'] || 'tcp://dockerbsd.poise.io:443',
        }
      else
        {
          'binary' => (ENV['TRAVIS'] == 'true' ? './' : '') + 'docker',
          'socket' => ENV['POISE_DOCKER_LINUX'] || 'tcp://docker.poise.io:443',
        }
      end
    end
  end
end
Proxy poise_debug settings into the VM so I can get vomitdebug more easily.
#
# Copyright 2015, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'kitchen'
require 'kitchen-sync'
module PoiseBoiler
  # Helpers for Test-Kitchen and .kitchen.yml configuration.
  #
  # @since 1.0.0
  module Kitchen
    extend self
    # Shorthand names for kitchen platforms.
    #
    # BUG FIX: the 'all' entry used %{unix windows}, which is a String
    # literal ("unix windows"), not a word array, so the alias could never
    # expand. It must be %w{unix windows} like every other entry.
    #
    # @see PoiseBoiler::Kitchen.kitchen
    PLATFORM_ALIASES = {
      'ubuntu' => %w{ubuntu-12.04 ubuntu-14.04},
      'rhel' => %w{centos-6 centos-7},
      'centos' => %w{rhel},
      'linux' => %w{ubuntu rhel},
      'unix' => %w{linux freebsd},
      'all' => %w{unix windows},
    }
    # Return a YAML string suitable for inclusion in a .kitchen.yml config. This
    # will include the standard Poise/Halite boilerplate and some default values.
    #
    # @param platforms [String, Array<String>] Name(s) of platforms to use by default.
    # @param root [String, nil] Directory containing the .kitchen.yml; inferred
    #   from the caller when nil.
    # @return [String] YAML fragment (leading document marker stripped).
    # @see PoiseBoiler::Kitchen::PLATFORM_ALIASES
    # @example .kitchen.yml
    #   #<% require 'poise_boiler' %>
    #   <%= PoiseBoiler.kitchen %>
    def kitchen(platforms: 'ubuntu-14.04', root: nil)
      # Figure out the directory that contains the kitchen.yml.
      root ||= if caller.find {|line| !line.start_with?(File.expand_path('../..', __FILE__)) } =~ /^(.*?):\d+:in/
        File.expand_path('..', $1)
      else
        # ¯\_(ツ)_/¯
        Dir.pwd
      end
      # SPEC_BLOCK_CI is used to force non-locking behavior inside tests.
      chef_version = ENV['CHEF_VERSION'] || if ENV['SPEC_BLOCK_CI'] != 'true'
        # If there isn't a specific override, lock TK to use the same version of Chef as the Gemfile.
        require 'chef/version'
        Chef::VERSION
      end
      install_arguments = if ENV['POISE_MASTER_BUILD']
        # Force it to use any version down below.
        chef_version = nil
        # Use today's date as an ignored param to force the layer to rebuild.
        # NOTE(review): Date.today assumes 'date' is already loaded (chef
        # pulls it in) — confirm if this path runs without chef required.
        " -n -- #{Date.today}"
      elsif chef_version
        " -v #{chef_version}"
      else
        ''
      end
      docker_enabled = File.exist?(File.expand_path('test/docker/docker.key', root))
      {
        'chef_versions' => %w{12},
        'driver' => {
          'name' => (docker_enabled ? 'docker' : ENV['TRAVIS'] == 'true' ? 'dummy' : 'vagrant'),
          'require_chef_omnibus' => chef_version || true,
          'dockerfile' => File.expand_path('../kitchen/Dockerfile.erb', __FILE__),
          # No password for securiteeeee.
          'password' => nil,
          # Our docker settings.
          'binary' => (ENV['TRAVIS'] == 'true' ? './' : '') + 'docker',
          'socket' => 'tcp://docker.poise.io:443',
          'tls_verify' => 'true',
          'tls_cacert' => 'test/docker/docker.ca',
          'tls_cert' => 'test/docker/docker.pem',
          'tls_key' => 'test/docker/docker.key',
          # Cache some stuff in the Docker image.
          'provision_command' => [
            # Run some installs at provision so they are cached in the image.
            # Install net-tools for netstat which is used by serverspec, and
            # iproute for ss (only used on EL7).
            "test ! -f /etc/debian_version || apt-get install -y net-tools",
            "test ! -f /etc/redhat-release || yum -y install net-tools iproute",
            # Make sure the hostname utilitiy is installed on CentOS 7. The
            # ||true is for EL6 which has no hostname package. Sigh.
            "test ! -f /etc/redhat-release || yum -y install hostname || true",
            # Install Chef (with the correct verison).
            "curl -L https://chef.io/chef/install.sh | bash -s --#{install_arguments}",
            # Install some kitchen-related gems. Normally installed during the verify step but that is idempotent.
            "env GEM_HOME=/tmp/verifier/gems GEM_PATH=/tmp/verifier/gems GEM_CACHE=/tmp/verifier/gems/cache /opt/chef/embedded/bin/gem install --no-rdoc --no-ri thor busser busser-serverspec serverspec bundler",
            # Fix directory permissions.
            "chown -R kitchen /tmp/verifier",
          ],
        },
        'transport' => {
          'name' => 'sftp',
          'ssh_key' => docker_enabled ? File.expand_path('.kitchen/docker_id_rsa', root) : nil,
        },
        'provisioner' => {
          'attributes' => {
            # Proxy poise_debug settings into the VM (any of POISE_DEBUG,
            # poise_debug, or DEBUG set to a non-'false' value enables it).
            'POISE_DEBUG' => !!((ENV['POISE_DEBUG'] && ENV['POISE_DEBUG'] != 'false') ||
                                (ENV['poise_debug'] && ENV['poise_debug'] != 'false') ||
                                (ENV['DEBUG'] && ENV['DEBUG'] != 'false')
                               ),
          },
        },
        'platforms' => expand_kitchen_platforms(platforms).map {|p| platform_definition(p) },
      }.to_yaml.gsub(/---[ \n]/, '')
    end

    private

    # Expand aliases from PLATFORM_ALIASES, repeating until a fixed point
    # so nested aliases (all -> unix -> linux -> ubuntu) fully resolve.
    def expand_kitchen_platforms(platforms)
      platforms = Array(platforms)
      last_platforms = []
      while platforms != last_platforms
        last_platforms = platforms
        platforms = platforms.map {|p| PLATFORM_ALIASES[p] || p}.flatten.uniq
      end
      platforms
    end

    # Full platform entry for the 'platforms' section of .kitchen.yml.
    def platform_definition(name)
      {
        'name' => name,
        'run_list' => platform_run_list(name),
        'driver_config' => platform_driver(name),
      }
    end

    # Return the platform-level run list for a given platform.
    #
    # @param platform [String] Platform name.
    # @return [Array<String>]
    def platform_run_list(platform)
      if platform.start_with?('debian') || platform.start_with?('ubuntu')
        %w{apt}
      else
        []
      end
    end

    # Per-platform driver overrides; FreeBSD uses a dedicated docker host.
    def platform_driver(platform)
      if platform.start_with?('freebsd')
        {
          'binary' => (ENV['TRAVIS'] == 'true' ? './' : '') + 'docker-1.7.1',
          'image' => 'lexaguskov/freebsd',
          'socket' => ENV['POISE_DOCKER_FREEBSD'] || 'tcp://dockerbsd.poise.io:443',
        }
      else
        {
          'binary' => (ENV['TRAVIS'] == 'true' ? './' : '') + 'docker',
          'socket' => ENV['POISE_DOCKER_LINUX'] || 'tcp://docker.poise.io:443',
        }
      end
    end
  end
end
|
module Posterous
  # HTTP connection glue: applies default request settings (base URI,
  # basic-auth header, failure handler) and turns the API's XML payloads
  # into attribute-bearing objects.
  class Connection
    include Typhoeus

    remote_defaults :base_uri => Posterous.base_uri,
                    :on_failure => lambda {|response| raise "Request failed with code: #{response.code}" },
                    :headers => {
                      :Authorization => "Basic #{Posterous.encoded_credentials}"
                    }

    # Parse every XML node named after this class out of the response body
    # and materialize one object per node, copying each child element's
    # text into the matching attribute writer (when one exists).
    def self.parse_nodes response
      node_name = to_s.split("::").last.downcase
      Nokogiri::HTML(response.body).css(node_name).map do |node|
        node.children.inject(new) do |record, child|
          writer = "#{child.name.to_sym}="
          record.send writer, child.text if record.respond_to? writer
          record
        end
      end
    end
  end
end
Refactoring: extract `nodes_for` and `class_name` helpers from `parse_nodes`
module Posterous
  # HTTP connection glue: applies default request settings (base URI,
  # basic-auth header, failure handler) and turns the API's XML payloads
  # into attribute-bearing objects.
  class Connection
    include Typhoeus

    remote_defaults :base_uri => Posterous.base_uri,
                    :on_failure => lambda {|response| raise "Request failed with code: #{response.code}" },
                    :headers => {
                      :Authorization => "Basic #{Posterous.encoded_credentials}"
                    }

    # Materialize one object per matching XML node, copying each child
    # element's text into the corresponding attribute writer when defined.
    def self.parse_nodes response
      nodes_for(response).inject([]) do |records, node|
        records << node.children.inject(new) do |record, child|
          returning record do
            writer = "#{child.name.to_sym}="
            record.send writer, child.text if record.respond_to? writer
          end
        end
      end
    end

    # All nodes in the response body whose tag matches this class's name.
    def self.nodes_for response
      Nokogiri::HTML(response.body).css(class_name.downcase)
    end

    # Demodulized class name, memoized per class.
    def self.class_name
      @class_name ||= to_s.split("::").last
    end
  end
end
end |
require "postgres_to_redshift/version"
require 'pg'
require 'uri'
require 'aws-sdk'
require 'zlib'
require 'stringio'
require "postgres_to_redshift/table"
require "postgres_to_redshift/column"
require 'dotenv'
Dotenv.load
# Copies tables from a source PostgreSQL database to Redshift by dumping
# them as gzipped pipe-separated files to S3 and COPYing them in.
class PostgresToRedshift
  # All settings come from the environment (loaded via dotenv above).
  CONFIG = {
    s3_bucket: ENV['S3_DATABASE_EXPORT_BUCKET'],
    s3_key: ENV['S3_DATABASE_EXPORT_KEY'],
    s3_id: ENV['S3_DATABASE_EXPORT_ID'],
    schema: ENV['TARGET_SCHEMA'],
    source_uri: ENV['POSTGRES_TO_REDSHIFT_SOURCE_URI'],
    tables_to_export: ENV['TABLES_TO_EXPORT'],
    target_uri: ENV['POSTGRES_TO_REDSHIFT_TARGET_URI'],
  }

  def initialize
  end

  # Export then import every eligible source table.
  def run
    target_connection.exec("CREATE SCHEMA IF NOT EXISTS #{target_schema}")
    tables.each do |table|
      target_connection.exec("CREATE TABLE IF NOT EXISTS #{target_schema}.#{table.target_table_name} (#{table.columns_for_create})")
      export_table(table)
      import_table(table)
    end
  end

  private

  def target_schema
    @target_schema ||= CONFIG[:schema] || 'public'
  end

  # Read-only session guards against accidental writes to the source DB.
  def source_connection
    if @source_connection.nil?
      @source_connection ||= PG::Connection.new(source_connection_params)
      @source_connection.exec('SET SESSION CHARACTERISTICS AS TRANSACTION READ ONLY;')
    end
    @source_connection
  end

  def target_connection
    @target_connection ||= PG::Connection.new(target_connection_params)
  end

  def source_connection_params
    @source_uri ||= URI.parse(CONFIG[:source_uri])
    uri_to_params(@source_uri)
  end

  def target_connection_params
    @target_uri ||= URI.parse(CONFIG[:target_uri])
    uri_to_params(@target_uri)
  end

  # Translate a postgres:// URI into PG::Connection keyword params.
  def uri_to_params(uri)
    {
      host: uri.host,
      port: uri.port,
      user: uri.user,
      password: uri.password,
      dbname: uri.path[1..-1]
    }
  end

  # A table is exported when it is not a pg_* system table and either no
  # whitelist is configured or the table is on it.
  def export_table?(table)
    @tables_to_export ||= CONFIG[:tables_to_export].nil? ? [] : CONFIG[:tables_to_export].split(',')
    return false if table.name =~ /^pg_/
    @tables_to_export.empty? || @tables_to_export.include?(table.name)
  end

  # Eligible tables/views from the source's public schema. Names are
  # interpolated into SQL below; they originate from information_schema,
  # not user input.
  def tables
    source_connection.exec("SELECT * FROM information_schema.tables WHERE table_schema = 'public' AND table_type in ('BASE TABLE', 'VIEW')").map do |table_attributes|
      table = Table.new(attributes: table_attributes)
      next unless export_table?(table)
      table.columns = column_definitions(table)
      table
    end.compact
  end

  def column_definitions(table)
    source_connection.exec("SELECT * FROM information_schema.columns WHERE table_schema='public' AND table_name='#{table.name}' order by ordinal_position")
  end

  def s3
    @s3 ||= AWS::S3.new(access_key_id: CONFIG[:s3_id], secret_access_key: CONFIG[:s3_key])
  end

  def bucket
    @bucket ||= s3.buckets[CONFIG[:s3_bucket]]
  end

  # Stream the table through COPY into an in-memory gzip buffer, then
  # upload it to S3.
  def export_table(table)
    buffer = StringIO.new
    zip = Zlib::GzipWriter.new(buffer)
    puts "Downloading #{table}"
    copy_command = "COPY (SELECT #{table.columns_for_copy} FROM #{table.name}) TO STDOUT WITH DELIMITER '|'"
    source_connection.copy_data(copy_command) do
      while row = source_connection.get_copy_data
        zip.write(row)
      end
    end
    zip.finish
    buffer.rewind
    upload_table(table, buffer)
  end

  def upload_table(table, buffer)
    puts "Uploading #{table.target_table_name}"
    bucket.objects["export/#{table.target_table_name}.psv.gz"].delete
    bucket.objects["export/#{table.target_table_name}.psv.gz"].write(buffer, acl: :authenticated_read)
  end

  # COPY the S3 dump into a temp table, then atomically swap it in.
  # CONSISTENCY FIX: the COPY credentials previously read ENV directly;
  # use CONFIG like every other S3 access in this class.
  def import_table(table)
    puts "Importing #{table.target_table_name}"
    target_connection.exec("DROP TABLE IF EXISTS #{target_schema}.#{table.target_temp_table_name}")
    target_connection.exec("CREATE TABLE #{target_schema}.#{table.target_temp_table_name} (#{table.columns_for_create})")
    target_connection.exec("COPY #{target_schema}.#{table.target_temp_table_name} FROM 's3://#{CONFIG[:s3_bucket]}/export/#{table.target_table_name}.psv.gz' CREDENTIALS 'aws_access_key_id=#{CONFIG[:s3_id]};aws_secret_access_key=#{CONFIG[:s3_key]}' GZIP TRUNCATECOLUMNS ESCAPE DELIMITER as '|';")
    target_connection.exec("BEGIN;")
    target_connection.exec("DROP TABLE IF EXISTS #{target_schema}.#{table.target_table_name}")
    target_connection.exec("ALTER TABLE #{target_schema}.#{table.target_temp_table_name} RENAME TO #{table.target_table_name}")
    target_connection.exec("COMMIT;")
  end
end
Allow passing of params
require "postgres_to_redshift/version"
require 'pg'
require 'uri'
require 'aws-sdk'
require 'zlib'
require 'stringio'
require "postgres_to_redshift/table"
require "postgres_to_redshift/column"
require 'dotenv'
Dotenv.load
# Copies tables from a source PostgreSQL database to Redshift by dumping
# them as gzipped pipe-separated files to S3 and COPYing them in.
# Settings default to the environment and can be overridden per instance.
class PostgresToRedshift
  attr_reader :config

  # Environment-derived defaults (loaded via dotenv above).
  ENV_VARIABLES = {
    s3_bucket: ENV['S3_DATABASE_EXPORT_BUCKET'],
    s3_key: ENV['S3_DATABASE_EXPORT_KEY'],
    s3_id: ENV['S3_DATABASE_EXPORT_ID'],
    schema: ENV['TARGET_SCHEMA'],
    source_uri: ENV['POSTGRES_TO_REDSHIFT_SOURCE_URI'],
    tables_to_export: ENV['TABLES_TO_EXPORT'],
    target_uri: ENV['POSTGRES_TO_REDSHIFT_TARGET_URI'],
  }

  # @param params [Hash] overrides for any ENV_VARIABLES key.
  def initialize(params = {})
    @config = ENV_VARIABLES.merge(params)
  end

  # Export then import every eligible source table.
  def run
    target_connection.exec("CREATE SCHEMA IF NOT EXISTS #{target_schema}")
    tables.each do |table|
      target_connection.exec("CREATE TABLE IF NOT EXISTS #{target_schema}.#{table.target_table_name} (#{table.columns_for_create})")
      export_table(table)
      import_table(table)
    end
  end

  private

  def target_schema
    @target_schema ||= config[:schema] || 'public'
  end

  # Read-only session guards against accidental writes to the source DB.
  def source_connection
    if @source_connection.nil?
      @source_connection ||= PG::Connection.new(source_connection_params)
      @source_connection.exec('SET SESSION CHARACTERISTICS AS TRANSACTION READ ONLY;')
    end
    @source_connection
  end

  def target_connection
    @target_connection ||= PG::Connection.new(target_connection_params)
  end

  def source_connection_params
    @source_uri ||= URI.parse(config[:source_uri])
    uri_to_params(@source_uri)
  end

  def target_connection_params
    @target_uri ||= URI.parse(config[:target_uri])
    uri_to_params(@target_uri)
  end

  # Translate a postgres:// URI into PG::Connection keyword params.
  def uri_to_params(uri)
    {
      host: uri.host,
      port: uri.port,
      user: uri.user,
      password: uri.password,
      dbname: uri.path[1..-1]
    }
  end

  # A table is exported when it is not a pg_* system table and either no
  # whitelist is configured or the table is on it.
  def export_table?(table)
    @tables_to_export ||= config[:tables_to_export].nil? ? [] : config[:tables_to_export].split(',')
    return false if table.name =~ /^pg_/
    @tables_to_export.empty? || @tables_to_export.include?(table.name)
  end

  # Eligible tables/views from the source's public schema. Names are
  # interpolated into SQL below; they originate from information_schema,
  # not user input.
  def tables
    source_connection.exec("SELECT * FROM information_schema.tables WHERE table_schema = 'public' AND table_type in ('BASE TABLE', 'VIEW')").map do |table_attributes|
      table = Table.new(attributes: table_attributes)
      next unless export_table?(table)
      table.columns = column_definitions(table)
      table
    end.compact
  end

  def column_definitions(table)
    source_connection.exec("SELECT * FROM information_schema.columns WHERE table_schema='public' AND table_name='#{table.name}' order by ordinal_position")
  end

  def s3
    @s3 ||= AWS::S3.new(access_key_id: config[:s3_id], secret_access_key: config[:s3_key])
  end

  def bucket
    @bucket ||= s3.buckets[config[:s3_bucket]]
  end

  # Stream the table through COPY into an in-memory gzip buffer, then
  # upload it to S3.
  def export_table(table)
    buffer = StringIO.new
    zip = Zlib::GzipWriter.new(buffer)
    puts "Downloading #{table}"
    copy_command = "COPY (SELECT #{table.columns_for_copy} FROM #{table.name}) TO STDOUT WITH DELIMITER '|'"
    source_connection.copy_data(copy_command) do
      while row = source_connection.get_copy_data
        zip.write(row)
      end
    end
    zip.finish
    buffer.rewind
    upload_table(table, buffer)
  end

  def upload_table(table, buffer)
    puts "Uploading #{table.target_table_name}"
    bucket.objects["export/#{table.target_table_name}.psv.gz"].delete
    bucket.objects["export/#{table.target_table_name}.psv.gz"].write(buffer, acl: :authenticated_read)
  end

  # COPY the S3 dump into a temp table, then atomically swap it in.
  # BUG FIX: the COPY credentials previously read ENV directly, so
  # s3_id/s3_key passed via params were silently ignored here; read them
  # from config like every other S3 access in this class.
  def import_table(table)
    puts "Importing #{table.target_table_name}"
    target_connection.exec("DROP TABLE IF EXISTS #{target_schema}.#{table.target_temp_table_name}")
    target_connection.exec("CREATE TABLE #{target_schema}.#{table.target_temp_table_name} (#{table.columns_for_create})")
    target_connection.exec("COPY #{target_schema}.#{table.target_temp_table_name} FROM 's3://#{config[:s3_bucket]}/export/#{table.target_table_name}.psv.gz' CREDENTIALS 'aws_access_key_id=#{config[:s3_id]};aws_secret_access_key=#{config[:s3_key]}' GZIP TRUNCATECOLUMNS ESCAPE DELIMITER as '|';")
    target_connection.exec("BEGIN;")
    target_connection.exec("DROP TABLE IF EXISTS #{target_schema}.#{table.target_table_name}")
    target_connection.exec("ALTER TABLE #{target_schema}.#{table.target_temp_table_name} RENAME TO #{table.target_table_name}")
    target_connection.exec("COMMIT;")
  end
end
|
require 'protobuf/logger'
require 'protobuf/rpc/client'
require 'protobuf/rpc/error'
require 'protobuf/rpc/service_filters'
module Protobuf
  module Rpc
    # Object to encapsulate the request/response types for a given service method
    #
    RpcMethod = Struct.new("RpcMethod", :method, :request_type, :response_type)

    class Service
      include ::Protobuf::Rpc::ServiceFilters
      include ::Protobuf::Logger::LogMethods

      DEFAULT_HOST = '127.0.0.1'.freeze
      DEFAULT_PORT = 9399

      ##
      # Class Methods
      #
      # Create a new client for the given service.
      # See Client#initialize and ClientConnection::DEFAULT_OPTIONS
      # for all available options.
      #
      def self.client(options = {})
        ::Protobuf::Rpc::Client.new({ :service => self,
                                      :host => host,
                                      :port => port }.merge(options))
      end

      # Allows service-level configuration of location.
      # Useful for system-startup configuration of a service
      # so that any Clients using the Service.client sugar
      # will not have to configure the location each time.
      #
      def self.configure(config = {})
        self.host = config[:host] if config.key?(:host)
        self.port = config[:port] if config.key?(:port)
      end

      # The host location of the service.
      #
      def self.host
        @_host ||= DEFAULT_HOST
      end

      # The host location setter.
      #
      def self.host=(new_host)
        @_host = new_host
      end

      # Track subclasses so `services` can enumerate them.
      # FIX: call super so any other `inherited` hooks in the ancestry
      # (e.g. from included modules) still fire.
      def self.inherited(subclass)
        super
        @_subclasses ||= []
        @_subclasses << subclass
      end

      # Shorthand call to configure, passing a string formatted as hostname:port
      # e.g. 127.0.0.1:9933
      # e.g. localhost:0
      #
      def self.located_at(location)
        return if location.nil? || location.downcase.strip !~ /.+:\d+/
        host, port = location.downcase.strip.split ':'
        configure(:host => host, :port => port.to_i)
      end

      # The port of the service on the destination server.
      #
      def self.port
        @_port ||= DEFAULT_PORT
      end

      # The port location setter.
      #
      def self.port=(new_port)
        @_port = new_port
      end

      # Define an rpc method with the given request and response types.
      # This methods is only used by the generated service definitions
      # and not useful for user code.
      #
      def self.rpc(method, request_type, response_type)
        rpcs[method] = RpcMethod.new(method, request_type, response_type)
      end

      # Hash containing the set of methods defined via `rpc`.
      #
      def self.rpcs
        @_rpcs ||= {}
      end

      # Check if the given method name is a known rpc endpoint.
      #
      def self.rpc_method?(name)
        rpcs.key?(name)
      end

      # An array of defined service classes that have at least one
      # implemented rpc endpoint.
      def self.services
        (@_subclasses || []).select do |subclass|
          subclass.rpcs.any? do |(name, _method)|
            subclass.method_defined? name
          end
        end
      end

      ##
      # Instance Methods
      #
      # NOTE: :response is intentionally not in this attr_reader list; the
      # explicit, memoizing `def response` below would override the
      # generated reader anyway.
      attr_reader :method_name, :client_host

      # Initialize a service with the rpc endpoint name and the bytes
      # for the request.
      def initialize(method_name, request_bytes, client_host = nil)
        @method_name = method_name
        @client_host = client_host
        @_request_bytes = request_bytes
      end

      # Register a failure callback for use when rpc_failed is invoked.
      #
      def on_rpc_failed(callable)
        @_rpc_failed_callback ||= callable
      end

      # Response object for this rpc cycle. Not assignable.
      #
      def response
        @_response ||= response_type.new
      end

      # Request object for this rpc cycle. Not assignable.
      #
      def request
        @_request ||= if @_request_bytes.present?
                        request_type.new.parse_from_string(@_request_bytes)
                      else
                        request_type.new
                      end
      rescue => e
        raise BadRequestProto, "Unable to parse request: #{e.message}"
      end

      # Convenience method to get back to class method.
      #
      def rpc_method?(name)
        self.class.rpc_method?(name)
      end

      # Convenience method to get back to class rpcs hash.
      #
      def rpcs
        self.class.rpcs
      end

      # Get a callable object that will be used by the dispatcher
      # to invoke the specified rpc method. Facilitates callback dispatch.
      # The returned lambda is expected to be called at a later time (which
      # is why we wrap the method call).
      #
      def callable_rpc_method(method_name)
        lambda { run_filters(method_name) }
      end

      private

      def response_type
        @_response_type ||= rpcs[@method_name].response_type
      end

      def request_type
        @_request_type ||= rpcs[@method_name].request_type
      end

      # Sugar to make an rpc method feel like a controller method.
      # If this method is not called, the response will be the memoized
      # object returned by the response reader.
      #
      def respond_with(candidate)
        @_response = candidate
      end
      alias_method :return_from_whence_you_came, :respond_with

      # Renamed attribute from prior implementaiton due to lack of clarity
      # in what the variable contained. DEPRECATED.
      def rpc
        if ::Protobuf.print_deprecation_warnings?
          $stderr.puts <<-ERROR
[WARNING] Service#rpc method has been deprecated
and will be removed in a future version of protobuf.
          ERROR
        end
        @method_name
      end

      # Automatically fail a service method.
      #
      def rpc_failed(message)
        @_rpc_failed_callback.call(message)
      end
    end
  end
end
Improve readability
require 'protobuf/logger'
require 'protobuf/rpc/client'
require 'protobuf/rpc/error'
require 'protobuf/rpc/service_filters'
module Protobuf
module Rpc
# Object to encapsulate the request/response types for a given service method
#
RpcMethod = Struct.new("RpcMethod", :method, :request_type, :response_type)
# Base class for generated protobuf services. Holds the class-level rpc
# registry plus per-request state (method name, request bytes, client host).
class Service
  include ::Protobuf::Rpc::ServiceFilters
  include ::Protobuf::Logger::LogMethods
  DEFAULT_HOST = '127.0.0.1'.freeze
  DEFAULT_PORT = 9399
  ##
  # Class Methods
  #
  # Create a new client for the given service.
  # See Client#initialize and ClientConnection::DEFAULT_OPTIONS
  # for all available options.
  #
  def self.client(options = {})
    ::Protobuf::Rpc::Client.new({ :service => self,
                                  :host => host,
                                  :port => port }.merge(options))
  end
  # Allows service-level configuration of location.
  # Useful for system-startup configuration of a service
  # so that any Clients using the Service.client sugar
  # will not have to configure the location each time.
  #
  def self.configure(config = {})
    self.host = config[:host] if config.key?(:host)
    self.port = config[:port] if config.key?(:port)
  end
  # The host location of the service.
  #
  def self.host
    @_host ||= DEFAULT_HOST
  end
  # The host location setter.
  #
  def self.host=(new_host)
    @_host = new_host
  end
  # Track every subclass so Service.services can enumerate them later.
  def self.inherited(subclass)
    @_subclasses ||= []
    @_subclasses << subclass
  end
  # Shorthand call to configure, passing a string formatted as hostname:port
  # e.g. 127.0.0.1:9933
  # e.g. localhost:0
  #
  # NOTE(review): the format check is unanchored, so trailing garbage
  # after the port digits is silently accepted -- confirm intent.
  def self.located_at(location)
    return if location.nil? || location.downcase.strip !~ /.+:\d+/
    host, port = location.downcase.strip.split ':'
    configure(:host => host, :port => port.to_i)
  end
  # The port of the service on the destination server.
  #
  def self.port
    @_port ||= DEFAULT_PORT
  end
  # The port location setter.
  #
  def self.port=(new_port)
    @_port = new_port
  end
  # Define an rpc method with the given request and response types.
  # This methods is only used by the generated service definitions
  # and not useful for user code.
  #
  def self.rpc(method, request_type, response_type)
    rpcs[method] = RpcMethod.new(method, request_type, response_type)
  end
  # Hash containing the set of methods defined via `rpc`.
  #
  def self.rpcs
    @_rpcs ||= {}
  end
  # Check if the given method name is a known rpc endpoint.
  #
  def self.rpc_method?(name)
    rpcs.key?(name)
  end
  # An array of defined service classes that implement at least one of
  # their declared rpc endpoints.
  def self.services
    (@_subclasses || []).select do |subclass|
      subclass.rpcs.any? do |(name, _method)|
        subclass.method_defined? name
      end
    end
  end
  ##
  # Instance Methods
  #
  # FIX: :response was removed from this attr_reader list; it was dead
  # code, immediately shadowed by the explicit #response method below
  # (and the @response ivar it would have read is never assigned).
  attr_reader :method_name, :client_host
  # Initialize a service with the rpc endpoint name and the bytes
  # for the request.
  def initialize(method_name, request_bytes, client_host = nil)
    @method_name = method_name
    @client_host = client_host
    @_request_bytes = request_bytes
  end
  # Register a failure callback for use when rpc_failed is invoked.
  # First registration wins (||=); subsequent calls are no-ops.
  #
  def on_rpc_failed(callable)
    @_rpc_failed_callback ||= callable
  end
  # Response object for this rpc cycle. Not assignable.
  #
  def response
    @_response ||= response_type.new
  end
  # Request object for this rpc cycle. Not assignable.
  # Raises BadRequestProto when the request bytes cannot be parsed.
  #
  def request
    @_request ||= if @_request_bytes.present?
                    request_type.new.parse_from_string(@_request_bytes)
                  else
                    request_type.new
                  end
  rescue => e
    raise BadRequestProto, "Unable to parse request: #{e.message}"
  end
  # Convenience method to get back to class method.
  #
  def rpc_method?(name)
    self.class.rpc_method?(name)
  end
  # Convenience method to get back to class rpcs hash.
  #
  def rpcs
    self.class.rpcs
  end
  # Get a callable object that will be used by the dispatcher
  # to invoke the specified rpc method. Facilitates callback dispatch.
  # The returned lambda is expected to be called at a later time (which
  # is why we wrap the method call).
  #
  def callable_rpc_method(method_name)
    lambda { run_filters(method_name) }
  end
  private
  # Memoized response message class for the current endpoint.
  def response_type
    @_response_type ||= rpcs[@method_name].response_type
  end
  # Memoized request message class for the current endpoint.
  def request_type
    @_request_type ||= rpcs[@method_name].request_type
  end
  # Sugar to make an rpc method feel like a controller method.
  # If this method is not called, the response will be the memoized
  # object returned by the response reader.
  #
  def respond_with(candidate)
    @_response = candidate
  end
  alias_method :return_from_whence_you_came, :respond_with
  # Renamed attribute from prior implementation due to lack of clarity
  # in what the variable contained. DEPRECATED.
  def rpc
    if ::Protobuf.print_deprecation_warnings?
      $stderr.puts <<-ERROR
[WARNING] Service#rpc method has been deprecated
and will be removed in a future version of protobuf.
      ERROR
    end
    @method_name
  end
  # Automatically fail a service method.
  #
  def rpc_failed(message)
    @_rpc_failed_callback.call(message)
  end
end
end
end
|
# Application user record.
# has_secure_password adds password=/authenticate backed by bcrypt;
# assumes the users table has a password_digest column -- TODO confirm.
class User < ActiveRecord::Base
  has_secure_password
end
Add associations to user model
# Application user record with authentication and recipe bookmarks.
class User < ActiveRecord::Base
  has_secure_password
  # Favorites is the join model linking users to their bookmarked recipes.
  has_many :favorites
  has_many :recipes, through: :favorites
end
|
module Purview
  module Loggers
    # Minimal leveled logger that writes formatted lines to an injected
    # stream (any object responding to #puts).
    #
    # Options (merged over defaults): :stream, plus :debug/:error/:info
    # level toggles. Defaults: debug on, error on, info off.
    class Base
      def initialize(opts={})
        @opts = default_opts.merge(opts)
      end
      # Log at DEBUG level. args: (message) or (message, exception).
      def debug(*args)
        log(DEBUG, *args) if debug?
      end
      # Log at ERROR level. args: (message) or (message, exception).
      def error(*args)
        log(ERROR, *args) if error?
      end
      # Log at INFO level. args: (message) or (message, exception).
      def info(*args)
        log(INFO, *args) if info?
      end
      # Emit a Starting/Finished debug pair around the block; returns
      # the block's result.
      def with_context_logging(*args)
        debug(build_starting_message(*args))
        yield.tap { debug(build_finished_message(*args)) }
      end
      private
      DEBUG = 'DEBUG'
      ERROR = 'ERROR'
      INFO = 'INFO'
      attr_reader :opts
      # (context) => "Finished context"; (start_msg, end_msg) => end_msg.
      def build_finished_message(*args)
        case args.length
        when 1; "Finished #{args[0]}"
        when 2; args[-1]
        else; raise "expected 1 or 2 arguments, got #{args.length}"
        end
      end
      # Render one log line from the level and (message[, exception]).
      def build_message(level, *args)
        message, exception = args[0..1]
        message_template(!!exception) % {
          :exception => format_exception(exception),
          :level => level,
          :message => message,
          :process_id => Process.pid,
          :timestamp => Time.now.strftime('%Y-%m-%d %H:%M:%S.%L %z'),
        }
      end
      # (context) => "Starting context"; (start_msg, end_msg) => start_msg.
      def build_starting_message(*args)
        case args.length
        when 1; "Starting #{args[0]}"
        when 2; args[0]
        else; raise "expected 1 or 2 arguments, got #{args.length}"
        end
      end
      def debug?
        !!opts[:debug]
      end
      def default_opts
        {
          :debug => true,
          :error => true,
          :info => false,
        }
      end
      def error?
        !!opts[:error]
      end
      # BUGFIX: Exception#backtrace is nil for an exception that was
      # never raised; guard so formatting cannot crash the logger.
      def format_exception(exception)
        exception && (exception.backtrace || []).map { |line| "\tfrom #{line}" }.join("\n")
      end
      def info?
        !!opts[:info]
      end
      def log(level, *args)
        stream.puts build_message(level, *args)
      end
      # Append the exception clause only when an exception was supplied.
      def message_template(exception)
        "%{timestamp} %{level} (%{process_id}) %{message}".tap do |result|
          result << ":\n%{exception}" if exception
        end
      end
      def stream
        opts[:stream]
      end
    end
  end
end
Add "_LEVEL" suffix to logger constants
module Purview
  module Loggers
    # Minimal leveled logger that writes formatted lines to an injected
    # stream (any object responding to #puts).
    #
    # Options (merged over defaults): :stream, plus :debug/:error/:info
    # level toggles. Defaults: debug on, error on, info off.
    class Base
      def initialize(opts={})
        @opts = default_opts.merge(opts)
      end
      # Log at DEBUG level. args: (message) or (message, exception).
      def debug(*args)
        log(DEBUG_LEVEL, *args) if debug?
      end
      # Log at ERROR level. args: (message) or (message, exception).
      def error(*args)
        log(ERROR_LEVEL, *args) if error?
      end
      # Log at INFO level. args: (message) or (message, exception).
      def info(*args)
        log(INFO_LEVEL, *args) if info?
      end
      # Emit a Starting/Finished debug pair around the block; returns
      # the block's result.
      def with_context_logging(*args)
        debug(build_starting_message(*args))
        yield.tap { debug(build_finished_message(*args)) }
      end
      private
      DEBUG_LEVEL = 'DEBUG'
      ERROR_LEVEL = 'ERROR'
      INFO_LEVEL = 'INFO'
      attr_reader :opts
      # (context) => "Finished context"; (start_msg, end_msg) => end_msg.
      def build_finished_message(*args)
        case args.length
        when 1; "Finished #{args[0]}"
        when 2; args[-1]
        else; raise "expected 1 or 2 arguments, got #{args.length}"
        end
      end
      # Render one log line from the level and (message[, exception]).
      def build_message(level, *args)
        message, exception = args[0..1]
        message_template(!!exception) % {
          :exception => format_exception(exception),
          :level => level,
          :message => message,
          :process_id => Process.pid,
          :timestamp => Time.now.strftime('%Y-%m-%d %H:%M:%S.%L %z'),
        }
      end
      # (context) => "Starting context"; (start_msg, end_msg) => start_msg.
      def build_starting_message(*args)
        case args.length
        when 1; "Starting #{args[0]}"
        when 2; args[0]
        else; raise "expected 1 or 2 arguments, got #{args.length}"
        end
      end
      def debug?
        !!opts[:debug]
      end
      def default_opts
        {
          :debug => true,
          :error => true,
          :info => false,
        }
      end
      def error?
        !!opts[:error]
      end
      # BUGFIX: Exception#backtrace is nil for an exception that was
      # never raised; guard so formatting cannot crash the logger.
      def format_exception(exception)
        exception && (exception.backtrace || []).map { |line| "\tfrom #{line}" }.join("\n")
      end
      def info?
        !!opts[:info]
      end
      def log(level, *args)
        stream.puts build_message(level, *args)
      end
      # Append the exception clause only when an exception was supplied.
      def message_template(exception)
        "%{timestamp} %{level} (%{process_id}) %{message}".tap do |result|
          result << ":\n%{exception}" if exception
        end
      end
      def stream
        opts[:stream]
      end
    end
  end
end
|
# Make lib/ loadable so the version file below can be required.
$:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "simple_form_fancy_uploads/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
  s.name = "simple_form_fancy_uploads"
  s.version = SimpleFormFancyUploads::VERSION
  s.authors = ["Andrea Pavoni"]
  s.email = ["andrea.pavoni@gmail.com"]
  s.homepage = "http://andreapavoni.com"
  s.summary = "simple_form custom inputs to get image/link previews with file uploads."
  s.description = "Use simple_form (>= v2.0) custom inputs to get image previews or a link to uploaded file. Save time and code when you need useful file uploads."
  # Package app/config/db/lib plus the top-level docs.
  s.files = Dir["{app,config,db,lib}/**/*"] + ["MIT-LICENSE", "Rakefile", "README.md"]
  # NOTE(review): simple_form is pinned to a release-candidate line
  # (~> 2.0.0.rc) -- confirm this is still intended.
  s.add_dependency "rails", "~> 3.2.1"
  s.add_dependency "simple_form", "~> 2.0.0.rc"
  s.add_dependency "carrierwave"
end
update gemspec
# Make lib/ loadable so the version file below can be required.
$:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "simple_form_fancy_uploads/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
  s.name = "simple_form_fancy_uploads"
  s.version = SimpleFormFancyUploads::VERSION
  s.authors = ["Andrea Pavoni"]
  s.email = ["andrea.pavoni@gmail.com"]
  # Homepage points at the project repository.
  s.homepage = "http://github.com/apeacox/simple_form_fancy_uploads"
  s.summary = "simple_form custom inputs to get image/link previews with file uploads."
  s.description = "Use simple_form (>= v2.0) custom inputs to get image previews or a link to uploaded file. Save time and code when you need useful file uploads."
  # Package app/config/db/lib plus the top-level docs.
  s.files = Dir["{app,config,db,lib}/**/*"] + ["MIT-LICENSE", "Rakefile", "README.md"]
  # NOTE(review): simple_form is pinned to a release-candidate line
  # (~> 2.0.0.rc) -- confirm this is still intended.
  s.add_dependency "rails", "~> 3.2.1"
  s.add_dependency "simple_form", "~> 2.0.0.rc"
  s.add_dependency "carrierwave"
end
|
require "pathname"
require "yaml"
require "socket"
require "pwrake/option/host_map"
module Pwrake
# Monotonic clock in seconds -- immune to wall-clock adjustments, so it
# is suitable for measuring elapsed time.
def self.clock
  Process.clock_gettime(Process::CLOCK_MONOTONIC)
end
# Wall-clock time at load (used for timestamp formatting).
START_TIME = Time.now
# Monotonic reference point for elapsed-time calculations.
START_CLOCK = Pwrake.clock
class Option < Hash
def initialize
load_pwrake_conf
init_filesystem
init_options
init_pass_env
if self['SHOW_CONF']
require "yaml"
YAML.dump(self,$stdout)
exit
elsif self['REPORT_DIR']
require 'pwrake/report'
Report.new(self,[]).report_html
exit
end
setup_hosts
set_filesystem_option
end
attr_reader :counter
attr_accessor :total_cores
DEFAULT_CONFFILES = ["pwrake_conf.yaml","PwrakeConf.yaml"]
# ----- init -----
def load_pwrake_conf
# Read pwrake_conf
pwrake_conf = Rake.application.options.pwrake_conf
if pwrake_conf
if !File.exist?(pwrake_conf)
raise "Configuration file not found: #{pwrake_conf}"
end
else
pwrake_conf = DEFAULT_CONFFILES.find{|fn| File.exist?(fn)}
end
self['PWRAKE_CONF'] = pwrake_conf
if pwrake_conf.nil?
@yaml = {}
else
require "yaml"
@yaml = open(pwrake_conf){|f| YAML.load(f) }
end
end
# ----------------------------------------------------------
def init_filesystem
@filesystem = Rake.application.options.filesystem
@filesystem ||= mount_type.sub(/fuse\./,"")
begin
require "pwrake/option/option_#{@filesystem}"
rescue LoadError
require "pwrake/option/option_default_filesystem"
end
end
attr_reader :worker_progs
attr_reader :worker_option
attr_reader :queue_class
def mount_type(d=nil)
mtab = '/etc/mtab'
if File.exist?(mtab)
d ||= mountpoint_of_cwd
open(mtab,'r') do |f|
f.each_line do |l|
a = l.split
if a[1] == d
return a[2]
end
end
end
end
nil
end
def mountpoint_of_cwd
d = Pathname.pwd
while !d.mountpoint?
d = d.parent
end
d.to_s
end
# ----------------------------------------------------------
def init_options
option_data.each do |a|
prc = nil
keys = []
case a
when String
keys << a
when Array
a.each do |x|
case x
when String
keys << x
when Proc
prc = x
end
end
end
key = keys[0]
val = search_opts(keys)
val = prc.call(val) if prc
self[key] = val if !val.nil?
instance_variable_set("@"+key.downcase, val)
end
feedback_options
Rake.verbose(false) if Rake.application.options.silent
end
def option_data
[
'DRYRUN',
'IGNORE_SYSTEM',
'IGNORE_DEPRECATE',
'LOAD_SYSTEM',
'NOSEARCH',
'RAKELIB',
'SHOW_PREREQS',
'SILENT',
'TRACE',
'BACKTRACE',
'TRACE_OUTPUT',
'TRACE_RULES',
'SSH_OPTION',
'PASS_ENV',
'GNU_TIME',
'DEBUG',
'PLOT_PARALLELISM',
'SHOW_CONF',
['SUBDIR','SUBDIRS',
proc{|v|
if Array===v
v.each do |d|
if !File.directory?(d)
raise "directory #{d.inspect} does not exist"
end
end
elsif !v.nil?
raise "invalid argument for SUBDIR: #{v.inspect}"
end
}
],
['REPORT_DIR','REPORT'],
'REPORT_IMAGE',
'FAILED_TARGET', # rename(default), delete, leave
'FAILURE_TERMINATION', # wait, kill, continue
'QUEUE_PRIORITY', # RANK(default), FIFO, LIFO, DFS
'NOACTION_QUEUE_PRIORITY', # FIFO(default), LIFO, RAND
'GRAPH_PARTITION',
'PLOT_PARTITION',
['HOSTFILE','HOSTS'],
['LOG_DIR','LOG',
proc{|v|
if v
if v == "" || !v.kind_of?(String)
v = "Pwrake%Y%m%d-%H%M%S"
end
d = v = format_time_pid(v)
i = 1
while File.exist?(d)
d = "#{v}.#{i}"
i += 1
end
d
end
}],
['LOG_FILE',
proc{|v|
if v.kind_of?(String) && v != ""
v
else
"pwrake.log"
end
}],
['TASK_CSV_FILE',
proc{|v|
if v.kind_of?(String) && v != ""
v
else
"task.csv"
end
}],
['COMMAND_CSV_FILE',
proc{|v|
if v.kind_of?(String) && v != ""
v
else
"command.csv"
end
}],
['GC_LOG_FILE',
proc{|v|
if v
if v.kind_of?(String) && v != ""
v
else
"gc.log"
end
end
}],
['NUM_THREADS', proc{|v| v && v.to_i}],
['SHELL_START_INTERVAL', proc{|v| (v || 0.012).to_f}],
['HEARTBEAT', proc{|v| (v || 240).to_i}],
['RETRY', proc{|v| (v || 1).to_i}],
['MASTER_HOSTNAME', proc{|v| (v || Socket.gethostname).chomp}],
['WORK_DIR', proc{|v|
v ||= '%CWD_RELATIVE_TO_HOME'
v.sub('%CWD_RELATIVE_TO_HOME',cwd_relative_if_under_home)
}],
].concat(option_data_filesystem)
end
def format_time_pid(v)
START_TIME.strftime(v).sub("%$","%05d"%Process.pid)
end
def feedback_options
opts = Rake.application.options
['DRYRUN',
'IGNORE_SYSTEM',
'IGNORE_DEPRECATE',
'LOAD_SYSTEM',
'NOSEARCH',
'RAKELIB',
'SHOW_PREREQS',
'SILENT',
'TRACE',
'BACKTRACE',
'TRACE_OUTPUT',
'TRACE_RULES'
].each do |k|
if v=self[k]
m = (k.downcase+"=").to_sym
opts.send(m,v)
end
end
case opts.trace_output
when 'stdout'
opts.trace_output = $stdout
when 'stderr', nil
opts.trace_output = $stderr
end
end
# Priority of Option:
# command_option > ENV > pwrake_conf > DEFAULT_OPTIONS
def search_opts(keys)
val = Rake.application.options.send(keys[0].downcase.to_sym)
return parse_opt(val) if !val.nil?
#
keys.each do |k|
val = ENV[k.upcase]
return parse_opt(val) if !val.nil?
end
#
return nil if !@yaml
keys.each do |k|
val = @yaml[k.upcase]
return val if !val.nil?
end
nil
end
def parse_opt(s)
case s
when /^(false|nil|off)$/i
false
when /^(true|on)$/i
true
when $stdout
"stdout"
when $stderr
"stderr"
else
s
end
end
def cwd_relative_to_home
Pathname.pwd.relative_path_from(Pathname.new(ENV['HOME'])).to_s
end
def cwd_relative_if_under_home
home = Pathname.new(ENV['HOME']).realpath
path = pwd = Pathname.pwd.realpath
while path != home
if path.root?
return pwd.to_s
end
path = path.parent
end
return pwd.relative_path_from(home).to_s
end
# ----------------------------------------------------------
def init_pass_env
if envs = self['PASS_ENV']
pass_env = {}
case envs
when Array
envs.each do |k|
k = k.to_s
if v = ENV[k]
pass_env[k] = v
end
end
when Hash
envs.each do |k,v|
k = k.to_s
if v = ENV[k] || v
pass_env[k] = v
end
end
else
raise "invalid option for PASS_ENV in pwrake_conf.yaml"
end
if pass_env.empty?
self.delete('PASS_ENV')
else
self['PASS_ENV'] = pass_env
end
end
end
# ----------------------------------------------------------
def setup_hosts
if @hostfile && @num_threads
raise "Cannot set `hostfile' and `num_threads' simultaneously"
end
@host_map = HostMap.new(@hostfile || @num_threads)
end
attr_reader :host_map
# ----------------------------------------------------------
def put_log
Log.info "Options:"
self.each do |k,v|
Log.info " #{k} = #{v.inspect}"
end
Log.debug "@queue_class=#{@queue_class}"
Log.debug "@filesystem=#{@filesystem}"
end
end
end
delete environment variable LOG
require "pathname"
require "yaml"
require "socket"
require "pwrake/option/host_map"
module Pwrake
# Monotonic clock in seconds -- immune to wall-clock adjustments, so it
# is suitable for measuring elapsed time.
def self.clock
  Process.clock_gettime(Process::CLOCK_MONOTONIC)
end
# Wall-clock time at load (used for timestamp formatting).
START_TIME = Time.now
# Monotonic reference point for elapsed-time calculations.
START_CLOCK = Pwrake.clock
class Option < Hash
def initialize
load_pwrake_conf
init_filesystem
init_options
init_pass_env
if self['SHOW_CONF']
require "yaml"
YAML.dump(self,$stdout)
exit
elsif self['REPORT_DIR']
require 'pwrake/report'
Report.new(self,[]).report_html
exit
end
setup_hosts
set_filesystem_option
end
attr_reader :counter
attr_accessor :total_cores
DEFAULT_CONFFILES = ["pwrake_conf.yaml","PwrakeConf.yaml"]
# ----- init -----
def load_pwrake_conf
# Read pwrake_conf
pwrake_conf = Rake.application.options.pwrake_conf
if pwrake_conf
if !File.exist?(pwrake_conf)
raise "Configuration file not found: #{pwrake_conf}"
end
else
pwrake_conf = DEFAULT_CONFFILES.find{|fn| File.exist?(fn)}
end
self['PWRAKE_CONF'] = pwrake_conf
if pwrake_conf.nil?
@yaml = {}
else
require "yaml"
@yaml = open(pwrake_conf){|f| YAML.load(f) }
end
end
# ----------------------------------------------------------
def init_filesystem
@filesystem = Rake.application.options.filesystem
@filesystem ||= mount_type.sub(/fuse\./,"")
begin
require "pwrake/option/option_#{@filesystem}"
rescue LoadError
require "pwrake/option/option_default_filesystem"
end
end
attr_reader :worker_progs
attr_reader :worker_option
attr_reader :queue_class
def mount_type(d=nil)
mtab = '/etc/mtab'
if File.exist?(mtab)
d ||= mountpoint_of_cwd
open(mtab,'r') do |f|
f.each_line do |l|
a = l.split
if a[1] == d
return a[2]
end
end
end
end
nil
end
def mountpoint_of_cwd
d = Pathname.pwd
while !d.mountpoint?
d = d.parent
end
d.to_s
end
# ----------------------------------------------------------
def init_options
option_data.each do |a|
prc = nil
keys = []
case a
when String
keys << a
when Array
a.each do |x|
case x
when String
keys << x
when Proc
prc = x
end
end
end
key = keys[0]
val = search_opts(keys)
val = prc.call(val) if prc
self[key] = val if !val.nil?
instance_variable_set("@"+key.downcase, val)
end
feedback_options
Rake.verbose(false) if Rake.application.options.silent
end
def option_data
[
'DRYRUN',
'IGNORE_SYSTEM',
'IGNORE_DEPRECATE',
'LOAD_SYSTEM',
'NOSEARCH',
'RAKELIB',
'SHOW_PREREQS',
'SILENT',
'TRACE',
'BACKTRACE',
'TRACE_OUTPUT',
'TRACE_RULES',
'SSH_OPTION',
'PASS_ENV',
'GNU_TIME',
'DEBUG',
'PLOT_PARALLELISM',
'SHOW_CONF',
['SUBDIR','SUBDIRS',
proc{|v|
if Array===v
v.each do |d|
if !File.directory?(d)
raise "directory #{d.inspect} does not exist"
end
end
elsif !v.nil?
raise "invalid argument for SUBDIR: #{v.inspect}"
end
}
],
['REPORT_DIR','REPORT'],
'REPORT_IMAGE',
'FAILED_TARGET', # rename(default), delete, leave
'FAILURE_TERMINATION', # wait, kill, continue
'QUEUE_PRIORITY', # RANK(default), FIFO, LIFO, DFS
'NOACTION_QUEUE_PRIORITY', # FIFO(default), LIFO, RAND
'GRAPH_PARTITION',
'PLOT_PARTITION',
['HOSTFILE','HOSTS'],
['LOG_DIR',
proc{|v|
if v
if v == "" || !v.kind_of?(String)
v = "Pwrake%Y%m%d-%H%M%S"
end
d = v = format_time_pid(v)
i = 1
while File.exist?(d)
d = "#{v}.#{i}"
i += 1
end
d
end
}],
['LOG_FILE',
proc{|v|
if v.kind_of?(String) && v != ""
v
else
"pwrake.log"
end
}],
['TASK_CSV_FILE',
proc{|v|
if v.kind_of?(String) && v != ""
v
else
"task.csv"
end
}],
['COMMAND_CSV_FILE',
proc{|v|
if v.kind_of?(String) && v != ""
v
else
"command.csv"
end
}],
['GC_LOG_FILE',
proc{|v|
if v
if v.kind_of?(String) && v != ""
v
else
"gc.log"
end
end
}],
['NUM_THREADS', proc{|v| v && v.to_i}],
['SHELL_START_INTERVAL', proc{|v| (v || 0.012).to_f}],
['HEARTBEAT', proc{|v| (v || 240).to_i}],
['RETRY', proc{|v| (v || 1).to_i}],
['MASTER_HOSTNAME', proc{|v| (v || Socket.gethostname).chomp}],
['WORK_DIR', proc{|v|
v ||= '%CWD_RELATIVE_TO_HOME'
v.sub('%CWD_RELATIVE_TO_HOME',cwd_relative_if_under_home)
}],
].concat(option_data_filesystem)
end
def format_time_pid(v)
START_TIME.strftime(v).sub("%$","%05d"%Process.pid)
end
def feedback_options
opts = Rake.application.options
['DRYRUN',
'IGNORE_SYSTEM',
'IGNORE_DEPRECATE',
'LOAD_SYSTEM',
'NOSEARCH',
'RAKELIB',
'SHOW_PREREQS',
'SILENT',
'TRACE',
'BACKTRACE',
'TRACE_OUTPUT',
'TRACE_RULES'
].each do |k|
if v=self[k]
m = (k.downcase+"=").to_sym
opts.send(m,v)
end
end
case opts.trace_output
when 'stdout'
opts.trace_output = $stdout
when 'stderr', nil
opts.trace_output = $stderr
end
end
# Priority of Option:
# command_option > ENV > pwrake_conf > DEFAULT_OPTIONS
def search_opts(keys)
val = Rake.application.options.send(keys[0].downcase.to_sym)
return parse_opt(val) if !val.nil?
#
keys.each do |k|
val = ENV[k.upcase]
return parse_opt(val) if !val.nil?
end
#
return nil if !@yaml
keys.each do |k|
val = @yaml[k.upcase]
return val if !val.nil?
end
nil
end
def parse_opt(s)
case s
when /^(false|nil|off)$/i
false
when /^(true|on)$/i
true
when $stdout
"stdout"
when $stderr
"stderr"
else
s
end
end
def cwd_relative_to_home
Pathname.pwd.relative_path_from(Pathname.new(ENV['HOME'])).to_s
end
def cwd_relative_if_under_home
home = Pathname.new(ENV['HOME']).realpath
path = pwd = Pathname.pwd.realpath
while path != home
if path.root?
return pwd.to_s
end
path = path.parent
end
return pwd.relative_path_from(home).to_s
end
# ----------------------------------------------------------
def init_pass_env
if envs = self['PASS_ENV']
pass_env = {}
case envs
when Array
envs.each do |k|
k = k.to_s
if v = ENV[k]
pass_env[k] = v
end
end
when Hash
envs.each do |k,v|
k = k.to_s
if v = ENV[k] || v
pass_env[k] = v
end
end
else
raise "invalid option for PASS_ENV in pwrake_conf.yaml"
end
if pass_env.empty?
self.delete('PASS_ENV')
else
self['PASS_ENV'] = pass_env
end
end
end
# ----------------------------------------------------------
def setup_hosts
if @hostfile && @num_threads
raise "Cannot set `hostfile' and `num_threads' simultaneously"
end
@host_map = HostMap.new(@hostfile || @num_threads)
end
attr_reader :host_map
# ----------------------------------------------------------
def put_log
Log.info "Options:"
self.each do |k,v|
Log.info " #{k} = #{v.inspect}"
end
Log.debug "@queue_class=#{@queue_class}"
Log.debug "@filesystem=#{@filesystem}"
end
end
end
|
module Rack
  # Rack middleware that stamps permissive CORS headers onto every
  # response passing through the stack.
  class PermissiveCors
    def initialize(app)
      @app = app
    end

    # Delegate to the wrapped app, then hand back the same response
    # with the CORS headers merged into its header hash.
    def call(env)
      status, headers, body = @app.call(env)
      [status, headers.merge(cors_header), body]
    end

    # Allow any origin; permit GET and POST requests.
    def cors_header
      {
        'Access-Control-Allow-Origin' => '*',
        'Access-Control-Allow-Methods' => 'GET, POST'
      }
    end
  end
end
allow all headers, and all methods, what the heck why not
module Rack
  # Rack middleware that stamps fully permissive CORS headers (any
  # origin, any method, any header) onto every response.
  class PermissiveCors
    def initialize(app)
      @app = app
    end

    # Delegate to the wrapped app, then hand back the same response
    # with the CORS headers merged into its header hash.
    def call(env)
      status, headers, body = @app.call(env)
      [status, headers.merge(cors_header), body]
    end

    # Wildcard all three Access-Control-Allow-* headers.
    def cors_header
      %w[Origin Methods Headers].each_with_object({}) do |kind, acc|
        acc["Access-Control-Allow-#{kind}"] = '*'
      end
    end
  end
end
|
# Namespace for the rack-console gem.
module RackConsole
  # Gem release version.
  VERSION = "0.2.1"
end
Version 0.2.2.
# Namespace for the rack-console gem.
module RackConsole
  # Gem release version.
  VERSION = "0.2.2"
end
|
require 'rails_admin_currency/engine'
# Top-level namespace for the rails_admin_currency plugin; behavior is
# provided by the engine required above and the field type below.
module RailsAdminCurrency
end
require 'rails_admin/config/fields/types/decimal'
require 'json'
module RailsAdmin
  module Config
    module Fields
      module Types
        # Currency field type: renders decimal values via
        # number_to_currency and exposes I18n-driven formatting options
        # to the maskMoney jQuery widget.
        class Currency < RailsAdmin::Config::Fields::Types::Decimal
          # Register field type for the type loader
          RailsAdmin::Config::Fields::Types.register(:currency, self)
          register_instance_option :formatted_value do
            ActionController::Base.helpers.number_to_currency value
          end
          # Data attributes consumed by the maskMoney plugin.
          # BUGFIX: a comma was missing after 'data-decimal', which made
          # this hash literal a SyntaxError.
          register_instance_option :html_attributes do
            {
              'data-prefix': prefix,
              'data-suffix': suffix,
              'data-thousands': delimiter,
              'data-decimal': separator,
              onfocus: '$(this).maskMoney("destroy");$(this).maskMoney();'
            }
          end
          # Strip the currency unit/delimiters from the submitted string
          # and store a Float.
          # NOTE(review): `unit` is not defined in this class; presumably
          # inherited or provided by rails_admin -- verify.
          def parse_input(params)
            if params.has_key?(:value) && !params[:value].nil?
              p = params[:value].gsub(unit, '').split(separator)
              params[:value]= "#{p[0].gsub(delimiter, '')}.#{p[1]}".to_f
            end
          end
          register_instance_option :prefix do
            I18n.t('number.currency.format.unit')
          end
          # Intentionally blank: no suffix by default.
          register_instance_option :suffix do
          end
          register_instance_option :delimiter do
            I18n.t('number.currency.format.delimiter')
          end
          register_instance_option :separator do
            I18n.t('number.currency.format.separator')
          end
        end
      end
    end
  end
end
Ordem dos metodos
require 'rails_admin_currency/engine'
# Top-level namespace for the rails_admin_currency plugin; behavior is
# provided by the engine required above and the field type below.
module RailsAdminCurrency
end
require 'rails_admin/config/fields/types/decimal'
require 'json'
module RailsAdmin
  module Config
    module Fields
      module Types
        # Currency field type: renders decimal values via
        # number_to_currency and exposes I18n-driven formatting options
        # to the maskMoney jQuery widget.
        class Currency < RailsAdmin::Config::Fields::Types::Decimal
          # Register field type for the type loader
          RailsAdmin::Config::Fields::Types.register(:currency, self)
          register_instance_option :formatted_value do
            ActionController::Base.helpers.number_to_currency value
          end
          register_instance_option :prefix do
            I18n.t('number.currency.format.unit')
          end
          # Intentionally blank: no suffix by default.
          register_instance_option :suffix do
          end
          register_instance_option :delimiter do
            I18n.t('number.currency.format.delimiter')
          end
          register_instance_option :separator do
            I18n.t('number.currency.format.separator')
          end
          # Decimal places rendered/masked.
          register_instance_option :precision do
            2
          end
          # Data attributes consumed by the maskMoney jQuery plugin.
          register_instance_option :html_attributes do
            {
              'data-prefix' => prefix,
              'data-suffix' => suffix,
              'data-thousands' => delimiter,
              'data-decimal' => separator,
              'data-precision' => precision,
              'onfocus' => '$(this).maskMoney("destroy");$(this).maskMoney();'
            }
          end
          # Strip the currency unit/delimiters from the submitted string
          # and store a Float.
          # NOTE(review): `unit` is not defined in this class; presumably
          # inherited or provided by rails_admin -- verify.
          def parse_input(params)
            if params.has_key?(:value) && !params[:value].nil?
              p = params[:value].gsub(unit, '').split(separator)
              params[:value]= "#{p[0].gsub(delimiter, '')}.#{p[1]}".to_f
            end
          end
        end
      end
    end
  end
end
|
# encoding: utf-8
#--
# Copyright (c) 2010 Richard Huang (flyerhzm@gmail.com)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
require 'rubygems'
require 'progressbar'
require 'colored'
require 'rails_best_practices/lexicals'
require 'rails_best_practices/prepares'
require 'rails_best_practices/reviews'
require 'rails_best_practices/core'
require 'fileutils'
# RailsBestPractices helps you to analyze your rails code, according to best practices on http://rails-bestpractices.com.
# If it finds any violations of best practices, it will give you some readable suggestions.
#
# The analysis process is partitioned into two parts,
#
# 1. prepare process, it checks only model and mailer files, do some preparations, such as remember model names and associations.
# 2. review process, it checks all files, according to configuration, it really check if codes violate the best practices, if so, remember the violations.
#
# After analyzing, output the violations.
module RailsBestPractices
  # Default config shipped with the gem, copied into projects by `generate`.
  DEFAULT_CONFIG = File.join(File.dirname(__FILE__), "..", "rails_best_practices.yml")
  class <<self
    attr_writer :runner
    # generate configuration yaml file.
    #
    # @param [String] path where to generate the configuration yaml file
    def generate(path)
      @path = path || '.'
      FileUtils.cp DEFAULT_CONFIG, File.join(@path, 'config/rails_best_practices.yml')
    end
    # start checking rails codes.
    #
    # there are two steps to check rails codes,
    #
    # 1. prepare process, check all model and mailer files.
    # 2. review process, check all files.
    #
    # if there are violations to rails best practices, output them.
    #
    # @param [String] path the directory of rails project
    # @param [Hash] options
    def start(path, options)
      @path = path || '.'
      @options = options
      @options[:exclude] ||= []
      Core::Runner.base_path = @path
      @runner = Core::Runner.new
      @runner.debug = true if @options['debug']
      @runner.color = !options['without-color']
      if @runner.checks.find { |check| check.is_a? Reviews::AlwaysAddDbIndexReview } &&
          !review_files.find { |file| file.index "db/schema.rb" }
        plain_output("AlwaysAddDbIndexReview is disabled as there is no db/schema.rb file in your rails project.", 'blue')
      end
      @bar = ProgressBar.new('Source Codes', lexical_files.size + prepare_files.size + review_files.size)
      ["lexical", "prepare", "review"].each { |process| send(:process, process) }
      @runner.on_complete
      @bar.finish
      if @options['format'] == 'html'
        load_git_info if @options["with-git"]
        output_html_errors
      else
        output_terminal_errors
      end
      # NOTE: process exit status is truncated to 0..255 by the OS.
      exit @runner.errors.size
    end
    # process lexical, prepare or review.
    #
    # get all files for the process, analyze each file,
    # and increment progress bar unless debug.
    #
    # @param [String] process the process name, lexical, prepare or review.
    def process(process)
      files = send("#{process}_files")
      files.each do |file|
        @runner.send("#{process}_file", file)
        @bar.inc unless @options['debug']
      end
    end
    # get all files for prepare process.
    #
    # @return [Array] all files for prepare process
    def prepare_files
      @prepare_files ||= begin
        ['app/models', 'app/mailers', 'db/schema.rb', 'app/controllers'].inject([]) { |files, name|
          files += expand_dirs_to_files(File.join(@path, name))
        }.compact
      end
    end
    # get all files for review process.
    #
    # @return [Array] all files for review process
    def review_files
      @review_files ||= begin
        files = expand_dirs_to_files(@path)
        files = file_sort(files)
        # By default, tmp, vendor, spec, test, features are ignored.
        ['vendor', 'spec', 'test', 'features', 'tmp'].each do |pattern|
          files = file_ignore(files, "#{pattern}/") unless @options[pattern]
        end
        # Exclude files based on exclude regexes if the option is set.
        @options[:exclude].each do |pattern|
          files = file_ignore(files, pattern)
        end
        files.compact
      end
    end
    alias :lexical_files :review_files
    # expand all files with extension rb, erb, haml and builder under the dirs
    #
    # @param [Array] dirs what directories to expand
    # @return [Array] all files expanded
    def expand_dirs_to_files *dirs
      extensions = ['rb', 'erb', 'rake', 'rhtml', 'haml', 'builder']
      dirs.flatten.map { |entry|
        next unless File.exist? entry
        if File.directory? entry
          Dir[File.join(entry, '**', "*.{#{extensions.join(',')}}")]
        else
          entry
        end
      }.flatten
    end
    # sort files, models first, then mailers, and sort other files by characters.
    #
    # models and mailers first as for prepare process.
    #
    # BUGFIX: the previous implementation called models.sort / mailers.sort /
    # files.sort and discarded the results (Array#sort is non-destructive),
    # so no group was ever actually sorted.
    #
    # @param [Array] files
    # @return [Array] sorted files
    def file_sort files
      models, rest = files.partition { |file| file =~ Core::Check::MODEL_FILES }
      mailers, others = rest.partition { |file| file =~ Core::Check::MAILER_FILES }
      models.sort + mailers.sort + others.sort
    end
    # ignore specific files.
    #
    # @param [Array] files
    # @param [Regexp] pattern files match the pattern will be ignored
    # @return [Array] files that not match the pattern
    def file_ignore files, pattern
      files.reject { |file| file.index(pattern) }
    end
    # output errors on terminal.
    def output_terminal_errors
      @runner.errors.each { |error| plain_output(error.to_s, 'red') }
      plain_output("\nPlease go to http://rails-bestpractices.com to see more useful Rails Best Practices.", 'green')
      if @runner.errors.empty?
        plain_output("\nNo warning found. Cool!", 'green')
      else
        plain_output("\nFound #{@runner.errors.size} warnings.", 'red')
      end
    end
    # load git commit and git username info.
    def load_git_info
      git_progressbar = ProgressBar.new('Git Info', @runner.errors.size)
      @runner.errors.each do |error|
        git_info = `cd #{@runner.class.base_path}; git blame #{error.filename[@runner.class.base_path.size..-1]} | sed -n #{error.line_number.split(',').first}p`
        unless git_info == ""
          git_commit, git_username = git_info.split(/\d{4}-\d{2}-\d{2}/).first.split("(")
          error.git_commit = git_commit.split(" ").first.strip
          error.git_username = git_username.strip
        end
        git_progressbar.inc unless @options['debug']
      end
      git_progressbar.finish
    end
    # output errors with html format.
    def output_html_errors
      require 'erubis'
      template = File.read(File.join(File.dirname(__FILE__), "..", "assets", "result.html.erb"))
      File.open("rails_best_practices_output.html", "w+") do |file|
        eruby = Erubis::Eruby.new(template)
        file.puts eruby.evaluate(:errors => @runner.errors, :error_types => error_types, :textmate => @options["with-textmate"], :mvim => @options["with-mvim"], :git => @options["with-git"])
      end
    end
    # plain output with color.
    #
    # @param [String] message to output
    # @param [String] color
    def plain_output(message, color)
      if @options["without-color"]
        puts message
      else
        puts message.send(color)
      end
    end
    # unique error types.
    def error_types
      @runner.errors.map(&:type).uniq
    end
  end
end
Parse all files in all lexical, prepare and review processes.
# encoding: utf-8
#--
# Copyright (c) 2010 Richard Huang (flyerhzm@gmail.com)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
require 'rubygems'
require 'progressbar'
require 'colored'
require 'rails_best_practices/lexicals'
require 'rails_best_practices/prepares'
require 'rails_best_practices/reviews'
require 'rails_best_practices/core'
require 'fileutils'
# RailsBestPractices helps you to analyze your rails code, according to best practices on http://rails-bestpractices.
# if it finds any violatioins to best practices, it will give you some readable suggestions.
#
# The analysis process is partitioned into two parts,
#
# 1. prepare process, it checks only model and mailer files, do some preparations, such as remember model names and associations.
# 2. review process, it checks all files, according to configuration, it really check if codes violate the best practices, if so, remember the violations.
#
# After analyzing, output the violations.
module RailsBestPractices
DEFAULT_CONFIG = File.join(File.dirname(__FILE__), "..", "rails_best_practices.yml")
class <<self
attr_writer :runner
# generate configuration yaml file.
#
# Copies the bundled default config into <path>/config/rails_best_practices.yml.
#
# @param [String] path where to generate the configuration yaml file
#                 (defaults to '.' when nil)
def generate(path)
@path = path || '.'
FileUtils.cp DEFAULT_CONFIG, File.join(@path, 'config/rails_best_practices.yml')
end
# start checking rails codes.
#
# there are two steps to check rails codes,
#
# 1. prepare process, check all model and mailer files.
# 2. review process, check all files.
#
# if there are violations to rails best practices, output them.
#
# @param [String] path the directory of rails project
# @param [Hash] options
def start(path, options)
@path = path || '.'
@options = options
@options[:exclude] ||= []
Core::Runner.base_path = @path
@runner = Core::Runner.new
@runner.debug = true if @options['debug']
@runner.color = !options['without-color']
# warn when the db-index review is active but there is no schema to inspect
if @runner.checks.find { |check| check.is_a? Reviews::AlwaysAddDbIndexReview } &&
!parse_files.find { |file| file.index "db\/schema.rb" }
plain_output("AlwaysAddDbIndexReview is disabled as there is no db/schema.rb file in your rails project.", 'blue')
end
# each file is visited once per pass, hence size * 3
@bar = ProgressBar.new('Source Codes', parse_files.size * 3)
["lexical", "prepare", "review"].each { |process| send(:process, process) }
@runner.on_complete
@bar.finish
if @options['format'] == 'html'
load_git_info if @options["with-git"]
output_html_errors
else
output_terminal_errors
end
# NOTE(review): process exit codes are truncated to 0-255 by the OS, so a
# run with 256 warnings would exit 0 — confirm this is acceptable for CI use.
exit @runner.errors.size
end
# Run a single analysis pass ("lexical", "prepare" or "review") over every
# parsed file, advancing the progress bar unless debug output is enabled.
#
# @param [String] process the pass name: lexical, prepare or review
def process(process)
  handler = "#{process}_file"
  parse_files.each do |parsed_file|
    @runner.send(handler, parsed_file)
    @bar.inc unless @options['debug']
  end
end
# All files to be analyzed, computed once and memoized.
#
# Expands @path to source files, orders models/mailers first, then strips
# the default ignore directories (unless explicitly enabled via options)
# and any user supplied exclude patterns.
#
# @return [Array] all files for parsing
def parse_files
  @parse_files ||=
    begin
      files = file_sort(expand_dirs_to_files(@path))
      # By default, tmp, vendor, spec, test, features are ignored.
      %w(vendor spec test features tmp).each do |dir|
        files = file_ignore(files, "#{dir}/") unless @options[dir]
      end
      # Honour the user supplied exclude regexes.
      @options[:exclude].each do |excluded|
        files = file_ignore(files, excluded)
      end
      files.compact
    end
end
# Expand every given directory to the source files it contains.
#
# Recognised extensions are rb, erb, rake, rhtml, haml and builder. Plain
# file arguments are passed through untouched; entries that do not exist on
# disk become nil (callers compact them away).
#
# @param [Array] dirs directories (or files) to expand
# @return [Array] all files expanded
def expand_dirs_to_files *dirs
  glob = "*.{rb,erb,rake,rhtml,haml,builder}"
  dirs.flatten.map do |entry|
    next unless File.exist? entry
    File.directory?(entry) ? Dir[File.join(entry, '**', glob)] : entry
  end.flatten
end
# sort files: models first, then mailers, then everything else, each group
# sorted alphabetically.
#
# models and mailers come first so the prepare process sees them before the
# files that reference them.
#
# Fixes the original implementation, which called non-bang +sort+ on each
# group and discarded the sorted copies (so the promised ordering never
# happened), and which mutated the caller's array in place via collect!/
# compact!. This version leaves the input untouched.
#
# @param [Array] files (may contain nils; they are dropped)
# @return [Array] sorted files
def file_sort files
  present = files.compact
  # a file matching both patterns appears in both groups, as before
  models = present.grep(Core::Check::MODEL_FILES)
  mailers = present.grep(Core::Check::MAILER_FILES)
  others = present.reject do |file|
    file =~ Core::Check::MODEL_FILES || file =~ Core::Check::MAILER_FILES
  end
  models.sort + mailers.sort + others.sort
end
# Filter out files whose path contains the given pattern.
#
# @param [Array] files candidate file paths
# @param [Regexp, String] pattern files matching the pattern are dropped
# @return [Array] files that do not match the pattern
def file_ignore files, pattern
  files.select do |file|
    !file.index(pattern)
  end
end
# Print each violation to the terminal in red, followed by a green/red
# summary depending on whether any warnings were found.
def output_terminal_errors
@runner.errors.each { |error| plain_output(error.to_s, 'red') }
plain_output("\nPlease go to http://rails-bestpractices.com to see more useful Rails Best Practices.", 'green')
if @runner.errors.empty?
plain_output("\nNo warning found. Cool!", 'green')
else
plain_output("\nFound #{@runner.errors.size} warnings.", 'red')
end
end
# Annotate each error with the commit sha and author of the offending line,
# via `git blame` piped through sed to select the error's first line.
# NOTE(review): command built by interpolation; assumes filenames/line
# numbers never contain shell metacharacters — confirm.
def load_git_info
git_progressbar = ProgressBar.new('Git Info', @runner.errors.size)
@runner.errors.each do |error|
git_info = `cd #{@runner.class.base_path}; git blame #{error.filename[@runner.class.base_path.size..-1]} | sed -n #{error.line_number.split(',').first}p`
unless git_info == ""
# "<sha> (<author> <date> ..." — split on the date to get sha + author
git_commit, git_username = git_info.split(/\d{4}-\d{2}-\d{2}/).first.split("(")
error.git_commit = git_commit.split(" ").first.strip
error.git_username = git_username.strip
end
git_progressbar.inc unless @options['debug']
end
git_progressbar.finish
end
# Render all violations into rails_best_practices_output.html using the
# bundled erubis template (erubis required lazily, only for html output).
def output_html_errors
require 'erubis'
template = File.read(File.join(File.dirname(__FILE__), "..", "assets", "result.html.erb"))
File.open("rails_best_practices_output.html", "w+") do |file|
eruby = Erubis::Eruby.new(template)
file.puts eruby.evaluate(:errors => @runner.errors, :error_types => error_types, :textmate => @options["with-textmate"], :mvim => @options["with-mvim"], :git => @options["with-git"])
end
end
# Print +message+, applying the named color method (from the `colored`
# gem's String extensions) unless colored output has been switched off.
#
# @param [String] message text to print
# @param [String] color color method name, e.g. 'red'
def plain_output(message, color)
  text = @options["without-color"] ? message : message.send(color)
  puts text
end
# @return [Array] the distinct types of all recorded errors
def error_types
  @runner.errors.map do |error|
    error.type
  end.uniq
end
end
end
|
module RailsBlocks
# Gem version string; frozen so every reader shares one immutable object.
VERSION = "0.7.1".freeze
end
Bump version to 0.7.2.
module RailsBlocks
# Gem version string; frozen so every reader shares one immutable object.
VERSION = "0.7.2".freeze
end
|
Capistrano::Configuration.instance(:must_exist).load do
default_run_options[:pty] = true
set :keep_releases, 3
set :app_symlinks, nil
set :scm, :subversion
set :httpd, :apache
set :app_server, :mongrel
set :db_adapter, :mysql
set :rails_env, "production"
load 'config/deploy'
set :repository do
scm = fetch(:scm)
repos_base = "#{user}@#{domain}#{deploy_to}"
if scm == :subversion
"svn+ssh://#{repos_base}/repos/trunk"
elsif scm == :git
"ssh://#{repos_base}/#{application}.git"
end
end
task :validate_required_variables do
validate_option(:scm, :in => [:subversion, :git])
validate_option(:app_server, :in => [:mongrel, :passenger])
validate_option(:httpd, :in => [:apache])
validate_option(:db_adapter, :in => [:mysql, :postgresql, :sqlite3])
end
before :require_recipes, :validate_required_variables
require "railsmachine/recipes/app/deploy"
require "railsmachine/recipes/app/mongrel"
require "railsmachine/recipes/app/passenger"
# defer requires until variables have been set
task :require_recipes do
require "railsmachine/recipes/scm/#{scm}"
require "railsmachine/recipes/web/#{httpd}"
require "railsmachine/recipes/db/#{db_adapter}"
end
namespace :servers do
desc <<-DESC
A macro task that calls setup for db, app, symlinks, and web.
Used to configure your deployment environment in one command.
DESC
task :setup do
sudo "chown -R #{user}:#{user} #{deploy_to.gsub(application,'')}"
deploy.setup
begin
db.setup
rescue
puts "db:setup failed!"
end
app.setup
web.setup
end
desc <<-DESC
A macro task that restarts the application and web servers
DESC
task :restart do
app.restart
web.restart
end
end
namespace :app do
desc <<-DESC
Setup #{app_server}
DESC
task :setup, :roles => :app do
case app_server.to_s
when "mongrel"
setup_mongrel
when "passenger"
# do nothing
end
end
desc <<-DESC
Restart application server.
DESC
task :restart, :roles => :app do
application_servlet.restart
end
desc <<-DESC
Start application server.
DESC
task :start, :roles => :app do
application_servlet.start
end
desc <<-DESC
Stop application server.
DESC
task :stop, :roles => :app do
application_servlet.stop
end
desc <<-DESC
Switch your application to run on mongrel or passenger.
DESC
task :switch do
case app_server.to_s
when "mongrel"
switch_to_mongrel
when "passenger"
switch_to_passenger
end
end
namespace :symlinks do
desc <<-DESC
Setup application symlinks in the public
DESC
task :setup, :roles => [:app, :web] do
if app_symlinks
app_symlinks.each { |link| run "mkdir -p #{shared_path}/public/#{link}" }
end
end
desc <<-DESC
Link public directories to shared location.
DESC
task :update, :roles => [:app, :web] do
if app_symlinks
app_symlinks.each { |link| run "ln -nfs #{shared_path}/public/#{link} #{current_path}/public/#{link}" }
end
end
end
end
namespace :web do
desc <<-DESC
Setup web server.
DESC
task :setup, :roles => :web do
set :apache_server_name, domain unless apache_server_name
apache.configure
end
desc <<-DESC
Restart web server.
DESC
task :restart, :roles => :web do
apache.restart
end
desc <<-DESC
Reload web server configuration.
DESC
task :reload, :roles => :web do
apache.reload
end
desc <<-DESC
Start web server.
DESC
task :start, :roles => :web do
apache.start
end
desc <<-DESC
Stop web server.
DESC
task :stop, :roles => :web do
apache.stop
end
end
namespace :repos do
desc <<-DESC
Setup source control repository.
DESC
task :setup, :roles => :scm do
begin
sudo "chown -R #{user}:#{user} #{deploy_to.gsub(application,'')}"
localrepo.setup
rescue
puts "repos:setup failed!"
end
localrepo.import
end
end
on :start, :require_recipes
before 'deploy:update_code', 'app:symlinks:setup'
after 'deploy:symlink', 'app:symlinks:update'
after 'deploy:cold', 'web:reload'
after :deploy,'deploy:cleanup'
def setup_mongrel
set_mongrel_conf
set :mongrel_environment, rails_env
set :mongrel_port, apache_proxy_port
set :mongrel_servers, apache_proxy_servers
set :mongrel_user, user unless mongrel_user
set :mongrel_group, mongrel_user unless mongrel_group
mongrel.cluster.configure
end
def switch_to_mongrel
app.setup
app.start
web.setup
web.restart
end
def switch_to_passenger
web.setup
mongrel.cluster.remove
web.restart
end
def validate_option(key, options = {})
if !(options[:in].map{|o| o.to_s } + ['']).include?(self[key].to_s)
raise(ArgumentError, error_msg("Invalid value '#{self[key]}' for option '#{key}' must be one of the following: '#{options[:in].join(', ')}'"))
end
end
def application_servlet
case app_server.to_s
when 'mongrel'
mongrel.cluster
when 'passenger'
passenger
end
end
# Frame a validation failure message in a `+` banner as wide as the
# message, padded with blank lines so it stands out in Capistrano output.
# (Replaces the original character-by-character banner loop with String#*.)
#
# @param [String] msg the message to frame
# @return [String] the framed message
def error_msg(msg)
  banner = "+" * msg.length
  "\n\n#{banner}\n#{msg}\n#{banner}\n\n"
end
end
Fixed the location of the git repository.
Capistrano::Configuration.instance(:must_exist).load do
default_run_options[:pty] = true
set :keep_releases, 3
set :app_symlinks, nil
set :scm, :subversion
set :httpd, :apache
set :app_server, :mongrel
set :db_adapter, :mysql
set :rails_env, "production"
load 'config/deploy'
set :repository do
scm = fetch(:scm)
repos_base = "#{user}@#{domain}#{deploy_to}"
if scm == :subversion
"svn+ssh://#{repos_base}/repos/trunk"
elsif scm == :git
"ssh://#{repos_base}/repos/#{application}.git"
end
end
task :validate_required_variables do
validate_option(:scm, :in => [:subversion, :git])
validate_option(:app_server, :in => [:mongrel, :passenger])
validate_option(:httpd, :in => [:apache])
validate_option(:db_adapter, :in => [:mysql, :postgresql, :sqlite3])
end
before :require_recipes, :validate_required_variables
require "railsmachine/recipes/app/deploy"
require "railsmachine/recipes/app/mongrel"
require "railsmachine/recipes/app/passenger"
# defer requires until variables have been set
task :require_recipes do
require "railsmachine/recipes/scm/#{scm}"
require "railsmachine/recipes/web/#{httpd}"
require "railsmachine/recipes/db/#{db_adapter}"
end
namespace :servers do
desc <<-DESC
A macro task that calls setup for db, app, symlinks, and web.
Used to configure your deployment environment in one command.
DESC
task :setup do
sudo "chown -R #{user}:#{user} #{deploy_to.gsub(application,'')}"
deploy.setup
begin
db.setup
rescue
puts "db:setup failed!"
end
app.setup
web.setup
end
desc <<-DESC
A macro task that restarts the application and web servers
DESC
task :restart do
app.restart
web.restart
end
end
namespace :app do
desc <<-DESC
Setup #{app_server}
DESC
task :setup, :roles => :app do
case app_server.to_s
when "mongrel"
setup_mongrel
when "passenger"
# do nothing
end
end
desc <<-DESC
Restart application server.
DESC
task :restart, :roles => :app do
application_servlet.restart
end
desc <<-DESC
Start application server.
DESC
task :start, :roles => :app do
application_servlet.start
end
desc <<-DESC
Stop application server.
DESC
task :stop, :roles => :app do
application_servlet.stop
end
desc <<-DESC
Switch your application to run on mongrel or passenger.
DESC
task :switch do
case app_server.to_s
when "mongrel"
switch_to_mongrel
when "passenger"
switch_to_passenger
end
end
namespace :symlinks do
desc <<-DESC
Setup application symlinks in the public
DESC
task :setup, :roles => [:app, :web] do
if app_symlinks
app_symlinks.each { |link| run "mkdir -p #{shared_path}/public/#{link}" }
end
end
desc <<-DESC
Link public directories to shared location.
DESC
task :update, :roles => [:app, :web] do
if app_symlinks
app_symlinks.each { |link| run "ln -nfs #{shared_path}/public/#{link} #{current_path}/public/#{link}" }
end
end
end
end
namespace :web do
desc <<-DESC
Setup web server.
DESC
task :setup, :roles => :web do
set :apache_server_name, domain unless apache_server_name
apache.configure
end
desc <<-DESC
Restart web server.
DESC
task :restart, :roles => :web do
apache.restart
end
desc <<-DESC
Reload web server configuration.
DESC
task :reload, :roles => :web do
apache.reload
end
desc <<-DESC
Start web server.
DESC
task :start, :roles => :web do
apache.start
end
desc <<-DESC
Stop web server.
DESC
task :stop, :roles => :web do
apache.stop
end
end
namespace :repos do
desc <<-DESC
Setup source control repository.
DESC
task :setup, :roles => :scm do
begin
sudo "chown -R #{user}:#{user} #{deploy_to.gsub(application,'')}"
localrepo.setup
rescue
puts "repos:setup failed!"
end
localrepo.import
end
end
on :start, :require_recipes
before 'deploy:update_code', 'app:symlinks:setup'
after 'deploy:symlink', 'app:symlinks:update'
after 'deploy:cold', 'web:reload'
after :deploy,'deploy:cleanup'
# Configure the mongrel cluster: copy apache proxy settings into the
# mongrel_* variables (defaulting user/group to the deploy user) and
# generate the cluster configuration.
def setup_mongrel
set_mongrel_conf
set :mongrel_environment, rails_env
set :mongrel_port, apache_proxy_port
set :mongrel_servers, apache_proxy_servers
# default the mongrel user to the deploy user, and the group to that user
set :mongrel_user, user unless mongrel_user
set :mongrel_group, mongrel_user unless mongrel_group
mongrel.cluster.configure
end
# Bring the app up on mongrel: set up and start the app server, then
# reconfigure and restart the web server to proxy to it.
def switch_to_mongrel
app.setup
app.start
web.setup
web.restart
end
# Move the app to passenger: reconfigure the web server, remove the
# mongrel cluster configuration, and restart the web server.
def switch_to_passenger
web.setup
mongrel.cluster.remove
web.restart
end
# Ensure a configuration variable holds one of the allowed values.
# An empty/unset value ('') is deliberately tolerated (hence the + ['']),
# so validation only fires once the user actually sets the option.
#
# @param [Symbol] key the Capistrano variable name
# @param [Hash] options expects :in => [allowed, values]
# @raise [ArgumentError] when the value is set but not in the allowed list
def validate_option(key, options = {})
if !(options[:in].map{|o| o.to_s } + ['']).include?(self[key].to_s)
raise(ArgumentError, error_msg("Invalid value '#{self[key]}' for option '#{key}' must be one of the following: '#{options[:in].join(', ')}'"))
end
end
# The server control object matching the configured :app_server
# (the mongrel cluster recipe, or passenger). Returns nil for any
# other value, as before.
def application_servlet
  server = app_server.to_s
  if server == 'mongrel'
    mongrel.cluster
  elsif server == 'passenger'
    passenger
  end
end
# Frame a validation failure message in a `+` banner as wide as the
# message, padded with blank lines so it stands out in Capistrano output.
# (Replaces the original character-by-character banner loop with String#*.)
#
# @param [String] msg the message to frame
# @return [String] the framed message
def error_msg(msg)
  banner = "+" * msg.length
  "\n\n#{banner}\n#{msg}\n#{banner}\n\n"
end
end
|
# class HtmlTagWrapper
# def initialize(name)
# @name = name
# end
# def to_s
# @name
# end
# def method_missing(n)
#
# end
module React
module Component
# contains the name of all HTML tags, and the mechanism to register a component
# class as a new tag
module Tags
HTML_TAGS = %w(a abbr address area article aside audio b base bdi bdo big blockquote body br
button canvas caption cite code col colgroup data datalist dd del details dfn
dialog div dl dt em embed fieldset figcaption figure footer form h1 h2 h3 h4 h5
h6 head header hr html i iframe img input ins kbd keygen label legend li link
main map mark menu menuitem meta meter nav noscript object ol optgroup option
output p param picture pre progress q rp rt ruby s samp script section select
small source span strong style sub summary sup table tbody td textarea tfoot th
thead time title tr track u ul var video wbr) +
# The SVG Tags
%w(circle clipPath defs ellipse g line linearGradient mask path pattern polygon polyline
radialGradient rect stop svg text tspan)
# the present method is retained as a legacy behavior
def present(component, *params, &children)
React::RenderingContext.render(component, *params, &children)
end
# define each predefined tag as an instance method
HTML_TAGS.each do |tag|
if tag == 'p'
define_method(tag) do |*params, &children|
if children || params.count == 0 || (params.count == 1 && params.first.is_a?(Hash))
React::RenderingContext.render(tag, *params, &children)
else
Kernel.p(*params)
end
end
else
define_method(tag) do |*params, &children|
React::RenderingContext.render(tag, *params, &children)
end
end
alias_method tag.upcase, tag
const_set tag.upcase, tag
end
def self.html_tag_class_for(tag)
downcased_tag = tag.downcase
if tag =~ /[A-Z]+/ && HTML_TAGS.include?(downcased_tag)
Object.const_set tag, React.create_element(downcased_tag)
end
end
# use method_missing to look up component names in the form of "Foo(..)"
# where there is no preceeding scope.
def method_missing(name, *params, &children)
component = find_component(name)
return React::RenderingContext.render(component, *params, &children) if component
Object.method_missing(name, *params, &children)
end
# install methods with the same name as the component in the parent class/module
# thus component names in the form Foo::Bar(...) will work
class << self
def included(component)
name, parent = find_name_and_parent(component)
tag_names_module = Module.new do
define_method name do |*params, &children|
React::RenderingContext.render(component, *params, &children)
end
# handle deprecated _as_node style
define_method "#{name}_as_node" do |*params, &children|
React::RenderingContext.build_only(component, *params, &children)
end
end
parent.extend(tag_names_module)
end
private
def find_name_and_parent(component)
split_name = component.name && component.name.split('::')
if split_name && split_name.length > 1
[split_name.last, split_name.inject([Module]) { |a, e| a + [a.last.const_get(e)] }[-2]]
end
end
end
private
def find_component(name)
component = lookup_const(name)
if component && !component.method_defined?(:render)
raise "#{name} does not appear to be a react component."
end
component
end
def lookup_const(name)
return nil unless name =~ /^[A-Z]/
#html_tag = React::Component::Tags.html_tag_class(name)
#return html_tag if html_tag
scopes = self.class.name.to_s.split('::').inject([Module]) do |nesting, next_const|
nesting + [nesting.last.const_get(next_const)]
end.reverse
scope = scopes.detect { |s| s.const_defined?(name) }
scope.const_get(name) if scope
end
end
end
end
Remove empty lines.
# class HtmlTagWrapper
# def initialize(name)
# @name = name
# end
# def to_s
# @name
# end
# def method_missing(n)
#
# end
module React
module Component
# contains the name of all HTML tags, and the mechanism to register a component
# class as a new tag
module Tags
HTML_TAGS = %w(a abbr address area article aside audio b base bdi bdo big blockquote body br
button canvas caption cite code col colgroup data datalist dd del details dfn
dialog div dl dt em embed fieldset figcaption figure footer form h1 h2 h3 h4 h5
h6 head header hr html i iframe img input ins kbd keygen label legend li link
main map mark menu menuitem meta meter nav noscript object ol optgroup option
output p param picture pre progress q rp rt ruby s samp script section select
small source span strong style sub summary sup table tbody td textarea tfoot th
thead time title tr track u ul var video wbr) +
# The SVG Tags
%w(circle clipPath defs ellipse g line linearGradient mask path pattern polygon polyline
radialGradient rect stop svg text tspan)
# the present method is retained as a legacy behavior: renders the given
# component (with params/children) through the current rendering context.
def present(component, *params, &children)
React::RenderingContext.render(component, *params, &children)
end
# define each predefined tag as an instance method
HTML_TAGS.each do |tag|
if tag == 'p'
define_method(tag) do |*params, &children|
if children || params.count == 0 || (params.count == 1 && params.first.is_a?(Hash))
React::RenderingContext.render(tag, *params, &children)
else
Kernel.p(*params)
end
end
else
define_method(tag) do |*params, &children|
React::RenderingContext.render(tag, *params, &children)
end
end
alias_method tag.upcase, tag
const_set tag.upcase, tag
end
# Register a top-level constant for an HTML tag referenced in upper-case
# form (e.g. DIV) when it maps to a known tag.
# NOTE(review): this defines the constant on Object as a side effect and
# stores the result of React.create_element — confirm that is the intended
# constant value.
def self.html_tag_class_for(tag)
downcased_tag = tag.downcase
if tag =~ /[A-Z]+/ && HTML_TAGS.include?(downcased_tag)
Object.const_set tag, React.create_element(downcased_tag)
end
end
# use method_missing to look up component names in the form of "Foo(..)"
# where there is no preceding scope.
def method_missing(name, *params, &children)
component = find_component(name)
return React::RenderingContext.render(component, *params, &children) if component
# NOTE(review): Object.method_missing calls a private method on the Object
# class and will itself raise NoMethodError; `super` looks like the
# intended fallback — confirm.
Object.method_missing(name, *params, &children)
end
# install methods with the same name as the component in the parent class/module
# thus component names in the form Foo::Bar(...) will work
class << self
# When a component class includes Tags, define a render helper named after
# the component on its parent scope (plus a deprecated _as_node variant).
def included(component)
name, parent = find_name_and_parent(component)
tag_names_module = Module.new do
define_method name do |*params, &children|
React::RenderingContext.render(component, *params, &children)
end
# handle deprecated _as_node style
define_method "#{name}_as_node" do |*params, &children|
React::RenderingContext.build_only(component, *params, &children)
end
end
parent.extend(tag_names_module)
end
private
# Split a namespaced component name into [leaf_name, parent_scope].
# Returns nil for anonymous or top-level components (length <= 1).
def find_name_and_parent(component)
split_name = component.name && component.name.split('::')
if split_name && split_name.length > 1
[split_name.last, split_name.inject([Module]) { |a, e| a + [a.last.const_get(e)] }[-2]]
end
end
end
private
# Resolve +name+ to a component class, raising when the constant exists
# but does not look like a React component (no #render instance method).
def find_component(name)
  component = lookup_const(name)
  raise "#{name} does not appear to be a react component." if component && !component.method_defined?(:render)
  component
end
# Walk the lexical nesting implied by self's class name, innermost scope
# first, and return the first constant named +name+ found there.
# Returns nil for names that are not constant-shaped (no leading capital).
def lookup_const(name)
return nil unless name =~ /^[A-Z]/
#html_tag = React::Component::Tags.html_tag_class(name)
#return html_tag if html_tag
# Build [Module, Outer, Outer::Inner, ...] then reverse to search
# innermost-first.
scopes = self.class.name.to_s.split('::').inject([Module]) do |nesting, next_const|
nesting + [nesting.last.const_get(next_const)]
end.reverse
scope = scopes.detect { |s| s.const_defined?(name) }
scope.const_get(name) if scope
end
end
end
end
|
module RedmineRest
# Gem version; the pre-release suffix tracks the feature branch.
VERSION = '0.6.0-find.by.id'.freeze
end
bump version to 0.6.0-issue.priority.model
module RedmineRest
# Gem version; the pre-release suffix tracks the feature branch.
VERSION = '0.6.0-issue.priority.model'.freeze
end
|
require 'nokogiri'
module Res
module Formatters
class Junit
attr_accessor :junit, :results, :output
def initialize(junit_file)
@junit = Nokogiri::XML(File.open(junit_file))
@output = Hash.new
@output["type"] = "Junit"
@output["started"] = ""
@output["finished"] = ""
@output["results"] = Array.new
@results = Array.new
@test_suites = false
end
def start(suite)
if suite.name == "testsuites"
attach_suite_to_suites(suite)
elsif suite.name == "testsuite"
attach_suite_to_suite(suite)
end
@output["results"] = @results
end
def attach_suite_to_suites(suite)
@test_suites = true
j = 0
results = Array.new
if suite.name == "testsuites"
count = 0
results = Hash.new
results["children"] = Array.new
while count < suite.children.count
if suite.children[count].name == "testsuite"
results["children"][j] = attach_tests_to_suite(suite.children[count])
j = j + 1 if results["children"][j] != []
end
count = count +1
end
@results = results
end
end
def attach_suite_to_suite(suite)
j = 0
if suite.name == "testsuite"
count = 0
results = Array.new
while count < suite.children.count
if suite.children[count].name == "testsuite"
results[j] = Hash.new
results[j]["children"] = attach_tests_to_suite(suite.children[count])
j = j + 1 if results[j]["children"] != []
elsif suite.children[count].name == "testcase"
results[j] = Hash.new
results[j]["children"] = attach_tests_to_suite(suite)
count = suite.children.count
if results[j]["children"] != []
results[j]["name"] = suite.attributes["name"].value
results[j]["started"] = suite.attributes["timestamp"].value
j = j + 1
end
end
count = count + 1
end
results
@results = results if @test_suites != true
end
end
def attach_tests_to_suite(suite)
index = 0
count = 0
test_suite = 0
testcase = Array.new
while suite.children[count] != nil
if suite.children[count].name != "text" or suite.children[count].name != "properties"
if suite.children[count].name == "testcase"
testcase[index] = Hash.new
testcase[index]["name"] = suite.children[count].attributes["name"].value
if suite.children[count].children[0] != nil
if suite.children[count].children[1].name == "failure"
testcase[index]["status"] = "failed"
elsif suite.children[count].children[1].name == "skipped"
testcase[index]["status"] = "skipped"
else
testcase[index]["status"] = "passed"
end
else
testcase[index]["status"] = "passed"
end
testcase[index]["duration"] = suite.children[count].attributes["time"].value
index = index + 1
elsif suite.children[count].name == "testsuite"
testcase[index] = Hash.new
testcase[index]["children"] = attach_tests_to_suite(suite.children[count])
index = index + 1
end
count = count + 1
end
end
testcase
end
end
end
end
Added timestamps and a minor fix.
require 'nokogiri'
require 'json'
module Res
module Formatters
class Junit
attr_accessor :junit, :results, :output
# Parse the given JUnit XML report and initialise the result skeleton.
#
# @param [String] junit_file path to the JUnit XML report
def initialize(junit_file)
  # Read the file contents instead of handing Nokogiri an open File
  # object, which the original never closed (file-handle leak).
  @junit = Nokogiri::XML(File.read(junit_file))
  @output = {}
  @output["type"] = "Junit"
  @output["started"] = ""
  @output["finished"] = ""
  @output["results"] = []
  @results = []
  # flips to true once a <testsuites> root has been processed
  @test_suites = false
end
# Entry point: dispatch on the root element name and expose the collected
# results through @output.
#
# @param suite the root XML node of the report
def start(suite)
  case suite.name
  when "testsuites" then attach_suite_to_suites(suite)
  when "testsuite"  then attach_suite_to_suite(suite)
  end
  @output["results"] = @results
end
# Build the result hash for a <testsuites> root: one child entry per
# nested <testsuite>, each with name/type/started and its testcases.
# Sets @test_suites so attach_suite_to_suite won't overwrite @results.
def attach_suite_to_suites(suite)
@test_suites = true
j = 0
results = Array.new
if suite.name == "testsuites"
count = 0
results = Hash.new
results["children"] = Array.new
results["name"] = suite.attributes["name"].value
while count < suite.children.count
# non-testsuite children (text nodes etc.) are skipped
if suite.children[count].name == "testsuite"
results["children"][j] = Hash.new
results["children"][j]["name"] = suite.children[count].attributes["name"].value
results["children"][j]["type"] = "TestSuite"
results["children"][j]["started"] = suite.children[count].attributes["timestamp"].value
results["children"][j]["children"] = attach_tests_to_suite(suite.children[count])
# only advance when the suite produced any entries
j = j + 1 if results["children"][j] != []
end
count = count +1
end
@results = results
end
end
# Build the result array for a single <testsuite> root. Nested
# <testsuite> children each get their own entry; a direct <testcase>
# child means this whole suite is one entry (and ends the scan).
def attach_suite_to_suite(suite)
j = 0
if suite.name == "testsuite"
count = 0
results = Array.new
while count < suite.children.count
if suite.children[count].name == "testsuite"
results[j] = Hash.new
results[j]["type"] = "TestSuite"
results[j]["started"] = suite.children[count].attributes["timestamp"].value
results[j]["children"] = attach_tests_to_suite(suite.children[count])
j = j + 1 if results[j]["children"] != []
elsif suite.children[count].name == "testcase"
results[j] = Hash.new
results[j]["type"] = "TestSuite"
results[j]["children"] = attach_tests_to_suite(suite)
# direct testcases: the whole suite is consumed in one go
count = suite.children.count
if results[j]["children"] != []
results[j]["name"] = suite.attributes["name"].value
results[j]["started"] = suite.attributes["timestamp"].value
j = j + 1
end
end
count = count + 1
end
# NOTE(review): this bare expression is a no-op; the assignment below is
# what publishes the results — consider removing the stray line.
results
@results = results if @test_suites != true
end
end
# Convert the <testcase>/<testsuite> children of +suite+ into an array of
# result hashes (type, name, status, duration / nested children).
#
# Fixes two defects in the original:
# * `name != "text" or name != "properties"` was a tautology (always
#   true); the intended guard needs AND. The original's count increment
#   also lived inside that branch, so a correct AND-guard would have
#   looped forever — iteration is restructured with #each.
# * a <testcase> whose only child was e.g. <skipped> crashed on
#   `children[1].name` (nil receiver); lookups are now nil-guarded, and
#   the skipped check mirrors the existing first-child failure check.
#
# @param suite a <testsuite> XML node
# @return [Array<Hash>] one hash per testcase / nested testsuite
def attach_tests_to_suite(suite)
  testcase = []
  index = 0
  suite.children.each do |child|
    # skip whitespace/text nodes and <properties> blocks
    next if child.name == "text" || child.name == "properties"
    if child.name == "testcase"
      testcase[index] = {}
      testcase[index]["type"] = "TestCase"
      testcase[index]["name"] = child.attributes["name"].value
      testcase[index]["status"] = testcase_status(child)
      time = child.attributes["time"]
      testcase[index]["duration"] = time.value unless time.nil?
      index += 1
    elsif child.name == "testsuite"
      testcase[index] = {}
      testcase[index]["type"] = "TestSuite"
      testcase[index]["children"] = attach_tests_to_suite(child)
      testcase[index]["name"] = child.attributes["name"].value
      testcase[index]["started"] = child.attributes["timestamp"].value
      index += 1
    end
  end
  testcase
end
# Derive the status of a single <testcase> node from its child elements:
# failure/skipped element present => "failed"/"skipped", else "passed".
def testcase_status(node)
  first = node.children[0]
  second = node.children[1]
  return "passed" if first.nil?
  return "failed" if (second && second.name == "failure") || first.name == "failure"
  return "skipped" if (second && second.name == "skipped") || first.name == "skipped"
  "passed"
end
end
end
end |
module Resque
module Plugins
##
# If you want your job to retry on failure, simply extend your module/class
# with this module:
#
# class DeliverWebHook
# extend Resque::Plugins::Retry # allows 1 retry by default.
# @queue = :web_hooks
#
# def self.perform(url, hook_id, hmac_key)
# heavy_lifting
# end
# end
#
# Easily do something custom:
#
# class DeliverWebHook
# extend Resque::Plugins::Retry
# @queue = :web_hooks
#
# @retry_limit = 8 # default: 1
# @retry_delay = 60 # default: 0
#
# # used to build redis key, for counting job attempts.
# def self.identifier(url, hook_id, hmac_key)
# "#{url}-#{hook_id}"
# end
#
# def self.perform(url, hook_id, hmac_key)
# heavy_lifting
# end
# end
#
module Retry
# Give each subclass its own independent copy (dup) of the parent's retry
# criteria checks, so subclasses can add checks without mutating the parent.
def inherited(subclass)
subclass.instance_variable_set("@retry_criteria_checks", retry_criteria_checks.dup)
end
#def self.included(base)
# p "included!!!!!"
# subclass.instance_variable_set("@retry_criteria_checks", retry_criteria_checks.dup)
#end
##
# @abstract You may override to implement a custom identifier; consider
# doing so if your job arguments are many/long or may not map
# cleanly to strings.
#
# Builds a default job identifier from the job arguments (joined with
# '-'); it becomes part of the redis key.
#
# @param [Array] args job arguments
# @return [String, nil] the identifier, or nil when there are no arguments
def identifier(*args)
  joined = args.join('-')
  joined.empty? ? nil : joined
end
##
# Builds the redis key to be used for keeping state of the job
# attempts: 'resque-retry', the class name and the identifier joined by
# ':' (nil parts skipped), with all whitespace stripped.
#
# @return [String] redis key
def redis_retry_key(*args)
['resque-retry', name, identifier(*args)].compact.join(":").gsub(/\s/, '')
end
##
# Maximum number of retries we may attempt to successfully perform the job.
# A retry limit of 0 or below means retry forever.
#
# @return [Fixnum]
def retry_limit
@retry_limit ||= 1
end
##
# Number of retry attempts used to try and perform the job.
#
# The real value is kept in Redis, it is accessed and incremented using
# a before_perform hook.
#
# @return [Fixnum] number of attempts
def retry_attempt
@retry_attempt ||= 0
end
##
# @abstract
# Number of seconds to delay until the job is retried.
#
# @return [Number] number of seconds to delay
def retry_delay
@retry_delay ||= 0
end
##
# @abstract
# Modify the arguments used to retry the job. Use this to do something
# other than try the exact same job again.
#
# @return [Array] new job arguments
def args_for_retry(*args)
args
end
##
# Convenience method to test whether you may retry on a given exception.
#
# @return [Boolean]
def retry_exception?(exception)
return true if retry_exceptions.nil?
!! retry_exceptions.any? { |ex| ex >= exception }
end
##
# @abstract
# Controls what exceptions may be retried.
#
# Default: `nil` - this will retry all exceptions.
#
# @return [Array, nil]
def retry_exceptions
@retry_exceptions ||= nil
end
##
# Test if the retry criteria is valid.
#
# @param [Exception] exception
# @param [Array] args job arguments
# @return [Boolean]
def retry_criteria_valid?(exception, *args)
# if the retry limit was reached, dont bother checking anything else.
return false if retry_limit_reached?
should_retry = false
# call user retry criteria check blocks.
retry_criteria_checks.each do |criteria_check|
should_retry ||= !!instance_exec(exception, *args, &criteria_check)
end
should_retry
end
def retry_criteria_checks
@retry_criteria_checks ||= []
# add built in crteria checks.
if @retry_criteria_checks.empty?
@retry_criteria_checks << lambda do |exception, *args|
retry_exception?(exception.class)
end
end
@retry_criteria_checks
end
def retry_limit_reached?
if retry_limit > 0
return true if retry_attempt >= retry_limit
end
false
end
def retry_criteria_check(&block)
#@retry_criteria_checks = retry_criteria_checks
retry_criteria_checks << block
end
##
# Will retry the job.
def try_again(*args)
if retry_delay <= 0
# If the delay is 0, no point passing it through the scheduler
Resque.enqueue(self, *args_for_retry(*args))
else
Resque.enqueue_in(retry_delay, self, *args_for_retry(*args))
end
end
##
# Resque before_perform hook.
#
# Increments and sets the `@retry_attempt` count.
def before_perform_retry(*args)
retry_key = redis_retry_key(*args)
Resque.redis.setnx(retry_key, -1) # default to -1 if not set.
@retry_attempt = Resque.redis.incr(retry_key) # increment by 1.
end
##
# Resque after_perform hook.
#
# Deletes retry attempt count from Redis.
def after_perform_retry(*args)
Resque.redis.del(redis_retry_key(*args))
end
##
# Resque on_failure hook.
#
# Checks if our retry criteria is valid, if it is we try again.
# Otherwise the retry attempt count is deleted from Redis.
def on_failure_retry(exception, *args)
if retry_criteria_valid?(exception, *args)
try_again(*args)
else
Resque.redis.del(redis_retry_key(*args))
end
end
end
end
end
Make sure we call super.
module Resque
  module Plugins
    ##
    # If you want your job to retry on failure, simply extend your module/class
    # with this module:
    #
    #   class DeliverWebHook
    #     extend Resque::Plugins::Retry # allows 1 retry by default.
    #     @queue = :web_hooks
    #
    #     def self.perform(url, hook_id, hmac_key)
    #       heavy_lifting
    #     end
    #   end
    #
    # Easily do something custom:
    #
    #   class DeliverWebHook
    #     extend Resque::Plugins::Retry
    #     @queue = :web_hooks
    #
    #     @retry_limit = 8  # default: 1
    #     @retry_delay = 60 # default: 0
    #
    #     # used to build redis key, for counting job attempts.
    #     def self.identifier(url, hook_id, hmac_key)
    #       "#{url}-#{hook_id}"
    #     end
    #
    #     def self.perform(url, hook_id, hmac_key)
    #       heavy_lifting
    #     end
    #   end
    #
    module Retry
      ##
      # Subclass hook: copies the parent job's retry criteria checks onto the
      # subclass, calling `super` so other `inherited` hooks still run.
      def inherited(subclass)
        super(subclass)
        subclass.instance_variable_set("@retry_criteria_checks", retry_criteria_checks.dup)
      end
      #def self.included(base)
      #  p "included!!!!!"
      #  subclass.instance_variable_set("@retry_criteria_checks", retry_criteria_checks.dup)
      #end
      ##
      # @abstract You may override to implement a custom identifier,
      #           you should consider doing this if your job arguments
      #           are many/long or may not map cleanly to strings.
      #
      # Builds an identifier using the job arguments. This identifier
      # is used as part of the redis key.
      #
      # @param [Array] args job arguments
      # @return [String] job identifier
      def identifier(*args)
        args_string = args.join('-')
        args_string.empty? ? nil : args_string
      end
      ##
      # Builds the redis key to be used for keeping state of the job
      # attempts.
      #
      # @return [String] redis key
      def redis_retry_key(*args)
        ['resque-retry', name, identifier(*args)].compact.join(":").gsub(/\s/, '')
      end
      ##
      # Maximum number of retries we can attempt to successfully perform the job.
      # A retry limit of 0 or below will retry forever.
      #
      # @return [Fixnum]
      def retry_limit
        @retry_limit ||= 1
      end
      ##
      # Number of retry attempts used to try and perform the job.
      #
      # The real value is kept in Redis, it is accessed and incremented using
      # a before_perform hook.
      #
      # @return [Fixnum] number of attempts
      def retry_attempt
        @retry_attempt ||= 0
      end
      ##
      # @abstract
      # Number of seconds to delay until the job is retried.
      #
      # @return [Number] number of seconds to delay
      def retry_delay
        @retry_delay ||= 0
      end
      ##
      # @abstract
      # Modify the arguments used to retry the job. Use this to do something
      # other than try the exact same job again.
      #
      # @return [Array] new job arguments
      def args_for_retry(*args)
        args
      end
      ##
      # Convenience method to test whether you may retry on a given exception.
      #
      # @return [Boolean]
      def retry_exception?(exception)
        return true if retry_exceptions.nil?
        !! retry_exceptions.any? { |ex| ex >= exception }
      end
      ##
      # @abstract
      # Controls what exceptions may be retried.
      #
      # Default: `nil` - this will retry all exceptions.
      #
      # @return [Array, nil]
      def retry_exceptions
        @retry_exceptions ||= nil
      end
      ##
      # Test if the retry criteria is valid.
      #
      # @param [Exception] exception
      # @param [Array] args job arguments
      # @return [Boolean]
      def retry_criteria_valid?(exception, *args)
        # if the retry limit was reached, don't bother checking anything else.
        return false if retry_limit_reached?
        should_retry = false
        # call user retry criteria check blocks.
        retry_criteria_checks.each do |criteria_check|
          should_retry ||= !!instance_exec(exception, *args, &criteria_check)
        end
        should_retry
      end
      ##
      # Registered retry criteria check blocks, lazily seeded with the
      # built-in exception-class check.
      #
      # @return [Array<Proc>]
      def retry_criteria_checks
        @retry_criteria_checks ||= []
        # add built in criteria checks.
        if @retry_criteria_checks.empty?
          @retry_criteria_checks << lambda do |exception, *args|
            retry_exception?(exception.class)
          end
        end
        @retry_criteria_checks
      end
      ##
      # True when a positive retry limit has been exhausted.
      #
      # @return [Boolean]
      def retry_limit_reached?
        if retry_limit > 0
          return true if retry_attempt >= retry_limit
        end
        false
      end
      ##
      # Register a custom retry criteria check; the block receives the
      # exception and the job arguments.
      def retry_criteria_check(&block)
        #@retry_criteria_checks = retry_criteria_checks
        retry_criteria_checks << block
      end
      ##
      # Will retry the job.
      def try_again(*args)
        if retry_delay <= 0
          # If the delay is 0, no point passing it through the scheduler
          Resque.enqueue(self, *args_for_retry(*args))
        else
          Resque.enqueue_in(retry_delay, self, *args_for_retry(*args))
        end
      end
      ##
      # Resque before_perform hook.
      #
      # Increments and sets the `@retry_attempt` count.
      def before_perform_retry(*args)
        retry_key = redis_retry_key(*args)
        Resque.redis.setnx(retry_key, -1) # default to -1 if not set.
        @retry_attempt = Resque.redis.incr(retry_key) # increment by 1.
      end
      ##
      # Resque after_perform hook.
      #
      # Deletes retry attempt count from Redis.
      def after_perform_retry(*args)
        Resque.redis.del(redis_retry_key(*args))
      end
      ##
      # Resque on_failure hook.
      #
      # Checks if our retry criteria is valid, if it is we try again.
      # Otherwise the retry attempt count is deleted from Redis.
      def on_failure_retry(exception, *args)
        if retry_criteria_valid?(exception, *args)
          try_again(*args)
        else
          Resque.redis.del(redis_retry_key(*args))
        end
      end
    end
  end
end
require 'rest-core'
require 'cgi'
module RestCore::Middleware
  include RestCore

  # Included hook: gives the middleware RestCore's constants, an +app+
  # reader, and — when the middleware is a Struct — one reader per struct
  # member that prefers a per-request value stored in +env+ over the
  # instance default, plus a generated #initialize taking (app, *members).
  def self.included mod
    mod.send(:include, RestCore)
    mod.send(:attr_reader, :app)
    return unless mod.respond_to?(:members)
    src = mod.members.map{ |member| <<-RUBY }
      def #{member} env
        if env.key?('#{member}')
          env['#{member}']
        else
          @#{member}
        end
      end
    RUBY
    args = [:app] + mod.members
    para_list = args.map{ |a| "#{a}=nil"}.join(', ')
    args_list = args .join(', ')
    ivar_list = args.map{ |a| "@#{a}" }.join(', ')
    src << <<-RUBY
      def initialize #{para_list}
        #{ivar_list} = #{args_list}
      end
    RUBY
    accessor = Module.new
    accessor.module_eval(src.join("\n"), __FILE__, __LINE__)
    mod.const_set(:Accessor, accessor)
    mod.send(:include, accessor)
  end

  # Forward the request env to the next app in the chain.
  def call env ; app.call(env) ; end

  # Non-destructively append +obj+ to the env's FAIL / LOG collections.
  # NOTE: #fail intentionally shadows Kernel#fail for middleware instances.
  def fail env, obj; env.merge(FAIL => (env[FAIL] || []) + [obj]); end
  def log env, obj; env.merge(LOG => (env[LOG] || []) + [obj]); end

  module_function

  # Builds the request URI from REQUEST_PATH plus the non-nil REQUEST_QUERY
  # pairs. Both keys and values are CGI-escaped so reserved characters
  # ('&', '=', spaces, ...) cannot corrupt the query string. Appends with
  # '&' when the path already carries a query string.
  def request_uri env
    # compacting the hash
    if (query = (env[REQUEST_QUERY] || {}).select{ |k, v| v }).empty?
      env[REQUEST_PATH].to_s
    else
      q = if env[REQUEST_PATH] =~ /\?/ then '&' else '?' end
      "#{env[REQUEST_PATH]}#{q}" \
      "#{query.map{ |(k, v)|
           "#{CGI.escape(k.to_s)}=#{CGI.escape(v.to_s)}" }.join('&')}"
    end
  end
  public :request_uri
end
middleware.rb: keys in the request_uri should also be escaped
require 'rest-core'
require 'cgi'
module RestCore::Middleware
  include RestCore
  # Included hook: gives the middleware RestCore's constants, an +app+
  # reader, and — when the middleware is a Struct — one reader per struct
  # member that prefers a per-request value stored in +env+ over the
  # instance default, plus a generated #initialize taking (app, *members).
  def self.included mod
    mod.send(:include, RestCore)
    mod.send(:attr_reader, :app)
    return unless mod.respond_to?(:members)
    src = mod.members.map{ |member| <<-RUBY }
      def #{member} env
        if env.key?('#{member}')
          env['#{member}']
        else
          @#{member}
        end
      end
    RUBY
    args = [:app] + mod.members
    para_list = args.map{ |a| "#{a}=nil"}.join(', ')
    args_list = args .join(', ')
    ivar_list = args.map{ |a| "@#{a}" }.join(', ')
    src << <<-RUBY
      def initialize #{para_list}
        #{ivar_list} = #{args_list}
      end
    RUBY
    # Generated readers + initializer live in a named Accessor module so
    # middlewares can still override them and call super.
    accessor = Module.new
    accessor.module_eval(src.join("\n"), __FILE__, __LINE__)
    mod.const_set(:Accessor, accessor)
    mod.send(:include, accessor)
  end
  # Forward the request env to the next app in the chain.
  def call env ; app.call(env) ; end
  # Non-destructively append +obj+ to the env's FAIL / LOG collections.
  # NOTE(review): #fail deliberately shadows Kernel#fail here.
  def fail env, obj; env.merge(FAIL => (env[FAIL] || []) + [obj]); end
  def log env, obj; env.merge(LOG => (env[LOG] || []) + [obj]); end
  module_function
  # Builds the request URI from REQUEST_PATH plus the non-nil REQUEST_QUERY
  # pairs; both keys and values are CGI-escaped. Appends with '&' when the
  # path already carries a query string.
  def request_uri env
    # compacting the hash
    if (query = (env[REQUEST_QUERY] || {}).select{ |k, v| v }).empty?
      env[REQUEST_PATH].to_s
    else
      q = if env[REQUEST_PATH] =~ /\?/ then '&' else '?' end
      "#{env[REQUEST_PATH]}#{q}" \
      "#{query.map{ |(k, v)|
           "#{CGI.escape(k.to_s)}=#{CGI.escape(v.to_s)}" }.join('&')}"
    end
  end
  public :request_uri
end
|
module Restulicious
  # Gem version string; frozen so the shared constant cannot be mutated.
  VERSION = "0.1.1".freeze
end
Version bump 0.1.2
module Restulicious
  # Gem version string; frozen so the shared constant cannot be mutated.
  VERSION = "0.1.2".freeze
end
|
module ResumeStylist
  # Wraps a Liquid theme: parses optional YAML frontmatter out of the
  # source, renders resume data through the template, and post-processes
  # embedded SCSS/Sass <style> blocks into compressed CSS.
  class Theme
    # def intialize(theme_path)
    #   @source = File.read(theme_path)

    # @param source [String] raw theme source, optionally starting with a
    #   `---` ... `---`/`...` YAML frontmatter block
    def initialize(source)
      @source = source
      @frontmatter = {
        "flags" => []
      }
      parse_frontmatter!
      @template = Liquid::Template.parse(@source)
    end

    # Renders the template with the resume data (frontmatter is exposed to
    # the template under the "frontmatter" key) and post-processes styles.
    #
    # @param resume_data [Hash]
    # @return [String] rendered resume markup
    def render(resume_data)
      ctx = resume_data.merge({ "frontmatter" => @frontmatter })
      @resume = @template.render(ctx)
      post_process!
      @resume
    end

    private

    # Compiles inline <style type="text/scss"|"text/sass"> blocks to
    # compressed CSS unless the theme sets the "disable_post_processing"
    # frontmatter flag.
    def post_process!
      return if @frontmatter["flags"].include? "disable_post_processing"
      @document = Oga.parse_html @resume
      @document.css(%q{style[type="text/scss"], style[type="text/sass"]}).each do |style|
        syntax = style.get("type")[5, 4].to_sym # "text/scss"[5, 4] => :scss (or :sass)
        css = Sass::Engine.new(style.text, syntax: syntax, style: :compressed)
        style.inner_text = css.render
        style.set("type", "text/css")
      end
      @resume = @document.to_xml
    end

    # Extracts a leading YAML frontmatter block into @frontmatter and strips
    # it from @source. Uses Regexp.last_match instead of $POSTMATCH/$1:
    # $POSTMATCH only exists after `require "English"`, so relying on it is
    # a hidden dependency; Regexp.last_match always works.
    def parse_frontmatter!
      if @source =~ /\A(---\s*\n.*?\n?)^((---|\.\.\.)\s*$\n?)/m
        frontmatter_yaml = Regexp.last_match(1)
        @source = Regexp.last_match.post_match
        @frontmatter.merge!(YAML.load(frontmatter_yaml) || {})
      end
      nil
    end
  end
end
# Liquid tag that provides normalize.css: either inlined into the document
# (fetched over HTTPS once, at template-parse time) or referenced via a
# <link> element.
class NormalizeCSS < Liquid::Tag
  NormalizeCSS_URI = URI("https://necolas.github.io/normalize.css/latest/normalize.css")
  # @param tag_name [String] Liquid tag name ("normalize_css")
  # @param tokens [String] tag markup; containing the word "inline" switches
  #   to inline mode
  # @param ctx Liquid parse context (unused)
  #
  # NOTE(review): inline mode performs a blocking HTTP request inside the
  # constructor, i.e. during template parsing — confirm this is intended.
  def initialize(tag_name, tokens, ctx)
    if tokens.include? "inline"
      req = Net::HTTP::Get.new(NormalizeCSS_URI.request_uri)
      http = Net::HTTP.new(NormalizeCSS_URI.host, NormalizeCSS_URI.port)
      http.use_ssl = (NormalizeCSS_URI.scheme == "https")
      response = http.request(req)
      # Non-200 responses become a CSS comment so the failure is visible in
      # the rendered output instead of raising during parse.
      if response.code == "200"
        @content = response.body
      else
        @content = "/*! ERROR: Request for `#{NormalizeCSS_URI}` returned #{response.code}! Please report this bug at https://github.com/omninonsense/resume-stylist/issues/new */"
      end
    else
      @content = %Q{<link rel="stylesheet" href="#{NormalizeCSS_URI}" media="screen">}
    end
  end
  # Emit the prepared content (raw CSS text or <link> markup).
  def render(context)
    @content
  end
end
Liquid::Template.register_tag('normalize_css', NormalizeCSS)
Rename the argument, even though it's unused.
module ResumeStylist
  # Wraps a Liquid theme: parses optional YAML frontmatter out of the
  # source, renders resume data through the template, and post-processes
  # embedded SCSS/Sass <style> blocks into compressed CSS.
  class Theme
    # def intialize(theme_path)
    #   @source = File.read(theme_path)

    # @param source [String] raw theme source, optionally starting with a
    #   `---` ... `---`/`...` YAML frontmatter block
    def initialize(source)
      @source = source
      @frontmatter = {
        "flags" => []
      }
      parse_frontmatter!
      @template = Liquid::Template.parse(@source)
    end

    # Renders the template with the resume data (frontmatter is exposed to
    # the template under the "frontmatter" key) and post-processes styles.
    #
    # @param resume_data [Hash]
    # @return [String] rendered resume markup
    def render(resume_data)
      ctx = resume_data.merge({ "frontmatter" => @frontmatter })
      @resume = @template.render(ctx)
      post_process!
      @resume
    end

    private

    # Compiles inline <style type="text/scss"|"text/sass"> blocks to
    # compressed CSS unless the theme sets the "disable_post_processing"
    # frontmatter flag.
    def post_process!
      return if @frontmatter["flags"].include? "disable_post_processing"
      @document = Oga.parse_html @resume
      @document.css(%q{style[type="text/scss"], style[type="text/sass"]}).each do |style|
        syntax = style.get("type")[5, 4].to_sym # "text/scss"[5, 4] => :scss (or :sass)
        css = Sass::Engine.new(style.text, syntax: syntax, style: :compressed)
        style.inner_text = css.render
        style.set("type", "text/css")
      end
      @resume = @document.to_xml
    end

    # Extracts a leading YAML frontmatter block into @frontmatter and strips
    # it from @source. Uses Regexp.last_match instead of $POSTMATCH/$1:
    # $POSTMATCH only exists after `require "English"`, so relying on it is
    # a hidden dependency; Regexp.last_match always works.
    def parse_frontmatter!
      if @source =~ /\A(---\s*\n.*?\n?)^((---|\.\.\.)\s*$\n?)/m
        frontmatter_yaml = Regexp.last_match(1)
        @source = Regexp.last_match.post_match
        @frontmatter.merge!(YAML.load(frontmatter_yaml) || {})
      end
      nil
    end
  end
end
# Liquid tag that provides normalize.css: either inlined into the document
# (fetched over HTTPS once, at template-parse time) or referenced via a
# <link> element.
class NormalizeCSS < Liquid::Tag
  NormalizeCSS_URI = URI("https://necolas.github.io/normalize.css/latest/normalize.css")
  # @param tag_name [String] Liquid tag name ("normalize_css")
  # @param tokens [String] tag markup; containing the word "inline" switches
  #   to inline mode
  # @param liq Liquid parse context (unused)
  #
  # NOTE(review): inline mode performs a blocking HTTP request inside the
  # constructor, i.e. during template parsing — confirm this is intended.
  def initialize(tag_name, tokens, liq)
    if tokens.include? "inline"
      req = Net::HTTP::Get.new(NormalizeCSS_URI.request_uri)
      http = Net::HTTP.new(NormalizeCSS_URI.host, NormalizeCSS_URI.port)
      http.use_ssl = (NormalizeCSS_URI.scheme == "https")
      response = http.request(req)
      # Non-200 responses become a CSS comment so the failure is visible in
      # the rendered output instead of raising during parse.
      if response.code == "200"
        @content = response.body
      else
        @content = "/*! ERROR: Request for `#{NormalizeCSS_URI}` returned #{response.code}! Please report this bug at https://github.com/omninonsense/resume-stylist/issues/new */"
      end
    else
      @content = %Q{<link rel="stylesheet" href="#{NormalizeCSS_URI}" media="screen">}
    end
  end
  # Emit the prepared content (raw CSS text or <link> markup).
  def render(context)
    @content
  end
end
Liquid::Template.register_tag('normalize_css', NormalizeCSS)
|
module Rocksdb
  module Ruby
    # Gem version string; frozen so the shared constant cannot be mutated.
    VERSION = "0.1.5".freeze
  end
end
Version bump to 0.2.0
module Rocksdb
  module Ruby
    # Gem version string; frozen so the shared constant cannot be mutated.
    VERSION = "0.2.0".freeze
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "quaderno"
s.version = "1.0.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Carlos Hernandez"]
s.date = "2013-05-10"
s.description = "longer description of your gem"
s.email = "carlos@recrea.es"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
s.files = [
".document",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.md",
"Rakefile",
"VERSION",
"build_install.sh",
"lib/quaderno-ruby.rb",
"lib/quaderno-ruby/base.rb",
"lib/quaderno-ruby/behavior/crud.rb",
"lib/quaderno-ruby/behavior/deliver.rb",
"lib/quaderno-ruby/behavior/payment.rb",
"lib/quaderno-ruby/contact.rb",
"lib/quaderno-ruby/estimate.rb",
"lib/quaderno-ruby/exceptions/exceptions.rb",
"lib/quaderno-ruby/expense.rb",
"lib/quaderno-ruby/invoice.rb",
"lib/quaderno-ruby/item.rb",
"lib/quaderno-ruby/payment.rb",
"quaderno-ruby.gemspec",
"test/fixtures/quaderno_cassettes/all_contacts.yml",
"test/fixtures/quaderno_cassettes/all_estimates.yml",
"test/fixtures/quaderno_cassettes/all_expenses.yml",
"test/fixtures/quaderno_cassettes/all_invoices.yml",
"test/fixtures/quaderno_cassettes/deleted_contact.yml",
"test/fixtures/quaderno_cassettes/deleted_estimate.yml",
"test/fixtures/quaderno_cassettes/deleted_expense.yml",
"test/fixtures/quaderno_cassettes/deleted_invoice.yml",
"test/fixtures/quaderno_cassettes/delivered_estimate.yml",
"test/fixtures/quaderno_cassettes/delivered_invoice.yml",
"test/fixtures/quaderno_cassettes/found_contact.yml",
"test/fixtures/quaderno_cassettes/found_estimate.yml",
"test/fixtures/quaderno_cassettes/found_expense.yml",
"test/fixtures/quaderno_cassettes/found_invoice.yml",
"test/fixtures/quaderno_cassettes/new_contact.yml",
"test/fixtures/quaderno_cassettes/new_estimate.yml",
"test/fixtures/quaderno_cassettes/new_expense.yml",
"test/fixtures/quaderno_cassettes/new_invoice.yml",
"test/fixtures/quaderno_cassettes/paid_expense.yml",
"test/fixtures/quaderno_cassettes/paid_invoice.yml",
"test/fixtures/quaderno_cassettes/rate_limit.yml",
"test/fixtures/quaderno_cassettes/unpay_an_expense.yml",
"test/fixtures/quaderno_cassettes/unpay_an_invoice.yml",
"test/fixtures/quaderno_cassettes/updated_contact.yml",
"test/fixtures/quaderno_cassettes/updated_estimate.yml",
"test/fixtures/quaderno_cassettes/updated_expense.yml",
"test/fixtures/quaderno_cassettes/updated_invoice.yml",
"test/helper.rb",
"test/test_quaderno-ruby.rb",
"test/unit/test_quaderno_contacts.rb",
"test/unit/test_quaderno_estimates.rb",
"test/unit/test_quaderno_expenses.rb",
"test/unit/test_quaderno_invoices.rb"
]
s.homepage = "http://github.com/polimorfico/quaderno-ruby"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.24"
s.summary = "one-line summary of your gem"
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<bundler>, [">= 1.0.0"])
s.add_development_dependency(%q<debugger>, [">= 0"])
s.add_development_dependency(%q<httparty>, [">= 0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<rcov>, ["~> 0.9.11"])
s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
s.add_development_dependency(%q<vcr>, [">= 0"])
else
s.add_dependency(%q<bundler>, [">= 1.0.0"])
s.add_dependency(%q<debugger>, [">= 0"])
s.add_dependency(%q<httparty>, [">= 0"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rcov>, ["~> 0.9.11"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<vcr>, [">= 0"])
end
else
s.add_dependency(%q<bundler>, [">= 1.0.0"])
s.add_dependency(%q<debugger>, [">= 0"])
s.add_dependency(%q<httparty>, [">= 0"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rcov>, ["~> 0.9.11"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<vcr>, [">= 0"])
end
end
Regenerate gemspec for version 1.0.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "quaderno-ruby"
s.version = "1.0.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Carlos Hernandez"]
s.date = "2013-05-10"
s.description = "longer description of your gem"
s.email = "carlos@recrea.es"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
s.files = [
".document",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.md",
"Rakefile",
"VERSION",
"build_install.sh",
"lib/quaderno-ruby.rb",
"lib/quaderno-ruby/base.rb",
"lib/quaderno-ruby/behavior/crud.rb",
"lib/quaderno-ruby/behavior/deliver.rb",
"lib/quaderno-ruby/behavior/payment.rb",
"lib/quaderno-ruby/contact.rb",
"lib/quaderno-ruby/estimate.rb",
"lib/quaderno-ruby/exceptions/exceptions.rb",
"lib/quaderno-ruby/expense.rb",
"lib/quaderno-ruby/invoice.rb",
"lib/quaderno-ruby/item.rb",
"lib/quaderno-ruby/payment.rb",
"quaderno-ruby.gemspec",
"test/fixtures/quaderno_cassettes/all_contacts.yml",
"test/fixtures/quaderno_cassettes/all_estimates.yml",
"test/fixtures/quaderno_cassettes/all_expenses.yml",
"test/fixtures/quaderno_cassettes/all_invoices.yml",
"test/fixtures/quaderno_cassettes/deleted_contact.yml",
"test/fixtures/quaderno_cassettes/deleted_estimate.yml",
"test/fixtures/quaderno_cassettes/deleted_expense.yml",
"test/fixtures/quaderno_cassettes/deleted_invoice.yml",
"test/fixtures/quaderno_cassettes/delivered_estimate.yml",
"test/fixtures/quaderno_cassettes/delivered_invoice.yml",
"test/fixtures/quaderno_cassettes/found_contact.yml",
"test/fixtures/quaderno_cassettes/found_estimate.yml",
"test/fixtures/quaderno_cassettes/found_expense.yml",
"test/fixtures/quaderno_cassettes/found_invoice.yml",
"test/fixtures/quaderno_cassettes/new_contact.yml",
"test/fixtures/quaderno_cassettes/new_estimate.yml",
"test/fixtures/quaderno_cassettes/new_expense.yml",
"test/fixtures/quaderno_cassettes/new_invoice.yml",
"test/fixtures/quaderno_cassettes/paid_expense.yml",
"test/fixtures/quaderno_cassettes/paid_invoice.yml",
"test/fixtures/quaderno_cassettes/rate_limit.yml",
"test/fixtures/quaderno_cassettes/unpay_an_expense.yml",
"test/fixtures/quaderno_cassettes/unpay_an_invoice.yml",
"test/fixtures/quaderno_cassettes/updated_contact.yml",
"test/fixtures/quaderno_cassettes/updated_estimate.yml",
"test/fixtures/quaderno_cassettes/updated_expense.yml",
"test/fixtures/quaderno_cassettes/updated_invoice.yml",
"test/helper.rb",
"test/test_quaderno-ruby.rb",
"test/unit/test_quaderno_contacts.rb",
"test/unit/test_quaderno_estimates.rb",
"test/unit/test_quaderno_expenses.rb",
"test/unit/test_quaderno_invoices.rb"
]
s.homepage = "http://github.com/polimorfico/quaderno-ruby"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.24"
s.summary = "one-line summary of your gem"
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<bundler>, [">= 1.0.0"])
s.add_development_dependency(%q<debugger>, [">= 0"])
s.add_development_dependency(%q<httparty>, [">= 0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<rcov>, ["~> 0.9.11"])
s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
s.add_development_dependency(%q<vcr>, [">= 0"])
else
s.add_dependency(%q<bundler>, [">= 1.0.0"])
s.add_dependency(%q<debugger>, [">= 0"])
s.add_dependency(%q<httparty>, [">= 0"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rcov>, ["~> 0.9.11"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<vcr>, [">= 0"])
end
else
s.add_dependency(%q<bundler>, [">= 1.0.0"])
s.add_dependency(%q<debugger>, [">= 0"])
s.add_dependency(%q<httparty>, [">= 0"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rcov>, ["~> 0.9.11"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<vcr>, [">= 0"])
end
end
|
module ROXML
class RequiredElementMissing < Exception # :nodoc:
end
#
# Internal base class that represents an XML - Class binding.
#
class XMLRef # :nodoc:
delegate :required?, :array?, :blocks, :accessor, :default, :to => :opts
def initialize(opts, instance)
@opts = opts
@instance = instance
end
def to_xml
val = @instance.__send__(accessor)
opts.to_xml.respond_to?(:call) ? opts.to_xml.call(val) : val
end
def update_xml(xml, value)
returning wrap(xml) do |xml|
write_xml(xml, value)
end
end
def name
opts.name_explicit? ? opts.name : conventionize(opts.name)
end
def xpath_name
opts.name_explicit? ? name : namespacify(name)
end
def value_in(xml)
xml = XML::Node.from(xml)
value = fetch_value(xml)
value = default if value.nil?
freeze(apply_blocks(value))
end
private
attr_reader :opts
def conventionize(what)
convention ||= @instance.class.respond_to?(:roxml_naming_convention) && @instance.class.roxml_naming_convention
if !what.blank? && convention.respond_to?(:call)
URI.unescape(convention.call(URI.escape(what, /\/|::/)))
else
what
end
end
def namespacify(what)
if what.present? && namespace = @instance.class.roxml_namespace
"#{namespace}:#{what}"
else
what
end
end
def wrapper
namespacify(conventionize(opts.wrapper))
end
def apply_blocks(val)
begin
blocks.inject(val) {|val, block| block.call(val) }
rescue Exception => ex
raise ex, "#{accessor}: #{ex.message}"
end
end
def freeze(val)
if opts.frozen?
val.each(&:freeze) if val.is_a?(Enumerable)
val.freeze
else
val
end
end
def xpath
wrapper ? "#{wrapper}/#{xpath_name}" : xpath_name.to_s
end
def auto_xpath
"#{namespacify(conventionize(opts.name.pluralize))}/#{xpath_name}" if array?
end
def wrap(xml)
return xml if !wrapper || xml.name == wrapper
if child = xml.children.find {|c| c.name == wrapper }
return child
end
xml.add_child(XML::Node.create(wrapper.to_s))
end
def nodes_in(xml)
vals = xml.search(xpath)
if (opts.hash? || opts.array?) && vals.empty? && !wrapper && auto_xpath
vals = xml.search(auto_xpath)
@auto_vals = !vals.empty?
end
if vals.empty?
raise RequiredElementMissing, "#{name} from #{xml} for #{accessor}" if required?
default
else
yield(vals)
end
end
end
# Interal class representing an XML attribute binding
#
# In context:
# <element attribute="XMLAttributeRef">
# XMLTextRef
# </element>
class XMLAttributeRef < XMLRef # :nodoc:
private
# Updates the attribute in the given XML block to
# the value provided.
def write_xml(xml, value)
xml.attributes[name] = value.to_s
end
def fetch_value(xml)
nodes_in(xml) do |nodes|
nodes.first.value
end
end
def xpath_name
"@#{name}"
end
end
# Interal class representing XML content text binding
#
# In context:
# <element attribute="XMLAttributeRef">
# XMLTextRef
# </element>
class XMLTextRef < XMLRef # :nodoc:
delegate :cdata?, :content?, :name?, :to => :opts
private
# Updates the text in the given _xml_ block to
# the _value_ provided.
def write_xml(xml, value)
if content?
add(xml, value)
elsif name?
xml.name = value
elsif array?
value.each do |v|
add(xml.add_child(XML::Node.create(name)), v)
end
else
add(xml.add_child(XML::Node.create(name)), value)
end
end
def fetch_value(xml)
if content? || name?
value =
if content?
xml.content.to_s.strip
elsif name?
xml.name
end
if value.empty?
raise RequiredElementMissing, "#{name} from #{xml} for #{accessor}" if required?
default
else
value
end
else
nodes_in(xml) do |nodes|
if array?
nodes.collect do |e|
e.content.strip
end
else
nodes.first.content
end
end
end
end
def add(dest, value)
if cdata?
dest.add_child(XML::Node.new_cdata(value.to_s))
else
dest.content = value.to_s
end
end
end
class XMLHashRef < XMLTextRef # :nodoc:
delegate :hash, :to => :opts
def initialize(opts, inst)
super(opts, inst)
@key = opts.hash.key.to_ref(inst)
@value = opts.hash.value.to_ref(inst)
end
private
# Updates the composed XML object in the given XML block to
# the value provided.
def write_xml(xml, value)
value.each_pair do |k, v|
node = xml.add_child(XML::Node.create(hash.wrapper))
@key.update_xml(node, k)
@value.update_xml(node, v)
end
end
def fetch_value(xml)
nodes_in(xml) do |nodes|
nodes.collect do |e|
[@key.value_in(e), @value.value_in(e)]
end
end
end
def apply_blocks(vals)
unless blocks.empty?
vals.collect! do |kvp|
super(kvp)
end
end
to_hash(vals) if vals
end
def freeze(vals)
if opts.frozen?
vals.each_pair{|k, v| k.freeze; v.freeze }
vals.freeze
else
vals
end
end
def to_hash(array)
hash = array.inject({}) do |result, (k, v)|
result[k] ||= []
result[k] << v
result
end
hash.each_pair do |k, v|
hash[k] = v.first if v.size == 1
end
end
end
class XMLObjectRef < XMLTextRef # :nodoc:
delegate :type, :to => :opts
private
# Updates the composed XML object in the given XML block to
# the value provided.
def write_xml(xml, value)
if array?
value.each do |v|
xml.add_child(v.to_xml(name))
end
elsif value.is_a?(ROXML)
xml.add_child(value.to_xml(name))
else
node = XML::Node.create(name)
node.content = value.to_xml
xml.add_child(node)
end
end
def fetch_value(xml)
nodes_in(xml) do |nodes|
unless array?
instantiate(nodes.first)
else
nodes.collect do |e|
instantiate(e)
end
end
end
end
def instantiate(elem)
if type.respond_to? :from_xml
type.from_xml(elem)
else
type.new(elem)
end
end
end
end
Much like we don't conventionize or namespace explicit names, don't do so with wrappers
module ROXML
class RequiredElementMissing < Exception # :nodoc:
end
#
# Internal base class that represents an XML - Class binding.
#
class XMLRef # :nodoc:
delegate :required?, :array?, :blocks, :accessor, :default, :wrapper, :to => :opts
def initialize(opts, instance)
@opts = opts
@instance = instance
end
def to_xml
val = @instance.__send__(accessor)
opts.to_xml.respond_to?(:call) ? opts.to_xml.call(val) : val
end
def update_xml(xml, value)
returning wrap(xml) do |xml|
write_xml(xml, value)
end
end
def name
opts.name_explicit? ? opts.name : conventionize(opts.name)
end
def xpath_name
opts.name_explicit? ? name : namespacify(name)
end
def value_in(xml)
xml = XML::Node.from(xml)
value = fetch_value(xml)
value = default if value.nil?
freeze(apply_blocks(value))
end
private
attr_reader :opts
def conventionize(what)
convention ||= @instance.class.respond_to?(:roxml_naming_convention) && @instance.class.roxml_naming_convention
if !what.blank? && convention.respond_to?(:call)
URI.unescape(convention.call(URI.escape(what, /\/|::/)))
else
what
end
end
def namespacify(what)
if what.present? && namespace = @instance.class.roxml_namespace
"#{namespace}:#{what}"
else
what
end
end
def apply_blocks(val)
begin
blocks.inject(val) {|val, block| block.call(val) }
rescue Exception => ex
raise ex, "#{accessor}: #{ex.message}"
end
end
def freeze(val)
if opts.frozen?
val.each(&:freeze) if val.is_a?(Enumerable)
val.freeze
else
val
end
end
def xpath
opts.wrapper ? "#{opts.wrapper}/#{xpath_name}" : xpath_name.to_s
end
def auto_xpath
"#{namespacify(conventionize(opts.name.pluralize))}/#{xpath_name}" if array?
end
def wrap(xml)
return xml if !wrapper || xml.name == wrapper
if child = xml.children.find {|c| c.name == wrapper }
return child
end
xml.add_child(XML::Node.create(wrapper.to_s))
end
def nodes_in(xml)
vals = xml.search(xpath)
if (opts.hash? || opts.array?) && vals.empty? && !wrapper && auto_xpath
vals = xml.search(auto_xpath)
@auto_vals = !vals.empty?
end
if vals.empty?
raise RequiredElementMissing, "#{name} from #{xml} for #{accessor}" if required?
default
else
yield(vals)
end
end
end
# Interal class representing an XML attribute binding
#
# In context:
# <element attribute="XMLAttributeRef">
# XMLTextRef
# </element>
class XMLAttributeRef < XMLRef # :nodoc:
private
# Updates the attribute in the given XML block to
# the value provided.
def write_xml(xml, value)
xml.attributes[name] = value.to_s
end
def fetch_value(xml)
nodes_in(xml) do |nodes|
nodes.first.value
end
end
def xpath_name
"@#{name}"
end
end
# Interal class representing XML content text binding
#
# In context:
# <element attribute="XMLAttributeRef">
# XMLTextRef
# </element>
class XMLTextRef < XMLRef # :nodoc:
delegate :cdata?, :content?, :name?, :to => :opts
private
# Updates the text in the given _xml_ block to
# the _value_ provided.
def write_xml(xml, value)
if content?
add(xml, value)
elsif name?
xml.name = value
elsif array?
value.each do |v|
add(xml.add_child(XML::Node.create(name)), v)
end
else
add(xml.add_child(XML::Node.create(name)), value)
end
end
def fetch_value(xml)
if content? || name?
value =
if content?
xml.content.to_s.strip
elsif name?
xml.name
end
if value.empty?
raise RequiredElementMissing, "#{name} from #{xml} for #{accessor}" if required?
default
else
value
end
else
nodes_in(xml) do |nodes|
if array?
nodes.collect do |e|
e.content.strip
end
else
nodes.first.content
end
end
end
end
def add(dest, value)
if cdata?
dest.add_child(XML::Node.new_cdata(value.to_s))
else
dest.content = value.to_s
end
end
end
class XMLHashRef < XMLTextRef # :nodoc:
delegate :hash, :to => :opts
def initialize(opts, inst)
super(opts, inst)
@key = opts.hash.key.to_ref(inst)
@value = opts.hash.value.to_ref(inst)
end
private
# Updates the composed XML object in the given XML block to
# the value provided.
def write_xml(xml, value)
value.each_pair do |k, v|
node = xml.add_child(XML::Node.create(hash.wrapper))
@key.update_xml(node, k)
@value.update_xml(node, v)
end
end
def fetch_value(xml)
nodes_in(xml) do |nodes|
nodes.collect do |e|
[@key.value_in(e), @value.value_in(e)]
end
end
end
def apply_blocks(vals)
unless blocks.empty?
vals.collect! do |kvp|
super(kvp)
end
end
to_hash(vals) if vals
end
def freeze(vals)
if opts.frozen?
vals.each_pair{|k, v| k.freeze; v.freeze }
vals.freeze
else
vals
end
end
def to_hash(array)
hash = array.inject({}) do |result, (k, v)|
result[k] ||= []
result[k] << v
result
end
hash.each_pair do |k, v|
hash[k] = v.first if v.size == 1
end
end
end
class XMLObjectRef < XMLTextRef # :nodoc:
delegate :type, :to => :opts
private
# Updates the composed XML object in the given XML block to
# the value provided.
def write_xml(xml, value)
if array?
value.each do |v|
xml.add_child(v.to_xml(name))
end
elsif value.is_a?(ROXML)
xml.add_child(value.to_xml(name))
else
node = XML::Node.create(name)
node.content = value.to_xml
xml.add_child(node)
end
end
def fetch_value(xml)
nodes_in(xml) do |nodes|
unless array?
instantiate(nodes.first)
else
nodes.collect do |e|
instantiate(e)
end
end
end
end
def instantiate(elem)
if type.respond_to? :from_xml
type.from_xml(elem)
else
type.new(elem)
end
end
end
end |
module ROXML
class RequiredElementMissing < Exception # :nodoc:
end
#
# Internal base class that represents an XML - Class binding.
#
class XMLRef # :nodoc:
attr_reader :opts
delegate :required?, :array?, :accessor, :default, :wrapper, :to => :opts
def initialize(opts, instance)
@opts = opts
@instance = instance
end
def blocks
opts.blocks || []
end
def to_xml(instance)
val = instance.__send__(accessor)
opts.to_xml.respond_to?(:call) ? opts.to_xml.call(val) : val
end
def name
opts.name_explicit? ? opts.name : conventionize(opts.name)
end
def xpath_name
namespacify(name)
end
def value_in(xml)
xml = XML::Node.from(xml)
value = fetch_value(xml)
value = default if value.nil?
freeze(apply_blocks(value))
end
private
def conventionize(what)
convention ||= @instance.class.respond_to?(:roxml_naming_convention) && @instance.class.roxml_naming_convention
if !what.blank? && convention.respond_to?(:call)
URI.unescape(convention.call(URI.escape(what, /\/|::/)))
else
what
end
end
def namespacify(what)
if what.to_s.present? && !what.to_s.include?(':') && opts.namespace != false
[opts.namespace, @instance.class.roxml_namespace, @default_namespace].each do |namespace|
return opts.namespace == '*' ? "*[local-name()='#{what}']" : "#{namespace}:#{what}" if namespace
end
end
what
end
def apply_blocks(val)
begin
blocks.inject(val) {|val, block| block.call(val) }
rescue Exception => ex
raise ex, "#{accessor}: #{ex.message}"
end
end
def freeze(val)
if opts.frozen?
val.each(&:freeze) if val.is_a?(Enumerable)
val.freeze
else
val
end
end
def xpath
opts.wrapper ? "#{namespacify(opts.wrapper)}/#{xpath_name}" : xpath_name.to_s
end
def auto_wrapper
namespacify(conventionize(opts.name.pluralize))
end
def auto_xpath
"#{auto_wrapper}/#{xpath_name}" if array?
end
def several?
array?
end
def wrap(xml, opts = {:always_create => false})
wrap_with = @auto_vals ? auto_wrapper : wrapper
return xml if !wrap_with || xml.name == wrap_with
if !opts[:always_create] && (child = xml.children.find {|c| c.name == wrap_with })
return child
end
XML.add_node(xml, wrap_with.to_s)
end
def nodes_in(xml)
@default_namespace = xml.default_namespace
vals = xml.search(xpath, @instance.class.roxml_namespaces)
if several? && vals.empty? && !wrapper && auto_xpath
vals = xml.search(auto_xpath, @instance.class.roxml_namespaces)
@auto_vals = !vals.empty?
end
if vals.empty?
raise RequiredElementMissing, "#{name} from #{xml} for #{accessor}" if required?
default
elsif several?
vals.map do |val|
yield val
end
else
yield(vals.first)
end
end
end
# Interal class representing an XML attribute binding
#
# In context:
# <element attribute="XMLAttributeRef">
# XMLTextRef
# </element>
class XMLAttributeRef < XMLRef # :nodoc:
# Updates the attribute in the given XML block to
# the value provided.
def update_xml(xml, values)
if array?
values.each do |value|
wrap(xml, :always_create => true).tap do |node|
XML.set_attribute(node, name, value.to_s)
end
end
else
wrap(xml).tap do |xml|
XML.set_attribute(xml, name, values.to_s)
end
end
end
private
def fetch_value(xml)
nodes_in(xml) do |node|
node.value
end
end
def xpath_name
"@#{name}"
end
end
# Interal class representing XML content text binding
#
# In context:
# <element attribute="XMLAttributeRef">
# XMLTextRef
# </element>
class XMLTextRef < XMLRef # :nodoc:
delegate :cdata?, :content?, :name?, :to => :opts
# Updates the text in the given _xml_ block to
# the _value_ provided.
def update_xml(xml, value)
wrap(xml).tap do |xml|
if content?
add(xml, value)
elsif name?
xml.name = value
elsif array?
value.each do |v|
add(XML.add_node(xml, name), v)
end
else
add(XML.add_node(xml, name), value)
end
end
end
private
def fetch_value(xml)
if content? || name?
value =
if content?
xml.content.to_s
elsif name?
xml.name
end
if value.blank?
raise RequiredElementMissing, "#{name} from #{xml} for #{accessor}" if required?
default
else
value
end
else
nodes_in(xml) do |node|
node.content
end
end
end
def add(dest, value)
if cdata?
XML.add_cdata(dest, value.to_s)
else
XML.set_content(dest, value.to_s)
end
end
end
class XMLNameSpaceRef < XMLRef # :nodoc:
private
def fetch_value(xml)
xml.namespace.prefix
end
end
class XMLHashRef < XMLTextRef # :nodoc:
delegate :hash, :to => :opts
def initialize(opts, inst)
super(opts, inst)
@key = opts.hash.key.to_ref(inst)
@value = opts.hash.value.to_ref(inst)
end
def several?
true
end
# Updates the composed XML object in the given XML block to
# the value provided.
def update_xml(xml, value)
wrap(xml).tap do |xml|
value.each_pair do |k, v|
node = XML.add_node(xml, hash.wrapper)
@key.update_xml(node, k)
@value.update_xml(node, v)
end
end
end
private
def fetch_value(xml)
nodes_in(xml) do |node|
[@key.value_in(node), @value.value_in(node)]
end
end
def apply_blocks(vals)
unless blocks.empty?
vals.collect! do |kvp|
super(kvp)
end
end
to_hash(vals) if vals
end
def freeze(vals)
if opts.frozen?
vals.each_pair{|k, v| k.freeze; v.freeze }
vals.freeze
else
vals
end
end
def to_hash(array)
hash = array.inject({}) do |result, (k, v)|
result[k] ||= []
result[k] << v
result
end
hash.each_pair do |k, v|
hash[k] = v.first if v.size == 1
end
end
end
class XMLObjectRef < XMLTextRef # :nodoc:
delegate :sought_type, :to => :opts
# Updates the composed XML object in the given XML block to
# the value provided.
def update_xml(xml, value)
wrap(xml).tap do |xml|
params = {:name => name, :namespace => opts.namespace}
if array?
value.each do |v|
XML.add_child(xml, v.to_xml(params))
end
elsif value.is_a?(ROXML)
XML.add_child(xml, value.to_xml(params))
else
XML.add_node(xml, name).tap do |node|
XML.set_content(node, value.to_xml)
end
end
end
end
private
def fetch_value(xml)
nodes_in(xml) do |node|
if sought_type.respond_to? :from_xml
sought_type.from_xml(node)
else
sought_type.new(node)
end
end
end
end
end
Add support for :name => '*', :namespace => '*'
module ROXML
class RequiredElementMissing < Exception # :nodoc:
end
#
# Internal base class that represents an XML - Class binding.
#
class XMLRef # :nodoc:
attr_reader :opts
delegate :required?, :array?, :accessor, :default, :wrapper, :to => :opts
def initialize(opts, instance)
@opts = opts
@instance = instance
end
def blocks
opts.blocks || []
end
def to_xml(instance)
val = instance.__send__(accessor)
opts.to_xml.respond_to?(:call) ? opts.to_xml.call(val) : val
end
def name
opts.name_explicit? ? opts.name : conventionize(opts.name)
end
def xpath_name
namespacify(name)
end
def value_in(xml)
xml = XML::Node.from(xml)
value = fetch_value(xml)
value = default if value.nil?
freeze(apply_blocks(value))
end
private
def conventionize(what)
convention ||= @instance.class.respond_to?(:roxml_naming_convention) && @instance.class.roxml_naming_convention
if !what.blank? && convention.respond_to?(:call)
URI.unescape(convention.call(URI.escape(what, /\/|::/)))
else
what
end
end
def namespacify(what)
if what.to_s.present? && !what.to_s.include?(':') && opts.namespace != false
[opts.namespace, @instance.class.roxml_namespace, @default_namespace].each do |namespace|
return opts.namespace == '*' ? (what == '*' ? "*" : "*[local-name()='#{what}']") : "#{namespace}:#{what}" if namespace
end
end
what
end
def apply_blocks(val)
begin
blocks.inject(val) {|val, block| block.call(val) }
rescue Exception => ex
raise ex, "#{accessor}: #{ex.message}"
end
end
def freeze(val)
if opts.frozen?
val.each(&:freeze) if val.is_a?(Enumerable)
val.freeze
else
val
end
end
def xpath
opts.wrapper ? "#{namespacify(opts.wrapper)}/#{xpath_name}" : xpath_name.to_s
end
def auto_wrapper
namespacify(conventionize(opts.name.pluralize))
end
def auto_xpath
"#{auto_wrapper}/#{xpath_name}" if array?
end
def several?
array?
end
def wrap(xml, opts = {:always_create => false})
wrap_with = @auto_vals ? auto_wrapper : wrapper
return xml if !wrap_with || xml.name == wrap_with
if !opts[:always_create] && (child = xml.children.find {|c| c.name == wrap_with })
return child
end
XML.add_node(xml, wrap_with.to_s)
end
def nodes_in(xml)
@default_namespace = xml.default_namespace
vals = xml.search(xpath, @instance.class.roxml_namespaces)
if several? && vals.empty? && !wrapper && auto_xpath
vals = xml.search(auto_xpath, @instance.class.roxml_namespaces)
@auto_vals = !vals.empty?
end
if vals.empty?
raise RequiredElementMissing, "#{name} from #{xml} for #{accessor}" if required?
default
elsif several?
vals.map do |val|
yield val
end
else
yield(vals.first)
end
end
end
# Interal class representing an XML attribute binding
#
# In context:
# <element attribute="XMLAttributeRef">
# XMLTextRef
# </element>
class XMLAttributeRef < XMLRef # :nodoc:
# Updates the attribute in the given XML block to
# the value provided.
def update_xml(xml, values)
if array?
values.each do |value|
wrap(xml, :always_create => true).tap do |node|
XML.set_attribute(node, name, value.to_s)
end
end
else
wrap(xml).tap do |xml|
XML.set_attribute(xml, name, values.to_s)
end
end
end
private
def fetch_value(xml)
nodes_in(xml) do |node|
node.value
end
end
def xpath_name
"@#{name}"
end
end
# Interal class representing XML content text binding
#
# In context:
# <element attribute="XMLAttributeRef">
# XMLTextRef
# </element>
class XMLTextRef < XMLRef # :nodoc:
delegate :cdata?, :content?, :name?, :to => :opts
# Updates the text in the given _xml_ block to
# the _value_ provided.
def update_xml(xml, value)
wrap(xml).tap do |xml|
if content?
add(xml, value)
elsif name?
xml.name = value
elsif array?
value.each do |v|
add(XML.add_node(xml, name), v)
end
else
add(XML.add_node(xml, name), value)
end
end
end
private
def fetch_value(xml)
if content? || name?
value =
if content?
xml.content.to_s
elsif name?
xml.name
end
if value.blank?
raise RequiredElementMissing, "#{name} from #{xml} for #{accessor}" if required?
default
else
value
end
else
nodes_in(xml) do |node|
node.content
end
end
end
def add(dest, value)
if cdata?
XML.add_cdata(dest, value.to_s)
else
XML.set_content(dest, value.to_s)
end
end
end
class XMLNameSpaceRef < XMLRef # :nodoc:
private
def fetch_value(xml)
xml.namespace.prefix
end
end
class XMLHashRef < XMLTextRef # :nodoc:
delegate :hash, :to => :opts
def initialize(opts, inst)
super(opts, inst)
@key = opts.hash.key.to_ref(inst)
@value = opts.hash.value.to_ref(inst)
end
def several?
true
end
# Updates the composed XML object in the given XML block to
# the value provided.
def update_xml(xml, value)
wrap(xml).tap do |xml|
value.each_pair do |k, v|
node = XML.add_node(xml, hash.wrapper)
@key.update_xml(node, k)
@value.update_xml(node, v)
end
end
end
private
def fetch_value(xml)
nodes_in(xml) do |node|
[@key.value_in(node), @value.value_in(node)]
end
end
def apply_blocks(vals)
unless blocks.empty?
vals.collect! do |kvp|
super(kvp)
end
end
to_hash(vals) if vals
end
def freeze(vals)
if opts.frozen?
vals.each_pair{|k, v| k.freeze; v.freeze }
vals.freeze
else
vals
end
end
def to_hash(array)
hash = array.inject({}) do |result, (k, v)|
result[k] ||= []
result[k] << v
result
end
hash.each_pair do |k, v|
hash[k] = v.first if v.size == 1
end
end
end
class XMLObjectRef < XMLTextRef # :nodoc:
delegate :sought_type, :to => :opts
# Updates the composed XML object in the given XML block to
# the value provided.
def update_xml(xml, value)
wrap(xml).tap do |xml|
params = {:name => name, :namespace => opts.namespace}
if array?
value.each do |v|
XML.add_child(xml, v.to_xml(params))
end
elsif value.is_a?(ROXML)
XML.add_child(xml, value.to_xml(params))
else
XML.add_node(xml, name).tap do |node|
XML.set_content(node, value.to_xml)
end
end
end
end
private
def fetch_value(xml)
nodes_in(xml) do |node|
if sought_type.respond_to? :from_xml
sought_type.from_xml(node)
else
sought_type.new(node)
end
end
end
end
end
|
# Easily test your SSL/TLS with RSpec.
module RspecSsltls
VERSION = '0.0.8dev'
end
Bump up version to 0.0.8
# Easily test your SSL/TLS with RSpec.
module RspecSsltls
VERSION = '0.0.8'
end
|
module RSS
module DCTERMS
module BasePropertyModel
def append_features(klass)
super
return if klass.instance_of?(Module)
PropertyModel::ELEMENT_NAME_INFOS.each do |name, plural_name|
plural = plural_name || "#{name}s"
full_name = "#{PREFIX}_#{name}"
full_plural_name = "#{PREFIX}_#{plural}"
klass_name = Utils.to_class_name(name)
klass.install_must_call_validator(PREFIX, URI)
klass.install_have_children_element(name, URI, "*",
full_name, full_plural_name)
klass.module_eval(<<-EOC, *get_file_and_line_from_caller(0))
remove_method :#{full_name}
remove_method :#{full_name}=
remove_method :set_#{full_name}
def #{full_name}
@#{full_name}.first and @#{full_name}.first.value
end
alias #{to_attr_name(full_name)} #{full_name}
def #{full_name}=(new_value)
@#{full_name}[0] = Utils.new_with_value_if_need(#{klass_name}, new_value)
end
alias set_#{full_name} #{full_name}=
alias #{to_attr_name(full_name)}= #{full_name}=
alias set_#{to_attr_name(full_name)} #{full_name}=
alias #{to_attr_name(full_plural_name)} #{full_plural_name}
EOC
end
klass.module_eval(<<-EOC, *get_file_and_line_from_caller(0))
if method_defined?(:date)
alias date_without_#{PREFIX}_date= date=
def date=(value)
self.date_without_#{PREFIX}_date = value
self.#{PREFIX}_date = value
end
else
alias date #{PREFIX}_date
alias date= #{PREFIX}_date=
end
EOC
end
end
module PropertyModel
extend BaseModel
extend BasePropertyModel
TEXT_ELEMENTS = {
"contributor" => nil,
"creator" => nil,
"coverage" => nil,
"spatial" => nil,
"temporal" => nil,
"description" => nil,
"abstract" => nil,
"tableOfContents" => "tableOfContents_list",
"format" => nil,
"extent" => nil,
"medium" => nil,
"identifier" => nil,
"bibliographicCitation" => nil,
"language" => nil,
"publisher" => nil,
"relation" => nil,
"source" => nil,
"conformsTo" => nil,
"hasFormat" => nil,
"hasPart" => nil,
"hasVersion" => nil,
"isFormatOf" => nil,
"isPartOf" => nil,
"isReferencedBy" => nil,
"isReplacedBy" => nil,
"isRequiredBy" => nil,
"isVersionOf" => nil,
"references" => "references_list",
"replaces" => "replaces_list",
"requires" => "requires_list",
"rights" => "rights_list",
"accessRights" => "accessRights_list",
"license" => nil,
"subject" => nil,
"title" => nil,
"alternative" => nil,
"type" => nil,
"audience" => nil,
"educationLevel" => nil,
"mediator" => nil,
"accrualMethod" => nil,
"accrualPeriodicity" => nil,
"accrualPolicy" => nil,
"instructionalMethod" => nil,
"provenance" => nil,
"rightsHolder" => nil
}
DATE_ELEMENTS = {
"date" => "w3cdtf",
"available" => "w3cdtf",
"created" => "w3cdtf",
"dateAccepted" => "w3cdtf",
"dateCopyrighted" => "w3cdtf",
"dateSubmitted" => "w3cdtf",
"issued" => "w3cdtf",
"modified" => "w3cdtf",
"valid" => "w3cdtf"
}
ELEMENT_NAME_INFOS = PropertyModel::TEXT_ELEMENTS.to_a
PropertyModel::DATE_ELEMENTS.each do |name, |
ELEMENT_NAME_INFOS << [name, nil]
end
ELEMENTS = TEXT_ELEMENTS.keys + DATE_ELEMENTS.keys
ELEMENTS.each do |name, plural_name|
module_eval(<<-EOC, *get_file_and_line_from_caller(0))
class #{Utils.to_class_name(name)} < Element
include RSS10
content_setup
class << self
def required_prefix
PREFIX
end
def required_uri
URI
end
end
@tag_name = #{name.dump}
alias_method(:value, :content)
alias_method(:value=, :content=)
def initialize(*args)
if Utils.element_initialize_arguments?(args)
super
else
super()
self.content = args[0]
end
end
def full_name
tag_name_with_prefix(PREFIX)
end
def maker_target(target)
target.new_#{name}
end
def setup_maker_attributes(#{name})
#{name}.content = content
end
end
EOC
end
DATE_ELEMENTS.each do |name, type|
tag_name = "#{PREFIX}:#{name}"
module_eval(<<-EOC, *get_file_and_line_from_caller(0))
class #{Utils.to_class_name(name)} < Element
remove_method(:content=)
remove_method(:value=)
date_writer("content", #{type.dump}, #{tag_name.dump})
alias_method(:value=, :content=)
end
EOC
end
end
DCTERMS::PropertyModel::ELEMENTS.each do |name|
class_name = Utils.to_class_name(name)
BaseListener.install_class_name(URI, name, class_name)
end
DCTERMS::PropertyModel::ELEMENTS.collect! {|name| "#{PREFIX}_#{name}"}
end
end
Reimplement after DublinCore
module RSS
module DCTERMS
module BasePropertyModel
def append_features(klass)
super
return if klass.instance_of?(Module)
PropertyModel::ELEMENT_NAME_INFOS.each do |name, plural_name|
plural = plural_name || "#{name}s"
full_name = "#{PREFIX}_#{name}"
full_plural_name = "#{PREFIX}_#{plural}"
klass_name = "DCTERMS#{Utils.to_class_name(name)}"
klass.install_must_call_validator(PREFIX, URI)
klass.install_have_children_element(name, URI, "*",
full_name, full_plural_name)
klass.module_eval(<<-EOC, *get_file_and_line_from_caller(0))
remove_method :#{full_name}
remove_method :#{full_name}=
remove_method :set_#{full_name}
def #{full_name}
@#{full_name}.first and @#{full_name}.first.value
end
alias #{to_attr_name(full_name)} #{full_name}
def #{full_name}=(new_value)
@#{full_name}[0] = Utils.new_with_value_if_need(#{klass_name}, new_value)
end
alias set_#{full_name} #{full_name}=
alias #{to_attr_name(full_name)}= #{full_name}=
alias set_#{to_attr_name(full_name)} #{full_name}=
alias #{to_attr_name(full_plural_name)} #{full_plural_name}
EOC
end
klass.module_eval(<<-EOC, *get_file_and_line_from_caller(0))
if method_defined?(:date)
alias date_without_#{PREFIX}_date= date=
def date=(value)
self.date_without_#{PREFIX}_date = value
self.#{PREFIX}_date = value
end
else
alias date #{PREFIX}_date
alias date= #{PREFIX}_date=
end
EOC
end
end
module PropertyModel
extend BaseModel
extend BasePropertyModel
TEXT_ELEMENTS = {
"contributor" => nil,
"creator" => nil,
"coverage" => nil,
"spatial" => nil,
"temporal" => nil,
"description" => nil,
"abstract" => nil,
"tableOfContents" => "tableOfContents_list",
"format" => nil,
"extent" => nil,
"medium" => nil,
"identifier" => nil,
"bibliographicCitation" => nil,
"language" => nil,
"publisher" => nil,
"relation" => nil,
"source" => nil,
"conformsTo" => nil,
"hasFormat" => nil,
"hasPart" => nil,
"hasVersion" => nil,
"isFormatOf" => nil,
"isPartOf" => nil,
"isReferencedBy" => nil,
"isReplacedBy" => nil,
"isRequiredBy" => nil,
"isVersionOf" => nil,
"references" => "references_list",
"replaces" => "replaces_list",
"requires" => "requires_list",
"rights" => "rights_list",
"accessRights" => "accessRights_list",
"license" => nil,
"subject" => nil,
"title" => nil,
"alternative" => nil,
"type" => nil,
"audience" => nil,
"educationLevel" => nil,
"mediator" => nil,
"accrualMethod" => nil,
"accrualPeriodicity" => nil,
"accrualPolicy" => nil,
"instructionalMethod" => nil,
"provenance" => nil,
"rightsHolder" => nil
}
DATE_ELEMENTS = {
"date" => "w3cdtf",
"available" => "w3cdtf",
"created" => "w3cdtf",
"dateAccepted" => "w3cdtf",
"dateCopyrighted" => "w3cdtf",
"dateSubmitted" => "w3cdtf",
"issued" => "w3cdtf",
"modified" => "w3cdtf",
"valid" => "w3cdtf"
}
ELEMENT_NAME_INFOS = PropertyModel::TEXT_ELEMENTS.to_a
PropertyModel::DATE_ELEMENTS.each do |name, |
ELEMENT_NAME_INFOS << [name, nil]
end
ELEMENTS = TEXT_ELEMENTS.keys + DATE_ELEMENTS.keys
ELEMENTS.each do |name, plural_name|
module_eval(<<-EOC, *get_file_and_line_from_caller(0))
class DCTERMS#{Utils.to_class_name(name)} < Element
include RSS10
content_setup
class << self
def required_prefix
PREFIX
end
def required_uri
URI
end
end
@tag_name = #{name.dump}
alias_method(:value, :content)
alias_method(:value=, :content=)
def initialize(*args)
if Utils.element_initialize_arguments?(args)
super
else
super()
self.content = args[0]
end
end
def full_name
tag_name_with_prefix(PREFIX)
end
def maker_target(target)
target.new_#{name}
end
def setup_maker_attributes(#{name})
#{name}.content = content
end
end
EOC
end
DATE_ELEMENTS.each do |name, type|
tag_name = "#{PREFIX}:#{name}"
module_eval(<<-EOC, *get_file_and_line_from_caller(0))
class DCTERMS#{Utils.to_class_name(name)} < Element
remove_method(:content=)
remove_method(:value=)
date_writer("content", #{type.dump}, #{tag_name.dump})
alias_method(:value=, :content=)
end
EOC
end
end
DCTERMS::PropertyModel::ELEMENTS.each do |name|
class_name = Utils.to_class_name(name)
BaseListener.install_class_name(URI, name, class_name)
end
DCTERMS::PropertyModel::ELEMENTS.collect! {|name| "#{PREFIX}_#{name}"}
end
end
|
#!/usr/bin/ruby -w
module Rtt
module ReportGenerator
attr_accessor :data, :different_fixed
DEFAULT_FILENAME = 'rtt_report'
FORMATS_ACCEPTED = [ :csv, :pdf ]
REPORT_FIELDS = %w(Client Project Name Date Duration)
FIXED_FIELDS = %w(Client Project)
REPORT_FIELD_OUTPUT = {
'Client' => Proc.new { |task| (task.client.name) if task.present? && task.client.present? },
'Project' => Proc.new { |task| (task.project.name) if task.present? && task.project.present? },
'Name' => Proc.new { |task| task.name if task.present? },
'Date' => Proc.new { |task| task.date.strftime('%m-%d-%y') if task.present? },
'Duration' => Proc.new { |task| task.duration if task.present? }
}
def custom_user_is_defined?
current_user.present? && current_user.nickname != Rtt::User::DEFAULT_NICK
end
def fill_user_information(pdf)
pdf.cell [330, 790],
:text => current_user.full_name_and_nickname,
:width => 225, :padding => 10, :border_width => 0, :align => :right
pdf.cell [330, 770],
:text => current_user.company,
:width => 225, :padding => 10, :border_width => 0, :align => :right
pdf.cell [330, 750],
:text => current_user.location,
:width => 225, :padding => 10, :border_width => 0, :align => :right
pdf.cell [330, 730],
:text => current_user.address,
:width => 225, :padding => 10, :border_width => 0, :align => :right
pdf.cell [330, 710],
:text => current_user.phone,
:width => 225, :padding => 10, :border_width => 0, :align => :right
pdf.cell [330, 690],
:text => current_user.email,
:width => 225, :padding => 10, :border_width => 0, :align => :right
pdf.cell [330, 670],
:text => current_user.site,
:width => 225, :padding => 10, :border_width => 0, :align => :right
end
def fixed_fields_for_current_data
@fixed_fields_for_current_data ||= begin
calculate_fixed_fields_based_on_data
@data[:fixed_fields].keys + @different_fixed.keys.reject { |key| @different_fixed[key].length > 1 }
end
end
def fixed_value(field)
if @data[:fixed_fields].include? field
@data[:fixed_fields][field]
else
@different_fixed[field].first
end
end
def full_path(output_path = nil)
entered_filename = output_path || DEFAULT_FILENAME
filename, directory, extension = File.basename(entered_filename), File.dirname(entered_filename), File.extname(entered_filename)
path = directory.present? && directory != '.' && File.exists?(directory) ? directory : ENV['HOME']
ext = extension.present? ? '' : '.pdf'
"#{File.join(path, filename)}#{ext}"
end
def has_default_value?(field)
task = self.data[:rows].first
return true if task.nil?
REPORT_FIELD_OUTPUT[field].call(task) == eval("Rtt::#{field}::DEFAULT_NAME")
end
#
#
def report options = {}
raise 'Argument must be a valid Hash. Checkout: rtt usage' unless options.is_a?(Hash) || options.keys.empty?
@different_fixed ||= FIXED_FIELDS.inject({}) { |result, key| result[key] = []; result }
extension = options.keys.select { |key| FORMATS_ACCEPTED.include?(key) }.first
path = options[extension]
fixed_fields = extract_fixed_fields(options)
fixed_fields_and_values = fixed_fields.inject({}) { |hash, key| hash[key] = options[key.downcase.to_sym]; hash }
@data = { :fixed_fields => fixed_fields_and_values, :rows => query(options) }
filename_path = full_path(path)
case extension
when :pdf
report_to_pdf filename_path
when :csv
raise 'CSV format report not implemented yet'
report_to_csv path
else
raise 'Format not supported. Only csv and pdf are available for the moment.'
end
end
private
def calculate_total_hours_and_minutes(data)
data.inject([0, 0]) do |totals, task|
total_h, total_m = totals
if task[4 - fixed_fields_for_current_data.length].match(/^(\d+)h(\d+)m$/)
total_m += ($2.to_i % 60)
total_h += ($1.to_i + $2.to_i / 60)
end
[ total_h, total_m ]
end
end
def extract_fixed_fields(options)
# remove Duration as we can't filter by that
REPORT_FIELDS[0..-2].select { |field| options.include?(field.downcase.to_sym) }
end
def report_to_csv output_path
require 'fastercsv'
rescue LoadError
puts "Missing gem: Fastercsv"
end
def report_to_pdf output_path
require 'prawn'
require 'prawn/layout'
require "prawn/measurement_extensions"
columns = REPORT_FIELDS - fixed_fields_for_current_data
data = @data[:rows].map { |task| task_row_for_fields(task, columns) }
total_h, total_m = calculate_total_hours_and_minutes(data)
report_generator = self
pdf = Prawn::Document.new(:page_layout => :portrait,
:left_margin => 10.mm, # different
:right_margin => 1.cm, # units
:top_margin => 0.1.dm, # work
:bottom_margin => 0.01.m, # well
:page_size => 'A4') do
report_generator.fill_user_information(self) if report_generator.custom_user_is_defined?
move_up(140) if report_generator.custom_user_is_defined?
font_size 16
text "RTT Report"
text "=========="
move_down 40
report_generator.fixed_fields_for_current_data.each do |field|
text("#{field}: #{report_generator.fixed_value(field)}") unless report_generator.has_default_value?(field)
end
move_down(report_generator.custom_user_is_defined? ? 40 : 0)
if data.present?
table data,
:headers => columns,
:position => :left,
:border_width => 1,
:row_colors => [ 'fafafa', 'f0f0f0' ],
:font_size => 12,
:padding => 5,
:align => :left
end
move_down 20
text "Total: #{total_h}h#{total_m}m"
number_pages "Page <page> / <total>", [bounds.right - 80, 0]
render_file output_path
end
rescue LoadError
puts "Missing gem: prawn, prawn/layout or prawn/measurement_extensions"
rescue => e
puts "[rtt] Error while generating report: #{e.to_s}"
end
def calculate_fixed_fields_based_on_data
@data[:rows].each do |task|
(REPORT_FIELDS - @data[:fixed_fields].keys).each do |field|
value = REPORT_FIELD_OUTPUT[field].call(task)
@different_fixed[field] << value if FIXED_FIELDS.include?(field) && !@different_fixed[field].include?(value)
end
end
end
def task_row_for_fields(task, fields)
fields.map do |field|
REPORT_FIELD_OUTPUT[field].call(task)
end
end
end
end
Loggin the report's file path
#!/usr/bin/ruby -w
module Rtt
module ReportGenerator
attr_accessor :data, :different_fixed
DEFAULT_FILENAME = 'rtt_report'
FORMATS_ACCEPTED = [ :csv, :pdf ]
REPORT_FIELDS = %w(Client Project Name Date Duration)
FIXED_FIELDS = %w(Client Project)
REPORT_FIELD_OUTPUT = {
'Client' => Proc.new { |task| (task.client.name) if task.present? && task.client.present? },
'Project' => Proc.new { |task| (task.project.name) if task.present? && task.project.present? },
'Name' => Proc.new { |task| task.name if task.present? },
'Date' => Proc.new { |task| task.date.strftime('%m-%d-%y') if task.present? },
'Duration' => Proc.new { |task| task.duration if task.present? }
}
def custom_user_is_defined?
current_user.present? && current_user.nickname != Rtt::User::DEFAULT_NICK
end
def fill_user_information(pdf)
pdf.cell [330, 790],
:text => current_user.full_name_and_nickname,
:width => 225, :padding => 10, :border_width => 0, :align => :right
pdf.cell [330, 770],
:text => current_user.company,
:width => 225, :padding => 10, :border_width => 0, :align => :right
pdf.cell [330, 750],
:text => current_user.location,
:width => 225, :padding => 10, :border_width => 0, :align => :right
pdf.cell [330, 730],
:text => current_user.address,
:width => 225, :padding => 10, :border_width => 0, :align => :right
pdf.cell [330, 710],
:text => current_user.phone,
:width => 225, :padding => 10, :border_width => 0, :align => :right
pdf.cell [330, 690],
:text => current_user.email,
:width => 225, :padding => 10, :border_width => 0, :align => :right
pdf.cell [330, 670],
:text => current_user.site,
:width => 225, :padding => 10, :border_width => 0, :align => :right
end
def fixed_fields_for_current_data
@fixed_fields_for_current_data ||= begin
calculate_fixed_fields_based_on_data
@data[:fixed_fields].keys + @different_fixed.keys.reject { |key| @different_fixed[key].length > 1 }
end
end
def fixed_value(field)
if @data[:fixed_fields].include? field
@data[:fixed_fields][field]
else
@different_fixed[field].first
end
end
def full_path(output_path = nil)
entered_filename = output_path || DEFAULT_FILENAME
filename, directory, extension = File.basename(entered_filename), File.dirname(entered_filename), File.extname(entered_filename)
path = directory.present? && directory != '.' && File.exists?(directory) ? directory : ENV['HOME']
ext = extension.present? ? '' : '.pdf'
"#{File.join(path, filename)}#{ext}"
end
def has_default_value?(field)
task = self.data[:rows].first
return true if task.nil?
REPORT_FIELD_OUTPUT[field].call(task) == eval("Rtt::#{field}::DEFAULT_NAME")
end
#
#
def report options = {}
raise 'Argument must be a valid Hash. Checkout: rtt usage' unless options.is_a?(Hash) || options.keys.empty?
@different_fixed ||= FIXED_FIELDS.inject({}) { |result, key| result[key] = []; result }
extension = options.keys.select { |key| FORMATS_ACCEPTED.include?(key) }.first
path = options[extension]
fixed_fields = extract_fixed_fields(options)
fixed_fields_and_values = fixed_fields.inject({}) { |hash, key| hash[key] = options[key.downcase.to_sym]; hash }
@data = { :fixed_fields => fixed_fields_and_values, :rows => query(options) }
filename_path = full_path(path)
case extension
when :pdf
report_to_pdf filename_path
when :csv
raise 'CSV format report not implemented yet'
report_to_csv path
else
raise 'Format not supported. Only csv and pdf are available for the moment.'
end
end
private
def calculate_total_hours_and_minutes(data)
data.inject([0, 0]) do |totals, task|
total_h, total_m = totals
if task[4 - fixed_fields_for_current_data.length].match(/^(\d+)h(\d+)m$/)
total_m += ($2.to_i % 60)
total_h += ($1.to_i + $2.to_i / 60)
end
[ total_h, total_m ]
end
end
def extract_fixed_fields(options)
# remove Duration as we can't filter by that
REPORT_FIELDS[0..-2].select { |field| options.include?(field.downcase.to_sym) }
end
def report_to_csv output_path
require 'fastercsv'
rescue LoadError
puts "Missing gem: Fastercsv"
end
def report_to_pdf output_path
require 'prawn'
require 'prawn/layout'
require "prawn/measurement_extensions"
columns = REPORT_FIELDS - fixed_fields_for_current_data
data = @data[:rows].map { |task| task_row_for_fields(task, columns) }
total_h, total_m = calculate_total_hours_and_minutes(data)
report_generator = self
pdf = Prawn::Document.new(:page_layout => :portrait,
:left_margin => 10.mm, # different
:right_margin => 1.cm, # units
:top_margin => 0.1.dm, # work
:bottom_margin => 0.01.m, # well
:page_size => 'A4') do
report_generator.fill_user_information(self) if report_generator.custom_user_is_defined?
move_up(140) if report_generator.custom_user_is_defined?
font_size 16
text "RTT Report"
text "=========="
move_down 40
report_generator.fixed_fields_for_current_data.each do |field|
text("#{field}: #{report_generator.fixed_value(field)}") unless report_generator.has_default_value?(field)
end
move_down(report_generator.custom_user_is_defined? ? 40 : 0)
if data.present?
table data,
:headers => columns,
:position => :left,
:border_width => 1,
:row_colors => [ 'fafafa', 'f0f0f0' ],
:font_size => 12,
:padding => 5,
:align => :left
end
move_down 20
text "Total: #{total_h}h#{total_m}m"
number_pages "Page <page> / <total>", [bounds.right - 80, 0]
puts "Report saved at #{output_path}"
render_file output_path
end
rescue LoadError
puts "Missing gem: prawn, prawn/layout or prawn/measurement_extensions"
rescue => e
puts "[rtt] Error while generating report: #{e.to_s}"
end
# Scans every data row and records, per candidate fixed field, the set of
# distinct values seen, into @different_fixed (append-only, no duplicates).
# Fields already pinned in @data[:fixed_fields] are skipped; only fields
# listed in FIXED_FIELDS are tracked.
# NOTE(review): assumes @different_fixed was pre-initialized with an array
# per field — confirm against the initializer (not visible here).
def calculate_fixed_fields_based_on_data
  @data[:rows].each do |task|
    (REPORT_FIELDS - @data[:fixed_fields].keys).each do |field|
      value = REPORT_FIELD_OUTPUT[field].call(task)
      @different_fixed[field] << value if FIXED_FIELDS.include?(field) && !@different_fixed[field].include?(value)
    end
  end
end
# Projects one raw +task+ row onto the requested report +fields+, using
# the per-field extraction callables registered in REPORT_FIELD_OUTPUT.
def task_row_for_fields(task, fields)
  fields.map do |field|
    extractor = REPORT_FIELD_OUTPUT[field]
    extractor.call(task)
  end
end
end
end
|
require 'io/console'

module Rubcat
  # Pretty-printer for `adb logcat` brief-format lines: parses each line,
  # renders a colored level badge and an aligned tag column, and wraps long
  # messages to the current terminal width.
  #
  # NOTE(review): depends on String helpers (bold, black, bg_gray, bg_blue,
  # bg_green, bg_brown, bg_red, randomize_color, trim_and_rjust) that are
  # monkey-patched elsewhere in this gem — not standard library.
  class PrettyLogcat
    attr_reader :opt, :last_tag

    # Log levels in ascending severity; used for min-level filtering.
    # NOTE(review): parse_message also accepts 'S' lines, but :S is absent
    # here, so find_index would return nil for them — confirm S-level lines
    # can never reach pretty_print.
    LOG_LEVELS = [:V, :D, :I, :W, :E, :F]

    # options may override:
    #   min_level:  lowest level to print (default :V — print everything)
    #   tag_length: column width reserved for the tag (default 25)
    #   split_tags: print a blank line whenever the tag changes (default false)
    def initialize(options)
      @opt = {
        min_level: :V,
        tag_length: 25,
        split_tags: false
      }.merge options
    end

    # Splits one "L/Tag(  pid): message" logcat line into a hash with
    # :type (symbol), :tag, :pid and :message.
    # NOTE(review): raises NoMethodError on lines that do not match the
    # pattern (m is nil) — confirm the input is always brief format.
    def parse_message(message)
      m = message.match(/^([VDIWEFS])\/(.*)\((\s*[0-9]+)\):\s(.*)$/)
      {
        type: m[1].to_sym,
        tag: m[2].strip,
        pid: m[3],
        message: m[4]
      }
    end

    # Wraps the level letter in a colored badge; error-like levels (and any
    # unexpected value) fall through to the red badge.
    def colorize_type(type)
      case type
      when :V
        " #{type} ".bold.bg_gray.black
      when :D
        " #{type} ".bold.bg_blue
      when :I
        " #{type} ".bold.bg_green
      when :W
        " #{type} ".bold.bg_brown
      else
        " #{type} ".bold.bg_red
      end
    end

    # Well-known runtime tags rendered with the muted black-on-gray style.
    KNOWN_TAGS = %w{dalvikvm art dex2oat}

    # Renders the tag column. For :normal lines the tag is printed only when
    # it differs from the previous line's tag (tracked in @last_tag);
    # repeated tags become blank padding. :activity_kill / :activity_start
    # render red/green banners for ActivityManager process events.
    def format_tag(type, tag)
      if type == :normal
        unless tag == @last_tag
          @last_tag = tag
          puts if @opt[:split_tags]
          if KNOWN_TAGS.include? tag
            tag.trim_and_rjust(@opt[:tag_length]).bold.black.bg_gray
          elsif tag == "ActivityManager"
            tag.trim_and_rjust(@opt[:tag_length]).bold
          else
            tag.trim_and_rjust(@opt[:tag_length]).randomize_color.bold
          end
        else
          " " * @opt[:tag_length]
        end
      elsif type == :activity_kill
        tag.trim_and_rjust(@opt[:tag_length]).bold.bg_red
      elsif type == :activity_start
        tag.trim_and_rjust(@opt[:tag_length]).bold.bg_green
      end
    end

    # Hard-wraps a message to the terminal width, indenting continuation
    # lines past the tag column and repeating the level badge.
    def wrap_message(mes, type)
      mes.scan(/.{1,#{IO.console.winsize[1] - @opt[:tag_length] - 5}}/).join("\n#{' ' * @opt[:tag_length]} #{type} ")
    end

    # Prints a parsed message unless it falls below the configured minimum
    # level. ActivityManager "Killing"/"Start proc" messages are rendered as
    # highlighted process-lifecycle banners instead of regular lines.
    def pretty_print(mes)
      return if (LOG_LEVELS.find_index @opt[:min_level]) > (LOG_LEVELS.find_index mes[:type])
      type = colorize_type mes[:type]
      if mes[:tag] == "ActivityManager"
        if mes[:message] =~ /^Killing/
          m = mes[:message].match(/^Killing ([0-9]+):([^\s\/]+)/)
          puts
          # trailing ")" closes the "(pid N" opened inside the wrapped text
          puts "#{format_tag :activity_kill, "Killing process"} #{wrap_message(m[2] + " (pid " + m[1], "")})"
          puts
        elsif mes[:message] =~ /^Start proc/
          m = mes[:message].match(/^Start proc (.*)$/)
          puts
          puts "#{format_tag :activity_start, "Start process"} #{wrap_message(m[1], "")}"
          puts
        else
          puts "#{format_tag :normal, mes[:tag]} #{type} #{wrap_message mes[:message], type}"
        end
      else
        puts "#{format_tag :normal, mes[:tag]} #{type} #{wrap_message mes[:message], type}"
      end
    end

    # Convenience entry point: parse a raw logcat line and pretty-print it.
    def echo(mes)
      pretty_print parse_message mes
    end
  end
end
Add 'dalvikvm-heap' to KNOWN_TAGS so Dalvik heap (GC) log lines get the muted known-tag styling.
require 'io/console'

module Rubcat
  # Pretty-printer for `adb logcat` brief-format lines: parses each line,
  # renders a colored level badge and an aligned tag column, and wraps long
  # messages to the current terminal width.
  #
  # NOTE(review): depends on String helpers (bold, black, bg_gray, bg_blue,
  # bg_green, bg_brown, bg_red, randomize_color, trim_and_rjust) that are
  # monkey-patched elsewhere in this gem — not standard library.
  class PrettyLogcat
    attr_reader :opt, :last_tag

    # Log levels in ascending severity; used for min-level filtering.
    # NOTE(review): parse_message also accepts 'S' lines, but :S is absent
    # here, so find_index would return nil for them — confirm S-level lines
    # can never reach pretty_print.
    LOG_LEVELS = [:V, :D, :I, :W, :E, :F]

    # options may override:
    #   min_level:  lowest level to print (default :V — print everything)
    #   tag_length: column width reserved for the tag (default 25)
    #   split_tags: print a blank line whenever the tag changes (default false)
    def initialize(options)
      @opt = {
        min_level: :V,
        tag_length: 25,
        split_tags: false
      }.merge options
    end

    # Splits one "L/Tag(  pid): message" logcat line into a hash with
    # :type (symbol), :tag, :pid and :message.
    # NOTE(review): raises NoMethodError on lines that do not match the
    # pattern (m is nil) — confirm the input is always brief format.
    def parse_message(message)
      m = message.match(/^([VDIWEFS])\/(.*)\((\s*[0-9]+)\):\s(.*)$/)
      {
        type: m[1].to_sym,
        tag: m[2].strip,
        pid: m[3],
        message: m[4]
      }
    end

    # Wraps the level letter in a colored badge; error-like levels (and any
    # unexpected value) fall through to the red badge.
    def colorize_type(type)
      case type
      when :V
        " #{type} ".bold.bg_gray.black
      when :D
        " #{type} ".bold.bg_blue
      when :I
        " #{type} ".bold.bg_green
      when :W
        " #{type} ".bold.bg_brown
      else
        " #{type} ".bold.bg_red
      end
    end

    # Well-known runtime tags (incl. dalvikvm-heap GC lines) rendered with
    # the muted black-on-gray style.
    KNOWN_TAGS = %w{dalvikvm dalvikvm-heap art dex2oat}

    # Renders the tag column. For :normal lines the tag is printed only when
    # it differs from the previous line's tag (tracked in @last_tag);
    # repeated tags become blank padding. :activity_kill / :activity_start
    # render red/green banners for ActivityManager process events.
    def format_tag(type, tag)
      if type == :normal
        unless tag == @last_tag
          @last_tag = tag
          puts if @opt[:split_tags]
          if KNOWN_TAGS.include? tag
            tag.trim_and_rjust(@opt[:tag_length]).bold.black.bg_gray
          elsif tag == "ActivityManager"
            tag.trim_and_rjust(@opt[:tag_length]).bold
          else
            tag.trim_and_rjust(@opt[:tag_length]).randomize_color.bold
          end
        else
          " " * @opt[:tag_length]
        end
      elsif type == :activity_kill
        tag.trim_and_rjust(@opt[:tag_length]).bold.bg_red
      elsif type == :activity_start
        tag.trim_and_rjust(@opt[:tag_length]).bold.bg_green
      end
    end

    # Hard-wraps a message to the terminal width, indenting continuation
    # lines past the tag column and repeating the level badge.
    def wrap_message(mes, type)
      mes.scan(/.{1,#{IO.console.winsize[1] - @opt[:tag_length] - 5}}/).join("\n#{' ' * @opt[:tag_length]} #{type} ")
    end

    # Prints a parsed message unless it falls below the configured minimum
    # level. ActivityManager "Killing"/"Start proc" messages are rendered as
    # highlighted process-lifecycle banners instead of regular lines.
    def pretty_print(mes)
      return if (LOG_LEVELS.find_index @opt[:min_level]) > (LOG_LEVELS.find_index mes[:type])
      type = colorize_type mes[:type]
      if mes[:tag] == "ActivityManager"
        if mes[:message] =~ /^Killing/
          m = mes[:message].match(/^Killing ([0-9]+):([^\s\/]+)/)
          puts
          # trailing ")" closes the "(pid N" opened inside the wrapped text
          puts "#{format_tag :activity_kill, "Killing process"} #{wrap_message(m[2] + " (pid " + m[1], "")})"
          puts
        elsif mes[:message] =~ /^Start proc/
          m = mes[:message].match(/^Start proc (.*)$/)
          puts
          puts "#{format_tag :activity_start, "Start process"} #{wrap_message(m[1], "")}"
          puts
        else
          puts "#{format_tag :normal, mes[:tag]} #{type} #{wrap_message mes[:message], type}"
        end
      else
        puts "#{format_tag :normal, mes[:tag]} #{type} #{wrap_message mes[:message], type}"
      end
    end

    # Convenience entry point: parse a raw logcat line and pretty-print it.
    def echo(mes)
      pretty_print parse_message mes
    end
  end
end
|
# -*- encoding : utf-8 -*-

# Application-level environment/configuration helpers exposed as class
# methods. Expects `Application.root` to be defined elsewhere.
class Application
  class << self
    # tmp directory under the application root (memoized).
    def tmp_dir
      @tmp_dir ||= File.join(root, %w{tmp})
    end

    # log directory under the application root (memoized).
    def logger_dir
      @logger_dir ||= File.join(root, %w{log})
    end

    # Resolves the environment name (memoized): APP_ENV, then RAILS_ENV,
    # then the contents of config/environment.current (written e.g. by
    # capistrano), finally falling back to 'development'.
    def env
      @env ||= begin
        env = ENV['APP_ENV'] || ENV['RAILS_ENV']
        # if not specify env, try find file with env config/environment.current
        # which created this file by a capistrano, by example
        unless env
          path = File.join(root, %w{ config environment.current })
          # File.exists? was removed in Ruby 3.2; File.exist? is the
          # supported spelling.
          if File.exist?(path)
            env = File.read(path)
            env.strip!
          end
        end
        env ||= 'development'
        env
      end
    end
    alias :environment :env

    # Memoized logger: appends to log/ruby-app.log in the test environment,
    # writes to STDERR otherwise. Level comes from App.config.log_level.
    def logger
      @logger ||= begin
        file = (env == 'test') ? File.open(File.join(logger_dir, 'ruby-app.log'), "a") : STDERR
        LocalLogger.new(file).tap do |l|
          l.level = App.config.log_level
        end
      end
    end

    # Configuration object used by the application.
    def config
      CommonConfig
    end

    # Short identifier for this application.
    def identifier
      "ruby-app"
    end

    # Extra rake task paths registered by the application (memoized).
    def rake_paths
      @rake_paths ||= []
    end

    # Initializer file paths registered by the application (memoized).
    def initializer_paths
      @initializer_paths ||= []
    end

    # Bundler group to load; intentionally returns nil by default.
    def bundler_group
    end
  end
end

App = Application unless defined?(App)
# Fix logger name: derive the test-environment log file name from the application class name instead of the hard-coded 'ruby-app'.
# -*- encoding : utf-8 -*-

# Application-level environment/configuration helpers exposed as class
# methods. Expects `Application.root` to be defined elsewhere.
class Application
  class << self
    # tmp directory under the application root (memoized).
    def tmp_dir
      @tmp_dir ||= File.join(root, %w{tmp})
    end

    # log directory under the application root (memoized).
    def logger_dir
      @logger_dir ||= File.join(root, %w{log})
    end

    # Resolves the environment name (memoized): APP_ENV, then RAILS_ENV,
    # then the contents of config/environment.current (written e.g. by
    # capistrano), finally falling back to 'development'.
    def env
      @env ||= begin
        env = ENV['APP_ENV'] || ENV['RAILS_ENV']
        # if not specify env, try find file with env config/environment.current
        # which created this file by a capistrano, by example
        unless env
          path = File.join(root, %w{ config environment.current })
          # File.exists? was removed in Ruby 3.2; File.exist? is the
          # supported spelling.
          if File.exist?(path)
            env = File.read(path)
            env.strip!
          end
        end
        env ||= 'development'
        env
      end
    end
    alias :environment :env

    # Memoized logger: in the test environment appends to a log file named
    # after this class (`name` => "Application", so subclasses get their
    # own file; the inline rescue falls back to 'ruby-app'); writes to
    # STDERR otherwise. Level comes from App.config.log_level.
    def logger
      @logger ||= begin
        file = (env == 'test') ? File.open(File.join(logger_dir, "#{name rescue 'ruby-app'}.log"), "a") : STDERR
        LocalLogger.new(file).tap do |l|
          l.level = App.config.log_level
        end
      end
    end

    # Configuration object used by the application.
    def config
      CommonConfig
    end

    # Short identifier for this application.
    def identifier
      "ruby-app"
    end

    # Extra rake task paths registered by the application (memoized).
    def rake_paths
      @rake_paths ||= []
    end

    # Initializer file paths registered by the application (memoized).
    def initializer_paths
      @initializer_paths ||= []
    end

    # Bundler group to load; intentionally returns nil by default.
    def bundler_group
    end
  end
end

App = Application unless defined?(App)
|
module RubyFeatures
  # One named feature: gathers apply_to blocks while its body is evaluated
  # and hands them over to Mixins when the feature is applied (at most once).
  class Single
    attr_reader :name, :applied, :apply_to_blocks

    alias applied? applied

    # Validates the feature name against the allowed character set and
    # evaluates the feature body (if given) in this instance's context.
    # Raises NameError for an invalid name.
    def initialize(name, feature_body)
      @name = name = name.to_s
      unless name.match(/^[\/_a-z\d]+$/)
        raise NameError.new("Wrong feature name: #{name}")
      end
      @apply_to_blocks = {}
      @applied = false
      instance_eval(&feature_body) if feature_body
    end

    # Builds and applies the feature's mixins exactly once; later calls
    # are no-ops. The collected blocks are released afterwards.
    def apply
      return if applied?
      Mixins.build_and_apply!(self)
      @apply_to_blocks = nil
      @applied = true
    end

    private

    # Registers a block to run against +target+ when the feature is applied.
    def apply_to(target, &block)
      key = target.to_s
      blocks = (@apply_to_blocks[key] ||= [])
      blocks.push(block)
    end
  end
end
Refactoring: collapse the two-step initialize-then-append of the apply_to block list into a single expression.
module RubyFeatures
  # A single named feature. Collects apply_to blocks at definition time;
  # applying the feature (once) hands them to Mixins and frees them.
  class Single
    attr_reader :name, :applied, :apply_to_blocks

    alias applied? applied

    # Coerces and validates the feature name (lowercase letters, digits,
    # underscores and slashes only — NameError otherwise), then evaluates
    # the optional feature body in the context of this instance.
    def initialize(name, feature_body)
      @name = name = name.to_s
      raise NameError.new("Wrong feature name: #{name}") unless name.match(/^[\/_a-z\d]+$/)
      @apply_to_blocks = {}
      @applied = false
      instance_eval(&feature_body) if feature_body
    end

    # Applies the feature exactly once; subsequent calls do nothing.
    def apply
      return if applied?
      Mixins.build_and_apply!(self)
      @apply_to_blocks = nil
      @applied = true
    end

    private

    # Queues +block+ under the stringified +target+ for later application.
    def apply_to(target, &block)
      bucket = @apply_to_blocks[target.to_s] ||= []
      bucket << block
    end
  end
end
|
require "run_rabbit_run"

# Rake tasks for driving RunRabbitRun (rrr): master process control,
# RabbitMQ queue maintenance, and worker start/stop/deploy over AMQP.
namespace :rrr do

  desc 'kills all processes with the name RunRabbitRun only on UNIX'
  task :kill do
    # Greps the process table for ruby processes running rrr and kills them.
    system("kill `ps -ef | grep ruby.rrr | grep -v grep | awk '{print $2}'`")
  end

  desc 'delete all of the queues'
  task :reset do
    # Resetting rabbitmq wipes all queues/exchanges; needs rabbitmqctl.
    `rabbitmqctl stop_app`
    `rabbitmqctl reset`
    `rabbitmqctl start_app`
  end

  desc 'Starts master'
  task start: [ :config ] do | t, args |
    RRR::Processes::MasterRunner.start
  end

  desc 'Stops master'
  task stop: [ :config ] do | t, args |
    RRR::Processes::MasterRunner.stop
  end

  desc 'Starts master and system workers'
  task boot: [ :config ] do | t, args |
    Rake::Task["rrr:start"].execute
    Rake::Task["rrr:worker:start"].execute(Rake::TaskArguments.new([:path], [ 'lib/workers' ]))
  end

  desc 'Stops master, resets rabbitmq and boots app'
  task reload: [ :config ] do | t, args |
    Rake::Task["rrr:stop"].execute
    Rake::Task["rrr:reset"].execute
    Rake::Task["rrr:boot"].execute
  end

  namespace :worker do

    desc 'Sends command to the master to start the worker'
    task :start, [ :path ] => [ :config ] do | t, args |
      raise 'Please specify path to worker(s)' unless args[:path]
      # File.exists? was removed in Ruby 3.2 — File.exist? is the
      # supported spelling.
      raise 'Path you giving is not existing' unless File.exist? args[:path]
      # A directory expands to every .rb file beneath it; a file is used as-is.
      files = File.directory?(args[:path]) ? Dir["#{args[:path]}/**/*.rb"] : [ args[:path] ]

      EM.run do
        RRR::Amqp.start
        queue = RRR::Amqp::Queue.new("#{RRR.config[:env]}.system.worker.start", durable: true)

        # Publish one worker definition at a time: each publish callback
        # triggers the next file; the reactor stops when none remain or on
        # the first error.
        send_message = Proc.new do
          begin
            file = files.shift
            if file
              puts "Starting [#{file}]"
              worker_code = File.read(file)
              # NOTE(review): eval executes arbitrary code from the worker
              # file — only acceptable for trusted worker definitions.
              worker = eval(worker_code)
              queue.notify( name: worker.name, capacity: worker.settings[:capacity], code: worker_code, &send_message )
            else
              RRR::Amqp.stop(0)
            end
          rescue => e
            puts e.message
            puts e.backtrace.join("\n")
            RRR::Amqp.stop(0)
          end
        end

        send_message.call
      end
    end

    desc 'Sends worker code to the loadbalancer'
    task :deploy, [ :path ] => [ :config ] do | t, args |
      raise 'Please specify path to worker(s)' unless args[:path]
      # File.exist? (File.exists? was removed in Ruby 3.2).
      raise 'Path you giving is not existing' unless File.exist? args[:path]
      files = File.directory?(args[:path]) ? Dir["#{args[:path]}/**/*.rb"] : [ args[:path] ]

      EM.run do
        RRR::Amqp.start
        queue = RRR::Amqp::Queue.new("#{RRR.config[:env]}.system.loadbalancer", durable: true)

        # Push each worker file to the loadbalancer queue, one per publish
        # callback; stop the reactor when done.
        send_message = Proc.new do
          file = files.shift
          if file
            worker_code = File.read file
            # NOTE(review): eval of worker files — trusted input only.
            worker = eval worker_code
            puts "Sending [#{file}]"
            queue.notify( action: :push, worker_name: worker.name, code: worker_code, &send_message )
          else
            RRR::Amqp.stop(0)
          end
        end

        send_message.call
      end
    end

    desc 'Sends command to the master to stop the worker'
    task :stop, [ :name ] => [ :config ] do | t, args |
      raise 'Please specify name for worker' unless args[:name]

      EM.run do
        RRR::Amqp.start
        queue = RRR::Amqp::Queue.new("#{RRR.config[:env]}.system.worker.stop", durable: true)
        queue.notify( name: args[:name] ) do
          RRR::Amqp.stop(0)
        end
      end
    end

    desc 'Runs the worker for master'
    task :run, [ :master_name, :worker_id, :path ] => [ :config ] do | t, args |
      raise 'Please specify master_name' unless args[:master_name]
      raise 'Please specify worker_id' unless args[:worker_id]
      raise 'Please specify path to worker' unless args[:path]

      RRR::Processes::WorkerRunner.start(args[:master_name], args[:worker_id], args[:path])
    end
  end

  task :config do
    # Load rrr configuration relative to the directory rake was invoked from.
    RRR.load_config(Rake.original_dir)
  end
end
Fixes boot script: resolve the workers directory relative to this file instead of relying on the current working directory.
require "run_rabbit_run"

# Rake tasks for driving RunRabbitRun (rrr): master process control,
# RabbitMQ queue maintenance, and worker start/stop/deploy over AMQP.
namespace :rrr do

  desc 'kills all processes with the name RunRabbitRun only on UNIX'
  task :kill do
    # Greps the process table for ruby processes running rrr and kills them.
    system("kill `ps -ef | grep ruby.rrr | grep -v grep | awk '{print $2}'`")
  end

  desc 'delete all of the queues'
  task :reset do
    # Resetting rabbitmq wipes all queues/exchanges; needs rabbitmqctl.
    `rabbitmqctl stop_app`
    `rabbitmqctl reset`
    `rabbitmqctl start_app`
  end

  desc 'Starts master'
  task start: [ :config ] do | t, args |
    RRR::Processes::MasterRunner.start
  end

  desc 'Stops master'
  task stop: [ :config ] do | t, args |
    RRR::Processes::MasterRunner.stop
  end

  desc 'Starts master and system workers'
  task boot: [ :config ] do | t, args |
    Rake::Task["rrr:start"].execute
    # Resolve the workers directory relative to this file, not the cwd.
    Rake::Task["rrr:worker:start"].execute(Rake::TaskArguments.new([:path], [ File.expand_path('../../workers', __FILE__) ]))
  end

  desc 'Stops master, resets rabbitmq and boots app'
  task reload: [ :config ] do | t, args |
    Rake::Task["rrr:stop"].execute
    Rake::Task["rrr:reset"].execute
    Rake::Task["rrr:boot"].execute
  end

  namespace :worker do

    desc 'Sends command to the master to start the worker'
    task :start, [ :path ] => [ :config ] do | t, args |
      raise 'Please specify path to worker(s)' unless args[:path]
      # File.exists? was removed in Ruby 3.2 — File.exist? is the
      # supported spelling.
      raise 'Path you giving is not existing' unless File.exist? args[:path]
      # A directory expands to every .rb file beneath it; a file is used as-is.
      files = File.directory?(args[:path]) ? Dir["#{args[:path]}/**/*.rb"] : [ args[:path] ]

      EM.run do
        RRR::Amqp.start
        queue = RRR::Amqp::Queue.new("#{RRR.config[:env]}.system.worker.start", durable: true)

        # Publish one worker definition at a time: each publish callback
        # triggers the next file; the reactor stops when none remain or on
        # the first error.
        send_message = Proc.new do
          begin
            file = files.shift
            if file
              puts "Starting [#{file}]"
              worker_code = File.read(file)
              # NOTE(review): eval executes arbitrary code from the worker
              # file — only acceptable for trusted worker definitions.
              worker = eval(worker_code)
              queue.notify( name: worker.name, capacity: worker.settings[:capacity], code: worker_code, &send_message )
            else
              RRR::Amqp.stop(0)
            end
          rescue => e
            puts e.message
            puts e.backtrace.join("\n")
            RRR::Amqp.stop(0)
          end
        end

        send_message.call
      end
    end

    desc 'Sends worker code to the loadbalancer'
    task :deploy, [ :path ] => [ :config ] do | t, args |
      raise 'Please specify path to worker(s)' unless args[:path]
      # File.exist? (File.exists? was removed in Ruby 3.2).
      raise 'Path you giving is not existing' unless File.exist? args[:path]
      files = File.directory?(args[:path]) ? Dir["#{args[:path]}/**/*.rb"] : [ args[:path] ]

      EM.run do
        RRR::Amqp.start
        queue = RRR::Amqp::Queue.new("#{RRR.config[:env]}.system.loadbalancer", durable: true)

        # Push each worker file to the loadbalancer queue, one per publish
        # callback; stop the reactor when done.
        send_message = Proc.new do
          file = files.shift
          if file
            worker_code = File.read file
            # NOTE(review): eval of worker files — trusted input only.
            worker = eval worker_code
            puts "Sending [#{file}]"
            queue.notify( action: :push, worker_name: worker.name, code: worker_code, &send_message )
          else
            RRR::Amqp.stop(0)
          end
        end

        send_message.call
      end
    end

    desc 'Sends command to the master to stop the worker'
    task :stop, [ :name ] => [ :config ] do | t, args |
      raise 'Please specify name for worker' unless args[:name]

      EM.run do
        RRR::Amqp.start
        queue = RRR::Amqp::Queue.new("#{RRR.config[:env]}.system.worker.stop", durable: true)
        queue.notify( name: args[:name] ) do
          RRR::Amqp.stop(0)
        end
      end
    end

    desc 'Runs the worker for master'
    task :run, [ :master_name, :worker_id, :path ] => [ :config ] do | t, args |
      raise 'Please specify master_name' unless args[:master_name]
      raise 'Please specify worker_id' unless args[:worker_id]
      raise 'Please specify path to worker' unless args[:path]

      RRR::Processes::WorkerRunner.start(args[:master_name], args[:worker_id], args[:path])
    end
  end

  task :config do
    # Load rrr configuration relative to the directory rake was invoked from.
    RRR.load_config(Rake.original_dir)
  end
end
|
#--
# Copyright (c) 2005-2012, John Mettraux, jmettraux@gmail.com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Made in Japan.
#++
require 'sequel'
require 'ruote/storage/base'
require 'ruote/sequel/version'
module Ruote
module Sequel
# Creates the 'documents' table necessary for this storage.
#
# If re_create is set to true, it will destroy any previous 'documents'
# table and create it. If false (default) then the table will be created
# if it doesn't already exist.
#
# It's also possible to change the default table_name from 'documents' to
# something else with the optional third parameter
#
def self.create_table(sequel, re_create=false, table_name='documents')

  m = re_create ? :create_table! : :create_table?

  sequel.send(m, table_name.to_sym) do
    # This block is instance-evaled by Sequel's schema generator, so the
    # column order below is the physical column order of the table.
    String :ide, :size => 255, :null => false     # doc '_id'
    Integer :rev, :null => false                  # doc revision
    String :typ, :size => 55, :null => false      # doc 'type'
    String :doc, :text => true, :null => false    # JSON-encoded doc body
    String :wfid, :size => 255, :index => true    # workflow instance id
    String :participant_name, :size => 512
    primary_key [ :ide, :rev, :typ ]
  end
end
#
# A Sequel storage implementation for ruote >= 2.2.0.
#
#   require 'rubygems'
#   require 'json' # gem install json
#   require 'ruote'
#   require 'ruote-sequel' # gem install ruote-sequel
#
#   sequel = Sequel.connect('postgres://localhost/ruote_test')
#   #sequel = Sequel.connect('mysql://root:root@localhost/ruote_test')
#
#   opts = { 'remote_definition_allowed' => true }
#
#   engine = Ruote::Engine.new(
#     Ruote::Worker.new(
#       Ruote::Sequel::Storage.new(sequel, opts)))
#
#   # ...
#
class Storage

  include Ruote::StorageBase

  # The underlying Sequel::Database instance
  #
  attr_reader :sequel

  # Wraps a Sequel database. The backing table defaults to :documents and
  # may be overridden via the 'sequel_table_name' option.
  #
  def initialize(sequel, options={})

    @sequel = sequel
    #@options = options
    @table = (options['sequel_table_name'] || :documents).to_sym

    replace_engine_configuration(options)
  end

  def put_msg(action, options)

    # put_msg is a unique action, no need for all the complexity of put

    do_insert(prepare_msg_doc(action, options), 1)

    nil
  end

  def put_schedule(flavour, owner_fei, s, msg)

    # put_schedule is a unique action, no need for all the complexity of put

    doc = prepare_schedule_doc(flavour, owner_fei, s, msg)

    return nil unless doc

    do_insert(doc, 1)

    doc['_id']
  end

  # Puts a document, with optimistic concurrency on the 'rev' column:
  # inserts at _rev + 1, then prunes older revisions. Returns nil on
  # success, true when the doc is gone, or the currently stored doc when
  # the caller's _rev is stale.
  #
  def put(doc, opts={})

    if doc['_rev']

      d = get(doc['type'], doc['_id'])

      return true unless d
      return d if d['_rev'] != doc['_rev']
        # failures
    end

    nrev = doc['_rev'].to_i + 1

    begin

      do_insert(doc, nrev)

    rescue ::Sequel::DatabaseError => de

      # insert failed — presumably an (ide, rev, typ) primary-key
      # collision with a concurrent writer; TODO confirm no other
      # DatabaseError can land here

      return (get(doc['type'], doc['_id']) || true)
        # failure
    end

    # drop the older revisions of this document

    @sequel[@table].where(
      :typ => doc['type'], :ide => doc['_id']
    ).filter { rev < nrev }.delete

    doc['_rev'] = nrev if opts[:update_rev]

    nil
      # success
  end

  # Returns the latest revision of the document (JSON-decoded), or nil.
  #
  def get(type, key)

    d = do_get(type, key)

    d ? Rufus::Json.decode(d[:doc]) : nil
  end

  # Deletes the given revision of a document. Returns nil on success, or
  # the current doc / true when that exact revision no longer exists.
  #
  def delete(doc)

    raise ArgumentError.new('no _rev for doc') unless doc['_rev']

    count = @sequel[@table].where(
      :ide => doc['_id'], :typ => doc['type'], :rev => doc['_rev'].to_i
    ).delete

    return (get(doc['type'], doc['_id']) || true) if count < 1
      # failure

    nil
      # success
  end

  # Fetches documents of a type, optionally narrowed by wfid key(s).
  # String keys filter at the SQL level; Regexp keys are applied in Ruby
  # after decoding. Honors :count, :descending, :limit and :skip.
  #
  def get_many(type, key=nil, opts={})

    ds = @sequel[@table].where(:typ => type)

    keys = key ? Array(key) : nil
    ds = ds.filter(:wfid => keys) if keys && keys.first.is_a?(String)

    return ds.all.size if opts[:count]

    # :ide.asc / :ide.desc rely on Sequel's symbol core extensions

    ds = ds.order(
      *(opts[:descending] ? [ :ide.desc, :rev.desc ] : [ :ide.asc, :rev.asc ])
    ).limit(
      opts[:limit], opts[:skip]
    )

    docs = ds.all
    docs = select_last_revs(docs, opts[:descending])
    docs = docs.collect { |d| Rufus::Json.decode(d[:doc]) }

    keys && keys.first.is_a?(Regexp) ?
      docs.select { |doc| keys.find { |key| key.match(doc['_id']) } } :
      docs

    # (pass on the dataset.filter(:wfid => /regexp/) for now
    # since we have potentially multiple keys)
  end

  # Returns all the ids of the documents of a given type.
  #
  def ids(type)

    @sequel[@table].where(:typ => type).collect { |d| d[:ide] }.uniq.sort
  end

  # Nukes all the documents in this storage.
  #
  def purge!

    @sequel[@table].delete
  end

  # Returns a string representation the current content of the storage for
  # a given type.
  #
  def dump(type)

    "=== #{type} ===\n" +
      get_many(type).map { |h| " #{h['_id']} => #{h.inspect}" }.join("\n")
  end

  # Calls #disconnect on the db. According to Sequel's doc, it closes
  # all the idle connections in the pool (not the active ones).
  #
  def shutdown

    @sequel.disconnect
  end

  # Grrr... I should sort the mess between close and shutdown...
  # Tests vs production :-(
  #
  def close

    @sequel.disconnect
  end

  # Mainly used by ruote's test/unit/ut_17_storage.rb
  #
  def add_type(type)

    # does nothing, types are differentiated by the 'typ' column
  end

  # Nukes a db type and reputs it (losing all the documents that were in it).
  #
  def purge_type!(type)

    @sequel[@table].where(:typ => type).delete
  end

  # A provision made for workitems, allow to query them directly by
  # participant name.
  #
  def by_participant(type, participant_name, opts={})

    raise NotImplementedError if type != 'workitems'

    docs = @sequel[@table].where(
      :typ => type, :participant_name => participant_name
    ).order(
      :ide.asc, :rev.asc
    ).limit(
      opts[:limit], opts[:offset] || opts[:skip]
    )

    docs = select_last_revs(docs)

    opts[:count] ?
      docs.size : docs.map { |d| Ruote::Workitem.from_json(d[:doc]) }
  end

  # Querying workitems by field (warning, goes deep into the JSON structure)
  #
  def by_field(type, field, value, opts={})

    raise NotImplementedError if type != 'workitems'

    # match '"field":value' (or '"field":' when no value given) inside
    # the raw JSON text via SQL LIKE

    lk = [ '%"', field, '":' ]
    lk.push(Rufus::Json.encode(value)) if value
    lk.push('%')

    docs = @sequel[@table].where(
      :typ => type
    ).filter(
      :doc.like(lk.join)
    ).order(
      :ide.asc, :rev.asc
    ).limit(opts[:limit], opts[:skip] || opts[:offset])

    docs = select_last_revs(docs)

    opts[:count] ?
      docs.size : docs.map { |d| Ruote::Workitem.from_json(d[:doc]) }
  end

  # Runs a workitem query described by a criteria hash: 'count', 'limit',
  # 'offset'/'skip', 'wfid', 'participant_name'/'participant', plus any
  # remaining field/value pairs matched against the raw JSON text.
  #
  def query_workitems(criteria)

    ds = @sequel[@table].where(:typ => 'workitems')

    count = criteria.delete('count')

    limit = criteria.delete('limit')
    offset = criteria.delete('offset') || criteria.delete('skip')

    ds = ds.limit(limit, offset)

    wfid =
      criteria.delete('wfid')
    pname =
      criteria.delete('participant_name') || criteria.delete('participant')

    ds = ds.filter(:ide.like("%!#{wfid}")) if wfid
    ds = ds.filter(:participant_name => pname) if pname

    criteria.collect do |k, v|
      ds = ds.filter(:doc.like("%\"#{k}\":#{Rufus::Json.encode(v)}%"))
    end

    ds = select_last_revs(ds.all)

    count ? ds.size : ds.collect { |d| Ruote::Workitem.from_json(d[:doc]) }
  end

  protected

  # Inserts one row for the doc at the given revision, embedding '_rev'
  # and 'put_at' into the JSON-encoded body.
  #
  def do_insert(doc, rev)

    @sequel[@table].insert(
      :ide => doc['_id'],
      :rev => rev,
      :typ => doc['type'],
      :doc => Rufus::Json.encode(doc.merge(
        '_rev' => rev,
        'put_at' => Ruote.now_to_utc_s)),
      :wfid => extract_wfid(doc),
      :participant_name => doc['participant_name']
    )
  end

  # The wfid sits either at the top of the doc or nested inside the fei.
  #
  def extract_wfid(doc)

    doc['wfid'] || (doc['fei'] ? doc['fei']['wfid'] : nil)
  end

  # Fetches the highest-revision row for (type, key), or nil.
  #
  def do_get(type, key)

    @sequel[@table].where(
      :typ => type, :ide => key
    ).reverse_order(:rev).first
  end

  # Keeps only the last row seen per :ide (given rev-ascending input this
  # is the highest revision), sorted by :ide; reversed when requested.
  #
  def select_last_revs(docs, reverse=false)

    docs = docs.inject({}) { |h, doc|
      h[doc[:ide]] = doc
      h
    }.values.sort_by { |h|
      h[:ide]
    }

    reverse ? docs.reverse : docs
  end
end
end
end
Use the #dump implementation provided by Ruote::StorageBase instead of duplicating it in this storage.
#--
# Copyright (c) 2005-2012, John Mettraux, jmettraux@gmail.com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Made in Japan.
#++
require 'sequel'
require 'ruote/storage/base'
require 'ruote/sequel/version'
module Ruote
module Sequel
# Creates the 'documents' table necessary for this storage.
#
# If re_create is set to true, it will destroy any previous 'documents'
# table and create it. If false (default) then the table will be created
# if it doesn't already exist.
#
# It's also possible to change the default table_name from 'documents' to
# something else with the optional third parameter
#
def self.create_table(sequel, re_create=false, table_name='documents')

  m = re_create ? :create_table! : :create_table?

  sequel.send(m, table_name.to_sym) do
    # This block is instance-evaled by Sequel's schema generator, so the
    # column order below is the physical column order of the table.
    String :ide, :size => 255, :null => false     # doc '_id'
    Integer :rev, :null => false                  # doc revision
    String :typ, :size => 55, :null => false      # doc 'type'
    String :doc, :text => true, :null => false    # JSON-encoded doc body
    String :wfid, :size => 255, :index => true    # workflow instance id
    String :participant_name, :size => 512
    primary_key [ :ide, :rev, :typ ]
  end
end
#
# A Sequel storage implementation for ruote >= 2.2.0.
#
#   require 'rubygems'
#   require 'json' # gem install json
#   require 'ruote'
#   require 'ruote-sequel' # gem install ruote-sequel
#
#   sequel = Sequel.connect('postgres://localhost/ruote_test')
#   #sequel = Sequel.connect('mysql://root:root@localhost/ruote_test')
#
#   opts = { 'remote_definition_allowed' => true }
#
#   engine = Ruote::Engine.new(
#     Ruote::Worker.new(
#       Ruote::Sequel::Storage.new(sequel, opts)))
#
#   # ...
#
class Storage

  include Ruote::StorageBase

  # The underlying Sequel::Database instance
  #
  attr_reader :sequel

  # Wraps a Sequel database. The backing table defaults to :documents and
  # may be overridden via the 'sequel_table_name' option.
  #
  def initialize(sequel, options={})

    @sequel = sequel
    #@options = options
    @table = (options['sequel_table_name'] || :documents).to_sym

    replace_engine_configuration(options)
  end

  def put_msg(action, options)

    # put_msg is a unique action, no need for all the complexity of put

    do_insert(prepare_msg_doc(action, options), 1)

    nil
  end

  def put_schedule(flavour, owner_fei, s, msg)

    # put_schedule is a unique action, no need for all the complexity of put

    doc = prepare_schedule_doc(flavour, owner_fei, s, msg)

    return nil unless doc

    do_insert(doc, 1)

    doc['_id']
  end

  # Puts a document, with optimistic concurrency on the 'rev' column:
  # inserts at _rev + 1, then prunes older revisions. Returns nil on
  # success, true when the doc is gone, or the currently stored doc when
  # the caller's _rev is stale.
  #
  def put(doc, opts={})

    if doc['_rev']

      d = get(doc['type'], doc['_id'])

      return true unless d
      return d if d['_rev'] != doc['_rev']
        # failures
    end

    nrev = doc['_rev'].to_i + 1

    begin

      do_insert(doc, nrev)

    rescue ::Sequel::DatabaseError => de

      # insert failed — presumably an (ide, rev, typ) primary-key
      # collision with a concurrent writer; TODO confirm no other
      # DatabaseError can land here

      return (get(doc['type'], doc['_id']) || true)
        # failure
    end

    # drop the older revisions of this document

    @sequel[@table].where(
      :typ => doc['type'], :ide => doc['_id']
    ).filter { rev < nrev }.delete

    doc['_rev'] = nrev if opts[:update_rev]

    nil
      # success
  end

  # Returns the latest revision of the document (JSON-decoded), or nil.
  #
  def get(type, key)

    d = do_get(type, key)

    d ? Rufus::Json.decode(d[:doc]) : nil
  end

  # Deletes the given revision of a document. Returns nil on success, or
  # the current doc / true when that exact revision no longer exists.
  #
  def delete(doc)

    raise ArgumentError.new('no _rev for doc') unless doc['_rev']

    count = @sequel[@table].where(
      :ide => doc['_id'], :typ => doc['type'], :rev => doc['_rev'].to_i
    ).delete

    return (get(doc['type'], doc['_id']) || true) if count < 1
      # failure

    nil
      # success
  end

  # Fetches documents of a type, optionally narrowed by wfid key(s).
  # String keys filter at the SQL level; Regexp keys are applied in Ruby
  # after decoding. Honors :count, :descending, :limit and :skip.
  #
  def get_many(type, key=nil, opts={})

    ds = @sequel[@table].where(:typ => type)

    keys = key ? Array(key) : nil
    ds = ds.filter(:wfid => keys) if keys && keys.first.is_a?(String)

    return ds.all.size if opts[:count]

    # :ide.asc / :ide.desc rely on Sequel's symbol core extensions

    ds = ds.order(
      *(opts[:descending] ? [ :ide.desc, :rev.desc ] : [ :ide.asc, :rev.asc ])
    ).limit(
      opts[:limit], opts[:skip]
    )

    docs = ds.all
    docs = select_last_revs(docs, opts[:descending])
    docs = docs.collect { |d| Rufus::Json.decode(d[:doc]) }

    keys && keys.first.is_a?(Regexp) ?
      docs.select { |doc| keys.find { |key| key.match(doc['_id']) } } :
      docs

    # (pass on the dataset.filter(:wfid => /regexp/) for now
    # since we have potentially multiple keys)
  end

  # Returns all the ids of the documents of a given type.
  #
  def ids(type)

    @sequel[@table].where(:typ => type).collect { |d| d[:ide] }.uniq.sort
  end

  # Nukes all the documents in this storage.
  #
  def purge!

    @sequel[@table].delete
  end

  # Calls #disconnect on the db. According to Sequel's doc, it closes
  # all the idle connections in the pool (not the active ones).
  #
  def shutdown

    @sequel.disconnect
  end

  # Grrr... I should sort the mess between close and shutdown...
  # Tests vs production :-(
  #
  def close

    @sequel.disconnect
  end

  # Mainly used by ruote's test/unit/ut_17_storage.rb
  #
  def add_type(type)

    # does nothing, types are differentiated by the 'typ' column
  end

  # Nukes a db type and reputs it (losing all the documents that were in it).
  #
  def purge_type!(type)

    @sequel[@table].where(:typ => type).delete
  end

  # A provision made for workitems, allow to query them directly by
  # participant name.
  #
  def by_participant(type, participant_name, opts={})

    raise NotImplementedError if type != 'workitems'

    docs = @sequel[@table].where(
      :typ => type, :participant_name => participant_name
    ).order(
      :ide.asc, :rev.asc
    ).limit(
      opts[:limit], opts[:offset] || opts[:skip]
    )

    docs = select_last_revs(docs)

    opts[:count] ?
      docs.size : docs.map { |d| Ruote::Workitem.from_json(d[:doc]) }
  end

  # Querying workitems by field (warning, goes deep into the JSON structure)
  #
  def by_field(type, field, value, opts={})

    raise NotImplementedError if type != 'workitems'

    # match '"field":value' (or '"field":' when no value given) inside
    # the raw JSON text via SQL LIKE

    lk = [ '%"', field, '":' ]
    lk.push(Rufus::Json.encode(value)) if value
    lk.push('%')

    docs = @sequel[@table].where(
      :typ => type
    ).filter(
      :doc.like(lk.join)
    ).order(
      :ide.asc, :rev.asc
    ).limit(opts[:limit], opts[:skip] || opts[:offset])

    docs = select_last_revs(docs)

    opts[:count] ?
      docs.size : docs.map { |d| Ruote::Workitem.from_json(d[:doc]) }
  end

  # Runs a workitem query described by a criteria hash: 'count', 'limit',
  # 'offset'/'skip', 'wfid', 'participant_name'/'participant', plus any
  # remaining field/value pairs matched against the raw JSON text.
  #
  def query_workitems(criteria)

    ds = @sequel[@table].where(:typ => 'workitems')

    count = criteria.delete('count')

    limit = criteria.delete('limit')
    offset = criteria.delete('offset') || criteria.delete('skip')

    ds = ds.limit(limit, offset)

    wfid =
      criteria.delete('wfid')
    pname =
      criteria.delete('participant_name') || criteria.delete('participant')

    ds = ds.filter(:ide.like("%!#{wfid}")) if wfid
    ds = ds.filter(:participant_name => pname) if pname

    criteria.collect do |k, v|
      ds = ds.filter(:doc.like("%\"#{k}\":#{Rufus::Json.encode(v)}%"))
    end

    ds = select_last_revs(ds.all)

    count ? ds.size : ds.collect { |d| Ruote::Workitem.from_json(d[:doc]) }
  end

  protected

  # Inserts one row for the doc at the given revision, embedding '_rev'
  # and 'put_at' into the JSON-encoded body.
  #
  def do_insert(doc, rev)

    @sequel[@table].insert(
      :ide => doc['_id'],
      :rev => rev,
      :typ => doc['type'],
      :doc => Rufus::Json.encode(doc.merge(
        '_rev' => rev,
        'put_at' => Ruote.now_to_utc_s)),
      :wfid => extract_wfid(doc),
      :participant_name => doc['participant_name']
    )
  end

  # The wfid sits either at the top of the doc or nested inside the fei.
  #
  def extract_wfid(doc)

    doc['wfid'] || (doc['fei'] ? doc['fei']['wfid'] : nil)
  end

  # Fetches the highest-revision row for (type, key), or nil.
  #
  def do_get(type, key)

    @sequel[@table].where(
      :typ => type, :ide => key
    ).reverse_order(:rev).first
  end

  # Keeps only the last row seen per :ide (given rev-ascending input this
  # is the highest revision), sorted by :ide; reversed when requested.
  #
  def select_last_revs(docs, reverse=false)

    docs = docs.inject({}) { |h, doc|
      h[doc[:ide]] = doc
      h
    }.values.sort_by { |h|
      h[:ide]
    }

    reverse ? docs.reverse : docs
  end
end
end
end
|
# Gem version constant for safe_timeout.
module SafeTimeout
  # Frozen to prevent accidental mutation of the shared version string.
  VERSION = '0.0.5'.freeze
end
Bump gem version from 0.0.5 to 1.0.0.
# Gem version constant for safe_timeout.
module SafeTimeout
  # Frozen to prevent accidental mutation of the shared version string.
  VERSION = '1.0.0'.freeze
end
|
require 'rexml/document'
require "savon/wsse/certs"
require 'savon/wsse/canonicalizer'
module Savon
  class WSSE
    # Builds the WS-Security signature header for a SOAP request: SHA1
    # digests of the timestamp, soapenv:Body and WS-Addressing elements,
    # RSA-SHA1 signed with the private key held in a Savon::WSSE::Certs
    # object, and rendered to XML via Gyoku.
    class Signature
      # Raised when canonicalizing an element yields an empty/blank result.
      class EmptyCanonicalization < RuntimeError; end
      # Raised when signing is attempted without a private key.
      class MissingCertificate < RuntimeError; end

      # For a +Savon::WSSE::Certs+ object. To hold the certs we need to sign.
      attr_accessor :certs

      # Without a document, the document cannot be signed.
      # Generate the document once, and then set document and recall #to_xml
      attr_accessor :document

      # Algorithm / encoding URIs used throughout the XML-DSig structures.
      ExclusiveXMLCanonicalizationAlgorithm = 'http://www.w3.org/2001/10/xml-exc-c14n#'.freeze
      RSASHA1SignatureAlgorithm = 'http://www.w3.org/2000/09/xmldsig#rsa-sha1'.freeze
      SHA1DigestAlgorithm = 'http://www.w3.org/2000/09/xmldsig#sha1'.freeze
      X509v3ValueType = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-x509-token-profile-1.0#X509v3'.freeze
      Base64EncodingType = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-soap-message-security-1.0#Base64Binary'.freeze
      SignatureNamespace = 'http://www.w3.org/2000/09/xmldsig#'.freeze

      # @param certs [Savon::WSSE::Certs] certificate / key container.
      def initialize(certs = Certs.new)
        @certs = certs
      end

      # True once a document to sign has been assigned.
      def have_document?
        !!document
      end

      # Cache "now" so that digests match...
      # TODO: figure out how we might want to expire this cache...
      def now
        @now ||= Time.now
      end

      # Memoized wsu:Id values so the digest References and the elements
      # they point at agree across repeated calls.
      def timestamp_id
        @timestamp_id ||= "Timestamp-#{uid}".freeze
      end

      def body_id
        @body_id ||= "Body-#{uid}".freeze
      end

      def security_token_id
        @security_token_id ||= "SecurityToken-#{uid}".freeze
      end

      # Attributes the soapenv:Body element must carry so the body digest
      # Reference ("#<body_id>") can resolve.
      def body_attributes
        {
          "xmlns:wsu" => WSUNamespace,
          "wsu:Id" => body_id,
        }
      end

      # Renders the wsse:Security header — timestamp, username token,
      # signature and (when a cert is present) binary security token —
      # in a fixed element order.
      def to_xml(wsse)
        security = {}.deep_merge(timestamp).deep_merge(username_tag wsse).deep_merge(signature)
        security.deep_merge!(binary_security_token) if certs.cert
        security.merge! :order! => []
        [ "wsu:Timestamp","wsse:UsernameToken" , "wsse:BinarySecurityToken", "Signature" ].each do |key|
          security[:order!] << key if security[key]
        end
        # NOTE(review): the assignment to `xml` is redundant — Gyoku.xml is
        # the last expression and therefore the method's return value.
        xml = Gyoku.xml({
          "wsse:Security" => security,
          :attributes! => { "wsse:Security" => {
            'xmlns:wsse' => WSENamespace,
            'soapenv:mustUnderstand' => "1",
          } },
        })
      end

      private

      # wsse:BinarySecurityToken carrying the DER-encoded certificate,
      # base64 on a single line.
      def binary_security_token
        {
          "wsse:BinarySecurityToken" => Base64.encode64(certs.cert.to_der).gsub("\n", ''),
          :attributes! => { "wsse:BinarySecurityToken" => {
            "wsu:Id" => security_token_id,
            'EncodingType' => Base64EncodingType,
            'ValueType' => X509v3ValueType,
            "xmlns:wsu" => WSUNamespace,
          } }
        }
      end

      # <Signature> element (SignedInfo + SignatureValue + KeyInfo);
      # collapses to an empty hash until a document has been set.
      def signature
        return {} unless have_document?
        sig = signed_info.merge(key_info).merge(signature_value)
        sig.merge! :order! => []
        [ "SignedInfo", "SignatureValue", "KeyInfo" ].each do |key|
          sig[:order!] << key if sig[key]
        end
        {
          "Signature" => sig,
          :attributes! => { "Signature" => { "xmlns" => SignatureNamespace } },
        }
      end

      # KeyInfo pointing at the binary security token via its wsu:Id.
      def key_info
        {
          "KeyInfo" => {
            "wsse:SecurityTokenReference" => {
              "wsse:Reference/" => nil,
              :attributes! => { "wsse:Reference/" => {
                "ValueType" => X509v3ValueType,
                "URI" => "##{security_token_id}",
              } }
            },
            :attributes! => { "wsse:SecurityTokenReference" => { "xmlns" => "" } },
          },
        }
      end

      # Computed signature value; yields {} when signing is not yet
      # possible (nothing canonicalizable or no private key).
      def signature_value
        { "SignatureValue" => the_signature }
      rescue EmptyCanonicalization, MissingCertificate
        {}
      end

      # SignedInfo with one digest Reference per signed element.
      # NOTE(review): seven Reference URIs are declared for six Reference
      # entries — the "#<security_token_id>" URI has no matching digest
      # entry; confirm this asymmetry is intended.
      def signed_info
        {
          "SignedInfo" => {
            "CanonicalizationMethod/" => nil,
            "SignatureMethod/" => nil,
            "Reference" => [
              { "DigestValue" => timestamp_digest }.merge(signed_info_transforms).merge(signed_info_digest_method),
              { "DigestValue" => body_digest }.merge(signed_info_transforms).merge(signed_info_digest_method),
              { "DigestValue" => wsa_action_digest }.merge(signed_info_transforms).merge(signed_info_digest_method),
              { "DigestValue" => wsa_message_id_digest }.merge(signed_info_transforms).merge(signed_info_digest_method),
              { "DigestValue" => wsa_reply_to_digest }.merge(signed_info_transforms).merge(signed_info_digest_method),
              { "DigestValue" => wsa_to_digest }.merge(signed_info_transforms).merge(signed_info_digest_method),
            ],
            :attributes! => {
              "CanonicalizationMethod/" => { "Algorithm" => ExclusiveXMLCanonicalizationAlgorithm },
              "SignatureMethod/" => { "Algorithm" => RSASHA1SignatureAlgorithm },
              "Reference" => { "URI" => ["##{timestamp_id}", "##{body_id}", "#Action", "#MessageID", "#ReplyTo", "#To", "##{security_token_id}"] },
            },
            :order! => [ "CanonicalizationMethod/", "SignatureMethod/", "Reference" ],
          },
        }
      end

      # We're going to generate the timestamp ourselves, since WSSE is hard-
      # coded to generate the timestamp section directly within wsse:Security.
      #
      # TODO: Allow for configurability of these timestamps.
      def timestamp
        {
          "wsu:Timestamp" => {
            "wsu:Created" => now.xs_datetime,
            "wsu:Expires" => (now + 60 * 5).xs_datetime,
            :order! => ["wsu:Created", "wsu:Expires"],
          },
          :attributes! => { "wsu:Timestamp" => { "wsu:Id" => timestamp_id, "xmlns:wsu" => WSUNamespace } },
        }
      end

      # wsse:UsernameToken — the digest flavour includes nonce and created
      # timestamp; the plain-text flavour here carries only username and
      # password.
      def username_tag(wsse)
        if wsse.digest?
          {
            "wsse:UsernameToken" => {
              "wsse:Username" => wsse.username,
              "wsse:Nonce" => wsse.public_nonce,
              "wsu:Created" => wsse.public_timestamp,
              "wsse:Password" => wsse.public_digest_password,
              :attributes! => { "wsse:Password" => { "Type" => PasswordDigestURI } }
            },
            :attributes! => { "wsse:UsernameToken" => { "xmlns:wsu" => WSUNamespace } }
          }
        else
          {
            "wsse:UsernameToken" => {
              "wsse:Username" => wsse.username,
              "wsse:Password" => wsse.password,
              :attributes! => { "wsse:Password" => { "Type" => PasswordTextURI } }
            }
          }
        end
      end

      # RSA-SHA1 signature over the canonicalized SignedInfo element.
      def the_signature
        raise MissingCertificate, "Expected a private_key for signing" unless certs.private_key
        xml = canonicalize("SignedInfo")
        signature = certs.private_key.sign(OpenSSL::Digest::SHA1.new, xml)
        Base64.encode64(signature).gsub("\n", '') # TODO: DRY calls to Base64.encode64(...).gsub("\n", '')
      end

      def timestamp_digest
        xml_digest('wsu:Timestamp')
      end

      def body_digest
        xml_digest("soapenv:Body")
      end

      def wsa_action_digest
        xml_digest("wsa:Action")
      end

      def wsa_to_digest
        xml_digest("wsa:To")
      end

      def wsa_message_id_digest
        xml_digest("wsa:MessageID")
      end

      def wsa_reply_to_digest
        xml_digest("wsa:ReplyTo")
      end

      # Canonicalizes +xml_element+ within #document; raises if the
      # element is missing or canonicalizes to blank.
      def canonicalize(xml_element)
        canonicalized_element = Canonicalizer.canonicalize(document, xml_element)
        raise EmptyCanonicalization, "Expected to canonicalize #{xml_element.inspect} within: #{document}" if canonicalized_element.blank?
        canonicalized_element
      end

      # Base64-encoded SHA1 digest of the canonicalized element.
      def xml_digest(xml_element)
        Base64.encode64(OpenSSL::Digest::SHA1.digest(canonicalize(xml_element))).strip
      end

      def signed_info_digest_method
        { "DigestMethod/" => nil, :attributes! => { "DigestMethod/" => { "Algorithm" => SHA1DigestAlgorithm } } }
      end

      def signed_info_transforms
        { "Transforms" => { "Transform/" => nil, :attributes! => { "Transform/" => { "Algorithm" => ExclusiveXMLCanonicalizationAlgorithm } } } }
      end

      # Pseudo-random id suffix for the wsu:Id values above.
      def uid
        OpenSSL::Digest::SHA1.hexdigest([Time.now, rand].collect(&:to_s).join('/'))
      end
    end
  end
end
Add nonce and created timestamp for the non-digest password case
require 'rexml/document'
require "savon/wsse/certs"
require 'savon/wsse/canonicalizer'
module Savon
  class WSSE
    # Builds the WS-Security signature header for a SOAP request: SHA1
    # digests of the timestamp, soapenv:Body and WS-Addressing elements,
    # RSA-SHA1 signed with the private key held in a Savon::WSSE::Certs
    # object, and rendered to XML via Gyoku. In this revision both the
    # digest and plain-text UsernameToken flavours include a nonce and a
    # created timestamp.
    class Signature
      # Raised when canonicalizing an element yields an empty/blank result.
      class EmptyCanonicalization < RuntimeError; end
      # Raised when signing is attempted without a private key.
      class MissingCertificate < RuntimeError; end

      # For a +Savon::WSSE::Certs+ object. To hold the certs we need to sign.
      attr_accessor :certs

      # Without a document, the document cannot be signed.
      # Generate the document once, and then set document and recall #to_xml
      attr_accessor :document

      # Algorithm / encoding URIs used throughout the XML-DSig structures.
      ExclusiveXMLCanonicalizationAlgorithm = 'http://www.w3.org/2001/10/xml-exc-c14n#'.freeze
      RSASHA1SignatureAlgorithm = 'http://www.w3.org/2000/09/xmldsig#rsa-sha1'.freeze
      SHA1DigestAlgorithm = 'http://www.w3.org/2000/09/xmldsig#sha1'.freeze
      X509v3ValueType = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-x509-token-profile-1.0#X509v3'.freeze
      Base64EncodingType = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-soap-message-security-1.0#Base64Binary'.freeze
      SignatureNamespace = 'http://www.w3.org/2000/09/xmldsig#'.freeze

      # @param certs [Savon::WSSE::Certs] certificate / key container.
      def initialize(certs = Certs.new)
        @certs = certs
      end

      # True once a document to sign has been assigned.
      def have_document?
        !!document
      end

      # Cache "now" so that digests match...
      # TODO: figure out how we might want to expire this cache...
      def now
        @now ||= Time.now
      end

      # Memoized wsu:Id values so the digest References and the elements
      # they point at agree across repeated calls.
      def timestamp_id
        @timestamp_id ||= "Timestamp-#{uid}".freeze
      end

      def body_id
        @body_id ||= "Body-#{uid}".freeze
      end

      def security_token_id
        @security_token_id ||= "SecurityToken-#{uid}".freeze
      end

      # Attributes the soapenv:Body element must carry so the body digest
      # Reference ("#<body_id>") can resolve.
      def body_attributes
        {
          "xmlns:wsu" => WSUNamespace,
          "wsu:Id" => body_id,
        }
      end

      # Renders the wsse:Security header — timestamp, username token,
      # signature and (when a cert is present) binary security token —
      # in a fixed element order.
      def to_xml(wsse)
        security = {}.deep_merge(timestamp).deep_merge(username_tag wsse).deep_merge(signature)
        security.deep_merge!(binary_security_token) if certs.cert
        security.merge! :order! => []
        [ "wsu:Timestamp","wsse:UsernameToken" , "wsse:BinarySecurityToken", "Signature" ].each do |key|
          security[:order!] << key if security[key]
        end
        # NOTE(review): the assignment to `xml` is redundant — Gyoku.xml is
        # the last expression and therefore the method's return value.
        xml = Gyoku.xml({
          "wsse:Security" => security,
          :attributes! => { "wsse:Security" => {
            'xmlns:wsse' => WSENamespace,
            'soapenv:mustUnderstand' => "1",
          } },
        })
      end

      private

      # wsse:BinarySecurityToken carrying the DER-encoded certificate,
      # base64 on a single line.
      def binary_security_token
        {
          "wsse:BinarySecurityToken" => Base64.encode64(certs.cert.to_der).gsub("\n", ''),
          :attributes! => { "wsse:BinarySecurityToken" => {
            "wsu:Id" => security_token_id,
            'EncodingType' => Base64EncodingType,
            'ValueType' => X509v3ValueType,
            "xmlns:wsu" => WSUNamespace,
          } }
        }
      end

      # <Signature> element (SignedInfo + SignatureValue + KeyInfo);
      # collapses to an empty hash until a document has been set.
      def signature
        return {} unless have_document?
        sig = signed_info.merge(key_info).merge(signature_value)
        sig.merge! :order! => []
        [ "SignedInfo", "SignatureValue", "KeyInfo" ].each do |key|
          sig[:order!] << key if sig[key]
        end
        {
          "Signature" => sig,
          :attributes! => { "Signature" => { "xmlns" => SignatureNamespace } },
        }
      end

      # KeyInfo pointing at the binary security token via its wsu:Id.
      def key_info
        {
          "KeyInfo" => {
            "wsse:SecurityTokenReference" => {
              "wsse:Reference/" => nil,
              :attributes! => { "wsse:Reference/" => {
                "ValueType" => X509v3ValueType,
                "URI" => "##{security_token_id}",
              } }
            },
            :attributes! => { "wsse:SecurityTokenReference" => { "xmlns" => "" } },
          },
        }
      end

      # Computed signature value; yields {} when signing is not yet
      # possible (nothing canonicalizable or no private key).
      def signature_value
        { "SignatureValue" => the_signature }
      rescue EmptyCanonicalization, MissingCertificate
        {}
      end

      # SignedInfo with one digest Reference per signed element.
      # NOTE(review): seven Reference URIs are declared for six Reference
      # entries — the "#<security_token_id>" URI has no matching digest
      # entry; confirm this asymmetry is intended.
      def signed_info
        {
          "SignedInfo" => {
            "CanonicalizationMethod/" => nil,
            "SignatureMethod/" => nil,
            "Reference" => [
              { "DigestValue" => timestamp_digest }.merge(signed_info_transforms).merge(signed_info_digest_method),
              { "DigestValue" => body_digest }.merge(signed_info_transforms).merge(signed_info_digest_method),
              { "DigestValue" => wsa_action_digest }.merge(signed_info_transforms).merge(signed_info_digest_method),
              { "DigestValue" => wsa_message_id_digest }.merge(signed_info_transforms).merge(signed_info_digest_method),
              { "DigestValue" => wsa_reply_to_digest }.merge(signed_info_transforms).merge(signed_info_digest_method),
              { "DigestValue" => wsa_to_digest }.merge(signed_info_transforms).merge(signed_info_digest_method),
            ],
            :attributes! => {
              "CanonicalizationMethod/" => { "Algorithm" => ExclusiveXMLCanonicalizationAlgorithm },
              "SignatureMethod/" => { "Algorithm" => RSASHA1SignatureAlgorithm },
              "Reference" => { "URI" => ["##{timestamp_id}", "##{body_id}", "#Action", "#MessageID", "#ReplyTo", "#To", "##{security_token_id}"] },
            },
            :order! => [ "CanonicalizationMethod/", "SignatureMethod/", "Reference" ],
          },
        }
      end

      # We're going to generate the timestamp ourselves, since WSSE is hard-
      # coded to generate the timestamp section directly within wsse:Security.
      #
      # TODO: Allow for configurability of these timestamps.
      def timestamp
        {
          "wsu:Timestamp" => {
            "wsu:Created" => now.xs_datetime,
            "wsu:Expires" => (now + 60 * 5).xs_datetime,
            :order! => ["wsu:Created", "wsu:Expires"],
          },
          :attributes! => { "wsu:Timestamp" => { "wsu:Id" => timestamp_id, "xmlns:wsu" => WSUNamespace } },
        }
      end

      # wsse:UsernameToken — both flavours now carry a nonce and created
      # timestamp; they differ only in the password type URI and value.
      def username_tag(wsse)
        if wsse.digest?
          {
            "wsse:UsernameToken" => {
              "wsse:Username" => wsse.username,
              "wsse:Nonce" => wsse.public_nonce,
              "wsu:Created" => wsse.public_timestamp,
              "wsse:Password" => wsse.public_digest_password,
              :attributes! => { "wsse:Password" => { "Type" => PasswordDigestURI } }
            },
            :attributes! => { "wsse:UsernameToken" => { "xmlns:wsu" => WSUNamespace } }
          }
        else
          {
            "wsse:UsernameToken" => {
              "wsse:Username" => wsse.username,
              "wsse:Nonce" => wsse.public_nonce,
              "wsu:Created" => wsse.public_timestamp,
              "wsse:Password" => wsse.password,
              :attributes! => { "wsse:Password" => { "Type" => PasswordTextURI } }
            },
            :attributes! => { "wsse:UsernameToken" => { "xmlns:wsu" => WSUNamespace } }
          }
        end
      end

      # RSA-SHA1 signature over the canonicalized SignedInfo element.
      def the_signature
        raise MissingCertificate, "Expected a private_key for signing" unless certs.private_key
        xml = canonicalize("SignedInfo")
        signature = certs.private_key.sign(OpenSSL::Digest::SHA1.new, xml)
        Base64.encode64(signature).gsub("\n", '') # TODO: DRY calls to Base64.encode64(...).gsub("\n", '')
      end

      def timestamp_digest
        xml_digest('wsu:Timestamp')
      end

      def body_digest
        xml_digest("soapenv:Body")
      end

      def wsa_action_digest
        xml_digest("wsa:Action")
      end

      def wsa_to_digest
        xml_digest("wsa:To")
      end

      def wsa_message_id_digest
        xml_digest("wsa:MessageID")
      end

      def wsa_reply_to_digest
        xml_digest("wsa:ReplyTo")
      end

      # Canonicalizes +xml_element+ within #document; raises if the
      # element is missing or canonicalizes to blank.
      def canonicalize(xml_element)
        canonicalized_element = Canonicalizer.canonicalize(document, xml_element)
        raise EmptyCanonicalization, "Expected to canonicalize #{xml_element.inspect} within: #{document}" if canonicalized_element.blank?
        canonicalized_element
      end

      # Base64-encoded SHA1 digest of the canonicalized element.
      def xml_digest(xml_element)
        Base64.encode64(OpenSSL::Digest::SHA1.digest(canonicalize(xml_element))).strip
      end

      def signed_info_digest_method
        { "DigestMethod/" => nil, :attributes! => { "DigestMethod/" => { "Algorithm" => SHA1DigestAlgorithm } } }
      end

      def signed_info_transforms
        { "Transforms" => { "Transform/" => nil, :attributes! => { "Transform/" => { "Algorithm" => ExclusiveXMLCanonicalizationAlgorithm } } } }
      end

      # Pseudo-random id suffix for the wsu:Id values above.
      def uid
        OpenSSL::Digest::SHA1.hexdigest([Time.now, rand].collect(&:to_s).join('/'))
      end
    end
  end
end
|
module SBConstants
  # Source location of a constant reference: the node/attribute pair plus
  # the file, line and context in which it was seen.
  Location = Struct.new(:node, :attribute, :context, :file, :line) do
    # Dotted key path: "node.attribute", or just "node" when there is no
    # attribute. (Collapses the duplicated per-branch memoization into a
    # single expression.)
    def key_path
      @key_path ||= [node, attribute].compact.join('.')
    end

    # Human-readable identifier for debug output.
    def debug
      @debug ||= "#{file}[line:#{line}](#{key_path})"
    end

    # Two Locations are equal when their key paths match; context, file
    # and line are deliberately ignored for deduplication purposes.
    def eql? other
      self.class == other.class && key_path == other.key_path
    end

    # Keep #hash consistent with #eql? so Locations dedupe in Hash/Set.
    def hash
      key_path.hash
    end
  end
end
Cleaner @key_path code
module SBConstants
  # Source location of a constant reference (node/attribute plus where it
  # was found).
  Location = Struct.new(:node, :attribute, :context, :file, :line) do
    # Dotted path such as "node.attribute"; just "node" without attribute.
    def key_path
      @key_path ||= [node, attribute].reject(&:nil?).join('.')
    end

    # Compact summary used in debug output.
    def debug
      @debug ||= "#{file}[line:#{line}](#{key_path})"
    end

    # Locations compare equal purely on key path.
    def eql? other
      other.class == self.class && other.key_path == key_path
    end

    # Hash on the key path so #eql? Locations collide as Hash/Set keys.
    def hash
      key_path.hash
    end
  end
end
|
# Copyright (C) 2012-2013 Zammad Foundation, http://zammad-foundation.org/
class SearchIndexBackend
@@index = "zammad_#{Rails.env}"
@@url = 'http://127.0.0.1:9000'
@@user = 'elasticsearch'
@@pw = 'zammad'
=begin
add new object to search index
SearchIndexBackend.add( 'Ticket', some_data_object )
=end
def self.add(type, data)
url = "#{@@url}/#{@@index}/#{type}/#{data[:id]}"
puts "# curl -X POST \"#{url}\" -d '#{data.to_json}'"
conn = Faraday.new( :url => url )
if @@user && @@pw
conn.basic_auth( @@user, @@pw )
end
response = conn.post do |req|
req.url url
req.headers['Content-Type'] = 'application/json'
req.body = data.to_json
end
# puts response.body.to_s
puts "# #{response.status.to_s}"
return true if response.success?
data = JSON.parse( response.body )
return { :data => data, :response => response }
end
=begin
remove whole data from index
SearchIndexBackend.remove( 'Ticket', 123 )
SearchIndexBackend.remove( 'Ticket' )
=end
def self.remove( type, o_id = nil )
if o_id
url = "#{@@url}/#{@@index}/#{type}/#{o_id}"
else
url = "#{@@url}/#{@@index}/#{type}"
end
puts "# curl -X DELETE \"#{url}\""
conn = Faraday.new( :url => url )
if @@user && @@pw
conn.basic_auth( @@user, @@pw )
end
response = conn.delete url
# puts response.body.to_s
puts "# #{response.status.to_s}"
return true if response.success?
data = JSON.parse( response.body )
return { :data => data, :response => response }
end
=begin
return all activity entries of an user
result = SearchIndexBackend.search( user )
=end
def self.search(user,limit)
end
end
Move to the test system URL for testing.
# Copyright (C) 2012-2013 Zammad Foundation, http://zammad-foundation.org/
# Thin Elasticsearch indexing backend: builds document URLs from class-level
# settings and talks to the server via Faraday with HTTP basic auth.
class SearchIndexBackend

  # Index name, scoped by Rails environment.
  @@index = "zammad_#{Rails.env}"
  # SECURITY NOTE(review): a public test-host IP and real-looking
  # credentials are hard-coded here and will be echoed via puts below;
  # move these into configuration before any production use.
  @@url = 'http://217.111.80.181'
  @@user = 'elasticsearch'
  @@pw = 'zammad'

=begin

add new object to search index

  SearchIndexBackend.add( 'Ticket', some_data_object )

=end

  # POSTs the record as JSON to "<url>/<index>/<type>/<id>".
  # Returns true on success; otherwise a hash with the parsed error
  # payload (:data) and the raw Faraday response (:response).
  def self.add(type, data)
    url = "#{@@url}/#{@@index}/#{type}/#{data[:id]}"
    # Echo an equivalent curl command for debugging.
    puts "# curl -X POST \"#{url}\" -d '#{data.to_json}'"
    conn = Faraday.new( :url => url )
    conn.basic_auth( @@user, @@pw ) if @@user && @@pw
    response = conn.post do |req|
      req.url url
      req.headers['Content-Type'] = 'application/json'
      req.body = data.to_json
    end
    puts "# #{response.status}"
    return true if response.success?
    { :data => JSON.parse( response.body ), :response => response }
  end

=begin

remove whole data from index

  SearchIndexBackend.remove( 'Ticket', 123 )

  SearchIndexBackend.remove( 'Ticket' )

=end

  # DELETEs one record (when o_id is given) or the whole type mapping.
  # Same return convention as .add.
  def self.remove( type, o_id = nil )
    url = o_id ? "#{@@url}/#{@@index}/#{type}/#{o_id}" : "#{@@url}/#{@@index}/#{type}"
    puts "# curl -X DELETE \"#{url}\""
    conn = Faraday.new( :url => url )
    conn.basic_auth( @@user, @@pw ) if @@user && @@pw
    response = conn.delete url
    puts "# #{response.status}"
    return true if response.success?
    { :data => JSON.parse( response.body ), :response => response }
  end

=begin

return all activity entries of a user

  result = SearchIndexBackend.search( user )

=end

  # Placeholder: intended to return search results for the given user;
  # not yet implemented.
  def self.search(user, limit)
  end
end
# -*- coding: utf-8 -*-
module SelectableAttr
  # Mixin providing enum-backed attribute definitions (selectable_attr /
  # multi_selectable_attr) for plain classes and ActiveRecord models.
  module Base
    def self.included(base)
      base.extend(ClassMethods)
    end

    # Converters between a multi-select attribute's stored value and a
    # "hash array" ([{:id => ..., :select => true/false}, ...]) view,
    # keyed by the :convert_with option.
    ENUM_ARRAY_METHODS = {
      :none => {
        :to_hash_array => Proc.new do |enum, attr_value|
          value = (attr_value || []).map(&:to_s)
          enum.to_hash_array do |hash|
            hash[:select] = value.include?(hash[:id].to_s)
          end
        end,
        :to_attr_value => Proc.new do |enum, hash_array|
          hash_array.select{|hash| hash[:select]}.map{|hash| hash[:id]}
        end
      },
      :comma_string => {
        :to_hash_array => Proc.new do |enum, attr_value|
          values = attr_value.is_a?(Array) ? attr_value.map{|v|v.to_s} :
            (attr_value || '').split(',')
          enum.to_hash_array do |hash|
            hash[:select] = values.include?(hash[:id].to_s)
          end
        end,
        :to_attr_value => Proc.new do |enum, hash_array|
          hash_array.select{|hash| hash[:select]}.map{|hash| hash[:id]}.join(',')
        end
      },
      :binary_string => {
        :to_hash_array => Proc.new do |enum, attr_value|
          value = attr_value || ''
          idx = 0
          enum.to_hash_array do |hash|
            hash[:select] = (value[idx, 1] == '1')
            idx += 1
          end
        end,
        :to_attr_value => Proc.new do |enum, hash_array|
          result = ''
          hash_map = hash_array.inject({}){|dest, hash| dest[hash[:id]] = hash; dest}
          enum.each do |entry|
            hash = hash_map[entry.id]
            result << (hash[:select] ? '1' : '0')
          end
          result
        end
      }
    }

    module ClassMethods
      # Per-class registry of single-select attribute names.
      def single_selectable_attrs
        @single_selectable_attrs_hash ||= {}
        @single_selectable_attrs_hash[self] ||= []
      end

      # Per-class registry of multi-select attribute names.
      def multi_selectable_attrs
        @multi_selectable_attrs_hash ||= {}
        @multi_selectable_attrs_hash[self] ||= []
      end

      # :single, :multi or nil for an attribute name.
      def selectable_attr_type_for(attr)
        single_selectable_attrs.include?(attr) ? :single :
          multi_selectable_attrs.include?(attr) ? :multi : nil
      end

      # Declares a single-select enum attribute.
      def enum(*args, &block)
        process_definition(block, *args) do |enum, context|
          self.single_selectable_attrs << context[:attr].to_s
          define_enum_class_methods(context)
          define_enum_instance_methods(context)
        end
      end
      alias_method :single_selectable_attr, :enum
      alias_method :selectable_attr, :enum

      # Declares a multi-select enum attribute.
      def enum_array(*args, &block)
        base_options = args.extract_options!
        args << base_options # .update({:attr_accessor => false})
        process_definition(block, *args) do |enum, context|
          self.multi_selectable_attrs << context[:attr].to_s
          define_enum_class_methods(context)
          define_enum_array_instance_methods(context)
        end
      end
      alias_method :multi_selectable_attr, :enum_array

      # Shared declaration pipeline: build/reuse the enum, then define the
      # constant, accessor and the caller-supplied methods per attribute.
      def process_definition(block, *args)
        base_options = args.extract_options!
        enum = base_options[:enum] || create_enum(&block)
        args.each do |attr|
          context = {
            :enum => enum,
            :attr_accessor => !has_attr(attr),
            :attr => attr,
            :base_name => enum_base_name(attr)
          }.update(base_options)
          define_enum(context)
          define_accessor(context)
          yield(enum, context)
          enum.i18n_scope(:selectable_attrs, attr.to_sym) unless enum.i18n_scope
        end
        enum
      end

      # True when the attribute already exists as a method or DB column.
      def has_attr(attr)
        return true if self.method_defined?(attr)
        return false unless self.respond_to?(:columns)
        (self.columns || []).any?{|col|col.name.to_s == attr.to_s}
      end

      # Registers a custom attr -> base-name conversion block.
      def attr_enumeable_base(*args, &block)
        @base_name_processor = block
      end

      # Derives the enum base name from the attribute name (strips the
      # _cd/_code/_cds/_codes suffix unless a custom processor is set).
      def enum_base_name(attr)
        if @base_name_processor
          @base_name_processor.call(attr).to_s
        else
          attr.to_s.gsub(selectable_attr_name_pattern, '')
        end
      end

      DEFAULT_SELECTABLE_ATTR_NAME_PATTERN = /(_cd$|_code$|_cds$|_codes$)/

      def selectable_attr_name_pattern
        @selectable_attr_name_pattern ||= DEFAULT_SELECTABLE_ATTR_NAME_PATTERN
      end
      alias_method :enum_name_pattern, :selectable_attr_name_pattern

      def selectable_attr_name_pattern=(value)
        @selectable_attr_name_pattern = value
      end
      alias_method :enum_name_pattern=, :selectable_attr_name_pattern=

      # Builds an Enum from the declaration block.
      def create_enum(&block)
        result = Enum.new
        result.instance_eval(&block)
        result
      end

      # Exposes the enum as a <BASE>_ENUM constant on the class.
      def define_enum(context)
        base_name = context[:base_name]
        const_name = "#{base_name.upcase}_ENUM"
        const_set(const_name, context[:enum]) unless const_defined?(const_name)
      end

      def enum_for(attr)
        base_name = enum_base_name(attr)
        const_get("#{base_name.upcase}_ENUM")
      end

      # Defines an attr_accessor for non-column attributes; warns when a
      # :default is given for a column-backed attribute (ignored there).
      def define_accessor(context)
        attr = context[:attr]
        return unless (instance_methods(false) & [attr, "#{attr}="]).empty?
        if context[:attr_accessor]
          if context[:default]
            attr_accessor_with_default(attr, context[:default])
          else
            attr_accessor(attr)
          end
        else
          if context[:default]
            $stderr.puts "WARNING! :default option ignored for #{attr}"
          end
        end
      end

      # Class-level convenience readers (<base>_enum, <base>_options, ...)
      # plus the hash-array converters for the chosen :convert_with mode.
      def define_enum_class_methods(context)
        base_name = context[:base_name]
        enum = context[:enum]
        mod = Module.new
        mod.module_eval do
          define_method("#{base_name}_enum"){enum}
          define_method("#{base_name}_hash_array"){enum.to_hash_array}
          define_method("#{base_name}_entries"){enum.entries}
          define_method("#{base_name}_options"){|*ids_or_keys|enum.options(*ids_or_keys)}
          define_method("#{base_name}_ids"){|*ids_or_keys| enum.ids(*ids_or_keys)}
          define_method("#{base_name}_keys"){|*ids_or_keys|enum.keys(*ids_or_keys)}
          define_method("#{base_name}_names"){|*ids_or_keys|enum.names(*ids_or_keys)}
          define_method("#{base_name}_key_by_id"){|id|enum.key_by_id(id)}
          define_method("#{base_name}_id_by_key"){|key|enum.id_by_key(key)}
          define_method("#{base_name}_name_by_id"){|id|enum.name_by_id(id)}
          define_method("#{base_name}_name_by_key"){|key|enum.name_by_key(key)}
          define_method("#{base_name}_entry_by_id"){|id|enum.entry_by_id(id)}
          define_method("#{base_name}_entry_by_key"){|key|enum.entry_by_key(key)}
        end
        if convertors = ENUM_ARRAY_METHODS[context[:convert_with] || :none]
          mod.module_eval do
            define_method("#{base_name}_to_hash_array", convertors[:to_hash_array])
            define_method("hash_array_to_#{base_name}", convertors[:to_attr_value])
          end
        end
        self.extend(mod)
      end

      # Instance methods for single-select attributes: <base>_key,
      # <base>_key=, <base>_name and <base>_entry.
      # FIX: the <base>_entry method used to be defined twice in this
      # eval'd string; the identical duplicate definition was removed.
      def define_enum_instance_methods(context)
        attr = context[:attr]
        base_name = context[:base_name]
        instance_methods = <<-EOS
          def #{base_name}_key
            self.class.#{base_name}_key_by_id(#{attr})
          end
          def #{base_name}_key=(key)
            self.#{attr} = self.class.#{base_name}_id_by_key(key)
          end
          def #{base_name}_name
            self.class.#{base_name}_name_by_id(#{attr})
          end
          def #{base_name}_entry
            self.class.#{base_name}_entry_by_id(#{attr})
          end
        EOS
        self.module_eval(instance_methods)
      end

      # Instance methods for multi-select attributes (ids/keys/names/
      # selection views over the hash-array representation).
      def define_enum_array_instance_methods(context)
        attr = context[:attr]
        base_name = context[:base_name]
        # When inheriting from ActiveRecord::Base there is no method
        # corresponding to the underlying column, so define these here.
        self.module_eval(<<-"EOS")
          def #{base_name}_ids
            #{base_name}_hash_array_selected.map{|hash|hash[:id]}
          end
          def #{base_name}_ids=(ids)
            ids = ids.split(',') if ids.is_a?(String)
            ids = ids ? ids.map(&:to_s) : []
            update_#{base_name}_hash_array{|hash|ids.include?(hash[:id].to_s)}
          end
        EOS
        self.module_eval(<<-"EOS")
          def #{base_name}_hash_array
            self.class.#{base_name}_to_hash_array(self.class.#{base_name}_enum, #{attr})
          end
          def #{base_name}_hash_array=(hash_array)
            self.#{attr} = self.class.hash_array_to_#{base_name}(self.class.#{base_name}_enum, hash_array)
          end
          def #{base_name}_hash_array_selected
            #{base_name}_hash_array.select{|hash|!!hash[:select]}
          end
          def update_#{base_name}_hash_array(&block)
            hash_array = #{base_name}_hash_array.map do |hash|
              hash.merge(:select => yield(hash))
            end
            self.#{base_name}_hash_array = hash_array
          end
          def #{base_name}_keys
            #{base_name}_hash_array_selected.map{|hash|hash[:key]}
          end
          def #{base_name}_keys=(keys)
            update_#{base_name}_hash_array{|hash|keys.include?(hash[:key])}
          end
          def #{base_name}_selection
            #{base_name}_hash_array.map{|hash|!!hash[:select]}
          end
          def #{base_name}_selection=(selection)
            idx = -1
            update_#{base_name}_hash_array{|hash| idx += 1; !!selection[idx]}
          end
          def #{base_name}_names
            #{base_name}_hash_array_selected.map{|hash|hash[:name]}
          end
          def #{base_name}_entries
            ids = #{base_name}_ids
            self.class.#{base_name}_enum.select{|entry|ids.include?(entry.id)}
          end
        EOS
      end
    end
  end
end
test/selectable_attr_enum_test.rbのテストケースを追加しました。
git-svn-id: 40c15af1e0b82d5aa6c333bef28ff6cece3161be@587 8b017839-1745-0410-ac03-371ba388625b
# -*- coding: utf-8 -*-
module SelectableAttr
  # Mixin providing enum-backed attribute definitions (selectable_attr /
  # multi_selectable_attr) for plain classes and ActiveRecord models.
  module Base
    def self.included(base)
      base.extend(ClassMethods)
    end

    # Converters between a multi-select attribute's stored value and a
    # "hash array" ([{:id => ..., :select => true/false}, ...]) view,
    # keyed by the :convert_with option.
    ENUM_ARRAY_METHODS = {
      :none => {
        :to_hash_array => Proc.new do |enum, attr_value|
          value = (attr_value || []).map(&:to_s)
          enum.to_hash_array do |hash|
            hash[:select] = value.include?(hash[:id].to_s)
          end
        end,
        :to_attr_value => Proc.new do |enum, hash_array|
          hash_array.select{|hash| hash[:select]}.map{|hash| hash[:id]}
        end
      },
      :comma_string => {
        :to_hash_array => Proc.new do |enum, attr_value|
          values = attr_value.is_a?(Array) ? attr_value.map{|v|v.to_s} :
            (attr_value || '').split(',')
          enum.to_hash_array do |hash|
            hash[:select] = values.include?(hash[:id].to_s)
          end
        end,
        :to_attr_value => Proc.new do |enum, hash_array|
          hash_array.select{|hash| hash[:select]}.map{|hash| hash[:id]}.join(',')
        end
      },
      :binary_string => {
        :to_hash_array => Proc.new do |enum, attr_value|
          value = attr_value || ''
          idx = 0
          enum.to_hash_array do |hash|
            hash[:select] = (value[idx, 1] == '1')
            idx += 1
          end
        end,
        :to_attr_value => Proc.new do |enum, hash_array|
          result = ''
          hash_map = hash_array.inject({}){|dest, hash| dest[hash[:id]] = hash; dest}
          enum.each do |entry|
            hash = hash_map[entry.id]
            result << (hash[:select] ? '1' : '0')
          end
          result
        end
      }
    }

    module ClassMethods
      # Per-class registry of single-select attribute names.
      def single_selectable_attrs
        @single_selectable_attrs_hash ||= {};
        @single_selectable_attrs_hash[self] ||= []
      end

      # Per-class registry of multi-select attribute names.
      def multi_selectable_attrs
        @multi_selectable_attrs_hash ||= {};
        @multi_selectable_attrs_hash[self] ||= []
      end

      # :single, :multi or nil for an attribute name.
      def selectable_attr_type_for(attr)
        single_selectable_attrs.include?(attr) ? :single :
          multi_selectable_attrs.include?(attr) ? :multi : nil
      end

      # Declares a single-select enum attribute.
      def enum(*args, &block)
        process_definition(block, *args) do |enum, context|
          self.single_selectable_attrs << context[:attr].to_s
          define_enum_class_methods(context)
          define_enum_instance_methods(context)
        end
      end
      alias_method :single_selectable_attr, :enum
      alias_method :selectable_attr, :enum

      # Declares a multi-select enum attribute.
      def enum_array(*args, &block)
        base_options = args.extract_options! # last.is_a?(Hash) ? args.pop : {}
        args << base_options # .update({:attr_accessor => false})
        process_definition(block, *args) do |enum, context|
          self.multi_selectable_attrs << context[:attr].to_s
          define_enum_class_methods(context)
          define_enum_array_instance_methods(context)
        end
      end
      alias_method :multi_selectable_attr, :enum_array

      # Shared declaration pipeline: build/reuse the enum, then define the
      # constant, accessor and the caller-supplied methods per attribute.
      def process_definition(block, *args)
        base_options = args.extract_options! # last.is_a?(Hash) ? args.pop : {}
        enum = base_options[:enum] || create_enum(&block)
        args.each do |attr|
          context = {
            :enum => enum,
            :attr_accessor => !has_attr(attr),
            :attr => attr,
            :base_name => enum_base_name(attr)
          }.update(base_options)
          define_enum(context)
          define_accessor(context)
          yield(enum, context)
          enum.i18n_scope(:selectable_attrs, attr.to_sym) unless enum.i18n_scope
        end
        enum
      end

      # True when the attribute already exists as a method or DB column.
      def has_attr(attr)
        return true if self.method_defined?(attr)
        return false unless self.respond_to?(:columns)
        (self.columns || []).any?{|col|col.name.to_s == attr.to_s}
      end

      # Registers a custom attr -> base-name conversion block.
      def attr_enumeable_base(*args, &block)
        @base_name_processor = block
      end

      # Derives the enum base name from the attribute name (strips the
      # _cd/_code/_cds/_codes suffix unless a custom processor is set).
      def enum_base_name(attr)
        if @base_name_processor
          @base_name_processor.call(attr).to_s
        else
          attr.to_s.gsub(selectable_attr_name_pattern, '')
        end
      end

      DEFAULT_SELECTABLE_ATTR_NAME_PATTERN = /(_cd$|_code$|_cds$|_codes$)/

      def selectable_attr_name_pattern
        @selectable_attr_name_pattern ||= DEFAULT_SELECTABLE_ATTR_NAME_PATTERN
      end
      alias_method :enum_name_pattern, :selectable_attr_name_pattern

      def selectable_attr_name_pattern=(value)
        @selectable_attr_name_pattern = value
      end
      alias_method :enum_name_pattern=, :selectable_attr_name_pattern=

      # Builds an Enum from the declaration block.
      def create_enum(&block)
        result = Enum.new
        result.instance_eval(&block)
        result
      end

      # Exposes the enum as a <BASE>_ENUM constant on the class.
      def define_enum(context)
        base_name = context[:base_name]
        const_name = "#{base_name.upcase}_ENUM"
        const_set(const_name, context[:enum]) unless const_defined?(const_name)
      end

      def enum_for(attr)
        base_name = enum_base_name(attr)
        const_get("#{base_name.upcase}_ENUM")
      end

      # Defines an attr_accessor for non-column attributes; warns when a
      # :default is given for a column-backed attribute (ignored there).
      def define_accessor(context)
        attr = context[:attr]
        return unless (instance_methods(false) & [attr, "#{attr}="]).empty?
        if context[:attr_accessor]
          if context[:default]
            attr_accessor_with_default(attr, context[:default])
          else
            attr_accessor(attr)
          end
        else
          if context[:default]
            $stderr.puts "WARNING! :default option ignored for #{attr}"
          end
        end
      end

      # Class-level convenience readers (<base>_enum, <base>_options, ...)
      # plus the hash-array converters for the chosen :convert_with mode.
      def define_enum_class_methods(context)
        base_name = context[:base_name]
        enum = context[:enum]
        mod = Module.new
        mod.module_eval do
          define_method("#{base_name}_enum"){enum}
          define_method("#{base_name}_hash_array"){enum.to_hash_array}
          define_method("#{base_name}_entries"){enum.entries}
          define_method("#{base_name}_options"){|*ids_or_keys|enum.options(*ids_or_keys)}
          define_method("#{base_name}_ids"){|*ids_or_keys| enum.ids(*ids_or_keys)}
          define_method("#{base_name}_keys"){|*ids_or_keys|enum.keys(*ids_or_keys)}
          define_method("#{base_name}_names"){|*ids_or_keys|enum.names(*ids_or_keys)}
          define_method("#{base_name}_key_by_id"){|id|enum.key_by_id(id)}
          define_method("#{base_name}_id_by_key"){|key|enum.id_by_key(key)}
          define_method("#{base_name}_name_by_id"){|id|enum.name_by_id(id)}
          define_method("#{base_name}_name_by_key"){|key|enum.name_by_key(key)}
          define_method("#{base_name}_entry_by_id"){|id|enum.entry_by_id(id)}
          define_method("#{base_name}_entry_by_key"){|key|enum.entry_by_key(key)}
        end
        if convertors = ENUM_ARRAY_METHODS[context[:convert_with] || :none]
          mod.module_eval do
            define_method("#{base_name}_to_hash_array", convertors[:to_hash_array])
            define_method("hash_array_to_#{base_name}", convertors[:to_attr_value])
          end
        end
        self.extend(mod)
      end

      # Instance methods for single-select attributes: <base>_key,
      # <base>_key=, <base>_name and <base>_entry.
      # NOTE(review): the <base>_entry method is defined twice in the
      # string below; the second definition is identical and merely
      # redefines the first — presumably a copy/paste slip, confirm and
      # remove.
      def define_enum_instance_methods(context)
        attr = context[:attr]
        base_name = context[:base_name]
        instance_methods = <<-EOS
          def #{base_name}_key
            self.class.#{base_name}_key_by_id(#{attr})
          end
          def #{base_name}_key=(key)
            self.#{attr} = self.class.#{base_name}_id_by_key(key)
          end
          def #{base_name}_name
            self.class.#{base_name}_name_by_id(#{attr})
          end
          def #{base_name}_entry
            self.class.#{base_name}_entry_by_id(#{attr})
          end
          def #{base_name}_entry
            self.class.#{base_name}_entry_by_id(#{attr})
          end
        EOS
        self.module_eval(instance_methods)
      end

      # Instance methods for multi-select attributes (ids/keys/names/
      # selection views over the hash-array representation).
      def define_enum_array_instance_methods(context)
        attr = context[:attr]
        base_name = context[:base_name]
        # When inheriting from ActiveRecord::Base there is no method
        # corresponding to the underlying column, so define these here.
        self.module_eval(<<-"EOS")
          def #{base_name}_ids
            #{base_name}_hash_array_selected.map{|hash|hash[:id]}
          end
          def #{base_name}_ids=(ids)
            ids = ids.split(',') if ids.is_a?(String)
            ids = ids ? ids.map(&:to_s) : []
            update_#{base_name}_hash_array{|hash|ids.include?(hash[:id].to_s)}
          end
        EOS
        self.module_eval(<<-"EOS")
          def #{base_name}_hash_array
            self.class.#{base_name}_to_hash_array(self.class.#{base_name}_enum, #{attr})
          end
          def #{base_name}_hash_array=(hash_array)
            self.#{attr} = self.class.hash_array_to_#{base_name}(self.class.#{base_name}_enum, hash_array)
          end
          def #{base_name}_hash_array_selected
            #{base_name}_hash_array.select{|hash|!!hash[:select]}
          end
          def update_#{base_name}_hash_array(&block)
            hash_array = #{base_name}_hash_array.map do |hash|
              hash.merge(:select => yield(hash))
            end
            self.#{base_name}_hash_array = hash_array
          end
          def #{base_name}_keys
            #{base_name}_hash_array_selected.map{|hash|hash[:key]}
          end
          def #{base_name}_keys=(keys)
            update_#{base_name}_hash_array{|hash|keys.include?(hash[:key])}
          end
          def #{base_name}_selection
            #{base_name}_hash_array.map{|hash|!!hash[:select]}
          end
          def #{base_name}_selection=(selection)
            idx = -1
            update_#{base_name}_hash_array{|hash| idx += 1; !!selection[idx]}
          end
          def #{base_name}_names
            #{base_name}_hash_array_selected.map{|hash|hash[:name]}
          end
          def #{base_name}_entries
            ids = #{base_name}_ids
            self.class.#{base_name}_enum.select{|entry|ids.include?(entry.id)}
          end
        EOS
      end
    end
  end
end
|
module SendWithUs
  # Gem version string; frozen so the shared constant cannot be mutated.
  VERSION = '1.1.2'.freeze
end
Incremented version.rb
module SendWithUs
  # Gem version string; frozen so the shared constant cannot be mutated.
  VERSION = '1.1.3'.freeze
end
|
require "eventmachine"
require "resolv"
require "ipaddr"
module Sensu
module Transport
# Base error type raised by Sensu transports.
class Error < StandardError; end
class Base
# Transports are deferrable objects. This is to enable callbacks
# to be called in the event the transport calls `succeed()` to
# indicate that it has initialized and connected successfully.
include EM::Deferrable

# @!attribute [rw] logger
#   @return [Logger] the Sensu logger object.
attr_accessor :logger
# Seed all reconnect/error callbacks with no-op procs so they can be
# invoked unconditionally before the user registers real handlers.
def initialize
  @on_error = proc {}
  @before_reconnect = proc {}
  @after_reconnect = proc {}
end
# Register the error callback.
#
# @param block [Proc] invoked on transport errors; receives the
#   exception object as its parameter.
# @return [Proc] the registered callback.
def on_error(&block)
  @on_error = block
end
# Register the before-reconnect callback.
#
# @param block [Proc] invoked just before a reconnect attempt.
# @return [Proc] the registered callback.
def before_reconnect(&block)
  @before_reconnect = block
end
# Register the after-reconnect callback.
#
# @param block [Proc] invoked once reconnected to the transport.
# @return [Proc] the registered callback.
def after_reconnect(&block)
  @after_reconnect = block
end
# Transport connection setup. Base implementation is a no-op stub for
# concrete transports to override.
#
# @param options [Hash, String]
def connect(options={}); end
# Reconnect to the transport. Base implementation is a no-op stub for
# concrete transports to override.
#
# @param force [Boolean] the reconnect.
def reconnect(force=false); end
# Indicates if connected to the transport.
#
# @return [TrueClass, FalseClass]
def connected?
false
end
# Close the transport connection.
def close; end
# Publish a message to the transport.
#
# @param type [Symbol] the transport pipe type, possible values
# are: :direct and :fanout.
# @param pipe [String] the transport pipe name.
# @param message [String] the message to be published to the transport.
# @param options [Hash] the options to publish the message with.
# @yield [info] passes publish info to an optional callback/block.
# @yieldparam info [Hash] contains publish information, which
# may contain an error object (:error).
def publish(type, pipe, message, options={})
info = {:error => nil}
yield(info) if block_given?
end
# Subscribe to a transport pipe and/or funnel.
#
# @param type [Symbol] the transport pipe type, possible values
# are: :direct and :fanout.
# @param pipe [String] the transport pipe name.
# @param funnel [String] the transport funnel, which may be
# connected to multiple pipes.
# @param options [Hash] the options to consume messages with.
# @yield [info, message] passes message info and content to
# the consumer callback/block.
# @yieldparam info [Hash] contains message information.
# @yieldparam message [String] message.
def subscribe(type, pipe, funnel=nil, options={})
info = {}
message = ''
yield(info, message)
end
# Unsubscribe from all transport pipes and/or funnels.
#
# @yield [info] passes info to an optional callback/block.
# @yieldparam info [Hash] contains unsubscribe information.
def unsubscribe
info = {}
yield(info) if block_given?
end
# Acknowledge the delivery of a message from the transport.
#
# @param info [Hash] message information, eg. contains its id.
# @yield [info] passes acknowledgment info to an optional callback/block.
def acknowledge(info)
yield(info) if block_given?
end
# Alias for acknowledge(). This should be superseded by a proper
# alias via alias_method in the transport class.
def ack(*args, &callback)
acknowledge(*args, &callback)
end
# Transport funnel stats, such as message and consumer counts.
#
# @param funnel [String] the transport funnel to get stats for.
# @param options [Hash] the options to get funnel stats with.
# @yield [info] passes funnel stats a callback/block.
# @yieldparam info [Hash] contains funnel stats.
def stats(funnel, options={})
info = {}
yield(info) if block_given?
end
# Determine if a host is an IP address (or DNS hostname).
#
# @param host [String]
# @return [TrueClass, FalseClass]
def ip_address?(host)
begin
ip_address = IPAddr.new(host)
ip_address.ipv4? || ip_address.ipv6?
rescue IPAddr::InvalidAddressError
false
end
end
# Resolve a hostname to an IP address for a host. This method
# will return `nil` to the provided callback when the hostname
# cannot be resolved to an IP address.
#
# @param host [String]
# @param callback [Proc] called with the result of the DNS
# query (IP address).
def resolve_hostname(host, &callback)
resolve = Proc.new do
begin
dns = Resolv::DNS.new
ip_addresses = []
dns.each_resource(host, Resolv::DNS::Resource::IN::A) do |resource|
ip_addresses << resource.address.to_s
end
dns.each_resource(host, Resolv::DNS::Resource::IN::AAAA) do |resource|
ip_addresses << resource.address.to_s
end
ip_addresses.shift
rescue => error
@logger.error("transport connection error", {
:reason => "unable to resolve hostname",
:error => error.to_s
}) if @logger
nil
end
end
EM.defer(resolve, callback)
end
# Resolve a hostname to an IP address for a host. This method
# will return the provided host to the provided callback if it
# is already an IP address. This method will return `nil` to the
# provided callback when the hostname cannot be resolved to an
# IP address.
#
# @param host [String]
# @param callback [Proc] called with the result of the DNS
# query (IP address).
def resolve_host(host, &callback)
if ip_address?(host)
yield host
else
resolve_hostname(host, &callback)
end
end
# Discover available transports (Subclasses)
def self.descendants
ObjectSpace.each_object(Class).select do |klass|
klass < self
end
end
private
# Catch transport errors and call the on_error callback,
# providing it with the error object as an argument. This method
# is intended to be applied where necessary, not to be confused
# with a catch-all. Not all transports will need this.
#
# @yield [] callback/block to execute within a rescue block to
# catch transport errors.
def catch_errors
begin
yield
rescue => error
@on_error.call(error)
end
end
end
end
end
[dns-resolv] try ruby getaddrinfo fallback
require "eventmachine"
require "resolv"
require "ipaddr"
require "socket"
module Sensu
  module Transport
    # Error class used for transport-level failures.
    class Error < StandardError; end
    # Abstract transport interface. Concrete transports subclass Base
    # and override the connection/messaging methods below; the bodies
    # here are stubs that define the expected signatures and yield
    # placeholder info to any given callbacks.
    class Base
      # Transports are deferrable objects. This is to enable callbacks
      # to be called in the event the transport calls `succeed()` to
      # indicate that it has initialized and connected successfully.
      include EM::Deferrable
      # @!attribute [rw] logger
      #   @return [Logger] the Sensu logger object.
      attr_accessor :logger
      # Seed every callback with a no-op proc so each can be invoked
      # safely before the user registers a real one.
      def initialize
        @on_error = Proc.new {}
        @before_reconnect = Proc.new {}
        @after_reconnect = Proc.new {}
      end
      # Set the error callback.
      #
      # @param callback [Proc] called in the event of a transport
      #   error, the exception object should be passed as a parameter.
      # @return [Proc] the error callback.
      def on_error(&callback)
        @on_error = callback
      end
      # Set the before reconnect callback.
      #
      # @param callback [Proc] called before attempting to reconnect
      #   to the transport.
      # @return [Proc] the before reconnect callback.
      def before_reconnect(&callback)
        @before_reconnect = callback
      end
      # Set the after reconnect callback.
      #
      # @param callback [Proc] called after reconnecting to the
      #   transport.
      # @return [Proc] the after reconnect callback.
      def after_reconnect(&callback)
        @after_reconnect = callback
      end
      # Transport connection setup.
      #
      # @param options [Hash, String]
      def connect(options={}); end
      # Reconnect to the transport.
      #
      # @param force [Boolean] the reconnect.
      def reconnect(force=false); end
      # Indicates if connected to the transport.
      #
      # @return [TrueClass, FalseClass]
      def connected?
        false
      end
      # Close the transport connection.
      def close; end
      # Publish a message to the transport.
      #
      # @param type [Symbol] the transport pipe type, possible values
      #   are: :direct and :fanout.
      # @param pipe [String] the transport pipe name.
      # @param message [String] the message to be published to the transport.
      # @param options [Hash] the options to publish the message with.
      # @yield [info] passes publish info to an optional callback/block.
      # @yieldparam info [Hash] contains publish information, which
      #   may contain an error object (:error).
      def publish(type, pipe, message, options={})
        info = {:error => nil}
        yield(info) if block_given?
      end
      # Subscribe to a transport pipe and/or funnel.
      #
      # @param type [Symbol] the transport pipe type, possible values
      #   are: :direct and :fanout.
      # @param pipe [String] the transport pipe name.
      # @param funnel [String] the transport funnel, which may be
      #   connected to multiple pipes.
      # @param options [Hash] the options to consume messages with.
      # @yield [info, message] passes message info and content to
      #   the consumer callback/block.
      # @yieldparam info [Hash] contains message information.
      # @yieldparam message [String] message.
      def subscribe(type, pipe, funnel=nil, options={})
        info = {}
        message = ''
        yield(info, message)
      end
      # Unsubscribe from all transport pipes and/or funnels.
      #
      # @yield [info] passes info to an optional callback/block.
      # @yieldparam info [Hash] contains unsubscribe information.
      def unsubscribe
        info = {}
        yield(info) if block_given?
      end
      # Acknowledge the delivery of a message from the transport.
      #
      # @param info [Hash] message information, eg. contains its id.
      # @yield [info] passes acknowledgment info to an optional callback/block.
      def acknowledge(info)
        yield(info) if block_given?
      end
      # Alias for acknowledge(). This should be superseded by a proper
      # alias via alias_method in the transport class.
      def ack(*args, &callback)
        acknowledge(*args, &callback)
      end
      # Transport funnel stats, such as message and consumer counts.
      #
      # @param funnel [String] the transport funnel to get stats for.
      # @param options [Hash] the options to get funnel stats with.
      # @yield [info] passes funnel stats a callback/block.
      # @yieldparam info [Hash] contains funnel stats.
      def stats(funnel, options={})
        info = {}
        yield(info) if block_given?
      end
      # Determine if a host is an IP address (or DNS hostname).
      #
      # @param host [String]
      # @return [TrueClass, FalseClass]
      def ip_address?(host)
        begin
          ip_address = IPAddr.new(host)
          ip_address.ipv4? || ip_address.ipv6?
        rescue IPAddr::InvalidAddressError
          false
        end
      end
      # Resolve a hostname to an IP address for a host. This method
      # will return `nil` to the provided callback when the hostname
      # cannot be resolved to an IP address.
      #
      # @param host [String]
      # @param callback [Proc] called with the result of the DNS
      #   query (IP address).
      def resolve_hostname(host, &callback)
        resolve = Proc.new do
          begin
            dns = Resolv::DNS.new
            ip_addresses = []
            dns.each_resource(host, Resolv::DNS::Resource::IN::A) do |resource|
              ip_addresses << resource.address.to_s
            end
            dns.each_resource(host, Resolv::DNS::Resource::IN::AAAA) do |resource|
              ip_addresses << resource.address.to_s
            end
            # A records are collected before AAAA, so an IPv4 address
            # wins when both record types exist.
            ip_address = ip_addresses.shift
            if ip_address.nil?
              # Fall back to the OS resolver (covers /etc/hosts etc.).
              # Socket.getaddrinfo returns tuples of the form
              # [af, port, hostname, numeric_address, ...]; the numeric
              # IP address lives at index 3 — index 2 is a hostname, so
              # returning it would defeat the purpose of resolution.
              info = Socket.getaddrinfo(host, nil).first
              info.nil? ? nil : info[3]
            else
              ip_address
            end
          rescue => error
            @logger.error("transport connection error", {
              :reason => "unable to resolve hostname",
              :error => error.to_s
            }) if @logger
            nil
          end
        end
        # Run the blocking DNS query on EM's thread pool; the result
        # is handed to the callback on the reactor thread.
        EM.defer(resolve, callback)
      end
      # Resolve a hostname to an IP address for a host. This method
      # will return the provided host to the provided callback if it
      # is already an IP address. This method will return `nil` to the
      # provided callback when the hostname cannot be resolved to an
      # IP address.
      #
      # @param host [String]
      # @param callback [Proc] called with the result of the DNS
      #   query (IP address).
      def resolve_host(host, &callback)
        if ip_address?(host)
          yield host
        else
          resolve_hostname(host, &callback)
        end
      end
      # Discover available transports (Subclasses)
      def self.descendants
        ObjectSpace.each_object(Class).select do |klass|
          klass < self
        end
      end
      private
      # Catch transport errors and call the on_error callback,
      # providing it with the error object as an argument. This method
      # is intended to be applied where necessary, not to be confused
      # with a catch-all. Not all transports will need this.
      #
      # @yield [] callback/block to execute within a rescue block to
      #   catch transport errors.
      def catch_errors
        begin
          yield
        rescue => error
          @on_error.call(error)
        end
      end
    end
  end
end
|
module Serialism
  # Combines a set of items and a serializer class.
  #
  # Example:
  #
  #   class Foo
  #     attr_accessor :id
  #   end
  #
  #   class FooSerializer < Serialism::Serializer
  #     attributes :id
  #   end
  #
  #   Serialism::Collection.new(a_bunch_of_foo_instances, serializer: FooSerializer).to_csv
  #   #=> returns a CSV string
  class Collection
    attr_reader :items
    # create a new collection
    #
    # @param [Enumerable] items
    #   A collection of items.
    #   All member items should be encodable by `serializer`.
    # @param [Serialism::Serializer] serializer
    #   The serializer class used to encode members of `items`.
    # @raise [ArgumentError] when the serializer does not implement
    #   the required class-level :attributes / instance-level :render.
    def initialize(items = [], serializer:)
      if !serializer.respond_to?(:attributes)
        raise ArgumentError, 'serializer must implement a class-level :attributes method'
      end
      if !serializer.instance_methods.include?(:render)
        raise ArgumentError, 'serializer must implement an instance-level :render method'
      end
      @serializer = serializer
      self.items = items
    end
    # Set the items in the collection.
    #
    # Replaces any previous items already in the collection.
    #
    # @param [#each] items an enumerable collection of items
    # @return [Serialism::Collection]
    def items=(items)
      raise ArgumentError, 'argument must respond_to :each' if !items.respond_to?(:each)
      raise ArgumentError, 'argument must respond_to :map' if !items.respond_to?(:map)
      @items = items
      self
    end
    # return the attributes for the collection
    #
    # @return [Array]
    def attributes
      return [] if items.empty?
      @serializer.attributes
    end
    # Generate a csv string for the collection
    #
    # When members of the array returned by the serializer are themselves arrays,
    # these sub-arrays will be joined using "," prior to being added to the main
    # CSV.
    #
    # @return [String]
    def to_csv
      require 'csv'
      CSV.generate do |csv|
        csv << attributes
        items.each do |t|
          row = @serializer.new(t).render.values.map do |cell|
            # convert complex cells to comma-separated strings
            cell.is_a?(Array) ? cell.join(',') : cell
          end
          csv << row
        end
      end
    end
    # Generate a JSON string for the collection.
    #
    # Accepts (and ignores) the optional generator-state argument that
    # Ruby's JSON generator passes to #to_json when this object is
    # embedded in a larger structure being dumped; the zero-arity
    # signature previously raised ArgumentError in that case.
    #
    # @return [String]
    def to_json(*_args)
      require 'json'
      JSON.dump(items.map { |t| @serializer.new(t).render })
    end
  end
end
extract method: csv_value_to_s
module Serialism
  # Combines a set of items and a serializer class.
  #
  # Example:
  #
  #   class Foo
  #     attr_accessor :id
  #   end
  #
  #   class FooSerializer < Serialism::Serializer
  #     attributes :id
  #   end
  #
  #   Serialism::Collection.new(a_bunch_of_foo_instances, serializer: FooSerializer).to_csv
  #   #=> returns a CSV string
  class Collection
    attr_reader :items
    # create a new collection
    #
    # @param [Enumerable] items
    #   A collection of items.
    #   All member items should be encodable by `serializer`.
    # @param [Serialism::Serializer] serializer
    #   The serializer class used to encode members of `items`.
    # @raise [ArgumentError] when the serializer does not implement
    #   the required class-level :attributes / instance-level :render.
    def initialize(items = [], serializer:)
      if !serializer.respond_to?(:attributes)
        raise ArgumentError, 'serializer must implement a class-level :attributes method'
      end
      if !serializer.instance_methods.include?(:render)
        raise ArgumentError, 'serializer must implement an instance-level :render method'
      end
      @serializer = serializer
      self.items = items
    end
    # Set the items in the collection.
    #
    # Replaces any previous items already in the collection.
    #
    # @param [#each] items an enumerable collection of items
    # @return [Serialism::Collection]
    def items=(items)
      raise ArgumentError, 'argument must respond_to :each' if !items.respond_to?(:each)
      raise ArgumentError, 'argument must respond_to :map' if !items.respond_to?(:map)
      @items = items
      self
    end
    # return the attributes for the collection
    #
    # @return [Array]
    def attributes
      return [] if items.empty?
      @serializer.attributes
    end
    # Generate a csv string for the collection
    #
    # When members of the array returned by the serializer are themselves arrays,
    # these sub-arrays will be joined using "," prior to being added to the main
    # CSV.
    #
    # @return [String]
    def to_csv
      require 'csv'
      CSV.generate do |csv|
        csv << attributes
        items.each do |t|
          row = @serializer.new(t).render.values.map do |cell|
            csv_value_to_s(cell)
          end
          csv << row
        end
      end
    end
    # Generate a JSON string for the collection.
    #
    # Accepts (and ignores) the optional generator-state argument that
    # Ruby's JSON generator passes to #to_json when this object is
    # embedded in a larger structure being dumped; the zero-arity
    # signature previously raised ArgumentError in that case.
    #
    # @return [String]
    def to_json(*_args)
      require 'json'
      JSON.dump(items.map { |t| @serializer.new(t).render })
    end
    private
    # convert complex cells to comma-separated strings
    def csv_value_to_s(cell)
      cell.is_a?(Array) ? cell.join(',') : cell
    end
  end
end
|
module ActiveModel
  # Mixin that adds HOTP/TOTP one-time-password behaviour (backed by
  # the ROTP gem) to an ActiveModel/ActiveRecord class via
  # `has_one_time_password`.
  module OneTimePassword
    extend ActiveSupport::Concern
    module ClassMethods
      # Configure one-time-password behaviour for the model.
      #
      # Options:
      #   :column_name              - secret column (default "otp_secret_key")
      #   :length                   - OTP digit count (default 6)
      #   :counter_based            - use HOTP instead of TOTP (default false)
      #   :counter_column_name      - counter column (default "otp_counter")
      #   :backup_codes_column_name - backup codes column (default "otp_backup_codes")
      #   :backup_codes_count       - number of backup codes (default 12)
      #   :one_time_backup_codes    - burn backup codes after use (default false)
      #   :if / :unless             - forwarded to the before_create callback
      def has_one_time_password(options = {})
        cattr_accessor :otp_column_name, :otp_counter_column_name,
                       :otp_backup_codes_column_name
        class_attribute :otp_digits, :otp_counter_based,
                        :otp_backup_codes_count, :otp_one_time_backup_codes
        self.otp_column_name = (options[:column_name] || "otp_secret_key").to_s
        self.otp_digits = options[:length] || 6
        self.otp_counter_based = (options[:counter_based] || false)
        self.otp_counter_column_name = (options[:counter_column_name] || "otp_counter").to_s
        self.otp_backup_codes_column_name = (
          options[:backup_codes_column_name] || 'otp_backup_codes'
        ).to_s
        self.otp_backup_codes_count = options[:backup_codes_count] || 12
        self.otp_one_time_backup_codes = (
          options[:one_time_backup_codes] || false
        )
        include InstanceMethodsOnActivation
        # Generate any missing secret/counter/backup codes just before
        # the record is first persisted.
        before_create(**options.slice(:if, :unless)) do
          self.otp_regenerate_secret if !otp_column
          self.otp_regenerate_counter if otp_counter_based && !otp_counter
          otp_regenerate_backup_codes if backup_codes_enabled?
        end
        if respond_to?(:attributes_protected_by_default)
          def self.attributes_protected_by_default #:nodoc:
            super + [otp_column_name, otp_counter_column_name]
          end
        end
      end
      # Defaults to 160 bit long secret
      # (meaning a 32 character long base32 secret)
      def otp_random_secret(length = 20)
        ROTP::Base32.random(length)
      end
    end
    module InstanceMethodsOnActivation
      # Replace the stored OTP secret with a fresh random one.
      def otp_regenerate_secret
        self.otp_column = self.class.otp_random_secret
      end
      # Reset the HOTP counter to its initial value.
      def otp_regenerate_counter
        self.otp_counter = 1
      end
      # Verify a one-time password (or a backup code, when enabled).
      #
      # @param code [String] the code entered by the user
      # @param options [Hash] :auto_increment (HOTP) advances and
      #   persists the counter on success; :drift (TOTP) accepts codes
      #   that expired up to that many seconds ago.
      # @return truthy when the code is valid, falsy otherwise
      def authenticate_otp(code, options = {})
        return true if backup_codes_enabled? && authenticate_backup_code(code)
        if otp_counter_based
          hotp = ROTP::HOTP.new(otp_column, digits: otp_digits)
          result = hotp.verify(code, otp_counter)
          if result && options[:auto_increment]
            self.otp_counter += 1
            save if respond_to?(:changed?) && !new_record?
          end
          result
        else
          totp = ROTP::TOTP.new(otp_column, digits: otp_digits)
          if drift = options[:drift]
            totp.verify(code, drift_behind: drift)
          else
            totp.verify(code)
          end
        end
      end
      # Current OTP for the record.
      #
      # @param options [Hash, Time] for HOTP, :auto_increment advances
      #   (and persists) the counter first; for TOTP, a Time argument
      #   or a :time option selects the timestamp the code is for.
      def otp_code(options = {})
        if otp_counter_based
          if options[:auto_increment]
            self.otp_counter += 1
            save if respond_to?(:changed?) && !new_record?
          end
          ROTP::HOTP.new(otp_column, digits: otp_digits).at(self.otp_counter)
        else
          if options.is_a? Hash
            time = options.fetch(:time, Time.now)
          else
            time = options
          end
          ROTP::TOTP.new(otp_column, digits: otp_digits).at(time)
        end
      end
      # otpauth:// provisioning URI for authenticator apps; defaults
      # the account label to the record's email when available.
      def provisioning_uri(account = nil, options = {})
        account ||= self.email if self.respond_to?(:email)
        account ||= ""
        if otp_counter_based
          ROTP::HOTP
            .new(otp_column, options)
            .provisioning_uri(account, self.otp_counter)
        else
          ROTP::TOTP
            .new(otp_column, options)
            .provisioning_uri(account)
        end
      end
      # Read the OTP secret from the configured column.
      def otp_column
        self.public_send(self.class.otp_column_name)
      end
      # Write the OTP secret to the configured column.
      def otp_column=(attr)
        self.public_send("#{self.class.otp_column_name}=", attr)
      end
      # Read the HOTP counter, honouring a custom column name.
      def otp_counter
        if self.class.otp_counter_column_name != "otp_counter"
          self.public_send(self.class.otp_counter_column_name)
        else
          super
        end
      end
      # Write the HOTP counter, honouring a custom column name.
      def otp_counter=(attr)
        if self.class.otp_counter_column_name != "otp_counter"
          self.public_send("#{self.class.otp_counter_column_name}=", attr)
        else
          super
        end
      end
      # Exclude the OTP secret column from serialized output.
      def serializable_hash(options = nil)
        options ||= {}
        options[:except] = Array(options[:except])
        options[:except] << self.class.otp_column_name
        super(options)
      end
      # Generate a fresh set of backup codes and store them in the
      # configured column.
      def otp_regenerate_backup_codes
        otp = ROTP::OTP.new(otp_column)
        backup_codes = Array.new(self.class.otp_backup_codes_count) do
          # Random 6-digit input in [100_000, 1_000_000).
          otp.generate_otp((SecureRandom.random_number(9e5) + 1e5).to_i)
        end
        public_send("#{self.class.otp_backup_codes_column_name}=", backup_codes)
      end
      # Backup codes are enabled when the model defines the configured
      # backup-codes attribute.
      def backup_codes_enabled?
        self.class.attribute_method?(self.class.otp_backup_codes_column_name)
      end
      private
      # Check a backup code; optionally burn it after use.
      # @return [Boolean]
      def authenticate_backup_code(code)
        backup_codes_column_name = self.class.otp_backup_codes_column_name
        backup_codes = public_send(backup_codes_column_name)
        # Guard against a nil column (e.g. unset, or encrypted storage
        # that has not been decrypted) before calling #include?, which
        # would otherwise raise NoMethodError on nil.
        return false unless backup_codes.present? && backup_codes.include?(code)
        if self.class.otp_one_time_backup_codes
          backup_codes.delete(code)
          public_send("#{backup_codes_column_name}=", backup_codes)
          save if respond_to?(:changed?) && !new_record?
        end
        true
      end
    end
  end
end
Backup codes might be `nil` when using encryption (#83)
module ActiveModel
  # Mixin that adds HOTP/TOTP one-time-password behaviour (backed by
  # the ROTP gem) to an ActiveModel/ActiveRecord class via
  # `has_one_time_password`.
  module OneTimePassword
    extend ActiveSupport::Concern
    module ClassMethods
      # Configure one-time-password behaviour for the model.
      #
      # Options:
      #   :column_name              - secret column (default "otp_secret_key")
      #   :length                   - OTP digit count (default 6)
      #   :counter_based            - use HOTP instead of TOTP (default false)
      #   :counter_column_name      - counter column (default "otp_counter")
      #   :backup_codes_column_name - backup codes column (default "otp_backup_codes")
      #   :backup_codes_count       - number of backup codes (default 12)
      #   :one_time_backup_codes    - burn backup codes after use (default false)
      #   :if / :unless             - forwarded to the before_create callback
      def has_one_time_password(options = {})
        cattr_accessor :otp_column_name, :otp_counter_column_name,
                       :otp_backup_codes_column_name
        class_attribute :otp_digits, :otp_counter_based,
                        :otp_backup_codes_count, :otp_one_time_backup_codes
        self.otp_column_name = (options[:column_name] || "otp_secret_key").to_s
        self.otp_digits = options[:length] || 6
        self.otp_counter_based = (options[:counter_based] || false)
        self.otp_counter_column_name = (options[:counter_column_name] || "otp_counter").to_s
        self.otp_backup_codes_column_name = (
          options[:backup_codes_column_name] || 'otp_backup_codes'
        ).to_s
        self.otp_backup_codes_count = options[:backup_codes_count] || 12
        self.otp_one_time_backup_codes = (
          options[:one_time_backup_codes] || false
        )
        include InstanceMethodsOnActivation
        # Generate any missing secret/counter/backup codes just before
        # the record is first persisted.
        before_create(**options.slice(:if, :unless)) do
          self.otp_regenerate_secret if !otp_column
          self.otp_regenerate_counter if otp_counter_based && !otp_counter
          otp_regenerate_backup_codes if backup_codes_enabled?
        end
        if respond_to?(:attributes_protected_by_default)
          def self.attributes_protected_by_default #:nodoc:
            super + [otp_column_name, otp_counter_column_name]
          end
        end
      end
      # Defaults to 160 bit long secret
      # (meaning a 32 character long base32 secret)
      def otp_random_secret(length = 20)
        ROTP::Base32.random(length)
      end
    end
    module InstanceMethodsOnActivation
      # Replace the stored OTP secret with a fresh random one.
      def otp_regenerate_secret
        self.otp_column = self.class.otp_random_secret
      end
      # Reset the HOTP counter to its initial value.
      def otp_regenerate_counter
        self.otp_counter = 1
      end
      # Verify a one-time password (or a backup code, when enabled).
      #
      # @param code [String] the code entered by the user
      # @param options [Hash] :auto_increment (HOTP) advances and
      #   persists the counter on success; :drift (TOTP) accepts codes
      #   that expired up to that many seconds ago.
      # @return truthy when the code is valid, falsy otherwise
      def authenticate_otp(code, options = {})
        return true if backup_codes_enabled? && authenticate_backup_code(code)
        if otp_counter_based
          hotp = ROTP::HOTP.new(otp_column, digits: otp_digits)
          result = hotp.verify(code, otp_counter)
          if result && options[:auto_increment]
            self.otp_counter += 1
            save if respond_to?(:changed?) && !new_record?
          end
          result
        else
          totp = ROTP::TOTP.new(otp_column, digits: otp_digits)
          if drift = options[:drift]
            totp.verify(code, drift_behind: drift)
          else
            totp.verify(code)
          end
        end
      end
      # Current OTP for the record.
      #
      # @param options [Hash, Time] for HOTP, :auto_increment advances
      #   (and persists) the counter first; for TOTP, a Time argument
      #   or a :time option selects the timestamp the code is for.
      def otp_code(options = {})
        if otp_counter_based
          if options[:auto_increment]
            self.otp_counter += 1
            save if respond_to?(:changed?) && !new_record?
          end
          ROTP::HOTP.new(otp_column, digits: otp_digits).at(self.otp_counter)
        else
          if options.is_a? Hash
            time = options.fetch(:time, Time.now)
          else
            time = options
          end
          ROTP::TOTP.new(otp_column, digits: otp_digits).at(time)
        end
      end
      # otpauth:// provisioning URI for authenticator apps; defaults
      # the account label to the record's email when available.
      def provisioning_uri(account = nil, options = {})
        account ||= self.email if self.respond_to?(:email)
        account ||= ""
        if otp_counter_based
          ROTP::HOTP
            .new(otp_column, options)
            .provisioning_uri(account, self.otp_counter)
        else
          ROTP::TOTP
            .new(otp_column, options)
            .provisioning_uri(account)
        end
      end
      # Read the OTP secret from the configured column.
      def otp_column
        self.public_send(self.class.otp_column_name)
      end
      # Write the OTP secret to the configured column.
      def otp_column=(attr)
        self.public_send("#{self.class.otp_column_name}=", attr)
      end
      # Read the HOTP counter, honouring a custom column name.
      def otp_counter
        if self.class.otp_counter_column_name != "otp_counter"
          self.public_send(self.class.otp_counter_column_name)
        else
          super
        end
      end
      # Write the HOTP counter, honouring a custom column name.
      def otp_counter=(attr)
        if self.class.otp_counter_column_name != "otp_counter"
          self.public_send("#{self.class.otp_counter_column_name}=", attr)
        else
          super
        end
      end
      # Exclude the OTP secret column from serialized output.
      def serializable_hash(options = nil)
        options ||= {}
        options[:except] = Array(options[:except])
        options[:except] << self.class.otp_column_name
        super(options)
      end
      # Generate a fresh set of backup codes and store them in the
      # configured column.
      def otp_regenerate_backup_codes
        otp = ROTP::OTP.new(otp_column)
        backup_codes = Array.new(self.class.otp_backup_codes_count) do
          # Random 6-digit input in [100_000, 1_000_000).
          otp.generate_otp((SecureRandom.random_number(9e5) + 1e5).to_i)
        end
        public_send("#{self.class.otp_backup_codes_column_name}=", backup_codes)
      end
      # Backup codes are enabled when the model defines the configured
      # backup-codes attribute.
      def backup_codes_enabled?
        self.class.attribute_method?(self.class.otp_backup_codes_column_name)
      end
      private
      # Check a backup code; optionally burn it after use.
      # The presence guard protects against a nil column (e.g. unset,
      # or encrypted storage — see issue #83).
      # @return [Boolean]
      def authenticate_backup_code(code)
        backup_codes_column_name = self.class.otp_backup_codes_column_name
        backup_codes = public_send(backup_codes_column_name)
        return false unless backup_codes.present? && backup_codes.include?(code)
        if self.class.otp_one_time_backup_codes
          backup_codes.delete(code)
          public_send("#{backup_codes_column_name}=", backup_codes)
          save if respond_to?(:changed?) && !new_record?
        end
        true
      end
    end
  end
end
|
require 'active_serializer/serializer'
# Mixin giving a class a rules-driven `serialize` entry point; the
# rules are declared via `serialization_rules`.
module ActiveSerializer::Serializable
  extend ActiveSupport::Concern
  module ClassMethods
    # Serialize `objects` using the rules declared with
    # `serialization_rules`. The first object seeds the serializer;
    # every object is passed to the rules block.
    #
    # @return the serializer's attrs; with the :no_root_node option
    #   set, the value under the first root key is returned instead.
    def serialize(*objects)
      first_object = objects.first
      serialization_options = self.class_variable_get(:@@serialization_options)
      serialization_rules = self.class_variable_get(:@@serialization_rules)
      serializer = ActiveSerializer::Serializer.new(first_object)
      # Evaluate the declared rules in the serializer's context.
      instance_exec do
        serializer.instance_exec(*objects, &serialization_rules)
      end
      if serialization_options[:no_root_node]
        serializer.attrs.first[1]
      else
        serializer.attrs
      end
    end
    # Declare the serialization rules for this class.
    #
    # NOTE(review): class variables (@@...) are shared down the
    # inheritance tree — confirm subclassed serializers are not
    # expected to carry independent rules.
    #
    # @param options [Hash] e.g. :no_root_node to unwrap the root key
    # @param block [Proc] rules evaluated against the serializer
    def serialization_rules(options = {}, &block)
      self.class_variable_set(:@@serialization_options, options)
      self.class_variable_set(:@@serialization_rules, block)
    end
  end
end
Added serialize_all method
require 'active_serializer/serializer'
# Mixin giving a class a rules-driven `serialize` entry point; the
# rules are declared via `serialization_rules`.
module ActiveSerializer::Serializable
  extend ActiveSupport::Concern
  module ClassMethods
    # Serialize `objects` using the rules declared with
    # `serialization_rules`. The first object seeds the serializer;
    # every object is passed to the rules block.
    #
    # @return the serializer's attrs; with the :no_root_node option
    #   set, the value under the first root key is returned instead.
    def serialize(*objects)
      first_object = objects.first
      serialization_options = self.class_variable_get(:@@serialization_options)
      serialization_rules = self.class_variable_get(:@@serialization_rules)
      serializer = ActiveSerializer::Serializer.new(first_object)
      # Evaluate the declared rules in the serializer's context.
      instance_exec do
        serializer.instance_exec(*objects, &serialization_rules)
      end
      if serialization_options[:no_root_node]
        serializer.attrs.first[1]
      else
        serializer.attrs
      end
    end
    # Serialize every object in `collection`.
    #
    # @param collection [#map] objects serializable by this serializer
    # @return [Array] the serialized representation of each object
    def serialize_all(collection)
      # `map` (not `each`) so the serialized hashes are returned;
      # `each` would discard every result and hand back the input
      # collection unchanged, making this method a no-op.
      collection.map do |object|
        serialize(object)
      end
    end
    # Declare the serialization rules for this class.
    #
    # @param options [Hash] e.g. :no_root_node to unwrap the root key
    # @param block [Proc] rules evaluated against the serializer
    def serialization_rules(options = {}, &block)
      self.class_variable_set(:@@serialization_options, options)
      self.class_variable_set(:@@serialization_rules, block)
    end
  end
end
|
module ActiveShipping
# FedEx carrier implementation.
#
# FedEx module by Jimmy Baker (http://github.com/jimmyebaker)
# Documentation can be found here: http://images.fedex.com/us/developer/product/WebServices/MyWebHelp/PropDevGuide.pdf
class FedEx < Carrier
self.retry_safe = true
cattr_reader :name
@@name = "FedEx"
TEST_URL = 'https://gatewaybeta.fedex.com:443/xml'
LIVE_URL = 'https://gateway.fedex.com:443/xml'
CARRIER_CODES = {
"fedex_ground" => "FDXG",
"fedex_express" => "FDXE"
}
DELIVERY_ADDRESS_NODE_NAMES = %w(DestinationAddress ActualDeliveryAddress)
SHIPPER_ADDRESS_NODE_NAMES = %w(ShipperAddress)
SERVICE_TYPES = {
"PRIORITY_OVERNIGHT" => "FedEx Priority Overnight",
"PRIORITY_OVERNIGHT_SATURDAY_DELIVERY" => "FedEx Priority Overnight Saturday Delivery",
"FEDEX_2_DAY" => "FedEx 2 Day",
"FEDEX_2_DAY_SATURDAY_DELIVERY" => "FedEx 2 Day Saturday Delivery",
"STANDARD_OVERNIGHT" => "FedEx Standard Overnight",
"FIRST_OVERNIGHT" => "FedEx First Overnight",
"FIRST_OVERNIGHT_SATURDAY_DELIVERY" => "FedEx First Overnight Saturday Delivery",
"FEDEX_EXPRESS_SAVER" => "FedEx Express Saver",
"FEDEX_1_DAY_FREIGHT" => "FedEx 1 Day Freight",
"FEDEX_1_DAY_FREIGHT_SATURDAY_DELIVERY" => "FedEx 1 Day Freight Saturday Delivery",
"FEDEX_2_DAY_FREIGHT" => "FedEx 2 Day Freight",
"FEDEX_2_DAY_FREIGHT_SATURDAY_DELIVERY" => "FedEx 2 Day Freight Saturday Delivery",
"FEDEX_3_DAY_FREIGHT" => "FedEx 3 Day Freight",
"FEDEX_3_DAY_FREIGHT_SATURDAY_DELIVERY" => "FedEx 3 Day Freight Saturday Delivery",
"INTERNATIONAL_PRIORITY" => "FedEx International Priority",
"INTERNATIONAL_PRIORITY_SATURDAY_DELIVERY" => "FedEx International Priority Saturday Delivery",
"INTERNATIONAL_ECONOMY" => "FedEx International Economy",
"INTERNATIONAL_FIRST" => "FedEx International First",
"INTERNATIONAL_PRIORITY_FREIGHT" => "FedEx International Priority Freight",
"INTERNATIONAL_ECONOMY_FREIGHT" => "FedEx International Economy Freight",
"GROUND_HOME_DELIVERY" => "FedEx Ground Home Delivery",
"FEDEX_GROUND" => "FedEx Ground",
"INTERNATIONAL_GROUND" => "FedEx International Ground",
"SMART_POST" => "FedEx SmartPost",
"FEDEX_FREIGHT_PRIORITY" => "FedEx Freight Priority",
"FEDEX_FREIGHT_ECONOMY" => "FedEx Freight Economy"
}
PACKAGE_TYPES = {
"fedex_envelope" => "FEDEX_ENVELOPE",
"fedex_pak" => "FEDEX_PAK",
"fedex_box" => "FEDEX_BOX",
"fedex_tube" => "FEDEX_TUBE",
"fedex_10_kg_box" => "FEDEX_10KG_BOX",
"fedex_25_kg_box" => "FEDEX_25KG_BOX",
"your_packaging" => "YOUR_PACKAGING"
}
DROPOFF_TYPES = {
'regular_pickup' => 'REGULAR_PICKUP',
'request_courier' => 'REQUEST_COURIER',
'dropbox' => 'DROP_BOX',
'business_service_center' => 'BUSINESS_SERVICE_CENTER',
'station' => 'STATION'
}
SIGNATURE_OPTION_CODES = {
adult: 'ADULT', # 21 years plus
direct: 'DIRECT', # A person at the delivery address
indirect: 'INDIRECT', # A person at the delivery address, or a neighbor, or a signed note for fedex on the door
none_required: 'NO_SIGNATURE_REQUIRED',
default_for_service: 'SERVICE_DEFAULT'
}
PAYMENT_TYPES = {
'sender' => 'SENDER',
'recipient' => 'RECIPIENT',
'third_party' => 'THIRDPARTY',
'collect' => 'COLLECT'
}
PACKAGE_IDENTIFIER_TYPES = {
'tracking_number' => 'TRACKING_NUMBER_OR_DOORTAG',
'door_tag' => 'TRACKING_NUMBER_OR_DOORTAG',
'rma' => 'RMA',
'ground_shipment_id' => 'GROUND_SHIPMENT_ID',
'ground_invoice_number' => 'GROUND_INVOICE_NUMBER',
'ground_customer_reference' => 'GROUND_CUSTOMER_REFERENCE',
'ground_po' => 'GROUND_PO',
'express_reference' => 'EXPRESS_REFERENCE',
'express_mps_master' => 'EXPRESS_MPS_MASTER',
'shipper_reference' => 'SHIPPER_REFERENCE',
}
TRANSIT_TIMES = %w(UNKNOWN ONE_DAY TWO_DAYS THREE_DAYS FOUR_DAYS FIVE_DAYS SIX_DAYS SEVEN_DAYS EIGHT_DAYS NINE_DAYS TEN_DAYS ELEVEN_DAYS TWELVE_DAYS THIRTEEN_DAYS FOURTEEN_DAYS FIFTEEN_DAYS SIXTEEN_DAYS SEVENTEEN_DAYS EIGHTEEN_DAYS)
# FedEx tracking codes as described in the FedEx Tracking Service WSDL Guide
# All delays also have been marked as exceptions
TRACKING_STATUS_CODES = HashWithIndifferentAccess.new(
'AA' => :at_airport,
'AD' => :at_delivery,
'AF' => :at_fedex_facility,
'AR' => :at_fedex_facility,
'AP' => :at_pickup,
'CA' => :canceled,
'CH' => :location_changed,
'DE' => :exception,
'DL' => :delivered,
'DP' => :departed_fedex_location,
'DR' => :vehicle_furnished_not_used,
'DS' => :vehicle_dispatched,
'DY' => :exception,
'EA' => :exception,
'ED' => :enroute_to_delivery,
'EO' => :enroute_to_origin_airport,
'EP' => :enroute_to_pickup,
'FD' => :at_fedex_destination,
'HL' => :held_at_location,
'IT' => :in_transit,
'LO' => :left_origin,
'OC' => :order_created,
'OD' => :out_for_delivery,
'PF' => :plane_in_flight,
'PL' => :plane_landed,
'PU' => :picked_up,
'RS' => :return_to_shipper,
'SE' => :exception,
'SF' => :at_sort_facility,
'SP' => :split_status,
'TR' => :transfer
)
def self.service_name_for_code(service_code)
SERVICE_TYPES[service_code] || "FedEx #{service_code.titleize.sub(/Fedex /, '')}"
end
def requirements
[:key, :password, :account, :login]
end
def find_rates(origin, destination, packages, options = {})
options = @options.update(options)
packages = Array(packages)
rate_request = build_rate_request(origin, destination, packages, options)
xml = commit(save_request(rate_request), (options[:test] || false))
parse_rate_response(origin, destination, packages, xml, options)
end
def find_tracking_info(tracking_number, options = {})
options = @options.update(options)
tracking_request = build_tracking_request(tracking_number, options)
xml = commit(save_request(tracking_request), (options[:test] || false))
parse_tracking_response(xml, options)
end
# Get Shipping labels
def create_shipment(origin, destination, packages, options = {})
options = @options.update(options)
packages = Array(packages)
raise Error, "Multiple packages are not supported yet." if packages.length > 1
request = build_shipment_request(origin, destination, packages, options)
logger.debug(request) if logger
logger.debug(confirm_response) if logger
response = commit(save_request(request), (options[:test] || false))
parse_ship_response(response)
end
def maximum_address_field_length
# See Fedex Developper Guide
35
end
protected
def build_shipment_request(origin, destination, packages, options = {})
imperial = location_uses_imperial(origin)
xml_builder = Nokogiri::XML::Builder.new do |xml|
xml.ProcessShipmentRequest(xmlns: 'http://fedex.com/ws/ship/v13') do
build_request_header(xml)
build_version_node(xml, 'ship', 13, 0 ,0)
xml.RequestedShipment do
xml.ShipTimestamp(ship_timestamp(options[:turn_around_time]).iso8601(0))
xml.DropoffType('REGULAR_PICKUP')
xml.ServiceType(options[:service_type] || 'FEDEX_GROUND')
xml.PackagingType('YOUR_PACKAGING')
xml.Shipper do
build_contact_address_nodes(xml, options[:shipper] || origin)
end
xml.Recipient do
build_contact_address_nodes(xml, destination)
end
xml.Origin do
build_contact_address_nodes(xml, origin)
end
xml.ShippingChargesPayment do
xml.PaymentType('SENDER')
xml.Payor do
build_shipment_responsible_party_node(xml, options[:shipper] || origin)
end
end
xml.LabelSpecification do
xml.LabelFormatType('COMMON2D')
xml.ImageType('PNG')
xml.LabelStockType('PAPER_7X4.75')
end
xml.RateRequestTypes('ACCOUNT')
xml.PackageCount(packages.size)
packages.each do |package|
xml.RequestedPackageLineItems do
xml.GroupPackageCount(1)
build_package_weight_node(xml, package, imperial)
build_package_dimensions_node(xml, package, imperial)
# Reference Numbers
reference_numbers = Array(package.options[:reference_numbers])
if reference_numbers.size > 0
xml.CustomerReferences do
reference_numbers.each do |reference_number_info|
xml.CustomerReferenceType(reference_number_info[:type] || "CUSTOMER_REFERENCE")
xml.Value(reference_number_info[:value])
end
end
end
xml.SpecialServicesRequested do
xml.SpecialServiceTypes("SIGNATURE_OPTION")
xml.SignatureOptionDetail do
xml.OptionType(SIGNATURE_OPTION_CODES[package.options[:signature_option] || :default_for_service])
end
end
end
end
end
end
end
xml_builder.to_xml
end
# Emits a FedEx Contact + Address XML pair for the given location.
# Used by ship requests for the Shipper/Recipient/Origin parties.
def build_contact_address_nodes(xml, location)
  xml.Contact do
    xml.PersonName(location.name)
    xml.CompanyName(location.company)
    xml.PhoneNumber(location.phone)
  end
  xml.Address do
    xml.StreetLines(location.address1) if location.address1
    xml.StreetLines(location.address2) if location.address2
    xml.City(location.city) if location.city
    xml.StateOrProvinceCode(location.state)
    xml.PostalCode(location.postal_code)
    xml.CountryCode(location.country_code(:alpha2))
    # String 'true' here, only when the address is residential.
    xml.Residential('true') if location.residential?
  end
end
# Emits the ResponsibleParty (account number + contact) paying for the
# shipment. Always bills the carrier's own @options[:account].
def build_shipment_responsible_party_node(xml, origin)
  xml.ResponsibleParty do
    xml.AccountNumber(@options[:account])
    xml.Contact do
      xml.PersonName(origin.name)
      xml.CompanyName(origin.company)
      xml.PhoneNumber(origin.phone)
    end
  end
end
def build_rate_request(origin, destination, packages, options = {})
imperial = location_uses_imperial(origin)
xml_builder = Nokogiri::XML::Builder.new do |xml|
xml.RateRequest(xmlns: 'http://fedex.com/ws/rate/v13') do
build_request_header(xml)
build_version_node(xml, 'crs', 13, 0 ,0)
# Returns delivery dates
xml.ReturnTransitAndCommit(true)
# Returns saturday delivery shipping options when available
xml.VariableOptions('SATURDAY_DELIVERY')
xml.RequestedShipment do
xml.ShipTimestamp(ship_timestamp(options[:turn_around_time]).iso8601(0))
freight = has_freight?(options)
unless freight
# fedex api wants this up here otherwise request returns an error
xml.DropoffType(options[:dropoff_type] || 'REGULAR_PICKUP')
xml.PackagingType(options[:packaging_type] || 'YOUR_PACKAGING')
end
build_location_node(xml, 'Shipper', options[:shipper] || origin)
build_location_node(xml, 'Recipient', destination)
if options[:shipper] && options[:shipper] != origin
build_location_node(xml, 'Origin', origin)
end
if freight
freight_options = options[:freight]
build_shipping_charges_payment_node(xml, freight_options)
build_freight_shipment_detail_node(xml, freight_options, packages, imperial)
build_rate_request_types_node(xml)
else
xml.SmartPostDetail do
xml.Indicia(options[:smart_post_indicia] || 'PARCEL_SELECT')
xml.HubId(options[:smart_post_hub_id] || 5902) # default to LA
end
build_rate_request_types_node(xml)
xml.PackageCount(packages.size)
build_packages_nodes(xml, packages, imperial)
end
end
end
end
xml_builder.to_xml
end
# Emits one RequestedPackageLineItems element per package. Returns the
# builder's per-package results (callers ignore the return value).
def build_packages_nodes(xml, packages, imperial)
  packages.map do |package|
    xml.RequestedPackageLineItems do
      xml.GroupPackageCount(1)
      build_package_weight_node(xml, package, imperial)
      build_package_dimensions_node(xml, package, imperial)
    end
  end
end
# Freight-only: emits who pays (payment type + freight account number).
def build_shipping_charges_payment_node(xml, freight_options)
  xml.ShippingChargesPayment do
    xml.PaymentType(freight_options[:payment_type])
    xml.Payor do
      xml.ResponsibleParty do
        # TODO: case of different freight account numbers?
        xml.AccountNumber(freight_options[:account])
      end
    end
  end
end
# Freight-only: emits FreightShipmentDetail with the billing address,
# role and one LineItems entry per package (shared class/packaging).
def build_freight_shipment_detail_node(xml, freight_options, packages, imperial)
  xml.FreightShipmentDetail do
    # TODO: case of different freight account numbers?
    xml.FedExFreightAccountNumber(freight_options[:account])
    build_location_node(xml, 'FedExFreightBillingContactAndAddress', freight_options[:billing_location])
    xml.Role(freight_options[:role])
    packages.each do |pkg|
      xml.LineItems do
        xml.FreightClass(freight_options[:freight_class])
        xml.Packaging(freight_options[:packaging])
        build_package_weight_node(xml, pkg, imperial)
        build_package_dimensions_node(xml, pkg, imperial)
      end
    end
  end
end
# True when the options carry freight details under :freight.
# The original checked `options[:freight] && options[:freight].present?`;
# the first test is redundant because `present?` is already false for nil.
def has_freight?(options)
  options[:freight].present?
end
# Emits the package weight rounded to 3 decimals with a floor of 0.1
# (presumably because FedEx rejects zero weights — confirm in dev guide).
def build_package_weight_node(xml, pkg, imperial)
  xml.Weight do
    xml.Units(imperial ? 'LB' : 'KG')
    xml.Value([((imperial ? pkg.lbs : pkg.kgs).to_f * 1000).round / 1000.0, 0.1].max)
  end
end
# Emits package dimensions; each axis is rounded to 3 decimals and then
# ceil'd to a whole number before being sent.
def build_package_dimensions_node(xml, pkg, imperial)
  xml.Dimensions do
    [:length, :width, :height].each do |axis|
      value = ((imperial ? pkg.inches(axis) : pkg.cm(axis)).to_f * 1000).round / 1000.0 # 3 decimals
      xml.public_send(axis.to_s.capitalize, value.ceil)
    end
    xml.Units(imperial ? 'IN' : 'CM')
  end
end
# Emits RateRequestTypes; defaults to account-specific ('ACCOUNT') rates.
def build_rate_request_types_node(xml, type = 'ACCOUNT')
  xml.public_send(:RateRequestTypes, type)
end
# Builds the TrackRequest XML (track service v7) for one tracking number.
# options may narrow the search: :package_identifier_type,
# :ship_date_range_begin/:ship_date_range_end, :unique_identifier.
def build_tracking_request(tracking_number, options = {})
  xml_builder = Nokogiri::XML::Builder.new do |xml|
    xml.TrackRequest(xmlns: 'http://fedex.com/ws/track/v7') do
      build_request_header(xml)
      build_version_node(xml, 'trck', 7, 0, 0)
      xml.SelectionDetails do
        xml.PackageIdentifier do
          xml.Type(PACKAGE_IDENTIFIER_TYPES[options[:package_identifier_type] || 'tracking_number'])
          xml.Value(tracking_number)
        end
        # Optional filters to disambiguate reused tracking numbers.
        xml.ShipDateRangeBegin(options[:ship_date_range_begin]) if options[:ship_date_range_begin]
        xml.ShipDateRangeEnd(options[:ship_date_range_end]) if options[:ship_date_range_end]
        xml.TrackingNumberUniqueIdentifier(options[:unique_identifier]) if options[:unique_identifier]
      end
      xml.ProcessingOptions('INCLUDE_DETAILED_SCANS')
    end
  end
  xml_builder.to_xml
end
# Emits the authentication/client/transaction preamble shared by every
# FedEx request: key + password, account + meter, and a transaction id.
def build_request_header(xml)
  xml.WebAuthenticationDetail do
    xml.UserCredential do
      xml.Key(@options[:key])
      xml.Password(@options[:password])
    end
  end
  xml.ClientDetail do
    xml.AccountNumber(@options[:account])
    # The FedEx meter number is stored under the generic :login key.
    xml.MeterNumber(@options[:login])
  end
  xml.TransactionDetail do
    xml.CustomerTransactionId(@options[:transaction_id] || 'ActiveShipping') # TODO: Need to do something better with this...
  end
end
# Emits the FedEx Version element identifying the service and its
# major/intermediate/minor API version.
def build_version_node(xml, service_id, major, intermediate, minor)
  xml.Version do
    xml.ServiceId(service_id)
    { Major: major, Intermediate: intermediate, Minor: minor }.each do |tag, value|
      xml.public_send(tag, value)
    end
  end
end
# Emits a bare Address block (no contact info) under the given element
# name. NOTE(review): unlike build_contact_address_nodes this omits
# StateOrProvinceCode — confirm rating does not require it.
def build_location_node(xml, name, location)
  xml.public_send(name) do
    xml.Address do
      xml.StreetLines(location.address1) if location.address1
      xml.StreetLines(location.address2) if location.address2
      xml.City(location.city) if location.city
      xml.PostalCode(location.postal_code)
      xml.CountryCode(location.country_code(:alpha2))
      # Boolean true here, vs the string 'true' used in ship requests.
      xml.Residential(true) unless location.commercial?
    end
  end
end
# Parses a RateReply into a RateResponse, one RateEstimate per
# RateReplyDetails entry. Saturday-delivery options surface as a
# distinct "<CODE>_SATURDAY_DELIVERY" service name.
def parse_rate_response(origin, destination, packages, response, options)
  xml = build_document(response, 'RateReply')
  success = response_success?(xml)
  message = response_message(xml)
  if success
    rate_estimates = xml.root.css('> RateReplyDetails').map do |rated_shipment|
      service_code = rated_shipment.at('ServiceType').text
      is_saturday_delivery = rated_shipment.at('AppliedOptions').try(:text) == 'SATURDAY_DELIVERY'
      service_type = is_saturday_delivery ? "#{service_code}_SATURDAY_DELIVERY" : service_code
      # Ground services report transit time in days instead of a timestamp.
      transit_time = rated_shipment.at('TransitTime').text if ["FEDEX_GROUND", "GROUND_HOME_DELIVERY"].include?(service_code)
      max_transit_time = rated_shipment.at('MaximumTransitTime').try(:text) if service_code == "FEDEX_GROUND"
      delivery_timestamp = rated_shipment.at('DeliveryTimestamp').try(:text)
      delivery_range = delivery_range_from(transit_time, max_transit_time, delivery_timestamp, options)
      currency = rated_shipment.at('RatedShipmentDetails/ShipmentRateDetail/TotalNetCharge/Currency').text
      RateEstimate.new(origin, destination, @@name,
                       self.class.service_name_for_code(service_type),
                       :service_code => service_code,
                       :total_price => rated_shipment.at('RatedShipmentDetails/ShipmentRateDetail/TotalNetCharge/Amount').text.to_f,
                       :currency => currency,
                       :packages => packages,
                       :delivery_range => delivery_range)
    end
    # A successful reply with zero rates is still a failure for callers.
    if rate_estimates.empty?
      success = false
      message = "No shipping rates could be found for the destination address" if message.blank?
    end
  else
    rate_estimates = []
  end
  RateResponse.new(success, message, Hash.from_xml(response), :rates => rate_estimates, :xml => response, :request => last_request, :log_xml => options[:log_xml])
end
# Returns [earliest, latest] delivery estimates. Prefers the explicit
# DeliveryTimestamp; otherwise converts transit times (business days)
# into dates counted forward from the ship date.
def delivery_range_from(transit_time, max_transit_time, delivery_timestamp, options)
  delivery_range = [delivery_timestamp, delivery_timestamp]
  # if there's no delivery timestamp but we do have a transit time, use it
  if delivery_timestamp.blank? && transit_time.present?
    transit_range = parse_transit_times([transit_time, max_transit_time.presence || transit_time])
    delivery_range = transit_range.map { |days| business_days_from(ship_date(options[:turn_around_time]), days) }
  end
  delivery_range
end
# Parses a ProcessShipmentReply into a LabelResponse carrying the
# tracking number and the Base64-decoded (PNG) label image.
def parse_ship_response(response)
  xml = build_document(response, 'ProcessShipmentReply')
  success = response_success?(xml)
  message = response_message(xml)
  response_info = Hash.from_xml(response)
  tracking_number = xml.css("CompletedPackageDetails TrackingIds TrackingNumber").text
  base_64_image = xml.css("Label Image").text
  labels = [Label.new(tracking_number, Base64.decode64(base_64_image))]
  LabelResponse.new(success, message, response_info, {labels: labels})
end
# Walks forward from +date+ one calendar day at a time until the given
# number of business days has elapsed; returns the resulting date.
def business_days_from(date, days)
  current = date
  accumulated = 0
  while accumulated < days
    current += 1.day
    accumulated += 1 if business_day?(current)
  end
  current
end
# Monday..Friday (wday 1-5) counts as a business day.
def business_day?(date)
  date.wday.between?(1, 5)
end
# Parses a TrackReply into a TrackingResponse. Raises when the reply
# holds zero or multiple TrackDetails, when FedEx reports a per-shipment
# error notification, or when status information is missing.
def parse_tracking_response(response, options)
  xml = build_document(response, 'TrackReply')
  success = response_success?(xml)
  message = response_message(xml)
  if success
    origin = nil
    delivery_signature = nil
    shipment_events = []
    # Exactly one TrackDetails is expected; zero or many is an error.
    all_tracking_details = xml.root.xpath('CompletedTrackDetails/TrackDetails')
    tracking_details = case all_tracking_details.length
                       when 1
                         all_tracking_details.first
                       when 0
                         raise ActiveShipping::Error, "The response did not contain tracking details"
                       else
                         all_unique_identifiers = xml.root.xpath('CompletedTrackDetails/TrackDetails/TrackingNumberUniqueIdentifier').map(&:text)
                         raise ActiveShipping::Error, "Multiple matches were found. Specify a unqiue identifier: #{all_unique_identifiers.join(', ')}"
                       end
    # Shipment-level errors arrive inside an otherwise-successful reply.
    first_notification = tracking_details.at('Notification')
    if first_notification.at('Severity').text == 'ERROR'
      case first_notification.at('Code').text
      when '9040'
        raise ActiveShipping::ShipmentNotFound, first_notification.at('Message').text
      else
        raise ActiveShipping::ResponseContentError, StandardError.new(first_notification.at('Message').text)
      end
    end
    tracking_number = tracking_details.at('TrackingNumber').text
    status_detail = tracking_details.at('StatusDetail')
    if status_detail.nil?
      raise ActiveShipping::Error, "Tracking response does not contain status information"
    end
    status_code = status_detail.at('Code').try(:text)
    if status_code.nil?
      raise ActiveShipping::Error, "Tracking response does not contain status code"
    end
    status_description = (status_detail.at('AncillaryDetails/ReasonDescription') || status_detail.at('Description')).text
    status = TRACKING_STATUS_CODES[status_code]
    # A delivery signature is only exposed once delivered with proof.
    if status_code == 'DL' && tracking_details.at('AvailableImages').try(:text) == 'SIGNATURE_PROOF_OF_DELIVERY'
      delivery_signature = tracking_details.at('DeliverySignatureName').text
    end
    if origin_node = tracking_details.at('OriginLocationAddress')
      origin = Location.new(
        :country => origin_node.at('CountryCode').text,
        :province => origin_node.at('StateOrProvinceCode').text,
        :city => origin_node.at('City').text
      )
    end
    destination = extract_address(tracking_details, DELIVERY_ADDRESS_NODE_NAMES)
    shipper_address = extract_address(tracking_details, SHIPPER_ADDRESS_NODE_NAMES)
    ship_time = extract_timestamp(tracking_details, 'ShipTimestamp')
    actual_delivery_time = extract_timestamp(tracking_details, 'ActualDeliveryTimestamp')
    scheduled_delivery_time = extract_timestamp(tracking_details, 'EstimatedDeliveryTimestamp')
    # Build scan events, skipping events without a usable address.
    tracking_details.xpath('Events').each do |event|
      address = event.at('Address')
      next if address.nil? || address.at('CountryCode').nil?
      city = address.at('City').try(:text)
      state = address.at('StateOrProvinceCode').try(:text)
      zip_code = address.at('PostalCode').try(:text)
      country = address.at('CountryCode').try(:text)
      location = Location.new(:city => city, :state => state, :postal_code => zip_code, :country => country)
      description = event.at('EventDescription').text
      time = Time.parse(event.at('Timestamp').text)
      zoneless_time = time.utc
      shipment_events << ShipmentEvent.new(description, zoneless_time, location)
    end
    shipment_events = shipment_events.sort_by(&:time)
  end
  # On failure the locals assigned inside the `if success` branch are
  # parse-time defined and evaluate to nil here; message carries the error.
  TrackingResponse.new(success, message, Hash.from_xml(response),
                       :carrier => @@name,
                       :xml => response,
                       :request => last_request,
                       :status => status,
                       :status_code => status_code,
                       :status_description => status_description,
                       :ship_time => ship_time,
                       :scheduled_delivery_date => scheduled_delivery_time,
                       :actual_delivery_date => actual_delivery_time,
                       :delivery_signature => delivery_signature,
                       :shipment_events => shipment_events,
                       :shipper_address => (shipper_address.nil? || shipper_address.unknown?) ? nil : shipper_address,
                       :origin => origin,
                       :destination => destination,
                       :tracking_number => tracking_number
                      )
end
# Current time shifted forward by the optional turn-around delay (hours).
def ship_timestamp(delay_in_hours)
  Time.now + (delay_in_hours || 0).hours
end
# Date component of the ship time after the optional delay (hours).
def ship_date(delay_in_hours)
  (Time.now + (delay_in_hours || 0).hours).to_date
end
# A reply is successful when HighestSeverity is SUCCESS, WARNING or NOTE;
# a missing HighestSeverity node counts as failure.
def response_success?(document)
  severity = document.root.at('HighestSeverity')
  !severity.nil? && %w(SUCCESS WARNING NOTE).include?(severity.text)
end
# Formats the first Notifications entry as "SEVERITY - CODE: MESSAGE",
# or returns an empty string when the reply has no notifications.
def response_message(document)
  note = document.root.at('Notifications')
  return "" if note.nil?
  format("%s - %s: %s", note.at('Severity').text, note.at('Code').text, note.at('Message').text)
end
# POSTs the request (with newlines stripped) to the test or live endpoint.
def commit(request, test = false)
  endpoint = test ? TEST_URL : LIVE_URL
  ssl_post(endpoint, request.delete("\n"))
end
# Maps FedEx transit-time tokens (e.g. "TWO_DAYS") to day counts using
# their position in TRANSIT_TIMES; unrecognized tokens become 0.
def parse_transit_times(times)
  times.map { |token| TRANSIT_TIMES.index(token.to_s.chomp).to_i }
end
# Builds a Location from the first node found among possible_node_names.
# Missing province/city default to 'unknown'; a missing country becomes
# a placeholder "Unknown or Invalid Territory" Country object.
def extract_address(document, possible_node_names)
  node = nil
  args = {}
  possible_node_names.each do |name|
    node = document.at(name)
    break if node
  end
  if node
    args[:country] =
      node.at('CountryCode').try(:text) ||
      ActiveUtils::Country.new(:alpha2 => 'ZZ', :name => 'Unknown or Invalid Territory', :alpha3 => 'ZZZ', :numeric => '999')
    args[:province] = node.at('StateOrProvinceCode').try(:text) || 'unknown'
    args[:city] = node.at('City').try(:text) || 'unknown'
  end
  Location.new(args)
end
# Returns a Date when the node holds a midnight timestamp (FedEx's way
# of sending "date only"), a Time otherwise, or nil when absent.
def extract_timestamp(document, node_name)
  node = document.at(node_name)
  return unless node
  if (match = node.text.match(/\A(\d{4}-\d{2}-\d{2})T00:00:00\Z/))
    Date.parse(match[1])
  else
    Time.parse(node.text)
  end
end
# Strictly parses the response XML, strips namespaces and verifies the
# root element name; wraps any parse or shape problem (including
# malformed XML) in an ActiveShipping::ResponseContentError.
def build_document(xml, expected_root_tag)
  document = Nokogiri.XML(xml) { |config| config.strict }
  document.remove_namespaces!
  if document.root.nil? || document.root.name != expected_root_tag
    raise ActiveShipping::ResponseContentError.new(StandardError.new('Invalid document'), xml)
  end
  document
rescue Nokogiri::XML::SyntaxError => e
  raise ActiveShipping::ResponseContentError.new(e, xml)
end
# The US, Liberia and Myanmar use imperial units (LB/IN); everyone
# else gets metric (KG/CM).
def location_uses_imperial(location)
  case location.country_code(:alpha2)
  when 'US', 'LR', 'MM' then true
  else false
  end
end
end
end
# NOTE(review): the following line appears to be a stray changelog/commit
# message pasted into the source; kept as a comment so the file parses.
# "Remove reference to undefined variable if using FedEx logger."
module ActiveShipping
# FedEx carrier implementation.
#
# FedEx module by Jimmy Baker (http://github.com/jimmyebaker)
# Documentation can be found here: http://images.fedex.com/us/developer/product/WebServices/MyWebHelp/PropDevGuide.pdf
class FedEx < Carrier
self.retry_safe = true
cattr_reader :name
@@name = "FedEx"
# FedEx XML gateway endpoints.
TEST_URL = 'https://gatewaybeta.fedex.com:443/xml'
LIVE_URL = 'https://gateway.fedex.com:443/xml'

# Frozen to prevent accidental mutation of shared class-level constants.
CARRIER_CODES = {
  "fedex_ground" => "FDXG",
  "fedex_express" => "FDXE"
}.freeze

# Node names searched (in order) for delivery / shipper addresses.
DELIVERY_ADDRESS_NODE_NAMES = %w(DestinationAddress ActualDeliveryAddress).freeze
SHIPPER_ADDRESS_NODE_NAMES = %w(ShipperAddress).freeze

# FedEx service code => human-readable service name.
SERVICE_TYPES = {
  "PRIORITY_OVERNIGHT" => "FedEx Priority Overnight",
  "PRIORITY_OVERNIGHT_SATURDAY_DELIVERY" => "FedEx Priority Overnight Saturday Delivery",
  "FEDEX_2_DAY" => "FedEx 2 Day",
  "FEDEX_2_DAY_SATURDAY_DELIVERY" => "FedEx 2 Day Saturday Delivery",
  "STANDARD_OVERNIGHT" => "FedEx Standard Overnight",
  "FIRST_OVERNIGHT" => "FedEx First Overnight",
  "FIRST_OVERNIGHT_SATURDAY_DELIVERY" => "FedEx First Overnight Saturday Delivery",
  "FEDEX_EXPRESS_SAVER" => "FedEx Express Saver",
  "FEDEX_1_DAY_FREIGHT" => "FedEx 1 Day Freight",
  "FEDEX_1_DAY_FREIGHT_SATURDAY_DELIVERY" => "FedEx 1 Day Freight Saturday Delivery",
  "FEDEX_2_DAY_FREIGHT" => "FedEx 2 Day Freight",
  "FEDEX_2_DAY_FREIGHT_SATURDAY_DELIVERY" => "FedEx 2 Day Freight Saturday Delivery",
  "FEDEX_3_DAY_FREIGHT" => "FedEx 3 Day Freight",
  "FEDEX_3_DAY_FREIGHT_SATURDAY_DELIVERY" => "FedEx 3 Day Freight Saturday Delivery",
  "INTERNATIONAL_PRIORITY" => "FedEx International Priority",
  "INTERNATIONAL_PRIORITY_SATURDAY_DELIVERY" => "FedEx International Priority Saturday Delivery",
  "INTERNATIONAL_ECONOMY" => "FedEx International Economy",
  "INTERNATIONAL_FIRST" => "FedEx International First",
  "INTERNATIONAL_PRIORITY_FREIGHT" => "FedEx International Priority Freight",
  "INTERNATIONAL_ECONOMY_FREIGHT" => "FedEx International Economy Freight",
  "GROUND_HOME_DELIVERY" => "FedEx Ground Home Delivery",
  "FEDEX_GROUND" => "FedEx Ground",
  "INTERNATIONAL_GROUND" => "FedEx International Ground",
  "SMART_POST" => "FedEx SmartPost",
  "FEDEX_FREIGHT_PRIORITY" => "FedEx Freight Priority",
  "FEDEX_FREIGHT_ECONOMY" => "FedEx Freight Economy"
}.freeze

PACKAGE_TYPES = {
  "fedex_envelope" => "FEDEX_ENVELOPE",
  "fedex_pak" => "FEDEX_PAK",
  "fedex_box" => "FEDEX_BOX",
  "fedex_tube" => "FEDEX_TUBE",
  "fedex_10_kg_box" => "FEDEX_10KG_BOX",
  "fedex_25_kg_box" => "FEDEX_25KG_BOX",
  "your_packaging" => "YOUR_PACKAGING"
}.freeze

DROPOFF_TYPES = {
  'regular_pickup' => 'REGULAR_PICKUP',
  'request_courier' => 'REQUEST_COURIER',
  'dropbox' => 'DROP_BOX',
  'business_service_center' => 'BUSINESS_SERVICE_CENTER',
  'station' => 'STATION'
}.freeze

SIGNATURE_OPTION_CODES = {
  adult: 'ADULT', # 21 years plus
  direct: 'DIRECT', # A person at the delivery address
  indirect: 'INDIRECT', # A person at the delivery address, or a neighbor, or a signed note for fedex on the door
  none_required: 'NO_SIGNATURE_REQUIRED',
  default_for_service: 'SERVICE_DEFAULT'
}.freeze

PAYMENT_TYPES = {
  'sender' => 'SENDER',
  'recipient' => 'RECIPIENT',
  'third_party' => 'THIRDPARTY',
  'collect' => 'COLLECT'
}.freeze

PACKAGE_IDENTIFIER_TYPES = {
  'tracking_number' => 'TRACKING_NUMBER_OR_DOORTAG',
  'door_tag' => 'TRACKING_NUMBER_OR_DOORTAG',
  'rma' => 'RMA',
  'ground_shipment_id' => 'GROUND_SHIPMENT_ID',
  'ground_invoice_number' => 'GROUND_INVOICE_NUMBER',
  'ground_customer_reference' => 'GROUND_CUSTOMER_REFERENCE',
  'ground_po' => 'GROUND_PO',
  'express_reference' => 'EXPRESS_REFERENCE',
  'express_mps_master' => 'EXPRESS_MPS_MASTER',
  'shipper_reference' => 'SHIPPER_REFERENCE',
}.freeze

# Index in this array == number of transit days (see parse_transit_times).
TRANSIT_TIMES = %w(UNKNOWN ONE_DAY TWO_DAYS THREE_DAYS FOUR_DAYS FIVE_DAYS SIX_DAYS SEVEN_DAYS EIGHT_DAYS NINE_DAYS TEN_DAYS ELEVEN_DAYS TWELVE_DAYS THIRTEEN_DAYS FOURTEEN_DAYS FIFTEEN_DAYS SIXTEEN_DAYS SEVENTEEN_DAYS EIGHTEEN_DAYS).freeze

# FedEx tracking codes as described in the FedEx Tracking Service WSDL Guide
# All delays also have been marked as exceptions
TRACKING_STATUS_CODES = HashWithIndifferentAccess.new(
  'AA' => :at_airport,
  'AD' => :at_delivery,
  'AF' => :at_fedex_facility,
  'AR' => :at_fedex_facility,
  'AP' => :at_pickup,
  'CA' => :canceled,
  'CH' => :location_changed,
  'DE' => :exception,
  'DL' => :delivered,
  'DP' => :departed_fedex_location,
  'DR' => :vehicle_furnished_not_used,
  'DS' => :vehicle_dispatched,
  'DY' => :exception,
  'EA' => :exception,
  'ED' => :enroute_to_delivery,
  'EO' => :enroute_to_origin_airport,
  'EP' => :enroute_to_pickup,
  'FD' => :at_fedex_destination,
  'HL' => :held_at_location,
  'IT' => :in_transit,
  'LO' => :left_origin,
  'OC' => :order_created,
  'OD' => :out_for_delivery,
  'PF' => :plane_in_flight,
  'PL' => :plane_landed,
  'PU' => :picked_up,
  'RS' => :return_to_shipper,
  'SE' => :exception,
  'SF' => :at_sort_facility,
  'SP' => :split_status,
  'TR' => :transfer
).freeze
# Human-readable name for a FedEx service code; unknown codes are
# prettified from the code itself (e.g. "FEDEX_FOO" -> "FedEx Foo").
def self.service_name_for_code(service_code)
  known_name = SERVICE_TYPES[service_code]
  return known_name if known_name
  "FedEx #{service_code.titleize.sub(/Fedex /, '')}"
end
# Credential keys every FedEx request needs: developer key, password,
# account number and meter number (stored under :login).
def requirements
  %i(key password account login)
end
# Requests rate quotes for the given shipment; returns a RateResponse.
def find_rates(origin, destination, packages, options = {})
  # merge, not update: update would permanently mutate the carrier's
  # stored options with per-call values, leaking state between calls.
  options = @options.merge(options)
  packages = Array(packages)
  rate_request = build_rate_request(origin, destination, packages, options)
  xml = commit(save_request(rate_request), (options[:test] || false))
  parse_rate_response(origin, destination, packages, xml, options)
end
# Fetches tracking details for one tracking number; returns a
# TrackingResponse (raises on ambiguous or missing shipments).
def find_tracking_info(tracking_number, options = {})
  # merge, not update: avoid mutating the carrier's stored options.
  options = @options.merge(options)
  tracking_request = build_tracking_request(tracking_number, options)
  xml = commit(save_request(tracking_request), (options[:test] || false))
  parse_tracking_response(xml, options)
end
# Buys a shipping label; returns a LabelResponse. Currently limited to
# a single package per request.
def create_shipment(origin, destination, packages, options = {})
  # merge, not update: avoid mutating the carrier's stored options.
  options = @options.merge(options)
  packages = Array(packages)
  raise Error, "Multiple packages are not supported yet." if packages.length > 1

  request = build_shipment_request(origin, destination, packages, options)
  logger.debug(request) if logger
  response = commit(save_request(request), (options[:test] || false))
  parse_ship_response(response)
end
# Maximum number of characters FedEx accepts in a single address field.
# See the FedEx Developer Guide.
def maximum_address_field_length
  35
end
protected
def build_shipment_request(origin, destination, packages, options = {})
imperial = location_uses_imperial(origin)
xml_builder = Nokogiri::XML::Builder.new do |xml|
xml.ProcessShipmentRequest(xmlns: 'http://fedex.com/ws/ship/v13') do
build_request_header(xml)
build_version_node(xml, 'ship', 13, 0 ,0)
xml.RequestedShipment do
xml.ShipTimestamp(ship_timestamp(options[:turn_around_time]).iso8601(0))
xml.DropoffType('REGULAR_PICKUP')
xml.ServiceType(options[:service_type] || 'FEDEX_GROUND')
xml.PackagingType('YOUR_PACKAGING')
xml.Shipper do
build_contact_address_nodes(xml, options[:shipper] || origin)
end
xml.Recipient do
build_contact_address_nodes(xml, destination)
end
xml.Origin do
build_contact_address_nodes(xml, origin)
end
xml.ShippingChargesPayment do
xml.PaymentType('SENDER')
xml.Payor do
build_shipment_responsible_party_node(xml, options[:shipper] || origin)
end
end
xml.LabelSpecification do
xml.LabelFormatType('COMMON2D')
xml.ImageType('PNG')
xml.LabelStockType('PAPER_7X4.75')
end
xml.RateRequestTypes('ACCOUNT')
xml.PackageCount(packages.size)
packages.each do |package|
xml.RequestedPackageLineItems do
xml.GroupPackageCount(1)
build_package_weight_node(xml, package, imperial)
build_package_dimensions_node(xml, package, imperial)
# Reference Numbers
reference_numbers = Array(package.options[:reference_numbers])
if reference_numbers.size > 0
xml.CustomerReferences do
reference_numbers.each do |reference_number_info|
xml.CustomerReferenceType(reference_number_info[:type] || "CUSTOMER_REFERENCE")
xml.Value(reference_number_info[:value])
end
end
end
xml.SpecialServicesRequested do
xml.SpecialServiceTypes("SIGNATURE_OPTION")
xml.SignatureOptionDetail do
xml.OptionType(SIGNATURE_OPTION_CODES[package.options[:signature_option] || :default_for_service])
end
end
end
end
end
end
end
xml_builder.to_xml
end
# Emits a FedEx Contact + Address XML pair for the given location.
# Used by ship requests for the Shipper/Recipient/Origin parties.
def build_contact_address_nodes(xml, location)
  xml.Contact do
    xml.PersonName(location.name)
    xml.CompanyName(location.company)
    xml.PhoneNumber(location.phone)
  end
  xml.Address do
    xml.StreetLines(location.address1) if location.address1
    xml.StreetLines(location.address2) if location.address2
    xml.City(location.city) if location.city
    xml.StateOrProvinceCode(location.state)
    xml.PostalCode(location.postal_code)
    xml.CountryCode(location.country_code(:alpha2))
    # String 'true' here, only when the address is residential.
    xml.Residential('true') if location.residential?
  end
end
# Emits the ResponsibleParty (account number + contact) paying for the
# shipment. Always bills the carrier's own @options[:account].
def build_shipment_responsible_party_node(xml, origin)
  xml.ResponsibleParty do
    xml.AccountNumber(@options[:account])
    xml.Contact do
      xml.PersonName(origin.name)
      xml.CompanyName(origin.company)
      xml.PhoneNumber(origin.phone)
    end
  end
end
def build_rate_request(origin, destination, packages, options = {})
imperial = location_uses_imperial(origin)
xml_builder = Nokogiri::XML::Builder.new do |xml|
xml.RateRequest(xmlns: 'http://fedex.com/ws/rate/v13') do
build_request_header(xml)
build_version_node(xml, 'crs', 13, 0 ,0)
# Returns delivery dates
xml.ReturnTransitAndCommit(true)
# Returns saturday delivery shipping options when available
xml.VariableOptions('SATURDAY_DELIVERY')
xml.RequestedShipment do
xml.ShipTimestamp(ship_timestamp(options[:turn_around_time]).iso8601(0))
freight = has_freight?(options)
unless freight
# fedex api wants this up here otherwise request returns an error
xml.DropoffType(options[:dropoff_type] || 'REGULAR_PICKUP')
xml.PackagingType(options[:packaging_type] || 'YOUR_PACKAGING')
end
build_location_node(xml, 'Shipper', options[:shipper] || origin)
build_location_node(xml, 'Recipient', destination)
if options[:shipper] && options[:shipper] != origin
build_location_node(xml, 'Origin', origin)
end
if freight
freight_options = options[:freight]
build_shipping_charges_payment_node(xml, freight_options)
build_freight_shipment_detail_node(xml, freight_options, packages, imperial)
build_rate_request_types_node(xml)
else
xml.SmartPostDetail do
xml.Indicia(options[:smart_post_indicia] || 'PARCEL_SELECT')
xml.HubId(options[:smart_post_hub_id] || 5902) # default to LA
end
build_rate_request_types_node(xml)
xml.PackageCount(packages.size)
build_packages_nodes(xml, packages, imperial)
end
end
end
end
xml_builder.to_xml
end
# Emits one RequestedPackageLineItems element per package. Returns the
# builder's per-package results (callers ignore the return value).
def build_packages_nodes(xml, packages, imperial)
  packages.map do |package|
    xml.RequestedPackageLineItems do
      xml.GroupPackageCount(1)
      build_package_weight_node(xml, package, imperial)
      build_package_dimensions_node(xml, package, imperial)
    end
  end
end
# Freight-only: emits who pays (payment type + freight account number).
def build_shipping_charges_payment_node(xml, freight_options)
  xml.ShippingChargesPayment do
    xml.PaymentType(freight_options[:payment_type])
    xml.Payor do
      xml.ResponsibleParty do
        # TODO: case of different freight account numbers?
        xml.AccountNumber(freight_options[:account])
      end
    end
  end
end
# Freight-only: emits FreightShipmentDetail with the billing address,
# role and one LineItems entry per package (shared class/packaging).
def build_freight_shipment_detail_node(xml, freight_options, packages, imperial)
  xml.FreightShipmentDetail do
    # TODO: case of different freight account numbers?
    xml.FedExFreightAccountNumber(freight_options[:account])
    build_location_node(xml, 'FedExFreightBillingContactAndAddress', freight_options[:billing_location])
    xml.Role(freight_options[:role])
    packages.each do |pkg|
      xml.LineItems do
        xml.FreightClass(freight_options[:freight_class])
        xml.Packaging(freight_options[:packaging])
        build_package_weight_node(xml, pkg, imperial)
        build_package_dimensions_node(xml, pkg, imperial)
      end
    end
  end
end
# True when the options carry freight details under :freight.
# The original checked `options[:freight] && options[:freight].present?`;
# the first test is redundant because `present?` is already false for nil.
def has_freight?(options)
  options[:freight].present?
end
# Emits the package weight rounded to 3 decimals with a floor of 0.1
# (presumably because FedEx rejects zero weights — confirm in dev guide).
def build_package_weight_node(xml, pkg, imperial)
  xml.Weight do
    xml.Units(imperial ? 'LB' : 'KG')
    xml.Value([((imperial ? pkg.lbs : pkg.kgs).to_f * 1000).round / 1000.0, 0.1].max)
  end
end
# Emits package dimensions; each axis is rounded to 3 decimals and then
# ceil'd to a whole number before being sent.
def build_package_dimensions_node(xml, pkg, imperial)
  xml.Dimensions do
    [:length, :width, :height].each do |axis|
      value = ((imperial ? pkg.inches(axis) : pkg.cm(axis)).to_f * 1000).round / 1000.0 # 3 decimals
      xml.public_send(axis.to_s.capitalize, value.ceil)
    end
    xml.Units(imperial ? 'IN' : 'CM')
  end
end
# Emits RateRequestTypes; defaults to account-specific ('ACCOUNT') rates.
def build_rate_request_types_node(xml, type = 'ACCOUNT')
  xml.public_send(:RateRequestTypes, type)
end
# Builds the TrackRequest XML (track service v7) for one tracking number.
# options may narrow the search: :package_identifier_type,
# :ship_date_range_begin/:ship_date_range_end, :unique_identifier.
def build_tracking_request(tracking_number, options = {})
  xml_builder = Nokogiri::XML::Builder.new do |xml|
    xml.TrackRequest(xmlns: 'http://fedex.com/ws/track/v7') do
      build_request_header(xml)
      build_version_node(xml, 'trck', 7, 0, 0)
      xml.SelectionDetails do
        xml.PackageIdentifier do
          xml.Type(PACKAGE_IDENTIFIER_TYPES[options[:package_identifier_type] || 'tracking_number'])
          xml.Value(tracking_number)
        end
        # Optional filters to disambiguate reused tracking numbers.
        xml.ShipDateRangeBegin(options[:ship_date_range_begin]) if options[:ship_date_range_begin]
        xml.ShipDateRangeEnd(options[:ship_date_range_end]) if options[:ship_date_range_end]
        xml.TrackingNumberUniqueIdentifier(options[:unique_identifier]) if options[:unique_identifier]
      end
      xml.ProcessingOptions('INCLUDE_DETAILED_SCANS')
    end
  end
  xml_builder.to_xml
end
# Emits the authentication/client/transaction preamble shared by every
# FedEx request: key + password, account + meter, and a transaction id.
def build_request_header(xml)
  xml.WebAuthenticationDetail do
    xml.UserCredential do
      xml.Key(@options[:key])
      xml.Password(@options[:password])
    end
  end
  xml.ClientDetail do
    xml.AccountNumber(@options[:account])
    # The FedEx meter number is stored under the generic :login key.
    xml.MeterNumber(@options[:login])
  end
  xml.TransactionDetail do
    xml.CustomerTransactionId(@options[:transaction_id] || 'ActiveShipping') # TODO: Need to do something better with this...
  end
end
# Emits the FedEx Version element identifying the service and its
# major/intermediate/minor API version.
def build_version_node(xml, service_id, major, intermediate, minor)
  xml.Version do
    xml.ServiceId(service_id)
    { Major: major, Intermediate: intermediate, Minor: minor }.each do |tag, value|
      xml.public_send(tag, value)
    end
  end
end
# Emits a bare Address block (no contact info) under the given element
# name. NOTE(review): unlike build_contact_address_nodes this omits
# StateOrProvinceCode — confirm rating does not require it.
def build_location_node(xml, name, location)
  xml.public_send(name) do
    xml.Address do
      xml.StreetLines(location.address1) if location.address1
      xml.StreetLines(location.address2) if location.address2
      xml.City(location.city) if location.city
      xml.PostalCode(location.postal_code)
      xml.CountryCode(location.country_code(:alpha2))
      # Boolean true here, vs the string 'true' used in ship requests.
      xml.Residential(true) unless location.commercial?
    end
  end
end
# Parses a RateReply into a RateResponse, one RateEstimate per
# RateReplyDetails entry. Saturday-delivery options surface as a
# distinct "<CODE>_SATURDAY_DELIVERY" service name.
def parse_rate_response(origin, destination, packages, response, options)
  xml = build_document(response, 'RateReply')
  success = response_success?(xml)
  message = response_message(xml)
  if success
    rate_estimates = xml.root.css('> RateReplyDetails').map do |rated_shipment|
      service_code = rated_shipment.at('ServiceType').text
      is_saturday_delivery = rated_shipment.at('AppliedOptions').try(:text) == 'SATURDAY_DELIVERY'
      service_type = is_saturday_delivery ? "#{service_code}_SATURDAY_DELIVERY" : service_code
      # Ground services report transit time in days instead of a timestamp.
      transit_time = rated_shipment.at('TransitTime').text if ["FEDEX_GROUND", "GROUND_HOME_DELIVERY"].include?(service_code)
      max_transit_time = rated_shipment.at('MaximumTransitTime').try(:text) if service_code == "FEDEX_GROUND"
      delivery_timestamp = rated_shipment.at('DeliveryTimestamp').try(:text)
      delivery_range = delivery_range_from(transit_time, max_transit_time, delivery_timestamp, options)
      currency = rated_shipment.at('RatedShipmentDetails/ShipmentRateDetail/TotalNetCharge/Currency').text
      RateEstimate.new(origin, destination, @@name,
                       self.class.service_name_for_code(service_type),
                       :service_code => service_code,
                       :total_price => rated_shipment.at('RatedShipmentDetails/ShipmentRateDetail/TotalNetCharge/Amount').text.to_f,
                       :currency => currency,
                       :packages => packages,
                       :delivery_range => delivery_range)
    end
    # A successful reply with zero rates is still a failure for callers.
    if rate_estimates.empty?
      success = false
      message = "No shipping rates could be found for the destination address" if message.blank?
    end
  else
    rate_estimates = []
  end
  RateResponse.new(success, message, Hash.from_xml(response), :rates => rate_estimates, :xml => response, :request => last_request, :log_xml => options[:log_xml])
end
end
def delivery_range_from(transit_time, max_transit_time, delivery_timestamp, options)
delivery_range = [delivery_timestamp, delivery_timestamp]
# if there's no delivery timestamp but we do have a transit time, use it
if delivery_timestamp.blank? && transit_time.present?
transit_range = parse_transit_times([transit_time, max_transit_time.presence || transit_time])
delivery_range = transit_range.map { |days| business_days_from(ship_date(options[:turn_around_time]), days) }
end
delivery_range
end
# Parses a ProcessShipmentReply into a LabelResponse carrying the
# tracking number and the Base64-decoded (PNG) label image.
def parse_ship_response(response)
  xml = build_document(response, 'ProcessShipmentReply')
  success = response_success?(xml)
  message = response_message(xml)
  response_info = Hash.from_xml(response)
  tracking_number = xml.css("CompletedPackageDetails TrackingIds TrackingNumber").text
  base_64_image = xml.css("Label Image").text
  labels = [Label.new(tracking_number, Base64.decode64(base_64_image))]
  LabelResponse.new(success, message, response_info, {labels: labels})
end
# Walks forward from +date+ one calendar day at a time until the given
# number of business days has elapsed; returns the resulting date.
def business_days_from(date, days)
  current = date
  accumulated = 0
  while accumulated < days
    current += 1.day
    accumulated += 1 if business_day?(current)
  end
  current
end
# Monday..Friday (wday 1-5) counts as a business day.
def business_day?(date)
  date.wday.between?(1, 5)
end
# Parses a TrackReply into a TrackingResponse.
#
# @param response [String] raw XML returned by the track request
# @param options [Hash] unused here; kept for interface symmetry
# @return [TrackingResponse]
# @raise [ActiveShipping::Error] when the reply has no tracking details,
#   matches multiple shipments, or lacks status information
# @raise [ActiveShipping::ShipmentNotFound] on FedEx error code 9040
# @raise [ActiveShipping::ResponseContentError] on other FedEx errors
def parse_tracking_response(response, options)
  xml = build_document(response, 'TrackReply')
  success = response_success?(xml)
  message = response_message(xml)

  if success
    origin = nil
    delivery_signature = nil
    shipment_events = []

    all_tracking_details = xml.root.xpath('CompletedTrackDetails/TrackDetails')
    tracking_details = case all_tracking_details.length
    when 1
      all_tracking_details.first
    when 0
      raise ActiveShipping::Error, "The response did not contain tracking details"
    else
      all_unique_identifiers = xml.root.xpath('CompletedTrackDetails/TrackDetails/TrackingNumberUniqueIdentifier').map(&:text)
      # Typo fix: "unqiue" -> "unique".
      raise ActiveShipping::Error, "Multiple matches were found. Specify a unique identifier: #{all_unique_identifiers.join(', ')}"
    end

    # FedEx reports request-level problems as an ERROR notification.
    first_notification = tracking_details.at('Notification')
    if first_notification.at('Severity').text == 'ERROR'
      case first_notification.at('Code').text
      when '9040'
        raise ActiveShipping::ShipmentNotFound, first_notification.at('Message').text
      else
        raise ActiveShipping::ResponseContentError, StandardError.new(first_notification.at('Message').text)
      end
    end

    tracking_number = tracking_details.at('TrackingNumber').text
    status_detail = tracking_details.at('StatusDetail')
    if status_detail.nil?
      raise ActiveShipping::Error, "Tracking response does not contain status information"
    end
    status_code = status_detail.at('Code').try(:text)
    if status_code.nil?
      raise ActiveShipping::Error, "Tracking response does not contain status code"
    end
    # Prefer the more specific ancillary reason over the generic description.
    status_description = (status_detail.at('AncillaryDetails/ReasonDescription') || status_detail.at('Description')).text
    status = TRACKING_STATUS_CODES[status_code]

    if status_code == 'DL' && tracking_details.at('AvailableImages').try(:text) == 'SIGNATURE_PROOF_OF_DELIVERY'
      delivery_signature = tracking_details.at('DeliverySignatureName').text
    end

    if origin_node = tracking_details.at('OriginLocationAddress')
      origin = Location.new(
        :country => origin_node.at('CountryCode').text,
        :province => origin_node.at('StateOrProvinceCode').text,
        :city => origin_node.at('City').text
      )
    end

    destination = extract_address(tracking_details, DELIVERY_ADDRESS_NODE_NAMES)
    shipper_address = extract_address(tracking_details, SHIPPER_ADDRESS_NODE_NAMES)

    ship_time = extract_timestamp(tracking_details, 'ShipTimestamp')
    actual_delivery_time = extract_timestamp(tracking_details, 'ActualDeliveryTimestamp')
    scheduled_delivery_time = extract_timestamp(tracking_details, 'EstimatedDeliveryTimestamp')

    # Collect scan events; events without a country have no usable location.
    tracking_details.xpath('Events').each do |event|
      address = event.at('Address')
      next if address.nil? || address.at('CountryCode').nil?

      city = address.at('City').try(:text)
      state = address.at('StateOrProvinceCode').try(:text)
      zip_code = address.at('PostalCode').try(:text)
      country = address.at('CountryCode').try(:text)

      location = Location.new(:city => city, :state => state, :postal_code => zip_code, :country => country)
      description = event.at('EventDescription').text

      time = Time.parse(event.at('Timestamp').text)
      zoneless_time = time.utc

      shipment_events << ShipmentEvent.new(description, zoneless_time, location)
    end
    shipment_events = shipment_events.sort_by(&:time)
  end

  TrackingResponse.new(success, message, Hash.from_xml(response),
    :carrier => @@name,
    :xml => response,
    :request => last_request,
    :status => status,
    :status_code => status_code,
    :status_description => status_description,
    :ship_time => ship_time,
    :scheduled_delivery_date => scheduled_delivery_time,
    :actual_delivery_date => actual_delivery_time,
    :delivery_signature => delivery_signature,
    :shipment_events => shipment_events,
    :shipper_address => (shipper_address.nil? || shipper_address.unknown?) ? nil : shipper_address,
    :origin => origin,
    :destination => destination,
    :tracking_number => tracking_number
  )
end
# Current time offset by the given turn-around delay in hours (nil => 0).
def ship_timestamp(delay_in_hours)
  Time.now + (delay_in_hours || 0).hours
end
# Date component of the shipment time after applying the turn-around
# delay in hours (nil treated as 0).
def ship_date(delay_in_hours)
  ship_time = Time.now + (delay_in_hours || 0).hours
  ship_time.to_date
end
# A FedEx reply counts as successful when HighestSeverity is SUCCESS,
# WARNING or NOTE; a missing node is treated as failure.
def response_success?(document)
  severity_node = document.root.at('HighestSeverity')
  return false unless severity_node

  case severity_node.text
  when 'SUCCESS', 'WARNING', 'NOTE' then true
  else false
  end
end
# Formats the first Notifications element as "SEVERITY - CODE: MESSAGE";
# returns an empty string when the reply has no notifications.
def response_message(document)
  notifications = document.root.at('Notifications')
  return "" if notifications.nil?

  severity = notifications.at('Severity').text
  code = notifications.at('Code').text
  text = notifications.at('Message').text
  "#{severity} - #{code}: #{text}"
end
# POSTs the request XML (newlines stripped) to the test or live gateway.
def commit(request, test = false)
  url = test ? TEST_URL : LIVE_URL
  ssl_post(url, request.delete("\n"))
end
# Maps FedEx transit-time tokens (e.g. "TWO_DAYS") to integer day counts.
#
# Each token is looked up by position in TRANSIT_TIMES; unknown tokens
# (and "UNKNOWN" itself, at index 0) map to 0 via `nil.to_i`/index 0,
# matching the original behavior.
#
# @param times [Array<String>] transit-time tokens from the rate reply
# @return [Array<Integer>] day counts, one per input token
def parse_transit_times(times)
  times.map { |day_count| TRANSIT_TIMES.index(day_count.to_s.chomp).to_i }
end
# Builds a Location from the first matching address node in +document+.
#
# @param document [Nokogiri::XML::Node] tracking-details node to search
# @param possible_node_names [Array<String>] node names tried in order
# @return [Location] populated from the node, or empty when none match
def extract_address(document, possible_node_names)
  node = nil
  args = {}
  # Take the first candidate node name present in the document.
  possible_node_names.each do |name|
    node = document.at(name)
    break if node
  end
  if node
    # NOTE(review): when CountryCode is absent this falls back to an
    # ActiveUtils::Country *object* rather than a code string — looks
    # intentional (Location appears to accept it), but worth confirming.
    args[:country] =
      node.at('CountryCode').try(:text) ||
      ActiveUtils::Country.new(:alpha2 => 'ZZ', :name => 'Unknown or Invalid Territory', :alpha3 => 'ZZZ', :numeric => '999')
    args[:province] = node.at('StateOrProvinceCode').try(:text) || 'unknown'
    args[:city] = node.at('City').try(:text) || 'unknown'
  end
  Location.new(args)
end
# Pulls the named timestamp node out of +document+ and parses it.
# Midnight timestamps are treated as date-only values; anything else is
# parsed as a full Time. Returns nil when the node is absent.
def extract_timestamp(document, node_name)
  node = document.at(node_name)
  return unless node

  raw = node.text
  if (match = raw.match(/\A(\d{4}-\d{2}-\d{2})T00:00:00\Z/))
    Date.parse(match[1])
  else
    Time.parse(raw)
  end
end
# Parses +xml+ strictly with Nokogiri, strips namespaces, and verifies
# the root element matches +expected_root_tag+.
#
# @param xml [String] raw response body
# @param expected_root_tag [String] required root element name
# @return [Nokogiri::XML::Document]
# @raise [ActiveShipping::ResponseContentError] when the XML is
#   malformed or the root tag is not the expected one
def build_document(xml, expected_root_tag)
  document = Nokogiri.XML(xml) { |config| config.strict }
  document.remove_namespaces!
  if document.root.nil? || document.root.name != expected_root_tag
    raise ActiveShipping::ResponseContentError.new(StandardError.new('Invalid document'), xml)
  end
  document
rescue Nokogiri::XML::SyntaxError => e
  raise ActiveShipping::ResponseContentError.new(e, xml)
end
# FedEx expects imperial units (LB/IN) for the US, Liberia and Myanmar;
# metric (KG/CM) everywhere else.
def location_uses_imperial(location)
  case location.country_code(:alpha2)
  when 'US', 'LR', 'MM' then true
  else false
  end
end
end
end |
module ActiveShipping
# FedEx carrier implementation.
#
# FedEx module by Jimmy Baker (http://github.com/jimmyebaker)
# Documentation can be found here: http://images.fedex.com/us/developer/product/WebServices/MyWebHelp/PropDevGuide.pdf
class FedEx < Carrier
self.retry_safe = true
cattr_reader :name
@@name = "FedEx"
TEST_URL = 'https://gatewaybeta.fedex.com:443/xml'
LIVE_URL = 'https://gateway.fedex.com:443/xml'
CARRIER_CODES = {
"fedex_ground" => "FDXG",
"fedex_express" => "FDXE"
}
DELIVERY_ADDRESS_NODE_NAMES = %w(DestinationAddress ActualDeliveryAddress)
SHIPPER_ADDRESS_NODE_NAMES = %w(ShipperAddress)
SERVICE_TYPES = {
"PRIORITY_OVERNIGHT" => "FedEx Priority Overnight",
"PRIORITY_OVERNIGHT_SATURDAY_DELIVERY" => "FedEx Priority Overnight Saturday Delivery",
"FEDEX_2_DAY" => "FedEx 2 Day",
"FEDEX_2_DAY_SATURDAY_DELIVERY" => "FedEx 2 Day Saturday Delivery",
"STANDARD_OVERNIGHT" => "FedEx Standard Overnight",
"FIRST_OVERNIGHT" => "FedEx First Overnight",
"FIRST_OVERNIGHT_SATURDAY_DELIVERY" => "FedEx First Overnight Saturday Delivery",
"FEDEX_EXPRESS_SAVER" => "FedEx Express Saver",
"FEDEX_1_DAY_FREIGHT" => "FedEx 1 Day Freight",
"FEDEX_1_DAY_FREIGHT_SATURDAY_DELIVERY" => "FedEx 1 Day Freight Saturday Delivery",
"FEDEX_2_DAY_FREIGHT" => "FedEx 2 Day Freight",
"FEDEX_2_DAY_FREIGHT_SATURDAY_DELIVERY" => "FedEx 2 Day Freight Saturday Delivery",
"FEDEX_3_DAY_FREIGHT" => "FedEx 3 Day Freight",
"FEDEX_3_DAY_FREIGHT_SATURDAY_DELIVERY" => "FedEx 3 Day Freight Saturday Delivery",
"INTERNATIONAL_PRIORITY" => "FedEx International Priority",
"INTERNATIONAL_PRIORITY_SATURDAY_DELIVERY" => "FedEx International Priority Saturday Delivery",
"INTERNATIONAL_ECONOMY" => "FedEx International Economy",
"INTERNATIONAL_FIRST" => "FedEx International First",
"INTERNATIONAL_PRIORITY_FREIGHT" => "FedEx International Priority Freight",
"INTERNATIONAL_ECONOMY_FREIGHT" => "FedEx International Economy Freight",
"GROUND_HOME_DELIVERY" => "FedEx Ground Home Delivery",
"FEDEX_GROUND" => "FedEx Ground",
"INTERNATIONAL_GROUND" => "FedEx International Ground",
"SMART_POST" => "FedEx SmartPost",
"FEDEX_FREIGHT_PRIORITY" => "FedEx Freight Priority",
"FEDEX_FREIGHT_ECONOMY" => "FedEx Freight Economy"
}
PACKAGE_TYPES = {
"fedex_envelope" => "FEDEX_ENVELOPE",
"fedex_pak" => "FEDEX_PAK",
"fedex_box" => "FEDEX_BOX",
"fedex_tube" => "FEDEX_TUBE",
"fedex_10_kg_box" => "FEDEX_10KG_BOX",
"fedex_25_kg_box" => "FEDEX_25KG_BOX",
"your_packaging" => "YOUR_PACKAGING"
}
DROPOFF_TYPES = {
'regular_pickup' => 'REGULAR_PICKUP',
'request_courier' => 'REQUEST_COURIER',
'dropbox' => 'DROP_BOX',
'business_service_center' => 'BUSINESS_SERVICE_CENTER',
'station' => 'STATION'
}
SIGNATURE_OPTION_CODES = {
adult: 'ADULT', # 21 years plus
direct: 'DIRECT', # A person at the delivery address
indirect: 'INDIRECT', # A person at the delivery address, or a neighbor, or a signed note for fedex on the door
none_required: 'NO_SIGNATURE_REQUIRED',
default_for_service: 'SERVICE_DEFAULT'
}
PAYMENT_TYPES = {
'sender' => 'SENDER',
'recipient' => 'RECIPIENT',
'third_party' => 'THIRDPARTY',
'collect' => 'COLLECT'
}
PACKAGE_IDENTIFIER_TYPES = {
'tracking_number' => 'TRACKING_NUMBER_OR_DOORTAG',
'door_tag' => 'TRACKING_NUMBER_OR_DOORTAG',
'rma' => 'RMA',
'ground_shipment_id' => 'GROUND_SHIPMENT_ID',
'ground_invoice_number' => 'GROUND_INVOICE_NUMBER',
'ground_customer_reference' => 'GROUND_CUSTOMER_REFERENCE',
'ground_po' => 'GROUND_PO',
'express_reference' => 'EXPRESS_REFERENCE',
'express_mps_master' => 'EXPRESS_MPS_MASTER',
'shipper_reference' => 'SHIPPER_REFERENCE',
}
TRANSIT_TIMES = %w(UNKNOWN ONE_DAY TWO_DAYS THREE_DAYS FOUR_DAYS FIVE_DAYS SIX_DAYS SEVEN_DAYS EIGHT_DAYS NINE_DAYS TEN_DAYS ELEVEN_DAYS TWELVE_DAYS THIRTEEN_DAYS FOURTEEN_DAYS FIFTEEN_DAYS SIXTEEN_DAYS SEVENTEEN_DAYS EIGHTEEN_DAYS)
# FedEx tracking codes as described in the FedEx Tracking Service WSDL Guide
# All delays also have been marked as exceptions
TRACKING_STATUS_CODES = HashWithIndifferentAccess.new(
'AA' => :at_airport,
'AD' => :at_delivery,
'AF' => :at_fedex_facility,
'AR' => :at_fedex_facility,
'AP' => :at_pickup,
'CA' => :canceled,
'CH' => :location_changed,
'DE' => :exception,
'DL' => :delivered,
'DP' => :departed_fedex_location,
'DR' => :vehicle_furnished_not_used,
'DS' => :vehicle_dispatched,
'DY' => :exception,
'EA' => :exception,
'ED' => :enroute_to_delivery,
'EO' => :enroute_to_origin_airport,
'EP' => :enroute_to_pickup,
'FD' => :at_fedex_destination,
'HL' => :held_at_location,
'IT' => :in_transit,
'LO' => :left_origin,
'OC' => :order_created,
'OD' => :out_for_delivery,
'PF' => :plane_in_flight,
'PL' => :plane_landed,
'PU' => :picked_up,
'RS' => :return_to_shipper,
'SE' => :exception,
'SF' => :at_sort_facility,
'SP' => :split_status,
'TR' => :transfer
)
DEFAULT_LABEL_STOCK_TYPE = 'PAPER_7X4.75'
# Available return formats for image data when creating labels
LABEL_FORMATS = %w(DPL EPL2 PDF ZPLII PNG).freeze
TRANSIENT_TRACK_RESPONSE_CODES = %w(9035 9040 9041 9045 9050 9055 9060 9065 9070 9075 9085 9086 9090).freeze
UNRECOVERABLE_TRACK_RESPONSE_CODES = %w(9080 9081 9082 9095 9100).freeze
# Human-readable service name for a FedEx service code; unknown codes
# get a best-effort "FedEx <Titleized Code>" fallback.
def self.service_name_for_code(service_code)
  known_name = SERVICE_TYPES[service_code]
  known_name || "FedEx #{service_code.titleize.sub(/Fedex /, '')}"
end
# Credentials that must be supplied when instantiating this carrier.
def requirements
  %i(key password account login)
end
# Requests rate quotes for shipping +packages+ from +origin+ to
# +destination+.
#
# @param origin [Location] ship-from address
# @param destination [Location] ship-to address
# @param packages [Package, Array<Package>] one or more packages
# @param options [Hash] merged over the carrier-level options; this
#   request path reads :test, :shipper, :currency, :saturday_delivery,
#   :pickup_date, :turn_around_time, :freight, among others
# @return [RateResponse]
def find_rates(origin, destination, packages, options = {})
  options = @options.merge(options)
  packages = Array(packages)
  rate_request = build_rate_request(origin, destination, packages, options)
  xml = commit(save_request(rate_request), (options[:test] || false))
  parse_rate_response(origin, destination, packages, xml, options)
end
# Looks up tracking details for a single tracking number.
#
# @param tracking_number [String]
# @param options [Hash] merged over carrier options; supports :test,
#   :package_identifier_type, :ship_date_range_begin/:ship_date_range_end
#   and :unique_identifier
# @return [TrackingResponse]
def find_tracking_info(tracking_number, options = {})
  options = @options.merge(options)
  tracking_request = build_tracking_request(tracking_number, options)
  xml = commit(save_request(tracking_request), (options[:test] || false))
  parse_tracking_response(xml, options)
end
# Get Shipping labels
#
# Purchases a shipment and returns its label.
#
# @param origin [Location]
# @param destination [Location]
# @param packages [Package, Array<Package>] exactly one package
# @param options [Hash] merged over carrier options
# @return [LabelResponse]
# @raise [Error] when more than one package is given (not supported yet)
def create_shipment(origin, destination, packages, options = {})
  options = @options.merge(options)
  packages = Array(packages)
  raise Error, "Multiple packages are not supported yet." if packages.length > 1

  request = build_shipment_request(origin, destination, packages, options)
  logger.debug(request) if logger

  response = commit(save_request(request), (options[:test] || false))
  parse_ship_response(response)
end
# Maximum number of characters FedEx accepts per address line.
def maximum_address_field_length
  # See FedEx Developer Guide (typo fix: was "Developper")
  35
end
protected
# Builds the ProcessShipmentRequest XML (ship API v13) used to purchase
# a label. Units follow the origin country (imperial vs metric).
def build_shipment_request(origin, destination, packages, options = {})
  imperial = location_uses_imperial(origin)

  xml_builder = Nokogiri::XML::Builder.new do |xml|
    xml.ProcessShipmentRequest(xmlns: 'http://fedex.com/ws/ship/v13') do
      build_request_header(xml)
      build_version_node(xml, 'ship', 13, 0 ,0)

      xml.RequestedShipment do
        xml.ShipTimestamp(ship_timestamp(options[:turn_around_time]).iso8601(0))
        xml.DropoffType('REGULAR_PICKUP')
        xml.ServiceType(options[:service_type] || 'FEDEX_GROUND')
        xml.PackagingType('YOUR_PACKAGING')

        xml.Shipper do
          build_contact_address_nodes(xml, options[:shipper] || origin)
        end

        xml.Recipient do
          build_contact_address_nodes(xml, destination)
        end

        xml.Origin do
          build_contact_address_nodes(xml, origin)
        end

        xml.ShippingChargesPayment do
          xml.PaymentType('SENDER')
          xml.Payor do
            build_shipment_responsible_party_node(xml, options[:shipper] || origin)
          end
        end

        # international shipping
        if origin.country_code != destination.country_code
          xml.CustomsClearanceDetail do
            xml.DutiesPayment do
              xml.PaymentType('SENDER') # "RECIPIENT"# PaymentType.SENDER;
              xml.Payor do
                build_shipment_responsible_party_node(xml, options[:shipper] || origin)
              end
            end
            xml.DocumentContent "NON_DOCUMENTS" # InternationalDocumentContentType.NON_DOCUMENTS;
            # NOTE(review): customs currency and unit price are hard-coded
            # to "ILS" with a 1.0 floor on the declared value — confirm this
            # is intentional rather than meant to honor options[:currency].
            packages_value = packages.sum &:value
            packages_value = (packages_value > 0) ? packages_value : 1.0
            xml.CustomsValue do
              xml.Currency "ILS"
              xml.Amount packages_value
            end
            xml.Commodities do
              xml.NumberOfPieces packages.size
              descriptions = packages.map(&:options).map{|a| a[:description]}.join(",") || "Generic Goods"
              xml.Description descriptions
              xml.CountryOfManufacture(origin.country_code)
              xml.Weight do
                xml.Units 'KG'
                xml.Value packages.sum(&:kgs)
              end
              xml.Quantity packages.size
              xml.QuantityUnits "Unit(s)"
              xml.UnitPrice do
                xml.Currency "ILS"
                xml.Amount "1.0"
              end
            end
          end
        end

        xml.LabelSpecification do
          xml.LabelFormatType('COMMON2D')
          xml.ImageType(options[:label_format] || 'PNG')
          xml.LabelStockType(options[:label_stock_type] || DEFAULT_LABEL_STOCK_TYPE)
        end

        build_rate_request_types_node(xml,options[:currency] ? 'PREFERRED' : 'ACCOUNT')
        xml.PackageCount(packages.size)

        packages.each do |package|
          xml.RequestedPackageLineItems do
            xml.GroupPackageCount(1)
            build_package_weight_node(xml, package, imperial)
            build_package_dimensions_node(xml, package, imperial)

            # Reference Numbers
            reference_numbers = Array(package.options[:reference_numbers])
            if reference_numbers.size > 0
              reference_numbers.each do |reference_number_info|
                xml.CustomerReferences do
                  xml.CustomerReferenceType(reference_number_info[:type] || "CUSTOMER_REFERENCE")
                  xml.Value(reference_number_info[:value])
                end
              end
            end

            xml.SpecialServicesRequested do
              xml.SpecialServiceTypes("SIGNATURE_OPTION")
              xml.SignatureOptionDetail do
                xml.OptionType(SIGNATURE_OPTION_CODES[package.options[:signature_option] || :default_for_service])
              end
            end
          end
        end
      end
    end
  end
  xml_builder.to_xml
end
# Emits Contact + Address nodes for a ship-request party (+location+).
def build_contact_address_nodes(xml, location)
  xml.Contact do
    xml.PersonName(location.name)
    xml.CompanyName(location.company)
    xml.PhoneNumber(location.phone)
  end
  xml.Address do
    xml.StreetLines(location.address1) if location.address1
    xml.StreetLines(location.address2) if location.address2
    xml.City(location.city) if location.city
    xml.StateOrProvinceCode(location.state)
    xml.PostalCode(location.postal_code)
    xml.CountryCode(location.country_code(:alpha2))
    xml.Residential('true') if location.residential?
  end
end
# Emits the ResponsibleParty block (account number + contact) used in
# payment nodes of ship requests.
def build_shipment_responsible_party_node(xml, origin)
  xml.ResponsibleParty do
    xml.AccountNumber(@options[:account])
    xml.Contact do
      xml.PersonName(origin.name)
      xml.CompanyName(origin.company)
      xml.PhoneNumber(origin.phone)
    end
  end
end
# Builds the RateRequest XML (rate API v13) for a rate quote; freight
# shipments take a different branch than parcel shipments.
def build_rate_request(origin, destination, packages, options = {})
  imperial = location_uses_imperial(origin)

  xml_builder = Nokogiri::XML::Builder.new do |xml|
    xml.RateRequest(xmlns: 'http://fedex.com/ws/rate/v13') do
      build_request_header(xml)
      build_version_node(xml, 'crs', 13, 0 ,0)

      # Returns delivery dates
      xml.ReturnTransitAndCommit(true)
      # Returns saturday delivery shipping options when available
      xml.VariableOptions('SATURDAY_DELIVERY') if options[:saturday_delivery]

      xml.RequestedShipment do
        if options[:pickup_date]
          xml.ShipTimestamp(options[:pickup_date].to_time.iso8601(0))
        else
          xml.ShipTimestamp(ship_timestamp(options[:turn_around_time]).iso8601(0))
        end

        freight = has_freight?(options)

        unless freight
          # fedex api wants this up here otherwise request returns an error
          xml.DropoffType(options[:dropoff_type] || 'REGULAR_PICKUP')
          xml.PackagingType(options[:packaging_type] || 'YOUR_PACKAGING')
        end

        xml.PreferredCurrency(options[:currency]) if options[:currency]

        build_location_node(xml, 'Shipper', options[:shipper] || origin)
        build_location_node(xml, 'Recipient', destination)
        if options[:shipper] && options[:shipper] != origin
          build_location_node(xml, 'Origin', origin)
        end

        if freight
          freight_options = options[:freight]
          build_shipping_charges_payment_node(xml, freight_options)
          build_freight_shipment_detail_node(xml, freight_options, packages, imperial)
          build_rate_request_types_node(xml,options[:currency] ? 'PREFERRED' : 'ACCOUNT')
        else
          # NOTE(review): SmartPostDetail is emitted for every non-freight
          # request, not only SMART_POST service — confirm this is intended.
          xml.SmartPostDetail do
            xml.Indicia(options[:smart_post_indicia] || 'PARCEL_SELECT')
            xml.HubId(options[:smart_post_hub_id] || 5902) # default to LA
          end

          build_rate_request_types_node(xml,options[:currency] ? 'PREFERRED' : 'ACCOUNT')
          xml.PackageCount(packages.size)
          build_packages_nodes(xml, packages, imperial)
        end
      end
    end
  end
  xml_builder.to_xml
end
# Emits one RequestedPackageLineItems node (weight + dimensions) per
# package.
def build_packages_nodes(xml, packages, imperial)
  packages.map do |pkg|
    xml.RequestedPackageLineItems do
      xml.GroupPackageCount(1)
      build_package_weight_node(xml, pkg, imperial)
      build_package_dimensions_node(xml, pkg, imperial)
    end
  end
end
# Emits the ShippingChargesPayment block for freight rate requests.
def build_shipping_charges_payment_node(xml, freight_options)
  xml.ShippingChargesPayment do
    xml.PaymentType(freight_options[:payment_type])
    xml.Payor do
      xml.ResponsibleParty do
        # TODO: case of different freight account numbers?
        xml.AccountNumber(freight_options[:account])
      end
    end
  end
end
# Emits the FreightShipmentDetail block (account, billing address, role
# and one LineItems node per package) for freight rate requests.
def build_freight_shipment_detail_node(xml, freight_options, packages, imperial)
  xml.FreightShipmentDetail do
    # TODO: case of different freight account numbers?
    xml.FedExFreightAccountNumber(freight_options[:account])
    build_location_node(xml, 'FedExFreightBillingContactAndAddress', freight_options[:billing_location])
    xml.Role(freight_options[:role])

    packages.each do |pkg|
      xml.LineItems do
        xml.FreightClass(freight_options[:freight_class])
        xml.Packaging(freight_options[:packaging])
        build_package_weight_node(xml, pkg, imperial)
        build_package_dimensions_node(xml, pkg, imperial)
      end
    end
  end
end
# Whether the rate request should be built as a freight shipment.
#
# @param options [Hash]
# @return [Boolean] true when a non-blank :freight option was supplied
def has_freight?(options)
  # `present?` already covers the nil check the old `x && x.present?`
  # duplicated, and this now returns a real boolean rather than leaking
  # the freight hash itself. Callers only use it for truthiness.
  options[:freight].present?
end
# Emits a Weight node for +pkg+ in LB or KG, rounded to three decimal
# places and floored at 0.1.
def build_package_weight_node(xml, pkg, imperial)
  raw_weight = imperial ? pkg.lbs : pkg.kgs
  rounded = (raw_weight.to_f * 1000).round / 1000.0
  xml.Weight do
    xml.Units(imperial ? 'LB' : 'KG')
    xml.Value([rounded, 0.1].max)
  end
end
# Emits a Dimensions node (Length/Width/Height + Units) for +pkg+.
def build_package_dimensions_node(xml, pkg, imperial)
  xml.Dimensions do
    [:length, :width, :height].each do |axis|
      value = ((imperial ? pkg.inches(axis) : pkg.cm(axis)).to_f * 1000).round / 1000.0 # 3 decimals
      # NOTE(review): ceil after rounding means e.g. 2.001 is sent as 3 —
      # presumably FedEx wants whole-number dimensions rounded up; confirm.
      xml.public_send(axis.to_s.capitalize, value.ceil)
    end
    xml.Units(imperial ? 'IN' : 'CM')
  end
end
# Emits the RateRequestTypes node ('ACCOUNT' or 'PREFERRED').
def build_rate_request_types_node(xml, type = 'ACCOUNT')
  xml.RateRequestTypes(type)
end
# Builds the TrackRequest XML (track API v7) for one tracking number.
def build_tracking_request(tracking_number, options = {})
  xml_builder = Nokogiri::XML::Builder.new do |xml|
    xml.TrackRequest(xmlns: 'http://fedex.com/ws/track/v7') do
      build_request_header(xml)
      build_version_node(xml, 'trck', 7, 0, 0)

      xml.SelectionDetails do
        xml.PackageIdentifier do
          # Defaults to a plain tracking-number lookup.
          xml.Type(PACKAGE_IDENTIFIER_TYPES[options[:package_identifier_type] || 'tracking_number'])
          xml.Value(tracking_number)
        end

        # Optional narrowing when one number matches several shipments.
        xml.ShipDateRangeBegin(options[:ship_date_range_begin]) if options[:ship_date_range_begin]
        xml.ShipDateRangeEnd(options[:ship_date_range_end]) if options[:ship_date_range_end]
        xml.TrackingNumberUniqueIdentifier(options[:unique_identifier]) if options[:unique_identifier]
      end

      xml.ProcessingOptions('INCLUDE_DETAILED_SCANS')
    end
  end
  xml_builder.to_xml
end
# Emits the authentication, client and transaction blocks shared by all
# FedEx requests (credentials come from the carrier's @options).
def build_request_header(xml)
  xml.WebAuthenticationDetail do
    xml.UserCredential do
      xml.Key(@options[:key])
      xml.Password(@options[:password])
    end
  end

  xml.ClientDetail do
    xml.AccountNumber(@options[:account])
    xml.MeterNumber(@options[:login])
  end

  xml.TransactionDetail do
    xml.CustomerTransactionId(@options[:transaction_id] || 'ActiveShipping') # TODO: Need to do something better with this...
  end
end
# Emits the Version block identifying the FedEx service id and the
# major/intermediate/minor API version being spoken.
def build_version_node(xml, service_id, major, intermediate, minor)
  xml.Version do
    xml.ServiceId(service_id)
    xml.Major(major)
    xml.Intermediate(intermediate)
    xml.Minor(minor)
  end
end
# Emits an address-only party node named +name+ (used by rate requests).
def build_location_node(xml, name, location)
  xml.public_send(name) do
    xml.Address do
      xml.StreetLines(location.address1) if location.address1
      xml.StreetLines(location.address2) if location.address2
      xml.City(location.city) if location.city
      xml.PostalCode(location.postal_code)
      xml.CountryCode(location.country_code(:alpha2))
      xml.Residential(true) unless location.commercial?
    end
  end
end
# Parses a RateReply into a RateResponse.
#
# Each RateReplyDetails element becomes one RateEstimate. When
# options[:currency] was sent, the PREFERRED_ACCOUNT_SHIPMENT rate is
# used; otherwise the first TotalNetCharge. Estimates whose expected
# XML fields are missing are dropped (NoMethodError caught per
# shipment) and a warning is logged.
#
# @return [RateResponse]
def parse_rate_response(origin, destination, packages, response, options)
  xml = build_document(response, 'RateReply')
  success = response_success?(xml)
  message = response_message(xml)

  if success
    missing_xml_field = false
    rate_estimates = xml.root.css('> RateReplyDetails').map do |rated_shipment|
      begin
        service_code = rated_shipment.at('ServiceType').text
        is_saturday_delivery = rated_shipment.at('AppliedOptions').try(:text) == 'SATURDAY_DELIVERY'
        service_type = is_saturday_delivery ? "#{service_code}_SATURDAY_DELIVERY" : service_code

        transit_time = rated_shipment.at('TransitTime').text if ["FEDEX_GROUND", "GROUND_HOME_DELIVERY"].include?(service_code)
        max_transit_time = rated_shipment.at('MaximumTransitTime').try(:text) if service_code == "FEDEX_GROUND"

        delivery_timestamp = rated_shipment.at('DeliveryTimestamp').try(:text)
        delivery_range = delivery_range_from(transit_time, max_transit_time, delivery_timestamp, (service_code == "GROUND_HOME_DELIVERY"), options)

        if options[:currency]
          preferred_rate = rated_shipment.at("RatedShipmentDetails/ShipmentRateDetail/RateType:contains('PREFERRED_ACCOUNT_SHIPMENT')").parent
          total_price = preferred_rate.at("TotalNetCharge/Amount").text.to_f
          currency = preferred_rate.at("TotalNetCharge/Currency").text
        else
          total_price = rated_shipment.at('RatedShipmentDetails/ShipmentRateDetail/TotalNetCharge/Amount').text.to_f
          currency = rated_shipment.at('RatedShipmentDetails/ShipmentRateDetail/TotalNetCharge/Currency').text
        end

        RateEstimate.new(origin, destination, @@name,
          self.class.service_name_for_code(service_type),
          :service_code => service_code,
          :total_price => total_price,
          :currency => currency,
          :packages => packages,
          :delivery_range => delivery_range)
      rescue NoMethodError
        # An expected node was absent for this shipment; skip it but
        # remember, so the log / failure message below can reflect it.
        missing_xml_field = true
        nil
      end
    end

    rate_estimates = rate_estimates.compact
    # Typo fix in log message: "where" -> "were".
    logger.warn("[FedexParseRateError] Some fields were missing in the response: #{response}") if logger && missing_xml_field

    if rate_estimates.empty?
      success = false
      if missing_xml_field
        message = "The response from the carrier contained errors and could not be treated"
      else
        message = "No shipping rates could be found for the destination address" if message.blank?
      end
    end
  else
    rate_estimates = []
  end
  RateResponse.new(success, message, Hash.from_xml(response), :rates => rate_estimates, :xml => response, :request => last_request, :log_xml => options[:log_xml])
end
# Builds a [earliest, latest] delivery window. A FedEx-supplied delivery
# timestamp wins; otherwise the window comes from the (min, max) transit
# times counted in business days from the pickup date (home-delivery
# services skip Sun/Mon instead of Sat/Sun).
def delivery_range_from(transit_time, max_transit_time, delivery_timestamp, is_home_delivery, options)
  return [delivery_timestamp, delivery_timestamp] unless delivery_timestamp.blank? && transit_time.present?

  upper_bound = max_transit_time.presence || transit_time
  pickup_date = options[:pickup_date] || ship_date(options[:turn_around_time])
  parse_transit_times([transit_time, upper_bound]).map do |days|
    business_days_from(pickup_date, days, is_home_delivery)
  end
end
# Parses a ProcessShipmentReply into a LabelResponse.
#
# @param response [String] raw XML returned by the ship request
# @return [LabelResponse] success flag, message, full reply as a hash,
#   and one Label (tracking number + base64-decoded image)
def parse_ship_response(response)
  xml = build_document(response, 'ProcessShipmentReply')
  success = response_success?(xml)
  message = response_message(xml)
  response_info = Hash.from_xml(response)
  tracking_number = xml.css("CompletedPackageDetails TrackingIds TrackingNumber").text
  base_64_image = xml.css("Label Image").text
  labels = [Label.new(tracking_number, Base64.decode64(base_64_image))]
  LabelResponse.new(success, message, response_info, {labels: labels})
end
# Walks forward from +date+ until +days+ business days have elapsed.
# Home-delivery shipments count Tue-Sat as business days; everything
# else counts Mon-Fri.
def business_days_from(date, days, is_home_delivery = false)
  remaining = days
  future_date = date
  while remaining > 0
    future_date += 1.day
    counts = is_home_delivery ? home_delivery_business_day?(future_date) : business_day?(future_date)
    remaining -= 1 if counts
  end
  future_date
end
# Transit times for FedEx® Ground do not include Saturdays, Sundays, or holidays.
# (Holidays are not modeled here; only weekends are excluded.)
def business_day?(date)
  !date.saturday? && !date.sunday?
end
# Transit times for FedEx® Home Delivery do not include Sundays, Mondays, or holidays.
# (Holidays are not modeled here; only Sun/Mon are excluded.)
def home_delivery_business_day?(date)
  !date.sunday? && !date.monday?
end
# Parses a TrackReply into a TrackingResponse.
#
# @param response [String] raw XML from the track request
# @param options [Hash] unused here; kept for interface symmetry
# @return [TrackingResponse] a failed response when no tracking details
#   are present or multiple shipments match
# @raise [ActiveShipping::ShipmentNotFound] for transient FedEx error codes
# @raise [ActiveShipping::ResponseContentError] for other FedEx errors
def parse_tracking_response(response, options)
  xml = build_document(response, 'TrackReply')
  success = response_success?(xml)
  message = response_message(xml)

  if success
    # The top-level reply can report success while CompletedTrackDetails
    # carries its own (possibly failing) notification — re-check it.
    tracking_details_root = xml.at('CompletedTrackDetails')
    success = response_success?(tracking_details_root)
    message = response_message(tracking_details_root)
  end

  if success
    delivery_signature = nil
    shipment_events = []

    all_tracking_details = xml.root.xpath('CompletedTrackDetails/TrackDetails')
    tracking_details = case all_tracking_details.length
    when 1
      all_tracking_details.first
    when 0
      message = "The response did not contain tracking details"
      return TrackingResponse.new(
        false,
        message,
        Hash.from_xml(response),
        carrier: @@name,
        xml: response,
        request: last_request
      )
    else
      all_unique_identifiers = xml.root.xpath('CompletedTrackDetails/TrackDetails/TrackingNumberUniqueIdentifier').map(&:text)
      # Typo fix: "unqiue" -> "unique".
      message = "Multiple matches were found. Specify a unique identifier: #{all_unique_identifiers.join(', ')}"
      return TrackingResponse.new(
        false,
        message,
        Hash.from_xml(response),
        carrier: @@name,
        xml: response,
        request: last_request
      )
    end

    first_notification = tracking_details.at('Notification')
    severity = first_notification.at('Severity').text
    if severity == 'ERROR' || severity == 'FAILURE'
      # Dead-store fix: the old `message = first_notification.try(:text)`
      # was never read — both branches below raise immediately.
      code = first_notification.at('Code').try(:text)
      case code
      when *TRANSIENT_TRACK_RESPONSE_CODES
        raise ActiveShipping::ShipmentNotFound, first_notification.at('Message').text
      else
        raise ActiveShipping::ResponseContentError, StandardError.new(first_notification.at('Message').text)
      end
    end

    tracking_number = tracking_details.at('TrackingNumber').text
    status_detail = tracking_details.at('StatusDetail')
    if status_detail.blank?
      status_code, status, status_description, delivery_signature = nil
    else
      status_code = status_detail.at('Code').try(:text)
      # Prefer the more specific ancillary reason over the generic description.
      status_description = status_detail.at('AncillaryDetails/ReasonDescription').try(:text) || status_detail.at('Description').try(:text)
      status = TRACKING_STATUS_CODES[status_code]

      if status_code == 'DL' && tracking_details.at('AvailableImages').try(:text) == 'SIGNATURE_PROOF_OF_DELIVERY'
        delivery_signature = tracking_details.at('DeliverySignatureName').try(:text)
      end
    end

    origin = if origin_node = tracking_details.at('OriginLocationAddress')
      Location.new(
        country: origin_node.at('CountryCode').text,
        province: origin_node.at('StateOrProvinceCode').text,
        city: origin_node.at('City').text
      )
    end

    destination = extract_address(tracking_details, DELIVERY_ADDRESS_NODE_NAMES)
    shipper_address = extract_address(tracking_details, SHIPPER_ADDRESS_NODE_NAMES)

    ship_time = extract_timestamp(tracking_details, 'ShipTimestamp')
    actual_delivery_time = extract_timestamp(tracking_details, 'ActualDeliveryTimestamp')
    scheduled_delivery_time = extract_timestamp(tracking_details, 'EstimatedDeliveryTimestamp')

    # Collect scan events; events without a country have no usable location.
    tracking_details.xpath('Events').each do |event|
      address = event.at('Address')
      next if address.nil? || address.at('CountryCode').nil?

      city = address.at('City').try(:text)
      state = address.at('StateOrProvinceCode').try(:text)
      zip_code = address.at('PostalCode').try(:text)
      country = address.at('CountryCode').try(:text)

      location = Location.new(:city => city, :state => state, :postal_code => zip_code, :country => country)
      description = event.at('EventDescription').text
      type_code = event.at('EventType').text

      time = Time.parse(event.at('Timestamp').text)
      zoneless_time = time.utc

      # NOTE(review): description is passed twice (as name and message),
      # matching the original call; confirm against ShipmentEvent's signature.
      shipment_events << ShipmentEvent.new(description, zoneless_time, location, description, type_code)
    end
    shipment_events = shipment_events.sort_by(&:time)
  end

  TrackingResponse.new(
    success,
    message,
    Hash.from_xml(response),
    carrier: @@name,
    xml: response,
    request: last_request,
    status: status,
    status_code: status_code,
    status_description: status_description,
    ship_time: ship_time,
    scheduled_delivery_date: scheduled_delivery_time,
    actual_delivery_date: actual_delivery_time,
    delivery_signature: delivery_signature,
    shipment_events: shipment_events,
    shipper_address: (shipper_address.nil? || shipper_address.unknown?) ? nil : shipper_address,
    origin: origin,
    destination: destination,
    tracking_number: tracking_number
  )
end
# Current time offset by the turn-around delay in hours (nil => 0).
def ship_timestamp(delay_in_hours)
  delay_in_hours ||= 0
  Time.now + delay_in_hours.hours
end
# Date component of the shipment time after applying the turn-around
# delay in hours (nil => 0).
def ship_date(delay_in_hours)
  delay_in_hours ||= 0
  (Time.now + delay_in_hours.hours).to_date
end
# True when the node's HighestSeverity is SUCCESS, WARNING or NOTE;
# a missing severity node counts as failure.
def response_success?(document)
  highest_severity = document.at('HighestSeverity')
  return false if highest_severity.nil?
  %w(SUCCESS WARNING NOTE).include?(highest_severity.text)
end
# Formats the first Notifications element as "SEVERITY - CODE: MESSAGE";
# returns an empty string when the node carries no notifications.
def response_message(document)
  notifications = document.at('Notifications')
  return "" if notifications.nil?
  "#{notifications.at('Severity').text} - #{notifications.at('Code').text}: #{notifications.at('Message').text}"
end
# POSTs the request XML (newlines stripped) to the test or live gateway.
def commit(request, test = false)
  ssl_post(test ? TEST_URL : LIVE_URL, request.gsub("\n", ''))
end
# Maps FedEx transit-time tokens (e.g. "TWO_DAYS") to integer day counts.
#
# Each token is looked up by position in TRANSIT_TIMES; unknown tokens
# (and "UNKNOWN" itself, at index 0) map to 0 via `nil.to_i`/index 0,
# matching the original behavior.
#
# @param times [Array<String>] transit-time tokens from the rate reply
# @return [Array<Integer>] day counts, one per input token
def parse_transit_times(times)
  times.map { |day_count| TRANSIT_TIMES.index(day_count.to_s.chomp).to_i }
end
# Builds a Location from the first matching address node in +document+.
#
# @param document [Nokogiri::XML::Node] tracking-details node to search
# @param possible_node_names [Array<String>] node names tried in order
# @return [Location] populated from the node, or empty when none match
def extract_address(document, possible_node_names)
  node = nil
  args = {}
  # Take the first candidate node name present in the document.
  possible_node_names.each do |name|
    node = document.at(name)
    break if node
  end
  if node
    # NOTE(review): when CountryCode is absent this falls back to an
    # ActiveUtils::Country *object* rather than a code string — looks
    # intentional (Location appears to accept it), but worth confirming.
    args[:country] =
      node.at('CountryCode').try(:text) ||
      ActiveUtils::Country.new(:alpha2 => 'ZZ', :name => 'Unknown or Invalid Territory', :alpha3 => 'ZZZ', :numeric => '999')
    args[:province] = node.at('StateOrProvinceCode').try(:text) || 'unknown'
    args[:city] = node.at('City').try(:text) || 'unknown'
  end
  Location.new(args)
end
# Pulls the named timestamp node out of +document+ and parses it.
# Midnight timestamps are treated as date-only values; anything else is
# parsed as a full Time. Returns nil when the node is absent.
def extract_timestamp(document, node_name)
  node = document.at(node_name)
  return unless node

  raw = node.text
  if (match = raw.match(/\A(\d{4}-\d{2}-\d{2})T00:00:00\Z/))
    Date.parse(match[1])
  else
    Time.parse(raw)
  end
end
# Parses +xml+ strictly with Nokogiri, strips namespaces, and verifies
# the root element matches +expected_root_tag+.
#
# @raise [ActiveShipping::ResponseContentError] when the XML is
#   malformed or the root tag is not the expected one
def build_document(xml, expected_root_tag)
  document = Nokogiri.XML(xml) { |config| config.strict }
  document.remove_namespaces!
  if document.root.nil? || document.root.name != expected_root_tag
    raise ActiveShipping::ResponseContentError.new(StandardError.new('Invalid document'), xml)
  end
  document
rescue Nokogiri::XML::SyntaxError => e
  raise ActiveShipping::ResponseContentError.new(e, xml)
end
# FedEx expects imperial units (LB/IN) for the US, Liberia and Myanmar;
# metric (KG/CM) everywhere else.
def location_uses_imperial(location)
  case location.country_code(:alpha2)
  when 'US', 'LR', 'MM' then true
  else false
  end
end
end
end
# fedex preferred currency
module ActiveShipping
# FedEx carrier implementation.
#
# FedEx module by Jimmy Baker (http://github.com/jimmyebaker)
# Documentation can be found here: http://images.fedex.com/us/developer/product/WebServices/MyWebHelp/PropDevGuide.pdf
class FedEx < Carrier
# Requests are idempotent on FedEx's side, so failed calls may be retried.
self.retry_safe = true
# NOTE(review): @@name is a class variable shared across the inheritance
# tree; a class-level instance variable would be safer, but cattr_reader
# is the established pattern for carriers in this library.
cattr_reader :name
@@name = "FedEx"
# FedEx XML gateway endpoints (test vs. production).
TEST_URL = 'https://gatewaybeta.fedex.com:443/xml'
LIVE_URL = 'https://gateway.fedex.com:443/xml'
# Internal carrier identifiers used by the FedEx API.
CARRIER_CODES = {
"fedex_ground" => "FDXG",
"fedex_express" => "FDXE"
}
# Candidate node names searched when extracting the delivery / shipper
# address from a tracking reply (see extract_address).
DELIVERY_ADDRESS_NODE_NAMES = %w(DestinationAddress ActualDeliveryAddress)
SHIPPER_ADDRESS_NODE_NAMES = %w(ShipperAddress)
# FedEx service code => human-readable service name
# (see self.service_name_for_code for the fallback used on unknown codes).
SERVICE_TYPES = {
"PRIORITY_OVERNIGHT" => "FedEx Priority Overnight",
"PRIORITY_OVERNIGHT_SATURDAY_DELIVERY" => "FedEx Priority Overnight Saturday Delivery",
"FEDEX_2_DAY" => "FedEx 2 Day",
"FEDEX_2_DAY_SATURDAY_DELIVERY" => "FedEx 2 Day Saturday Delivery",
"STANDARD_OVERNIGHT" => "FedEx Standard Overnight",
"FIRST_OVERNIGHT" => "FedEx First Overnight",
"FIRST_OVERNIGHT_SATURDAY_DELIVERY" => "FedEx First Overnight Saturday Delivery",
"FEDEX_EXPRESS_SAVER" => "FedEx Express Saver",
"FEDEX_1_DAY_FREIGHT" => "FedEx 1 Day Freight",
"FEDEX_1_DAY_FREIGHT_SATURDAY_DELIVERY" => "FedEx 1 Day Freight Saturday Delivery",
"FEDEX_2_DAY_FREIGHT" => "FedEx 2 Day Freight",
"FEDEX_2_DAY_FREIGHT_SATURDAY_DELIVERY" => "FedEx 2 Day Freight Saturday Delivery",
"FEDEX_3_DAY_FREIGHT" => "FedEx 3 Day Freight",
"FEDEX_3_DAY_FREIGHT_SATURDAY_DELIVERY" => "FedEx 3 Day Freight Saturday Delivery",
"INTERNATIONAL_PRIORITY" => "FedEx International Priority",
"INTERNATIONAL_PRIORITY_SATURDAY_DELIVERY" => "FedEx International Priority Saturday Delivery",
"INTERNATIONAL_ECONOMY" => "FedEx International Economy",
"INTERNATIONAL_FIRST" => "FedEx International First",
"INTERNATIONAL_PRIORITY_FREIGHT" => "FedEx International Priority Freight",
"INTERNATIONAL_ECONOMY_FREIGHT" => "FedEx International Economy Freight",
"GROUND_HOME_DELIVERY" => "FedEx Ground Home Delivery",
"FEDEX_GROUND" => "FedEx Ground",
"INTERNATIONAL_GROUND" => "FedEx International Ground",
"SMART_POST" => "FedEx SmartPost",
"FEDEX_FREIGHT_PRIORITY" => "FedEx Freight Priority",
"FEDEX_FREIGHT_ECONOMY" => "FedEx Freight Economy"
}
# ActiveShipping package key => FedEx packaging code.
PACKAGE_TYPES = {
"fedex_envelope" => "FEDEX_ENVELOPE",
"fedex_pak" => "FEDEX_PAK",
"fedex_box" => "FEDEX_BOX",
"fedex_tube" => "FEDEX_TUBE",
"fedex_10_kg_box" => "FEDEX_10KG_BOX",
"fedex_25_kg_box" => "FEDEX_25KG_BOX",
"your_packaging" => "YOUR_PACKAGING"
}
# How the shipment is handed over to FedEx.
DROPOFF_TYPES = {
'regular_pickup' => 'REGULAR_PICKUP',
'request_courier' => 'REQUEST_COURIER',
'dropbox' => 'DROP_BOX',
'business_service_center' => 'BUSINESS_SERVICE_CENTER',
'station' => 'STATION'
}
# Signature requirement codes for shipment labels (see build_shipment_request).
SIGNATURE_OPTION_CODES = {
adult: 'ADULT', # 21 years plus
direct: 'DIRECT', # A person at the delivery address
indirect: 'INDIRECT', # A person at the delivery address, or a neighbor, or a signed note for fedex on the door
none_required: 'NO_SIGNATURE_REQUIRED',
default_for_service: 'SERVICE_DEFAULT'
}
# Who pays for the shipment / duties.
PAYMENT_TYPES = {
'sender' => 'SENDER',
'recipient' => 'RECIPIENT',
'third_party' => 'THIRDPARTY',
'collect' => 'COLLECT'
}
# Identifier types accepted by the tracking API (see build_tracking_request).
PACKAGE_IDENTIFIER_TYPES = {
'tracking_number' => 'TRACKING_NUMBER_OR_DOORTAG',
'door_tag' => 'TRACKING_NUMBER_OR_DOORTAG',
'rma' => 'RMA',
'ground_shipment_id' => 'GROUND_SHIPMENT_ID',
'ground_invoice_number' => 'GROUND_INVOICE_NUMBER',
'ground_customer_reference' => 'GROUND_CUSTOMER_REFERENCE',
'ground_po' => 'GROUND_PO',
'express_reference' => 'EXPRESS_REFERENCE',
'express_mps_master' => 'EXPRESS_MPS_MASTER',
'shipper_reference' => 'SHIPPER_REFERENCE',
}
# The index of each token equals its day count (see parse_transit_times).
TRANSIT_TIMES = %w(UNKNOWN ONE_DAY TWO_DAYS THREE_DAYS FOUR_DAYS FIVE_DAYS SIX_DAYS SEVEN_DAYS EIGHT_DAYS NINE_DAYS TEN_DAYS ELEVEN_DAYS TWELVE_DAYS THIRTEEN_DAYS FOURTEEN_DAYS FIFTEEN_DAYS SIXTEEN_DAYS SEVENTEEN_DAYS EIGHTEEN_DAYS)
# FedEx tracking codes as described in the FedEx Tracking Service WSDL Guide
# All delays also have been marked as exceptions
TRACKING_STATUS_CODES = HashWithIndifferentAccess.new(
'AA' => :at_airport,
'AD' => :at_delivery,
'AF' => :at_fedex_facility,
'AR' => :at_fedex_facility,
'AP' => :at_pickup,
'CA' => :canceled,
'CH' => :location_changed,
'DE' => :exception,
'DL' => :delivered,
'DP' => :departed_fedex_location,
'DR' => :vehicle_furnished_not_used,
'DS' => :vehicle_dispatched,
'DY' => :exception,
'EA' => :exception,
'ED' => :enroute_to_delivery,
'EO' => :enroute_to_origin_airport,
'EP' => :enroute_to_pickup,
'FD' => :at_fedex_destination,
'HL' => :held_at_location,
'IT' => :in_transit,
'LO' => :left_origin,
'OC' => :order_created,
'OD' => :out_for_delivery,
'PF' => :plane_in_flight,
'PL' => :plane_landed,
'PU' => :picked_up,
'RS' => :return_to_shipper,
'SE' => :exception,
'SF' => :at_sort_facility,
'SP' => :split_status,
'TR' => :transfer
)
# Label stock used when the caller does not pass :label_stock_type.
DEFAULT_LABEL_STOCK_TYPE = 'PAPER_7X4.75'
# Available return formats for image data when creating labels
LABEL_FORMATS = %w(DPL EPL2 PDF ZPLII PNG).freeze
# Tracking error codes that indicate a retryable "not found yet" state
# vs. permanently unrecoverable ones (see parse_tracking_response).
TRANSIENT_TRACK_RESPONSE_CODES = %w(9035 9040 9041 9045 9050 9055 9060 9065 9070 9075 9085 9086 9090).freeze
UNRECOVERABLE_TRACK_RESPONSE_CODES = %w(9080 9081 9082 9095 9100).freeze
# Maps a FedEx service code (e.g. "FEDEX_GROUND") to its human-readable
# name, falling back to a titleized rendering of the raw code for any
# service missing from SERVICE_TYPES.
def self.service_name_for_code(service_code)
  known_name = SERVICE_TYPES[service_code]
  return known_name if known_name
  "FedEx #{service_code.titleize.sub(/Fedex /, '')}"
end
# Credential options every request needs: developer key, password,
# account number, and meter number (stored under :login).
def requirements
  %i(key password account login)
end
# Requests rate quotes for +packages+ shipped from +origin+ to +destination+.
# +options+ are merged over the carrier-level @options; pass :test => true
# to hit the FedEx test gateway, :currency to request PREFERRED-currency rates.
# Returns a RateResponse (see parse_rate_response).
def find_rates(origin, destination, packages, options = {})
options = @options.merge(options)
# Accept a single package or an array uniformly.
packages = Array(packages)
rate_request = build_rate_request(origin, destination, packages, options)
xml = commit(save_request(rate_request), (options[:test] || false))
parse_rate_response(origin, destination, packages, xml, options)
end
# Fetches tracking events for +tracking_number+.
# Supported options include :package_identifier_type, :ship_date_range_begin/
# :ship_date_range_end, and :unique_identifier (see build_tracking_request).
# Returns a TrackingResponse (see parse_tracking_response).
def find_tracking_info(tracking_number, options = {})
options = @options.merge(options)
tracking_request = build_tracking_request(tracking_number, options)
xml = commit(save_request(tracking_request), (options[:test] || false))
parse_tracking_response(xml, options)
end
# Creates a shipment and returns its label(s).
# Currently limited to a single package per request; raises Error otherwise.
# Returns a LabelResponse (see parse_ship_response).
def create_shipment(origin, destination, packages, options = {})
options = @options.merge(options)
packages = Array(packages)
raise Error, "Multiple packages are not supported yet." if packages.length > 1
request = build_shipment_request(origin, destination, packages, options)
logger.debug(request) if logger
response = commit(save_request(request), (options[:test] || false))
parse_ship_response(response)
end
# Maximum number of characters FedEx accepts per address line.
def maximum_address_field_length
  # See FedEx Developer Guide
  35
end
protected
# Builds the ProcessShipmentRequest XML (ship service v13) used by
# create_shipment. Includes customs clearance details when origin and
# destination countries differ, plus label format and per-package
# weight/dimension/reference/signature nodes.
# NOTE(review): customs currency is hard-coded to "ILS" and duties payment
# to SENDER — confirm this matches the intended account setup.
def build_shipment_request(origin, destination, packages, options = {})
imperial = location_uses_imperial(origin)
xml_builder = Nokogiri::XML::Builder.new do |xml|
xml.ProcessShipmentRequest(xmlns: 'http://fedex.com/ws/ship/v13') do
build_request_header(xml)
build_version_node(xml, 'ship', 13, 0 ,0)
xml.RequestedShipment do
xml.ShipTimestamp(ship_timestamp(options[:turn_around_time]).iso8601(0))
xml.DropoffType('REGULAR_PICKUP')
xml.ServiceType(options[:service_type] || 'FEDEX_GROUND')
xml.PackagingType('YOUR_PACKAGING')
xml.Shipper do
build_contact_address_nodes(xml, options[:shipper] || origin)
end
xml.Recipient do
build_contact_address_nodes(xml, destination)
end
xml.Origin do
build_contact_address_nodes(xml, origin)
end
xml.ShippingChargesPayment do
xml.PaymentType('SENDER')
xml.Payor do
build_shipment_responsible_party_node(xml, options[:shipper] || origin)
end
end
# international shipping
if origin.country_code != destination.country_code
xml.CustomsClearanceDetail do
xml.DutiesPayment do
xml.PaymentType('SENDER') # "RECIPIENT"# PaymentType.SENDER;
xml.Payor do
build_shipment_responsible_party_node(xml, options[:shipper] || origin)
end
end
xml.DocumentContent "NON_DOCUMENTS" # InternationalDocumentContentType.NON_DOCUMENTS;
# Declared customs value; FedEx rejects 0, so fall back to 1.0.
packages_value = packages.sum &:value
packages_value = (packages_value > 0) ? packages_value : 1.0
xml.CustomsValue do
xml.Currency "ILS"
xml.Amount packages_value
end
xml.Commodities do
xml.NumberOfPieces packages.size
# NOTE(review): join always returns a String, so the || "Generic Goods"
# fallback is dead code; a .presence check was probably intended.
descriptions = packages.map(&:options).map{|a| a[:description]}.join(",") || "Generic Goods"
xml.Description descriptions
xml.CountryOfManufacture(origin.country_code)
xml.Weight do
xml.Units 'KG'
xml.Value packages.sum(&:kgs)
end
xml.Quantity packages.size
xml.QuantityUnits "Unit(s)"
xml.UnitPrice do
xml.Currency "ILS"
xml.Amount "1.0"
end
end
end
end
xml.LabelSpecification do
xml.LabelFormatType('COMMON2D')
xml.ImageType(options[:label_format] || 'PNG')
xml.LabelStockType(options[:label_stock_type] || DEFAULT_LABEL_STOCK_TYPE)
end
build_rate_request_types_node(xml,options[:currency] ? 'PREFERRED' : 'ACCOUNT')
xml.PackageCount(packages.size)
packages.each do |package|
xml.RequestedPackageLineItems do
xml.GroupPackageCount(1)
build_package_weight_node(xml, package, imperial)
build_package_dimensions_node(xml, package, imperial)
# Reference Numbers
reference_numbers = Array(package.options[:reference_numbers])
if reference_numbers.size > 0
reference_numbers.each do |reference_number_info|
xml.CustomerReferences do
xml.CustomerReferenceType(reference_number_info[:type] || "CUSTOMER_REFERENCE")
xml.Value(reference_number_info[:value])
end
end
end
xml.SpecialServicesRequested do
xml.SpecialServiceTypes("SIGNATURE_OPTION")
xml.SignatureOptionDetail do
xml.OptionType(SIGNATURE_OPTION_CODES[package.options[:signature_option] || :default_for_service])
end
end
end
end
end
end
end
xml_builder.to_xml
end
# Emits the Contact and Address nodes for a shipment party (shipper,
# recipient, or origin) from an ActiveShipping Location.
def build_contact_address_nodes(xml, location)
xml.Contact do
xml.PersonName(location.name)
xml.CompanyName(location.company)
xml.PhoneNumber(location.phone)
end
xml.Address do
# FedEx accepts repeated StreetLines nodes for multi-line addresses.
xml.StreetLines(location.address1) if location.address1
xml.StreetLines(location.address2) if location.address2
xml.City(location.city) if location.city
xml.StateOrProvinceCode(location.state)
xml.PostalCode(location.postal_code)
xml.CountryCode(location.country_code(:alpha2))
xml.Residential('true') if location.residential?
end
end
# Emits the ResponsibleParty payor node: the configured FedEx account
# number plus the paying party's contact details.
def build_shipment_responsible_party_node(xml, origin)
xml.ResponsibleParty do
xml.AccountNumber(@options[:account])
xml.Contact do
xml.PersonName(origin.name)
xml.CompanyName(origin.company)
xml.PhoneNumber(origin.phone)
end
end
end
# Builds the RateRequest XML (crs service v13) used by find_rates.
# Freight shipments (options[:freight]) take a different branch with
# payment and freight-detail nodes; non-freight requests include
# per-package line items.
def build_rate_request(origin, destination, packages, options = {})
imperial = location_uses_imperial(origin)
xml_builder = Nokogiri::XML::Builder.new do |xml|
xml.RateRequest(xmlns: 'http://fedex.com/ws/rate/v13') do
build_request_header(xml)
build_version_node(xml, 'crs', 13, 0 ,0)
# Returns delivery dates
xml.ReturnTransitAndCommit(true)
# Returns saturday delivery shipping options when available
xml.VariableOptions('SATURDAY_DELIVERY') if options[:saturday_delivery]
xml.RequestedShipment do
if options[:pickup_date]
xml.ShipTimestamp(options[:pickup_date].to_time.iso8601(0))
else
xml.ShipTimestamp(ship_timestamp(options[:turn_around_time]).iso8601(0))
end
freight = has_freight?(options)
unless freight
# fedex api wants this up here otherwise request returns an error
xml.DropoffType(options[:dropoff_type] || 'REGULAR_PICKUP')
xml.PackagingType(options[:packaging_type] || 'YOUR_PACKAGING')
end
xml.PreferredCurrency(options[:currency]) if options[:currency]
build_location_node(xml, 'Shipper', options[:shipper] || origin)
build_location_node(xml, 'Recipient', destination)
if options[:shipper] && options[:shipper] != origin
build_location_node(xml, 'Origin', origin)
end
if freight
freight_options = options[:freight]
build_shipping_charges_payment_node(xml, freight_options)
build_freight_shipment_detail_node(xml, freight_options, packages, imperial)
build_rate_request_types_node(xml,options[:currency] ? 'PREFERRED' : 'ACCOUNT')
else
# NOTE(review): SmartPostDetail is emitted for every non-freight
# request, not only for SMART_POST service — confirm intended.
xml.SmartPostDetail do
xml.Indicia(options[:smart_post_indicia] || 'PARCEL_SELECT')
xml.HubId(options[:smart_post_hub_id] || 5902) # default to LA
end
build_rate_request_types_node(xml,options[:currency] ? 'PREFERRED' : 'ACCOUNT')
xml.PackageCount(packages.size)
build_packages_nodes(xml, packages, imperial)
end
end
end
end
xml_builder.to_xml
end
# Emits one RequestedPackageLineItems node (weight + dimensions) per package.
def build_packages_nodes(xml, packages, imperial)
packages.map do |pkg|
xml.RequestedPackageLineItems do
xml.GroupPackageCount(1)
build_package_weight_node(xml, pkg, imperial)
build_package_dimensions_node(xml, pkg, imperial)
end
end
end
# Emits the ShippingChargesPayment node for freight rate requests using
# the freight account and payment type from options[:freight].
def build_shipping_charges_payment_node(xml, freight_options)
xml.ShippingChargesPayment do
xml.PaymentType(freight_options[:payment_type])
xml.Payor do
xml.ResponsibleParty do
# TODO: case of different freight account numbers?
xml.AccountNumber(freight_options[:account])
end
end
end
end
# Emits the FreightShipmentDetail node: freight account, billing address,
# role, and one LineItems node per package with class/packaging/weight/size.
def build_freight_shipment_detail_node(xml, freight_options, packages, imperial)
xml.FreightShipmentDetail do
# TODO: case of different freight account numbers?
xml.FedExFreightAccountNumber(freight_options[:account])
build_location_node(xml, 'FedExFreightBillingContactAndAddress', freight_options[:billing_location])
xml.Role(freight_options[:role])
packages.each do |pkg|
xml.LineItems do
xml.FreightClass(freight_options[:freight_class])
xml.Packaging(freight_options[:packaging])
build_package_weight_node(xml, pkg, imperial)
build_package_dimensions_node(xml, pkg, imperial)
end
end
end
end
# Whether freight-specific options were supplied for this request.
# Returns a boolean (the previous version returned nil when the key was
# absent; callers only ever test truthiness).
def has_freight?(options)
  # present? already returns false for nil, so the former
  # `options[:freight] && options[:freight].present?` guard was redundant.
  options[:freight].present?
end
# Emits the Weight node in LB or KG, rounded to 3 decimals and clamped to
# a minimum of 0.1 (FedEx rejects zero weights).
def build_package_weight_node(xml, pkg, imperial)
xml.Weight do
xml.Units(imperial ? 'LB' : 'KG')
xml.Value([((imperial ? pkg.lbs : pkg.kgs).to_f * 1000).round / 1000.0, 0.1].max)
end
end
# Emits the Dimensions node (Length/Width/Height + Units). Values are
# rounded to 3 decimals, then ceil'd because FedEx expects whole numbers.
def build_package_dimensions_node(xml, pkg, imperial)
xml.Dimensions do
[:length, :width, :height].each do |axis|
value = ((imperial ? pkg.inches(axis) : pkg.cm(axis)).to_f * 1000).round / 1000.0 # 3 decimals
xml.public_send(axis.to_s.capitalize, value.ceil)
end
xml.Units(imperial ? 'IN' : 'CM')
end
end
# Emits the RateRequestTypes node; 'PREFERRED' is used when the caller
# requested rates in a specific currency, 'ACCOUNT' otherwise.
def build_rate_request_types_node(xml, type = 'ACCOUNT')
xml.RateRequestTypes(type)
end
# Builds the TrackRequest XML (trck service v7) used by find_tracking_info,
# including optional ship-date range and unique-identifier filters.
def build_tracking_request(tracking_number, options = {})
xml_builder = Nokogiri::XML::Builder.new do |xml|
xml.TrackRequest(xmlns: 'http://fedex.com/ws/track/v7') do
build_request_header(xml)
build_version_node(xml, 'trck', 7, 0, 0)
xml.SelectionDetails do
xml.PackageIdentifier do
xml.Type(PACKAGE_IDENTIFIER_TYPES[options[:package_identifier_type] || 'tracking_number'])
xml.Value(tracking_number)
end
xml.ShipDateRangeBegin(options[:ship_date_range_begin]) if options[:ship_date_range_begin]
xml.ShipDateRangeEnd(options[:ship_date_range_end]) if options[:ship_date_range_end]
xml.TrackingNumberUniqueIdentifier(options[:unique_identifier]) if options[:unique_identifier]
end
# Ask for every scan event, not just the latest status.
xml.ProcessingOptions('INCLUDE_DETAILED_SCANS')
end
end
xml_builder.to_xml
end
# Emits the authentication, client, and transaction header nodes shared by
# every FedEx request (key/password, account/meter, transaction id).
def build_request_header(xml)
xml.WebAuthenticationDetail do
xml.UserCredential do
xml.Key(@options[:key])
xml.Password(@options[:password])
end
end
xml.ClientDetail do
xml.AccountNumber(@options[:account])
# FedEx's "meter number" is stored under the generic :login option.
xml.MeterNumber(@options[:login])
end
xml.TransactionDetail do
xml.CustomerTransactionId(@options[:transaction_id] || 'ActiveShipping') # TODO: Need to do something better with this...
end
end
# Emits the Version node identifying which FedEx WSDL service/version the
# request targets (e.g. 'crs' 13.0.0 for rates, 'trck' 7.0.0 for tracking).
def build_version_node(xml, service_id, major, intermediate, minor)
xml.Version do
xml.ServiceId(service_id)
xml.Major(major)
xml.Intermediate(intermediate)
xml.Minor(minor)
end
end
# Emits an address-only party node (named +name+, e.g. 'Shipper') for rate
# requests; unlike build_contact_address_nodes it carries no Contact block.
def build_location_node(xml, name, location)
xml.public_send(name) do
xml.Address do
xml.StreetLines(location.address1) if location.address1
xml.StreetLines(location.address2) if location.address2
xml.City(location.city) if location.city
xml.PostalCode(location.postal_code)
xml.CountryCode(location.country_code(:alpha2))
# NOTE(review): here Residential defaults on unless explicitly
# commercial, whereas build_contact_address_nodes only sets it when
# residential? — confirm the asymmetry is intended.
xml.Residential(true) unless location.commercial?
end
end
end
# Parses a RateReply document into a RateResponse. Each RateReplyDetails
# node becomes a RateEstimate; nodes with missing fields are dropped (and
# logged) rather than failing the whole response. When options[:currency]
# was sent, the PREFERRED_ACCOUNT_SHIPMENT rate detail is used.
def parse_rate_response(origin, destination, packages, response, options)
xml = build_document(response, 'RateReply')
success = response_success?(xml)
message = response_message(xml)
if success
missing_xml_field = false
rate_estimates = xml.root.css('> RateReplyDetails').map do |rated_shipment|
begin
service_code = rated_shipment.at('ServiceType').text
is_saturday_delivery = rated_shipment.at('AppliedOptions').try(:text) == 'SATURDAY_DELIVERY'
service_type = is_saturday_delivery ? "#{service_code}_SATURDAY_DELIVERY" : service_code
# Ground services report transit times instead of a delivery timestamp.
transit_time = rated_shipment.at('TransitTime').text if ["FEDEX_GROUND", "GROUND_HOME_DELIVERY"].include?(service_code)
max_transit_time = rated_shipment.at('MaximumTransitTime').try(:text) if service_code == "FEDEX_GROUND"
delivery_timestamp = rated_shipment.at('DeliveryTimestamp').try(:text)
delivery_range = delivery_range_from(transit_time, max_transit_time, delivery_timestamp, (service_code == "GROUND_HOME_DELIVERY"), options)
if options[:currency]
preferred_rate = rated_shipment.at("RatedShipmentDetails/ShipmentRateDetail/RateType[text() = 'PREFERRED_ACCOUNT_SHIPMENT']").parent
total_price = preferred_rate.at("TotalNetCharge/Amount").text.to_f
currency = preferred_rate.at("TotalNetCharge/Currency").text
else
total_price = rated_shipment.at('RatedShipmentDetails/ShipmentRateDetail/TotalNetCharge/Amount').text.to_f
currency = rated_shipment.at('RatedShipmentDetails/ShipmentRateDetail/TotalNetCharge/Currency').text
end
RateEstimate.new(origin, destination, @@name,
self.class.service_name_for_code(service_type),
:service_code => service_code,
:total_price => total_price,
:currency => currency,
:packages => packages,
:delivery_range => delivery_range)
rescue NoMethodError
# A nil .at(...) lookup means the detail was incomplete; skip it.
missing_xml_field = true
nil
end
end
rate_estimates = rate_estimates.compact
logger.warn("[FedexParseRateError] Some fields where missing in the response: #{response}") if logger && missing_xml_field
if rate_estimates.empty?
success = false
if missing_xml_field
message = "The response from the carrier contained errors and could not be treated"
else
message = "No shipping rates could be found for the destination address" if message.blank?
end
end
else
rate_estimates = []
end
RateResponse.new(success, message, Hash.from_xml(response), :rates => rate_estimates, :xml => response, :request => last_request, :log_xml => options[:log_xml])
end
# Returns a two-element [earliest, latest] delivery range. Uses the
# explicit delivery timestamp when present; otherwise derives dates by
# adding the (min, max) transit day counts to the pickup date, skipping
# non-business days for the given service.
def delivery_range_from(transit_time, max_transit_time, delivery_timestamp, is_home_delivery, options)
delivery_range = [delivery_timestamp, delivery_timestamp]
# if there's no delivery timestamp but we do have a transit time, use it
if delivery_timestamp.blank? && transit_time.present?
transit_range = parse_transit_times([transit_time, max_transit_time.presence || transit_time])
pickup_date = options[:pickup_date] || ship_date(options[:turn_around_time])
delivery_range = transit_range.map { |days| business_days_from(pickup_date, days, is_home_delivery) }
end
delivery_range
end
# Parses a ProcessShipmentReply into a LabelResponse carrying the tracking
# number and the Base64-decoded label image.
def parse_ship_response(response)
xml = build_document(response, 'ProcessShipmentReply')
success = response_success?(xml)
message = response_message(xml)
response_info = Hash.from_xml(response)
tracking_number = xml.css("CompletedPackageDetails TrackingIds TrackingNumber").text
base_64_image = xml.css("Label Image").text
labels = [Label.new(tracking_number, Base64.decode64(base_64_image))]
LabelResponse.new(success, message, response_info, {labels: labels})
end
# Walks forward from +date+ until +days+ qualifying business days have
# elapsed. "Business day" depends on the service: Ground skips weekends,
# Home Delivery skips Sundays and Mondays (see the predicates below).
def business_days_from(date, days, is_home_delivery=false)
  future_date = date
  remaining = days
  while remaining > 0
    future_date += 1.day
    counted = is_home_delivery ? home_delivery_business_day?(future_date) : business_day?(future_date)
    remaining -= 1 if counted
  end
  future_date
end
# Transit times for FedEx Ground exclude Saturdays, Sundays, and holidays
# (holidays are not accounted for here).
def business_day?(date)
  date.wday.between?(1, 5)
end
# Transit times for FedEx Home Delivery exclude Sundays, Mondays, and
# holidays (holidays are not accounted for here).
def home_delivery_business_day?(date)
  date.wday.between?(2, 6)
end
# Parses a TrackReply into a TrackingResponse. Handles: zero matches
# (failed response), multiple matches (failed response listing unique
# identifiers), transient "not found yet" codes (raises ShipmentNotFound),
# and hard errors (raises ResponseContentError). On success it extracts
# status, addresses, timestamps, and the sorted list of scan events.
def parse_tracking_response(response, options)
xml = build_document(response, 'TrackReply')
success = response_success?(xml)
message = response_message(xml)
if success
# The outer reply can succeed while the per-track detail fails.
tracking_details_root = xml.at('CompletedTrackDetails')
success = response_success?(tracking_details_root)
message = response_message(tracking_details_root)
end
if success
delivery_signature = nil
shipment_events = []
all_tracking_details = xml.root.xpath('CompletedTrackDetails/TrackDetails')
tracking_details = case all_tracking_details.length
when 1
all_tracking_details.first
when 0
message = "The response did not contain tracking details"
return TrackingResponse.new(
false,
message,
Hash.from_xml(response),
carrier: @@name,
xml: response,
request: last_request
)
else
all_unique_identifiers = xml.root.xpath('CompletedTrackDetails/TrackDetails/TrackingNumberUniqueIdentifier').map(&:text)
# TODO: "unqiue" typo in this user-facing message (runtime string;
# not changed in a documentation-only pass).
message = "Multiple matches were found. Specify a unqiue identifier: #{all_unique_identifiers.join(', ')}"
return TrackingResponse.new(
false,
message,
Hash.from_xml(response),
carrier: @@name,
xml: response,
request: last_request
)
end
first_notification = tracking_details.at('Notification')
severity = first_notification.at('Severity').text
if severity == 'ERROR' || severity == 'FAILURE'
message = first_notification.try(:text)
code = first_notification.at('Code').try(:text)
case code
when *TRANSIENT_TRACK_RESPONSE_CODES
# Package exists but has no scans yet; callers may retry later.
raise ActiveShipping::ShipmentNotFound, first_notification.at('Message').text
else
raise ActiveShipping::ResponseContentError, StandardError.new(first_notification.at('Message').text)
end
end
tracking_number = tracking_details.at('TrackingNumber').text
status_detail = tracking_details.at('StatusDetail')
if status_detail.blank?
status_code, status, status_description, delivery_signature = nil
else
status_code = status_detail.at('Code').try(:text)
status_description = status_detail.at('AncillaryDetails/ReasonDescription').try(:text) || status_detail.at('Description').try(:text)
status = TRACKING_STATUS_CODES[status_code]
# Only delivered packages with proof-of-delivery expose a signature name.
if status_code == 'DL' && tracking_details.at('AvailableImages').try(:text) == 'SIGNATURE_PROOF_OF_DELIVERY'
delivery_signature = tracking_details.at('DeliverySignatureName').try(:text)
end
end
origin = if origin_node = tracking_details.at('OriginLocationAddress')
Location.new(
country: origin_node.at('CountryCode').text,
province: origin_node.at('StateOrProvinceCode').text,
city: origin_node.at('City').text
)
end
destination = extract_address(tracking_details, DELIVERY_ADDRESS_NODE_NAMES)
shipper_address = extract_address(tracking_details, SHIPPER_ADDRESS_NODE_NAMES)
ship_time = extract_timestamp(tracking_details, 'ShipTimestamp')
actual_delivery_time = extract_timestamp(tracking_details, 'ActualDeliveryTimestamp')
scheduled_delivery_time = extract_timestamp(tracking_details, 'EstimatedDeliveryTimestamp')
tracking_details.xpath('Events').each do |event|
address = event.at('Address')
# Skip synthetic events with no usable location.
next if address.nil? || address.at('CountryCode').nil?
city = address.at('City').try(:text)
state = address.at('StateOrProvinceCode').try(:text)
zip_code = address.at('PostalCode').try(:text)
country = address.at('CountryCode').try(:text)
location = Location.new(:city => city, :state => state, :postal_code => zip_code, :country => country)
description = event.at('EventDescription').text
type_code = event.at('EventType').text
time = Time.parse(event.at('Timestamp').text)
zoneless_time = time.utc
shipment_events << ShipmentEvent.new(description, zoneless_time, location, description, type_code)
end
shipment_events = shipment_events.sort_by(&:time)
end
TrackingResponse.new(
success,
message,
Hash.from_xml(response),
carrier: @@name,
xml: response,
request: last_request,
status: status,
status_code: status_code,
status_description: status_description,
ship_time: ship_time,
scheduled_delivery_date: scheduled_delivery_time,
actual_delivery_date: actual_delivery_time,
delivery_signature: delivery_signature,
shipment_events: shipment_events,
shipper_address: (shipper_address.nil? || shipper_address.unknown?) ? nil : shipper_address,
origin: origin,
destination: destination,
tracking_number: tracking_number
)
end
# Shipment-ready time: now plus an optional turnaround delay in hours
# (nil is treated as no delay).
def ship_timestamp(delay_in_hours)
  Time.now + (delay_in_hours || 0).hours
end
# Shipment-ready date: the date component of ship_timestamp.
def ship_date(delay_in_hours)
  # Delegate so the two "now + turnaround" computations cannot drift apart.
  ship_timestamp(delay_in_hours).to_date
end
# True when the reply's HighestSeverity node reports a non-fatal status
# (SUCCESS, WARNING, or NOTE). False when the node is absent.
def response_success?(document)
  severity_node = document.at('HighestSeverity')
  return false unless severity_node
  %w(SUCCESS WARNING NOTE).include?(severity_node.text)
end
# Formats the reply's first Notifications node as "SEVERITY - CODE: MESSAGE".
# Returns the empty string when the document carries no notifications.
def response_message(document)
  notifications = document.at('Notifications')
  return "" unless notifications
  severity = notifications.at('Severity').text
  code = notifications.at('Code').text
  message = notifications.at('Message').text
  "#{severity} - #{code}: #{message}"
end
# POSTs the XML payload (with newlines stripped, as FedEx requires) to the
# test or live gateway depending on +test+.
def commit(request, test = false)
  endpoint = test ? TEST_URL : LIVE_URL
  ssl_post(endpoint, request.delete("\n"))
end
# Converts FedEx transit-time tokens (e.g. "TWO_DAYS") into integer day
# counts using their position in TRANSIT_TIMES. Unrecognized tokens map
# to 0 (index returns nil, and nil.to_i is 0).
def parse_transit_times(times)
  times.map do |day_count|
    TRANSIT_TIMES.index(day_count.to_s.chomp).to_i
  end
end
# Builds a Location from the first node found under any of
# +possible_node_names+ (searched in order, stopping at the first hit).
# A missing CountryCode falls back to a placeholder "unknown territory"
# country; missing province/city become the string 'unknown'.
# Returns an empty Location when none of the node names match.
def extract_address(document, possible_node_names)
  node = possible_node_names.lazy.map { |name| document.at(name) }.reject(&:nil?).first
  attributes = {}
  if node
    attributes[:country] =
      node.at('CountryCode').try(:text) ||
      ActiveUtils::Country.new(:alpha2 => 'ZZ', :name => 'Unknown or Invalid Territory', :alpha3 => 'ZZZ', :numeric => '999')
    attributes[:province] = node.at('StateOrProvinceCode').try(:text) || 'unknown'
    attributes[:city] = node.at('City').try(:text) || 'unknown'
  end
  Location.new(attributes)
end
# Reads the named node's timestamp text. Values stamped exactly at
# midnight are treated as date-only and returned as a Date; anything else
# is parsed as a Time. Returns nil when the node is absent.
def extract_timestamp(document, node_name)
  timestamp_node = document.at(node_name)
  return unless timestamp_node
  raw = timestamp_node.text
  if raw =~ /\A(\d{4}-\d{2}-\d{2})T00:00:00\Z/
    Date.parse(Regexp.last_match(1))
  else
    Time.parse(raw)
  end
end
# Parses +xml+ strictly with Nokogiri, strips namespaces so later lookups
# can use bare node names, and verifies the root element matches
# +expected_root_tag+.
# Raises ActiveShipping::ResponseContentError (wrapping the syntax error,
# or a generic StandardError for a wrong/missing root) on malformed input.
def build_document(xml, expected_root_tag)
document = Nokogiri.XML(xml) { |config| config.strict }
document.remove_namespaces!
if document.root.nil? || document.root.name != expected_root_tag
raise ActiveShipping::ResponseContentError.new(StandardError.new('Invalid document'), xml)
end
document
rescue Nokogiri::XML::SyntaxError => e
raise ActiveShipping::ResponseContentError.new(e, xml)
end
# Whether rates/labels for +location+ should use imperial units (LB/IN).
# Only the US, Liberia, and Myanmar use imperial measurements.
def location_uses_imperial(location)
  country = location.country_code(:alpha2)
  ['US', 'LR', 'MM'].include?(country)
end
end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.