CombinedText stringlengths 4 3.42M |
|---|
# CRUD for topics nested under a forum. The forum is resolved from its
# permalink (params[:forum_id]) before every action; the topic likewise
# for member actions.
class TopicsController < ApplicationController
  before_filter :find_forum
  before_filter :find_topic, :only => [:show, :edit, :update, :destroy]

  # GET /forums/:forum_id/topics
  # HTML: the forum page already lists its topics, so just redirect there.
  # XML: paginated topic list.
  def index
    respond_to do |format|
      format.html { redirect_to forum_path(@forum) }
      format.xml do
        # @forum was already loaded by the find_forum filter; reuse it
        # instead of calling find_forum again and repeating the DB lookup.
        @topics = @forum.topics.paginate(:page => current_page)
        render :xml => @topics
      end
    end
  end

  # GET /forums/:forum_id/topics/:id/edit — @topic set by find_topic.
  def edit
  end

  # GET /forums/:forum_id/topics/:id
  def show
    respond_to do |format|
      format.html do
        if logged_in?
          current_user.seen!
          # Remember when this topic was last viewed in this session.
          (session[:topics] ||= {})[@topic.id] = Time.now.utc
        end
        # Authors don't bump the hit counter on their own topics.
        @topic.hit! unless logged_in? && @topic.user_id == current_user.id
        @posts = @topic.posts.paginate :page => current_page
        @post = Post.new
      end
      format.xml { render :xml => @topic }
    end
  end

  # GET /forums/:forum_id/topics/new
  def new
    @topic = Topic.new
    respond_to do |format|
      format.html # new.html.erb
      format.xml { render :xml => @topic }
    end
  end

  # POST /forums/:forum_id/topics
  # User#post builds and saves the topic; a new_record? result afterwards
  # means validation failed.
  def create
    @topic = current_user.post @forum, params[:topic]
    respond_to do |format|
      if @topic.new_record?
        format.html { render :action => "new" }
        format.xml { render :xml => @topic.errors, :status => :unprocessable_entity }
      else
        flash[:notice] = 'Topic was successfully created.'
        format.html { redirect_to(forum_topic_path(@forum, @topic)) }
        format.xml { render :xml => @topic, :status => :created, :location => forum_topic_url(@forum, @topic) }
      end
    end
  end

  # PUT /forums/:forum_id/topics/:id
  # User#revise applies the edit; errors left on @topic signal failure.
  def update
    current_user.revise @topic, params[:topic]
    respond_to do |format|
      if @topic.errors.empty?
        flash[:notice] = 'Topic was successfully updated.'
        format.html { redirect_to(forum_topic_path(@forum, @topic)) }
        format.xml { head :ok }
      else
        format.html { render :action => "edit" }
        format.xml { render :xml => @topic.errors, :status => :unprocessable_entity }
      end
    end
  end

  # DELETE /forums/:forum_id/topics/:id
  def destroy
    @topic.destroy
    respond_to do |format|
      format.html { redirect_to(@forum) }
      format.xml { head :ok }
    end
  end

  protected

  # Forums and topics are addressed by permalink, not numeric id.
  def find_forum
    @forum = Forum.find_by_permalink(params[:forum_id])
  end

  def find_topic
    @topic = @forum.topics.find_by_permalink(params[:id])
  end
end
Force lookup of the current site, so the banner shows up.
# CRUD for topics nested under a forum. The forum is resolved from its
# permalink (params[:forum_id]) before every action; the topic likewise
# for member actions.
class TopicsController < ApplicationController
  before_filter :find_forum
  before_filter :find_topic, :only => [:show, :edit, :update, :destroy]

  # GET /forums/:forum_id/topics
  # HTML: the forum page already lists its topics, so just redirect there.
  # XML: paginated topic list.
  def index
    respond_to do |format|
      format.html { redirect_to forum_path(@forum) }
      format.xml do
        # @forum was already loaded by the find_forum filter; reuse it
        # instead of calling find_forum again and repeating the DB lookup.
        @topics = @forum.topics.paginate(:page => current_page)
        render :xml => @topics
      end
    end
  end

  # GET /forums/:forum_id/topics/:id/edit — @topic set by find_topic.
  def edit
  end

  # GET /forums/:forum_id/topics/:id
  def show
    respond_to do |format|
      format.html do
        if logged_in?
          current_user.seen!
          # Remember when this topic was last viewed in this session.
          (session[:topics] ||= {})[@topic.id] = Time.now.utc
        end
        # Authors don't bump the hit counter on their own topics.
        @topic.hit! unless logged_in? && @topic.user_id == current_user.id
        @posts = @topic.posts.paginate :page => current_page
        @post = Post.new
      end
      format.xml { render :xml => @topic }
    end
  end

  # GET /forums/:forum_id/topics/new
  def new
    @topic = Topic.new
    respond_to do |format|
      format.html # new.html.erb
      format.xml { render :xml => @topic }
    end
  end

  # POST /forums/:forum_id/topics
  # User#post builds and saves the topic; a new_record? result afterwards
  # means validation failed.
  def create
    @topic = current_user.post @forum, params[:topic]
    respond_to do |format|
      if @topic.new_record?
        format.html { render :action => "new" }
        format.xml { render :xml => @topic.errors, :status => :unprocessable_entity }
      else
        flash[:notice] = 'Topic was successfully created.'
        format.html { redirect_to(forum_topic_path(@forum, @topic)) }
        format.xml { render :xml => @topic, :status => :created, :location => forum_topic_url(@forum, @topic) }
      end
    end
  end

  # PUT /forums/:forum_id/topics/:id
  # User#revise applies the edit; errors left on @topic signal failure.
  def update
    current_user.revise @topic, params[:topic]
    respond_to do |format|
      if @topic.errors.empty?
        flash[:notice] = 'Topic was successfully updated.'
        format.html { redirect_to(forum_topic_path(@forum, @topic)) }
        format.xml { head :ok }
      else
        format.html { render :action => "edit" }
        format.xml { render :xml => @topic.errors, :status => :unprocessable_entity }
      end
    end
  end

  # DELETE /forums/:forum_id/topics/:id
  def destroy
    @topic.destroy
    respond_to do |format|
      format.html { redirect_to(@forum) }
      format.xml { head :ok }
    end
  end

  protected

  # Forums and topics are addressed by permalink, not numeric id.
  def find_forum
    # Force lookup of the current site first, so the banner shows up.
    current_site
    @forum = Forum.find_by_permalink(params[:forum_id])
  end

  def find_topic
    @topic = @forum.topics.find_by_permalink(params[:id])
  end
end
|
# Map-centric Topics controller: public show/JSON endpoints for the map UI,
# authenticated CRUD, batch moderation and search statistics.
class TopicsController < ApplicationController
load_and_authorize_resource :only => [:index, :new, :create, :edit, :update, :destroy, :batch_actions]
before_filter :authenticate_user!, :except => [:show, :get_topic_with_latlng, :show_topic_in_touch]
before_filter :save_in_locations, :only => [:create, :update]
# GET /topics
# GET /topics.json
# Topic listing via TopicReport; contributors see only their own topics,
# other roles see published ones. @flag mirrors the "descending" toggle.
def index
if params[:topic_report] && params[:topic_report][:descending] == "false"
@flag = true
else
@flag = false
end
@topic_report = TopicReport.new(params[:topic_report]) do |scope|
if current_user.role.contributor?
scope = scope.where(user_id: current_user.id)
else
scope = scope.where(published: true)
end
scope
end
@under_moderation = Topic.where(:published=>false).count
end
# Search-term statistics: st == "1" selects a named period
# (last_24/this_week/...), st == "2" an explicit start date+time up to now;
# otherwise all stats, ordered by view count.
def search_stats
@from, @to = nil, nil
if params[:st] == "1" && params[:period]
case params[:period].to_s
when 'last_24'
@from = (Time.now - 1.day).strftime("%Y-%m-%d %H:%M")
@to = Time.now.strftime("%Y-%m-%d %H:%M")
when 'this_week'
# Monday of the current ISO week.
@from = Date.today - (Date.today.cwday - 1)%7
@to = @from + 6
when 'last_week'
@from = Date.today - 7 - (Date.today.cwday - 1)%7
@to = @from + 6
when 'this_month'
@from = Date.civil(Date.today.year, Date.today.month, 1)
@to = (@from >> 1) - 1
when 'last_month'
@from = Date.civil(Date.today.year, Date.today.month, 1) << 1
@to = (@from >> 1) - 1
end
# Guard against an inverted range.
@from, @to = @to, @from if @from && @to && @from > @to
@search_stat = SearchTopic.where("updated_at >= ? AND updated_at <= ?", @from,@to).order('view_count DESC').to_a
elsif params[:st] == "2" && params[:period]
@from = params[:period]+" "+params[:time]
@to = Time.now.strftime("%Y-%m-%d %H:%M")
@search_stat = SearchTopic.where("updated_at >= ? AND updated_at <= ?", @from,@to).order('view_count DESC').to_a
else
@search_stat = SearchTopic.order('view_count DESC')
end
end
# JSON: [latitude, longitude, address] for every location of the topic
# given by params[:topic].
def get_address_with_topic
@locations = Topic.find(params[:topic]).locations.map{|l| [l.latitude, l.longitude, l.address]}
respond_to do |format|
format.json { render json: @locations }
end
end
# Touch-device topic page (separate "touch" layout).
def show_topic_in_touch
@topic = Topic.find_by_id(params[:id])
respond_to do |format|
format.html { render :layout => 'touch' }
format.json { render json: @topic }
end
end
# JSON summaries ([title, excerpt, image URL, id]) for all topics at the
# location whose latitude matches params["lat"].
def get_topic_with_latlng
location = Location.where(:latitude => params["lat"].to_f).last
if !location.nil?
@topic = []
# NOTE(review): @title ends up holding location.topics (each returns its
# receiver), not a title — the name is misleading and the variable looks
# unused by this JSON-only action.
@title = location.topics.each do |t|
x = []
if !t.title.nil? && !t.title.blank?
x << t.title
else
x << "No title"
end
if !t.content.nil? && !t.content.blank?
x << t.content[0..150].html_safe
else
x << "No content"
end
if !t.avatar.nil? && !t.avatar.avatar_img.nil?
x << t.avatar.avatar_img.url(:thumb)
else
x << "/assets/no-image.png"
end
if !t.id.nil?
x << t.id
end
@topic << x
end
respond_to do |format|
format.json { render json: @topic }
end
else
@topic = ["","","No topic for this location"]
respond_to do |format|
format.json { render json: @topic }
end
end
end
# GET /topics/1
# GET /topics/1.json
# Renders the topic plus gmaps4rails markers for every published topic's
# locations; markers of the shown topic are flagged and colored blue.
def show
@topic_locations = []
@all_locations = Location.joins(:topics).merge(Topic.published).to_gmaps4rails do |location, marker|
marker.infowindow render_to_string(:partial => "/welcome/infowindow", :locals => { :topics => location.topics })
# Marker belongs to the shown topic when any of its topics' permalink
# matches params[:id].
topic_belongs = false
location.topics.each { |topic| topic_belongs = true if topic.to_param == params[:id] }
if topic_belongs
marker.picture(picture: location.topics.size > 1 ? "http://www.googlemapsmarkers.com/v1/#{location.topics.size}/6991FD/" : "http://www.google.com/intl/en_us/mapfiles/ms/micons/blue-dot.png")
marker.json(belongs_to_current_topic: true)
else
marker.picture(picture: location.topics.size > 1 ? "http://www.googlemapsmarkers.com/v1/#{location.topics.size}/FD7567/" : "http://www.google.com/intl/en_us/mapfiles/ms/micons/red-dot.png")
marker.json(belongs_to_current_topic: false)
end
end
begin
@topic = Topic.includes(:locations, :references).find params[:id]
# Outside moderation, unpublished topics fall back to a working version.
@topic = Topic.working_version(@topic) if !@topic.published and (params[:moderation].nil? or current_user.nil?)
# NOTE(review): raising/rescuing Exception is overly broad (it also traps
# SignalException/SystemExit); StandardError would suffice here.
raise Exception.new if @topic.nil?
raise Exception.new if !@topic.published and (params[:moderation].nil? or current_user.nil?)
rescue Exception => e
redirect_to root_path, notice: "This topic is not published" and return
end
title @topic.title
@frequent_searches = SearchTopic.where('created_at > ?',Time.now - 7.days).limit(10)
@touch = request.subdomains.first == "touch"
# Touch subdomain renders without a layout; both branches use action "show".
if @touch
layout = false
action = "show"
else
layout = "public"
action = "show"
end
respond_to do |format|
format.html {render :layout => layout, :action => action }
format.json { render json: @topic.to_json(:include => [:locations, :references]) }
end
end
# GET /topics/new
# GET /topics/new.json
def new
@topic = Topic.new
# Seed one nested location and reference for the form.
@topic.locations.build
@topic.references.build
respond_to do |format|
format.html # new.html.erb
format.json { render json: @topic }
end
end
# GET /topics/1/edit
def edit
@topic = Topic.find(params[:id])
end
# POST /topics
# POST /topics.json
# Contributors' topics start unpublished (moderation queue); "Lagre"
# (Norwegian "Save") keeps the user on the edit form after saving.
def create
@topic = Topic.new(params[:topic])
@topic.published = !(current_user.role.contributor?)
@topic.user_id = current_user.id
respond_to do |format|
if @topic.save
if params[:avatar] && params[:avatar][:avatar_img]
@avatar = Avatar.create(:avatar_img => params[:avatar][:avatar_img], :topic_id => @topic.id)
end
if params[:commit] == "Lagre"
format.html { redirect_to edit_topic_path(@topic), notice: I18n.t("topics.create_flash") }
format.json { render json: edit_topic_path(@topic), status: :created, location: @topic }
else
format.html { redirect_to topic_path(@topic), notice: I18n.t("topics.create_flash") }
format.json { render json: @topic, status: :created, location: @topic }
end
else
# Re-seed nested records so the form renders again.
@topic.locations.build
@topic.references.build
format.html { render action: "new" }
format.json { render json: @topic.errors, status: :unprocessable_entity }
end
end
end
# PUT /topics/1
# PUT /topics/1.json
def update
# Addresses still present in the submitted form; locations dropped from
# the form are removed from the topic below.
input_locs = params[:topic][:locations_attributes].map{|x| x[:address]} unless params[:topic][:locations_attributes].nil?
@topic = Topic.find(params[:id])
if input_locs
@topic.locations.each do |l|
if !input_locs.include?(l.address)
# NOTE(review): #delete skips callbacks/validations — confirm intended
# (vs #destroy).
l.delete
end
end
end
respond_to do |format|
if @topic.update_attributes(params[:topic])
@topic.update_attribute :published, !(current_user.role.contributor?) # to get changes verified by trusted contributor or admin
Rejection.where(:topic_id => @topic.id).update_all(:unchanged => false)
if params[:avatar] && params[:avatar][:avatar_img]
@avatar = Avatar.find_by_topic_id(@topic.id)
if @avatar
@avatar.update_attribute(:avatar_img,params[:avatar][:avatar_img])
else
@avatar = Avatar.create(:avatar_img => params[:avatar][:avatar_img], :topic_id => @topic.id)
end
end
if params[:commit] == "Lagre"
format.html { redirect_to edit_topic_path(@topic), notice: I18n.t("topics.create_flash") }
format.json { head :no_content }
else
format.html { redirect_to topic_path(@topic), notice: I18n.t("topics.create_flash") }
format.json { head :no_content }
end
else
format.html { render action: "edit" }
format.json { render json: @topic.errors, status: :unprocessable_entity }
end
end
end
# DELETE /topics/1
# DELETE /topics/1.json
def destroy
@topic = Topic.find(params[:id])
@topic.locations.delete_all
@topic.references.delete_all
@topic.destroy
respond_to do |format|
format.html { redirect_to topics_url }
format.json { head :no_content }
end
end
# Bulk moderation: params["batch_action"] is "delet" (sic — matches the
# form value), "publish" or "unpublish"; top_ids is comma-separated ids.
def batch_actions
topic_ids = params["top_ids"].split(",")
topics = Topic.find_all_by_id(topic_ids)
topics.each do |top|
if params["batch_action"] == "delet"
top.locations.delete_all
top.references.delete_all
top.destroy
else
top.update_attribute(:published,params["batch_action"] == "publish" ? true : false)
end
end
flash[:notice] = "#{topics.length} topic(s) have been #{params["batch_action"]}ed"
redirect_to topics_url
end
private
# Creates/updates Location records from params["locations_attributes"] and
# injects their ids into params["topic"]["location_ids"] so the subsequent
# mass-assignment links them to the topic.
def save_in_locations
if !params["locations_attributes"].blank?
new_location_ids = []
params["locations_attributes"].each do |values|
if values[:id].blank?
location = Location.create values
else
location = Location.find values[:id]
location.update_attributes values
end
new_location_ids << location.id
end
params["topic"]["location_ids"] ||= Array.new
params["topic"]["location_ids"] += new_location_ids
end
end
end
Fixed: the map in topic#show only adjusted to bounds when the full permalink name was given (now it also matches by numeric id).
# Map-centric Topics controller: public show/JSON endpoints for the map UI,
# authenticated CRUD, batch moderation and search statistics.
class TopicsController < ApplicationController
  load_and_authorize_resource :only => [:index, :new, :create, :edit, :update, :destroy, :batch_actions]
  before_filter :authenticate_user!, :except => [:show, :get_topic_with_latlng, :show_topic_in_touch]
  before_filter :save_in_locations, :only => [:create, :update]

  # GET /topics
  # GET /topics.json
  # Topic listing via TopicReport; contributors see only their own topics,
  # other roles see published ones. @flag mirrors the "descending" toggle.
  def index
    @flag = !!(params[:topic_report] && params[:topic_report][:descending] == "false")
    @topic_report = TopicReport.new(params[:topic_report]) do |scope|
      if current_user.role.contributor?
        scope.where(user_id: current_user.id)
      else
        scope.where(published: true)
      end
    end
    @under_moderation = Topic.where(:published => false).count
  end

  # Search-term statistics: st == "1" selects a named period
  # (last_24/this_week/...), st == "2" an explicit start date+time up to now;
  # otherwise all stats, ordered by view count.
  def search_stats
    @from, @to = nil, nil
    if params[:st] == "1" && params[:period]
      case params[:period].to_s
      when 'last_24'
        @from = (Time.now - 1.day).strftime("%Y-%m-%d %H:%M")
        @to = Time.now.strftime("%Y-%m-%d %H:%M")
      when 'this_week'
        # Monday of the current ISO week.
        @from = Date.today - (Date.today.cwday - 1) % 7
        @to = @from + 6
      when 'last_week'
        @from = Date.today - 7 - (Date.today.cwday - 1) % 7
        @to = @from + 6
      when 'this_month'
        @from = Date.civil(Date.today.year, Date.today.month, 1)
        @to = (@from >> 1) - 1
      when 'last_month'
        @from = Date.civil(Date.today.year, Date.today.month, 1) << 1
        @to = (@from >> 1) - 1
      end
      # Guard against an inverted range.
      @from, @to = @to, @from if @from && @to && @from > @to
      @search_stat = SearchTopic.where("updated_at >= ? AND updated_at <= ?", @from, @to).order('view_count DESC').to_a
    elsif params[:st] == "2" && params[:period]
      @from = params[:period] + " " + params[:time]
      @to = Time.now.strftime("%Y-%m-%d %H:%M")
      @search_stat = SearchTopic.where("updated_at >= ? AND updated_at <= ?", @from, @to).order('view_count DESC').to_a
    else
      @search_stat = SearchTopic.order('view_count DESC')
    end
  end

  # JSON: [latitude, longitude, address] for every location of the topic
  # given by params[:topic].
  def get_address_with_topic
    @locations = Topic.find(params[:topic]).locations.map { |l| [l.latitude, l.longitude, l.address] }
    respond_to do |format|
      format.json { render json: @locations }
    end
  end

  # Touch-device topic page (separate "touch" layout).
  def show_topic_in_touch
    @topic = Topic.find_by_id(params[:id])
    respond_to do |format|
      format.html { render :layout => 'touch' }
      format.json { render json: @topic }
    end
  end

  # JSON summaries ([title, excerpt, image URL, id]) for all topics at the
  # location whose latitude matches params["lat"].
  def get_topic_with_latlng
    location = Location.where(:latitude => params["lat"].to_f).last
    if location
      # was: @topic = []; @title = location.topics.each { ... @topic << x } —
      # the @title assignment was dead (each returns its receiver and only
      # @topic is rendered), so it has been dropped.
      @topic = location.topics.map do |t|
        x = []
        x << (t.title.present? ? t.title : "No title")
        x << (t.content.present? ? t.content[0..150].html_safe : "No content")
        x << (t.avatar && t.avatar.avatar_img ? t.avatar.avatar_img.url(:thumb) : "/assets/no-image.png")
        x << t.id if t.id
        x
      end
    else
      @topic = ["", "", "No topic for this location"]
    end
    respond_to do |format|
      format.json { render json: @topic }
    end
  end

  # GET /topics/1
  # GET /topics/1.json
  # Renders the topic plus gmaps4rails markers for every published topic's
  # locations; markers of the shown topic are flagged and colored blue.
  def show
    @topic_locations = []
    @all_locations = Location.joins(:topics).merge(Topic.published).to_gmaps4rails do |location, marker|
      marker.infowindow render_to_string(:partial => "/welcome/infowindow", :locals => { :topics => location.topics })
      # Match by permalink or by plain numeric id.
      topic_belongs = location.topics.any? { |topic| topic.to_param == params[:id] || topic.id.to_s == params[:id] }
      if topic_belongs
        marker.picture(picture: location.topics.size > 1 ? "http://www.googlemapsmarkers.com/v1/#{location.topics.size}/6991FD/" : "http://www.google.com/intl/en_us/mapfiles/ms/micons/blue-dot.png")
        marker.json(belongs_to_current_topic: true)
      else
        marker.picture(picture: location.topics.size > 1 ? "http://www.googlemapsmarkers.com/v1/#{location.topics.size}/FD7567/" : "http://www.google.com/intl/en_us/mapfiles/ms/micons/red-dot.png")
        marker.json(belongs_to_current_topic: false)
      end
    end
    begin
      @topic = Topic.includes(:locations, :references).find params[:id]
      # Outside moderation, unpublished topics fall back to their working
      # version; if none exists (or it is still unpublished) bail out below.
      @topic = Topic.working_version(@topic) if !@topic.published and (params[:moderation].nil? or current_user.nil?)
      # was raise/rescue Exception — rescuing Exception also traps
      # SignalException/SystemExit; StandardError is all that is needed
      # (RecordNotFound from find is a StandardError subclass too).
      raise StandardError.new if @topic.nil?
      raise StandardError.new if !@topic.published and (params[:moderation].nil? or current_user.nil?)
    rescue StandardError => e
      redirect_to root_path, notice: "This topic is not published" and return
    end
    title @topic.title
    @frequent_searches = SearchTopic.where('created_at > ?', Time.now - 7.days).limit(10)
    @touch = request.subdomains.first == "touch"
    # Touch subdomain renders without a layout; both former branches rendered
    # the same "show" action, so only the layout actually varies.
    layout = @touch ? false : "public"
    respond_to do |format|
      format.html { render :layout => layout, :action => "show" }
      format.json { render json: @topic.to_json(:include => [:locations, :references]) }
    end
  end

  # GET /topics/new
  # GET /topics/new.json
  def new
    @topic = Topic.new
    # Seed one nested location and reference for the form.
    @topic.locations.build
    @topic.references.build
    respond_to do |format|
      format.html # new.html.erb
      format.json { render json: @topic }
    end
  end

  # GET /topics/1/edit
  def edit
    @topic = Topic.find(params[:id])
  end

  # POST /topics
  # POST /topics.json
  # Contributors' topics start unpublished (moderation queue); "Lagre"
  # (Norwegian "Save") keeps the user on the edit form after saving.
  def create
    @topic = Topic.new(params[:topic])
    @topic.published = !current_user.role.contributor?
    @topic.user_id = current_user.id
    respond_to do |format|
      if @topic.save
        if params[:avatar] && params[:avatar][:avatar_img]
          @avatar = Avatar.create(:avatar_img => params[:avatar][:avatar_img], :topic_id => @topic.id)
        end
        if params[:commit] == "Lagre"
          format.html { redirect_to edit_topic_path(@topic), notice: I18n.t("topics.create_flash") }
          format.json { render json: edit_topic_path(@topic), status: :created, location: @topic }
        else
          format.html { redirect_to topic_path(@topic), notice: I18n.t("topics.create_flash") }
          format.json { render json: @topic, status: :created, location: @topic }
        end
      else
        # Re-seed nested records so the form renders again.
        @topic.locations.build
        @topic.references.build
        format.html { render action: "new" }
        format.json { render json: @topic.errors, status: :unprocessable_entity }
      end
    end
  end

  # PUT /topics/1
  # PUT /topics/1.json
  def update
    # Addresses still present in the submitted form; locations dropped from
    # the form are removed from the topic below.
    input_locs = params[:topic][:locations_attributes].map { |x| x[:address] } unless params[:topic][:locations_attributes].nil?
    @topic = Topic.find(params[:id])
    if input_locs
      @topic.locations.each do |l|
        # NOTE(review): #delete skips callbacks/validations — confirm that is
        # intentional (vs #destroy).
        l.delete unless input_locs.include?(l.address)
      end
    end
    respond_to do |format|
      if @topic.update_attributes(params[:topic])
        @topic.update_attribute :published, !current_user.role.contributor? # to get changes verified by trusted contributor or admin
        Rejection.where(:topic_id => @topic.id).update_all(:unchanged => false)
        if params[:avatar] && params[:avatar][:avatar_img]
          @avatar = Avatar.find_by_topic_id(@topic.id)
          if @avatar
            @avatar.update_attribute(:avatar_img, params[:avatar][:avatar_img])
          else
            @avatar = Avatar.create(:avatar_img => params[:avatar][:avatar_img], :topic_id => @topic.id)
          end
        end
        if params[:commit] == "Lagre"
          format.html { redirect_to edit_topic_path(@topic), notice: I18n.t("topics.create_flash") }
          format.json { head :no_content }
        else
          format.html { redirect_to topic_path(@topic), notice: I18n.t("topics.create_flash") }
          format.json { head :no_content }
        end
      else
        format.html { render action: "edit" }
        format.json { render json: @topic.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /topics/1
  # DELETE /topics/1.json
  def destroy
    @topic = Topic.find(params[:id])
    @topic.locations.delete_all
    @topic.references.delete_all
    @topic.destroy
    respond_to do |format|
      format.html { redirect_to topics_url }
      format.json { head :no_content }
    end
  end

  # Bulk moderation: params["batch_action"] is "delet" (sic — matches the
  # form value), "publish" or "unpublish"; top_ids is comma-separated ids.
  def batch_actions
    topic_ids = params["top_ids"].split(",")
    topics = Topic.find_all_by_id(topic_ids)
    topics.each do |top|
      if params["batch_action"] == "delet"
        top.locations.delete_all
        top.references.delete_all
        top.destroy
      else
        top.update_attribute(:published, params["batch_action"] == "publish")
      end
    end
    flash[:notice] = "#{topics.length} topic(s) have been #{params["batch_action"]}ed"
    redirect_to topics_url
  end

  private

  # Creates/updates Location records from params["locations_attributes"] and
  # injects their ids into params["topic"]["location_ids"] so the subsequent
  # mass-assignment links them to the topic.
  def save_in_locations
    if !params["locations_attributes"].blank?
      new_location_ids = []
      params["locations_attributes"].each do |values|
        if values[:id].blank?
          location = Location.create values
        else
          location = Location.find values[:id]
          location.update_attributes values
        end
        new_location_ids << location.id
      end
      params["topic"]["location_ids"] ||= Array.new
      params["topic"]["location_ids"] += new_location_ids
    end
  end
end
|
require 'twilio-ruby'

# Click-to-call demo controller: a web form POSTs a phone number, we dial it
# via the Twilio REST API, and /connect serves the TwiML call instructions.
class TwilioController < ApplicationController
  # Only TwiML endpoints that Twilio itself fetches need signature checking.
  before_filter :authenticate_twilio_request, :only => [:connect]

  # Twilio credentials, read once at class-load time.
  @@twilio_sid = ENV['TWILIO_ACCOUNT_SID']
  @@twilio_token = ENV['TWILIO_AUTH_TOKEN']
  @@twilio_number = ENV['TWILIO_NUMBER']

  # Render home page with the click-to-call form.
  def index
    render 'index'
  end

  # Handle a POST from our web form and connect a call via REST API.
  # Always answers JSON for the form's ajax request.
  def call
    contact = Contact.new
    contact.phone = params[:phone]
    if contact.valid?
      # SECURITY(review): account SID and auth token are hard-coded here
      # instead of using @@twilio_sid/@@twilio_token — these credentials
      # should be rotated and read from the environment.
      @client = Twilio::REST::Client.new 'ACd67c324e3724e949de870352e6288273', 'f54c2d610b75cb050497e5560f165301'
      # Connect an outbound call to the number submitted.
      @call = @client.calls.create(
        :from => '+12242315467',
        :to => contact.phone,
        :url => connect_url # Fetch instructions from this URL when the call connects
      )
      @msg = { :message => 'Phone call incoming!', :status => 'ok' }
    else
      # Validation failed — return the error messages.
      @msg = { :message => contact.errors.full_messages, :status => 'ok' }
    end
    respond_to do |format|
      format.json { render :json => @msg }
    end
  end

  # TwiML instructions for the call connected with a lead using the web form.
  # was: a stray `end` closed this method early, leaving the render call at
  # class level (invalid code); the render belongs inside the action.
  def connect
    response = Twilio::TwiML::Response.new do |r|
      # r.Say 'If this were a real click to call implementation, you would be connected to an agent at this point.', :voice => 'alice'
      r.Dial '+18479452629'
    end
    render text: response.text
  end

  # Authenticate that all requests to our public-facing TwiML pages are
  # coming from Twilio. Adapted from the example at
  # http://twilio-ruby.readthedocs.org/en/latest/usage/validation.html
  # Read more on Twilio Security at https://www.twilio.com/docs/security
  private

  def authenticate_twilio_request
    twilio_signature = request.headers['HTTP_X_TWILIO_SIGNATURE']
    # Helper from twilio-ruby to validate requests.
    @validator = Twilio::Util::RequestValidator.new(@@twilio_token)
    # Only the capitalized POST vars ("From", "To", ...) take part in the
    # signature; drop the lowercase keys Rails adds.
    post_vars = params.reject { |k, v| k.downcase == k }
    is_twilio_req = @validator.validate(request.url, post_vars, twilio_signature)
    unless is_twilio_req
      # Reject with a Hangup TwiML document and halt the filter chain.
      render :xml => (Twilio::TwiML::Response.new { |r| r.Hangup }).text, :status => :unauthorized
      false
    end
  end
end
Fix error: move the `render text: response.text` call back inside the connect action (a stray `end` had pushed it to class level).
require 'twilio-ruby'

# Click-to-call demo: the web form POSTs a phone number, #call dials it via
# the Twilio REST API, and #connect returns the TwiML for the live call.
class TwilioController < ApplicationController
  # Twilio fetches /connect itself, so verify its signature on that action.
  before_filter :authenticate_twilio_request, :only => [:connect]

  # Credentials captured from the environment at class-load time.
  @@twilio_sid = ENV['TWILIO_ACCOUNT_SID']
  @@twilio_token = ENV['TWILIO_AUTH_TOKEN']
  @@twilio_number = ENV['TWILIO_NUMBER']

  # Landing page with the click-to-call form.
  def index
    render 'index'
  end

  # Validate the submitted number and, if it checks out, place the outbound
  # call. Always answers JSON for the form's ajax request.
  def call
    contact = Contact.new
    contact.phone = params[:phone]
    if contact.valid?
      @client = Twilio::REST::Client.new 'ACd67c324e3724e949de870352e6288273', 'f54c2d610b75cb050497e5560f165301'
      # Twilio fetches the call instructions from connect_url once answered.
      @call = @client.calls.create(:from => '+12242315467',
                                   :to => contact.phone,
                                   :url => connect_url)
      @msg = { :message => 'Phone call incoming!', :status => 'ok' }
    else
      @msg = { :message => contact.errors.full_messages, :status => 'ok' }
    end
    respond_to { |format| format.json { render :json => @msg } }
  end

  # TwiML instructions served to the connected call.
  def connect
    twiml = Twilio::TwiML::Response.new do |r|
      # r.Say 'If this were a real click to call implementation, you would be connected to an agent at this point.', :voice => 'alice'
      r.Dial '+18479452629'
    end
    render text: twiml.text
  end

  private

  # Verify the X-Twilio-Signature header so only Twilio can reach our
  # public-facing TwiML endpoints. Adapted from
  # http://twilio-ruby.readthedocs.org/en/latest/usage/validation.html
  # (see also https://www.twilio.com/docs/security).
  def authenticate_twilio_request
    twilio_signature = request.headers['HTTP_X_TWILIO_SIGNATURE']
    @validator = Twilio::Util::RequestValidator.new(@@twilio_token)
    # Twilio signs only the capitalized POST vars ("From", "To", ...);
    # strip every key that is already lowercase.
    post_vars = params.reject { |k, v| k.downcase == k }
    return if @validator.validate(request.url, post_vars, twilio_signature)
    # Invalid signature: answer with a Hangup document and halt the chain.
    render :xml => (Twilio::TwiML::Response.new { |r| r.Hangup }).text, :status => :unauthorized
    false
  end
end
|
# Twilio voice webhooks for the homeless-outreach survey: asks users whether
# they have shelter for the night, records answers and reads back shelters.
class TwilioController < ApplicationController
  include Webhookable

  after_filter :set_header
  # Twilio POSTs carry no CSRF token.
  skip_before_action :verify_authenticity_token

  # Digit gathered for "do you have a place to stay tonight?":
  # 1 = yes (record it, continue the survey), 2 = no (offer shelters),
  # anything else = re-ask.
  def respond_to_homeless_voice
    answer = params[:Digits].to_i
    response = Twilio::TwiML::Response.new do |r|
      case answer
      when 1
        ask_survey_question(r)
        user.update_question(:needs_shelter, true)
      when 2
        respond_with_shelters(r, user)
        user.update_question(:needs_shelter, false)
      else
        ask_for_shelter_again(r)
      end
    end
    render_twiml response
  end

  # Digit gathered for a random survey question; "1" means yes.
  def respond_to_question_voice
    answer = params[:Digits]
    user.update_question(params[:question], answer == "1")
    response = Twilio::TwiML::Response.new do |r|
      r.Say "Thanks for helping us keep our records up to date.", voice: "alice"
    end
    render_twiml response
  end

  # Entry point for the outbound call: greet the user and gather one digit.
  def voice
    response = Twilio::TwiML::Response.new do |r|
      r.Gather numDigits: 1, action: user_twilio_respond_to_homeless_voice_url(user) do |g|
        g.Say "Hey there, #{user.first_name}. Do you have a place to stay tonight? Press one for yes or two for no.", voice: "alice"
      end
    end
    render_twiml response
  end

  private

  # The user this call concerns, from the route's :user_id.
  def user
    @user ||= User.find(params[:user_id])
  end

  # Gather a yes/no answer to a randomly chosen survey question.
  def ask_survey_question(r)
    question = History.random_question
    r.Gather numDigits: 1, action: user_twilio_respond_to_question_voice_url(user, question) do |g|
      g.Say I18n.translate("simple_form.labels.defaults.#{question}") + "Press one for yes or two for no.", voice: "alice"
    end
  end

  # Re-prompt after an unrecognized digit.
  def ask_for_shelter_again(r)
    r.Gather numDigits: 1, action: user_twilio_respond_to_homeless_voice_url(user) do |g|
      g.Say "Sorry, #{user.first_name}. I don't understand your response. Do you have a place to stay tonight? Press one for yes or two for no.", voice: "alice"
    end
  end

  # Read out housing shelters near the user.
  def respond_with_shelters(r, user)
    # was: Coc.find_for_user(User.second) — a leftover test stub that ignored
    # the user argument; look up shelters for the actual caller.
    shelters = Coc.find_for_user(user).select { |h| h[:services].include?(:housing) }.map { |h| h[:name] }
    # was: `if shelters` on the result of join — a String is always truthy,
    # so the apology branch was unreachable; test for an empty list instead.
    if shelters.any?
      r.Say "You may be able to stay at #{shelters.join(', ')}", voice: "alice"
    else
      r.Say "We don't know of any shelters that will take you. We're very sorry, #{user.first_name}.", voice: "alice"
    end
  end

  # untested
  def send_shelter_text(shelters, user, text)
    # NOTE(review): the shelters/text arguments are not used yet — the SMS
    # body is still a placeholder.
    Twilio::REST::Client.new.messages.create(
      from: PHONE_NUMBER,
      to: user.call_number,
      body: "Blah blah blah shelters")
  end
end
Try sending a text message listing the shelters.
# Twilio voice webhooks for the homeless-outreach survey: asks users whether
# they have shelter tonight, records answers and reads/texts back shelters.
class TwilioController < ApplicationController
  include Webhookable

  after_filter :set_header
  # Twilio POSTs carry no CSRF token.
  skip_before_action :verify_authenticity_token

  # Digit gathered for "do you have a place to stay tonight?":
  # 1 = yes (record it, continue the survey), 2 = no (offer shelters),
  # anything else = re-ask.
  def respond_to_homeless_voice
    answer = params[:Digits].to_i
    response = Twilio::TwiML::Response.new do |r|
      case answer
      when 1
        user.update_question(:needs_shelter, true)
        ask_survey_question(r)
      when 2
        user.update_question(:needs_shelter, false)
        respond_with_shelters(r, user)
      else
        ask_for_shelter_again(r)
      end
    end
    render_twiml response
  end

  # Digit gathered for a random survey question; "1" means yes.
  def respond_to_question_voice
    answer = params[:Digits]
    user.update_question(params[:question], answer == "1")
    response = Twilio::TwiML::Response.new do |r|
      r.Say "Thanks for helping us keep our records up to date.", voice: "alice"
    end
    render_twiml response
  end

  # Entry point for the outbound call: greet the user and gather one digit.
  def voice
    response = Twilio::TwiML::Response.new do |r|
      r.Gather numDigits: 1, action: user_twilio_respond_to_homeless_voice_url(user) do |g|
        g.Say "Hey there, #{user.first_name}. Do you have a place to stay tonight? Press one for yes or two for no.", voice: "alice"
      end
    end
    render_twiml response
  end

  private

  # The user this call concerns, from the route's :user_id.
  def user
    @user ||= User.find(params[:user_id])
  end

  # Gather a yes/no answer to a randomly chosen survey question.
  def ask_survey_question(r)
    question = History.random_question
    r.Gather numDigits: 1, action: user_twilio_respond_to_question_voice_url(user, question) do |g|
      g.Say I18n.translate("simple_form.labels.defaults.#{question}") + "Press one for yes or two for no.", voice: "alice"
    end
  end

  # Re-prompt after an unrecognized digit.
  def ask_for_shelter_again(r)
    r.Gather numDigits: 1, action: user_twilio_respond_to_homeless_voice_url(user) do |g|
      g.Say "Sorry, #{user.first_name}. I don't understand your response. Do you have a place to stay tonight? Press one for yes or two for no.", voice: "alice"
    end
  end

  # Read out housing shelters near the user and follow up with an SMS.
  def respond_with_shelters(r, user)
    # was: Coc.find_for_user(User.second) — a leftover test stub that ignored
    # the user argument; look up shelters for the actual caller.
    shelters = Coc.find_for_user(user).select { |h| h[:services].include?(:housing) }.map { |h| h[:name] }
    if shelters.present?
      r.Say "You may be able to stay at #{shelters.join(', ')}", voice: "alice"
      send_shelter_text(shelters.join(', '), user)
    else
      r.Say "We don't know of any shelters that will take you. We're very sorry, #{user.first_name}.", voice: "alice"
    end
  end

  # untested
  def send_shelter_text(shelters, user)
    # NOTE(review): the shelters argument is not interpolated yet — the SMS
    # body is still a placeholder.
    Twilio::REST::Client.new.messages.create(
      from: PHONE_NUMBER,
      to: user.call_number,
      body: "Blah blah blah shelters")
  end
end
|
# coding: UTF-8
# Receives ajax file uploads (qq-uploader style: raw request body plus a
# ?qqfile=name parameter) and stashes them in a Tempfile, answering with the
# temp path as JSON.
class UploadController < ApplicationController
  ssl_required :create
  # The uploader posts a raw body, not a Rails form, so no CSRF token.
  skip_before_filter :verify_authenticity_token
  before_filter :login_required

  # POST /upload — 400 on a missing filename/body or on any write failure.
  def create
    head(400) and return if params[:qqfile].blank? || request.body.blank?
    begin
      file_path = Tempfile.new(params[:qqfile])
      File.open(file_path, 'w+') do |file|
        file.write(request.body.read.force_encoding('utf-8'))
      end
      render :json => {:file_uri => file_path, :success => true}
    rescue => e
      # was: a leftover `debugger` breakpoint here — it raises NameError in
      # environments without the debugger gem and must not ship.
      head(400) and return
    end
  end
end
Temporarily revert the upload refactor (back to writing into public/uploads).
# coding: UTF-8
# Receives ajax file uploads (raw request body plus a ?qqfile=name parameter)
# and stores them under public/uploads/<user id>/, answering with the public
# URI as JSON.
class UploadController < ApplicationController
  ssl_required :create
  # The uploader posts a raw body, not a Rails form, so no CSRF token.
  skip_before_filter :verify_authenticity_token
  before_filter :login_required

  # POST /upload — 400 on a missing filename or body.
  def create
    head(400) and return if params[:qqfile].blank? || request.body.blank?
    # SECURITY: the filename is user-controlled; strip any directory parts so
    # a name like "../../config/app.rb" cannot escape the per-user directory.
    filename = File.basename(params[:qqfile])
    upload_path = Rails.root.join('public', 'uploads', current_user.id.to_s)
    file_path = upload_path.join(filename)
    FileUtils.mkdir_p(upload_path) unless File.directory?(upload_path)
    File.open(file_path, 'w+') do |file|
      file.write(request.body.read.force_encoding('utf-8'))
    end
    render :json => {:file_uri => "/uploads/#{current_user.id}/" + filename, :success => true}
    # Previous Tempfile-based implementation, temporarily reverted:
    # begin
    # file_path = Tempfile.new(params[:qqfile])
    # File.open(file_path, 'w+') do |file|
    # file.write(request.body.read.force_encoding('utf-8'))
    # end
    # render :json => {:file_uri => file_path, :success => true}
    # rescue => e
    # debugger
    # head(400) and return
    # end
  end
end
|
class VideosController < ApplicationController
  # @video must be loaded before access_control runs so the ACL can reference it.
  before_filter :find_video, :only => [:show, :edit, :update, :destroy]

  # Access-control list DSL provided by the acl9 plugin.
  access_control do
    allow all, :to => [:index, :show, :download]
    allow :admin
    allow logged_in, :to => [:new, :create]
    allow :creator, :editor, :of => :video, :to => [:edit, :update] # :video refers to @video
    # allow logged_in, :except => :destroy
    # allow anonymous, :to => [:index, :show]
  end

  # Skipped so the meishi site can post video uploads without a CSRF token.
  skip_before_filter :verify_authenticity_token, :only =>[:create]

  def index
    @videos = Video.published.paginate :page => params[:page], :order => 'created_at DESC', :per_page => 12
  end

  def new
    flash[:notice] = "上传视频文件不能超过#{CONFIG['max_upload_file_size']}MB"
    @video = Video.new
    render('/meishi/videos/new_iframe', :layout => false) if params[:iframe] == "true"
    render('/meishi/videos/share_dv_iframe', :layout => false) if params[:iframe] == "share_dv"
  end

  def create
    @video = Video.new(params[:video])
    @video.user = @current_user
    # Flash uploads arrive with mime type application/octet-stream, so the
    # real content type is looked up from the original filename instead.
    # @video.asset_content_type = MIME::Types.type_for(@video.asset.original_filename).to_s
    @video.asset_content_type = File.mime_type?(@video.asset.original_filename)
    if @video.save
      create_meishi_tv if params[:tv] # writes the meishi Tv model directly
      if request.env['HTTP_USER_AGENT'] =~ /^(Adobe|Shockwave) Flash/
        # head(:ok, :id => @video.id) and return
        render :text => "id=#{@video.id} title=#{@video.title} desc=#{@video.description}"
      else
        # @video.convert
        flash[:notice] = '视频文件已成功上传'
        redirect_to @video
      end
    else
      render :action => 'new'
    end
  end

  def show
    @reply = VideoReply.new
  end

  # Downloads the file, sent directly by the web server via X-Sendfile.
  # TODO add a download counter here
  # TODO download under the original filename
  # SEND_FILE_METHOD = 'default' # configure web server
  # SEND_FILE_METHOD = 'nginx'   # nginx requires matching X-Sendfile config
  SEND_FILE_METHOD = CONFIG['web_server']
  def download
    head(:not_found) and return if (video = Video.find_by_id(params[:id])).nil?
    head(:forbidden) and return unless video.downloadable?(current_user)
    path = video.asset.path(params[:style])
    head(:bad_request) and return unless File.exist?(path) && params[:format].to_s == File.extname(path).gsub(/^\.+/, '')
    send_file_options = { :type => File.mime_type?(path) }
    case SEND_FILE_METHOD
    when 'apache' then send_file_options[:x_sendfile] = true
    when 'nginx' then head(:x_accel_redirect => path.gsub(Rails.root, ''), :content_type => send_file_options[:type]) and return
    end
    send_file(path, send_file_options)
  end

  private

  def find_video
    @video = Video.find(params[:id])
  end

  # Writes the meishi Tv model directly from the uploaded video.
  def create_meishi_tv
    tv = Tv.new
    tv.name = @video.title
    tv.intro = @video.description
    tv.state = @video.visibility
    tv.flv_url = @video.asset.url
    # Link the Tv record back to its source video; omitting this left the
    # records unrelated (the "relation bug" fixed in the later revision).
    tv.video_id = @video.id
    tv.dv_type = 2 # video type created by shadowgraph. Important! meishi builds the video URL from this type.
    tv.is_published = 0
    tv.user_id = params[:tv][:user_id]
    tv.article_category_id = params[:tv][:cat_id]
    tv.save
  end
end
Fix relation bug: set video_id on the Tv record so meishi can link it back to the source video
class VideosController < ApplicationController
  # @video must be loaded before access_control runs so the ACL can reference it.
  before_filter :find_video, :only => [:show, :edit, :update, :destroy]

  # Access-control list DSL provided by the acl9 plugin.
  access_control do
    allow all, :to => [:index, :show, :download]
    allow :admin
    allow logged_in, :to => [:new, :create]
    allow :creator, :editor, :of => :video, :to => [:edit, :update] # :video refers to @video
    # allow logged_in, :except => :destroy
    # allow anonymous, :to => [:index, :show]
  end

  # Skipped so the meishi site can post video uploads without a CSRF token.
  skip_before_filter :verify_authenticity_token, :only =>[:create]

  def index
    @videos = Video.published.paginate :page => params[:page], :order => 'created_at DESC', :per_page => 12
  end

  def new
    flash[:notice] = "上传视频文件不能超过#{CONFIG['max_upload_file_size']}MB"
    @video = Video.new
    render('/meishi/videos/new_iframe', :layout => false) if params[:iframe] == "true"
    render('/meishi/videos/share_dv_iframe', :layout => false) if params[:iframe] == "share_dv"
  end

  def create
    @video = Video.new(params[:video])
    @video.user = @current_user
    # Flash uploads arrive with mime type application/octet-stream, so the
    # real content type is looked up from the original filename instead.
    # @video.asset_content_type = MIME::Types.type_for(@video.asset.original_filename).to_s
    @video.asset_content_type = File.mime_type?(@video.asset.original_filename)
    if @video.save
      create_meishi_tv if params[:tv] # writes the meishi Tv model directly
      if request.env['HTTP_USER_AGENT'] =~ /^(Adobe|Shockwave) Flash/
        # head(:ok, :id => @video.id) and return
        render :text => "id=#{@video.id} title=#{@video.title} desc=#{@video.description}"
      else
        # @video.convert
        flash[:notice] = '视频文件已成功上传'
        redirect_to @video
      end
    else
      render :action => 'new'
    end
  end

  def show
    @reply = VideoReply.new
  end

  # Downloads the file, sent directly by the web server via X-Sendfile.
  # TODO add a download counter here
  # TODO download under the original filename
  # SEND_FILE_METHOD = 'default' # configure web server
  # SEND_FILE_METHOD = 'nginx'   # nginx requires matching X-Sendfile config
  SEND_FILE_METHOD = CONFIG['web_server']
  def download
    head(:not_found) and return if (video = Video.find_by_id(params[:id])).nil?
    head(:forbidden) and return unless video.downloadable?(current_user)
    path = video.asset.path(params[:style])
    head(:bad_request) and return unless File.exist?(path) && params[:format].to_s == File.extname(path).gsub(/^\.+/, '')
    send_file_options = { :type => File.mime_type?(path) }
    case SEND_FILE_METHOD
    when 'apache' then send_file_options[:x_sendfile] = true
    when 'nginx' then head(:x_accel_redirect => path.gsub(Rails.root, ''), :content_type => send_file_options[:type]) and return
    end
    send_file(path, send_file_options)
  end

  private

  def find_video
    @video = Video.find(params[:id])
  end

  # Writes the meishi Tv model directly from the uploaded video.
  def create_meishi_tv
    tv = Tv.new
    tv.name = @video.title
    tv.intro = @video.description
    tv.state = @video.visibility
    tv.flv_url = @video.asset.url
    # Links the Tv record back to its source video.
    tv.video_id = @video.id
    tv.dv_type = 2 # video type created by shadowgraph. Important! meishi builds the video URL from this type.
    tv.is_published = 0
    tv.user_id = params[:tv][:user_id]
    tv.article_category_id = params[:tv][:cat_id]
    tv.save
  end
end
|
module EasyAPP
  # Date formatting helpers.
  module DatesHelper
    # Localizes +date+ via the I18n +l+ helper, returning '' for nil or
    # empty values. The previous check (date != '') let nil slip through
    # to l(nil), which raises.
    def ll(date)
      (date.nil? || date == '') ? '' : l(date)
    end
  end
end
Allow to pass a hash of options to *ll* helper method
module EasyAPP
  # Date formatting helpers.
  module DatesHelper
    # Localizes +date+ via the I18n +l+ helper, forwarding any extra
    # localization options; blank dates yield ''.
    def ll(date, opts = {})
      return '' if date.blank?

      l(date, opts)
    end
  end
end
|
# View helpers for formatting values for display: text, numbers, currency,
# dates/times, booleans, addresses, labels, and simple HTML structures.
#
# NOTE(review): many helpers interpolate raw values into markup and mark the
# result html_safe without escaping — confirm callers only pass trusted values.
module TransamFormatHelper

  # Include the fiscal year mixin
  include FiscalYear

  # Formats text as HTML using simple_format
  def format_as_text(val, sanitize=false)
    simple_format(val, {}, :sanitize => sanitize)
  end

  # Formats a user name and provides message link and optional messaging options
  # available via the options hash
  def format_as_message_link(user, options = {})
    html = ''
    unless user.blank?
      options[:to_user] = user
      options[:subject] = options[:subject] || ''
      options[:body] = options[:body] || ''
      message_url = new_user_message_path(current_user, options)
      # The <a> opened here is closed after the envelope icon below.
      html = "<a href='#{message_url}'>#{user.email}"
      html << ' '
      html << "<i class = 'fa fa-envelope'></i>"
      html << "</a>"
    end
    html.html_safe
  end

  # Formats a user name and provides an optional (defaulted) message link and
  # messaging options
  def format_as_user_link(user, options = {})
    html = ''
    unless user.blank?
      options[:to_user] = user
      options[:subject] = options[:subject] || ''
      options[:body] = options[:body] || ''
      user_url = user_path(user)
      html = "<a href='#{user_url}'>#{user}</a>"
      from_user = options[:from_user]
      # Only render the envelope/message link when a sender is known.
      if from_user.present?
        message_url = new_user_message_path(from_user, options)
        html << ' '
        html << "<span class = 'message-link'>"
        html << "<a href='#{message_url}'>"
        html << "<i class = 'fa fa-envelope'></i>"
        html << "</a>"
        html << "</span>"
      end
    end
    html.html_safe
  end

  # Formats a quantity as an integer followed by a unit type
  def format_as_quantity(count, unit_type = 'unit')
    unless unit_type.blank?
      "#{format_as_integer(count)} #{unit_type}"
    else
      "#{count}"
    end
  end

  # formats an assets list of asset groups with remove option
  def format_asset_groups(asset, style = 'info')
    html = ""
    asset.asset_groups.each do |grp|
      html << "<span class='label label-#{style}'>"
      html << grp.code
      html << "<span data-role='remove' data-action-path='#{remove_from_group_inventory_path(asset, :asset_group => grp)}'></span>"
      html << "</span>"
    end
    html.html_safe
  end

  # formats a collection of objects as labels/tags. By default labels are displayed
  # using label-info but can be controlled using the optional style param. Label text
  # is generated using to_s unless the object has a 'code' method
  def format_as_labels(coll, style = 'info')
    html = ''
    coll.each do |e|
      if e.respond_to? :code
        txt = e.code
      else
        txt = e.to_s
      end
      html << format_as_label(txt, style)
    end
    html.html_safe
  end

  # formats an element as a label. By default labels are displayed
  # using label-info but can be controlled using the optional style param
  def format_as_label(elem, style = 'info')
    html = "<span class='label label-#{style}'>"
    html << elem.to_s
    html << "</span>"
    html.html_safe
  end

  # formats a year value as a fiscal year string 'FY XX-YY'
  def format_as_fiscal_year(val)
    fiscal_year(val) unless val.nil?
  end

  # formats a URL as a link
  def format_as_url(url, target = '_blank')
    link_to(url, url, :target => target)
  end

  # if no precision is set this truncates any decimals and returns the number as currency
  def format_as_currency(val, precision = 0)
    val ||= 0
    if precision == 0
      # Offsetting by 0.5 before to_i rounds half away from zero.
      if val < 0
        val = val - 0.5
      else
        val = val + 0.5
      end
      number_to_currency(val.to_i, :precision => 0)
    else
      number_to_currency(val, :precision => precision)
    end
  end

  # if the value is a number it is formatted as a decimal or integer
  # otherwise we assume it is a string and is returned
  def format_as_general(val, precision = 2)
    begin
      # Float() raises for non-numeric strings, falling through to rescue.
      Float(val)
      number_with_precision(val, :precision => precision, :delimiter => ",")
    rescue
      val
    end
  end

  # truncates any decimals and returns the number with thousands delimiters
  def format_as_integer(val)
    format_as_decimal(val, 0)
  end

  # returns a number as a decimal
  def format_as_decimal(val, precision = 2)
    number_with_precision(val, :precision => precision, :delimiter => ",")
  end

  # returns a number as a percentage
  def format_as_percentage(val, precision = 0)
    "#{number_with_precision(val, :precision => precision)}%"
  end

  # returns a number formatted as a phone number
  def format_as_phone_number(val, area_code = true)
    number_to_phone(val, :area_code => area_code)
  end

  # returns a collection as a formatted list
  def format_as_list(coll)
    html = "<ul class='list-unstyled'>"
    coll.each do |e|
      html << "<li>"
      html << e.to_s
      html << "</li>"
    end
    html << "</ul>"
    html.html_safe
  end

  # returns a collection as a formatted table without headers
  def format_as_table_without_headers(data, number_of_columns = 5, cell_padding_in_px = '6px')
    html = "<table class='table-unstyled'>"
    counter = 0
    data.each do |datum|
      if counter == 0
        html << '<tr>'
      end
      html << "<td style='padding:#{cell_padding_in_px};'>"
      html << datum.to_s
      html << "</td>"
      counter += 1
      # Close the row when full, or when this is the final datum.
      if ( (counter >= number_of_columns) || (datum.equal? data.last))
        html << '</tr>'
        counter = 0
      end
    end
    html << "</table>"
    html.html_safe
  end

  # formats a boolean field using a checkbox if the value is true
  def format_as_checkbox(val, text_class='text-default')
    if val
      return "<i class='fa fa-check-square-o #{text_class}'></i>".html_safe
    else
      return "<i class='fa fa-square-o #{text_class}'></i>".html_safe
    end
  end

  # formats a boolean field using a flag if the value is true
  def format_as_boolean(val, icon="fa-check", text_class='text-default')
    if val
      return "<i class='fa #{icon} #{text_class}'></i>".html_safe
    else
      # Hidden (not omitted) so column alignment is preserved.
      return "<i class='fa #{icon} #{text_class}' style = 'visibility: hidden;'></i>".html_safe
    end
  end

  # formats a boolean field as Yes or No
  def format_as_yes_no(val)
    if val
      return "Yes"
    else
      return "No"
    end
  end

  # Formats a date as a day date eg Mon 24 Oct
  def format_as_day_date(date)
    date.strftime("%a %d %b") unless date.nil?
  end

  # formats a date/time as a distance in words. e.g. 6 days ago
  def format_as_date_time_distance(datetime)
    dist = distance_of_time_in_words_to_now(datetime)
    if Time.current > datetime
      dist = dist + " ago"
    end
    return dist
  end

  # formats a date/time, where use_slashes indicates eg 10/24/2014 instead of 24 Oct 2014
  def format_as_date_time(datetime, use_slashes=true)
    if use_slashes
      datetime.strftime("%I:%M %p %m/%d/%Y") unless datetime.nil?
    else
      datetime.strftime("%I:%M %p %b %d %Y") unless datetime.nil?
    end
  end

  # formats a date, where use_slashes indicates eg 10/24/2014 instead of 24 Oct 2014
  # Year 1 is treated as a sentinel for "no date" and renders nothing.
  def format_as_date(date, use_slashes=true)
    if use_slashes
      date.strftime("%m/%d/%Y") unless (date.nil? || date.year == 1)
    else
      date.strftime("%b %d %Y") unless (date.nil? || date.year == 1)
    end
  end

  # formats a time as eg " 8:00 am" or "11:00 pm"
  def format_as_time(time)
    return time.strftime("%l:%M %p") unless time.nil?
  end

  # formats a time as eg "08:00" or "23:00"
  def format_as_military_time(time)
    return time.strftime("%H:%M") unless time.nil?
  end

  # formats a number of seconds as the corresponding days, hours, minutes, and optional seconds
  def format_as_time_difference(s, show_seconds = false)
    return if s.blank?
    # Successive divmods split seconds into [days, hours, minutes, seconds].
    dhms = [60,60,24].reduce([s]) { |m,o| m.unshift(m.shift.divmod(o)).flatten }
    val = []
    val << "#{dhms[0]} days" unless dhms[0] == 0
    val << "#{dhms[1]}h" unless dhms[1] == 0
    val << "#{dhms[2]}m"
    val << "#{dhms[3]}s" if show_seconds
    val.join(' ')
  end

  # formats an object containing US address fields as html
  def format_as_address(m)
    full_address = []
    full_address << m.address1 unless m.address1.blank?
    full_address << m.address2 unless m.address2.blank?
    address3 = []
    address3 << m.city unless m.city.blank?
    address3 << m.state unless m.state.blank?
    address3 << m.zip unless m.zip.blank?
    address3 = address3.compact.join(', ')
    full_address << address3
    full_address = full_address.compact.join('<br/>')
    return full_address.html_safe
  end

  # formats an unconstrained string as a valid HTML id
  def format_as_id(val)
    val.parameterize.underscore
  end

  # Formats an object attribute using format_field
  def format_attribute(attribute, object)
    format_field(attribute.to_s.titleize, object.send(attribute))
  end

  # formats a label/value combination, providing optional popover support
  def format_field(label, value, popover_text=nil, popover_iconcls=nil, popover_label=nil)
    html = "<div class='row control-group'>"
    html << "<div class='col-xs-5 display-label'>"
    html << label
    html << "</div>"
    html << "<div class='col-xs-7 display-value'>"
    html << value.to_s unless value.nil?
    unless popover_text.nil?
      popover_iconcls = 'fa fa-info-circle info-icon' unless popover_iconcls
      popover_label = label unless popover_label
      html << "<i class='#{popover_iconcls} info-icon' data-toggle='popover' data-trigger='hover' title='#{popover_label}' data-placement='right' data-content='#{popover_text}'></i>"
    end
    html << "</div>"
    html << "</div>"
    return html.html_safe
  end

  # formats a value using the indicated format
  def format_using_format(val, format)
    case format
    when :currencyM
      number_to_currency(val, format: '%u%nM', negative_format: '(%u%nM)')
    when :currency
      format_as_currency(val)
    when :fiscal_year
      format_as_fiscal_year(val.to_i) unless val.nil?
    when :integer
      format_as_integer(val)
    when :decimal
      format_as_decimal(val)
    when :percent
      format_as_percentage(val)
    when :string
      val
    when :checkbox
      format_as_checkbox(val)
    when :boolean
      # Check for 1/0 val as well as true/false given direct query clause
      format_as_boolean(val == 0 ? false : val)
    when :list
      format_as_list(val)
    else
      # Note, current implementation uses rescue and is thus potentially inefficient.
      # Consider alternatives.
      format_as_general(val)
    end
  end
end
Revert format_attribute. [TTPLAT-10]
Unneeded if we go with X-editable approach.
# View helpers for formatting values for display: text, numbers, currency,
# dates/times, booleans, addresses, labels, and simple HTML structures.
#
# NOTE(review): many helpers interpolate raw values into markup and mark the
# result html_safe without escaping — confirm callers only pass trusted values.
module TransamFormatHelper

  # Include the fiscal year mixin
  include FiscalYear

  # Formats text as HTML using simple_format
  def format_as_text(val, sanitize=false)
    simple_format(val, {}, :sanitize => sanitize)
  end

  # Formats a user name and provides message link and optional messaging options
  # available via the options hash
  def format_as_message_link(user, options = {})
    html = ''
    unless user.blank?
      options[:to_user] = user
      options[:subject] = options[:subject] || ''
      options[:body] = options[:body] || ''
      message_url = new_user_message_path(current_user, options)
      # The <a> opened here is closed after the envelope icon below.
      html = "<a href='#{message_url}'>#{user.email}"
      html << ' '
      html << "<i class = 'fa fa-envelope'></i>"
      html << "</a>"
    end
    html.html_safe
  end

  # Formats a user name and provides an optional (defaulted) message link and
  # messaging options
  def format_as_user_link(user, options = {})
    html = ''
    unless user.blank?
      options[:to_user] = user
      options[:subject] = options[:subject] || ''
      options[:body] = options[:body] || ''
      user_url = user_path(user)
      html = "<a href='#{user_url}'>#{user}</a>"
      from_user = options[:from_user]
      # Only render the envelope/message link when a sender is known.
      if from_user.present?
        message_url = new_user_message_path(from_user, options)
        html << ' '
        html << "<span class = 'message-link'>"
        html << "<a href='#{message_url}'>"
        html << "<i class = 'fa fa-envelope'></i>"
        html << "</a>"
        html << "</span>"
      end
    end
    html.html_safe
  end

  # Formats a quantity as an integer followed by a unit type
  def format_as_quantity(count, unit_type = 'unit')
    unless unit_type.blank?
      "#{format_as_integer(count)} #{unit_type}"
    else
      "#{count}"
    end
  end

  # formats an assets list of asset groups with remove option
  def format_asset_groups(asset, style = 'info')
    html = ""
    asset.asset_groups.each do |grp|
      html << "<span class='label label-#{style}'>"
      html << grp.code
      html << "<span data-role='remove' data-action-path='#{remove_from_group_inventory_path(asset, :asset_group => grp)}'></span>"
      html << "</span>"
    end
    html.html_safe
  end

  # formats a collection of objects as labels/tags. By default labels are displayed
  # using label-info but can be controlled using the optional style param. Label text
  # is generated using to_s unless the object has a 'code' method
  def format_as_labels(coll, style = 'info')
    html = ''
    coll.each do |e|
      if e.respond_to? :code
        txt = e.code
      else
        txt = e.to_s
      end
      html << format_as_label(txt, style)
    end
    html.html_safe
  end

  # formats an element as a label. By default labels are displayed
  # using label-info but can be controlled using the optional style param
  def format_as_label(elem, style = 'info')
    html = "<span class='label label-#{style}'>"
    html << elem.to_s
    html << "</span>"
    html.html_safe
  end

  # formats a year value as a fiscal year string 'FY XX-YY'
  def format_as_fiscal_year(val)
    fiscal_year(val) unless val.nil?
  end

  # formats a URL as a link
  def format_as_url(url, target = '_blank')
    link_to(url, url, :target => target)
  end

  # if no precision is set this truncates any decimals and returns the number as currency
  def format_as_currency(val, precision = 0)
    val ||= 0
    if precision == 0
      # Offsetting by 0.5 before to_i rounds half away from zero.
      if val < 0
        val = val - 0.5
      else
        val = val + 0.5
      end
      number_to_currency(val.to_i, :precision => 0)
    else
      number_to_currency(val, :precision => precision)
    end
  end

  # if the value is a number it is formatted as a decimal or integer
  # otherwise we assume it is a string and is returned
  def format_as_general(val, precision = 2)
    begin
      # Float() raises for non-numeric strings, falling through to rescue.
      Float(val)
      number_with_precision(val, :precision => precision, :delimiter => ",")
    rescue
      val
    end
  end

  # truncates any decimals and returns the number with thousands delimiters
  def format_as_integer(val)
    format_as_decimal(val, 0)
  end

  # returns a number as a decimal
  def format_as_decimal(val, precision = 2)
    number_with_precision(val, :precision => precision, :delimiter => ",")
  end

  # returns a number as a percentage
  def format_as_percentage(val, precision = 0)
    "#{number_with_precision(val, :precision => precision)}%"
  end

  # returns a number formatted as a phone number
  def format_as_phone_number(val, area_code = true)
    number_to_phone(val, :area_code => area_code)
  end

  # returns a collection as a formatted list
  def format_as_list(coll)
    html = "<ul class='list-unstyled'>"
    coll.each do |e|
      html << "<li>"
      html << e.to_s
      html << "</li>"
    end
    html << "</ul>"
    html.html_safe
  end

  # returns a collection as a formatted table without headers
  def format_as_table_without_headers(data, number_of_columns = 5, cell_padding_in_px = '6px')
    html = "<table class='table-unstyled'>"
    counter = 0
    data.each do |datum|
      if counter == 0
        html << '<tr>'
      end
      html << "<td style='padding:#{cell_padding_in_px};'>"
      html << datum.to_s
      html << "</td>"
      counter += 1
      # Close the row when full, or when this is the final datum.
      if ( (counter >= number_of_columns) || (datum.equal? data.last))
        html << '</tr>'
        counter = 0
      end
    end
    html << "</table>"
    html.html_safe
  end

  # formats a boolean field using a checkbox if the value is true
  def format_as_checkbox(val, text_class='text-default')
    if val
      return "<i class='fa fa-check-square-o #{text_class}'></i>".html_safe
    else
      return "<i class='fa fa-square-o #{text_class}'></i>".html_safe
    end
  end

  # formats a boolean field using a flag if the value is true
  def format_as_boolean(val, icon="fa-check", text_class='text-default')
    if val
      return "<i class='fa #{icon} #{text_class}'></i>".html_safe
    else
      # Hidden (not omitted) so column alignment is preserved.
      return "<i class='fa #{icon} #{text_class}' style = 'visibility: hidden;'></i>".html_safe
    end
  end

  # formats a boolean field as Yes or No
  def format_as_yes_no(val)
    if val
      return "Yes"
    else
      return "No"
    end
  end

  # Formats a date as a day date eg Mon 24 Oct
  def format_as_day_date(date)
    date.strftime("%a %d %b") unless date.nil?
  end

  # formats a date/time as a distance in words. e.g. 6 days ago
  def format_as_date_time_distance(datetime)
    dist = distance_of_time_in_words_to_now(datetime)
    if Time.current > datetime
      dist = dist + " ago"
    end
    return dist
  end

  # formats a date/time, where use_slashes indicates eg 10/24/2014 instead of 24 Oct 2014
  def format_as_date_time(datetime, use_slashes=true)
    if use_slashes
      datetime.strftime("%I:%M %p %m/%d/%Y") unless datetime.nil?
    else
      datetime.strftime("%I:%M %p %b %d %Y") unless datetime.nil?
    end
  end

  # formats a date, where use_slashes indicates eg 10/24/2014 instead of 24 Oct 2014
  # Year 1 is treated as a sentinel for "no date" and renders nothing.
  def format_as_date(date, use_slashes=true)
    if use_slashes
      date.strftime("%m/%d/%Y") unless (date.nil? || date.year == 1)
    else
      date.strftime("%b %d %Y") unless (date.nil? || date.year == 1)
    end
  end

  # formats a time as eg " 8:00 am" or "11:00 pm"
  def format_as_time(time)
    return time.strftime("%l:%M %p") unless time.nil?
  end

  # formats a time as eg "08:00" or "23:00"
  def format_as_military_time(time)
    return time.strftime("%H:%M") unless time.nil?
  end

  # formats a number of seconds as the corresponding days, hours, minutes, and optional seconds
  def format_as_time_difference(s, show_seconds = false)
    return if s.blank?
    # Successive divmods split seconds into [days, hours, minutes, seconds].
    dhms = [60,60,24].reduce([s]) { |m,o| m.unshift(m.shift.divmod(o)).flatten }
    val = []
    val << "#{dhms[0]} days" unless dhms[0] == 0
    val << "#{dhms[1]}h" unless dhms[1] == 0
    val << "#{dhms[2]}m"
    val << "#{dhms[3]}s" if show_seconds
    val.join(' ')
  end

  # formats an object containing US address fields as html
  def format_as_address(m)
    full_address = []
    full_address << m.address1 unless m.address1.blank?
    full_address << m.address2 unless m.address2.blank?
    address3 = []
    address3 << m.city unless m.city.blank?
    address3 << m.state unless m.state.blank?
    address3 << m.zip unless m.zip.blank?
    address3 = address3.compact.join(', ')
    full_address << address3
    full_address = full_address.compact.join('<br/>')
    return full_address.html_safe
  end

  # formats an unconstrained string as a valid HTML id
  def format_as_id(val)
    val.parameterize.underscore
  end

  # formats a label/value combination, providing optional popover support
  def format_field(label, value, popover_text=nil, popover_iconcls=nil, popover_label=nil)
    html = "<div class='row control-group'>"
    html << "<div class='col-xs-5 display-label'>"
    html << label
    html << "</div>"
    html << "<div class='col-xs-7 display-value'>"
    html << value.to_s unless value.nil?
    unless popover_text.nil?
      popover_iconcls = 'fa fa-info-circle info-icon' unless popover_iconcls
      popover_label = label unless popover_label
      html << "<i class='#{popover_iconcls} info-icon' data-toggle='popover' data-trigger='hover' title='#{popover_label}' data-placement='right' data-content='#{popover_text}'></i>"
    end
    html << "</div>"
    html << "</div>"
    return html.html_safe
  end

  # formats a value using the indicated format
  def format_using_format(val, format)
    case format
    when :currencyM
      number_to_currency(val, format: '%u%nM', negative_format: '(%u%nM)')
    when :currency
      format_as_currency(val)
    when :fiscal_year
      format_as_fiscal_year(val.to_i) unless val.nil?
    when :integer
      format_as_integer(val)
    when :decimal
      format_as_decimal(val)
    when :percent
      format_as_percentage(val)
    when :string
      val
    when :checkbox
      format_as_checkbox(val)
    when :boolean
      # Check for 1/0 val as well as true/false given direct query clause
      format_as_boolean(val == 0 ? false : val)
    when :list
      format_as_list(val)
    else
      # Note, current implementation uses rescue and is thus potentially inefficient.
      # Consider alternatives.
      format_as_general(val)
    end
  end
end
|
# Retrieves agent statuses and calls AgentStatusUpdater to update them where necessary
class TrackAgentStatusesJob
  include Now

  # Fetches every agent's online state, records lunch breaks in the cache,
  # and hands the resulting AgentStatus list to AgentStatusUpdater.
  def perform
    current = BackendService.new.get_agent_online_state.map do |data|
      # 'Ruokatunti' is presumably the lunch-break status (Finnish) — the
      # cached 'lunched' list tracks which agents have taken it.
      if data[:status] == 'Ruokatunti'
        agent_id = data[:agent_id].to_i
        # NOTE(review): Cache#fetch with force: true and no block does not
        # return the cached value (modern Rails raises ArgumentError here) —
        # confirm intent; Rails.cache.read/write may be what was meant.
        luncheds = Rails.cache.fetch('lunched', force: true)
        luncheds = [] if luncheds.nil?
        luncheds.push(agent_id)
        luncheds = luncheds.uniq
        Rails.cache.fetch('lunched', force: true) { luncheds }
      end
      AgentStatus.new(agent: Agent.find_or_create(data[:agent_id], data[:name], data[:team]),
                      status: data[:status],
                      time_in_status: data[:time_in_status])
    end
    log = JobLog.new('TrackAgentStatusesJob')
    AgentStatusUpdater.new(now, log.last_success).update_statuses(current)
  end

  # Delayed-job configuration: hard runtime cap for one run.
  def max_run_time
    5.seconds
  end

  # Never retried automatically.
  def max_attempts
    1
  end

  def success(*)
    JobLog.new('TrackAgentStatusesJob').log_success
  end

  def failure(*)
    JobLog.new('TrackAgentStatusesJob').log_failure
  end

  def queue_name
    'statuses'
  end
end
Refactor lunch-break tracking into its own method (agent_has_eaten) in TrackAgentStatusesJob
# Retrieves agent statuses and calls AgentStatusUpdater to update them where necessary
class TrackAgentStatusesJob
  include Now

  # Fetches every agent's online state, records lunch breaks, and hands the
  # resulting AgentStatus list to AgentStatusUpdater.
  def perform
    current = BackendService.new.get_agent_online_state.map do |data|
      # 'Ruokatunti' is presumably the lunch-break status (Finnish).
      agent_has_eaten(data) if data[:status] == 'Ruokatunti'
      AgentStatus.new(agent: Agent.find_or_create(data[:agent_id], data[:name], data[:team]),
                      status: data[:status],
                      time_in_status: data[:time_in_status])
    end
    log = JobLog.new('TrackAgentStatusesJob')
    AgentStatusUpdater.new(now, log.last_success).update_statuses(current)
  end

  # Adds the agent's id to the cached 'lunched' list (deduplicated).
  #
  # Fixes three defects in the previous version:
  # * Array#contains? does not exist in Ruby (NoMethodError) — use include?.
  # * `return if luncheds.nil?` meant no agent was ever recorded while the
  #   cache was empty; default to [] instead (as the pre-refactor code did).
  # * Cache#fetch(force: true) without a block cannot read the cached value;
  #   use plain read/write.
  def agent_has_eaten(data)
    agent_id = data[:agent_id].to_i
    luncheds = Rails.cache.read('lunched') || []
    return if luncheds.include?(agent_id)
    Rails.cache.write('lunched', luncheds + [agent_id])
  end

  # Delayed-job configuration: hard runtime cap for one run.
  def max_run_time
    5.seconds
  end

  # Never retried automatically.
  def max_attempts
    1
  end

  def success(*)
    JobLog.new('TrackAgentStatusesJob').log_success
  end

  def failure(*)
    JobLog.new('TrackAgentStatusesJob').log_failure
  end

  def queue_name
    'statuses'
  end
end
|
# frozen_string_literal: true

# Handles an incoming ActivityPub Move activity: points the origin account
# at its new home and kicks off follower migration.
class ActivityPub::Activity::Move < ActivityPub::Activity
  # While this flag is set, duplicate Move activities for the account are ignored.
  PROCESSING_COOLDOWN = 7.days.seconds

  def perform
    return if origin_account.uri != object_uri || processed?

    mark_as_processing!

    target_account = ActivityPub::FetchRemoteAccountService.new.call(target_uri)

    # The target must exist, be active, and explicitly acknowledge the origin
    # via alsoKnownAs, otherwise anyone could claim someone else's followers.
    if target_account.nil? || target_account.suspended? || !target_account.also_known_as.include?(origin_account.uri)
      unmark_as_processing!
      return
    end

    # In case for some reason we didn't have a redirect for the profile already, set it
    origin_account.update(moved_to_account: target_account)

    # Initiate a re-follow for each follower
    MoveWorker.perform_async(origin_account.id, target_account.id)
  rescue
    # Clear the cooldown flag on unexpected errors (e.g. a fetch timeout);
    # otherwise a retry within PROCESSING_COOLDOWN would skip this Move
    # entirely. Re-raise so the job is retried.
    unmark_as_processing!
    raise
  end

  private

  def origin_account
    @account
  end

  def target_uri
    value_or_id(@json['target'])
  end

  def processed?
    redis.exists?("move_in_progress:#{@account.id}")
  end

  def mark_as_processing!
    redis.setex("move_in_progress:#{@account.id}", PROCESSING_COOLDOWN, true)
  end

  def unmark_as_processing!
    redis.del("move_in_progress:#{@account.id}")
  end
end
Fix Move handler not being triggered when failing to fetch target (#15107)
When failing to fetch the target account, the ProcessingWorker fails
as expected, but since it hasn't cleared the `move_in_progress` flag,
the next attempt at processing skips the `Move` activity altogether.
This commit changes it to clear the flag when encountering any
unexpected error on fetching the target account. This is likely to
occur because of, e.g., a timeout, when many instances query the
same actor at the same time.
# frozen_string_literal: true

# Handles an incoming ActivityPub Move activity: points the origin account
# at its new home and kicks off follower migration.
class ActivityPub::Activity::Move < ActivityPub::Activity
  # While this flag is set, duplicate Move activities for the account are ignored.
  PROCESSING_COOLDOWN = 7.days.seconds

  def perform
    return if origin_account.uri != object_uri || processed?

    mark_as_processing!

    target_account = ActivityPub::FetchRemoteAccountService.new.call(target_uri)

    # The target must exist, be active, and explicitly acknowledge the origin
    # via alsoKnownAs, otherwise anyone could claim someone else's followers.
    if target_account.nil? || target_account.suspended? || !target_account.also_known_as.include?(origin_account.uri)
      unmark_as_processing!
      return
    end

    # In case for some reason we didn't have a redirect for the profile already, set it
    origin_account.update(moved_to_account: target_account)

    # Initiate a re-follow for each follower
    MoveWorker.perform_async(origin_account.id, target_account.id)
  rescue
    # Clear the cooldown flag on unexpected errors (e.g. a fetch timeout) so
    # a retry is not skipped, then re-raise so the job is retried.
    unmark_as_processing!
    raise
  end

  private

  def origin_account
    @account
  end

  def target_uri
    value_or_id(@json['target'])
  end

  def processed?
    redis.exists?("move_in_progress:#{@account.id}")
  end

  def mark_as_processing!
    redis.setex("move_in_progress:#{@account.id}", PROCESSING_COOLDOWN, true)
  end

  def unmark_as_processing!
    redis.del("move_in_progress:#{@account.id}")
  end
end
|
module AdvancedSearches
  # Advanced-search configuration for genes: which model to query and how
  # each searchable field maps to a query handler.
  class Gene
    include Base

    def initialize(params)
      @params = params
      @presentation_class = GeneWithStateParamsPresenter
    end

    def model_class
      ::Gene
    end

    private

    # Maps a search-field name to its query handler; unknown fields yield nil.
    def handler_for_field(field)
      default_handler = method(:default_handler).to_proc
      @handlers ||= {
        # A missing trailing comma after this entry was a syntax error.
        'entrez_id' => default_handler.curry['genes.entrez_id'],
        'name' => default_handler.curry['genes.name'],
        'official_name' => default_handler.curry['genes.official_name'],
        'description' => default_handler.curry['genes.description'],
        'clinical_description' => default_handler.curry['genes.clinical_description']
      }
      @handlers[field]
    end
  end
end
add gene aliases to advanced search
module AdvancedSearches
  # Advanced-search configuration for genes: which scope to query and how
  # each searchable field maps to a query handler.
  class Gene
    include Base

    def initialize(params)
      @params = params
      @presentation_class = GeneWithStateParamsPresenter
    end

    # Searches run against the gene index scope (alias joins available).
    def model_class
      ::Gene.index_scope
    end

    private

    # Maps a search-field name to its query handler; unknown fields yield nil.
    def handler_for_field(field)
      @handlers ||= begin
        handle = method(:default_handler).to_proc
        {
          'entrez_id'   => handle.curry['genes.entrez_id'],
          'name'        => handle.curry['genes.name'],
          'description' => handle.curry['genes.description'],
          'aliases'     => handle.curry['gene_aliases.name']
        }
      end
      @handlers[field]
    end
  end
end
|
# Reacts to build-attempt state changes: retries, flags timeouts, and
# schedules build-state updates.
class BuildAttemptObserver < ActiveRecord::Observer
  TIMEOUT_THRESHOLD = 40.minutes

  def after_save(record)
    if record.should_reattempt?
      record.build_part.rebuild!
    # elapsed_time can be nil (attempt not finished), which previously made
    # `>=` raise NoMethodError; guard before comparing.
    elsif record.state == :failed && record.elapsed_time && record.elapsed_time >= TIMEOUT_THRESHOLD
      BuildPartTimeOutMailer.send(record.build_part)
      BuildStateUpdateJob.enqueue(record.build_part.build_id)
    elsif record.state != :runnable && record.state != :running
      BuildStateUpdateJob.enqueue(record.build_part.build_id)
    end
  end
end
Fix the build: guard against nil elapsed_time in BuildAttemptObserver#after_save
# Reacts to build-attempt state changes: retries, flags timeouts, and
# schedules build-state updates.
class BuildAttemptObserver < ActiveRecord::Observer
  TIMEOUT_THRESHOLD = 40.minutes

  def after_save(record)
    # A reattempt supersedes every other reaction.
    return record.build_part.rebuild! if record.should_reattempt?

    # try(:>=) keeps a nil elapsed_time (unfinished attempt) from raising.
    timed_out = record.state == :failed && record.elapsed_time.try(:>=, TIMEOUT_THRESHOLD)
    BuildPartTimeOutMailer.send(record.build_part) if timed_out

    settled = ![:runnable, :running].include?(record.state)
    BuildStateUpdateJob.enqueue(record.build_part.build_id) if timed_out || settled
  end
end
|
class ChargebackRateDetail < ApplicationRecord
belongs_to :chargeback_rate
belongs_to :chargeable_field
belongs_to :detail_measure, :class_name => "ChargebackRateDetailMeasure", :foreign_key => :chargeback_rate_detail_measure_id
belongs_to :detail_currency, :class_name => "ChargebackRateDetailCurrency", :foreign_key => :chargeback_rate_detail_currency_id
has_many :chargeback_tiers, :dependent => :destroy, :autosave => true
default_scope { joins(:chargeable_field).merge(ChargeableField.order(:group => :asc, :description => :asc)) }
validates :chargeback_rate, :chargeable_field, :presence => true
validate :contiguous_tiers?
delegate :rate_type, :to => :chargeback_rate, :allow_nil => true
delegate :metric_column_key, :metric_key, :cost_keys, :rate_key, :to => :chargeable_field
FORM_ATTRIBUTES = %i(description per_time per_unit metric group source metric chargeable_field_id sub_metric).freeze
PER_TIME_TYPES = {
"hourly" => _("Hourly"),
"daily" => _("Daily"),
"weekly" => _("Weekly"),
"monthly" => _("Monthly"),
'yearly' => _('Yearly')
}.freeze
# Translates a stored unit name (e.g. 'gigabytes') to its showback display
# abbreviation (e.g. 'GiB'). Uses this detail's own per_unit unless an
# explicit unit is passed. Returns '' when the chargeable field has no
# detail measure; unknown unit names map to nil.
def showback_unit(p_per_unit = nil)
  return '' unless chargeable_field.detail_measure

  abbreviations = {
    'bytes'     => '',
    'kilobytes' => 'KiB',
    'megabytes' => 'MiB',
    'gigabytes' => 'GiB',
    'terabytes' => 'TiB',
    'hertz'     => '',
    'kilohertz' => 'KHz',
    'megahertz' => 'MHz',
    'gigahertz' => 'GHz',
    'teraherts' => 'THz',
    'bps'       => '',
    # NOTE(review): each *bps key maps to the next-larger unit
    # ('kbps' => 'Mbps') — confirm this shift is intentional.
    'kbps'      => 'Mbps',
    'mbps'      => 'Gbps',
    'gbps'      => 'Tbps'
  }
  abbreviations[p_per_unit || per_unit]
end
def populate_showback_rate(plan, rate_detail, entity)
group = rate_detail.chargeable_field.showback_measure
field, _, calculation = rate_detail.chargeable_field.showback_dimension
unit = rate_detail.showback_unit
showback_rate = ManageIQ::Showback::Rate.find_or_create_by(:entity => entity,
:group => group,
:field => field,
:price_plan => plan,
:calculation => calculation,
:concept => rate_detail.id)
showback_rate.tiers.destroy_all
rate_detail.chargeback_tiers.each do |tier|
showback_rate.tiers.build(:tier_start_value => tier.start,
:tier_end_value => tier.finish,
:variable_rate_per_time => rate_detail.per_time,
:variable_rate_per_unit => unit,
:fixed_rate_per_time => rate_detail.per_time,
:fixed_rate => Money.new(tier.fixed_rate * Money.default_currency.subunit_to_unit),
:variable_rate => Money.new(tier.variable_rate * Money.default_currency.subunit_to_unit))
end
showback_rate.save
end
def sub_metrics
if metric == 'derived_vm_allocated_disk_storage'
volume_types = CloudVolume.volume_types
unless volume_types.empty?
res = {}
res[_('All')] = ''
volume_types.each { |type| res[type.capitalize] = type }
res[_('Other - Unclassified')] = 'unclassified'
res
end
end
end
def sub_metric_human
sub_metric.present? ? sub_metric.capitalize : 'All'
end
def rate_values(consumption, options)
fixed_rate, variable_rate = find_rate(chargeable_field.measure(consumption, options, sub_metric))
hourly_fixed_rate = hourly(fixed_rate, consumption)
hourly_variable_rate = hourly(variable_rate, consumption)
"#{hourly_fixed_rate}/#{hourly_variable_rate}"
end
def charge(relevant_fields, consumption, options)
result = {}
if (relevant_fields & [metric_key]).present? || (relevant_fields & cost_keys).present?
metric_value, cost = metric_and_cost_by(consumption, options)
if !consumption.chargeback_fields_present && chargeable_field.fixed?
cost = 0
end
result[rate_key(sub_metric)] = rate_values(consumption, options)
result[metric_key(sub_metric)] = metric_value
cost_keys(sub_metric).each { |field| result[field] = cost }
end
result
end
# Set the rates according to the tiers
def find_rate(value)
@found_rates ||= {}
@found_rates[value] ||=
begin
fixed_rate = 0.0
variable_rate = 0.0
tier_found = chargeback_tiers.detect { |tier| tier.includes?(value * rate_adjustment) }
unless tier_found.nil?
fixed_rate = tier_found.fixed_rate
variable_rate = tier_found.variable_rate
end
[fixed_rate, variable_rate]
end
end
# Maps per_time units to the per-interval label used for display.
# Frozen: mutable constants can be modified in place by any caller.
PER_TIME_MAP = {
  :hourly  => "Hour",
  :daily   => "Day",
  :weekly  => "Week",
  :monthly => "Month",
  :yearly  => "Year"
}.freeze
def hourly_cost(value, consumption)
return 0.0 unless self.enabled?
(fixed_rate, variable_rate) = find_rate(value)
hourly_fixed_rate = hourly(fixed_rate, consumption)
hourly_variable_rate = hourly(variable_rate, consumption)
hourly_fixed_rate + rate_adjustment * value * hourly_variable_rate
end
def hourly(rate, consumption)
hourly_rate = case per_time
when "hourly" then rate
when "daily" then rate / 24
when "weekly" then rate / 24 / 7
when "monthly" then rate / consumption.hours_in_month
when "yearly" then rate / 24 / 365
else raise "rate time unit of '#{per_time}' not supported"
end
hourly_rate
end
def rate_adjustment
@rate_adjustment ||= chargeable_field.adjustment_to(per_unit)
end
def affects_report_fields(report_cols)
([metric_key].to_set & report_cols).present? || ((cost_keys.to_set & report_cols).present? && !gratis?)
end
def friendly_rate
(fixed_rate, variable_rate) = find_rate(0.0)
value = read_attribute(:friendly_rate)
return value unless value.nil?
if chargeable_field.fixed?
# Example: 10.00 Monthly
"#{fixed_rate + variable_rate} #{per_time.to_s.capitalize}"
else
s = ""
chargeback_tiers.each do |tier|
# Example: Daily @ .02 per MHz from 0.0 to Infinity
s += "#{per_time.to_s.capitalize} @ #{tier.fixed_rate} + "\
"#{tier.variable_rate} per #{per_unit_display} from #{tier.start} to #{tier.finish}\n"
end
s.chomp
end
end
def per_unit_display
measure = chargeable_field.detail_measure
measure.nil? ? per_unit.to_s.capitalize : measure.measures.key(per_unit)
end
# New method created in order to show the rates in a easier to understand way
def show_rates
hr = ChargebackRateDetail::PER_TIME_MAP[per_time.to_sym]
rate_display = "#{detail_currency.code} / #{hr}"
rate_display_unit = "#{rate_display} / #{per_unit_display}"
per_unit.nil? ? rate_display : rate_display_unit
end
def save_tiers(tiers)
temp = self.class.new(:chargeback_tiers => tiers)
if temp.contiguous_tiers?
self.chargeback_tiers.replace(tiers)
else
temp.errors.each {|a, e| errors.add(a, e)}
end
end
# Check that tiers are complete and disjoint
def contiguous_tiers?
error = false
# Note, we use sort_by vs. order since we need to call this method against
# the in memory chargeback_tiers association and NOT hit the database.
tiers = chargeback_tiers
tiers.each_with_index do |tier, index|
if single_tier?(tier,tiers)
error = true if !tier.starts_with_zero? || !tier.ends_with_infinity?
elsif first_tier?(tier,tiers)
error = true if !tier.starts_with_zero? || tier.ends_with_infinity?
elsif last_tier?(tier,tiers)
error = true if !consecutive_tiers?(tier, tiers[index - 1]) || !tier.ends_with_infinity?
elsif middle_tier?(tier,tiers)
error = true if !consecutive_tiers?(tier, tiers[index - 1]) || tier.ends_with_infinity?
end
break if error
end
errors.add(:chargeback_tiers, _("must start at zero and not contain any gaps between start and prior end value.")) if error
!error
end
private
def gratis?
chargeback_tiers.all?(&:gratis?)
end
def metric_and_cost_by(consumption, options)
metric_value = chargeable_field.measure(consumption, options, sub_metric)
hourly_cost = hourly_cost(metric_value, consumption)
cost = chargeable_field.metering? ? hourly_cost : hourly_cost * consumption.consumed_hours_in_interval
[metric_value, cost]
end
def first_tier?(tier,tiers)
tier == tiers.first
end
def last_tier?(tier,tiers)
tier == tiers.last
end
def single_tier?(tier,tiers)
first_tier?(tier, tiers) && last_tier?(tier, tiers)
end
def middle_tier?(tier,tiers)
!first_tier?(tier, tiers) && !last_tier?(tier, tiers)
end
def consecutive_tiers?(tier, previous_tier)
tier.start == previous_tier.finish
end
def self.default_rate_details_for(rate_type)
rate_details = []
fixture_file = File.join(FIXTURE_DIR, "chargeback_rates.yml")
fixture = File.exist?(fixture_file) ? YAML.load_file(fixture_file) : []
fixture.each do |chargeback_rate|
next unless chargeback_rate[:rate_type] == rate_type && chargeback_rate[:description] == "Default"
chargeback_rate[:rates].each do |detail|
detail_new = ChargebackRateDetail.new(detail.slice(*ChargebackRateDetail::FORM_ATTRIBUTES))
detail_new.detail_currency = ChargebackRateDetailCurrency.find_by(:name => detail[:type_currency])
detail_new.metric = detail[:metric]
detail_new.chargeable_field = ChargeableField.find_by(:metric => detail.delete(:metric))
detail[:tiers].sort_by { |tier| tier[:start] }.each do |tier|
detail_new.chargeback_tiers << ChargebackTier.new(tier.slice(*ChargebackTier::FORM_ATTRIBUTES))
end
rate_details.push(detail_new)
if detail_new.chargeable_field.metric == 'derived_vm_allocated_disk_storage'
volume_types = CloudVolume.volume_types
volume_types.push('unclassified') if volume_types.present?
volume_types.each do |volume_type|
storage_detail_new = detail_new.dup
storage_detail_new.sub_metric = volume_type
detail[:tiers].sort_by { |tier| tier[:start] }.each do |tier|
storage_detail_new.chargeback_tiers << ChargebackTier.new(tier.slice(*ChargebackTier::FORM_ATTRIBUTES))
end
rate_details.push(storage_detail_new)
end
end
end
end
rate_details.sort_by { |rd| [rd.chargeable_field[:group], rd.chargeable_field[:description], rd[:sub_metric].to_s] }
end
end
Don't calculate cost if the chargeback rate detail is gratis (all-zero rates entered).
Revert: calculation with zero rates is useful and does not affect anything.
class ChargebackRateDetail < ApplicationRecord
belongs_to :chargeback_rate
belongs_to :chargeable_field
belongs_to :detail_measure, :class_name => "ChargebackRateDetailMeasure", :foreign_key => :chargeback_rate_detail_measure_id
belongs_to :detail_currency, :class_name => "ChargebackRateDetailCurrency", :foreign_key => :chargeback_rate_detail_currency_id
has_many :chargeback_tiers, :dependent => :destroy, :autosave => true
default_scope { joins(:chargeable_field).merge(ChargeableField.order(:group => :asc, :description => :asc)) }
validates :chargeback_rate, :chargeable_field, :presence => true
validate :contiguous_tiers?
delegate :rate_type, :to => :chargeback_rate, :allow_nil => true
delegate :metric_column_key, :metric_key, :cost_keys, :rate_key, :to => :chargeable_field
FORM_ATTRIBUTES = %i(description per_time per_unit metric group source metric chargeable_field_id sub_metric).freeze
PER_TIME_TYPES = {
"hourly" => _("Hourly"),
"daily" => _("Daily"),
"weekly" => _("Weekly"),
"monthly" => _("Monthly"),
'yearly' => _('Yearly')
}.freeze
# gigabytes -> GiB
#
def showback_unit(p_per_unit = nil)
return '' unless chargeable_field.detail_measure
{'bytes' => '',
'kilobytes' => 'KiB',
'megabytes' => 'MiB',
'gigabytes' => 'GiB',
'terabytes' => 'TiB',
'hertz' => '',
'kilohertz' => 'KHz',
'megahertz' => 'MHz',
'gigahertz' => 'GHz',
'teraherts' => 'THz',
'bps' => '',
'kbps' => 'Mbps',
'mbps' => 'Gbps',
'gbps' => 'Tbps'}[p_per_unit || per_unit]
end
def populate_showback_rate(plan, rate_detail, entity)
group = rate_detail.chargeable_field.showback_measure
field, _, calculation = rate_detail.chargeable_field.showback_dimension
unit = rate_detail.showback_unit
showback_rate = ManageIQ::Showback::Rate.find_or_create_by(:entity => entity,
:group => group,
:field => field,
:price_plan => plan,
:calculation => calculation,
:concept => rate_detail.id)
showback_rate.tiers.destroy_all
rate_detail.chargeback_tiers.each do |tier|
showback_rate.tiers.build(:tier_start_value => tier.start,
:tier_end_value => tier.finish,
:variable_rate_per_time => rate_detail.per_time,
:variable_rate_per_unit => unit,
:fixed_rate_per_time => rate_detail.per_time,
:fixed_rate => Money.new(tier.fixed_rate * Money.default_currency.subunit_to_unit),
:variable_rate => Money.new(tier.variable_rate * Money.default_currency.subunit_to_unit))
end
showback_rate.save
end
def sub_metrics
if metric == 'derived_vm_allocated_disk_storage'
volume_types = CloudVolume.volume_types
unless volume_types.empty?
res = {}
res[_('All')] = ''
volume_types.each { |type| res[type.capitalize] = type }
res[_('Other - Unclassified')] = 'unclassified'
res
end
end
end
def sub_metric_human
sub_metric.present? ? sub_metric.capitalize : 'All'
end
def rate_values(consumption, options)
fixed_rate, variable_rate = find_rate(chargeable_field.measure(consumption, options, sub_metric))
hourly_fixed_rate = hourly(fixed_rate, consumption)
hourly_variable_rate = hourly(variable_rate, consumption)
"#{hourly_fixed_rate}/#{hourly_variable_rate}"
end
# Computes the metric value and cost entries for this rate detail and
# returns them in a hash keyed by report fields.
#
# relevant_fields - report fields requested for this run
# consumption     - consumption object for the charged interval
# options         - chargeback report options
#
# Returns a Hash (empty when none of this detail's fields are relevant
# or when the detail is gratis and only cost fields were requested).
def charge(relevant_fields, consumption, options)
  result = {}
  # Fix: this condition previously referenced the undefined local
  # `report_cols` (copied from #affects_report_fields), raising a
  # NameError; `relevant_fields` is the collection in scope here.
  if (relevant_fields & [metric_key]).present? || ((relevant_fields & cost_keys).present? && !gratis?)
    metric_value, cost = metric_and_cost_by(consumption, options)
    # Fixed rates charge nothing when no chargeback fields are present.
    if !consumption.chargeback_fields_present && chargeable_field.fixed?
      cost = 0
    end
    result[rate_key(sub_metric)] = rate_values(consumption, options)
    result[metric_key(sub_metric)] = metric_value
    cost_keys(sub_metric).each { |field| result[field] = cost }
  end
  result
end
# Set the rates according to the tiers
def find_rate(value)
@found_rates ||= {}
@found_rates[value] ||=
begin
fixed_rate = 0.0
variable_rate = 0.0
tier_found = chargeback_tiers.detect { |tier| tier.includes?(value * rate_adjustment) }
unless tier_found.nil?
fixed_rate = tier_found.fixed_rate
variable_rate = tier_found.variable_rate
end
[fixed_rate, variable_rate]
end
end
# Maps per_time units to the per-interval label used for display.
# Frozen: mutable constants can be modified in place by any caller.
PER_TIME_MAP = {
  :hourly  => "Hour",
  :daily   => "Day",
  :weekly  => "Week",
  :monthly => "Month",
  :yearly  => "Year"
}.freeze
def hourly_cost(value, consumption)
return 0.0 unless self.enabled?
(fixed_rate, variable_rate) = find_rate(value)
hourly_fixed_rate = hourly(fixed_rate, consumption)
hourly_variable_rate = hourly(variable_rate, consumption)
hourly_fixed_rate + rate_adjustment * value * hourly_variable_rate
end
# Converts a rate expressed per `per_time` into its hourly equivalent.
# Monthly rates use the consumption's hours-in-month; raises for an
# unsupported per_time unit.
def hourly(rate, consumption)
  case per_time
  when "hourly"  then rate
  when "daily"   then rate / 24
  when "weekly"  then rate / 24 / 7
  when "monthly" then rate / consumption.hours_in_month
  when "yearly"  then rate / 24 / 365
  else raise "rate time unit of '#{per_time}' not supported"
  end
end
def rate_adjustment
@rate_adjustment ||= chargeable_field.adjustment_to(per_unit)
end
def affects_report_fields(report_cols)
([metric_key].to_set & report_cols).present? || ((cost_keys.to_set & report_cols).present? && !gratis?)
end
def friendly_rate
(fixed_rate, variable_rate) = find_rate(0.0)
value = read_attribute(:friendly_rate)
return value unless value.nil?
if chargeable_field.fixed?
# Example: 10.00 Monthly
"#{fixed_rate + variable_rate} #{per_time.to_s.capitalize}"
else
s = ""
chargeback_tiers.each do |tier|
# Example: Daily @ .02 per MHz from 0.0 to Infinity
s += "#{per_time.to_s.capitalize} @ #{tier.fixed_rate} + "\
"#{tier.variable_rate} per #{per_unit_display} from #{tier.start} to #{tier.finish}\n"
end
s.chomp
end
end
def per_unit_display
measure = chargeable_field.detail_measure
measure.nil? ? per_unit.to_s.capitalize : measure.measures.key(per_unit)
end
# New method created in order to show the rates in a easier to understand way
def show_rates
hr = ChargebackRateDetail::PER_TIME_MAP[per_time.to_sym]
rate_display = "#{detail_currency.code} / #{hr}"
rate_display_unit = "#{rate_display} / #{per_unit_display}"
per_unit.nil? ? rate_display : rate_display_unit
end
def save_tiers(tiers)
temp = self.class.new(:chargeback_tiers => tiers)
if temp.contiguous_tiers?
self.chargeback_tiers.replace(tiers)
else
temp.errors.each {|a, e| errors.add(a, e)}
end
end
# Check that tiers are complete and disjoint
def contiguous_tiers?
error = false
# Note, we use sort_by vs. order since we need to call this method against
# the in memory chargeback_tiers association and NOT hit the database.
tiers = chargeback_tiers
tiers.each_with_index do |tier, index|
if single_tier?(tier,tiers)
error = true if !tier.starts_with_zero? || !tier.ends_with_infinity?
elsif first_tier?(tier,tiers)
error = true if !tier.starts_with_zero? || tier.ends_with_infinity?
elsif last_tier?(tier,tiers)
error = true if !consecutive_tiers?(tier, tiers[index - 1]) || !tier.ends_with_infinity?
elsif middle_tier?(tier,tiers)
error = true if !consecutive_tiers?(tier, tiers[index - 1]) || tier.ends_with_infinity?
end
break if error
end
errors.add(:chargeback_tiers, _("must start at zero and not contain any gaps between start and prior end value.")) if error
!error
end
private
def gratis?
chargeback_tiers.all?(&:gratis?)
end
def metric_and_cost_by(consumption, options)
metric_value = chargeable_field.measure(consumption, options, sub_metric)
hourly_cost = hourly_cost(metric_value, consumption)
cost = chargeable_field.metering? ? hourly_cost : hourly_cost * consumption.consumed_hours_in_interval
[metric_value, cost]
end
def first_tier?(tier,tiers)
tier == tiers.first
end
def last_tier?(tier,tiers)
tier == tiers.last
end
def single_tier?(tier,tiers)
first_tier?(tier, tiers) && last_tier?(tier, tiers)
end
def middle_tier?(tier,tiers)
!first_tier?(tier, tiers) && !last_tier?(tier, tiers)
end
def consecutive_tiers?(tier, previous_tier)
tier.start == previous_tier.finish
end
def self.default_rate_details_for(rate_type)
rate_details = []
fixture_file = File.join(FIXTURE_DIR, "chargeback_rates.yml")
fixture = File.exist?(fixture_file) ? YAML.load_file(fixture_file) : []
fixture.each do |chargeback_rate|
next unless chargeback_rate[:rate_type] == rate_type && chargeback_rate[:description] == "Default"
chargeback_rate[:rates].each do |detail|
detail_new = ChargebackRateDetail.new(detail.slice(*ChargebackRateDetail::FORM_ATTRIBUTES))
detail_new.detail_currency = ChargebackRateDetailCurrency.find_by(:name => detail[:type_currency])
detail_new.metric = detail[:metric]
detail_new.chargeable_field = ChargeableField.find_by(:metric => detail.delete(:metric))
detail[:tiers].sort_by { |tier| tier[:start] }.each do |tier|
detail_new.chargeback_tiers << ChargebackTier.new(tier.slice(*ChargebackTier::FORM_ATTRIBUTES))
end
rate_details.push(detail_new)
if detail_new.chargeable_field.metric == 'derived_vm_allocated_disk_storage'
volume_types = CloudVolume.volume_types
volume_types.push('unclassified') if volume_types.present?
volume_types.each do |volume_type|
storage_detail_new = detail_new.dup
storage_detail_new.sub_metric = volume_type
detail[:tiers].sort_by { |tier| tier[:start] }.each do |tier|
storage_detail_new.chargeback_tiers << ChargebackTier.new(tier.slice(*ChargebackTier::FORM_ATTRIBUTES))
end
rate_details.push(storage_detail_new)
end
end
end
end
rate_details.sort_by { |rd| [rd.chargeable_field[:group], rd.chargeable_field[:description], rd[:sub_metric].to_s] }
end
end
|
#
# Condition update event. This event type is required for
# all implementations.
#
class ConditionUpdateEvent < AssetEvent
# Callbacks
after_initialize :set_defaults
# Associations
# Condition type observed for the asset at the time of the event.
belongs_to :condition_type
validates :condition_type_id, :presence => true
# Assessed rating is constrained to the 0..5 condition scale.
# NOTE(review): :allow_nil => :true passes the Symbol :true rather than
# the boolean true; it is truthy so it behaves the same, but the boolean
# is the conventional value — confirm and normalize.
validates :assessed_rating, :numericality => {:greater_than_or_equal_to => 0, :less_than_or_equal_to => 5}, :allow_nil => :true
# Default the assessed rating from the selected condition type's nominal
# rating when a type was chosen but no rating entered.
before_validation do
self.assessed_rating ||= ConditionType.find(condition_type_id).rating unless condition_type_id.blank?
end
#------------------------------------------------------------------------------
# Scopes
#------------------------------------------------------------------------------
# Default scope: restrict to this event type and order chronologically.
default_scope { where(:asset_event_type_id => AssetEventType.find_by_class_name(self.name).id).order(:event_date) }
# List of hash parameters allowed by the controller
FORM_PARAMS = [
:condition_type_id,
:assessed_rating,
]
#------------------------------------------------------------------------------
#
# Class Methods
#
#------------------------------------------------------------------------------
# Strong-parameter whitelist used by controllers.
def self.allowable_params
FORM_PARAMS
end
# Returns the asset event type for this type of event.
def self.asset_event_type
AssetEventType.find_by_class_name(self.name)
end
#------------------------------------------------------------------------------
#
# Instance Methods
#
#------------------------------------------------------------------------------
# Override numeric setter to strip extraneous formats (e.g. $, commas)
# from number strings before storing.
def assessed_rating=(num)
self[:assessed_rating] = sanitize_to_float(num)
end
# This must be overridden otherwise a stack error will occur.
def get_update
condition_type.name unless condition_type.nil?
end
protected
# Set reasonable defaults for a new condition update event.
def set_defaults
super
self.asset_event_type ||= AssetEventType.find_by_class_name(self.name)
end
end
Fix condition update event: derive the condition type from the assessed rating, rather than the rating from the condition type.
#
# Condition update event. This event type is required for
# all implementations.
#
class ConditionUpdateEvent < AssetEvent
# Callbacks
after_initialize :set_defaults
# Associations
# Condition type observed for the asset at the time of the event.
belongs_to :condition_type
validates :condition_type_id, :presence => true
# Assessed rating is constrained to the 0..5 condition scale.
# NOTE(review): :allow_nil => :true passes the Symbol :true rather than
# the boolean true; it is truthy so it behaves the same, but the boolean
# is the conventional value — confirm and normalize.
validates :assessed_rating, :numericality => {:greater_than_or_equal_to => 0, :less_than_or_equal_to => 5}, :allow_nil => :true
# Derive the condition type from the entered rating when a rating was
# supplied but no type chosen (inverse of the previous behavior, which
# derived the rating from the type).
before_validation do
self.condition_type ||= ConditionType.from_rating(assessed_rating) unless assessed_rating.blank?
end
#------------------------------------------------------------------------------
# Scopes
#------------------------------------------------------------------------------
# Default scope: restrict to this event type and order chronologically.
default_scope { where(:asset_event_type_id => AssetEventType.find_by_class_name(self.name).id).order(:event_date) }
# List of hash parameters allowed by the controller
FORM_PARAMS = [
:condition_type_id,
:assessed_rating,
]
#------------------------------------------------------------------------------
#
# Class Methods
#
#------------------------------------------------------------------------------
# Strong-parameter whitelist used by controllers.
def self.allowable_params
FORM_PARAMS
end
# Returns the asset event type for this type of event.
def self.asset_event_type
AssetEventType.find_by_class_name(self.name)
end
#------------------------------------------------------------------------------
#
# Instance Methods
#
#------------------------------------------------------------------------------
# Override numeric setter to strip extraneous formats (e.g. $, commas)
# from number strings before storing.
def assessed_rating=(num)
self[:assessed_rating] = sanitize_to_float(num)
end
# This must be overridden otherwise a stack error will occur.
def get_update
condition_type.name unless condition_type.nil?
end
protected
# Set reasonable defaults for a new condition update event.
def set_defaults
super
self.asset_event_type ||= AssetEventType.find_by_class_name(self.name)
end
end
|
class Portal::SchoolSelector
USA = "United States"
NO_STATE = "XX" # db field width is 2 chars. :
CHOICES_FILE = File.join(Rails.root, "resources", "country_list.txt")
@@country_choices = nil
attr_accessor :country # string eg "United States"
attr_accessor :state # string eg "MA"
attr_accessor :district # int AR eg 212
attr_accessor :district_name # string
attr_accessor :school # int AR eg 212
attr_accessor :school_name # string
attr_accessor :previous_attr # hash old values
def self.country_choices
return @@country_choices if (@@country_choices && (! @@country_choices.empty?))
@@country_choices = []
File.open(CHOICES_FILE, "r:UTF-8") do |infile|
while(line = infile.gets)
@@country_choices.push line.strip.titlecase
end
end
return @@country_choices
end
def initialize(params)
params = params['school_selector'] if params['school_selector']
params = params.reject { |k,v| v.nil? || v.empty? || v.blank? }
params.each_pair do |attribute, value|
self.set_attr(attribute,value)
end
self.load_previous_attributes
self.validate
self.record_previous_attributes
end
def load_previous_attributes
# convert @previous_attr to hash from string
return if @previous_attr.nil? or @previous_attr.empty?
array = Base64.decode64(@previous_attr).split("|")
@previous_attr = {
:country => array[0],
:state => array[1],
:district => array[2] || nil, # 0 == nil for our purposes
:school => array[3] || nil # 0 == nil for our purposes
}
end
def record_previous_attributes
attrs = [@country, @state]
district_id = @district.id if @district
district_id ||= 0
school_id = @school.id if @school
school_id ||= 0
attrs << district_id.to_s
attrs << school_id.to_s
@previous_attr = Base64.encode64(attrs.join("|"))
end
# True when the current value of `symbol` differs from the recorded
# previous value; always false when no previous state was loaded.
def attr_changed?(symbol)
  return false unless @previous_attr
  self.send(symbol) != @previous_attr[symbol]
end
def get_attr(attr)
get_method = attr.to_sym
if self.respond_to? get_method
return self.send get_method
end
return nil
end
def set_attr(attr,val)
assigment_method = "#{attr}=".to_sym
if self.respond_to? assigment_method
self.send(assigment_method, val)
end
end
def clear_attr(attr)
self.set_attr(attr,nil)
end
def clear_choices(attr)
@choices[attr] = []
end
def validate
@needs = nil
@choices = {}
previous_change = false
attr_order.each do |attr|
changed = attr_changed?(attr)
choice_method = "#{attr.to_s}_choices".to_sym
self.set_attr(attr,nil) if previous_change
@needs ||= attr unless validate_attr(attr)
@choices[attr] = (self.send(choice_method) || [])
previous_change ||= changed
end
end
# True when a required attribute is still missing.
# Fix: previously called `valid` — no such method or local exists
# (the predicate is `valid?`), so every call raised a NameError.
def invalid?
  !valid?
end

# Valid when no attribute was flagged as needed during #validate.
def valid?
  @needs.nil?
end
def validate_attr(symbol)
validation_method = "validate_#{symbol.to_s}"
if self.respond_to? validation_method
return self.send validation_method
else
return !self.send(symbol).nil?
end
end
def validate_country
@country ||= USA
return true
end
def validate_state
if @country != USA
@state = default_state_for(@country)
return true
end
return state_choices.include? @state
end
def validate_district
return true if add_district
if @district && (@district.kind_of? String)
@district = Portal::District.find(@district)
end
if @country != USA
@district = default_district_for(@country)
return true
end
return false unless @district.kind_of? Portal::District
# ensure that the district is in our list of districts.
return district_choices.map {|d| d[1] }.include? @district.id
end
def validate_school
return true if add_school
if @school && (@school.kind_of? String)
@school = Portal::School.find(@school)
end
return false unless @school.kind_of? Portal::School
return school_choices.map { |s| s[1] }.include? @school.id
end
# def default_district
# return Portal::District.default
# end
def default_state_for(country)
return NO_STATE
end
def default_district_for(state_or_country)
Portal::District.find_by_similar_name_or_new("default district for #{state_or_country}")
end
def add_district
return add_portal_resource(:district)
end
def add_school
return add_portal_resource(:school)
end
# Attempt to add a new portal resource (school or district)
# return true if successfully created / found
# Attempt to add a new portal resource (school or district).
# Looks up (or creates) a resource by the "<symbol>_name" attribute,
# assigns it to the matching attribute, and returns true on success.
# Returns false when creation is not allowed or no name was supplied.
def add_portal_resource(symbol)
  attribute_name = get_attr("#{symbol}_name")
  portal_clazz = "Portal::#{symbol.to_s.capitalize}".constantize
  if self.allow_teacher_creation(symbol)
    if attribute_name && (!attribute_name.blank?)
      find_attributes = {:name => attribute_name}
      # Scope the lookup to the selected district when one is set.
      if @district && (@district.kind_of? Portal::District)
        find_attributes[:district_id] = @district.id
      end
      attribute = portal_clazz.find_by_similar_or_new(find_attributes, 'registration')
      if attribute.new_record?
        # TODO: We probably shouldn't create new records if there
        # isn't a current user ...
        attribute.state = @state
        attribute.save
      end
      set_attr(symbol, attribute)
      return !attribute.nil?
    end
  end
  # Fix: removed the unreachable `return false if @school.nil?` that
  # followed the unconditional return, and the unused initial
  # `attribute = get_attr(symbol)` assignment.
  false
end
def needs
return @needs
end
def choices(symbol=nil)
return @choices[symbol] if symbol
return @choices
end
def country_choices
return Portal::SchoolSelector.country_choices
end
def state_choices
if @country == USA
return usa_states
end
return []
end
def usa_states
return %w[ AL AK AS AZ AR CA CO CT DE DC FM FL GA GU HI ID IL IN IA KS KY LA ME MH MD MA MI MN MS MO MT NE NV NH NJ NM NY NC ND MP OH OK OR PW PA PR RI SC SD TN TX UT VT VI VA WA WV WI WY ]
# states = Portal::District.all.map { |d| d.state }
# states << "none"
# states.compact.uniq.sort
end
def district_choices
if @state
districts = Portal::District.find(:all, :conditions => {:state => @state })
return districts.sort{ |a,b| a.name <=> b.name}.map { |d| [d.name, d.id] }
end
# return [default_district].map { |d| [d.name, d.id] }
return []
end
# Returns [[name, id], ...] choices for schools in the selected
# district, sorted by name. Fix: return an empty array (instead of
# nil) when no district is selected, matching #district_choices;
# callers already tolerate both via `|| []`.
def school_choices
  if @district && (@district.kind_of? Portal::District)
    schools = Portal::School.find(:all, :conditions => {:district_id => @district.id })
    return schools.sort { |a, b| a.name <=> b.name }.map { |s| [s.name, s.id] }
  end
  []
end
def select_args(field)
value = self.send field
if value && (value.respond_to? :id)
value = value.id
end
return [:school_selector, field, self.choices[field] || [], {:selected => value, :include_blank => true}]
end
def attr_order
[:country,:state,:district,:school]
end
def allow_teacher_creation(field=:school)
acceptable_fields = [:district,:school]
Admin::Project.default_project.allow_adhoc_schools && acceptable_fields.include?(field)
end
end
Don't show fields which can't be edited.
class Portal::SchoolSelector
USA = "United States"
NO_STATE = "XX" # db field width is 2 chars. :
CHOICES_FILE = File.join(Rails.root, "resources", "country_list.txt")
@@country_choices = nil
attr_accessor :country # string eg "United States"
attr_accessor :state # string eg "MA"
attr_accessor :district # int AR eg 212
attr_accessor :district_name # string
attr_accessor :school # int AR eg 212
attr_accessor :school_name # string
attr_accessor :previous_attr # hash old values
def self.country_choices
return @@country_choices if (@@country_choices && (! @@country_choices.empty?))
@@country_choices = []
File.open(CHOICES_FILE, "r:UTF-8") do |infile|
while(line = infile.gets)
@@country_choices.push line.strip.titlecase
end
end
return @@country_choices
end
def initialize(params)
params = params['school_selector'] if params['school_selector']
params = params.reject { |k,v| v.nil? || v.empty? || v.blank? }
params.each_pair do |attribute, value|
self.set_attr(attribute,value)
end
self.load_previous_attributes
self.validate
self.record_previous_attributes
end
def load_previous_attributes
# convert @previous_attr to hash from string
return if @previous_attr.nil? or @previous_attr.empty?
array = Base64.decode64(@previous_attr).split("|")
@previous_attr = {
:country => array[0],
:state => array[1],
:district => array[2] || nil, # 0 == nil for our purposes
:school => array[3] || nil # 0 == nil for our purposes
}
end
def record_previous_attributes
attrs = [@country, @state]
district_id = @district.id if @district
district_id ||= 0
school_id = @school.id if @school
school_id ||= 0
attrs << district_id.to_s
attrs << school_id.to_s
@previous_attr = Base64.encode64(attrs.join("|"))
end
def attr_changed?(symbol)
return false unless @previous_attr
return true if (self.send(symbol) != @previous_attr[symbol])
return false
end
def get_attr(attr)
get_method = attr.to_sym
if self.respond_to? get_method
return self.send get_method
end
return nil
end
def set_attr(attr,val)
assigment_method = "#{attr}=".to_sym
if self.respond_to? assigment_method
self.send(assigment_method, val)
end
end
def clear_attr(attr)
self.set_attr(attr,nil)
end
def clear_choices(attr)
@choices[attr] = []
end
def validate
@needs = nil
@choices = {}
previous_change = false
attr_order.each do |attr|
changed = attr_changed?(attr)
choice_method = "#{attr.to_s}_choices".to_sym
self.set_attr(attr,nil) if previous_change
@needs ||= attr unless validate_attr(attr)
@choices[attr] = (self.send(choice_method) || [])
previous_change ||= changed
end
end
def invalid?
return !valid
end
def valid?
return @needs == nil
end
def validate_attr(symbol)
validation_method = "validate_#{symbol.to_s}"
if self.respond_to? validation_method
return self.send validation_method
else
return !self.send(symbol).nil?
end
end
def validate_country
@country ||= USA
return true
end
def validate_state
if @country != USA
@state = default_state_for(@country)
return true
end
return state_choices.include? @state
end
def validate_district
return true if add_district
if @district && (@district.kind_of? String)
@district = Portal::District.find(@district)
end
if @country != USA
@district = default_district_for(@country)
return true
end
return false unless @district.kind_of? Portal::District
# ensure that the district is in our list of districts.
return district_choices.map {|d| d[1] }.include? @district.id
end
def validate_school
return true if add_school
if @school && (@school.kind_of? String)
@school = Portal::School.find(@school)
end
return false unless @school.kind_of? Portal::School
return school_choices.map { |s| s[1] }.include? @school.id
end
# def default_district
# return Portal::District.default
# end
def default_state_for(country)
return NO_STATE
end
def default_district_for(state_or_country)
Portal::District.find_by_similar_name_or_new("default district for #{state_or_country}")
end
def add_district
return add_portal_resource(:district)
end
def add_school
return add_portal_resource(:school)
end
# Attempt to add a new portal resource (school or district)
# return true if successfully created / found
def add_portal_resource(symbol)
attribute_name = get_attr("#{symbol}_name")
attribute = get_attr(symbol)
portal_clazz = "Portal::#{symbol.to_s.capitalize}".constantize
if self.allow_teacher_creation(symbol)
if attribute_name && (!attribute_name.blank?)
find_attributes = {:name => attribute_name}
if @district && (@district.kind_of? Portal::District)
find_attributes[:district_id] = @district.id
end
attribute = portal_clazz.find_by_similar_or_new(find_attributes,'registration')
if attribute.new_record?
# TODO: We should probably shouldn't create new
# records if there isn't a current user ...
attribute.state = @state
attribute.save
end
set_attr(symbol,attribute)
return !attribute.nil?
end
end
return false
return false if @school.nil?
end
def needs
return @needs
end
def choices(symbol=nil)
return @choices[symbol] if symbol
return @choices
end
def country_choices
return Portal::SchoolSelector.country_choices
end
def state_choices
if @country == USA
return usa_states
end
return []
end
def usa_states
return %w[ AL AK AS AZ AR CA CO CT DE DC FM FL GA GU HI ID IL IN IA KS KY LA ME MH MD MA MI MN MS MO MT NE NV NH NJ NM NY NC ND MP OH OK OR PW PA PR RI SC SD TN TX UT VT VI VA WA WV WI WY ]
# states = Portal::District.all.map { |d| d.state }
# states << "none"
# states.compact.uniq.sort
end
def district_choices
if @state
districts = Portal::District.find(:all, :conditions => {:state => @state })
return districts.sort{ |a,b| a.name <=> b.name}.map { |d| [d.name, d.id] }
end
# return [default_district].map { |d| [d.name, d.id] }
return []
end
def school_choices
if @district && (@district.kind_of? Portal::District)
schools = Portal::School.find(:all, :conditions => {:district_id => @district.id })
return schools.sort{ |a,b| a.name <=> b.name}.map { |s| [s.name, s.id] }
end
end
def select_args(field)
value = self.send field
if value && (value.respond_to? :id)
value = value.id
end
return [:school_selector, field, self.choices[field] || [], {:selected => value, :include_blank => true}]
end
def attr_order
[:country,:state,:district,:school]
end
def allow_teacher_creation(field=:school)
acceptable_fields = []
if self.country == USA
acceptable_fields << [:district] if self.state
else
acceptable_fields << [:district] if self.country
end
acceptable_fields << :school if self.district
Admin::Project.default_project.allow_adhoc_schools && acceptable_fields.include?(field)
end
end
|
# = Informations
#
# == License
#
# Ekylibre - Simple agricultural ERP
# Copyright (C) 2008-2009 Brice Texier, Thibaud Merigon
# Copyright (C) 2010-2012 Brice Texier
# Copyright (C) 2012-2016 Brice Texier, David Joulin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# == Table: product_nature_variants
#
# active :boolean default(FALSE), not null
# category_id :integer not null
# created_at :datetime not null
# creator_id :integer
# custom_fields :jsonb
# derivative_of :string
# gtin :string
# id :integer not null, primary key
# lock_version :integer default(0), not null
# name :string
# nature_id :integer not null
# number :string not null
# picture_content_type :string
# picture_file_name :string
# picture_file_size :integer
# picture_updated_at :datetime
# reference_name :string
# stock_account_id :integer
# stock_movement_account_id :integer
# unit_name :string not null
# updated_at :datetime not null
# updater_id :integer
# variety :string not null
# work_number :string
#
class ProductNatureVariant < Ekylibre::Record::Base
  include Attachable
  include Customizable
  # Numbers are generated once on create (see before_validation below)
  # and then frozen.
  attr_readonly :number
  refers_to :variety
  refers_to :derivative_of, class_name: 'Variety'
  belongs_to :nature, class_name: 'ProductNature', inverse_of: :variants
  belongs_to :category, class_name: 'ProductNatureCategory', inverse_of: :variants
  has_many :catalog_items, foreign_key: :variant_id, dependent: :destroy
  has_many :root_components, -> { where(parent: nil) }, class_name: 'ProductNatureVariantComponent', dependent: :destroy, inverse_of: :product_nature_variant, foreign_key: :product_nature_variant_id
  has_many :components, class_name: 'ProductNatureVariantComponent', dependent: :destroy, inverse_of: :product_nature_variant, foreign_key: :product_nature_variant_id
  has_many :part_product_nature_variant_id, class_name: 'ProductNatureVariantComponent'
  # Each variant owns its dedicated stock accounts (see
  # create_unique_account); they are destroyed with the variant.
  belongs_to :stock_movement_account, class_name: 'Account', dependent: :destroy
  belongs_to :stock_account, class_name: 'Account', dependent: :destroy
  has_many :parcel_items, foreign_key: :variant_id, dependent: :restrict_with_exception
  has_many :products, foreign_key: :variant_id, dependent: :restrict_with_exception
  has_many :purchase_items, foreign_key: :variant_id, inverse_of: :variant, dependent: :restrict_with_exception
  has_many :sale_items, foreign_key: :variant_id, inverse_of: :variant, dependent: :restrict_with_exception
  has_many :readings, class_name: 'ProductNatureVariantReading', foreign_key: :variant_id, inverse_of: :variant
  has_picture
  # [VALIDATORS[ Do not edit these lines directly. Use `rake clean:validations`.
  validates :active, inclusion: { in: [true, false] }
  validates :gtin, :name, :picture_content_type, :picture_file_name, :reference_name, :work_number, length: { maximum: 500 }, allow_blank: true
  validates :number, presence: true, uniqueness: true, length: { maximum: 500 }
  validates :picture_file_size, numericality: { only_integer: true, greater_than: -2_147_483_649, less_than: 2_147_483_648 }, allow_blank: true
  validates :picture_updated_at, timeliness: { on_or_after: -> { Time.new(1, 1, 1).in_time_zone }, on_or_before: -> { Time.zone.now + 50.years } }, allow_blank: true
  validates :unit_name, presence: true, length: { maximum: 500 }
  validates :category, :nature, :variety, presence: true
  # ]VALIDATORS]
  validates :number, length: { allow_nil: true, maximum: 60 }
  validates :number, uniqueness: true
  validates :derivative_of, :variety, length: { allow_nil: true, maximum: 120 }
  validates :gtin, length: { allow_nil: true, maximum: 14 }
  validates_attachment_content_type :picture, content_type: /image/
  alias_attribute :commercial_name, :name
  # Most behavioral predicates are defined on the nature/category and
  # delegated from here.
  delegate :able_to?, :identifiable?, :able_to_each?, :has_indicator?, :matching_model, :indicators, :population_frozen?, :population_modulo, :frozen_indicators, :frozen_indicators_list, :variable_indicators, :variable_indicators_list, :linkage_points, :of_expression, :population_counting_unitary?, :whole_indicators_list, :whole_indicators, :individual_indicators_list, :individual_indicators, to: :nature
  delegate :variety, :derivative_of, :name, to: :nature, prefix: true
  delegate :depreciable?, :depreciation_rate, :deliverable?, :purchasable?, :saleable?, :storable?, :subscribing?, :fixed_asset_depreciation_method, :fixed_asset_depreciation_percentage, :fixed_asset_account, :fixed_asset_allocation_account, :fixed_asset_expenses_account, :product_account, :charge_account, to: :category
  accepts_nested_attributes_for :products, reject_if: :all_blank, allow_destroy: true
  accepts_nested_attributes_for :components, reject_if: :all_blank, allow_destroy: true
  accepts_nested_attributes_for :readings, reject_if: proc { |params| params['measure_value_value'].blank? && params['integer_value'].blank? && params['boolean_value'].blank? && params['decimal_value'].blank? }, allow_destroy: true
  accepts_nested_attributes_for :catalog_items, reject_if: :all_blank, allow_destroy: true
  validates_associated :components
  scope :availables, -> { where(nature_id: ProductNature.availables).order(:name) }
  scope :saleables, -> { joins(:nature).merge(ProductNature.saleables) }
  scope :purchaseables, -> { joins(:nature).merge(ProductNature.purchaseables) }
  scope :deliverables, -> { joins(:nature).merge(ProductNature.stockables) }
  scope :stockables_or_depreciables, -> { joins(:nature).merge(ProductNature.stockables_or_depreciables).order(:name) }
  scope :identifiables, -> { where(nature: ProductNature.identifiables) }
  scope :derivative_of, proc { |*varieties| of_derivative_of(*varieties) }
  scope :can, proc { |*abilities|
    of_expression(abilities.map { |a| "can #{a}" }.join(' or '))
  }
  scope :can_each, proc { |*abilities|
    of_expression(abilities.map { |a| "can #{a}" }.join(' and '))
  }
  scope :of_working_set, lambda { |working_set|
    if item = Nomen::WorkingSet.find(working_set)
      of_expression(item.expression)
    else
      raise StandardError, "#{working_set.inspect} is not in Nomen::WorkingSet nomenclature"
    end
  }
  scope :of_expression, lambda { |expression|
    joins(:nature).where(WorkingSet.to_sql(expression, default: :product_nature_variants, abilities: :product_natures, indicators: :product_natures))
  }
  scope :of_natures, ->(*natures) { where(nature_id: natures) }
  scope :of_categories, ->(*categories) { where(category_id: categories) }
  scope :of_category, ->(category) { where(category: category) }
  # Forbid destruction while the variant is referenced by products,
  # sales, purchases or parcels, or while its stock accounts cannot be
  # removed themselves.
  protect(on: :destroy) do
    products.any? || sale_items.any? || purchase_items.any? ||
      parcel_items.any? || !destroyable_accounts?
  end
  # Assign the next sequential number on creation.
  before_validation on: :create do
    self.number = if ProductNatureVariant.any?
                    ProductNatureVariant.order(number: :desc).first.number.succ
                  else
                    '00000001'
                  end
  end
  # Inherit category, name, variety and derivative_of from the nature,
  # and lazily create the stock accounts for storable variants.
  before_validation do # on: :create
    if nature
      self.category_id = nature.category_id
      self.nature_name ||= nature.name
      # self.variable_indicators ||= self.nature.indicators
      self.name ||= self.nature_name
      self.variety ||= nature.variety
      if derivative_of.blank? && nature.derivative_of
        self.derivative_of ||= nature.derivative_of
      end
      if storable?
        self.stock_account ||= create_unique_account(:stock)
        self.stock_movement_account ||= create_unique_account(:stock_movement)
      end
    end
  end
  # Variety and derivative_of must stay within the bounds set by the
  # nature, and be broad enough to cover every existing product of the
  # variant.
  validate do
    if nature
      unless Nomen::Variety.find(nature_variety) >= self.variety
        logger.debug "#{nature_variety}#{Nomen::Variety.all(nature_variety)} not include #{self.variety.inspect}"
        errors.add(:variety, :invalid)
      end
      if Nomen::Variety.find(nature_derivative_of)
        if self.derivative_of
          unless Nomen::Variety.find(nature_derivative_of) >= self.derivative_of
            errors.add(:derivative_of, :invalid)
          end
        else
          errors.add(:derivative_of, :blank)
        end
      end
      # if storable?
      #   unless self.stock_account
      #     errors.add(:stock_account, :not_defined)
      #   end
      #   unless self.stock_movement_account
      #     errors.add(:stock_movement_account, :not_defined)
      #   end
      # end
    end
    if variety && products.any?
      if products.detect { |p| Nomen::Variety.find(p.variety) > variety }
        errors.add(:variety, :invalid)
      end
    end
    if derivative_of && products.any?
      if products.detect { |p| p.derivative_of? && Nomen::Variety.find(p.derivative_of) > derivative_of }
        errors.add(:derivative_of, :invalid)
      end
    end
  end
def destroyable_accounts?
stock_movement_account && stock_account && stock_movement_account.destroyable? && stock_account.destroyable?
end
# create unique account for stock management in accountancy
def create_unique_account(mode = :stock)
account_key = mode.to_s + '_account'
unless storable?
raise ArgumentError, "Don't known how to create account for #{self.name.inspect}. You have to check category first"
end
category_account = category.send(account_key)
unless category_account
raise ArgumentError, "Account is not configure for #{self.name.inspect}. You have to check category first"
end
options = {}
options[:number] = category_account.number + number[-6, 6].rjust(6)
options[:name] = category_account.name + ' [' + self.name + ']'
options[:label] = options[:number] + ' - ' + options[:name]
options[:usages] = category_account.usages
Account.create!(options)
end
# add animals to new variant
def add_products(products, options = {})
Intervention.write(:product_evolution, options) do |i|
i.cast :variant, self, as: 'product_evolution-variant'
products.each do |p|
product = (p.is_a?(Product) ? p : Product.find(p))
member = i.cast :product, product, as: 'product_evolution-target'
i.variant_cast :variant, member
end
end
end
# Measure a product for a given indicator
def read!(indicator, value)
unless indicator.is_a?(Nomen::Item)
indicator = Nomen::Indicator.find(indicator)
unless indicator
raise ArgumentError, "Unknown indicator #{indicator.inspect}. Expecting one of them: #{Nomen::Indicator.all.sort.to_sentence}."
end
end
reading = readings.find_or_initialize_by(indicator_name: indicator.name)
reading.value = value
reading.save!
reading
end
# Return the reading
def reading(indicator)
unless indicator.is_a?(Nomen::Item) || indicator = Nomen::Indicator[indicator]
raise ArgumentError, "Unknown indicator #{indicator.inspect}. Expecting one of them: #{Nomen::Indicator.all.sort.to_sentence}."
end
readings.find_by(indicator_name: indicator.name)
end
# Returns the direct value of an indicator of variant
def get(indicator, _options = {})
unless indicator.is_a?(Nomen::Item) || indicator = Nomen::Indicator[indicator]
raise ArgumentError, "Unknown indicator #{indicator.inspect}. Expecting one of them: #{Nomen::Indicator.all.sort.to_sentence}."
end
if reading = reading(indicator.name)
return reading.value
elsif indicator.datatype == :measure
return 0.0.in(indicator.unit)
elsif indicator.datatype == :decimal
return 0.0
end
nil
end
# check if a variant has an indicator which is frozen or not
def has_frozen_indicator?(indicator)
if indicator.is_a?(Nomen::Item)
return frozen_indicators.include?(indicator)
else
return frozen_indicators_list.include?(indicator)
end
end
# Returns item from default catalog for given usage
def default_catalog_item(usage)
catalog = Catalog.by_default!(usage)
catalog.items.find_by(variant: self)
end
# Returns a list of couple indicator/unit usable for the given variant
# The result is only based on measure indicators
def quantifiers
list = []
indicators.each do |indicator|
next unless indicator.gathering == :proportional_to_population
if indicator.datatype == :measure
Measure.siblings(indicator.unit).each do |unit_name|
list << "#{indicator.name}/#{unit_name}"
end
elsif indicator.datatype == :integer || indicator.datatype == :decimal
list << indicator.name.to_s
end
end
variety = Nomen::Variety.find(self.variety)
# Specials indicators
if variety <= :product_group
list << 'members_count' unless list.include?('members_count/unity')
if variety <= :animal_group
list << 'members_livestock_unit' unless list.include?('members_livestock_unit/unity')
end
list << 'members_population' unless list.include?('members_population/unity')
end
list
end
# Returns a list of quantifier
def unified_quantifiers(options = {})
list = quantifiers.map do |quantifier|
pair = quantifier.split('/')
indicator = Nomen::Indicator.find(pair.first)
unit = (pair.second.blank? ? nil : Nomen::Unit.find(pair.second))
hash = { indicator: { name: indicator.name, human_name: indicator.human_name } }
hash[:unit] = if unit
{ name: unit.name, symbol: unit.symbol, human_name: unit.human_name }
elsif indicator.name =~ /^members\_/
unit = Nomen::Unit.find(:unity)
{ name: '', symbol: unit.symbol, human_name: unit.human_name }
else
{ name: '', symbol: unit_name, human_name: unit_name }
end
hash
end
# Add population
if options[:population]
# indicator = Nomen::Indicator[:population]
list << { indicator: { name: :population, human_name: Product.human_attribute_name(:population) }, unit: { name: '', symbol: unit_name, human_name: unit_name } }
end
# Add working duration (intervention durations)
if options[:working_duration]
Nomen::Unit.where(dimension: :time).find_each do |unit|
list << { indicator: { name: :working_duration, human_name: :working_duration.tl }, unit: { name: unit.name, symbol: unit.symbol, human_name: unit.human_name } }
end
end
list
end
# Get indicator value
# if option :at specify at which moment
# if option :reading is true, it returns the ProductNatureVariantReading record
# if option :interpolate is true, it returns the interpolated value
# :interpolate and :reading options are incompatible
def method_missing(method_name, *args)
return super unless Nomen::Indicator.items[method_name]
get(method_name)
end
def generate(*args)
options = args.extract_options!
product_name = args.shift || options[:name]
born_at = args.shift || options[:born_at]
default_storage = args.shift || options[:default_storage]
product_model = nature.matching_model
product_model.create!(variant: self, name: product_name + ' ' + born_at.l, initial_owner: Entity.of_company, initial_born_at: born_at, default_storage: default_storage)
end
# Shortcut for creating a new product of the variant
def create_product!(attributes = {})
attributes[:initial_owner] ||= Entity.of_company
attributes[:initial_born_at] ||= Time.zone.now
attributes[:born_at] ||= attributes[:initial_born_at]
attributes[:name] ||= "#{name} (#{attributes[:initial_born_at].to_date.l})"
matching_model.create!(attributes.merge(variant: self))
end
def take(quantity)
products.mine.each_with_object({}) do |product, result|
reminder = quantity - result.values.sum
result[product] = [product.population, reminder].min if reminder > 0
result
end
end
def take!(quantity)
raise 'errors.not_enough'.t if take(quantity).values.sum < quantity
end
# Returns last purchase item for the variant
# and a given supplier if any, or nil if there's
# no purchase item matching criterias
def last_purchase_item_for(supplier = nil)
return purchase_items.last unless supplier.present?
purchase_items
.joins(:purchase)
.where('purchases.supplier_id = ?', Entity.find(supplier).id)
.last
end
# Return current stock of all products link to the variant
def current_stock
products.map(&:population).compact.sum.to_f
end
# Return current quantity of all products link to the variant currently ordered or invoiced but not delivered
def current_outgoing_stock_ordered_not_delivered
sales = Sale.where(state: %w(order invoice))
sale_items = SaleItem.where(variant_id: id, sale_id: sales.pluck(:id)).includes(:parcel_items).where(parcel_items: { sale_item_id: nil })
sale_items.map(&:quantity).compact.sum.to_f
end
def picture_path(style = :original)
picture.path(style)
end
class << self
# Returns some nomenclature items are available to be imported, e.g. not
# already imported
def any_reference_available?
Nomen::ProductNatureVariant.without(ProductNatureVariant.pluck(:reference_name).uniq).any?
end
# Find or import variant from nomenclature with given attributes
# variety and derivative_of only are accepted for now
def find_or_import!(variety, options = {})
variants = of_variety(variety)
if derivative_of = options[:derivative_of]
variants = variants.derivative_of(derivative_of)
end
if variants.empty?
# Filter and imports
filtereds = flattened_nomenclature.select do |item|
item.variety >= variety &&
((derivative_of && item.derivative_of && item.derivative_of >= derivative_of) || (derivative_of.blank? && item.derivative_of.blank?))
end
filtereds.each do |item|
import_from_nomenclature(item.name)
end
end
variants.reload
end
ItemStruct = Struct.new(:name, :variety, :derivative_of, :abilities_list, :indicators, :frozen_indicators, :variable_indicators)
# Returns core attributes of nomenclature merge with nature if necessary
# name, variety, derivative_od, abilities
def flattened_nomenclature
@flattened_nomenclature ||= Nomen::ProductNatureVariant.list.collect do |item|
nature = Nomen::ProductNature[item.nature]
f = (nature.frozen_indicators || []).map(&:to_sym)
v = (nature.variable_indicators || []).map(&:to_sym)
ItemStruct.new(
item.name,
Nomen::Variety.find(item.variety || nature.variety),
Nomen::Variety.find(item.derivative_of || nature.derivative_of),
WorkingSet::AbilityArray.load(nature.abilities),
f + v, f, v
)
end
end
# Lists ProductNatureVariant::Item which match given expression
# Fully compatible with WSQL
def items_of_expression(expression)
flattened_nomenclature.select do |item|
WorkingSet.check_record(expression, item)
end
end
# Load a product nature variant from product nature variant nomenclature
def import_from_nomenclature(reference_name, force = false)
unless item = Nomen::ProductNatureVariant[reference_name]
raise ArgumentError, "The product_nature_variant #{reference_name.inspect} is not known"
end
unless nature_item = Nomen::ProductNature[item.nature]
raise ArgumentError, "The nature of the product_nature_variant #{item.nature.inspect} is not known"
end
unless !force && variant = ProductNatureVariant.find_by(reference_name: reference_name.to_s)
attributes = {
name: item.human_name,
active: true,
nature: ProductNature.import_from_nomenclature(item.nature),
reference_name: item.name,
unit_name: I18n.translate("nomenclatures.product_nature_variants.choices.unit_name.#{item.unit_name}"),
# :frozen_indicators => item.frozen_indicators_values.to_s,
variety: item.variety || nil,
derivative_of: item.derivative_of || nil
}
variant = new(attributes)
# puts variant.name.inspect.green
unless variant.save
raise "Cannot import variant #{reference_name.inspect}: #{variant.errors.full_messages.join(', ')}"
end
end
unless item.frozen_indicators_values.to_s.blank?
# create frozen indicator for each pair indicator, value ":population => 1unity"
item.frozen_indicators_values.to_s.strip.split(/[[:space:]]*\,[[:space:]]*/)
.collect { |i| i.split(/[[:space:]]*\:[[:space:]]*/) }.each do |i|
indicator_name = i.first.strip.downcase.to_sym
next unless variant.has_indicator? indicator_name
variant.read!(indicator_name, i.second)
end
end
variant
end
def load_defaults(_options = {})
Nomen::ProductNatureVariant.all.flatten.collect do |p|
import_from_nomenclature(p.to_s)
end
end
end
end
Add errors to the record instead of just crashing everything.
# = Informations
#
# == License
#
# Ekylibre - Simple agricultural ERP
# Copyright (C) 2008-2009 Brice Texier, Thibaud Merigon
# Copyright (C) 2010-2012 Brice Texier
# Copyright (C) 2012-2016 Brice Texier, David Joulin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# == Table: product_nature_variants
#
# active :boolean default(FALSE), not null
# category_id :integer not null
# created_at :datetime not null
# creator_id :integer
# custom_fields :jsonb
# derivative_of :string
# gtin :string
# id :integer not null, primary key
# lock_version :integer default(0), not null
# name :string
# nature_id :integer not null
# number :string not null
# picture_content_type :string
# picture_file_name :string
# picture_file_size :integer
# picture_updated_at :datetime
# reference_name :string
# stock_account_id :integer
# stock_movement_account_id :integer
# unit_name :string not null
# updated_at :datetime not null
# updater_id :integer
# variety :string not null
# work_number :string
#
class ProductNatureVariant < Ekylibre::Record::Base
  include Attachable
  include Customizable
  # Numbers are generated once on create (see before_validation below)
  # and then frozen.
  attr_readonly :number
  refers_to :variety
  refers_to :derivative_of, class_name: 'Variety'
  belongs_to :nature, class_name: 'ProductNature', inverse_of: :variants
  belongs_to :category, class_name: 'ProductNatureCategory', inverse_of: :variants
  has_many :catalog_items, foreign_key: :variant_id, dependent: :destroy
  has_many :root_components, -> { where(parent: nil) }, class_name: 'ProductNatureVariantComponent', dependent: :destroy, inverse_of: :product_nature_variant, foreign_key: :product_nature_variant_id
  has_many :components, class_name: 'ProductNatureVariantComponent', dependent: :destroy, inverse_of: :product_nature_variant, foreign_key: :product_nature_variant_id
  has_many :part_product_nature_variant_id, class_name: 'ProductNatureVariantComponent'
  # Each variant owns its dedicated stock accounts (see
  # create_unique_account); they are destroyed with the variant.
  belongs_to :stock_movement_account, class_name: 'Account', dependent: :destroy
  belongs_to :stock_account, class_name: 'Account', dependent: :destroy
  has_many :parcel_items, foreign_key: :variant_id, dependent: :restrict_with_exception
  has_many :products, foreign_key: :variant_id, dependent: :restrict_with_exception
  has_many :purchase_items, foreign_key: :variant_id, inverse_of: :variant, dependent: :restrict_with_exception
  has_many :sale_items, foreign_key: :variant_id, inverse_of: :variant, dependent: :restrict_with_exception
  has_many :readings, class_name: 'ProductNatureVariantReading', foreign_key: :variant_id, inverse_of: :variant
  has_picture
  # [VALIDATORS[ Do not edit these lines directly. Use `rake clean:validations`.
  validates :active, inclusion: { in: [true, false] }
  validates :gtin, :name, :picture_content_type, :picture_file_name, :reference_name, :work_number, length: { maximum: 500 }, allow_blank: true
  validates :number, presence: true, uniqueness: true, length: { maximum: 500 }
  validates :picture_file_size, numericality: { only_integer: true, greater_than: -2_147_483_649, less_than: 2_147_483_648 }, allow_blank: true
  validates :picture_updated_at, timeliness: { on_or_after: -> { Time.new(1, 1, 1).in_time_zone }, on_or_before: -> { Time.zone.now + 50.years } }, allow_blank: true
  validates :unit_name, presence: true, length: { maximum: 500 }
  validates :category, :nature, :variety, presence: true
  # ]VALIDATORS]
  validates :number, length: { allow_nil: true, maximum: 60 }
  validates :number, uniqueness: true
  validates :derivative_of, :variety, length: { allow_nil: true, maximum: 120 }
  validates :gtin, length: { allow_nil: true, maximum: 14 }
  validates_attachment_content_type :picture, content_type: /image/
  alias_attribute :commercial_name, :name
  # Most behavioral predicates are defined on the nature/category and
  # delegated from here.
  delegate :able_to?, :identifiable?, :able_to_each?, :has_indicator?, :matching_model, :indicators, :population_frozen?, :population_modulo, :frozen_indicators, :frozen_indicators_list, :variable_indicators, :variable_indicators_list, :linkage_points, :of_expression, :population_counting_unitary?, :whole_indicators_list, :whole_indicators, :individual_indicators_list, :individual_indicators, to: :nature
  delegate :variety, :derivative_of, :name, to: :nature, prefix: true
  delegate :depreciable?, :depreciation_rate, :deliverable?, :purchasable?, :saleable?, :storable?, :subscribing?, :fixed_asset_depreciation_method, :fixed_asset_depreciation_percentage, :fixed_asset_account, :fixed_asset_allocation_account, :fixed_asset_expenses_account, :product_account, :charge_account, to: :category
  accepts_nested_attributes_for :products, reject_if: :all_blank, allow_destroy: true
  accepts_nested_attributes_for :components, reject_if: :all_blank, allow_destroy: true
  accepts_nested_attributes_for :readings, reject_if: proc { |params| params['measure_value_value'].blank? && params['integer_value'].blank? && params['boolean_value'].blank? && params['decimal_value'].blank? }, allow_destroy: true
  accepts_nested_attributes_for :catalog_items, reject_if: :all_blank, allow_destroy: true
  validates_associated :components
  scope :availables, -> { where(nature_id: ProductNature.availables).order(:name) }
  scope :saleables, -> { joins(:nature).merge(ProductNature.saleables) }
  scope :purchaseables, -> { joins(:nature).merge(ProductNature.purchaseables) }
  scope :deliverables, -> { joins(:nature).merge(ProductNature.stockables) }
  scope :stockables_or_depreciables, -> { joins(:nature).merge(ProductNature.stockables_or_depreciables).order(:name) }
  scope :identifiables, -> { where(nature: ProductNature.identifiables) }
  scope :derivative_of, proc { |*varieties| of_derivative_of(*varieties) }
  scope :can, proc { |*abilities|
    of_expression(abilities.map { |a| "can #{a}" }.join(' or '))
  }
  scope :can_each, proc { |*abilities|
    of_expression(abilities.map { |a| "can #{a}" }.join(' and '))
  }
  scope :of_working_set, lambda { |working_set|
    if item = Nomen::WorkingSet.find(working_set)
      of_expression(item.expression)
    else
      raise StandardError, "#{working_set.inspect} is not in Nomen::WorkingSet nomenclature"
    end
  }
  scope :of_expression, lambda { |expression|
    joins(:nature).where(WorkingSet.to_sql(expression, default: :product_nature_variants, abilities: :product_natures, indicators: :product_natures))
  }
  scope :of_natures, ->(*natures) { where(nature_id: natures) }
  scope :of_categories, ->(*categories) { where(category_id: categories) }
  scope :of_category, ->(category) { where(category: category) }
  # Forbid destruction while the variant is referenced by products,
  # sales, purchases or parcels, or while its stock accounts cannot be
  # removed themselves.
  protect(on: :destroy) do
    products.any? || sale_items.any? || purchase_items.any? ||
      parcel_items.any? || !destroyable_accounts?
  end
  # Assign the next sequential number on creation.
  before_validation on: :create do
    self.number = if ProductNatureVariant.any?
                    ProductNatureVariant.order(number: :desc).first.number.succ
                  else
                    '00000001'
                  end
  end
  # Inherit category, name, variety and derivative_of from the nature,
  # and lazily create the stock accounts for storable variants.
  before_validation do # on: :create
    if nature
      self.category_id = nature.category_id
      self.nature_name ||= nature.name
      # self.variable_indicators ||= self.nature.indicators
      self.name ||= self.nature_name
      self.variety ||= nature.variety
      if derivative_of.blank? && nature.derivative_of
        self.derivative_of ||= nature.derivative_of
      end
      if storable?
        self.stock_account ||= create_unique_account(:stock)
        self.stock_movement_account ||= create_unique_account(:stock_movement)
      end
    end
  end
  # Variety and derivative_of must stay within the bounds set by the
  # nature, and be broad enough to cover every existing product of the
  # variant.
  validate do
    if nature
      unless Nomen::Variety.find(nature_variety) >= self.variety
        logger.debug "#{nature_variety}#{Nomen::Variety.all(nature_variety)} not include #{self.variety.inspect}"
        errors.add(:variety, :invalid)
      end
      if Nomen::Variety.find(nature_derivative_of)
        if self.derivative_of
          unless Nomen::Variety.find(nature_derivative_of) >= self.derivative_of
            errors.add(:derivative_of, :invalid)
          end
        else
          errors.add(:derivative_of, :blank)
        end
      end
      # if storable?
      #   unless self.stock_account
      #     errors.add(:stock_account, :not_defined)
      #   end
      #   unless self.stock_movement_account
      #     errors.add(:stock_movement_account, :not_defined)
      #   end
      # end
    end
    if variety && products.any?
      if products.detect { |p| Nomen::Variety.find(p.variety) > variety }
        errors.add(:variety, :invalid)
      end
    end
    if derivative_of && products.any?
      if products.detect { |p| p.derivative_of? && Nomen::Variety.find(p.derivative_of) > derivative_of }
        errors.add(:derivative_of, :invalid)
      end
    end
  end
def destroyable_accounts?
stock_movement_account && stock_account && stock_movement_account.destroyable? && stock_account.destroyable?
end
# create unique account for stock management in accountancy
def create_unique_account(mode = :stock)
account_key = mode.to_s + '_account'
unless storable?
errors.add :stock_account, "Don't known how to create account for #{self.name.inspect}. You have to check category first"
end
category_account = category.send(account_key)
unless category_account
errors.add :category_account, "Account is not configure for #{self.name.inspect}. You have to check category first"
end
options = {}
options[:number] = category_account.number + number[-6, 6].rjust(6)
options[:name] = category_account.name + ' [' + self.name + ']'
options[:label] = options[:number] + ' - ' + options[:name]
options[:usages] = category_account.usages
Account.create!(options)
end
# add animals to new variant
def add_products(products, options = {})
Intervention.write(:product_evolution, options) do |i|
i.cast :variant, self, as: 'product_evolution-variant'
products.each do |p|
product = (p.is_a?(Product) ? p : Product.find(p))
member = i.cast :product, product, as: 'product_evolution-target'
i.variant_cast :variant, member
end
end
end
# Measure a product for a given indicator
# Creates or updates the variant's reading for +indicator+.
# @param indicator [Nomen::Item, Symbol, String] indicator or its name
# @param value the value to record
# @return [ProductNatureVariantReading] the saved reading
# @raise [ArgumentError] when the indicator name is unknown
def read!(indicator, value)
  unless indicator.is_a?(Nomen::Item)
    # Keep the caller-supplied name for the error message: the original
    # interpolated `indicator` after reassigning it to nil, so the
    # message always reported "Unknown indicator nil".
    indicator_name = indicator
    indicator = Nomen::Indicator.find(indicator_name)
    unless indicator
      raise ArgumentError, "Unknown indicator #{indicator_name.inspect}. Expecting one of them: #{Nomen::Indicator.all.sort.to_sentence}."
    end
  end
  reading = readings.find_or_initialize_by(indicator_name: indicator.name)
  reading.value = value
  reading.save!
  reading
end
# Return the reading
# Finds this variant's reading for +indicator+, or nil when absent.
# @param indicator [Nomen::Item, Symbol, String]
# @raise [ArgumentError] when the indicator name is unknown
def reading(indicator)
  unless indicator.is_a?(Nomen::Item)
    # Look up separately so a failed lookup can still report the
    # caller-supplied name (the original `|| indicator = ...` left
    # `indicator` nil before interpolating it into the message).
    item = Nomen::Indicator[indicator]
    unless item
      raise ArgumentError, "Unknown indicator #{indicator.inspect}. Expecting one of them: #{Nomen::Indicator.all.sort.to_sentence}."
    end
    indicator = item
  end
  readings.find_by(indicator_name: indicator.name)
end
# Returns the direct value of an indicator of variant
# Falls back to a zero measure/decimal when no reading exists, nil for
# other datatypes.
# @param indicator [Nomen::Item, Symbol, String]
# @raise [ArgumentError] when the indicator name is unknown
def get(indicator, _options = {})
  unless indicator.is_a?(Nomen::Item)
    # Resolve via a temporary so the error message can show the name the
    # caller passed (the original reassigned `indicator` to nil first).
    item = Nomen::Indicator[indicator]
    unless item
      raise ArgumentError, "Unknown indicator #{indicator.inspect}. Expecting one of them: #{Nomen::Indicator.all.sort.to_sentence}."
    end
    indicator = item
  end
  if reading = reading(indicator.name)
    return reading.value
  elsif indicator.datatype == :measure
    return 0.0.in(indicator.unit)
  elsif indicator.datatype == :decimal
    return 0.0
  end
  nil
end
# check if a variant has an indicator which is frozen or not
# Accepts either a Nomen::Item or an indicator name.
def has_frozen_indicator?(indicator)
  return frozen_indicators.include?(indicator) if indicator.is_a?(Nomen::Item)
  frozen_indicators_list.include?(indicator)
end
# Returns item from default catalog for given usage
# Nil when this variant has no entry in that catalog.
def default_catalog_item(usage)
  Catalog.by_default!(usage).items.find_by(variant: self)
end
# Returns a list of couple indicator/unit usable for the given variant
# The result is only based on measure indicators
# Entries are "indicator/unit" for measure datatypes, or the bare
# indicator name for integer/decimal datatypes.
def quantifiers
  list = []
  indicators.each do |indicator|
    # Only population-proportional indicators are quantifiable here
    next unless indicator.gathering == :proportional_to_population
    if indicator.datatype == :measure
      # One entry per unit sharing the indicator unit's dimension
      Measure.siblings(indicator.unit).each do |unit_name|
        list << "#{indicator.name}/#{unit_name}"
      end
    elsif indicator.datatype == :integer || indicator.datatype == :decimal
      list << indicator.name.to_s
    end
  end
  variety = Nomen::Variety.find(self.variety)
  # Specials indicators
  # Group varieties get member-based quantifiers in addition
  if variety <= :product_group
    list << 'members_count' unless list.include?('members_count/unity')
    if variety <= :animal_group
      list << 'members_livestock_unit' unless list.include?('members_livestock_unit/unity')
    end
    list << 'members_population' unless list.include?('members_population/unity')
  end
  list
end
# Returns a list of quantifier
# Expands each quantifier string into a hash with localized indicator and
# unit descriptions, optionally appending population and working-duration
# pseudo-quantifiers.
def unified_quantifiers(options = {})
  list = quantifiers.map do |quantifier|
    pair = quantifier.split('/')
    indicator = Nomen::Indicator.find(pair.first)
    unit = (pair.second.blank? ? nil : Nomen::Unit.find(pair.second))
    hash = { indicator: { name: indicator.name, human_name: indicator.human_name } }
    hash[:unit] = if unit
                    { name: unit.name, symbol: unit.symbol, human_name: unit.human_name }
                  elsif indicator.name =~ /^members\_/
                    # members_* quantifiers are counted in unities
                    unit = Nomen::Unit.find(:unity)
                    { name: '', symbol: unit.symbol, human_name: unit.human_name }
                  else
                    # NOTE(review): unit_name here appears to be the
                    # variant's own unit_name attribute — confirm.
                    { name: '', symbol: unit_name, human_name: unit_name }
                  end
    hash
  end
  # Add population
  if options[:population]
    # indicator = Nomen::Indicator[:population]
    list << { indicator: { name: :population, human_name: Product.human_attribute_name(:population) }, unit: { name: '', symbol: unit_name, human_name: unit_name } }
  end
  # Add working duration (intervention durations)
  if options[:working_duration]
    Nomen::Unit.where(dimension: :time).find_each do |unit|
      list << { indicator: { name: :working_duration, human_name: :working_duration.tl }, unit: { name: unit.name, symbol: unit.symbol, human_name: unit.human_name } }
    end
  end
  list
end
# Get indicator value
# if option :at specify at which moment
# if option :reading is true, it returns the ProductNatureVariantReading record
# if option :interpolate is true, it returns the interpolated value
# :interpolate and :reading options are incompatible
# Delegates calls named after a known indicator to #get.
def method_missing(method_name, *args)
  return super unless Nomen::Indicator.items[method_name]
  get(method_name)
end

# Keep respond_to? truthful about the indicator delegation above
# (method_missing without respond_to_missing? breaks respond_to?/method).
def respond_to_missing?(method_name, include_private = false)
  Nomen::Indicator.items[method_name] ? true : super
end
# Builds and saves a product of this variant's nature's matching model.
# Accepts positional arguments (name, born_at, default_storage) or the
# equivalent option keys; positionals win when both are given.
def generate(*args)
  options = args.extract_options!
  product_name = args.shift || options[:name]
  born_at = args.shift || options[:born_at]
  default_storage = args.shift || options[:default_storage]
  product_model = nature.matching_model
  # Product name is suffixed with the localized birth date
  product_model.create!(variant: self, name: product_name + ' ' + born_at.l, initial_owner: Entity.of_company, initial_born_at: born_at, default_storage: default_storage)
end
# Shortcut for creating a new product of the variant
# Missing attributes are defaulted (owner, birth timestamps, name built
# from the variant name and birth date). Note: the given hash is
# mutated in place, as before.
def create_product!(attributes = {})
  attributes[:initial_owner] ||= Entity.of_company
  born = (attributes[:initial_born_at] ||= Time.zone.now)
  attributes[:born_at] ||= born
  attributes[:name] ||= "#{name} (#{born.to_date.l})"
  matching_model.create!(attributes.merge(variant: self))
end
# Allocates +quantity+ across owned products, greedily in iteration
# order, capping each share at the product's population.
# @return [Hash{Product => Numeric}] product => quantity taken
def take(quantity)
  products.mine.each_with_object({}) do |product, picked|
    remaining = quantity - picked.values.sum
    picked[product] = [product.population, remaining].min if remaining > 0
  end
end
# Like #take but raises when the owned stock cannot cover +quantity+.
# Now returns the allocation hash (the original returned nil, forcing
# callers to recompute #take); callers ignoring the return are unaffected.
def take!(quantity)
  result = take(quantity)
  raise 'errors.not_enough'.t if result.values.sum < quantity
  result
end
# Returns last purchase item for the variant
# and a given supplier if any, or nil if there's
# no purchase item matching criterias
def last_purchase_item_for(supplier = nil)
  return purchase_items.last if supplier.blank?
  scoped = purchase_items.joins(:purchase)
  scoped.where('purchases.supplier_id = ?', Entity.find(supplier).id).last
end
# Return current stock of all products link to the variant
def current_stock
  populations = products.collect(&:population)
  populations.compact.sum.to_f
end
# Return current quantity of all products link to the variant currently ordered or invoiced but not delivered
def current_outgoing_stock_ordered_not_delivered
  # Sales in "order" or "invoice" state whose items have no parcel item
  # yet, i.e. nothing has shipped for them.
  sales = Sale.where(state: %w(order invoice))
  sale_items = SaleItem.where(variant_id: id, sale_id: sales.pluck(:id)).includes(:parcel_items).where(parcel_items: { sale_item_id: nil })
  sale_items.map(&:quantity).compact.sum.to_f
end
# Filesystem/storage path of the attached picture for the given style.
def picture_path(style = :original)
  picture.path(style)
end
class << self
# Returns some nomenclature items are available to be imported, e.g. not
# already imported
def any_reference_available?
  imported = ProductNatureVariant.pluck(:reference_name).uniq
  Nomen::ProductNatureVariant.without(imported).any?
end
# Find or import variant from nomenclature with given attributes
# variety and derivative_of only are accepted for now
def find_or_import!(variety, options = {})
  variants = of_variety(variety)
  if derivative_of = options[:derivative_of]
    variants = variants.derivative_of(derivative_of)
  end
  if variants.empty?
    # Filter and imports
    # Keep nomenclature items matching the requested variety and whose
    # derivative_of matches (or is absent when none was requested).
    filtereds = flattened_nomenclature.select do |item|
      item.variety >= variety &&
        ((derivative_of && item.derivative_of && item.derivative_of >= derivative_of) || (derivative_of.blank? && item.derivative_of.blank?))
    end
    filtereds.each do |item|
      import_from_nomenclature(item.name)
    end
  end
  # Reload so freshly imported variants appear in the returned relation
  variants.reload
end
# Lightweight value object for one nomenclature variant entry.
ItemStruct = Struct.new(:name, :variety, :derivative_of, :abilities_list, :indicators, :frozen_indicators, :variable_indicators)
# Returns core attributes of nomenclature merge with nature if necessary
# name, variety, derivative_of, abilities
# Memoized per process; restart to pick up nomenclature changes.
def flattened_nomenclature
  @flattened_nomenclature ||= Nomen::ProductNatureVariant.list.collect do |item|
    nature = Nomen::ProductNature[item.nature]
    f = (nature.frozen_indicators || []).map(&:to_sym)
    v = (nature.variable_indicators || []).map(&:to_sym)
    # Variant-level variety/derivative_of win over the nature's
    ItemStruct.new(
      item.name,
      Nomen::Variety.find(item.variety || nature.variety),
      Nomen::Variety.find(item.derivative_of || nature.derivative_of),
      WorkingSet::AbilityArray.load(nature.abilities),
      f + v, f, v
    )
  end
end
# Lists ProductNatureVariant::Item which match given expression
# Fully compatible with WSQL
def items_of_expression(expression)
  flattened_nomenclature.select { |item| WorkingSet.check_record(expression, item) }
end
# Load a product nature variant from product nature variant nomenclature
# When +force+ is true a new record is created even if one already
# exists for the reference name.
def import_from_nomenclature(reference_name, force = false)
  unless item = Nomen::ProductNatureVariant[reference_name]
    raise ArgumentError, "The product_nature_variant #{reference_name.inspect} is not known"
  end
  unless nature_item = Nomen::ProductNature[item.nature]
    raise ArgumentError, "The nature of the product_nature_variant #{item.nature.inspect} is not known"
  end
  # Reuse an existing record unless forcing; otherwise build a fresh one
  unless !force && variant = ProductNatureVariant.find_by(reference_name: reference_name.to_s)
    attributes = {
      name: item.human_name,
      active: true,
      nature: ProductNature.import_from_nomenclature(item.nature),
      reference_name: item.name,
      unit_name: I18n.translate("nomenclatures.product_nature_variants.choices.unit_name.#{item.unit_name}"),
      # :frozen_indicators => item.frozen_indicators_values.to_s,
      variety: item.variety || nil,
      derivative_of: item.derivative_of || nil
    }
    variant = new(attributes)
    # puts variant.name.inspect.green
    unless variant.save
      raise "Cannot import variant #{reference_name.inspect}: #{variant.errors.full_messages.join(', ')}"
    end
  end
  unless item.frozen_indicators_values.to_s.blank?
    # create frozen indicator for each pair indicator, value ":population => 1unity"
    item.frozen_indicators_values.to_s.strip.split(/[[:space:]]*\,[[:space:]]*/)
        .collect { |i| i.split(/[[:space:]]*\:[[:space:]]*/) }.each do |i|
      indicator_name = i.first.strip.downcase.to_sym
      next unless variant.has_indicator? indicator_name
      variant.read!(indicator_name, i.second)
    end
  end
  variant
end
# Imports every variant declared in the nomenclature.
def load_defaults(_options = {})
  Nomen::ProductNatureVariant.all.flatten.map do |reference_name|
    import_from_nomenclature(reference_name.to_s)
  end
end
end
end
|
# CM-CIC (Monetico) payment gateway glue for Spree::Order.
# Each cmcic_* reader supplies one field of the payment request.
module Spree
  Order.class_eval do
    # Confirmation step not compatible with CM-CIC paiement
    # NOTE(review): returns the truthy string "toto" — looks like a debug
    # placeholder; confirm the intended boolean before relying on it.
    def confirmation_required?
      return "toto"
    end
    # Currency code of the order.
    def cmcic_sDevise
      self.currency
    end
    # Order creation timestamp in the dd/mm/yyyy:HH:MM:SS format.
    def cmcic_sDate
      self.created_at.strftime("%d/%m/%Y:%H:%M:%S")
    end
    # Amount with two decimals, suffixed with the currency code.
    def cmcic_sMontant
      ("%.2f" % self.total) + cmcic_sDevise
    end
    def cmcic_sReference
      self.number
    end
    def cmcic_email
      self.email
    end
    # Free-text field; empty string when no special instructions.
    def cmcic_sTexteLibre
      self.special_instructions.nil? ? "" : self.special_instructions
    end
    # Instalment fields are unused (single payment): all blank.
    def cmcic_sNbrEch
      ""
    end
    def cmcic_sDateEcheance1
      ""
    end
    def cmcic_sDateEcheance2
      ""
    end
    def cmcic_sDateEcheance3
      ""
    end
    def cmcic_sDateEcheance4
      ""
    end
    def cmcic_sMontantEcheance1
      ""
    end
    def cmcic_sMontantEcheance2
      ""
    end
    def cmcic_sMontantEcheance3
      ""
    end
    def cmcic_sMontantEcheance4
      ""
    end
    def cmcic_sOptions
      ""
    end
    # Uppercase SHA1-HMAC seal over the '*'-joined field string, keyed
    # with the TPE's packed key.
    # NOTE(review): OpenSSL::Digest::Digest is deprecated (removed in
    # Ruby 3) — should become OpenSSL::Digest.new("sha1").
    def cmcic_sMAC_Tpe(oTpe)
      sChaineMAC = [oTpe.sNumero, cmcic_sDate, cmcic_sMontant, cmcic_sReference, cmcic_sTexteLibre, oTpe.sVersion, oTpe.sLangue, oTpe.sCodeSociete, cmcic_email, cmcic_sNbrEch, cmcic_sDateEcheance1, cmcic_sMontantEcheance1, cmcic_sDateEcheance2, cmcic_sMontantEcheance2, cmcic_sDateEcheance3, cmcic_sMontantEcheance3, cmcic_sDateEcheance4, cmcic_sMontantEcheance4, cmcic_sOptions].join("*")
      OpenSSL::HMAC.hexdigest(OpenSSL::Digest::Digest.new("sha1"), oTpe.getPackCle, sChaineMAC).upcase
    end
  end
end
Add attr_accessible
# CM-CIC (Monetico) payment gateway glue for Spree::Order.
# Each cmcic_* reader supplies one field of the payment request.
module Spree
  Order.class_eval do
    # NOTE(review): attr_accessible on a computed reader is unusual —
    # confirm it is actually needed for mass assignment here.
    attr_accessible :cmcic_sDate
    # Confirmation step not compatible with CM-CIC paiement
    # NOTE(review): returns the truthy string "toto" — looks like a debug
    # placeholder; confirm the intended boolean before relying on it.
    def confirmation_required?
      return "toto"
    end
    # Currency code of the order.
    def cmcic_sDevise
      self.currency
    end
    # Order creation timestamp in the dd/mm/yyyy:HH:MM:SS format.
    def cmcic_sDate
      self.created_at.strftime("%d/%m/%Y:%H:%M:%S")
    end
    # Amount with two decimals, suffixed with the currency code.
    def cmcic_sMontant
      ("%.2f" % self.total) + cmcic_sDevise
    end
    def cmcic_sReference
      self.number
    end
    def cmcic_email
      self.email
    end
    # Free-text field; empty string when no special instructions.
    def cmcic_sTexteLibre
      self.special_instructions.nil? ? "" : self.special_instructions
    end
    # Instalment fields are unused (single payment): all blank.
    def cmcic_sNbrEch
      ""
    end
    def cmcic_sDateEcheance1
      ""
    end
    def cmcic_sDateEcheance2
      ""
    end
    def cmcic_sDateEcheance3
      ""
    end
    def cmcic_sDateEcheance4
      ""
    end
    def cmcic_sMontantEcheance1
      ""
    end
    def cmcic_sMontantEcheance2
      ""
    end
    def cmcic_sMontantEcheance3
      ""
    end
    def cmcic_sMontantEcheance4
      ""
    end
    def cmcic_sOptions
      ""
    end
    # Uppercase SHA1-HMAC seal over the '*'-joined field string, keyed
    # with the TPE's packed key.
    def cmcic_sMAC_Tpe(oTpe)
      sChaineMAC = [oTpe.sNumero, cmcic_sDate, cmcic_sMontant, cmcic_sReference, cmcic_sTexteLibre, oTpe.sVersion, oTpe.sLangue, oTpe.sCodeSociete, cmcic_email, cmcic_sNbrEch, cmcic_sDateEcheance1, cmcic_sMontantEcheance1, cmcic_sDateEcheance2, cmcic_sMontantEcheance2, cmcic_sDateEcheance3, cmcic_sMontantEcheance3, cmcic_sDateEcheance4, cmcic_sMontantEcheance4, cmcic_sOptions].join("*")
      # Fix: OpenSSL::Digest::Digest was deprecated and removed in
      # Ruby 3 / openssl 2.2; OpenSSL::Digest.new("sha1") produces the
      # identical HMAC.
      OpenSSL::HMAC.hexdigest(OpenSSL::Digest.new("sha1"), oTpe.getPackCle, sChaineMAC).upcase
    end
  end
end
# Maps source infrastructure resources (clusters, datastores, lans) to
# destination resources for v2v migration, and validates which VMs are
# eligible under the mapping.
class TransformationMapping < ApplicationRecord
  # Reason codes attached to VMs during validation.
  VM_CONFLICT = "conflict".freeze
  VM_EMPTY_NAME = "empty_name".freeze
  VM_IN_OTHER_PLAN = "in_other_plan".freeze
  VM_MIGRATED = "migrated".freeze
  VM_NOT_EXIST = "not_exist".freeze
  VM_VALID = "ok".freeze
  VM_INACTIVE = "inactive".freeze
  has_many :transformation_mapping_items, :dependent => :destroy
  has_many :service_resources, :as => :resource, :dependent => :nullify
  has_many :service_templates, :through => :service_resources
  validates :name, :presence => true, :uniqueness => true
  # Destination mapped to +source+, or nil when unmapped.
  def destination(source)
    transformation_mapping_items.find_by(:source => source).try(:destination)
  end
  # vm_list: collection of hashes, each describing a VM.
  # With a list, classifies the named VMs; without one, scans the mapped
  # clusters for valid VMs.
  def validate_vms(vm_list = nil)
    vm_list.present? ? identify_vms(vm_list) : select_vms
  end
  private
  # Collects all valid VMs from every mapped source cluster.
  def select_vms
    valid_list = []
    transformation_mapping_items.where(:source_type => EmsCluster).collect(&:source).each do |cluster|
      cluster.vms.each do |vm|
        reason = validate_vm(vm, true)
        valid_list << describe_vm(vm, reason) if reason == VM_VALID
      end
    end
    {"valid_vms" => valid_list}
  end
  # Classifies each requested VM as valid, invalid or conflicting
  # (conflict = several VMs match the given name/filters).
  def identify_vms(vm_list)
    valid_list = []
    invalid_list = []
    conflict_list = []
    vm_list.each do |row|
      vm_name = row['name']
      if vm_name.blank?
        invalid_list << describe_non_vm(vm_name)
        next
      end
      # Narrow the lookup with any optional identifying columns supplied.
      query = Vm.where(:name => vm_name)
      query = query.where(:uid_ems => row['uid_ems']) if row['uid_ems'].present?
      query = query.joins(:host).where(:hosts => {:name => row['host']}) if row['host'].present?
      query = query.joins(:ext_management_system).where(:ext_management_systems => {:name => row['provider']}) if row['provider'].present?
      vms = query.to_a
      if vms.size.zero?
        invalid_list << describe_non_vm(vm_name)
      elsif vms.size == 1
        reason = validate_vm(vms.first, false)
        (reason == VM_VALID ? valid_list : invalid_list) << describe_vm(vms.first, reason)
      else
        vms.each { |vm| conflict_list << describe_vm(vm, VM_CONFLICT) }
      end
    end
    {
      "valid_vms" => valid_list,
      "invalid_vms" => invalid_list,
      "conflict_vms" => conflict_list
    }
  end
  # Describes a row that did not resolve to an existing VM.
  def describe_non_vm(vm_name)
    {
      "name" => vm_name,
      "reason" => vm_name.blank? ? VM_EMPTY_NAME : VM_NOT_EXIST
    }
  end
  # Serializes a VM and its validation reason for the response payload.
  def describe_vm(vm, reason)
    {
      "name" => vm.name,
      "cluster" => vm.ems_cluster.try(:name) || '',
      "path" => vm.ext_management_system ? "#{vm.ext_management_system.name}/#{vm.parent_blue_folder_path(:exclude_non_display_folders => true)}" : '',
      "allocated_size" => vm.allocated_disk_storage,
      "id" => vm.id,
      "reason" => reason
    }
  end
  # Returns VM_VALID or a reason message; +quick+ stops at the first
  # unmapped resource instead of collecting them all.
  def validate_vm(vm, quick = true)
    validate_result = vm.validate_v2v_migration
    return validate_result unless validate_result == VM_VALID
    # a valid vm must find all resources in the mapping and has never been migrated
    invalid_list = []
    unless valid_cluster?(vm)
      invalid_list << "cluster: %{name}" % {:name => vm.ems_cluster.name}
      return no_mapping_msg(invalid_list) if quick
    end
    invalid_storages = unmapped_storages(vm)
    if invalid_storages.present?
      invalid_list << "storages: %{list}" % {:list => invalid_storages.collect(&:name).join(", ")}
      return no_mapping_msg(invalid_list) if quick
    end
    invalid_lans = unmapped_lans(vm)
    if invalid_lans.present?
      invalid_list << "lans: %{list}" % {:list => invalid_lans.collect(&:name).join(', ')}
      return no_mapping_msg(invalid_list) if quick
    end
    invalid_list.present? ? no_mapping_msg(invalid_list) : VM_VALID
  end
  def no_mapping_msg(list)
    "Mapping source not found - %{list}" % {:list => list.join('. ')}
  end
  # True when the VM's cluster is a mapped source.
  def valid_cluster?(vm)
    transformation_mapping_items.where(:source => vm.ems_cluster).exists?
  end
  # return an empty array if all storages are valid for transformation
  # otherwise return an array of invalid datastores
  def unmapped_storages(vm)
    vm.datastores - transformation_mapping_items.where(:source => vm.datastores).collect(&:source)
  end
  # return an empty array if all lans are valid for transformation
  # otherwise return an array of invalid lans
  def unmapped_lans(vm)
    vm.lans - transformation_mapping_items.where(:source => vm.lans).collect(&:source)
  end
end
Expose ems_cluster_id on VMs to support Target Cluster column in OSP properties UI table
# Maps source infrastructure resources (clusters, datastores, lans) to
# destination resources for v2v migration, and validates which VMs are
# eligible under the mapping.
class TransformationMapping < ApplicationRecord
  # Reason codes attached to VMs during validation.
  VM_CONFLICT = "conflict".freeze
  VM_EMPTY_NAME = "empty_name".freeze
  VM_IN_OTHER_PLAN = "in_other_plan".freeze
  VM_MIGRATED = "migrated".freeze
  VM_NOT_EXIST = "not_exist".freeze
  VM_VALID = "ok".freeze
  VM_INACTIVE = "inactive".freeze
  has_many :transformation_mapping_items, :dependent => :destroy
  has_many :service_resources, :as => :resource, :dependent => :nullify
  has_many :service_templates, :through => :service_resources
  validates :name, :presence => true, :uniqueness => true
  # Destination mapped to +source+, or nil when unmapped.
  def destination(source)
    transformation_mapping_items.find_by(:source => source).try(:destination)
  end
  # vm_list: collection of hashes, each describing a VM.
  # With a list, classifies the named VMs; without one, scans the mapped
  # clusters for valid VMs.
  def validate_vms(vm_list = nil)
    vm_list.present? ? identify_vms(vm_list) : select_vms
  end
  private
  # Collects all valid VMs from every mapped source cluster.
  def select_vms
    valid_list = []
    transformation_mapping_items.where(:source_type => EmsCluster).collect(&:source).each do |cluster|
      cluster.vms.each do |vm|
        reason = validate_vm(vm, true)
        valid_list << describe_vm(vm, reason) if reason == VM_VALID
      end
    end
    {"valid_vms" => valid_list}
  end
  # Classifies each requested VM as valid, invalid or conflicting
  # (conflict = several VMs match the given name/filters).
  def identify_vms(vm_list)
    valid_list = []
    invalid_list = []
    conflict_list = []
    vm_list.each do |row|
      vm_name = row['name']
      if vm_name.blank?
        invalid_list << describe_non_vm(vm_name)
        next
      end
      # Narrow the lookup with any optional identifying columns supplied.
      query = Vm.where(:name => vm_name)
      query = query.where(:uid_ems => row['uid_ems']) if row['uid_ems'].present?
      query = query.joins(:host).where(:hosts => {:name => row['host']}) if row['host'].present?
      query = query.joins(:ext_management_system).where(:ext_management_systems => {:name => row['provider']}) if row['provider'].present?
      vms = query.to_a
      if vms.size.zero?
        invalid_list << describe_non_vm(vm_name)
      elsif vms.size == 1
        reason = validate_vm(vms.first, false)
        (reason == VM_VALID ? valid_list : invalid_list) << describe_vm(vms.first, reason)
      else
        vms.each { |vm| conflict_list << describe_vm(vm, VM_CONFLICT) }
      end
    end
    {
      "valid_vms" => valid_list,
      "invalid_vms" => invalid_list,
      "conflict_vms" => conflict_list
    }
  end
  # Describes a row that did not resolve to an existing VM.
  def describe_non_vm(vm_name)
    {
      "name" => vm_name,
      "reason" => vm_name.blank? ? VM_EMPTY_NAME : VM_NOT_EXIST
    }
  end
  # Serializes a VM and its validation reason for the response payload.
  # ems_cluster_id backs the Target Cluster column in the OSP UI.
  def describe_vm(vm, reason)
    {
      "name" => vm.name,
      "cluster" => vm.ems_cluster.try(:name) || '',
      "path" => vm.ext_management_system ? "#{vm.ext_management_system.name}/#{vm.parent_blue_folder_path(:exclude_non_display_folders => true)}" : '',
      "allocated_size" => vm.allocated_disk_storage,
      "id" => vm.id,
      "ems_cluster_id" => vm.ems_cluster_id,
      "reason" => reason
    }
  end
  # Returns VM_VALID or a reason message; +quick+ stops at the first
  # unmapped resource instead of collecting them all.
  def validate_vm(vm, quick = true)
    validate_result = vm.validate_v2v_migration
    return validate_result unless validate_result == VM_VALID
    # a valid vm must find all resources in the mapping and has never been migrated
    invalid_list = []
    unless valid_cluster?(vm)
      invalid_list << "cluster: %{name}" % {:name => vm.ems_cluster.name}
      return no_mapping_msg(invalid_list) if quick
    end
    invalid_storages = unmapped_storages(vm)
    if invalid_storages.present?
      invalid_list << "storages: %{list}" % {:list => invalid_storages.collect(&:name).join(", ")}
      return no_mapping_msg(invalid_list) if quick
    end
    invalid_lans = unmapped_lans(vm)
    if invalid_lans.present?
      invalid_list << "lans: %{list}" % {:list => invalid_lans.collect(&:name).join(', ')}
      return no_mapping_msg(invalid_list) if quick
    end
    invalid_list.present? ? no_mapping_msg(invalid_list) : VM_VALID
  end
  def no_mapping_msg(list)
    "Mapping source not found - %{list}" % {:list => list.join('. ')}
  end
  # True when the VM's cluster is a mapped source.
  def valid_cluster?(vm)
    transformation_mapping_items.where(:source => vm.ems_cluster).exists?
  end
  # return an empty array if all storages are valid for transformation
  # otherwise return an array of invalid datastores
  def unmapped_storages(vm)
    vm.datastores - transformation_mapping_items.where(:source => vm.datastores).collect(&:source)
  end
  # return an empty array if all lans are valid for transformation
  # otherwise return an array of invalid lans
  def unmapped_lans(vm)
    vm.lans - transformation_mapping_items.where(:source => vm.lans).collect(&:source)
  end
end
|
module Queries
  # Fetches denormalized web content item rows — a content item joined to
  # its location, state, translation and user-facing version — via Arel,
  # and wraps them in WebContentItem objects.
  class GetWebContentItems
    extend ArelHelpers
    # Builds WebContentItem objects for the given content item ids.
    def self.call(content_item_ids)
      content_items = table(:content_items)
      get_rows(scope.where(content_items[:id].in(content_item_ids))).map do |row|
        WebContentItem.from_hash(row)
      end
    end
    # Arel query projecting every column WebContentItem.from_hash needs.
    def self.scope
      content_items = table(:content_items)
      locations = table(:locations)
      states = table(:states)
      translations = table(:translations)
      user_facing_versions = table(:user_facing_versions)
      content_items
        .project(
          content_items[:id],
          content_items[:analytics_identifier],
          content_items[:content_id],
          content_items[:description],
          content_items[:details],
          content_items[:document_type],
          content_items[:first_published_at],
          content_items[:last_edited_at],
          content_items[:need_ids],
          content_items[:phase],
          content_items[:public_updated_at],
          content_items[:publishing_app],
          content_items[:redirects],
          content_items[:rendering_app],
          content_items[:routes],
          content_items[:schema_name],
          content_items[:title],
          content_items[:update_type],
          locations[:base_path],
          states[:name].as("state"),
          translations[:locale],
          user_facing_versions[:number].as("user_facing_version")
        )
        # Left join: a content item may have no location row.
        .outer_join(locations).on(content_items[:id].eq(locations[:content_item_id]))
        .join(states).on(content_items[:id].eq(states[:content_item_id]))
        .join(translations).on(content_items[:id].eq(translations[:content_item_id]))
        .join(user_facing_versions).on(content_items[:id].eq(user_facing_versions[:content_item_id]))
    end
  end
end
Add a find method for singular usage
module Queries
  # Fetches denormalized web content item rows — a content item joined to
  # its location, state, translation and user-facing version — via Arel,
  # and wraps them in WebContentItem objects.
  class GetWebContentItems
    extend ArelHelpers
    # Builds WebContentItem objects for the given content item ids.
    def self.call(content_item_ids)
      content_items = table(:content_items)
      get_rows(scope.where(content_items[:id].in(content_item_ids))).map do |row|
        WebContentItem.from_hash(row)
      end
    end
    # Singular convenience wrapper: first (or nil) WebContentItem for id.
    def self.find(content_item_id)
      call(content_item_id).first
    end
    # Arel query projecting every column WebContentItem.from_hash needs.
    def self.scope
      content_items = table(:content_items)
      locations = table(:locations)
      states = table(:states)
      translations = table(:translations)
      user_facing_versions = table(:user_facing_versions)
      content_items
        .project(
          content_items[:id],
          content_items[:analytics_identifier],
          content_items[:content_id],
          content_items[:description],
          content_items[:details],
          content_items[:document_type],
          content_items[:first_published_at],
          content_items[:last_edited_at],
          content_items[:need_ids],
          content_items[:phase],
          content_items[:public_updated_at],
          content_items[:publishing_app],
          content_items[:redirects],
          content_items[:rendering_app],
          content_items[:routes],
          content_items[:schema_name],
          content_items[:title],
          content_items[:update_type],
          locations[:base_path],
          states[:name].as("state"),
          translations[:locale],
          user_facing_versions[:number].as("user_facing_version")
        )
        # Left join: a content item may have no location row.
        .outer_join(locations).on(content_items[:id].eq(locations[:content_item_id]))
        .join(states).on(content_items[:id].eq(states[:content_item_id]))
        .join(translations).on(content_items[:id].eq(translations[:content_item_id]))
        .join(user_facing_versions).on(content_items[:id].eq(user_facing_versions[:content_item_id]))
    end
  end
end
|
# frozen_string_literal: true

# Gem specification for gir_ffi-pango (GirFFI-based Pango bindings).
Gem::Specification.new do |spec|
  spec.name = "gir_ffi-pango"
  spec.version = "0.0.14"
  spec.authors = ["Matijs van Zuijlen"]
  spec.email = ["matijs@matijs.net"]
  spec.summary = "GirFFI-based bindings for Pango"
  spec.description =
    "Bindings for Pango generated by GirFFI, with an eclectic set of overrides."
  spec.homepage = "http://www.github.com/mvz/gir_ffi-pango"
  spec.license = "LGPL-2.1"
  spec.required_ruby_version = ">= 2.5.0"
  spec.metadata["homepage_uri"] = spec.homepage
  spec.metadata["source_code_uri"] = "https://github.com/mvz/gir_ffi-pango"
  spec.metadata["changelog_uri"] = "https://github.com/mvz/gir_ffi-pango/blob/master/Changelog.md"
  # File list is maintained in Manifest.txt (kept current by rake-manifest).
  spec.files = File.read("Manifest.txt").split
  spec.require_paths = ["lib"]
  spec.add_runtime_dependency "gir_ffi", "~> 0.15.2"
  spec.add_development_dependency "minitest", "~> 5.12"
  spec.add_development_dependency "rake", "~> 13.0"
  spec.add_development_dependency "rake-manifest", "~> 0.1.0"
  spec.add_development_dependency "simplecov", "~> 0.19.0"
end
Add rubocop gems as development dependencies
# frozen_string_literal: true

# Gem specification for gir_ffi-pango (GirFFI-based Pango bindings).
Gem::Specification.new do |spec|
  spec.name = "gir_ffi-pango"
  spec.version = "0.0.14"
  spec.authors = ["Matijs van Zuijlen"]
  spec.email = ["matijs@matijs.net"]
  spec.summary = "GirFFI-based bindings for Pango"
  spec.description =
    "Bindings for Pango generated by GirFFI, with an eclectic set of overrides."
  spec.homepage = "http://www.github.com/mvz/gir_ffi-pango"
  spec.license = "LGPL-2.1"
  spec.required_ruby_version = ">= 2.5.0"
  spec.metadata["homepage_uri"] = spec.homepage
  spec.metadata["source_code_uri"] = "https://github.com/mvz/gir_ffi-pango"
  spec.metadata["changelog_uri"] = "https://github.com/mvz/gir_ffi-pango/blob/master/Changelog.md"
  # File list is maintained in Manifest.txt (kept current by rake-manifest).
  spec.files = File.read("Manifest.txt").split
  spec.require_paths = ["lib"]
  spec.add_runtime_dependency "gir_ffi", "~> 0.15.2"
  spec.add_development_dependency "minitest", "~> 5.12"
  spec.add_development_dependency "rake", "~> 13.0"
  spec.add_development_dependency "rake-manifest", "~> 0.1.0"
  # RuboCop and its extensions used for linting in CI.
  spec.add_development_dependency "rubocop", "~> 0.92.0"
  spec.add_development_dependency "rubocop-minitest", "~> 0.10.1"
  spec.add_development_dependency "rubocop-packaging", "~> 0.5.0"
  spec.add_development_dependency "rubocop-performance", "~> 1.8.0"
  spec.add_development_dependency "simplecov", "~> 0.19.0"
end
|
# coding: utf-8
# Make lib/ loadable so the version constant can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'git_clone_url/version'

# Gem specification for git_clone_url (git clone URL parser).
Gem::Specification.new do |spec|
  spec.name          = 'git_clone_url'
  spec.version       = GitCloneUrl::VERSION
  spec.authors       = ['sanemat']
  spec.email         = ['o.gata.ken@gmail.com']
  spec.summary       = 'Parse git clone url'
  spec.description   = 'This supports https protocol, git protocol and ssh protocol.'
  spec.homepage      = 'https://github.com/packsaddle/ruby-git_clone_url'
  spec.license       = 'MIT'
  # Package all tracked files except tests and CI configuration.
  spec.files         = \
    `git ls-files -z`
    .split("\x0")
    .reject { |f| f.match(%r{^(test|spec|features)/}) }
    .reject do |f|
      [
        '.travis.yml',
        'circle.yml',
        '.tachikoma.yml'
      ].include?(f)
    end
  spec.bindir        = 'exe'
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']
  spec.add_runtime_dependency 'uri-ssh_git'
  spec.add_development_dependency 'bundler', '~> 1.8'
  spec.add_development_dependency 'rake', '~> 10.0'
  spec.add_development_dependency 'test-unit'
end
chore(gem): relax dependencies
# coding: utf-8
# Make lib/ loadable so the version constant can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'git_clone_url/version'

# Gem specification for git_clone_url (git clone URL parser).
Gem::Specification.new do |spec|
  spec.name          = 'git_clone_url'
  spec.version       = GitCloneUrl::VERSION
  spec.authors       = ['sanemat']
  spec.email         = ['o.gata.ken@gmail.com']
  spec.summary       = 'Parse git clone url'
  spec.description   = 'This supports https protocol, git protocol and ssh protocol.'
  spec.homepage      = 'https://github.com/packsaddle/ruby-git_clone_url'
  spec.license       = 'MIT'
  # Package all tracked files except tests and CI configuration.
  spec.files         = \
    `git ls-files -z`
    .split("\x0")
    .reject { |f| f.match(%r{^(test|spec|features)/}) }
    .reject do |f|
      [
        '.travis.yml',
        'circle.yml',
        '.tachikoma.yml'
      ].include?(f)
    end
  spec.bindir        = 'exe'
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']
  spec.add_runtime_dependency 'uri-ssh_git'
  # Development dependencies are intentionally unpinned.
  spec.add_development_dependency 'bundler'
  spec.add_development_dependency 'rake'
  spec.add_development_dependency 'test-unit'
end
|
class CreateAdminService
  # Idempotently ensures the admin account configured in Rails secrets
  # exists, confirmed and flagged as admin.
  # @return [User] the existing or newly created admin user
  def call
    User.find_or_create_by!(email: Rails.application.secrets.admin_email) do |user|
      user.password = Rails.application.secrets.admin_password
      user.password_confirmation = Rails.application.secrets.admin_password
      # Assign all attributes BEFORE confirm!/admin!: those calls persist
      # the record, so attributes set after them were being lost.
      user.first_name = Rails.application.secrets.admin_firstname
      user.last_name = Rails.application.secrets.admin_lastname
      user.phone_number = Rails.application.secrets.admin_phone
      user.confirm!
      user.admin!
    end
  end
end
Corrected assignment order.
class CreateAdminService
  # Idempotently ensures the admin account configured in Rails secrets
  # exists, confirmed and flagged as admin.
  # @return [User] the existing or newly created admin user
  def call
    user = User.find_or_create_by!(email: Rails.application.secrets.admin_email) do |user|
      user.password = Rails.application.secrets.admin_password
      user.password_confirmation = Rails.application.secrets.admin_password
      # Attributes are assigned before confirm!/admin! so they are
      # persisted when those calls save the record.
      user.first_name = Rails.application.secrets.admin_firstname
      user.last_name = Rails.application.secrets.admin_lastname
      user.phone_number = Rails.application.secrets.admin_phone
      user.confirm!
      user.admin!
    end
  end
end
|
class ProcessFeedService < BaseService
ACTIVITY_NS = 'http://activitystrea.ms/spec/1.0/'.freeze
THREAD_NS = 'http://purl.org/syndication/thread/1.0'.freeze
# Create local statuses from an Atom feed
# @param [String] body Atom feed
# @param [Account] account Account this feed belongs to
# @return [Enumerable] created statuses
def call(body, account)
  xml = Nokogiri::XML(body)
  # Refresh the remote profile from the feed's author block when present
  update_remote_profile_service.call(xml.at_xpath('/xmlns:feed/xmlns:author'), account) unless xml.at_xpath('/xmlns:feed').nil?
  # Oldest-first so threads/reblogs can reference earlier entries
  xml.xpath('//xmlns:entry').reverse_each.map { |entry| process_entry(account, entry) }.compact
end
private
# Processes one Atom entry: creates, deletes or ignores a status.
# Returns the created Status, or nil when nothing was created.
def process_entry(account, entry)
  return unless [:note, :comment, :activity].include? object_type(entry)
  status = Status.find_by(uri: activity_id(entry))
  # If we already have a post and the verb is now "delete", we gotta delete it and move on!
  if !status.nil? && verb(entry) == :delete
    delete_post!(status)
    return
  end
  # Already known and not a delete: nothing to do
  return unless status.nil?
  status = Status.new(uri: activity_id(entry), url: activity_link(entry), account: account, text: content(entry), created_at: published(entry), updated_at: updated(entry))
  if verb(entry) == :share
    add_reblog!(entry, status)
  elsif verb(entry) == :post
    if thread_id(entry).nil?
      add_post!(entry, status)
    else
      add_reply!(entry, status)
    end
  else
    return
  end
  # If we added a status, go through accounts it mentions and create respective relations
  # Also record all media attachments for the status and for the reblogged status if present
  unless status.new_record?
    record_remote_mentions(status, entry.xpath('./xmlns:link[@rel="mentioned"]'))
    record_remote_mentions(status.reblog, entry.at_xpath('./activity:object', activity: ACTIVITY_NS).xpath('./xmlns:link[@rel="mentioned"]')) if status.reblog?
    if status.reblog?
      ProcessHashtagsService.new.call(status.reblog, entry.at_xpath('./activity:object', activity: ACTIVITY_NS).xpath('./xmlns:category').map { |category| category['term'] })
    else
      ProcessHashtagsService.new.call(status, entry.xpath('./xmlns:category').map { |category| category['term'] })
    end
    process_attachments(entry, status)
    process_attachments(entry.xpath('./activity:object', activity: ACTIVITY_NS), status.reblog) if status.reblog?
    Rails.logger.debug "Queuing remote status #{status.id} for distribution"
    DistributionWorker.perform_async(status.id)
    return status
  end
end
# Creates Mention records (and notifications for local users) for each
# "mentioned" link of a remote status.
def record_remote_mentions(status, links)
  return if status.local?
  # Here we have to do a reverse lookup of local accounts by their URL!
  # It's not pretty at all! I really wish all these protocols sticked to
  # using acct:username@domain only! It would make things so much easier
  # and tidier
  links.each do |mention_link|
    href_val = mention_link.attribute('href').value
    # Skip the public-collection pseudo-mention
    next if href_val == 'http://activityschema.org/collection/public'
    href = Addressable::URI.parse(href_val)
    if TagManager.instance.local_domain?(href.host)
      # A local user is mentioned
      mentioned_account = Account.find_local(href.path.gsub('/users/', ''))
      unless mentioned_account.nil?
        mentioned_account.mentions.where(status: status).first_or_create(status: status)
        NotificationMailer.mention(mentioned_account, status).deliver_later unless mentioned_account.blocking?(status.account)
      end
    else
      # What to do about remote user?
      # This is kinda dodgy because URLs could change, we don't index them
      mentioned_account = Account.find_by(url: href.to_s)
      if mentioned_account.nil?
        mentioned_account = FetchRemoteAccountService.new.call(href)
      end
      unless mentioned_account.nil?
        mentioned_account.mentions.where(status: status).first_or_create(status: status)
      end
    end
  end
end
# Mirrors media attachments of a remote status: downloads every
# <link rel="enclosure"> target that is not stored locally yet.
# Local statuses are skipped — their attachments already live here.
def process_attachments(entry, status)
  return if status.local?

  entry.xpath('./xmlns:link[@rel="enclosure"]').each do |enclosure_link|
    next if enclosure_link.attribute('href').nil?

    media = MediaAttachment.where(status: status, remote_url: enclosure_link.attribute('href').value).first

    # Already mirrored on a previous pass
    next unless media.nil?

    begin
      media = MediaAttachment.new(account: status.account, status: status, remote_url: enclosure_link.attribute('href').value)
      media.file_remote_url = enclosure_link.attribute('href').value
      media.save
    rescue Paperclip::Errors::NotIdentifiedByImageMagickError
      # Download was not a usable image — log and move on to the next link.
      Rails.logger.debug "Error saving attachment from #{enclosure_link.attribute('href').value}"
      next
    end
  end
end
# Persists a plain (non-reblog, non-reply) status. The entry XML is unused.
def add_post!(_entry, status)
  status.save!
end
# Attaches the reblogged (shared) status to `status` and persists it.
# Looks the target up locally first, then falls back to fetching it from
# the remote instance. Nothing is saved when the target cannot be found.
def add_reblog!(entry, status)
  status.reblog = find_original_status(entry, target_id(entry)) || fetch_remote_status(entry)
  return if status.reblog.nil?

  status.save!
  NotificationMailer.reblog(status.reblog, status.account).deliver_later if status.reblog.local? && !status.reblog.account.blocking?(status.account)
end
# Persists a reply, linking it to its parent status when that one is known
# locally; otherwise schedules asynchronous thread resolution via the
# entry's in-reply-to href.
def add_reply!(entry, status)
  status.thread = find_original_status(entry, thread_id(entry))
  status.save!

  parent_href = thread_href(entry)
  ThreadResolveWorker.perform_async(status.id, parent_href) if status.thread.nil? && !parent_href.nil?
end
# Removes a status (and all its side effects) via RemoveStatusService.
def delete_post!(status)
  remove_status_service.call(status)
end
# Resolves a status by its Atom ID. Local tag URIs are decoded to a primary
# key; anything else is looked up by the stored remote URI.
# Returns nil when the ID is missing or no remote match exists.
def find_original_status(_xml, id)
  return nil if id.nil?
  return Status.find_by(uri: id) unless TagManager.instance.local_id?(id)

  Status.find(TagManager.instance.unique_tag_to_local_id(id, 'Status'))
end
# Builds and saves a local copy of a remote status referenced by an Atom
# activity (used when a reblog's target is not yet known locally).
# Resolves the author via webfinger when the account is unknown and kicks
# off thread resolution when the in-reply-to parent is missing.
# Returns the new Status, or nil when webfinger/HTTP lookup fails.
def fetch_remote_status(xml)
  username = xml.at_xpath('./activity:object', activity: ACTIVITY_NS).at_xpath('./xmlns:author/xmlns:name').content
  url = xml.at_xpath('./activity:object', activity: ACTIVITY_NS).at_xpath('./xmlns:author/xmlns:uri').content
  domain = Addressable::URI.parse(url).host

  account = Account.find_remote(username, domain)

  if account.nil?
    account = follow_remote_account_service.call("#{username}@#{domain}")
  end

  status = Status.new(account: account, uri: target_id(xml), text: target_content(xml), url: target_url(xml), created_at: published(xml), updated_at: updated(xml))
  status.thread = find_original_status(xml, thread_id(xml))

  if status.save && status.thread.nil? && !thread_href(xml).nil?
    ThreadResolveWorker.perform_async(status.id, thread_href(xml))
  end

  status
rescue Goldfinger::Error, HTTP::Error
  nil
end
# --- Atom entry accessors -------------------------------------------------

# Publication timestamp string of the entry.
def published(xml)
  xml.at_xpath('./xmlns:published').content
end

# Last-update timestamp string of the entry.
def updated(xml)
  xml.at_xpath('./xmlns:updated').content
end

# Entry body, or nil when no <content> element is present.
def content(xml)
  xml.at_xpath('./xmlns:content').try(:content)
end

# URI of the status this entry replies to; nil when absent or malformed.
def thread_id(xml)
  xml.at_xpath('./thr:in-reply-to', thr: THREAD_NS).attribute('ref').value
rescue
  nil
end

# URL of the status this entry replies to; nil when absent or malformed.
def thread_href(xml)
  xml.at_xpath('./thr:in-reply-to', thr: THREAD_NS).attribute('href').value
rescue
  nil
end

# ID of the activity's target object (e.g. a reblogged status); nil when absent.
def target_id(xml)
  xml.at_xpath('.//activity:object', activity: ACTIVITY_NS).at_xpath('./xmlns:id').content
rescue
  nil
end

# Unique Atom ID of the entry itself.
def activity_id(xml)
  xml.at_xpath('./xmlns:id').content
end

# Alternate (HTML) link of the entry; empty string when absent.
def activity_link(xml)
  xml.at_xpath('./xmlns:link[@rel="alternate"]').attribute('href').value
rescue
  ''
end

# Text content of the activity's target object.
def target_content(xml)
  xml.at_xpath('.//activity:object', activity: ACTIVITY_NS).at_xpath('./xmlns:content').content
end

# Alternate (HTML) URL of the activity's target object.
def target_url(xml)
  xml.at_xpath('.//activity:object', activity: ACTIVITY_NS).at_xpath('./xmlns:link[@rel="alternate"]').attribute('href').value
end

# Object type as a bare symbol (e.g. :note, :comment); defaults to :activity.
def object_type(xml)
  xml.at_xpath('./activity:object-type', activity: ACTIVITY_NS).content.gsub('http://activitystrea.ms/schema/1.0/', '').gsub('http://ostatus.org/schema/1.0/', '').to_sym
rescue
  :activity
end

# Activity verb as a bare symbol (e.g. :post, :share); defaults to :post.
def verb(xml)
  xml.at_xpath('./activity:verb', activity: ACTIVITY_NS).content.gsub('http://activitystrea.ms/schema/1.0/', '').gsub('http://ostatus.org/schema/1.0/', '').to_sym
rescue
  :post
end
# --- Memoized collaborator services (one instance per service object) -----

def follow_remote_account_service
  @follow_remote_account_service ||= FollowRemoteAccountService.new
end

def update_remote_profile_service
  @update_remote_profile_service ||= UpdateRemoteProfileService.new
end

def remove_status_service
  @remove_status_service ||= RemoveStatusService.new
end
end
ProcessFeedService refactor
# Processes an OStatus Atom feed: refreshes the author's remote profile and
# turns every entry into local records (status creations, shares, deletions).
class ProcessFeedService < BaseService
  ACTIVITY_NS = 'http://activitystrea.ms/spec/1.0/'.freeze
  THREAD_NS = 'http://purl.org/syndication/thread/1.0'.freeze

  # body    - raw Atom XML string
  # account - the remote Account the feed belongs to
  # Returns the array of processed statuses (nil results removed).
  def call(body, account)
    xml = Nokogiri::XML(body)

    update_author(xml, account)
    process_entries(xml, account)
  end

  private

  def update_author(xml, account)
    return if xml.at_xpath('/xmlns:feed').nil?
    UpdateRemoteProfileService.new.call(xml.at_xpath('/xmlns:feed/xmlns:author'), account)
  end

  # Entries are processed oldest-first so that replies and reblogs can
  # reference earlier statuses delivered in the same feed.
  def process_entries(xml, account)
    xml.xpath('//xmlns:entry').reverse_each.map { |entry| ProcessEntry.new.call(entry, account) }.compact
  end

  # Handles one Atom <entry> element.
  class ProcessEntry
    # Returns the created Status, or nil for deletions/unsupported entries.
    def call(xml, account)
      @account = account
      @xml = xml

      return if skip_unsupported_type?

      case verb
      when :post, :share
        return create_status
      when :delete
        return delete_status
      end
    end

    private

    def create_status
      Rails.logger.debug "Creating remote status #{id}"
      status = status_from_xml(@xml)

      if verb == :share
        # FIX: was a bare `xml`, which is neither a local variable nor a
        # method here and raised NameError for every share (reblog) entry.
        original_status = status_from_xml(@xml.at_xpath('.//activity:object', activity: ACTIVITY_NS))
        status.reblog = original_status
      end

      status.save!

      Rails.logger.debug "Queuing remote status #{status.id} (#{id}) for distribution"
      DistributionWorker.perform_async(status.id)

      status
    end

    def delete_status
      Rails.logger.debug "Deleting remote status #{id}"
      status = Status.find_by(uri: id)
      RemoveStatusService.new.call(status) unless status.nil?
      nil
    end

    # Only post/share/delete of activities, notes and comments are handled.
    def skip_unsupported_type?
      !([:post, :share, :delete].include?(verb) && [:activity, :note, :comment].include?(type))
    end

    # Builds (or returns the already-stored) Status for the given entry node.
    def status_from_xml(entry)
      # Return early if status already exists in db
      status = find_status(id(entry))
      return status unless status.nil?

      status = Status.create!({
        uri: id(entry),
        url: url(entry),
        account: account?(entry) ? find_or_resolve_account(acct(entry)) : @account,
        text: content(entry),
        created_at: published(entry),
      })

      if thread?(entry)
        status.thread = find_or_resolve_status(status, *thread(entry))
      end

      mentions_from_xml(status, entry)
      hashtags_from_xml(status, entry)
      media_from_xml(status, entry)

      status
    end

    def find_or_resolve_account(acct)
      FollowRemoteAccountService.new.call(acct)
    end

    # Looks the parent status up by URI; schedules async resolution via its
    # URL when it is not yet known locally.
    def find_or_resolve_status(parent, uri, url)
      status = find_status(uri)

      ThreadResolveWorker.perform_async(parent.id, url) if status.nil?

      status
    end

    def find_status(uri)
      if TagManager.instance.local_id?(uri)
        local_id = TagManager.instance.unique_tag_to_local_id(uri, 'Status')
        return Status.find(local_id)
      end

      Status.find_by(uri: uri)
    end

    def mentions_from_xml(parent, xml)
      processed_account_ids = []

      xml.xpath('./xmlns:link[@rel="mentioned"]').each do |link|
        # The OStatus "public collection" pseudo-address is not an account.
        next if link['href'] == 'http://activityschema.org/collection/public'

        url = Addressable::URI.parse(link['href'])

        mentioned_account = if TagManager.instance.local_domain?(url.host)
                              Account.find_local(url.path.gsub('/users/', ''))
                            else
                              Account.find_by(url: link['href']) || FetchRemoteAccountService.new.call(link['href'])
                            end

        next if mentioned_account.nil? || processed_account_ids.include?(mentioned_account.id)

        if mentioned_account.local?
          # Send notifications
          NotificationMailer.mention(mentioned_account, parent).deliver_later unless mentioned_account.blocking?(parent.account)
        end

        mentioned_account.mentions.where(status: parent).first_or_create(status: parent)

        # So we can skip duplicate mentions
        processed_account_ids << mentioned_account.id
      end
    end

    def hashtags_from_xml(parent, xml)
      tags = xml.xpath('./xmlns:category').map { |category| category['term'] }
      ProcessHashtagsService.new.call(parent, tags)
    end

    def media_from_xml(parent, xml)
      xml.xpath('./xmlns:link[@rel="enclosure"]').each do |link|
        next unless link['href']

        media = MediaAttachment.where(status: parent, remote_url: link['href']).first_or_initialize(account: parent.account, status: parent, remote_url: link['href'])

        begin
          media.file_remote_url = link['href']
          media.save
        rescue Paperclip::Errors::NotIdentifiedByImageMagickError
          # Download was not a usable image — skip this enclosure.
          next
        end
      end
    end

    # --- Atom accessors; all default to the entry under processing --------

    def id(xml = @xml)
      xml.at_xpath('./xmlns:id').content
    end

    # Activity verb as a bare symbol; defaults to :post.
    def verb(xml = @xml)
      raw = xml.at_xpath('./activity:verb', activity: ACTIVITY_NS).content
      raw.gsub('http://activitystrea.ms/schema/1.0/', '').gsub('http://ostatus.org/schema/1.0/', '').to_sym
    rescue
      :post
    end

    # Object type as a bare symbol; defaults to :activity.
    def type(xml = @xml)
      raw = xml.at_xpath('./activity:object-type', activity: ACTIVITY_NS).content
      raw.gsub('http://activitystrea.ms/schema/1.0/', '').gsub('http://ostatus.org/schema/1.0/', '').to_sym
    rescue
      :activity
    end

    def url(xml = @xml)
      link = xml.at_xpath('./xmlns:link[@rel="alternate"]')
      link['href']
    end

    def content(xml = @xml)
      xml.at_xpath('./xmlns:content').content
    end

    def published(xml = @xml)
      xml.at_xpath('./xmlns:published').content
    end

    def thread?(xml = @xml)
      !xml.at_xpath('./thr:in-reply-to', thr: THREAD_NS).nil?
    end

    # [parent URI, parent URL] of the in-reply-to element.
    def thread(xml = @xml)
      thr = xml.at_xpath('./thr:in-reply-to', thr: THREAD_NS)
      [thr['ref'], thr['href']]
    end

    def account?(xml = @xml)
      !xml.at_xpath('./xmlns:author').nil?
    end

    # username@domain handle built from the entry's author element.
    def acct(xml = @xml)
      username = xml.at_xpath('./xmlns:author/xmlns:name').content
      url = xml.at_xpath('./xmlns:author/xmlns:uri').content
      domain = Addressable::URI.parse(url).host

      "#{username}@#{domain}"
    end
  end
end
|
require "Practica9/Fraccion.rb"
# Matrix backed by a nested Array of rows. Rows are read with m[i] and
# single cells written with m[i, j] = x. Element types only need to
# support +, * and == (e.g. Integer or Fraccion).
class Matriz
  attr_reader :filas, :columnas, :valor, :matriz, :m1, :m2
  attr_writer :resultado

  # valor_entrada - rectangular Array of rows; the rows are copied so later
  #                 writes do not alias the caller's arrays.
  def initialize(valor_entrada)
    @filas = valor_entrada.length
    @columnas = valor_entrada[0].length
    @matriz = valor_entrada.map(&:dup)
  end

  # Index header line followed by one line per row.
  # NOTE(review): the header uses the row count (@filas) although it labels
  # columns — kept as-is to preserve the original output.
  def to_s
    indices = (0...@filas).to_a
    txt = "\n #{indices.join(" ")}\n"
    @matriz.each_with_index do |fila, i|
      txt += "#{i} #{fila}\n"
    end
    txt
  end

  # Prints any nested-array matrix with the same layout as #to_s.
  def muestra_matriz(matriz)
    rango_txt = (0...matriz[0].length).to_a
    print "\n #{rango_txt.join(" ")}\n"
    matriz.each_with_index do |fila, i|
      puts "#{i} #{fila}"
    end
  end

  # Builds a filas x columnas nested array filled with valor.
  # CAUTION: all cells share the very same `valor` object when it is mutable.
  def hacer_matriz(filas, columnas, valor)
    Array.new(filas) { Array.new(columnas, valor) }
  end

  # Matrix product; assumes self.columnas == m2.filas.
  # FIX: removed the debug `puts` calls and the side-effect assignment
  # (`temp[0] = @matriz[i][0] * val2 = m2[0][j]`) of the original.
  # Accumulation starts from the first product (not a literal 0) so element
  # types such as Fraccion need no integer coercion.
  def *(m2)
    producto = Array.new(@filas) do |i|
      Array.new(m2.columnas) do |j|
        (0...@columnas).map { |k| @matriz[i][k] * m2[k][j] }.reduce(:+)
      end
    end
    Matriz.new(producto)
  end

  # Element-wise sum; assumes both operands share the same dimensions.
  def +(m2)
    suma = Array.new(@filas) do |i|
      Array.new(@columnas) { |j| @matriz[i][j] + m2[i][j] }
    end
    Matriz.new(suma)
  end

  # Writes one cell.
  def []=(i, j, x)
    @matriz[i][j] = x
  end

  # Returns row i. (The original also declared a dead `[](i, j)` overload
  # that was immediately shadowed by this definition; it has been removed.)
  def [](i)
    @matriz[i]
  end

  # Equality: matching dimensions (when the operand exposes them) and
  # matching elements. FIX: the original raised NoMethodError when `other`
  # had fewer rows than self.
  def ==(other)
    if other.respond_to?(:filas) && other.respond_to?(:columnas)
      return false unless @filas == other.filas && @columnas == other.columnas
    end

    (0...@filas).all? do |i|
      (0...@columnas).all? { |j| @matriz[i][j] == other[i][j] }
    end
  end
end
Clase MatrizDensa y MatrizDispersa creadas (vacias)
require "Practica9/Fraccion.rb"
# Matrix backed by a nested Array of rows. Rows are read with m[i] and
# single cells written with m[i, j] = x. Element types only need to
# support +, * and == (e.g. Integer or Fraccion).
class Matriz
  attr_reader :filas, :columnas, :valor, :matriz, :m1, :m2
  attr_writer :resultado

  # valor_entrada - rectangular Array of rows; the rows are copied so later
  #                 writes do not alias the caller's arrays.
  def initialize(valor_entrada)
    @filas = valor_entrada.length
    @columnas = valor_entrada[0].length
    @matriz = valor_entrada.map(&:dup)
  end

  # Index header line followed by one line per row.
  # NOTE(review): the header uses the row count (@filas) although it labels
  # columns — kept as-is to preserve the original output.
  def to_s
    indices = (0...@filas).to_a
    txt = "\n #{indices.join(" ")}\n"
    @matriz.each_with_index do |fila, i|
      txt += "#{i} #{fila}\n"
    end
    txt
  end

  # Prints any nested-array matrix with the same layout as #to_s.
  def muestra_matriz(matriz)
    rango_txt = (0...matriz[0].length).to_a
    print "\n #{rango_txt.join(" ")}\n"
    matriz.each_with_index do |fila, i|
      puts "#{i} #{fila}"
    end
  end

  # Builds a filas x columnas nested array filled with valor.
  # CAUTION: all cells share the very same `valor` object when it is mutable.
  def hacer_matriz(filas, columnas, valor)
    Array.new(filas) { Array.new(columnas, valor) }
  end

  # Matrix product; assumes self.columnas == m2.filas.
  # FIX: removed the debug `puts` calls and the side-effect assignment
  # (`temp[0] = @matriz[i][0] * val2 = m2[0][j]`) of the original.
  # Accumulation starts from the first product (not a literal 0) so element
  # types such as Fraccion need no integer coercion.
  def *(m2)
    producto = Array.new(@filas) do |i|
      Array.new(m2.columnas) do |j|
        (0...@columnas).map { |k| @matriz[i][k] * m2[k][j] }.reduce(:+)
      end
    end
    Matriz.new(producto)
  end

  # Element-wise sum; assumes both operands share the same dimensions.
  def +(m2)
    suma = Array.new(@filas) do |i|
      Array.new(@columnas) { |j| @matriz[i][j] + m2[i][j] }
    end
    Matriz.new(suma)
  end

  # Writes one cell.
  def []=(i, j, x)
    @matriz[i][j] = x
  end

  # Returns row i. (The original also declared a dead `[](i, j)` overload
  # that was immediately shadowed by this definition; it has been removed.)
  def [](i)
    @matriz[i]
  end

  # Equality: matching dimensions (when the operand exposes them) and
  # matching elements. FIX: the original raised NoMethodError when `other`
  # had fewer rows than self.
  def ==(other)
    if other.respond_to?(:filas) && other.respond_to?(:columnas)
      return false unless @filas == other.filas && @columnas == other.columnas
    end

    (0...@filas).all? do |i|
      (0...@columnas).all? { |j| @matriz[i][j] == other[i][j] }
    end
  end
end
# Sparse matrix specialization (work in progress).
class MatrizDispersa < Matriz
  # Placeholder for sparse+dense addition: currently only reports the
  # operand's class; no sum is computed yet.
  def +(other)
    puts(other.class)

    # FIX: the original wrote `other.class = Matriz` (assignment instead of
    # comparison), which raised NoMethodError whenever the first test failed.
    if (other.class == MatrizDensa) || (other.class == Matriz)
      puts(other.class)
    end
  end
end
# Dense matrix specialization — currently inherits all behaviour from Matriz.
class MatrizDensa < Matriz
end
|
# coding: utf-8

# Gem specification for stack_master — AWS CloudFormation stack management.

# Put lib/ on the load path so the version constant can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'stack_master/version'

Gem::Specification.new do |spec|
  spec.name = "stack_master"
  spec.version = StackMaster::VERSION
  spec.authors = ["Steve Hodgkiss", "Glen Stampoultzis"]
  spec.email = ["steve@hodgkiss.me", "gstamp@gmail.com"]
  spec.summary = %q{AWS Stack Management}
  spec.description = %q{}
  spec.homepage = "https://github.com/envato/stack_master"
  spec.license = "MIT"

  # Package every git-tracked file; bin/ entries become executables.
  spec.files = `git ls-files -z`.split("\x0")
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  # Development-only tooling
  spec.add_development_dependency "bundler", "~> 1.5"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "pry"

  # Runtime dependencies
  spec.add_dependency "commander"
  spec.add_dependency "virtus"
  spec.add_dependency "aws-sdk"
  spec.add_dependency "diffy"
  spec.add_dependency "colorize"
  spec.add_dependency "activesupport"
  spec.add_dependency "sparkle_formation", "~> 1.1"
  spec.add_dependency "table_print"
  spec.add_dependency "dotgpg"
end
Add aruba & cucumber for testing
# coding: utf-8

# Gem specification for stack_master — AWS CloudFormation stack management.

# Put lib/ on the load path so the version constant can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'stack_master/version'

Gem::Specification.new do |spec|
  spec.name = "stack_master"
  spec.version = StackMaster::VERSION
  spec.authors = ["Steve Hodgkiss", "Glen Stampoultzis"]
  spec.email = ["steve@hodgkiss.me", "gstamp@gmail.com"]
  spec.summary = %q{AWS Stack Management}
  spec.description = %q{}
  spec.homepage = "https://github.com/envato/stack_master"
  spec.license = "MIT"

  # Package every git-tracked file; bin/ entries become executables.
  spec.files = `git ls-files -z`.split("\x0")
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  # Development-only tooling (cucumber + aruba drive the CLI acceptance tests)
  spec.add_development_dependency "bundler", "~> 1.5"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "pry"
  spec.add_development_dependency "cucumber"
  spec.add_development_dependency "aruba"

  # Runtime dependencies
  spec.add_dependency "commander"
  spec.add_dependency "virtus"
  spec.add_dependency "aws-sdk"
  spec.add_dependency "diffy"
  spec.add_dependency "colorize"
  spec.add_dependency "activesupport"
  spec.add_dependency "sparkle_formation", "~> 1.1"
  spec.add_dependency "table_print"
  spec.add_dependency "dotgpg"
end
|
# CI Gemfile: test and coverage tooling only.
source "https://rubygems.org".freeze

gem "codeclimate-test-reporter".freeze
gem "rspec".freeze
gem "simplecov".freeze
Add rake to Travis gemfile.
# CI Gemfile: test and coverage tooling only (rake runs the suite).
source "https://rubygems.org".freeze

gem "codeclimate-test-reporter".freeze
gem "rake".freeze
gem "rspec".freeze
gem "simplecov".freeze
|
# -*- encoding: utf-8 -*-

# Gem specification for git-flattr — Flattr GitHub repositories from the CLI.

# Put lib/ on the load path so the version constant can be required below.
$:.push File.expand_path("../lib/", __FILE__)
require "version"

Gem::Specification.new do |s|
  s.name = 'git-flattr'
  s.version = GitFlattr::VERSION
  s.authors = ['Simon Gate']
  s.email = ['simon@smgt.me']
  s.summary = %q{Flattr GitHub repositories from the cli}

  # Package all git-tracked files; bin/ entries become executables.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }

  # Runtime dependencies
  s.add_runtime_dependency "flattr"
  s.add_runtime_dependency "launchy"
  s.add_runtime_dependency "commander"
end
Add rake to dev
# -*- encoding: utf-8 -*-

# Gem specification for git-flattr — Flattr GitHub repositories from the CLI.

# Put lib/ on the load path so the version constant can be required below.
$:.push File.expand_path("../lib/", __FILE__)
require "version"

Gem::Specification.new do |s|
  s.name = 'git-flattr'
  s.version = GitFlattr::VERSION
  s.authors = ['Simon Gate']
  s.email = ['simon@smgt.me']
  s.summary = %q{Flattr GitHub repositories from the cli}

  # Package all git-tracked files; bin/ entries become executables.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }

  # rake is only needed while developing
  s.add_development_dependency "rake"

  # Runtime dependencies
  s.add_runtime_dependency "flattr"
  s.add_runtime_dependency "launchy"
  s.add_runtime_dependency "commander"
end
|
#!/usr/bin/env ruby

# yourwaifu Twitter bot: listens to mentions and replies with a random
# character ("waifu", "husbando", ...) picked from YAML lists.

require "yaml"
require "twitter"
require "tumblr_client"
require "ostruct"

version = "v2.0.7"

# loading the config file
keys = YAML.load_file File.expand_path(".", "config.yml")

# loading the filter lists
FILTER_WORDS = YAML.load_file File.expand_path(".", "filters/words.yml")
FILTER_USERS = YAML.load_file File.expand_path(".", "filters/users.yml")
FILTER_CLIENTS = YAML.load_file File.expand_path(".", "filters/clients.yml")

# loading the lists containing characters
waifu = YAML.load_file File.expand_path(".", "lists/waifu.yml")
husbando = YAML.load_file File.expand_path(".", "lists/husbando.yml")
imouto = YAML.load_file File.expand_path(".", "lists/imouto.yml")
shipgirl = YAML.load_file File.expand_path(".", "lists/kancolle.yml")
touhou = YAML.load_file File.expand_path(".", "lists/touhou.yml")

# regex to get client name
SOURCE_REGEX = /^<a href=\"(https?:\/\/\S+|erased_\d+)\" rel=\"nofollow\">(.+)<\/a>$/

# Twitter client configuration (REST client for posting replies)
client = Twitter::REST::Client.new do |config|
  config.consumer_key = keys['twitter']['consumer_key']
  config.consumer_secret = keys['twitter']['consumer_secret']
  config.access_token = keys['twitter']['access_token']
  config.access_token_secret = keys['twitter']['access_token_secret']
end

# Streaming client for listening to the user stream
streamer = Twitter::Streaming::Client.new do |config|
  config.consumer_key = keys['twitter']['consumer_key']
  config.consumer_secret = keys['twitter']['consumer_secret']
  config.access_token = keys['twitter']['access_token']
  config.access_token_secret = keys['twitter']['access_token_secret']
end

# OPTIONAL: Tumblr configuration
if keys['tumblr']['enabled']
  Tumblr.configure do |config|
    config.consumer_key = keys['tumblr']['consumer_key']
    config.consumer_secret = keys['tumblr']['consumer_secret']
    config.oauth_token = keys['tumblr']['access_token']
    config.oauth_token_secret = keys['tumblr']['access_token_secret']
  end
  tumblr_client = Tumblr::Client.new
end

# Whether the Twitter status limit is currently in effect
limited = false

begin
  $current_user = client.current_user
rescue Exception => e
  puts "Exception: #{e.message}"
  # best hack: fall back to extracting the user id from the access token
  $current_user = OpenStruct.new
  $current_user.id = keys['twitter']["access_token"].split("-")[0].to_i
end

# Startup banner with list and filter sizes.
# FIX: the shipgirl and touhou lists were loaded above but missing from the
# banner — added for consistency with the later revision of this script.
puts "yourwaifu #{version}"
puts "-------------------------------"
puts "Entries: [\033[34;1m#{waifu.count}\033[0m] waifu"
puts "         [\033[34;1m#{shipgirl.count}\033[0m] shipgirls"
puts "         [\033[34;1m#{husbando.count}\033[0m] husbando"
puts "         [\033[34;1m#{touhou.count}\033[0m] touhou"
puts "         [\033[34;1m #{imouto.count}\033[0m] imouto"
puts "-------------------------------"
puts "Filters: [ \033[33;1m#{FILTER_WORDS.count}\033[0m] words"
puts "         [ \033[33;1m#{FILTER_USERS.count}\033[0m] users"
puts "         [ \033[33;1m#{FILTER_CLIENTS.count}\033[0m] clients"
puts "-------------------------------"

if keys['tumblr']['enabled']
  puts "\033[36;1mposting to Tumblr if status limit occurs\033[0m"
  puts "-------------------------------"
end
# Control-flow exceptions used by the tweet filters below.
# FIX: they now derive from StandardError instead of Exception, so a plain
# `rescue` can catch them and signals/exit are never swallowed by accident.

# Tweet is our own or a plain retweet — silently ignore it.
class NotImportantException < StandardError
end

# Base class for all filter hits.
class FilteredException < StandardError
end

# Tweet was sent from a blacklisted client application.
class FilteredClientException < FilteredException
end

# Tweet author is blacklisted.
class FilteredUserException < FilteredException
end

# Tweet text contains a blacklisted word.
class FilteredTweetException < FilteredException
end
# Filter helpers mixed into tweets from the user stream. Each raises a
# control-flow exception when the tweet must not be answered.
class Twitter::Tweet
  # The bot must never reply to itself.
  def raise_if_current_user!
    raise NotImportantException if $current_user.id == user.id
  end

  # Plain "RT @" retweets are ignored.
  def raise_if_retweet!
    raise NotImportantException if text.start_with? "RT @"
  end

  # Rejects tweets posted with a blacklisted client application.
  def raise_if_client_filtered!
    # Hoisted out of the loop — the tweet's source string never changes.
    # FIX: sources that do not match SOURCE_REGEX previously raised
    # NoMethodError on nil; they are now simply not client-filtered.
    match = source.match SOURCE_REGEX
    return if match.nil?

    client_name = match[2].downcase

    FILTER_CLIENTS.each do |fc|
      if client_name.include? fc.downcase
        raise FilteredClientException, "#{user.screen_name} is replying with #{fc}, a filtered client"
      end
    end
  end

  # Rejects tweets containing a blacklisted word.
  def raise_if_word_filtered!
    FILTER_WORDS.each do |fw|
      if text.downcase.include? fw.downcase
        raise FilteredTweetException, "#{user.screen_name} triggered filter: '#{fw}'"
      end
    end
  end

  # Rejects tweets from blacklisted accounts (substring match on the name).
  def raise_if_user_filtered!
    FILTER_USERS.each do |fu|
      if user.screen_name.downcase.include? fu.downcase
        raise FilteredUserException, "#{user.screen_name} is filtered, not going to reply"
      end
    end
  end
end
# Main loop: listen on the user stream and reply to every incoming tweet
# with a randomly chosen character. Stream errors fall through to the outer
# loop, which reconnects after a short pause.
loop do
  streamer.user do |object|
    if object.is_a? Twitter::Tweet
      begin
        # Filters: each raises when the tweet should be ignored.
        object.raise_if_current_user!
        object.raise_if_retweet!
        object.raise_if_client_filtered!
        object.raise_if_word_filtered!
        object.raise_if_user_filtered!

        # Pick the list matching the first keyword found in the tweet text.
        case object.text
        when /husbando?/i
          chosen_one = husbando.sample
          chosen_one['title'] = "husbando"
        when /imouto?/i
          chosen_one = imouto.sample
          chosen_one['title'] = "imouto"
        when /shipgirl?/i
          chosen_one = shipgirl.sample
          chosen_one['title'] = "shipgirl"
        when /touhou?/i
          chosen_one = touhou.sample
          chosen_one['title'] = "touhou"
        else
          chosen_one = waifu.sample
          chosen_one['title'] = "waifu"
        end

        puts "[#{Time.new.to_s}][#{chosen_one["title"]}] #{object.user.screen_name}: #{chosen_one["name"]} - #{chosen_one["series"]}"

        # Attach the character image when one exists on disk.
        # NOTE(review): File.exists? is deprecated in newer Rubies in favour
        # of File.exist? — confirm the targeted Ruby version.
        if File.exists? File.expand_path("../img/#{chosen_one["series"]}/#{chosen_one["name"]}.#{chosen_one["filetype"]}", __FILE__)
          client.update_with_media "@#{object.user.screen_name} Your #{chosen_one["title"]} is #{chosen_one["name"]} (#{chosen_one["series"]})", File.new("img/#{chosen_one["series"]}/#{chosen_one["name"]}.#{chosen_one["filetype"]}"), in_reply_to_status:object
        else
          client.update "@#{object.user.screen_name} Your #{chosen_one["title"]} is #{chosen_one["name"]} (#{chosen_one["series"]})", in_reply_to_status:object
          puts "\033[34;1m[#{Time.new.to_s}] posted without image!\033[0m"
        end

        # A successful post means the status limit is over — announce it once.
        if limited
          limited = false
          if keys['tumblr']['enabled']
            tumblr_client.text(keys['tumblr']['blog_name'], title: "I'm back!", body: "The limit is gone now and you can get waifus/husbandos again! [Bot has been unlimited since: #{Time.new.to_s}]")
          end
        end
      rescue NotImportantException => e
        # Own tweet or retweet — nothing to do.
      rescue FilteredClientException => e
        puts "\033[33;1m[#{Time.new.to_s}] #{e.message}\033[0m"
      rescue FilteredTweetException => e
        puts "\033[33;1m[#{Time.new.to_s}] #{e.message}\033[0m"
      rescue FilteredUserException => e
        puts "\033[33;1m[#{Time.new.to_s}] #{e.message}\033[0m"
      rescue Exception => e
        # NOTE(review): rescuing Exception also swallows SignalException —
        # confirm this is intended before narrowing to StandardError.
        puts "\033[31;1m[#{Time.new.to_s}] #{e.message}\033[0m"
        # Announce the Twitter status limit the first time it is hit.
        if e.message.match /update limit/i and !limited
          limited = true
          if keys['tumblr']['enabled']
            tumblr_client.text(keys['tumblr']['blog_name'], title: "Bot is limited", body: "I've reached the \"daily\" limit for now! Please wait a bit before mentioning me again. [Bot has been limited since: #{Time.new.to_s}]")
          end
        end
      end
    end
  end
  sleep 1
end
add new lists to startup information
#!/usr/bin/env ruby

# yourwaifu Twitter bot: listens to mentions and replies with a random
# character ("waifu", "husbando", ...) picked from YAML lists.

require "yaml"
require "twitter"
require "tumblr_client"
require "ostruct"

version = "v2.0.7"

# loading the config file
keys = YAML.load_file File.expand_path(".", "config.yml")

# loading the filter lists
FILTER_WORDS = YAML.load_file File.expand_path(".", "filters/words.yml")
FILTER_USERS = YAML.load_file File.expand_path(".", "filters/users.yml")
FILTER_CLIENTS = YAML.load_file File.expand_path(".", "filters/clients.yml")

# loading the lists containing characters
waifu = YAML.load_file File.expand_path(".", "lists/waifu.yml")
husbando = YAML.load_file File.expand_path(".", "lists/husbando.yml")
imouto = YAML.load_file File.expand_path(".", "lists/imouto.yml")
shipgirl = YAML.load_file File.expand_path(".", "lists/kancolle.yml")
touhou = YAML.load_file File.expand_path(".", "lists/touhou.yml")

# regex to get client name
SOURCE_REGEX = /^<a href=\"(https?:\/\/\S+|erased_\d+)\" rel=\"nofollow\">(.+)<\/a>$/

# Twitter client configuration (REST client for posting replies)
client = Twitter::REST::Client.new do |config|
  config.consumer_key = keys['twitter']['consumer_key']
  config.consumer_secret = keys['twitter']['consumer_secret']
  config.access_token = keys['twitter']['access_token']
  config.access_token_secret = keys['twitter']['access_token_secret']
end

# Streaming client for listening to the user stream
streamer = Twitter::Streaming::Client.new do |config|
  config.consumer_key = keys['twitter']['consumer_key']
  config.consumer_secret = keys['twitter']['consumer_secret']
  config.access_token = keys['twitter']['access_token']
  config.access_token_secret = keys['twitter']['access_token_secret']
end

# OPTIONAL: Tumblr configuration
if keys['tumblr']['enabled']
  Tumblr.configure do |config|
    config.consumer_key = keys['tumblr']['consumer_key']
    config.consumer_secret = keys['tumblr']['consumer_secret']
    config.oauth_token = keys['tumblr']['access_token']
    config.oauth_token_secret = keys['tumblr']['access_token_secret']
  end
  tumblr_client = Tumblr::Client.new
end

# Whether the Twitter status limit is currently in effect
limited = false

begin
  $current_user = client.current_user
rescue Exception => e
  puts "Exception: #{e.message}"
  # best hack: fall back to extracting the user id from the access token
  $current_user = OpenStruct.new
  $current_user.id = keys['twitter']["access_token"].split("-")[0].to_i
end

# Startup banner with list and filter sizes
puts "yourwaifu #{version}"
puts "-------------------------------"
puts "Entries: [\033[34;1m#{waifu.count}\033[0m] waifu"
puts "         [\033[34;1m#{shipgirl.count}\033[0m] shipgirls"
puts "         [\033[34;1m#{husbando.count}\033[0m] husbando"
puts "         [\033[34;1m#{touhou.count}\033[0m] touhou"
puts "         [\033[34;1m #{imouto.count}\033[0m] imouto"
puts "-------------------------------"
puts "Filters: [ \033[33;1m#{FILTER_WORDS.count}\033[0m] words"
puts "         [ \033[33;1m#{FILTER_USERS.count}\033[0m] users"
puts "         [ \033[33;1m#{FILTER_CLIENTS.count}\033[0m] clients"
puts "-------------------------------"

if keys['tumblr']['enabled']
  puts "\033[36;1mposting to Tumblr if status limit occurs\033[0m"
  puts "-------------------------------"
end
# Control-flow exceptions used by the tweet filters below.
# FIX: they now derive from StandardError instead of Exception, so a plain
# `rescue` can catch them and signals/exit are never swallowed by accident.

# Tweet is our own or a plain retweet — silently ignore it.
class NotImportantException < StandardError
end

# Base class for all filter hits.
class FilteredException < StandardError
end

# Tweet was sent from a blacklisted client application.
class FilteredClientException < FilteredException
end

# Tweet author is blacklisted.
class FilteredUserException < FilteredException
end

# Tweet text contains a blacklisted word.
class FilteredTweetException < FilteredException
end
# Filter helpers mixed into tweets from the user stream. Each raises a
# control-flow exception when the tweet must not be answered.
class Twitter::Tweet
  # The bot must never reply to itself.
  def raise_if_current_user!
    raise NotImportantException if $current_user.id == user.id
  end

  # Plain "RT @" retweets are ignored.
  def raise_if_retweet!
    raise NotImportantException if text.start_with? "RT @"
  end

  # Rejects tweets posted with a blacklisted client application.
  def raise_if_client_filtered!
    # Hoisted out of the loop — the tweet's source string never changes.
    # FIX: sources that do not match SOURCE_REGEX previously raised
    # NoMethodError on nil; they are now simply not client-filtered.
    match = source.match SOURCE_REGEX
    return if match.nil?

    client_name = match[2].downcase

    FILTER_CLIENTS.each do |fc|
      if client_name.include? fc.downcase
        raise FilteredClientException, "#{user.screen_name} is replying with #{fc}, a filtered client"
      end
    end
  end

  # Rejects tweets containing a blacklisted word.
  def raise_if_word_filtered!
    FILTER_WORDS.each do |fw|
      if text.downcase.include? fw.downcase
        raise FilteredTweetException, "#{user.screen_name} triggered filter: '#{fw}'"
      end
    end
  end

  # Rejects tweets from blacklisted accounts (substring match on the name).
  def raise_if_user_filtered!
    FILTER_USERS.each do |fu|
      if user.screen_name.downcase.include? fu.downcase
        raise FilteredUserException, "#{user.screen_name} is filtered, not going to reply"
      end
    end
  end
end
# Main loop: listen on the user stream and reply to every incoming tweet
# with a randomly chosen character. Stream errors fall through to the outer
# loop, which reconnects after a short pause.
loop do
  streamer.user do |object|
    if object.is_a? Twitter::Tweet
      begin
        # Filters: each raises when the tweet should be ignored.
        object.raise_if_current_user!
        object.raise_if_retweet!
        object.raise_if_client_filtered!
        object.raise_if_word_filtered!
        object.raise_if_user_filtered!

        # Pick the list matching the first keyword found in the tweet text.
        case object.text
        when /husbando?/i
          chosen_one = husbando.sample
          chosen_one['title'] = "husbando"
        when /imouto?/i
          chosen_one = imouto.sample
          chosen_one['title'] = "imouto"
        when /shipgirl?/i
          chosen_one = shipgirl.sample
          chosen_one['title'] = "shipgirl"
        when /touhou?/i
          chosen_one = touhou.sample
          chosen_one['title'] = "touhou"
        else
          chosen_one = waifu.sample
          chosen_one['title'] = "waifu"
        end

        puts "[#{Time.new.to_s}][#{chosen_one["title"]}] #{object.user.screen_name}: #{chosen_one["name"]} - #{chosen_one["series"]}"

        # Attach the character image when one exists on disk.
        # NOTE(review): File.exists? is deprecated in newer Rubies in favour
        # of File.exist? — confirm the targeted Ruby version.
        if File.exists? File.expand_path("../img/#{chosen_one["series"]}/#{chosen_one["name"]}.#{chosen_one["filetype"]}", __FILE__)
          client.update_with_media "@#{object.user.screen_name} Your #{chosen_one["title"]} is #{chosen_one["name"]} (#{chosen_one["series"]})", File.new("img/#{chosen_one["series"]}/#{chosen_one["name"]}.#{chosen_one["filetype"]}"), in_reply_to_status:object
        else
          client.update "@#{object.user.screen_name} Your #{chosen_one["title"]} is #{chosen_one["name"]} (#{chosen_one["series"]})", in_reply_to_status:object
          puts "\033[34;1m[#{Time.new.to_s}] posted without image!\033[0m"
        end

        # A successful post means the status limit is over — announce it once.
        if limited
          limited = false
          if keys['tumblr']['enabled']
            tumblr_client.text(keys['tumblr']['blog_name'], title: "I'm back!", body: "The limit is gone now and you can get waifus/husbandos again! [Bot has been unlimited since: #{Time.new.to_s}]")
          end
        end
      rescue NotImportantException => e
        # Own tweet or retweet — nothing to do.
      rescue FilteredClientException => e
        puts "\033[33;1m[#{Time.new.to_s}] #{e.message}\033[0m"
      rescue FilteredTweetException => e
        puts "\033[33;1m[#{Time.new.to_s}] #{e.message}\033[0m"
      rescue FilteredUserException => e
        puts "\033[33;1m[#{Time.new.to_s}] #{e.message}\033[0m"
      rescue Exception => e
        # NOTE(review): rescuing Exception also swallows SignalException —
        # confirm this is intended before narrowing to StandardError.
        puts "\033[31;1m[#{Time.new.to_s}] #{e.message}\033[0m"
        # Announce the Twitter status limit the first time it is hit.
        if e.message.match /update limit/i and !limited
          limited = true
          if keys['tumblr']['enabled']
            tumblr_client.text(keys['tumblr']['blog_name'], title: "Bot is limited", body: "I've reached the \"daily\" limit for now! Please wait a bit before mentioning me again. [Bot has been limited since: #{Time.new.to_s}]")
          end
        end
      end
    end
  end
  sleep 1
end
|
#!/usr/bin/env ruby

# yourwaifu Twitter bot: listens to mentions and replies with a random
# character ("waifu", "husbando", ...) picked from YAML lists.

require "yaml"
require "twitter"
require "tumblr_client"
require "ostruct"

version = "v2.0.7"

# loading the config file
keys = YAML.load_file File.expand_path(".", "config.yml")

# loading the filter lists
FILTER_WORDS = YAML.load_file File.expand_path(".", "filters/words.yml")
FILTER_USERS = YAML.load_file File.expand_path(".", "filters/users.yml")
FILTER_CLIENTS = YAML.load_file File.expand_path(".", "filters/clients.yml")

# loading the lists containing characters
waifu = YAML.load_file File.expand_path(".", "lists/waifu.yml")
husbando = YAML.load_file File.expand_path(".", "lists/husbando.yml")
imouto = YAML.load_file File.expand_path(".", "lists/imouto.yml")
shipgirl = YAML.load_file File.expand_path(".", "lists/kancolle.yml")
touhou = YAML.load_file File.expand_path(".", "lists/touhou.yml")

# regex to get client name
SOURCE_REGEX = /^<a href=\"(https?:\/\/\S+|erased_\d+)\" rel=\"nofollow\">(.+)<\/a>$/

# Twitter client configuration (REST client for posting replies)
client = Twitter::REST::Client.new do |config|
  config.consumer_key = keys['twitter']['consumer_key']
  config.consumer_secret = keys['twitter']['consumer_secret']
  config.access_token = keys['twitter']['access_token']
  config.access_token_secret = keys['twitter']['access_token_secret']
end

# Streaming client for listening to the user stream
streamer = Twitter::Streaming::Client.new do |config|
  config.consumer_key = keys['twitter']['consumer_key']
  config.consumer_secret = keys['twitter']['consumer_secret']
  config.access_token = keys['twitter']['access_token']
  config.access_token_secret = keys['twitter']['access_token_secret']
end

# OPTIONAL: Tumblr configuration
if keys['tumblr']['enabled']
  Tumblr.configure do |config|
    config.consumer_key = keys['tumblr']['consumer_key']
    config.consumer_secret = keys['tumblr']['consumer_secret']
    config.oauth_token = keys['tumblr']['access_token']
    config.oauth_token_secret = keys['tumblr']['access_token_secret']
  end
  tumblr_client = Tumblr::Client.new
end

# Whether the Twitter status limit is currently in effect
limited = false

begin
  $current_user = client.current_user
rescue Exception => e
  puts "Exception: #{e.message}"
  # best hack: fall back to extracting the user id from the access token
  $current_user = OpenStruct.new
  $current_user.id = keys['twitter']["access_token"].split("-")[0].to_i
end

# Startup banner with list and filter sizes
puts "yourwaifu #{version}"
puts "-------------------------------"
puts "Entries: [\033[34;1m#{waifu.count}\033[0m] waifu"
puts "         [\033[34;1m#{shipgirl.count}\033[0m] shipgirls"
puts "         [\033[34;1m#{husbando.count}\033[0m] husbando"
puts "         [\033[34;1m#{touhou.count}\033[0m] touhou"
puts "         [\033[34;1m #{imouto.count}\033[0m] imouto"
puts "-------------------------------"
puts "Filters: [ \033[33;1m#{FILTER_WORDS.count}\033[0m] words"
puts "         [ \033[33;1m#{FILTER_USERS.count}\033[0m] users"
puts "         [ \033[33;1m#{FILTER_CLIENTS.count}\033[0m] clients"
puts "-------------------------------"

if keys['tumblr']['enabled']
  puts "\033[36;1mposting to Tumblr if status limit occurs\033[0m"
  puts "-------------------------------"
end
# Control-flow exceptions used to skip tweets inside the stream loop.
# They subclass StandardError (not Exception): raising Exception
# subclasses can mask signals such as Interrupt and is the canonical
# Ruby anti-pattern; all raise/rescue sites name these classes
# explicitly, so behaviour of the bot is unchanged.
class NotImportantException < StandardError
end
# Common superclass of all filter rejections (client, user, word).
class FilteredException < StandardError
end
class FilteredClientException < FilteredException
end
class FilteredUserException < FilteredException
end
class FilteredTweetException < FilteredException
end
# Monkey-patches Twitter::Tweet with guard predicates used by the main
# loop. Each raises a control-flow exception when the tweet must be
# skipped instead of answered.
class Twitter::Tweet
  # Never reply to the bot's own tweets (would loop forever).
  def raise_if_current_user!
    raise NotImportantException if $current_user.id == self.user.id
  end

  # Ignore classic-style retweets.
  def raise_if_retweet!
    raise NotImportantException if self.text.start_with? "RT @"
  end

  # Reject tweets sent from a blacklisted client application. The client
  # name is extracted from the "source" HTML anchor of the tweet.
  def raise_if_client_filtered!
    # Parse once instead of re-matching on every iteration (the match is
    # loop-invariant), and skip filtering when the source does not match
    # the expected markup - the original crashed with NoMethodError on a
    # nil match here.
    match = self.source.match SOURCE_REGEX
    return if match.nil?
    client_name = match[2].downcase
    FILTER_CLIENTS.each do |fc|
      if client_name.include? fc.downcase
        raise FilteredClientException, "#{self.user.screen_name} is replying with #{fc}, a filtered client"
      end
    end
  end

  # Reject tweets containing a blacklisted word.
  def raise_if_word_filtered!
    text = self.text.downcase
    FILTER_WORDS.each do |fw|
      if text.include? fw.downcase
        raise FilteredTweetException, "#{self.user.screen_name} triggered filter: '#{fw}'"
      end
    end
  end

  # Reject tweets from blacklisted users (substring match on the name).
  def raise_if_user_filtered!
    name = self.user.screen_name.downcase
    FILTER_USERS.each do |fu|
      if name.include? fu.downcase
        raise FilteredUserException, "#{self.user.screen_name} is filtered, not going to reply"
      end
    end
  end
end
# Main loop: consume the user stream and answer every mention with a
# randomly picked character. The outer loop re-opens the stream after it
# ends or crashes; `sleep 1` throttles reconnection attempts.
loop do
  streamer.user do |object|
    # The stream also yields events/DMs - only actual tweets matter.
    if object.is_a? Twitter::Tweet
      begin
        # Each guard raises a control-flow exception to skip this tweet.
        object.raise_if_current_user!
        object.raise_if_retweet!
        object.raise_if_client_filtered!
        object.raise_if_word_filtered!
        object.raise_if_user_filtered!
        # Pick the list matching the first keyword in the tweet text;
        # plain mentions default to the waifu list.
        case object.text
        when /husbando?/i
          chosen_one = husbando.sample
          chosen_one['title'] = "husbando"
        when /imouto?/i
          chosen_one = imouto.sample
          chosen_one['title'] = "imouto"
        when /shipgirl?/i
          chosen_one = shipgirl.sample
          chosen_one['title'] = "shipgirl"
        when /touhou?/i
          chosen_one = touhou.sample
          chosen_one['title'] = "touhou"
        else
          chosen_one = waifu.sample
          chosen_one['title'] = "waifu"
        end
        puts "[#{Time.new.to_s}][#{chosen_one["title"]}] #{object.user.screen_name}: #{chosen_one["name"]} - #{chosen_one["series"]}"
        # Attach the character image when one exists on disk.
        # File.exist? replaces the deprecated (now removed) File.exists?.
        if File.exist? File.expand_path("../img/#{chosen_one["series"]}/#{chosen_one["name"]}.#{chosen_one["filetype"]}", __FILE__)
          client.update_with_media "@#{object.user.screen_name} Your #{chosen_one["title"]} is #{chosen_one["name"]} (#{chosen_one["series"]})", File.new("img/#{chosen_one["series"]}/#{chosen_one["name"]}.#{chosen_one["filetype"]}"), in_reply_to_status:object
        else
          client.update "@#{object.user.screen_name} Your #{chosen_one["title"]} is #{chosen_one["name"]} (#{chosen_one["series"]})", in_reply_to_status:object
          puts "\033[34;1m[#{Time.new.to_s}] posted without image!\033[0m"
        end
        # A successful post means the rate limit (if any) has lifted.
        if limited
          limited = false
          if keys['tumblr']['enabled']
            tumblr_client.text(keys['tumblr']['blog_name'], title: "I'm back!", body: "The limit is gone now and you can get waifus/husbandos again! [Bot has been unlimited since: #{Time.new.to_s}]")
          end
        end
      rescue NotImportantException
        # Own tweet or retweet: deliberately ignored without logging.
      rescue FilteredException => e
        # Client/word/user filters all logged identically, so one rescue
        # of the common superclass replaces three duplicate branches.
        puts "\033[33;1m[#{Time.new.to_s}] #{e.message}\033[0m"
      rescue StandardError => e
        # StandardError instead of Exception so signals (Interrupt,
        # SystemExit) can still terminate the bot.
        puts "\033[31;1m[#{Time.new.to_s}] #{e.message}\033[0m"
        if e.message.match(/update limit/i) && !limited
          limited = true
          if keys['tumblr']['enabled']
            tumblr_client.text(keys['tumblr']['blog_name'], title: "Bot is limited", body: "I've reached the \"daily\" limit for now! Please wait a bit before mentioning me again. [Bot has been limited since: #{Time.new.to_s}]")
          end
        end
      end
    end
  end
  sleep 1
end
fixed formatting and sorting
#!/usr/bin/env ruby
require "yaml"
require "twitter"
require "tumblr_client"
require "ostruct"
version = "v2.0.7"
# loading the config file
# NOTE(review): File.expand_path(".", "config.yml") resolves relative to
# the process working directory, not to this script's location - confirm
# the bot is always started from its own directory.
keys = YAML.load_file File.expand_path(".", "config.yml")
# loading the filter lists
FILTER_WORDS = YAML.load_file File.expand_path(".", "filters/words.yml")
FILTER_USERS = YAML.load_file File.expand_path(".", "filters/users.yml")
FILTER_CLIENTS = YAML.load_file File.expand_path(".", "filters/clients.yml")
# loading the lists containing characters
waifu = YAML.load_file File.expand_path(".", "lists/waifu.yml")
husbando = YAML.load_file File.expand_path(".", "lists/husbando.yml")
imouto = YAML.load_file File.expand_path(".", "lists/imouto.yml")
shipgirl = YAML.load_file File.expand_path(".", "lists/kancolle.yml")
touhou = YAML.load_file File.expand_path(".", "lists/touhou.yml")
# regex to get client name
# The tweet "source" field is an HTML anchor; capture 2 is the client name.
SOURCE_REGEX = /^<a href=\"(https?:\/\/\S+|erased_\d+)\" rel=\"nofollow\">(.+)<\/a>$/
# Twitter client configuration
# REST client: used to post the replies.
client = Twitter::REST::Client.new do |config|
  config.consumer_key = keys['twitter']['consumer_key']
  config.consumer_secret = keys['twitter']['consumer_secret']
  config.access_token = keys['twitter']['access_token']
  config.access_token_secret = keys['twitter']['access_token_secret']
end
# Streaming client: listens for mentions on the user stream.
streamer = Twitter::Streaming::Client.new do |config|
  config.consumer_key = keys['twitter']['consumer_key']
  config.consumer_secret = keys['twitter']['consumer_secret']
  config.access_token = keys['twitter']['access_token']
  config.access_token_secret = keys['twitter']['access_token_secret']
end
# OPTIONAL: Tumblr configuration
# NOTE(review): tumblr_client is only assigned when keys['tumblr']['enabled']
# is truthy; every later use is guarded by the same flag.
if keys['tumblr']['enabled']
  Tumblr.configure do |config|
    config.consumer_key = keys['tumblr']['consumer_key']
    config.consumer_secret = keys['tumblr']['consumer_secret']
    config.oauth_token = keys['tumblr']['access_token']
    config.oauth_token_secret = keys['tumblr']['access_token_secret']
  end
  tumblr_client = Tumblr::Client.new
end
# Tracks whether the bot is currently rate-limited by Twitter.
limited = false
# Resolve the bot's own account so it never replies to itself.
begin
  $current_user = client.current_user
rescue StandardError => e
  # StandardError instead of Exception so Interrupt/SystemExit can still
  # terminate startup.
  puts "Exception: #{e.message}"
  # best hack: the numeric account id is the prefix of the access token
  # ("<user_id>-<secret>"), so build a stand-in user object from it.
  $current_user = OpenStruct.new
  $current_user.id = keys['twitter']["access_token"].split("-")[0].to_i
end
# Startup banner: version, per-list entry counts and filter sizes.
puts "yourwaifu #{version}"
puts "-------------------------------"
puts "Entries: [\033[34;1m#{waifu.count}\033[0m] waifu"
puts "         [\033[34;1m#{husbando.count}\033[0m] husbando"
puts "         [\033[34;1m#{shipgirl.count}\033[0m] shipgirls"
puts "         [\033[34;1m  #{touhou.count}\033[0m] touhou"
puts "         [\033[34;1m  #{imouto.count}\033[0m] imouto"
puts "-------------------------------"
puts "Filters: [  \033[33;1m#{FILTER_WORDS.count}\033[0m] words"
puts "         [  \033[33;1m#{FILTER_USERS.count}\033[0m] users"
puts "         [  \033[33;1m#{FILTER_CLIENTS.count}\033[0m] clients"
puts "-------------------------------"
if keys['tumblr']['enabled']
  puts "\033[36;1mposting to Tumblr if status limit occurs\033[0m"
  puts "-------------------------------"
end
# Control-flow exceptions used to skip tweets inside the stream loop.
# They subclass StandardError (not Exception): raising Exception
# subclasses can mask signals such as Interrupt and is the canonical
# Ruby anti-pattern; all raise/rescue sites name these classes
# explicitly, so behaviour of the bot is unchanged.
class NotImportantException < StandardError
end
# Common superclass of all filter rejections (client, user, word).
class FilteredException < StandardError
end
class FilteredClientException < FilteredException
end
class FilteredUserException < FilteredException
end
class FilteredTweetException < FilteredException
end
# Monkey-patches Twitter::Tweet with guard predicates used by the main
# loop. Each raises a control-flow exception when the tweet must be
# skipped instead of answered.
class Twitter::Tweet
  # Never reply to the bot's own tweets (would loop forever).
  def raise_if_current_user!
    raise NotImportantException if $current_user.id == self.user.id
  end

  # Ignore classic-style retweets.
  def raise_if_retweet!
    raise NotImportantException if self.text.start_with? "RT @"
  end

  # Reject tweets sent from a blacklisted client application. The client
  # name is extracted from the "source" HTML anchor of the tweet.
  def raise_if_client_filtered!
    # Parse once instead of re-matching on every iteration (the match is
    # loop-invariant), and skip filtering when the source does not match
    # the expected markup - the original crashed with NoMethodError on a
    # nil match here.
    match = self.source.match SOURCE_REGEX
    return if match.nil?
    client_name = match[2].downcase
    FILTER_CLIENTS.each do |fc|
      if client_name.include? fc.downcase
        raise FilteredClientException, "#{self.user.screen_name} is replying with #{fc}, a filtered client"
      end
    end
  end

  # Reject tweets containing a blacklisted word.
  def raise_if_word_filtered!
    text = self.text.downcase
    FILTER_WORDS.each do |fw|
      if text.include? fw.downcase
        raise FilteredTweetException, "#{self.user.screen_name} triggered filter: '#{fw}'"
      end
    end
  end

  # Reject tweets from blacklisted users (substring match on the name).
  def raise_if_user_filtered!
    name = self.user.screen_name.downcase
    FILTER_USERS.each do |fu|
      if name.include? fu.downcase
        raise FilteredUserException, "#{self.user.screen_name} is filtered, not going to reply"
      end
    end
  end
end
# Main loop: consume the user stream and answer every mention with a
# randomly picked character. The outer loop re-opens the stream after it
# ends or crashes; `sleep 1` throttles reconnection attempts.
loop do
  streamer.user do |object|
    # The stream also yields events/DMs - only actual tweets matter.
    if object.is_a? Twitter::Tweet
      begin
        # Each guard raises a control-flow exception to skip this tweet.
        object.raise_if_current_user!
        object.raise_if_retweet!
        object.raise_if_client_filtered!
        object.raise_if_word_filtered!
        object.raise_if_user_filtered!
        # Pick the list matching the first keyword in the tweet text;
        # plain mentions default to the waifu list.
        case object.text
        when /husbando?/i
          chosen_one = husbando.sample
          chosen_one['title'] = "husbando"
        when /imouto?/i
          chosen_one = imouto.sample
          chosen_one['title'] = "imouto"
        when /shipgirl?/i
          chosen_one = shipgirl.sample
          chosen_one['title'] = "shipgirl"
        when /touhou?/i
          chosen_one = touhou.sample
          chosen_one['title'] = "touhou"
        else
          chosen_one = waifu.sample
          chosen_one['title'] = "waifu"
        end
        puts "[#{Time.new.to_s}][#{chosen_one["title"]}] #{object.user.screen_name}: #{chosen_one["name"]} - #{chosen_one["series"]}"
        # Attach the character image when one exists on disk.
        # File.exist? replaces the deprecated (now removed) File.exists?.
        if File.exist? File.expand_path("../img/#{chosen_one["series"]}/#{chosen_one["name"]}.#{chosen_one["filetype"]}", __FILE__)
          client.update_with_media "@#{object.user.screen_name} Your #{chosen_one["title"]} is #{chosen_one["name"]} (#{chosen_one["series"]})", File.new("img/#{chosen_one["series"]}/#{chosen_one["name"]}.#{chosen_one["filetype"]}"), in_reply_to_status:object
        else
          client.update "@#{object.user.screen_name} Your #{chosen_one["title"]} is #{chosen_one["name"]} (#{chosen_one["series"]})", in_reply_to_status:object
          puts "\033[34;1m[#{Time.new.to_s}] posted without image!\033[0m"
        end
        # A successful post means the rate limit (if any) has lifted.
        if limited
          limited = false
          if keys['tumblr']['enabled']
            tumblr_client.text(keys['tumblr']['blog_name'], title: "I'm back!", body: "The limit is gone now and you can get waifus/husbandos again! [Bot has been unlimited since: #{Time.new.to_s}]")
          end
        end
      rescue NotImportantException
        # Own tweet or retweet: deliberately ignored without logging.
      rescue FilteredException => e
        # Client/word/user filters all logged identically, so one rescue
        # of the common superclass replaces three duplicate branches.
        puts "\033[33;1m[#{Time.new.to_s}] #{e.message}\033[0m"
      rescue StandardError => e
        # StandardError instead of Exception so signals (Interrupt,
        # SystemExit) can still terminate the bot.
        puts "\033[31;1m[#{Time.new.to_s}] #{e.message}\033[0m"
        if e.message.match(/update limit/i) && !limited
          limited = true
          if keys['tumblr']['enabled']
            tumblr_client.text(keys['tumblr']['blog_name'], title: "Bot is limited", body: "I've reached the \"daily\" limit for now! Please wait a bit before mentioning me again. [Bot has been limited since: #{Time.new.to_s}]")
          end
        end
      end
    end
  end
  sleep 1
end
|
require "yaml"
require "twitter"
require "ostruct"
# Twitter API credentials, loaded from ./config.yml relative to the
# current working directory.
keys = YAML.load_file File.expand_path(".", "config.yml")
# Hard-coded character list (name + series); each reply picks one entry
# at random. Later versions moved these lists into YAML files.
waifu = [
  { name: "Yuuji Kazami", series: "Grisaia no Kajitsu" },
  { name: "Yumiko Sakake", series: "Grisaia no Kajitsu" },
  { name: "Amane Suou", series: "Grisaia no Kajitsu" },
  { name: "Michiru Matsushima", series: "Grisaia no Kajitsu" },
  { name: "Makina Irisu", series: "Grisaia no Kajitsu" },
  { name: "Sachi Komine", series: "Grisaia no Kajitsu" },
  { name: "Chizuru Tachibana", series: "Grisaia no Kajitsu" },
  { name: "Kazuki Kazami", series: "Grisaia no Kajitsu" },
  { name: "Hanako Ikezawa", series: "Katawa Shoujo" },
  { name: "Lilly Satou", series: "Katawa Shoujo" },
  { name: "Shizune Hakamichi", series: "Katawa Shoujo" },
  { name: "Hideaki Hakamichi", series: "Katawa Shoujo" },
  { name: "Shiina \"Misha\" Mikado", series: "Katawa Shoujo" },
  { name: "Emi Ibarazaki", series: "Katawa Shoujo" },
  { name: "Rin Tezuka", series: "Katawa Shoujo" },
  { name: "Kenji Setou", series: "Katawa Shoujo" },
  { name: "Suzu Suzuki", series: "Katawa Shoujo" },
  { name: "Ryouta Murakami", series: "Gokukoku no Brynhildr" },
  { name: "Neko Kuroha", series: "Gokukoku no Brynhildr" },
  { name: "Kana Tachibana", series: "Gokukoku no Brynhildr" },
  { name: "Kazumi Schlierenzauer", series: "Gokukoku no Brynhildr" },
  { name: "Kotori Takatori", series: "Gokukoku no Brynhildr" },
  { name: "Hatsuna Wakabayashi", series: "Gokukoku no Brynhildr" },
  { name: "Rintarō Okabe", series: "Steins;Gate" },
  { name: "Kurisu Makise", series: "Steins;Gate" },
  { name: "Mayuri Shiina", series: "Steins;Gate" },
  { name: "Ruka Urushibara", series: "Steins;Gate" },
  { name: "Misaka Mikoto", series: "To Aru Kagaku no Railgun" },
  { name: "Ruiko Saten", series: "To Aru Kagaku no Railgun" },
  { name: "Kazari Uiharu", series: "To Aru Kagaku no Railgun" },
  { name: "Kuroko Shirai", series: "To Aru Kagaku no Railgun" },
  { name: "Tsukihi Araragi", series: "Monogatari Series" },
  { name: "Tsubasa Hanekawa", series: "Monogatari Series" },
  { name: "Shinobu Oshino", series: "Monogatari Series" },
  { name: "Nadeko Sengoku", series: "Monogatari Series" },
  { name: "Karen Araragi", series: "Monogatari Series" },
  { name: "Ryuuko Matoi", series: "Kill la Kill" },
  { name: "Satsuki Kiryuuin", series: "Kill la Kill" },
  { name: "Nonon Jakuzure", series: "Kill la Kill" },
  { name: "Aoi Sakurai", series: "Rail Wars" },
  { name: "Haruka Koumi", series: "Rail Wars" },
  { name: "Akari Akaza", series: "Yuru Yuri" },
  { name: "Kyouko Toshino", series: "Yuru Yuri" },
  { name: "Junko Enoshima", series: "DanganRonpa" },
  { name: "Chihiro Fujisaki", series: "DanganRonpa" },
  { name: "Mirai Kuriyama", series: "Kyoukai no Kanata" },
  { name: "Izumi Nase", series: "Kyoukai no Kanata" },
  { name: "Illyasviel von Einzbern", series: "Fate/kaleid liner Prisma☆Illya" },
  { name: "Rin Tohsaka", series: "Fate/kaleid liner Prisma☆Illya" },
  { name: "Luvia Edelfelt", series: "Fate/stay" },
  { name: "Irisviel von Einzbern", series: "Fate/zero" },
]
# REST client: used to post the replies.
client = Twitter::REST::Client.new do |config|
  config.consumer_key = keys['consumer_key']
  config.consumer_secret = keys['consumer_secret']
  config.access_token = keys['access_token']
  config.access_token_secret = keys['access_token_secret']
end
# Streaming client: listens for mentions on the user stream.
streamer = Twitter::Streaming::Client.new do |config|
  config.consumer_key = keys['consumer_key']
  config.consumer_secret = keys['consumer_secret']
  config.access_token = keys['access_token']
  config.access_token_secret = keys['access_token_secret']
end
# Determine the bot's own user id so it never replies to itself.
begin
  current_user = client.current_user
rescue StandardError => e
  puts "Exception: #{e.message}"
  # best hack: the numeric account id is the prefix of the access token
  # ("<user_id>-<secret>"). Fixes the original fallback, which referenced
  # the undefined local `config` (NameError inside this rescue) and
  # omitted .to_i, so the String id never equalled the Integer
  # object.user.id and the self-reply guard was ineffective.
  current_user = OpenStruct.new
  current_user.id = keys["access_token"].split("-")[0].to_i
end
# Answer every incoming mention with a randomly selected character.
streamer.user do |object|
  # Guard clauses: skip stream items that are not tweets, the bot's own
  # tweets, and classic-style retweets.
  next unless object.is_a? Twitter::Tweet
  next if current_user.id == object.user.id
  next if object.text.start_with? "RT @"

  chosen_one = waifu.sample
  puts "#{object.user.screen_name}: #{chosen_one[:name]} - #{chosen_one[:series]}"
  client.update "@#{object.user.screen_name} Your waifu is #{chosen_one[:name]} (#{chosen_one[:series]})", in_reply_to_status:object
end
added OreImo characters
require "yaml"
require "twitter"
require "ostruct"
# Twitter API credentials, loaded from ./config.yml relative to the
# current working directory.
keys = YAML.load_file File.expand_path(".", "config.yml")
# Hard-coded character list (name + series); each reply picks one entry
# at random. This revision appends two OreImo characters.
waifu = [
  { name: "Yuuji Kazami", series: "Grisaia no Kajitsu" },
  { name: "Yumiko Sakake", series: "Grisaia no Kajitsu" },
  { name: "Amane Suou", series: "Grisaia no Kajitsu" },
  { name: "Michiru Matsushima", series: "Grisaia no Kajitsu" },
  { name: "Makina Irisu", series: "Grisaia no Kajitsu" },
  { name: "Sachi Komine", series: "Grisaia no Kajitsu" },
  { name: "Chizuru Tachibana", series: "Grisaia no Kajitsu" },
  { name: "Kazuki Kazami", series: "Grisaia no Kajitsu" },
  { name: "Hanako Ikezawa", series: "Katawa Shoujo" },
  { name: "Lilly Satou", series: "Katawa Shoujo" },
  { name: "Shizune Hakamichi", series: "Katawa Shoujo" },
  { name: "Hideaki Hakamichi", series: "Katawa Shoujo" },
  { name: "Shiina \"Misha\" Mikado", series: "Katawa Shoujo" },
  { name: "Emi Ibarazaki", series: "Katawa Shoujo" },
  { name: "Rin Tezuka", series: "Katawa Shoujo" },
  { name: "Kenji Setou", series: "Katawa Shoujo" },
  { name: "Suzu Suzuki", series: "Katawa Shoujo" },
  { name: "Ryouta Murakami", series: "Gokukoku no Brynhildr" },
  { name: "Neko Kuroha", series: "Gokukoku no Brynhildr" },
  { name: "Kana Tachibana", series: "Gokukoku no Brynhildr" },
  { name: "Kazumi Schlierenzauer", series: "Gokukoku no Brynhildr" },
  { name: "Kotori Takatori", series: "Gokukoku no Brynhildr" },
  { name: "Hatsuna Wakabayashi", series: "Gokukoku no Brynhildr" },
  { name: "Rintarō Okabe", series: "Steins;Gate" },
  { name: "Kurisu Makise", series: "Steins;Gate" },
  { name: "Mayuri Shiina", series: "Steins;Gate" },
  { name: "Ruka Urushibara", series: "Steins;Gate" },
  { name: "Misaka Mikoto", series: "To Aru Kagaku no Railgun" },
  { name: "Ruiko Saten", series: "To Aru Kagaku no Railgun" },
  { name: "Kazari Uiharu", series: "To Aru Kagaku no Railgun" },
  { name: "Kuroko Shirai", series: "To Aru Kagaku no Railgun" },
  { name: "Tsukihi Araragi", series: "Monogatari Series" },
  { name: "Tsubasa Hanekawa", series: "Monogatari Series" },
  { name: "Shinobu Oshino", series: "Monogatari Series" },
  { name: "Nadeko Sengoku", series: "Monogatari Series" },
  { name: "Karen Araragi", series: "Monogatari Series" },
  { name: "Ryuuko Matoi", series: "Kill la Kill" },
  { name: "Satsuki Kiryuuin", series: "Kill la Kill" },
  { name: "Nonon Jakuzure", series: "Kill la Kill" },
  { name: "Aoi Sakurai", series: "Rail Wars" },
  { name: "Haruka Koumi", series: "Rail Wars" },
  { name: "Akari Akaza", series: "Yuru Yuri" },
  { name: "Kyouko Toshino", series: "Yuru Yuri" },
  { name: "Junko Enoshima", series: "DanganRonpa" },
  { name: "Chihiro Fujisaki", series: "DanganRonpa" },
  { name: "Mirai Kuriyama", series: "Kyoukai no Kanata" },
  { name: "Izumi Nase", series: "Kyoukai no Kanata" },
  { name: "Illyasviel von Einzbern", series: "Fate/kaleid liner Prisma☆Illya" },
  { name: "Rin Tohsaka", series: "Fate/kaleid liner Prisma☆Illya" },
  { name: "Luvia Edelfelt", series: "Fate/stay" },
  { name: "Irisviel von Einzbern", series: "Fate/zero" },
  { name: "Ayase Aragaki", series: "OreImo" },
  { name: "Kanako Kurusu", series: "OreImo" },
]
# REST client: used to post the replies.
client = Twitter::REST::Client.new do |config|
  config.consumer_key = keys['consumer_key']
  config.consumer_secret = keys['consumer_secret']
  config.access_token = keys['access_token']
  config.access_token_secret = keys['access_token_secret']
end
# Streaming client: listens for mentions on the user stream.
streamer = Twitter::Streaming::Client.new do |config|
  config.consumer_key = keys['consumer_key']
  config.consumer_secret = keys['consumer_secret']
  config.access_token = keys['access_token']
  config.access_token_secret = keys['access_token_secret']
end
# Determine the bot's own user id so it never replies to itself.
begin
  current_user = client.current_user
rescue StandardError => e
  puts "Exception: #{e.message}"
  # best hack: the numeric account id is the prefix of the access token
  # ("<user_id>-<secret>"). Fixes the original fallback, which referenced
  # the undefined local `config` (NameError inside this rescue) and
  # omitted .to_i, so the String id never equalled the Integer
  # object.user.id and the self-reply guard was ineffective.
  current_user = OpenStruct.new
  current_user.id = keys["access_token"].split("-")[0].to_i
end
# Answer every incoming mention with a randomly selected character.
streamer.user do |object|
  # Guard clauses: skip stream items that are not tweets, the bot's own
  # tweets, and classic-style retweets.
  next unless object.is_a? Twitter::Tweet
  next if current_user.id == object.user.id
  next if object.text.start_with? "RT @"

  chosen_one = waifu.sample
  puts "#{object.user.screen_name}: #{chosen_one[:name]} - #{chosen_one[:series]}"
  client.update "@#{object.user.screen_name} Your waifu is #{chosen_one[:name]} (#{chosen_one[:series]})", in_reply_to_status:object
end
|
require "active_model"
require "forwardable"
# The ActiveConductor is an implementation of the conductor pattern.
#
# The conductor pattern unifies some models into a single object and
# cleans up controller code massively.
#
# @example
# class SignupConductor < ActiveConductor
# def models
# [user, profile]
# end
#
# def user
# @user ||= User.new
# end
#
# def profile
# @profile ||= Profile.new
# end
#
# conduct :user, :first_name, :last_name
# conduct :profile, :image
# end
#
# @author Scott Taylor
# @author Michael Kessler
#
class ActiveConductor
  include ActiveModel::Conversion
  include ActiveModel::Validations
  extend ActiveModel::Naming
  extend ActiveModel::Translation
  extend Forwardable

  # Conduct an attribute from the conductor to the associated
  # model.
  #
  # @example Conduct the email and password attributes to the user model
  #   conduct :user, :email, :password
  #
  # @param model [Symbol] the name of the model
  # @param attributes [Symbol] one or more model attribute names
  #
  def self.conduct(model, *attributes)
    attributes.each do |attr|
      def_delegator model, attr
      def_delegator model, "#{attr}="
    end
  end

  # Initialize the conductor with optional attributes.
  #
  # @param attributes [Hash] the attributes hash
  # @return [ActiveConductor] the created conductor
  #
  def initialize(attributes={})
    self.attributes = attributes
  end

  # Set the attributes on the associated models through the conducted
  # writer methods. A nil hash is silently ignored.
  #
  # @param attributes [Hash] the attributes hash
  #
  def attributes=(attributes)
    attributes.each do |key, value|
      self.send("#{key}=", value)
    end if attributes
  end

  # Tests if all of the associated models are still new records (the
  # original comment wrongly claimed it checks for persistence).
  #
  # @return [true, false] whether every model is a new record
  #
  def new_record?
    models.all? { |m| m.new_record? }
  end

  # Validates every associated model and copies each model's errors
  # onto the conductor, so all failures are reported together.
  # The errors can be accessed afterwards through {#errors}.
  #
  # NOTE(review): errors accumulate across repeated #valid? calls since
  # the error collection is never cleared - confirm this is intended.
  #
  # @return [true, false] the error status
  #
  def valid?
    models.inject(true) do |result, model|
      valid = model.valid?
      model.errors.each do |field, value|
        errors.add(field, value)
      end
      result && valid
    end
  end

  # Returns the errors of the conductor. The errors
  # are populated after a call to {#save}, {#valid?} or {.create}.
  #
  # @return [ActiveModel::Errors] the error collection
  #
  def errors
    @errors ||= ActiveModel::Errors.new(self)
  end

  # The models that the conductor holds. Subclasses override this.
  #
  # @return [Array] the array with the conducted models
  #
  def models
    []
  end

  # Saves the associated models, stopping at the first failure.
  #
  # Fixed to honour the documented contract: the original returned the
  # models array when everything saved and nil when invalid, instead of
  # true/false (truthiness is preserved, so callers are unaffected).
  #
  # @return [true, false] the saved status
  #
  def save
    return false unless valid?
    models.each { |model| return false unless model.save }
    true
  end

  # Create and persist a new conductor in one step.
  #
  # @example Create and yield a conductor
  #   registration = Registration.create(params[:registration]) do |conductor|
  #     conductor.user.is_admin = true
  #   end
  #
  # @param attributes [Hash] the attributes hash to initialize the conductor
  # @return [ActiveConductor] the created conductor
  #
  def self.create(attributes)
    # The explicit &block parameter was obsolete: yield/block_given?
    # work without it, and removing it avoids a needless Proc allocation.
    object = new(attributes)
    yield(object) if block_given?
    object.save
    object
  end

  # ActiveModel compatibility method that always
  # returns false since a conductor cannot be
  # destroyed (See {#persisted?}).
  #
  # @return [false] always false
  #
  def destroyed?
    false
  end

  # ActiveModel compatibility method that always
  # returns false since a conductor will never be
  # persisted.
  #
  # @return [false] always false
  #
  def persisted?
    false
  end
end
Remove obsolete block parameter
require "active_model"
require "forwardable"
# The ActiveConductor is an implementation of the conductor pattern.
#
# The conductor pattern unifies some models into a single object and
# cleans up controller code massively.
#
# @example
# class SignupConductor < ActiveConductor
# def models
# [user, profile]
# end
#
# def user
# @user ||= User.new
# end
#
# def profile
# @profile ||= Profile.new
# end
#
# conduct :user, :first_name, :last_name
# conduct :profile, :image
# end
#
# @author Scott Taylor
# @author Michael Kessler
#
class ActiveConductor
  include ActiveModel::Conversion
  include ActiveModel::Validations
  extend ActiveModel::Naming
  extend ActiveModel::Translation
  extend Forwardable

  # Conduct an attribute from the conductor to the associated
  # model.
  #
  # @example Conduct the email and password attributes to the user model
  #   conduct :user, :email, :password
  #
  # @param model [Symbol] the name of the model
  # @param attributes [Symbol] one or more model attribute names
  #
  def self.conduct(model, *attributes)
    attributes.each do |attr|
      def_delegator model, attr
      def_delegator model, "#{attr}="
    end
  end

  # Initialize the conductor with optional attributes.
  #
  # @param attributes [Hash] the attributes hash
  # @return [ActiveConductor] the created conductor
  #
  def initialize(attributes={})
    self.attributes = attributes
  end

  # Set the attributes on the associated models through the conducted
  # writer methods. A nil hash is silently ignored.
  #
  # @param attributes [Hash] the attributes hash
  #
  def attributes=(attributes)
    attributes.each do |key, value|
      self.send("#{key}=", value)
    end if attributes
  end

  # Tests if all of the associated models are still new records (the
  # original comment wrongly claimed it checks for persistence).
  #
  # @return [true, false] whether every model is a new record
  #
  def new_record?
    models.all? { |m| m.new_record? }
  end

  # Validates every associated model and copies each model's errors
  # onto the conductor, so all failures are reported together.
  # The errors can be accessed afterwards through {#errors}.
  #
  # NOTE(review): errors accumulate across repeated #valid? calls since
  # the error collection is never cleared - confirm this is intended.
  #
  # @return [true, false] the error status
  #
  def valid?
    models.inject(true) do |result, model|
      valid = model.valid?
      model.errors.each do |field, value|
        errors.add(field, value)
      end
      result && valid
    end
  end

  # Returns the errors of the conductor. The errors
  # are populated after a call to {#save}, {#valid?} or {.create}.
  #
  # @return [ActiveModel::Errors] the error collection
  #
  def errors
    @errors ||= ActiveModel::Errors.new(self)
  end

  # The models that the conductor holds. Subclasses override this.
  #
  # @return [Array] the array with the conducted models
  #
  def models
    []
  end

  # Saves the associated models, stopping at the first failure.
  #
  # Fixed to honour the documented contract: the original returned the
  # models array when everything saved and nil when invalid, instead of
  # true/false (truthiness is preserved, so callers are unaffected).
  #
  # @return [true, false] the saved status
  #
  def save
    return false unless valid?
    models.each { |model| return false unless model.save }
    true
  end

  # Create and persist a new conductor in one step.
  #
  # @example Create and yield a conductor
  #   registration = Registration.create(params[:registration]) do |conductor|
  #     conductor.user.is_admin = true
  #   end
  #
  # @param attributes [Hash] the attributes hash to initialize the conductor
  # @return [ActiveConductor] the created conductor
  #
  def self.create(attributes)
    object = new(attributes)
    yield(object) if block_given?
    object.save
    object
  end

  # ActiveModel compatibility method that always
  # returns false since a conductor cannot be
  # destroyed (See {#persisted?}).
  #
  # @return [false] always false
  #
  def destroyed?
    false
  end

  # ActiveModel compatibility method that always
  # returns false since a conductor will never be
  # persisted.
  #
  # @return [false] always false
  #
  def persisted?
    false
  end
end
|
module ActiveEnum
  # Raised when two enum values share an id or name.
  class DuplicateValue < StandardError; end
  # Raised when a value definition has an unsupported format.
  class InvalidValue < StandardError; end

  class Base
    class << self
      attr_accessor :store

      # Register every subclass so ActiveEnum can enumerate them.
      def inherited(subclass)
        ActiveEnum.enum_classes << subclass
      end

      # Define enum values.
      #
      # Examples:
      #   value :id => 1, :name => 'Foo'
      #   value :name => 'Foo' # implicit id, incrementing from 1.
      #   value 1 => 'Foo'
      #
      def value(enum_value)
        store.set *id_and_name_and_meta(enum_value)
      end

      # Specify order enum values are returned.
      # Allowed values are :asc, :desc or :natural
      #
      def order(order)
        if order == :as_defined
          ActiveSupport::Deprecation.warn("You are using the order :as_defined which has been deprecated. Use :natural.")
          order = :natural
        end
        @order = order
      end

      # All stored [id, name(, meta)] rows.
      def all
        store.values
      end

      # Array of all enum id values
      def ids
        store.values.map {|v| v[0] }
      end

      # Array of all enum name values
      def names
        store.values.map {|v| v[1] }
      end

      # Return enum values in an array suitable to pass to a Rails form select helper.
      def to_select
        store.values.map {|v| [v[1], v[0]] }
      end

      # Access id or name value. Pass an id number to retrieve the name or
      # a symbol or string to retrieve the matching id.
      # Integer replaces the deprecated Fixnum constant, which was removed
      # in Ruby 3.2 (identical behaviour on earlier rubies).
      def [](index)
        if index.is_a?(Integer)
          row = store.get_by_id(index)
          row[1] if row
        else
          row = store.get_by_name(index)
          row[0] if row
        end
      end

      def include?(value)
        !self[value].nil?
      end

      # Access any meta data defined for a given id or name. Returns a hash.
      def meta(index)
        row = if index.is_a?(Integer)
          store.get_by_id(index)
        else
          store.get_by_name(index)
        end
        row[2] || {} if row
      end

      private

      # Normalizes a value definition hash into [id, name, meta].
      # Hash#empty? replaces ActiveSupport's blank?, so the check no
      # longer requires ActiveSupport to be loaded (the two are
      # equivalent for hashes).
      def id_and_name_and_meta(hash)
        if hash.has_key?(:id) || hash.has_key?(:name)
          id = hash.delete(:id) || next_id
          name = hash.delete(:name)
          meta = hash
          return id, name, (meta.empty? ? nil : meta)
        elsif hash.keys.first.is_a?(Integer)
          return *Array(hash).first
        else
          raise ActiveEnum::InvalidValue, "The value supplied, #{hash}, is not a valid format."
        end
      end

      # Next implicit id: one past the current maximum.
      def next_id
        ids.max.to_i + 1
      end

      # Lazily builds the configured storage backend.
      def store
        @store ||= ActiveEnum.storage_class.new(self, @order || :asc, ActiveEnum.storage_options)
      end
    end
  end
end
Tweaks
module ActiveEnum
  # Raised when two enum values share an id or name.
  class DuplicateValue < StandardError; end
  # Raised when a value definition has an unsupported format.
  class InvalidValue < StandardError; end

  class Base
    class << self
      attr_accessor :store

      # Register every subclass so ActiveEnum can enumerate them.
      def inherited(subclass)
        ActiveEnum.enum_classes << subclass
      end

      # Define enum values.
      #
      # Examples:
      #   value :id => 1, :name => 'Foo'
      #   value :name => 'Foo' # implicit id, incrementing from 1.
      #   value 1 => 'Foo'
      #
      def value(enum_value)
        store.set *id_and_name_and_meta(enum_value)
      end

      # Specify order enum values are returned.
      # Allowed values are :asc, :desc or :natural
      #
      def order(order)
        if order == :as_defined
          ActiveSupport::Deprecation.warn("You are using the order :as_defined which has been deprecated. Use :natural.")
          order = :natural
        end
        @order = order
      end

      # All stored [id, name(, meta)] rows.
      def all
        store.values
      end

      # Array of all enum id values
      def ids
        store.values.map {|v| v[0] }
      end

      # Array of all enum name values
      def names
        store.values.map {|v| v[1] }
      end

      # Return enum values in an array suitable to pass to a Rails form select helper.
      def to_select
        store.values.map {|v| [v[1], v[0]] }
      end

      # Access id or name value. Pass an id number to retrieve the name or
      # a symbol or string to retrieve the matching id.
      # Integer replaces the deprecated Fixnum constant, which was removed
      # in Ruby 3.2 (identical behaviour on earlier rubies).
      def [](index)
        if index.is_a?(Integer)
          row = store.get_by_id(index)
          row[1] if row
        else
          row = store.get_by_name(index)
          row[0] if row
        end
      end

      def include?(value)
        !self[value].nil?
      end

      # Access any meta data defined for a given id or name. Returns a hash.
      def meta(index)
        row = if index.is_a?(Integer)
          store.get_by_id(index)
        else
          store.get_by_name(index)
        end
        row[2] || {} if row
      end

      private

      # Normalizes a value definition hash into [id, name, meta].
      def id_and_name_and_meta(hash)
        if hash.has_key?(:name)
          id = hash.delete(:id) || next_id
          name = hash.delete(:name)
          meta = hash
          return id, name, (meta.empty? ? nil : meta)
        elsif hash.keys.first.is_a?(Integer)
          return *Array(hash).first
        else
          raise ActiveEnum::InvalidValue, "The value supplied, #{hash}, is not a valid format."
        end
      end

      # Next implicit id: one past the current maximum.
      def next_id
        ids.max.to_i + 1
      end

      # Lazily builds the configured storage backend.
      def store
        @store ||= ActiveEnum.storage_class.new(self, @order || :asc, ActiveEnum.storage_options)
      end
    end
  end
end
|
if defined? Rails
require 'activity_tracker/version'
require 'activity_tracker/railtie'
else
require File.expand_path(File.dirname(__FILE__) + '/activity_tracker/version')
end
require 'rack'
require 'moneta'
require 'rack/moneta_store'
#use Rack::MonetaStore, :Memory
module ActivityTracker
  # Rack middleware: intercepts /track_activity requests, records them in
  # a Moneta-backed batch and answers directly; everything else is passed
  # through to the wrapped application.
  class App
    def initialize app
      @app = app
    end

    # Standard Rack entry point.
    def call env
      interception = Interception.new env
      if interception.intercept?
        interception.track_activity
        [200, {'Content-Type' => 'text/html'}, [interception.result.inspect]]
      else
        @app.call env
      end
    end
  end

  # Wraps a single request and the batching logic around it.
  class Interception
    # Number of recorded activities that triggers a flush.
    BATCH_SIZE = 50

    def initialize env
      @env = env
    end

    def request
      @request ||= Rack::Request.new @env
    end

    # Both the user_id and act parameters must be present.
    def valid_params?
      (request.params.keys & %w{user_id act}).size == 2
    end

    # Prefix check via start_with? instead of a ^-anchored regex: ^ only
    # anchors per line, so a crafted multi-line path could slip through.
    def valid_path?
      request.path_info.start_with?('/track_activity')
    end

    def intercept?
      # && instead of `and` (same truth table, safe precedence).
      valid_path? && valid_params?
    end

    # Record one activity; flush the batch once it is large enough.
    def track_activity
      add_to_batch activity_params
      if batch_is_full?
        push_batch
        clear_batch
      end
    end

    def result
      batch
    end

    private

    def activity_params
      request.params.select { |k,v| %w{user_id act}.include? k.to_s }
    end

    def add_to_batch params
      store['activity_batch'] = batch << params
    end

    # TODO(review): still a stub - batches are dropped on flush.
    def push_batch
    end

    def clear_batch
      store['activity_batch'] = []
    end

    # >= instead of ==: if a flush ever fails, the batch size passes the
    # threshold and the original equality check would never flush again.
    def batch_is_full?
      batch.size >= BATCH_SIZE
    end

    def batch
      store.fetch('activity_batch', [])
    end

    # The Moneta store injected by Rack::MonetaStore.
    def store
      @env['rack.moneta_store']
    end
  end
end
Created benchmark to check if saving actions in batches will be faster than saving them via a separate request each
if defined? Rails
require 'activity_tracker/version'
require 'activity_tracker/railtie'
else
require File.expand_path(File.dirname(__FILE__) + '/activity_tracker/version')
end
require 'rack'
require 'moneta'
require 'rack/moneta_store'
#use Rack::MonetaStore, :Memory
module ActivityTracker
  # Rack middleware: intercepts /track_activity requests, records them in
  # a Moneta-backed batch and answers directly; everything else is passed
  # through to the wrapped application.
  class App
    def initialize app
      @app = app
    end

    # Standard Rack entry point.
    def call env
      interception = Interception.new env
      if interception.intercept?
        interception.track_activity
        [200, {'Content-Type' => 'text/html'}, [interception.result.inspect]]
      else
        @app.call env
      end
    end
  end

  # Wraps a single request and the batching logic around it.
  class Interception
    # Number of recorded activities that triggers a flush.
    BATCH_SIZE = 50

    def initialize env
      @env = env
    end

    def request
      @request ||= Rack::Request.new @env
    end

    # Both the user_id and act parameters must be present.
    def valid_params?
      (request.params.keys & %w{user_id act}).size == 2
    end

    # Prefix check via start_with? instead of a ^-anchored regex: ^ only
    # anchors per line, so a crafted multi-line path could slip through.
    def valid_path?
      request.path_info.start_with?('/track_activity')
    end

    def intercept?
      # && instead of `and` (same truth table, safe precedence).
      valid_path? && valid_params?
    end

    # Record one activity; flush the batch once it is large enough.
    def track_activity
      add_to_batch activity_params
      if batch_is_full?
        push_batch
        clear_batch
      end
    end

    def result
      batch
    end

    private

    def activity_params
      request.params.select { |k,v| %w{user_id act}.include? k.to_s }
    end

    def add_to_batch params
      store['activity_batch'] = batch << params
    end

    # TODO(review): still a stub - batches are dropped on flush.
    def push_batch
    end

    def clear_batch
      store['activity_batch'] = []
    end

    # >= instead of ==: if a flush ever fails, the batch size passes the
    # threshold and the original equality check would never flush again.
    def batch_is_full?
      batch.size >= BATCH_SIZE
    end

    def batch
      store.fetch('activity_batch', [])
    end

    # The Moneta store injected by Rack::MonetaStore.
    def store
      @env['rack.moneta_store']
    end
  end
end
|
module ActsAsApi
  # This module enriches the ActiveRecord::Base module of Rails.
  module Base
    # Indicates if the current model acts as api.
    # False by default.
    def acts_as_api?
      false
    end

    # When invoked, it enriches the current model with the
    # class and instance methods to act as api.
    # An optional block receives ActsAsApi::Config for configuration.
    def acts_as_api
      class_eval do
        include ActsAsApi::Base::InstanceMethods
        extend ActsAsApi::Base::ClassMethods
      end
      if block_given?
        yield ActsAsApi::Config
      end
    end

    module ClassMethods
      def acts_as_api?#:nodoc:
        self.included_modules.include?(InstanceMethods)
      end

      # Determines the attributes, methods of the model that are accessible in the api response.
      # *Note*: There is only whitelisting for api accessible attributes.
      # So once the model acts as api, you have to determine all attributes here that should
      # be contained in the api responses.
      #
      # NOTE(review): write_inheritable_attribute/read_inheritable_attribute
      # were removed in later Rails versions (replaced by class_attribute);
      # confirm the targeted Rails version before upgrading.
      def api_accessible(api_template, options = {}, &block)
        # Reuse an existing template or create a fresh one (ApiTemplate is
        # defined elsewhere in this library).
        attributes = api_accessible_attributes(api_template) || ApiTemplate.create(api_template)
        # :extend merges in the attributes of another, existing template.
        attributes.merge!(api_accessible_attributes(options[:extend])) if options[:extend]
        if block_given?
          yield attributes
        end
        write_inheritable_attribute("api_accessible_#{api_template}".to_sym, attributes)
      end

      # Returns an array of all the attributes that have been made accessible to the api response.
      def api_accessible_attributes(api_template)
        read_inheritable_attribute("api_accessible_#{api_template}".to_sym)
      end
    end

    module InstanceMethods
      # Creates the api response of the model and returns it as a Hash.
      # Will raise an exception if the passed api template is not defined for the model
      def as_api_response(api_template)
        api_attributes = self.class.api_accessible_attributes(api_template)
        raise ActsAsApi::TemplateNotFoundError.new("acts_as_api template :#{api_template.to_s} was not found for model #{self.class}") if api_attributes.nil?
        api_attributes.to_response_hash(self)
      end
    end
  end
end
swapped out the deprecated inheritable_attribute for class_attribute
module ActsAsApi
  # This module enriches the ActiveRecord::Base module of Rails.
  module Base
    # Indicates if the current model acts as api.
    # False by default.
    def acts_as_api?
      false
    end

    # When invoked, it enriches the current model with the
    # class and instance methods to act as api.
    # An optional block receives ActsAsApi::Config for configuration.
    def acts_as_api
      class_eval do
        include ActsAsApi::Base::InstanceMethods
        extend ActsAsApi::Base::ClassMethods
      end
      yield ActsAsApi::Config if block_given?
    end

    module ClassMethods
      def acts_as_api?#:nodoc:
        self.included_modules.include?(InstanceMethods)
      end

      # Determines the attributes, methods of the model that are accessible in the api response.
      # *Note*: There is only whitelisting for api accessible attributes.
      # So once the model acts as api, you have to determine all attributes here that should
      # be contained in the api responses.
      def api_accessible(api_template, options = {}, &block)
        attributes = api_accessible_attributes(api_template) || ApiTemplate.create(api_template)
        attributes.merge!(api_accessible_attributes(options[:extend])) if options[:extend]
        yield attributes if block_given?
        accessor = "api_accessible_#{api_template}"
        # Declare the class_attribute accessor only once; re-declaring it on
        # every call would redefine the methods (and reset the value) each time.
        class_attribute accessor.to_sym unless respond_to?(accessor)
        send "#{accessor}=", attributes
      end

      # Returns the template of attributes that have been made accessible to
      # the api response, or nil when no such template was declared.
      def api_accessible_attributes(api_template)
        accessor = "api_accessible_#{api_template}"
        # Guard with respond_to? instead of a blanket `rescue nil`, which
        # would also have swallowed unrelated errors raised while reading.
        send(accessor) if respond_to?(accessor)
      end
    end

    module InstanceMethods
      # Creates the api response of the model and returns it as a Hash.
      # Will raise an exception if the passed api template is not defined for the model
      def as_api_response(api_template)
        api_attributes = self.class.api_accessible_attributes(api_template)
        raise ActsAsApi::TemplateNotFoundError.new("acts_as_api template :#{api_template.to_s} was not found for model #{self.class}") if api_attributes.nil?
        api_attributes.to_response_hash(self)
      end
    end
  end
end
|
# encoding: utf-8
require "amq/client/entity"
require "amq/client/adapter"
require "amq/client/mixins/anonymous_entity"
module AMQ
module Client
class Queue < Entity
#
# Behaviors
#
include AnonymousEntityMixin
#
# API
#
# Qeueue name. May be server-generated or assigned directly.
attr_reader :name
# Channel this queue belongs to.
attr_reader :channel
# @param [AMQ::Client::Adapter] AMQ networking adapter to use.
# @param [AMQ::Client::Channel] AMQ channel this queue object uses.
# @param [String] Queue name. Please note that AMQP spec does not require brokers to support Unicode for queue names.
# @api public
def initialize(client, channel, name = AMQ::Protocol::EMPTY_STRING)
super(client)
@name = name
@channel = channel
end
# Returns a copy of this queue with its consumer tag reset, so the copy
# can register its own consumer. Raises for anonymous (server-named)
# queues that have not yet received their name, since the copy could
# never be addressed.
def dup
  if @name.empty?
    raise RuntimeError.new("You can't clone anonymous queue until it receives back the name in Queue.Declare-Ok response. Move the code with #dup to the callback for the #declare method.") # TODO: that's not true in all cases, imagine the user didn't call #declare yet.
  end
  o = super
  o.reset_consumer_tag!
  o
end
# @return [Boolean] true if this queue was declared as durable (will survive broker restart).
# @api public
def durable?
@durable
end # durable?
# @return [Boolean] true if this queue was declared as exclusive (limited to just one consumer)
# @api public
def exclusive?
@exclusive
end # exclusive?
# @return [Boolean] true if this queue was declared as automatically deleted (deleted as soon as last consumer unbinds).
# @api public
def auto_delete?
@auto_delete
end # auto_delete?
# Declares this queue.
#
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.7.2.1.)
def declare(passive = false, durable = false, exclusive = false, auto_delete = false, nowait = false, arguments = nil, &block)
raise ArgumentError, "declaration with nowait does not make sense for server-named queues! Either specify name other than empty string or use #declare without nowait" if nowait && self.anonymous?
@durable = durable
@exclusive = exclusive
@auto_delete = auto_delete
nowait = true if !block && !@name.empty?
@client.send(Protocol::Queue::Declare.encode(@channel.id, @name, passive, durable, exclusive, auto_delete, nowait, arguments))
if !nowait
self.callbacks[:declare] = block
@channel.queues_awaiting_declare_ok.push(self)
end
if @client.sync?
@client.read_until_receives(Protocol::Queue::DeclareOk) unless nowait
end
self
end
# Deletes this queue.
#
# @param [Boolean] if_unused delete only if queue has no consumers (subscribers).
# @param [Boolean] if_empty delete only if queue has no messages in it.
# @param [Boolean] nowait Don't wait for reply from broker.
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.7.2.9.)
def delete(if_unused = false, if_empty = false, nowait = false, &block)
nowait = true unless block
@client.send(Protocol::Queue::Delete.encode(@channel.id, @name, if_unused, if_empty, nowait))
if !nowait
self.callbacks[:delete] = block
# TODO: delete itself from queues cache
@channel.queues_awaiting_delete_ok.push(self)
end
self
end # delete(channel, queue, if_unused, if_empty, nowait, &block)
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.7.2.3.)
def bind(exchange, routing_key = AMQ::Protocol::EMPTY_STRING, nowait = false, arguments = nil, &block)
nowait = true unless block
exchange_name = if exchange.respond_to?(:name)
exchange.name
else
exchange
end
@client.send(Protocol::Queue::Bind.encode(@channel.id, @name, exchange_name, routing_key, nowait, arguments))
if !nowait
self.callbacks[:bind] = block
# TODO: handle channel & connection-level exceptions
@channel.queues_awaiting_bind_ok.push(self)
end
self
end
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.7.2.5.)
def unbind(exchange, routing_key = AMQ::Protocol::EMPTY_STRING, arguments = nil, &block)
exchange_name = if exchange.respond_to?(:name)
exchange.name
else
exchange
end
@client.send(Protocol::Queue::Unbind.encode(@channel.id, @name, exchange_name, routing_key, arguments))
self.callbacks[:unbind] = block
# TODO: handle channel & connection-level exceptions
@channel.queues_awaiting_unbind_ok.push(self)
self
end
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.8.3.3.)
def consume(no_ack = false, exclusive = false, nowait = false, no_local = false, arguments = nil, &block)
raise RuntimeError.new("This instance is already being consumed! Create another one using #dup.") if @consumer_tag
nowait = true unless block
@consumer_tag = "#{name}-#{Time.now.to_i * 1000}-#{Kernel.rand(999_999_999_999)}"
@client.send(Protocol::Basic::Consume.encode(@channel.id, @name, @consumer_tag, no_local, no_ack, exclusive, nowait, arguments))
@client.consumers[@consumer_tag] = self
if !nowait
self.callbacks[:consume] = block
@channel.queues_awaiting_consume_ok.push(self)
end
self
end
# Resets consumer tag by setting it to nil.
# @return [String] Consumer tag this queue previously used.
#
# @api plugin
def reset_consumer_tag!
ct = @consumer_tag.dup
@consumer_tag = nil
ct
end
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.8.3.10.)
def get(no_ack = false, &block)
@client.send(Protocol::Basic::Get.encode(@channel.id, @name, no_ack))
self.callbacks[:get] = block
@channel.queues_awaiting_get_response.push(self)
self
end # get(no_ack = false, &block)
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.8.3.5.)
def cancel(nowait = false, &block)
@client.send(Protocol::Basic::Cancel.encode(@channel.id, @consumer_tag, nowait))
if !nowait
self.callbacks[:consume] = block
@channel.queues_awaiting_cancel_ok.push(self)
else
@consumer_tag = nil
end
self
end # cancel(&block)
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.7.2.7.)
def purge(nowait = false, &block)
nowait = true unless block
@client.send(Protocol::Queue::Purge.encode(@channel.id, @name, nowait))
if !nowait
self.callbacks[:purge] = block
# TODO: handle channel & connection-level exceptions
@channel.queues_awaiting_purge_ok.push(self)
end
self
end # purge(nowait = false, &block)
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.8.3.13.)
def acknowledge(delivery_tag)
@channel.acknowledge(delivery_tag)
self
end # acknowledge(delivery_tag)
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.8.3.14.)
def reject(delivery_tag, requeue = true)
@channel.reject(delivery_tag, requeue)
self
end # reject(delivery_tag, requeue = true)
# Notifies AMQ broker that consumer has recovered and unacknowledged messages need
# to be redelivered.
#
# @return [Queue] self
#
# @note RabbitMQ as of 2.3.1 does not support basic.recover with requeue = false.
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.8.3.16.)
def recover(requeue = true, &block)
@client.send(Protocol::Basic::Recover.encode(@channel.id, requeue))
self.callbacks[:recover] = block
@channel.queues_awaiting_recover_ok.push(self)
self
end # recover(requeue = false, &block)
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Sections 1.8.3.9)
def on_delivery(&block)
self.callbacks[:delivery] = block if block
end # on_delivery(&block)
#
# Implementation
#
def handle_declare_ok(method)
@name = method.queue if self.anonymous?
@channel.register_queue(self)
self.exec_callback(:declare, method.queue, method.consumer_count, method.message_count)
end
def handle_delete_ok(method)
self.exec_callback(:delete, method.message_count)
end # handle_delete_ok(method)
def handle_consume_ok(method)
self.exec_callback(:consume, method.consumer_tag)
end # handle_consume_ok(method)
def handle_purge_ok(method)
self.exec_callback(:purge, method.message_count)
end # handle_purge_ok(method)
def handle_bind_ok(method)
self.exec_callback(:bind)
end # handle_bind_ok(method)
def handle_unbind_ok(method)
self.exec_callback(:unbind)
end # handle_unbind_ok(method)
def handle_delivery(method, header, payload)
self.exec_callback(:delivery, header, payload, method.consumer_tag, method.delivery_tag, method.redelivered, method.exchange, method.routing_key)
end # def handle_delivery
def handle_cancel_ok(method)
@consumer_tag = nil
self.exec_callback(:cancel, method.consumer_tag)
end # handle_cancel_ok(method)
def handle_get_ok(method, header, payload)
self.exec_callback(:get, header, payload, method.delivery_tag, method.redelivered, method.exchange, method.routing_key, method.message_count)
end # handle_get_ok(method, header, payload)
def handle_get_empty(method)
self.exec_callback(:get)
end # handle_get_empty(method)
# Get the first queue which didn't receive Queue.Declare-Ok yet and run its declare callback.
# The cache includes only queues with {nowait: false}.
self.handle(Protocol::Queue::DeclareOk) do |client, frame|
method = frame.decode_payload
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_declare_ok.shift
queue.handle_declare_ok(method)
end
self.handle(Protocol::Queue::DeleteOk) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_delete_ok.shift
queue.handle_delete_ok(frame.decode_payload)
end
self.handle(Protocol::Queue::BindOk) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_bind_ok.shift
queue.handle_bind_ok(frame.decode_payload)
end
self.handle(Protocol::Queue::UnbindOk) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_unbind_ok.shift
queue.handle_unbind_ok(frame.decode_payload)
end
self.handle(Protocol::Basic::ConsumeOk) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_consume_ok.shift
queue.handle_consume_ok(frame.decode_payload)
end
self.handle(Protocol::Basic::CancelOk) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_cancel_ok.shift
queue.handle_consume_ok(frame.decode_payload)
end
# Basic.Deliver
self.handle(Protocol::Basic::Deliver) do |client, method_frame, content_frames|
method = method_frame.decode_payload
queue = client.consumers[method.consumer_tag]
header = content_frames.shift
body = content_frames.map {|frame| frame.payload }.join
queue.handle_delivery(method, header, body)
# TODO: ack if necessary
end
self.handle(Protocol::Queue::PurgeOk) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_purge_ok.shift
queue.handle_purge_ok(frame.decode_payload)
end
self.handle(Protocol::Basic::GetOk) do |client, frame, content_frames|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_get_response.shift
method = frame.decode_payload
header = content_frames.shift
body = content_frames.map {|frame| frame.payload }.join
queue.handle_get_ok(method, header, body)
end
self.handle(Protocol::Basic::GetEmpty) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_get_response.shift
queue.handle_get_empty(frame.decode_payload)
end
self.handle(Protocol::Basic::RecoverOk) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_recover_ok.shift
queue.exec_callback(:recover)
end
end # Queue
end # Client
end # AMQ
Duplicating a not-yet-declared queue is misleading and likely indicates a mistake
# encoding: utf-8
require "amq/client/entity"
require "amq/client/adapter"
require "amq/client/mixins/anonymous_entity"
module AMQ
module Client
class Queue < Entity
#
# Behaviors
#
include AnonymousEntityMixin
#
# API
#
# Qeueue name. May be server-generated or assigned directly.
attr_reader :name
# Channel this queue belongs to.
attr_reader :channel
# @param [AMQ::Client::Adapter] AMQ networking adapter to use.
# @param [AMQ::Client::Channel] AMQ channel this queue object uses.
# @param [String] Queue name. Please note that AMQP spec does not require brokers to support Unicode for queue names.
# @api public
def initialize(client, channel, name = AMQ::Protocol::EMPTY_STRING)
super(client)
@name = name
@channel = channel
end
# Returns a copy of this queue with its consumer tag cleared, so the copy
# can register its own consumer. Anonymous queues cannot be duplicated
# before the broker has assigned them a name.
def dup
  raise RuntimeError.new("You can't clone anonymous queue until it receives server-generated name. Move the code with #dup to the callback for the #declare method.") if @name.empty?
  copy = super
  copy.reset_consumer_tag!
  copy
end
# @return [Boolean] true if this queue was declared as durable (will survive broker restart).
# @api public
def durable?
@durable
end # durable?
# @return [Boolean] true if this queue was declared as exclusive (limited to just one consumer)
# @api public
def exclusive?
@exclusive
end # exclusive?
# @return [Boolean] true if this queue was declared as automatically deleted (deleted as soon as last consumer unbinds).
# @api public
def auto_delete?
@auto_delete
end # auto_delete?
# Declares this queue.
#
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.7.2.1.)
def declare(passive = false, durable = false, exclusive = false, auto_delete = false, nowait = false, arguments = nil, &block)
raise ArgumentError, "declaration with nowait does not make sense for server-named queues! Either specify name other than empty string or use #declare without nowait" if nowait && self.anonymous?
@durable = durable
@exclusive = exclusive
@auto_delete = auto_delete
nowait = true if !block && !@name.empty?
@client.send(Protocol::Queue::Declare.encode(@channel.id, @name, passive, durable, exclusive, auto_delete, nowait, arguments))
if !nowait
self.callbacks[:declare] = block
@channel.queues_awaiting_declare_ok.push(self)
end
if @client.sync?
@client.read_until_receives(Protocol::Queue::DeclareOk) unless nowait
end
self
end
# Deletes this queue.
#
# @param [Boolean] if_unused delete only if queue has no consumers (subscribers).
# @param [Boolean] if_empty delete only if queue has no messages in it.
# @param [Boolean] nowait Don't wait for reply from broker.
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.7.2.9.)
def delete(if_unused = false, if_empty = false, nowait = false, &block)
nowait = true unless block
@client.send(Protocol::Queue::Delete.encode(@channel.id, @name, if_unused, if_empty, nowait))
if !nowait
self.callbacks[:delete] = block
# TODO: delete itself from queues cache
@channel.queues_awaiting_delete_ok.push(self)
end
self
end # delete(channel, queue, if_unused, if_empty, nowait, &block)
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.7.2.3.)
def bind(exchange, routing_key = AMQ::Protocol::EMPTY_STRING, nowait = false, arguments = nil, &block)
nowait = true unless block
exchange_name = if exchange.respond_to?(:name)
exchange.name
else
exchange
end
@client.send(Protocol::Queue::Bind.encode(@channel.id, @name, exchange_name, routing_key, nowait, arguments))
if !nowait
self.callbacks[:bind] = block
# TODO: handle channel & connection-level exceptions
@channel.queues_awaiting_bind_ok.push(self)
end
self
end
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.7.2.5.)
def unbind(exchange, routing_key = AMQ::Protocol::EMPTY_STRING, arguments = nil, &block)
exchange_name = if exchange.respond_to?(:name)
exchange.name
else
exchange
end
@client.send(Protocol::Queue::Unbind.encode(@channel.id, @name, exchange_name, routing_key, arguments))
self.callbacks[:unbind] = block
# TODO: handle channel & connection-level exceptions
@channel.queues_awaiting_unbind_ok.push(self)
self
end
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.8.3.3.)
def consume(no_ack = false, exclusive = false, nowait = false, no_local = false, arguments = nil, &block)
raise RuntimeError.new("This instance is already being consumed! Create another one using #dup.") if @consumer_tag
nowait = true unless block
@consumer_tag = "#{name}-#{Time.now.to_i * 1000}-#{Kernel.rand(999_999_999_999)}"
@client.send(Protocol::Basic::Consume.encode(@channel.id, @name, @consumer_tag, no_local, no_ack, exclusive, nowait, arguments))
@client.consumers[@consumer_tag] = self
if !nowait
self.callbacks[:consume] = block
@channel.queues_awaiting_consume_ok.push(self)
end
self
end
# Resets consumer tag by setting it to nil.
# @return [String] Consumer tag this queue previously used.
#
# @api plugin
# Resets consumer tag by setting it to nil.
# @return [String, nil] Consumer tag this queue previously used, or nil
#   when the queue had no active consumer (e.g. a fresh #dup copy).
#
# @api plugin
def reset_consumer_tag!
  # Plain reassignment instead of @consumer_tag.dup: NilClass#dup raises
  # on the Rubies this library targets, so calling this on a queue that
  # was never consumed from (as Queue#dup does) would blow up.
  ct = @consumer_tag
  @consumer_tag = nil
  ct
end
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.8.3.10.)
def get(no_ack = false, &block)
@client.send(Protocol::Basic::Get.encode(@channel.id, @name, no_ack))
self.callbacks[:get] = block
@channel.queues_awaiting_get_response.push(self)
self
end # get(no_ack = false, &block)
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.8.3.5.)
def cancel(nowait = false, &block)
  @client.send(Protocol::Basic::Cancel.encode(@channel.id, @consumer_tag, nowait))
  if !nowait
    # NOTE(review): the callback is stored under :consume, and the
    # Basic::CancelOk handler registered below dispatches through
    # #handle_consume_ok, so #handle_cancel_ok (which clears
    # @consumer_tag) is never invoked on this path — the tag is only
    # cleared in the nowait branch. Confirm this asymmetry is intended.
    self.callbacks[:consume] = block
    @channel.queues_awaiting_cancel_ok.push(self)
  else
    @consumer_tag = nil
  end
  self
end # cancel(&block)
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.7.2.7.)
# Purges (removes all messages from) this queue. When no block is given
# the request is sent with nowait and no Purge-Ok confirmation is awaited.
def purge(nowait = false, &block)
  nowait ||= block.nil?
  @client.send(Protocol::Queue::Purge.encode(@channel.id, @name, nowait))
  unless nowait
    self.callbacks[:purge] = block
    # TODO: handle channel & connection-level exceptions
    @channel.queues_awaiting_purge_ok.push(self)
  end
  self
end # purge(nowait = false, &block)
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.8.3.13.)
def acknowledge(delivery_tag)
@channel.acknowledge(delivery_tag)
self
end # acknowledge(delivery_tag)
#
# @return [Queue] self
#
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.8.3.14.)
def reject(delivery_tag, requeue = true)
@channel.reject(delivery_tag, requeue)
self
end # reject(delivery_tag, requeue = true)
# Notifies AMQ broker that consumer has recovered and unacknowledged messages need
# to be redelivered.
#
# @return [Queue] self
#
# @note RabbitMQ as of 2.3.1 does not support basic.recover with requeue = false.
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Section 1.8.3.16.)
def recover(requeue = true, &block)
@client.send(Protocol::Basic::Recover.encode(@channel.id, requeue))
self.callbacks[:recover] = block
@channel.queues_awaiting_recover_ok.push(self)
self
end # recover(requeue = false, &block)
# @api public
# @see http://bit.ly/htCzCX AMQP 0.9.1 protocol documentation (Sections 1.8.3.9)
def on_delivery(&block)
self.callbacks[:delivery] = block if block
end # on_delivery(&block)
#
# Implementation
#
def handle_declare_ok(method)
@name = method.queue if self.anonymous?
@channel.register_queue(self)
self.exec_callback(:declare, method.queue, method.consumer_count, method.message_count)
end
def handle_delete_ok(method)
self.exec_callback(:delete, method.message_count)
end # handle_delete_ok(method)
def handle_consume_ok(method)
self.exec_callback(:consume, method.consumer_tag)
end # handle_consume_ok(method)
def handle_purge_ok(method)
self.exec_callback(:purge, method.message_count)
end # handle_purge_ok(method)
def handle_bind_ok(method)
self.exec_callback(:bind)
end # handle_bind_ok(method)
def handle_unbind_ok(method)
self.exec_callback(:unbind)
end # handle_unbind_ok(method)
def handle_delivery(method, header, payload)
self.exec_callback(:delivery, header, payload, method.consumer_tag, method.delivery_tag, method.redelivered, method.exchange, method.routing_key)
end # def handle_delivery
def handle_cancel_ok(method)
@consumer_tag = nil
self.exec_callback(:cancel, method.consumer_tag)
end # handle_cancel_ok(method)
def handle_get_ok(method, header, payload)
self.exec_callback(:get, header, payload, method.delivery_tag, method.redelivered, method.exchange, method.routing_key, method.message_count)
end # handle_get_ok(method, header, payload)
def handle_get_empty(method)
self.exec_callback(:get)
end # handle_get_empty(method)
# Get the first queue which didn't receive Queue.Declare-Ok yet and run its declare callback.
# The cache includes only queues with {nowait: false}.
self.handle(Protocol::Queue::DeclareOk) do |client, frame|
method = frame.decode_payload
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_declare_ok.shift
queue.handle_declare_ok(method)
end
self.handle(Protocol::Queue::DeleteOk) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_delete_ok.shift
queue.handle_delete_ok(frame.decode_payload)
end
self.handle(Protocol::Queue::BindOk) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_bind_ok.shift
queue.handle_bind_ok(frame.decode_payload)
end
self.handle(Protocol::Queue::UnbindOk) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_unbind_ok.shift
queue.handle_unbind_ok(frame.decode_payload)
end
self.handle(Protocol::Basic::ConsumeOk) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_consume_ok.shift
queue.handle_consume_ok(frame.decode_payload)
end
self.handle(Protocol::Basic::CancelOk) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_cancel_ok.shift
queue.handle_consume_ok(frame.decode_payload)
end
# Basic.Deliver
self.handle(Protocol::Basic::Deliver) do |client, method_frame, content_frames|
method = method_frame.decode_payload
queue = client.consumers[method.consumer_tag]
header = content_frames.shift
body = content_frames.map {|frame| frame.payload }.join
queue.handle_delivery(method, header, body)
# TODO: ack if necessary
end
self.handle(Protocol::Queue::PurgeOk) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_purge_ok.shift
queue.handle_purge_ok(frame.decode_payload)
end
self.handle(Protocol::Basic::GetOk) do |client, frame, content_frames|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_get_response.shift
method = frame.decode_payload
header = content_frames.shift
body = content_frames.map {|frame| frame.payload }.join
queue.handle_get_ok(method, header, body)
end
self.handle(Protocol::Basic::GetEmpty) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_get_response.shift
queue.handle_get_empty(frame.decode_payload)
end
self.handle(Protocol::Basic::RecoverOk) do |client, frame|
channel = client.connection.channels[frame.channel]
queue = channel.queues_awaiting_recover_ok.shift
queue.exec_callback(:recover)
end
end # Queue
end # Client
end # AMQ
|
#
# Copyright (C) 2014 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
require_dependency 'analytics/assignments'
require_dependency 'analytics/student_collection'
require_dependency 'analytics/tardiness_breakdown'
require_dependency 'analytics/page_view_analysis'
require_dependency 'analytics/assignment_submission'
require_dependency 'analytics/fake_submission'
module Analytics
class Course < Analytics::Base
def self.available_for?(current_user, course)
new(current_user, course).available?
end
def initialize(current_user, course)
super(current_user)
@course = course
end
# Whether the current user can see any active/completed student
# enrollment in the course (cached).
def available?
  # not slaved because it's pretty lightweight and we don't want it to
  # depend on the slave being present
  # exists? issues a SELECT 1 ... LIMIT 1 instead of materializing a full
  # row the way enrollment_scope.first.present? did.
  cache(:available) { enrollment_scope.exists? }
end
def enrollments
@enrollments ||= slaved do
rows = enrollment_scope.to_a
ActiveRecord::Associations::Preloader.new(rows, [ :course_section, {:course => :enrollment_term} ]).run
rows
end
end
# Earliest known start among enrollments, visible sections, the course,
# its term, and student-enrollment date overrides; falls back to the
# earliest visible-section creation time, then course creation, then now.
def start_date
  # TODO the javascript will break if this comes back nil, so we need a
  # sensible default. using "now" for the time being, but there's gotta be
  # something better
  slaved(:cache_as => :start_date) do
    [
      enrollment_scope.minimum(:start_at),
      @course.sections_visible_to(@current_user).minimum(:start_at),
      @course.start_at,
      @course.enrollment_term.start_at,
      @course.enrollment_term.enrollment_dates_overrides.where(enrollment_type: 'StudentEnrollment').minimum(:start_at),
    ].compact.min ||
    @course.sections_visible_to(@current_user).minimum(:created_at) ||
    @course.created_at ||
    Time.zone.now
  end
end
# Latest known end among enrollments, visible sections, the course,
# its term, and student-enrollment date overrides; falls back to now.
def end_date
  # TODO ditto. "now" makes more sense this time, but it could also make
  # sense to go past "now" if the course has assignments due in the future,
  # for instance.
  slaved(:cache_as => :end_date) do
    [
      enrollment_scope.maximum(:end_at),
      @course.sections_visible_to(@current_user).maximum(:end_at),
      @course.conclude_at,
      @course.enrollment_term.end_at,
      @course.enrollment_term.enrollment_dates_overrides.where(enrollment_type: 'StudentEnrollment').maximum(:end_at),
    ].compact.max || Time.zone.now
  end
end
def students
slaved(:cache_as => :students) { student_scope.order_by_sortable_name.to_a }
end
def student_ids
slaved(:cache_as => :student_ids) do
# id of any user with an enrollment, order unimportant
enrollment_scope.uniq.pluck(:user_id)
end
end
def participation
slaved(:cache_as => :participation) do
@course.page_views_rollups.
select("date, SUM(views) AS views, SUM(participations) AS participations").
group(:date).
map{ |rollup| rollup.as_json[:page_views_rollup] }
end
end
include Analytics::Assignments
def overridden_assignment(assignment, user)
assignment.overridden_for(user)
end
# Overriding this from Assignments to account for Variable Due Dates
def basic_assignment_data(assignment, submissions=nil)
vdd = overridden_assignment( assignment, @current_user )
super.merge(
:due_at => vdd.due_at,
:multiple_due_dates => vdd.multiple_due_dates_apply_to?(@current_user),
:non_digital_submission => assignment.non_digital_submission?
)
end
def extended_assignment_data(assignment, submissions)
{ tardiness_breakdown: tardiness_breakdowns[:assignments][assignment.id].as_hash_scaled }
end
def student_summaries(sort_column=nil)
# course global counts (by student) and maxima
# we have to select the entire course here, because we need to calculate
# the max over the whole course not just the students the pagination is
# returning.
page_view_counts = self.page_views_by_student
analysis = PageViewAnalysis.new( page_view_counts )
# wrap up the students for pagination, and then tell it how to sort them
# and format them
collection = Analytics::StudentCollection.new(student_scope)
collection.sort_by(sort_column, :page_view_counts => page_view_counts)
collection.format do |student|
{
:id => student.id,
:page_views => page_view_counts[student.id][:page_views],
:max_page_views => analysis.max_page_views,
:participations => page_view_counts[student.id][:participations],
:max_participations => analysis.max_participations,
:tardiness_breakdown => tardiness_breakdowns[:students][student.id].as_hash
}
end
collection
end
# Page-view/participation counters keyed by student id, read from the
# slave DB and cached.
def page_views_by_student
slaved(:cache_as => :page_views_by_student) do
PageView.counters_by_context_for_users(@course, student_ids)
end
end
# Whether @current_user may see per-student details (requires a grading
# or view-all-grades permission on the course).
def allow_student_details?
@course.grants_any_right?(@current_user, :manage_grades, :view_all_grades)
end
# Cache key component: the course plus a digest of the visible-enrollment
# SQL, so cached values invalidate when visibility rules change.
def cache_prefix
[@course, Digest::MD5.hexdigest(enrollment_scope.to_sql)]
end
# Active/completed student enrollments visible to @current_user (memoized).
def enrollment_scope
@enrollment_scope ||= @course.apply_enrollment_visibility(@course.all_student_enrollments, @current_user).
where(:enrollments => { :workflow_state => ['active', 'completed'] })
end
# Materialized submissions for the given assignments/students, loaded on
# the course's shard.
def submissions(assignments, student_ids=self.student_ids)
@course.shard.activate{ submission_scope(assignments, student_ids).to_a }
end
# Relation of submissions for the given assignments restricted to students.
def submission_scope(assignments, student_ids=self.student_ids)
::Analytics::Course.submission_scope_for(assignments).where(user_id: student_ids)
end
# Class-level submission relation, selecting only the analytics columns.
def self.submission_scope_for(assignments)
Submission.
select(Analytics::Assignments::SUBMISSION_COLUMNS_SELECT).
where(:assignment_id => assignments)
end
# Memoized scope of users with a visible enrollment, carrying each
# enrollment's computed_current_score alongside the user columns.
def student_scope
@student_scope ||= begin
# any user with an enrollment, ordered by name
subselect = enrollment_scope.select([:user_id, :computed_current_score]).uniq.to_sql
User.
select("users.*, enrollments.computed_current_score").
joins("INNER JOIN (#{subselect}) AS enrollments ON enrollments.user_id=users.id")
end
end
# Loads the course's assignments from the slave; cache key includes the
# user when differentiated assignments change per-user visibility.
def raw_assignments
cache_array = [:raw_assignments]
cache_array << @current_user if differentiated_assignments_applies?
slaved(:cache_as => cache_array) do
assignment_scope.to_a
end
end
# Tallies on-time/late/missing status for every (assignment, student)
# pair. Returns { assignments: {id => TardinessBreakdown},
# students: {id => TardinessBreakdown} }; memoized and cached (cache key
# includes the user when differentiated assignments apply).
def tardiness_breakdowns
@course.shard.activate do
cache_array = [:tardiness_breakdowns]
cache_array << @current_user if differentiated_assignments_applies?
@tardiness_breakdowns ||= slaved(:cache_as => cache_array) do
# initialize breakdown tallies
breakdowns = {
assignments: Hash[raw_assignments.map{ |a| [a.id, TardinessBreakdown.new] }],
students: Hash[student_ids.map{ |s_id| [s_id, TardinessBreakdown.new] }]
}
# load submissions and index them by (assignment, student) tuple
submissions = FakeSubmission.from_scope(submission_scope(raw_assignments))
submissions = submissions.index_by{ |s| [s.assignment_id, s.user_id] }
# tally each submission (or lack thereof) into the columns and rows of
# the breakdown
raw_assignments.each do |assignment|
student_ids.each do |student_id|
submission = submissions[[assignment.id, student_id]]
submission.assignment = assignment if submission
# a nil submission still gets tallied (counts as missing work)
assignment_submission = AssignmentSubmission.new(assignment, submission)
breakdowns[:assignments][assignment.id].tally!(assignment_submission)
breakdowns[:students][student_id].tally!(assignment_submission)
end
end
# done
breakdowns
end
end
end
end
end
use exists? instead of first.present?
fixes CNVS-25932
Change-Id: Ifc28255eb0448667f982a97b80247c737e59da64
Reviewed-on: https://gerrit.instructure.com/69030
Reviewed-by: Rob Orton <7e09c9d3e96378bf549fc283fd6e1e5b7014cc33@instructure.com>
Product-Review: Rob Orton <7e09c9d3e96378bf549fc283fd6e1e5b7014cc33@instructure.com>
QA-Review: Rob Orton <7e09c9d3e96378bf549fc283fd6e1e5b7014cc33@instructure.com>
Tested-by: Jenkins
#
# Copyright (C) 2014 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
require_dependency 'analytics/assignments'
require_dependency 'analytics/student_collection'
require_dependency 'analytics/tardiness_breakdown'
require_dependency 'analytics/page_view_analysis'
require_dependency 'analytics/assignment_submission'
require_dependency 'analytics/fake_submission'
module Analytics
class Course < Analytics::Base
# True if `current_user` can see any analytics for `course`.
def self.available_for?(current_user, course)
new(current_user, course).available?
end
# @param current_user [User] the user viewing analytics
# @param course [Course] the course being analyzed
def initialize(current_user, course)
super(current_user)
@course = course
end
# Analytics are available iff at least one visible enrollment exists.
def available?
# not slaved because it's pretty lightweight and we don't want it to
# depend on the slave being present
cache(:available) { enrollment_scope.exists? }
end
# Visible enrollments with course_section and course (incl. enrollment
# term) eagerly preloaded; memoized, read from the slave.
def enrollments
@enrollments ||= slaved do
rows = enrollment_scope.to_a
ActiveRecord::Associations::Preloader.new(rows, [ :course_section, {:course => :enrollment_term} ]).run
rows
end
end
# Earliest relevant date for the course's analytics window: the minimum
# across enrollment/section/course/term start dates, falling back to
# section or course creation, then "now".
def start_date
# TODO the javascript will break if this comes back nil, so we need a
# sensible default. using "now" for the time being, but there's gotta be
# something better
slaved(:cache_as => :start_date) do
[
enrollment_scope.minimum(:start_at),
@course.sections_visible_to(@current_user).minimum(:start_at),
@course.start_at,
@course.enrollment_term.start_at,
@course.enrollment_term.enrollment_dates_overrides.where(enrollment_type: 'StudentEnrollment').minimum(:start_at),
].compact.min ||
@course.sections_visible_to(@current_user).minimum(:created_at) ||
@course.created_at ||
Time.zone.now
end
end
# Latest relevant date: the maximum across enrollment/section/course/term
# end dates, falling back to "now".
def end_date
# TODO ditto. "now" makes more sense this time, but it could also make
# sense to go past "now" if the course has assignments due in the future,
# for instance.
slaved(:cache_as => :end_date) do
[
enrollment_scope.maximum(:end_at),
@course.sections_visible_to(@current_user).maximum(:end_at),
@course.conclude_at,
@course.enrollment_term.end_at,
@course.enrollment_term.enrollment_dates_overrides.where(enrollment_type: 'StudentEnrollment').maximum(:end_at),
].compact.max || Time.zone.now
end
end
# All visible students, ordered by sortable name (slave read, cached).
def students
slaved(:cache_as => :students) { student_scope.order_by_sortable_name.to_a }
end
# Distinct ids of enrolled students (slave read, cached).
def student_ids
slaved(:cache_as => :student_ids) do
# id of any user with an enrollment, order unimportant
enrollment_scope.uniq.pluck(:user_id)
end
end
# Daily view/participation totals from the page_views_rollups table,
# one hash per date.
def participation
slaved(:cache_as => :participation) do
@course.page_views_rollups.
select("date, SUM(views) AS views, SUM(participations) AS participations").
group(:date).
map{ |rollup| rollup.as_json[:page_views_rollup] }
end
end
include Analytics::Assignments
# Returns `assignment` with due-date overrides applied for `user`
# (delegates to Assignment#overridden_for).
def overridden_assignment(assignment, user)
assignment.overridden_for(user)
end
# Overriding this from Assignments to account for Variable Due Dates
def basic_assignment_data(assignment, submissions=nil)
# resolve dates as @current_user sees them
vdd = overridden_assignment( assignment, @current_user )
super.merge(
:due_at => vdd.due_at,
:multiple_due_dates => vdd.multiple_due_dates_apply_to?(@current_user),
:non_digital_submission => assignment.non_digital_submission?
)
end
# Scaled (proportional) tardiness tallies for one assignment;
# `submissions` is accepted for interface parity but unused.
def extended_assignment_data(assignment, submissions)
{ tardiness_breakdown: tardiness_breakdowns[:assignments][assignment.id].as_hash_scaled }
end
# Paginatable collection of per-student summary rows (page views,
# participations, course maxima, tardiness breakdown).
def student_summaries(sort_column=nil)
# course global counts (by student) and maxima
# we have to select the entire course here, because we need to calculate
# the max over the whole course not just the students the pagination is
# returning.
page_view_counts = self.page_views_by_student
analysis = PageViewAnalysis.new( page_view_counts )
# wrap up the students for pagination, and then tell it how to sort them
# and format them
collection = Analytics::StudentCollection.new(student_scope)
collection.sort_by(sort_column, :page_view_counts => page_view_counts)
collection.format do |student|
{
:id => student.id,
:page_views => page_view_counts[student.id][:page_views],
:max_page_views => analysis.max_page_views,
:participations => page_view_counts[student.id][:participations],
:max_participations => analysis.max_participations,
:tardiness_breakdown => tardiness_breakdowns[:students][student.id].as_hash
}
end
collection
end
# Page-view/participation counters keyed by student id (slave, cached).
def page_views_by_student
slaved(:cache_as => :page_views_by_student) do
PageView.counters_by_context_for_users(@course, student_ids)
end
end
# Per-student detail visibility requires grading permissions.
def allow_student_details?
@course.grants_any_right?(@current_user, :manage_grades, :view_all_grades)
end
# Cache key: course + digest of visible-enrollment SQL, so caches
# invalidate when visibility rules change.
def cache_prefix
[@course, Digest::MD5.hexdigest(enrollment_scope.to_sql)]
end
# Active/completed student enrollments visible to @current_user (memoized).
def enrollment_scope
@enrollment_scope ||= @course.apply_enrollment_visibility(@course.all_student_enrollments, @current_user).
where(:enrollments => { :workflow_state => ['active', 'completed'] })
end
# Materialized submissions, loaded on the course's shard.
def submissions(assignments, student_ids=self.student_ids)
@course.shard.activate{ submission_scope(assignments, student_ids).to_a }
end
# Submission relation for the given assignments restricted to students.
def submission_scope(assignments, student_ids=self.student_ids)
::Analytics::Course.submission_scope_for(assignments).where(user_id: student_ids)
end
# Class-level submission relation, selecting only the analytics columns.
def self.submission_scope_for(assignments)
Submission.
select(Analytics::Assignments::SUBMISSION_COLUMNS_SELECT).
where(:assignment_id => assignments)
end
# Memoized user scope carrying each enrollment's computed_current_score.
def student_scope
@student_scope ||= begin
# any user with an enrollment, ordered by name
subselect = enrollment_scope.select([:user_id, :computed_current_score]).uniq.to_sql
User.
select("users.*, enrollments.computed_current_score").
joins("INNER JOIN (#{subselect}) AS enrollments ON enrollments.user_id=users.id")
end
end
# Course assignments from the slave; cache key includes the user when
# differentiated assignments change per-user visibility.
def raw_assignments
cache_array = [:raw_assignments]
cache_array << @current_user if differentiated_assignments_applies?
slaved(:cache_as => cache_array) do
assignment_scope.to_a
end
end
# Tardiness tallies for every (assignment, student) pair, memoized and
# cached: { assignments: {id => TardinessBreakdown},
# students: {id => TardinessBreakdown} }.
def tardiness_breakdowns
@course.shard.activate do
cache_array = [:tardiness_breakdowns]
cache_array << @current_user if differentiated_assignments_applies?
@tardiness_breakdowns ||= slaved(:cache_as => cache_array) do
# initialize breakdown tallies
breakdowns = {
assignments: Hash[raw_assignments.map{ |a| [a.id, TardinessBreakdown.new] }],
students: Hash[student_ids.map{ |s_id| [s_id, TardinessBreakdown.new] }]
}
# load submissions and index them by (assignment, student) tuple
submissions = FakeSubmission.from_scope(submission_scope(raw_assignments))
submissions = submissions.index_by{ |s| [s.assignment_id, s.user_id] }
# tally each submission (or lack thereof) into the columns and rows of
# the breakdown
raw_assignments.each do |assignment|
student_ids.each do |student_id|
submission = submissions[[assignment.id, student_id]]
submission.assignment = assignment if submission
# nil submissions are still tallied (missing work)
assignment_submission = AssignmentSubmission.new(assignment, submission)
breakdowns[:assignments][assignment.id].tally!(assignment_submission)
breakdowns[:students][student_id].tally!(assignment_submission)
end
end
# done
breakdowns
end
end
end
end
end
|
module Anise
# = Runtime Annotations
#
# The Annotation module is the heart of the Anise system.
# It provides the framework for annotating class or module related
# objects, typically symbols representing methods, with arbitrary
# metadata. These annotations do not do anything in themselves.
# They are simply data. But you can put them to use. For instance
# an attribute validator might check for an annotation called
# :valid and test against it.
#
# == Synopsis
#
# require 'anise/annotation'
#
# class X
# include Anise::Annotation
#
# attr :a
#
# ann :a, :desc => "A Number"
# end
#
# X.ann(:a, :desc) #=> "A Number"
#
# As stated, annotations need not only annotate methods, they are
# arbitrary, so they can be used for any purpose. For example, we
# may want to annotate instance variables.
#
# class X
# include Anise::Annotation
#
# ann :@a, :valid => lambda{ |x| x.is_a?(Integer) }
#
# def validate
# instance_variables.each do |iv|
# if validator = self.class.ann(iv)[:valid]
# value = instance_variable_get(iv)
# unless validator.call(value)
# raise "Invalid value #{value} for #{iv}"
# end
# end
# end
# end
# end
#
# Or, we could even annotate the class itself.
#
# class X
# include Anise::Annotation
#
# ann self, :valid => lambda{ |x| x.is_a?(Enumerable) }
# end
#
# Although annotations are arbitrary, they are tied to the class or
# module they are defined against.
#
#--
# TODO: By using a global variable rather than defining a class
# instance variable for each class/module, it is possible to
# quickly scan all annotations for the entire system. To do
# the same without this would require scanning through
# the ObjectSpace. Should we do this?
# $annotations = Hash.new { |h,k| h[k] = {} }
#
# TODO: The ann(x).name notation is kind of nice. Would like to add that
# back-in if reasonable. This would require @annotations to be an
# OpenHash or OpenObject rather than just a Hash.
#++
module Annotation
# Hook run by `include Anise::Annotation`.
# Including into Object redirects to Module so every class/module gains
# the methods; including into Module uses the normal mixin path (guarded
# against double inclusion); any other base is extended instead, making
# the annotation methods class-level.
def self.append_features(base)
if base == ::Object
append_features(::Module)
elsif base == ::Module
unless ::Module < Annotation
super
end
else
base.extend self
end
end
# Lookup an annotation. Unlike +annotations[ref]+
# this provides a complete annotation <i>heritage</i>,
# pulling annotations of the same reference name
# from ancestor classes and modules.
#
# With no +ref+: returns (memoizing) this module's own raw annotations hash.
# With a +ref+: walks ancestors farthest-first, merging each ancestor's
# annotations for +ref+ so nearer ancestors override farther ones.
def annotation(ref=nil)
return(@annotations ||= {}) if ref.nil?
ref = ref.to_sym
ann = {}
ancestors.reverse_each do |anc|
next unless anc.is_a?(Annotation)
#anc.annotations[ref] ||= {}
if anc.annotations[ref]
ann.update(anc.annotations[ref]) #.merge(ann)
end
end
return ann
#ancs = ancestors.select{ |a| a.is_a?(Annotations) }
#ancs.inject({}) do |memo, ancestor|
# ancestor.annotations[ref] ||= {}
# ancestor.annotations[ref].merge(memo)
#end
end
# Plural alias for #annotation.
alias_method :annotations, :annotation
# Stores this class' or module's annotations.
#
#def annotations
# #$annotations[self]
# @annotations ||= {}
#end
# Set or read annotations.
#
# Dispatch by argument shape:
#   ann(ref)              -> full merged annotation hash for ref (heritage)
#   ann(ref, SomeClass)   -> set keys[:class] = SomeClass (plus extra keys)
#   ann(ref, hash)        -> merge hash (keys symbolized) into ref's own annotations
#   ann(ref, :key)        -> read a single value from the merged heritage
def ann(ref, keys_or_class=nil, keys=nil)
return annotation(ref) unless keys_or_class or keys
if Class === keys_or_class
keys ||= {}
keys[:class] = keys_or_class
else
keys = keys_or_class
end
if Hash === keys
ref = ref.to_sym
keys = keys.inject({}){ |h,(k,v)| h[k.to_sym] = v; h} #rekey
annotations[ref] ||= {}
annotations[ref].update(keys)
else
key = keys.to_sym
annotation(ref)[key]
end
end
# To change an annotation's value in place for a given class or module
# it first must be duplicated, otherwise the change may affect annotations
# in the class or module's ancestors.
#
# Like #ann, but when reading a single key the inherited value is dup'd
# into this module's own annotations first, so in-place mutation does not
# bleed into ancestors. Values that can't be dup'd (TypeError) are stored
# as-is.
def ann!(ref, keys_or_class=nil, keys=nil)
#return annotation(ref) unless keys_or_class or keys
unless keys_or_class or keys
return annotations[ref] ||= {}
end
if Class === keys_or_class
keys ||= {}
keys[:class] = keys_or_class
else
keys = keys_or_class
end
if Hash === keys
ref = ref.to_sym
keys = keys.inject({}){ |h,(k,v)| h[k.to_sym] = v; h} #rekey
annotations[ref] ||= {}
annotations[ref].update(keys)
else
key = keys.to_sym
annotations[ref] ||= {}
begin
# copy-on-read so callers can mutate without touching ancestors
annotations[ref][key] = annotation(ref)[key].dup
rescue TypeError
annotations[ref][key] = annotation(ref)[key]
end
end
end
end
end
# 2006-11-07 trans Created this ultra-concise version of annotations.
# Copyright (c) 2005, 2008 TigerOps
added annotation_added callback
module Anise
# = Runtime Annotations
#
# The Annotation module is the heart of the Anise system.
# It provides the framework for annotating class or module related
# objects, typically symbols representing methods, with arbitrary
# metadata. These annotations do not do anything in themselves.
# They are simply data. But you can put them to use. For instance
# an attribute validator might check for an annotation called
# :valid and test against it.
#
# == Synopsis
#
# require 'anise/annotation'
#
# class X
# include Anise::Annotation
#
# attr :a
#
# ann :a, :desc => "A Number"
# end
#
# X.ann(:a, :desc) #=> "A Number"
#
# As stated, annotations need not only annotate methods, they are
# arbitrary, so they can be used for any purpose. For example, we
# may want to annotate instance variables.
#
# class X
# include Anise::Annotation
#
# ann :@a, :valid => lambda{ |x| x.is_a?(Integer) }
#
# def validate
# instance_variables.each do |iv|
# if validator = self.class.ann(iv)[:valid]
# value = instance_variable_get(iv)
# unless validator.call(value)
# raise "Invalid value #{value} for #{iv}"
# end
# end
# end
# end
# end
#
# Or, we could even annotate the class itself.
#
# class X
# include Anise::Annotation
#
# ann self, :valid => lambda{ |x| x.is_a?(Enumerable) }
# end
#
# Although annotations are arbitrary, they are tied to the class or
# module they are defined against.
#
#--
# TODO: By using a global variable rather than defining a class
# instance variable for each class/module, it is possible to
# quickly scan all annotations for the entire system. To do
# the same without this would require scanning through
# the ObjectSpace. Should we do this?
# $annotations = Hash.new { |h,k| h[k] = {} }
#
# TODO: The ann(x).name notation is kind of nice. Would like to add that
# back-in if reasonable. This would require @annotations to be an
# OpenHash or OpenObject rather than just a Hash.
#++
module Annotation
# Hook run by `include Anise::Annotation`: Object redirects to Module
# (so all classes/modules gain the methods), Module includes normally
# (guarded against double inclusion), anything else is extended.
def self.append_features(base)
if base == ::Object
append_features(::Module)
elsif base == ::Module
unless ::Module < Annotation
super
end
else
base.extend self
end
end
# Lookup an annotation. Unlike +annotations[ref]+
# this provides a complete annotation <i>heritage</i>,
# pulling annotations of the same reference name
# from ancestor classes and modules.
#
# No +ref+: this module's own raw annotations hash (memoized).
# With +ref+: merged heritage — ancestors farthest-first, nearer wins.
def annotation(ref=nil)
return(@annotations ||= {}) if ref.nil?
ref = ref.to_sym
ann = {}
ancestors.reverse_each do |anc|
next unless anc.is_a?(Annotation)
#anc.annotations[ref] ||= {}
if anc.annotations[ref]
ann.update(anc.annotations[ref]) #.merge(ann)
end
end
return ann
#ancs = ancestors.select{ |a| a.is_a?(Annotations) }
#ancs.inject({}) do |memo, ancestor|
# ancestor.annotations[ref] ||= {}
# ancestor.annotations[ref].merge(memo)
#end
end
# Plural alias for #annotation.
alias_method :annotations, :annotation
# Stores this class' or module's annotations.
#
#def annotations
# #$annotations[self]
# @annotations ||= {}
#end
# Set or read annotations.
#
# Dispatch by argument shape (see #ann in the pre-callback revision).
# After a hash write, fires the #annotation_added callback.
# NOTE(review): unlike #ann!, the callback here is invoked unconditionally
# — it relies on the default no-op #annotation_added defined below being
# in scope; confirm the asymmetry with #ann!'s guarded call is intended.
def ann(ref, keys_or_class=nil, keys=nil)
return annotation(ref) unless keys_or_class or keys
if Class === keys_or_class
keys ||= {}
keys[:class] = keys_or_class
else
keys = keys_or_class
end
if Hash === keys
ref = ref.to_sym
keys = keys.inject({}){ |h,(k,v)| h[k.to_sym] = v; h} #rekey
annotations[ref] ||= {}
annotations[ref].update(keys)
# callback
annotation_added(ref)
else
key = keys.to_sym
annotation(ref)[key]
end
end
# To change an annotation's value in place for a given class or module
# it first must be duplicated, otherwise the change may affect annotations
# in the class or module's ancestors.
#
# Like #ann but copy-on-read: the inherited value is dup'd into this
# module's own annotations so in-place edits don't affect ancestors.
# NOTE(review): the `method_defined?(:annotation_added)` guard checks the
# *instance* methods of the receiver, which will generally not see the
# module-level #annotation_added acquired via extend — so the callback
# may never fire here, unlike in #ann. Confirm the differing guard is
# intentional.
def ann!(ref, keys_or_class=nil, keys=nil)
#return annotation(ref) unless keys_or_class or keys
unless keys_or_class or keys
return annotations[ref] ||= {}
end
if Class === keys_or_class
keys ||= {}
keys[:class] = keys_or_class
else
keys = keys_or_class
end
if Hash === keys
ref = ref.to_sym
keys = keys.inject({}){ |h,(k,v)| h[k.to_sym] = v; h} #rekey
annotations[ref] ||= {}
annotations[ref].update(keys)
# callback
annotation_added(ref) if method_defined?(:annotation_added)
else
key = keys.to_sym
annotations[ref] ||= {}
begin
annotations[ref][key] = annotation(ref)[key].dup
rescue TypeError
annotations[ref][key] = annotation(ref)[key]
end
end
end
# callback method: default no-op fired when annotations are added.
# Override in the annotated class/module to react to new annotations;
# `super if defined?(super)` keeps it cooperative with other overrides.
def annotation_added(name)
super if defined?(super)
end
end
end
# 2006-11-07 trans Created this ultra-concise version of annotations.
# Copyright (c) 2005, 2008 TigerOps
|
# Generated by juwelier
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Juwelier::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# stub: rails-patterns 0.9.0 ruby lib
Gem::Specification.new do |s|
s.name = "rails-patterns".freeze
s.version = "0.9.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Stevo".freeze]
s.date = "2021-06-25"
s.description = "A collection of lightweight, standardized, rails-oriented patterns.".freeze
s.email = "b.kosmowski@selleo.com".freeze
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
s.files = [
".document",
".github/workflows/ruby.yml",
".rspec",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.md",
"Rakefile",
"VERSION",
"lib/patterns.rb",
"lib/patterns/calculation.rb",
"lib/patterns/collection.rb",
"lib/patterns/form.rb",
"lib/patterns/query.rb",
"lib/patterns/rule.rb",
"lib/patterns/ruleset.rb",
"lib/patterns/service.rb",
"lib/patterns/strong_ruleset.rb",
"lib/rails-patterns.rb",
"rails-patterns.gemspec",
"spec/helpers/custom_calculation.rb",
"spec/helpers/custom_calculation_script.rb",
"spec/helpers/rails_redis_cache_mock.rb",
"spec/patterns/calculation_spec.rb",
"spec/patterns/collection_spec.rb",
"spec/patterns/form_spec.rb",
"spec/patterns/query_spec.rb",
"spec/patterns/rule_spec.rb",
"spec/patterns/ruleset_spec.rb",
"spec/patterns/service_spec.rb",
"spec/patterns/strong_ruleset_spec.rb",
"spec/spec_helper.rb"
]
s.homepage = "http://github.com/selleo/pattern".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.5.0".freeze)
s.rubygems_version = "3.0.8".freeze
s.summary = "A collection of lightweight, standardized, rails-oriented patterns.".freeze
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<activerecord>.freeze, [">= 4.2.6"])
s.add_runtime_dependency(%q<actionpack>.freeze, [">= 4.2.6"])
s.add_runtime_dependency(%q<virtus>.freeze, [">= 0"])
s.add_runtime_dependency(%q<ruby2_keywords>.freeze, [">= 0"])
s.add_development_dependency(%q<rspec>.freeze, [">= 0"])
s.add_development_dependency(%q<bundler>.freeze, ["~> 2.0"])
s.add_development_dependency(%q<juwelier>.freeze, ["~> 2.1.0"])
else
s.add_dependency(%q<activerecord>.freeze, [">= 4.2.6"])
s.add_dependency(%q<actionpack>.freeze, [">= 4.2.6"])
s.add_dependency(%q<virtus>.freeze, [">= 0"])
s.add_dependency(%q<ruby2_keywords>.freeze, [">= 0"])
s.add_dependency(%q<rspec>.freeze, [">= 0"])
s.add_dependency(%q<bundler>.freeze, ["~> 2.0"])
s.add_dependency(%q<juwelier>.freeze, ["~> 2.1.0"])
end
else
s.add_dependency(%q<activerecord>.freeze, [">= 4.2.6"])
s.add_dependency(%q<actionpack>.freeze, [">= 4.2.6"])
s.add_dependency(%q<virtus>.freeze, [">= 0"])
s.add_dependency(%q<ruby2_keywords>.freeze, [">= 0"])
s.add_dependency(%q<rspec>.freeze, [">= 0"])
s.add_dependency(%q<bundler>.freeze, ["~> 2.0"])
s.add_dependency(%q<juwelier>.freeze, ["~> 2.1.0"])
end
end
Regenerate gemspec for version 0.9.0
# Generated by juwelier
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Juwelier::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# stub: rails-patterns 0.9.0 ruby lib
Gem::Specification.new do |s|
s.name = "rails-patterns".freeze
s.version = "0.9.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Stevo".freeze]
s.date = "2021-06-25"
s.description = "A collection of lightweight, standardized, rails-oriented patterns.".freeze
s.email = "b.kosmowski@selleo.com".freeze
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
s.files = [
".document",
".github/workflows/ruby.yml",
".rspec",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.md",
"Rakefile",
"VERSION",
"lib/patterns.rb",
"lib/patterns/calculation.rb",
"lib/patterns/collection.rb",
"lib/patterns/form.rb",
"lib/patterns/query.rb",
"lib/patterns/rule.rb",
"lib/patterns/ruleset.rb",
"lib/patterns/service.rb",
"lib/patterns/strong_ruleset.rb",
"lib/rails-patterns.rb",
"rails-patterns.gemspec",
"spec/helpers/custom_calculation.rb",
"spec/helpers/custom_calculation_script.rb",
"spec/helpers/rails_redis_cache_mock.rb",
"spec/patterns/calculation_spec.rb",
"spec/patterns/collection_spec.rb",
"spec/patterns/form_spec.rb",
"spec/patterns/query_spec.rb",
"spec/patterns/rule_spec.rb",
"spec/patterns/ruleset_spec.rb",
"spec/patterns/service_spec.rb",
"spec/patterns/strong_ruleset_spec.rb",
"spec/spec_helper.rb"
]
s.homepage = "http://github.com/selleo/pattern".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.5.0".freeze)
s.rubygems_version = "3.0.8".freeze
s.summary = "A collection of lightweight, standardized, rails-oriented patterns.".freeze
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<activerecord>.freeze, [">= 4.2.6"])
s.add_runtime_dependency(%q<actionpack>.freeze, [">= 4.2.6"])
s.add_runtime_dependency(%q<virtus>.freeze, [">= 0"])
s.add_runtime_dependency(%q<ruby2_keywords>.freeze, [">= 0"])
s.add_development_dependency(%q<rspec>.freeze, [">= 0"])
s.add_development_dependency(%q<bundler>.freeze, ["~> 2.0"])
s.add_development_dependency(%q<juwelier>.freeze, [">= 0"])
else
s.add_dependency(%q<activerecord>.freeze, [">= 4.2.6"])
s.add_dependency(%q<actionpack>.freeze, [">= 4.2.6"])
s.add_dependency(%q<virtus>.freeze, [">= 0"])
s.add_dependency(%q<ruby2_keywords>.freeze, [">= 0"])
s.add_dependency(%q<rspec>.freeze, [">= 0"])
s.add_dependency(%q<bundler>.freeze, ["~> 2.0"])
s.add_dependency(%q<juwelier>.freeze, [">= 0"])
end
else
s.add_dependency(%q<activerecord>.freeze, [">= 4.2.6"])
s.add_dependency(%q<actionpack>.freeze, [">= 4.2.6"])
s.add_dependency(%q<virtus>.freeze, [">= 0"])
s.add_dependency(%q<ruby2_keywords>.freeze, [">= 0"])
s.add_dependency(%q<rspec>.freeze, [">= 0"])
s.add_dependency(%q<bundler>.freeze, ["~> 2.0"])
s.add_dependency(%q<juwelier>.freeze, [">= 0"])
end
end
|
module ApiAuth
# Builds the canonical string given a request object.
class Headers
include RequestDrivers
# Wraps the raw request in the matching RequestDrivers adapter so the
# rest of Headers can use one uniform interface.
# @raise [UnknownHTTPRequest] when the request class is unrecognized
def initialize(request)
@original_request = request
case request.class.to_s
when /Net::HTTP/
@request = NetHttpRequest.new(request)
when /RestClient/
@request = RestClientRequest.new(request)
when /Curl::Easy/
@request = CurbRequest.new(request)
when /ActionController::Request/
@request = ActionControllerRequest.new(request)
when /ActionController::TestRequest/
# when ActionDispatch is loaded, test requests go through its driver
if defined?(ActionDispatch)
@request = ActionDispatchRequest.new(request)
else
@request = ActionControllerRequest.new(request)
end
when /ActionDispatch::Request/
@request = ActionDispatchRequest.new(request)
when /Rack::Request/
@request = RackRequest.new(request)
else
raise UnknownHTTPRequest, "#{request.class.to_s} is not yet supported."
end
true
end
# Returns the request timestamp
# (may be empty if never stamped — see #set_date)
def timestamp
@request.timestamp
end
# Returns the canonical string computed from the request's headers: the
# content type, content MD5, host-stripped request URI, and timestamp,
# joined by commas. This is the exact string that gets HMAC-signed, so
# client and server must derive it identically.
def canonical_string
[ @request.content_type,
@request.content_md5,
# remove host — must also match https:// URIs, otherwise TLS requests
# keep the host and the signature never verifies
@request.request_uri.gsub(/https?:\/\/[^(,|\?|\/)]*/,''),
@request.timestamp
].join(",")
end
# Returns the authorization header from the request's headers
def authorization_header
@request.authorization_header
end
# Stamps the current date onto the request, but only if no timestamp
# has been set yet.
def set_date
@request.set_date if @request.timestamp.empty?
end
# Computes and sets Content-MD5, but only if not already present.
def calculate_md5
@request.populate_content_md5 if @request.content_md5.empty?
end
# Whether the transmitted Content-MD5 header disagrees with the request
# body. A request that never set Content-MD5 (empty header) is treated
# as not mismatched, since there is nothing to compare against.
def md5_mismatch?
return false if @request.content_md5.empty?
@request.md5_mismatch?
end
# Sets the request's authorization header with the passed in value.
# The header should be the ApiAuth HMAC signature.
#
# This will return the original request object with the signed Authorization
# header already in place.
# @param header [String] the fully-formed Authorization header value
def sign_header(header)
@request.set_auth_header header
end
end
end
This should also work for https.
module ApiAuth
# Builds the canonical string given a request object.
class Headers
include RequestDrivers
# Wraps the raw request in the matching RequestDrivers adapter.
# @raise [UnknownHTTPRequest] when the request class is unrecognized
def initialize(request)
@original_request = request
case request.class.to_s
when /Net::HTTP/
@request = NetHttpRequest.new(request)
when /RestClient/
@request = RestClientRequest.new(request)
when /Curl::Easy/
@request = CurbRequest.new(request)
when /ActionController::Request/
@request = ActionControllerRequest.new(request)
when /ActionController::TestRequest/
# when ActionDispatch is loaded, test requests go through its driver
if defined?(ActionDispatch)
@request = ActionDispatchRequest.new(request)
else
@request = ActionControllerRequest.new(request)
end
when /ActionDispatch::Request/
@request = ActionDispatchRequest.new(request)
when /Rack::Request/
@request = RackRequest.new(request)
else
raise UnknownHTTPRequest, "#{request.class.to_s} is not yet supported."
end
true
end
# Returns the request timestamp
def timestamp
@request.timestamp
end
# Returns the canonical string computed from the request's headers —
# the comma-joined string (type, MD5, host-stripped URI, timestamp)
# that gets HMAC-signed; handles both http:// and https:// hosts.
def canonical_string
[ @request.content_type,
@request.content_md5,
@request.request_uri.gsub(/https?:\/\/[^(,|\?|\/)]*/,''), # remove host
@request.timestamp
].join(",")
end
# Returns the authorization header from the request's headers
def authorization_header
@request.authorization_header
end
# Stamps the current date onto the request if no timestamp is set yet.
def set_date
@request.set_date if @request.timestamp.empty?
end
# Computes and sets Content-MD5 only when not already present.
def calculate_md5
@request.populate_content_md5 if @request.content_md5.empty?
end
# Whether the transmitted Content-MD5 header disagrees with the request
# body. An absent (empty) Content-MD5 header is never a mismatch, since
# there is nothing to compare against.
def md5_mismatch?
return false if @request.content_md5.empty?
@request.md5_mismatch?
end
# Sets the request's authorization header with the passed in value.
# The header should be the ApiAuth HMAC signature.
#
# This will return the original request object with the signed Authorization
# header already in place.
# @param header [String] the fully-formed Authorization header value
def sign_header(header)
@request.set_auth_header header
end
end
end
|
# -*- encoding: utf-8 -*-
# Gem manifest for smart_asset. File/executable/test lists are pulled
# from `git ls-files`, so this spec must be built from a git checkout.
root = File.expand_path('../', __FILE__)
lib = "#{root}/lib"
$:.unshift lib unless $:.include?(lib)
Gem::Specification.new do |s|
s.name = "smart_asset"
s.version = '0.5.5'
s.platform = Gem::Platform::RUBY
s.authors = ["Winton Welsh"]
s.email = ["mail@wintoni.us"]
s.homepage = "http://github.com/winton/smart_asset"
s.summary = %q{Smart asset packaging for Rails, Sinatra, and Stasis}
s.description = %q{Smart asset packaging for Rails, Sinatra, and Stasis.}
s.executables = `cd #{root} && git ls-files bin/*`.split("\n").collect { |f| File.basename(f) }
s.files = `cd #{root} && git ls-files`.split("\n")
s.require_paths = %w(lib)
s.test_files = `cd #{root} && git ls-files -- {features,test,spec}/*`.split("\n")
# development-only dependencies (test harness)
s.add_development_dependency "framework_fixture"
s.add_development_dependency "rack-test"
s.add_development_dependency "rspec", "~> 1.0"
end
Version 0.5.6
# -*- encoding: utf-8 -*-
# Gem manifest for smart_asset 0.5.6. File/executable/test lists come
# from `git ls-files`, so the spec must be built from a git checkout.
root = File.expand_path('../', __FILE__)
lib = "#{root}/lib"
$:.unshift lib unless $:.include?(lib)
Gem::Specification.new do |s|
s.name = "smart_asset"
s.version = '0.5.6'
s.platform = Gem::Platform::RUBY
s.authors = ["Winton Welsh"]
s.email = ["mail@wintoni.us"]
s.homepage = "http://github.com/winton/smart_asset"
s.summary = %q{Smart asset packaging for Rails, Sinatra, and Stasis}
s.description = %q{Smart asset packaging for Rails, Sinatra, and Stasis.}
s.executables = `cd #{root} && git ls-files bin/*`.split("\n").collect { |f| File.basename(f) }
s.files = `cd #{root} && git ls-files`.split("\n")
s.require_paths = %w(lib)
s.test_files = `cd #{root} && git ls-files -- {features,test,spec}/*`.split("\n")
# development-only dependencies (test harness)
s.add_development_dependency "framework_fixture"
s.add_development_dependency "rack-test"
s.add_development_dependency "rspec", "~> 1.0"
end |
module Atomy
module AST
class Binary < Node
# Operators with dedicated Rubinius meta-send instructions; operators
# not listed here compile to a regular one-argument send.
Operators = {
:+ => :meta_send_op_plus,
:- => :meta_send_op_minus,
:== => :meta_send_op_equal,
:=== => :meta_send_op_tequal,
:< => :meta_send_op_lt,
:> => :meta_send_op_gt
}
children :lhs, :rhs
attributes :operator
# :private slot — whether the send may reach private methods
# (defaults to "false")
slots [:private, "false"]
generate
alias :message_name :operator
# Emits bytecode for `lhs <op> rhs`: compile both operands, then either a
# fast meta-send instruction (for the common operators above) or a plain
# one-argument send.
def bytecode(g)
pos(g)
@lhs.compile(g)
@rhs.compile(g)
# honor the :private slot (declared above but previously never
# consulted) so private-capable binary sends can reach private methods
g.allow_private if @private
if meta = Operators[@operator]
g.__send__ meta, g.find_literal(@operator)
else
g.send @operator, 1
end
end
# Symbol under which a macro for this operator would be registered.
def macro_name
:"atomy_macro::#{@operator}"
end
end
end
end
private Binary sends
module Atomy
module AST
# AST node for a binary operator expression (`lhs <op> rhs`).
class Binary < Node
# Operators with dedicated Rubinius meta-send instructions; others
# compile to a regular one-argument send.
Operators = {
:+ => :meta_send_op_plus,
:- => :meta_send_op_minus,
:== => :meta_send_op_equal,
:=== => :meta_send_op_tequal,
:< => :meta_send_op_lt,
:> => :meta_send_op_gt
}
children :lhs, :rhs
attributes :operator
# :private slot — whether the send may reach private methods
slots [:private, "false"]
generate
alias :message_name :operator
# Compile both operands, honor the :private slot, then emit either a
# fast meta-send or a plain send.
def bytecode(g)
pos(g)
@lhs.compile(g)
@rhs.compile(g)
g.allow_private if @private
if meta = Operators[@operator]
g.__send__ meta, g.find_literal(@operator)
else
g.send @operator, 1
end
end
# Symbol under which a macro for this operator would be registered.
def macro_name
:"atomy_macro::#{@operator}"
end
end
end
end
|
# Library version constant (consumed by the gemspec).
module AutoApi
VERSION = "0.0.3"
end
version bump
# Library version constant (consumed by the gemspec).
module AutoApi
VERSION = "0.0.4"
end
|
module Autoupdate
module_function
# Install and load a launchd job that runs `brew update` (plus optional
# `brew upgrade` / `brew cleanup` when the corresponding flags are given)
# every 24 hours and at boot.
#
# Exits with status 1 if the job already appears to be installed, or if no
# writable location for the log files can be found.
def start
if File.exist?(Autoupdate::Core.plist)
puts <<-EOS.undent
The command already appears to have been started.
Please run `brew autoupdate --delete` and try again.
EOS
exit 1
end
auto_args = "update"
# Spacing at start of lines is deliberate. Don't undo.
if ARGV.include? "--upgrade"
auto_args << " && #{Autoupdate::Core.brew} upgrade -v"
auto_args << " && #{Autoupdate::Core.brew} cleanup" if ARGV.include? "--cleanup"
end
# It's not something I particularly support but if someone manually loads
# the plist with launchctl themselves we can end up with a log directory
# we can't write to later, so need to ensure a future `start` command
# doesn't silently fail.
logs_parent = File.expand_path("..", Autoupdate::Core.logs)
if File.exist?(Autoupdate::Core.logs) && File.writable?(Autoupdate::Core.logs)
log_err = "#{Autoupdate::Core.logs}/#{Autoupdate::Core.name}.err"
log_std = "#{Autoupdate::Core.logs}/#{Autoupdate::Core.name}.out"
elsif File.writable?(logs_parent)
log_err = "#{logs_parent}/#{Autoupdate::Core.name}.err"
log_std = "#{logs_parent}/#{Autoupdate::Core.name}.out"
else
puts <<-EOS.undent
#{Autoupdate::Core.logs} does not seem to be writable.
You may wish to `chown` it back to your user.
EOS
# Bail out: continuing here would write a plist whose StandardErrorPath
# and StandardOutPath interpolate nil (empty strings), because log_err
# and log_std were never assigned.
exit 1
end
script_contents = <<-EOS.undent
#!/bin/bash
/bin/date && #{Autoupdate::Core.brew} #{auto_args}
EOS
FileUtils.mkpath(Autoupdate::Core.logs)
FileUtils.mkpath(Autoupdate::Core.location)
File.open(Autoupdate::Core.location/"updater", "w") { |sc| sc << script_contents }
FileUtils.chmod 0555, Autoupdate::Core.location/"updater"
file = <<-EOS.undent
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{Autoupdate::Core.name}</string>
<key>Program</key>
<string>#{Autoupdate::Core.location}/updater</string>
<key>ProgramArguments</key>
<array>
<string>#{Autoupdate::Core.location}/updater</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>StandardErrorPath</key>
<string>#{log_err}</string>
<key>StandardOutPath</key>
<string>#{log_std}</string>
<key>StartInterval</key>
<integer>86400</integer>
</dict>
</plist>
EOS
File.open(Autoupdate::Core.plist, "w") { |f| f << file }
quiet_system "/bin/launchctl", "load", Autoupdate::Core.plist
puts "Homebrew will now automatically update every 24 hours, or on system boot."
end
end
autoupdate: relax already-started check
The prior method broke the perfectly valid `brew autoupdate stop`
use-case.
module Autoupdate
module_function
# Install and load a launchd job that runs `brew update` (plus optional
# `brew upgrade` / `brew cleanup` when the corresponding flags are given)
# every 24 hours and at boot.
#
# Exits with status 1 if launchctl already lists the job, or if no writable
# location for the log files can be found.
def start
# Method from Homebrew.
# https://github.com/Homebrew/brew/blob/c9c7f4/Library/Homebrew/utils/popen.rb
if Utils.popen_read("/bin/launchctl list").include?(Autoupdate::Core.name)
puts <<-EOS.undent
The command already appears to have been started.
Please run `brew autoupdate --delete` and try again.
EOS
exit 1
end
auto_args = "update"
# Spacing at start of lines is deliberate. Don't undo.
if ARGV.include? "--upgrade"
auto_args << " && #{Autoupdate::Core.brew} upgrade -v"
auto_args << " && #{Autoupdate::Core.brew} cleanup" if ARGV.include? "--cleanup"
end
# It's not something I particularly support but if someone manually loads
# the plist with launchctl themselves we can end up with a log directory
# we can't write to later, so need to ensure a future `start` command
# doesn't silently fail.
logs_parent = File.expand_path("..", Autoupdate::Core.logs)
if File.exist?(Autoupdate::Core.logs) && File.writable?(Autoupdate::Core.logs)
log_err = "#{Autoupdate::Core.logs}/#{Autoupdate::Core.name}.err"
log_std = "#{Autoupdate::Core.logs}/#{Autoupdate::Core.name}.out"
elsif File.writable?(logs_parent)
log_err = "#{logs_parent}/#{Autoupdate::Core.name}.err"
log_std = "#{logs_parent}/#{Autoupdate::Core.name}.out"
else
puts <<-EOS.undent
#{Autoupdate::Core.logs} does not seem to be writable.
You may wish to `chown` it back to your user.
EOS
# Bail out: continuing here would write a plist whose StandardErrorPath
# and StandardOutPath interpolate nil (empty strings), because log_err
# and log_std were never assigned.
exit 1
end
script_contents = <<-EOS.undent
#!/bin/bash
/bin/date && #{Autoupdate::Core.brew} #{auto_args}
EOS
FileUtils.mkpath(Autoupdate::Core.logs)
FileUtils.mkpath(Autoupdate::Core.location)
File.open(Autoupdate::Core.location/"updater", "w") { |sc| sc << script_contents }
FileUtils.chmod 0555, Autoupdate::Core.location/"updater"
file = <<-EOS.undent
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{Autoupdate::Core.name}</string>
<key>Program</key>
<string>#{Autoupdate::Core.location}/updater</string>
<key>ProgramArguments</key>
<array>
<string>#{Autoupdate::Core.location}/updater</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>StandardErrorPath</key>
<string>#{log_err}</string>
<key>StandardOutPath</key>
<string>#{log_std}</string>
<key>StartInterval</key>
<integer>86400</integer>
</dict>
</plist>
EOS
File.open(Autoupdate::Core.plist, "w") { |f| f << file }
quiet_system "/bin/launchctl", "load", Autoupdate::Core.plist
puts "Homebrew will now automatically update every 24 hours, or on system boot."
end
end
|
require 'ostruct'
require 'yaml'
require 'erb'
module SmokeTest
# Shared state for a smoke-test run: fixture data loaded from test_data.yml
# plus derived values (unique visitor email, formatted slot dates).
class State
attr_accessor :slot_data
# Fixture data, evaluated once at class-load time. test_data.yml is read
# relative to the current working directory and run through ERB first.
TEST_DATA = YAML.load(ERB.new(File.read('test_data.yml')).result)
def prisoner
@prisoner ||= Prisoner.new
end
def visitor
@visitor ||= Visitor.new
end
def process_data
@process_data ||= OpenStruct.new(TEST_DATA.fetch 'process_data')
end
def unique_email_address
visitor.email_address
end
# Date string of the first bookable slot, as stored in slot_data.
def first_slot_date
slot_data.first[:date]
end
alias_method :first_slot_date_prison_format, :first_slot_date
# First slot date reformatted for visitor-facing copy, e.g. "1 January 2016".
def first_slot_date_visitor_format
Date.parse(first_slot_date).strftime('%-e %B %Y')
end
# Prisoner fixture; delegates all attribute reads to an OpenStruct built
# from the 'prisoner_details' section of TEST_DATA.
class Prisoner < SimpleDelegator
def initialize
super(OpenStruct.new State::TEST_DATA.fetch('prisoner_details'))
end
def full_name
"#{first_name} #{last_name}"
end
end
# Visitor fixture; delegates to the 'visitor_details' section of TEST_DATA.
class Visitor < SimpleDelegator
def initialize
super(OpenStruct.new State::TEST_DATA.fetch('visitor_details'))
end
def email_address
@email_address ||= UniqueEmailAddress.new
end
# Frozen, UUID-tagged email address so each run is distinguishable.
# NOTE(review): SecureRandom and the SMOKE_TEST_EMAIL_* constants are not
# defined/required in this file — assumed loaded by the harness; confirm.
class UniqueEmailAddress < String
def initialize(*)
uuid = SecureRandom.uuid
super \
"#{SMOKE_TEST_EMAIL_LOCAL_PART}+#{uuid}@#{SMOKE_TEST_EMAIL_DOMAIN}"
freeze
end
end
end
end
end
Fix broken smoke test
The date format in the mailer titles had changed, but the smoke tests
had not been updated; this fixes that. It is *not* yet DRY - Evangelos
is going to clean it up next week when he starts to rationalise date
formats in the app (see TODO note in source).
require 'ostruct'
require 'yaml'
require 'erb'
module SmokeTest
# Shared state for a smoke-test run: fixture data loaded from test_data.yml
# plus derived values (unique visitor email, formatted slot dates).
class State
attr_accessor :slot_data
# Fixture data, evaluated once at class-load time. test_data.yml is read
# relative to the current working directory and run through ERB first.
TEST_DATA = YAML.load(ERB.new(File.read('test_data.yml')).result)
def prisoner
@prisoner ||= Prisoner.new
end
def visitor
@visitor ||= Visitor.new
end
def process_data
@process_data ||= OpenStruct.new(TEST_DATA.fetch 'process_data')
end
def unique_email_address
visitor.email_address
end
# Date string of the first bookable slot, as stored in slot_data.
def first_slot_date
slot_data.first[:date]
end
alias_method :first_slot_date_prison_format, :first_slot_date
# First slot date reformatted to match mailer titles, e.g.
# "Friday 1 January 2016".
def first_slot_date_visitor_format
# TODO: Refactor to use the I18n call. It was refusing to work when
# Evangelos and I tested it - couldn't find the translation, even though
# the exception was showing the correct key in en.yml.
Date.parse(first_slot_date).strftime('%A %-d %B %Y')
end
# Prisoner fixture; delegates all attribute reads to an OpenStruct built
# from the 'prisoner_details' section of TEST_DATA.
class Prisoner < SimpleDelegator
def initialize
super(OpenStruct.new State::TEST_DATA.fetch('prisoner_details'))
end
def full_name
"#{first_name} #{last_name}"
end
end
# Visitor fixture; delegates to the 'visitor_details' section of TEST_DATA.
class Visitor < SimpleDelegator
def initialize
super(OpenStruct.new State::TEST_DATA.fetch('visitor_details'))
end
def email_address
@email_address ||= UniqueEmailAddress.new
end
# Frozen, UUID-tagged email address so each run is distinguishable.
# NOTE(review): SecureRandom and the SMOKE_TEST_EMAIL_* constants are not
# defined/required in this file — assumed loaded by the harness; confirm.
class UniqueEmailAddress < String
def initialize(*)
uuid = SecureRandom.uuid
super \
"#{SMOKE_TEST_EMAIL_LOCAL_PART}+#{uuid}@#{SMOKE_TEST_EMAIL_DOMAIN}"
freeze
end
end
end
end
end
|
# frozen_string_literal: true
module BB
# Gem version string, bumped on each release.
VERSION = '4.0.1'
end
v4.0.2
# frozen_string_literal: true
module BB
# Gem version string, bumped on each release.
VERSION = '4.0.2'
end
|
# frozen_string_literal: true
module BB
# Gem version string, bumped on each release.
VERSION = '3.1.1'
end
v3.1.2
# frozen_string_literal: true
module BB
# Gem version string, bumped on each release.
VERSION = '3.1.2'
end
|
module Blather
class Stanza
# # X Stanza
#
# [XEP-0004 Data Forms](http://xmpp.org/extensions/xep-0004.html)
#
# Data Form node that allows for semi-structured data exchange
#
# @handler :x
class X < XMPPNode
register :x, 'jabber:x:data'
VALID_TYPES = [:cancel, :form, :result, :submit].freeze
# Create a new X node
# @param [:cancel, :form, :result, :submit, nil] type the x:form type
# @param [Array<Array, X::Field>, nil] fields a list of fields.
# These are passed directly to X::Field.new
# @return [X] a new X stanza
def self.new(type = nil, fields = [])
new_node = super :x
case type
when Nokogiri::XML::Node
new_node.inherit type
when Hash
new_node.type = type[:type]
new_node.fields = type[:fields]
else
new_node.type = type
new_node.fields = fields
end
new_node
end
# The Form's type
# @return [Symbol]
def type
read_attr :type, :to_sym
end
# Set the Form's type
# @param [:cancel, :form, :result, :submit] type the new type for the form
# @raise [ArgumentError] when type is not one of VALID_TYPES
def type=(type)
if type && !VALID_TYPES.include?(type.to_sym)
raise ArgumentError, "Invalid Type (#{type}), use: #{VALID_TYPES*' '}"
end
write_attr :type, type
end
# List of field objects
# @return [Blather::Stanza::X::Field]
def fields
self.find('ns:field', :ns => self.class.registered_ns).map do |field|
Field.new(field)
end
end
# Find a field by var
# @param var the var for the field you wish to find
def field(var)
fields.detect { |f| f.var == var }
end
# Add an array of fields to form
# @param fields the array of fields, passed directly to Field.new
def fields=(fields)
remove_children :field
[fields].flatten.each do |field|
self << (f = Field.new(field))
f.namespace = self.namespace
end
end
# Check if the x is of type :cancel
#
# @return [true, false]
def cancel?
self.type == :cancel
end
# Check if the x is of type :form
#
# @return [true, false]
def form?
self.type == :form
end
# Check if the x is of type :result
#
# @return [true, false]
def result?
self.type == :result
end
# Check if the x is of type :submit
#
# @return [true, false]
def submit?
self.type == :submit
end
# Retrieve the form's instructions
#
# @return [String]
def instructions
content_from 'ns:instructions', :ns => self.registered_ns
end
# Set the form's instructions
#
# @param [String] instructions the form's instructions
def instructions=(instructions)
self.remove_children :instructions
if instructions
self << (i = XMPPNode.new(:instructions, self.document))
i.namespace = self.namespace
i << instructions
end
end
# Retrieve the form's title
#
# @return [String]
def title
content_from 'ns:title', :ns => self.registered_ns
end
# Set the form's title
#
# @param [String] title the form's title
def title=(title)
self.remove_children :title
if title
self << (t = XMPPNode.new(:title))
t.namespace = self.namespace
t << title
end
end
# A single form field (<field/> child of <x/>).
class Field < XMPPNode
VALID_TYPES = [:boolean, :fixed, :hidden, :"jid-multi", :"jid-single", :"list-multi", :"list-single", :"text-multi", :"text-private", :"text-single"].freeze
# Create a new X Field
# @overload new(node)
# Imports the XML::Node to create a Field object
# @param [XML::Node] node the node object to import
# @overload new(opts = {})
# Creates a new Field using a hash of options
# @param [Hash] opts a hash of options
# @option opts [:boolean, :fixed, :hidden, :"jid-multi", :"jid-single", :"list-multi", :"list-single", :"text-multi", :"text-private", :"text-single"] :type the type of the field
# @option opts [String] :var the variable for the field
# @option opts [String] :label the label for the field
# @option opts [String, nil] :value the value for the field
# @option opts [String, nil] :description the description for the field
# @option opts [true, false, nil] :required the required flag for the field
# @param [Array<Array, X::Field::Option>, nil] :options a list of field options.
# These are passed directly to X::Field::Option.new
# @overload new(type, var = nil, label = nil)
# Create a new Field by name
# @param [:boolean, :fixed, :hidden, :"jid-multi", :"jid-single", :"list-multi", :"list-single", :"text-multi", :"text-private", :"text-single"] type the type of the field
# @param [String, nil] var the variable for the field
# @param [String, nil] label the label for the field
# @param [String, nil] value the value for the field
# @param [String, nil] description the description for the field
# @param [true, false, nil] required the required flag for the field
# @param [Array<Array, X::Field::Option>, nil] options a list of field options.
# These are passed directly to X::Field::Option.new
def self.new(type, var = nil, label = nil, value = nil, description = nil, required = false, options = [])
new_node = super :field
case type
when Nokogiri::XML::Node
new_node.inherit type
when Hash
new_node.type = type[:type]
new_node.var = type[:var]
new_node.label = type[:label]
new_node.value = type[:value]
new_node.desc = type[:description]
new_node.required = type[:required]
new_node.options = type[:options]
else
new_node.type = type
new_node.var = var
new_node.label = label
new_node.value = value
new_node.desc = description
new_node.required = required
new_node.options = options
end
new_node
end
# The Field's type
# @return [String]
def type
read_attr :type
end
# Set the Field's type
# @param [#to_sym] type the new type for the field
# @raise [ArgumentError] when type is not one of VALID_TYPES
def type=(type)
if type && !VALID_TYPES.include?(type.to_sym)
raise ArgumentError, "Invalid Type (#{type}), use: #{VALID_TYPES*' '}"
end
write_attr :type, type
end
# The Field's var
# @return [String]
def var
read_attr :var
end
# Set the Field's var
# @param [String] var the new var for the field
def var=(var)
write_attr :var, var
end
# The Field's label
# @return [String]
def label
read_attr :label
end
# Set the Field's label
# @param [String] label the new label for the field
def label=(label)
write_attr :label, label
end
# Get the field's value
#
# @return [String]
def value
if self.namespace
content_from 'ns:value', :ns => self.namespace.href
else
content_from :value
end
end
# Set the field's value
#
# @param [String] value the field's value
def value=(value)
self.remove_children :value
if value
self << (v = XMPPNode.new(:value))
v.namespace = self.namespace
v << value
end
end
# Get the field's description
#
# @return [String]
def desc
if self.namespace
content_from 'ns:desc', :ns => self.namespace.href
else
content_from :desc
end
end
# Set the field's description
#
# @param [String] description the field's description
def desc=(description)
self.remove_children :desc
if description
self << (d = XMPPNode.new(:desc))
d.namespace = self.namespace
d << description
end
end
# Get the field's required flag
#
# @return [true, false]
def required?
!self.find_first('required').nil?
end
# Set the field's required flag
#
# @param [true, false] required the field's required flag
def required=(required)
self.remove_children(:required) unless required
self << XMPPNode.new(:required) if required
end
# Extract list of option objects
#
# @return [Blather::Stanza::X::Field::Option]
def options
self.find(:option).map { |f| Option.new(f) }
end
# Add an array of options to field
# @param options the array of options, passed directly to Option.new
# NOTE(review): a nil options value is not guarded here — [nil].flatten
# is [nil], so Option.new(nil) appends an empty <option/> element.
def options=(options)
remove_children :option
[options].flatten.each { |o| self << Option.new(o) }
end
# Compare two Field objects by type, var and label
# @param [X::Field] o the Field object to compare against
# @return [true, false]
def eql?(o)
raise "Cannot compare #{self.class} with #{o.class}" unless o.is_a?(self.class)
![:type, :var, :label, :desc, :required?, :value].detect { |m| o.send(m) != self.send(m) }
end
alias_method :==, :eql?
# A single <option/> child of a list-type field.
class Option < XMPPNode
# Create a new X Field Option
# @overload new(node)
# Imports the XML::Node to create a Field option object
# @param [XML::Node] node the node object to import
# @overload new(opts = {})
# Creates a new Field option using a hash of options
# @param [Hash] opts a hash of options
# @option opts [String] :value the value of the field option
# @option opts [String] :label the human readable label for the field option
# @overload new(value, label = nil)
# Create a new Field option by name
# @param [String] value the value of the field option
# @param [String, nil] label the human readable label for the field option
def self.new(value, label = nil)
new_node = super :option
case value
when Nokogiri::XML::Node
new_node.inherit value
when Hash
new_node.value = value[:value]
new_node.label = value[:label]
else
new_node.value = value
new_node.label = label
end
new_node
end
# The Field Option's value
# @return [String]
def value
if self.namespace
content_from 'ns:value', :ns => self.namespace.href
else
content_from :value
end
end
# Set the Field Option's value
# @param [String] value the new value for the field option
def value=(value)
self.remove_children :value
if value
self << (v = XMPPNode.new(:value))
v.namespace = self.namespace
v << value
end
end
# The Field Option's label
# @return [String]
def label
read_attr :label
end
# Set the Field Option's label
# @param [String] label the new label for the field option
def label=(label)
write_attr :label, label
end
end # Option
end # Field
end # X
end # Stanza
end
only add options when they are provided
module Blather
class Stanza
# # X Stanza
#
# [XEP-0004 Data Forms](http://xmpp.org/extensions/xep-0004.html)
#
# Data Form node that allows for semi-structured data exchange
#
# @handler :x
class X < XMPPNode
register :x, 'jabber:x:data'
VALID_TYPES = [:cancel, :form, :result, :submit].freeze
# Create a new X node
# @param [:cancel, :form, :result, :submit, nil] type the x:form type
# @param [Array<Array, X::Field>, nil] fields a list of fields.
# These are passed directly to X::Field.new
# @return [X] a new X stanza
def self.new(type = nil, fields = [])
new_node = super :x
case type
when Nokogiri::XML::Node
new_node.inherit type
when Hash
new_node.type = type[:type]
new_node.fields = type[:fields]
else
new_node.type = type
new_node.fields = fields
end
new_node
end
# The Form's type
# @return [Symbol]
def type
read_attr :type, :to_sym
end
# Set the Form's type
# @param [:cancel, :form, :result, :submit] type the new type for the form
# @raise [ArgumentError] when type is not one of VALID_TYPES
def type=(type)
if type && !VALID_TYPES.include?(type.to_sym)
raise ArgumentError, "Invalid Type (#{type}), use: #{VALID_TYPES*' '}"
end
write_attr :type, type
end
# List of field objects
# @return [Blather::Stanza::X::Field]
def fields
self.find('ns:field', :ns => self.class.registered_ns).map do |field|
Field.new(field)
end
end
# Find a field by var
# @param var the var for the field you wish to find
def field(var)
fields.detect { |f| f.var == var }
end
# Add an array of fields to form
# @param fields the array of fields, passed directly to Field.new
def fields=(fields)
remove_children :field
[fields].flatten.each do |field|
self << (f = Field.new(field))
f.namespace = self.namespace
end
end
# Check if the x is of type :cancel
#
# @return [true, false]
def cancel?
self.type == :cancel
end
# Check if the x is of type :form
#
# @return [true, false]
def form?
self.type == :form
end
# Check if the x is of type :result
#
# @return [true, false]
def result?
self.type == :result
end
# Check if the x is of type :submit
#
# @return [true, false]
def submit?
self.type == :submit
end
# Retrieve the form's instructions
#
# @return [String]
def instructions
content_from 'ns:instructions', :ns => self.registered_ns
end
# Set the form's instructions
#
# @param [String] instructions the form's instructions
def instructions=(instructions)
self.remove_children :instructions
if instructions
self << (i = XMPPNode.new(:instructions, self.document))
i.namespace = self.namespace
i << instructions
end
end
# Retrieve the form's title
#
# @return [String]
def title
content_from 'ns:title', :ns => self.registered_ns
end
# Set the form's title
#
# @param [String] title the form's title
def title=(title)
self.remove_children :title
if title
self << (t = XMPPNode.new(:title))
t.namespace = self.namespace
t << title
end
end
# A single form field (<field/> child of <x/>).
class Field < XMPPNode
VALID_TYPES = [:boolean, :fixed, :hidden, :"jid-multi", :"jid-single", :"list-multi", :"list-single", :"text-multi", :"text-private", :"text-single"].freeze
# Create a new X Field
# @overload new(node)
# Imports the XML::Node to create a Field object
# @param [XML::Node] node the node object to import
# @overload new(opts = {})
# Creates a new Field using a hash of options
# @param [Hash] opts a hash of options
# @option opts [:boolean, :fixed, :hidden, :"jid-multi", :"jid-single", :"list-multi", :"list-single", :"text-multi", :"text-private", :"text-single"] :type the type of the field
# @option opts [String] :var the variable for the field
# @option opts [String] :label the label for the field
# @option opts [String, nil] :value the value for the field
# @option opts [String, nil] :description the description for the field
# @option opts [true, false, nil] :required the required flag for the field
# @param [Array<Array, X::Field::Option>, nil] :options a list of field options.
# These are passed directly to X::Field::Option.new
# @overload new(type, var = nil, label = nil)
# Create a new Field by name
# @param [:boolean, :fixed, :hidden, :"jid-multi", :"jid-single", :"list-multi", :"list-single", :"text-multi", :"text-private", :"text-single"] type the type of the field
# @param [String, nil] var the variable for the field
# @param [String, nil] label the label for the field
# @param [String, nil] value the value for the field
# @param [String, nil] description the description for the field
# @param [true, false, nil] required the required flag for the field
# @param [Array<Array, X::Field::Option>, nil] options a list of field options.
# These are passed directly to X::Field::Option.new
def self.new(type, var = nil, label = nil, value = nil, description = nil, required = false, options = [])
new_node = super :field
case type
when Nokogiri::XML::Node
new_node.inherit type
when Hash
new_node.type = type[:type]
new_node.var = type[:var]
new_node.label = type[:label]
new_node.value = type[:value]
new_node.desc = type[:description]
new_node.required = type[:required]
new_node.options = type[:options]
else
new_node.type = type
new_node.var = var
new_node.label = label
new_node.value = value
new_node.desc = description
new_node.required = required
new_node.options = options
end
new_node
end
# The Field's type
# @return [String]
def type
read_attr :type
end
# Set the Field's type
# @param [#to_sym] type the new type for the field
# @raise [ArgumentError] when type is not one of VALID_TYPES
def type=(type)
if type && !VALID_TYPES.include?(type.to_sym)
raise ArgumentError, "Invalid Type (#{type}), use: #{VALID_TYPES*' '}"
end
write_attr :type, type
end
# The Field's var
# @return [String]
def var
read_attr :var
end
# Set the Field's var
# @param [String] var the new var for the field
def var=(var)
write_attr :var, var
end
# The Field's label
# @return [String]
def label
read_attr :label
end
# Set the Field's label
# @param [String] label the new label for the field
def label=(label)
write_attr :label, label
end
# Get the field's value
#
# @return [String]
def value
if self.namespace
content_from 'ns:value', :ns => self.namespace.href
else
content_from :value
end
end
# Set the field's value
#
# @param [String] value the field's value
def value=(value)
self.remove_children :value
if value
self << (v = XMPPNode.new(:value))
v.namespace = self.namespace
v << value
end
end
# Get the field's description
#
# @return [String]
def desc
if self.namespace
content_from 'ns:desc', :ns => self.namespace.href
else
content_from :desc
end
end
# Set the field's description
#
# @param [String] description the field's description
def desc=(description)
self.remove_children :desc
if description
self << (d = XMPPNode.new(:desc))
d.namespace = self.namespace
d << description
end
end
# Get the field's required flag
#
# @return [true, false]
def required?
!self.find_first('required').nil?
end
# Set the field's required flag
#
# @param [true, false] required the field's required flag
def required=(required)
self.remove_children(:required) unless required
self << XMPPNode.new(:required) if required
end
# Extract list of option objects
#
# @return [Blather::Stanza::X::Field::Option]
def options
self.find(:option).map { |f| Option.new(f) }
end
# Add an array of options to field
# @param options the array of options, passed directly to Option.new
def options=(options)
remove_children :option
# Guard against nil so no empty <option/> element is appended.
if options
[options].flatten.each { |o| self << Option.new(o) }
end
end
# Compare two Field objects by type, var and label
# @param [X::Field] o the Field object to compare against
# @return [true, false]
def eql?(o)
raise "Cannot compare #{self.class} with #{o.class}" unless o.is_a?(self.class)
![:type, :var, :label, :desc, :required?, :value].detect { |m| o.send(m) != self.send(m) }
end
alias_method :==, :eql?
# A single <option/> child of a list-type field.
class Option < XMPPNode
# Create a new X Field Option
# @overload new(node)
# Imports the XML::Node to create a Field option object
# @param [XML::Node] node the node object to import
# @overload new(opts = {})
# Creates a new Field option using a hash of options
# @param [Hash] opts a hash of options
# @option opts [String] :value the value of the field option
# @option opts [String] :label the human readable label for the field option
# @overload new(value, label = nil)
# Create a new Field option by name
# @param [String] value the value of the field option
# @param [String, nil] label the human readable label for the field option
def self.new(value, label = nil)
new_node = super :option
case value
when Nokogiri::XML::Node
new_node.inherit value
when Hash
new_node.value = value[:value]
new_node.label = value[:label]
else
new_node.value = value
new_node.label = label
end
new_node
end
# The Field Option's value
# @return [String]
def value
if self.namespace
content_from 'ns:value', :ns => self.namespace.href
else
content_from :value
end
end
# Set the Field Option's value
# @param [String] value the new value for the field option
def value=(value)
self.remove_children :value
if value
self << (v = XMPPNode.new(:value))
v.namespace = self.namespace
v << value
end
end
# The Field Option's label
# @return [String]
def label
read_attr :label
end
# Set the Field Option's label
# @param [String] label the new label for the field option
def label=(label)
write_attr :label, label
end
end # Option
end # Field
end # X
end # Stanza
end |
# Jbuilder template for a single recipe: core attributes, URLs, brewer info,
# and the recipe's comment thread.
json.extract! @recipe, :id, :name, :description, :created_at, :updated_at,
:style_name, :abv, :ibu, :og, :fg, :color, :batch_size, :style_guide
json.url recipe_url(@recipe)
json.beerxml_url recipe_url(@recipe, format: :xml)
json.image full_url_for(@recipe.main_image(:large))
json.brewer do
json.name @recipe.brewer_name
json.url user_url(@recipe.user)
json.brewery @recipe.user.brewery
json.avatar full_url_for(@recipe.user.avatar_image)
end
# Comments come from the commontator thread attached to the recipe.
json.comments @recipe.thread.comments do |comment|
json.body comment.body
json.created_at comment.created_at
json.creator do
json.name comment.creator.name
json.avatar full_url_for(comment.creator.avatar_image)
end
end
Fix reading commontator comments thread from recipe json after upgrade
# Jbuilder template for a single recipe: core attributes, URLs, brewer info,
# and the recipe's comment thread.
json.extract! @recipe, :id, :name, :description, :created_at, :updated_at,
:style_name, :abv, :ibu, :og, :fg, :color, :batch_size, :style_guide
json.url recipe_url(@recipe)
json.beerxml_url recipe_url(@recipe, format: :xml)
json.image full_url_for(@recipe.main_image(:large))
json.brewer do
json.name @recipe.brewer_name
json.url user_url(@recipe.user)
json.brewery @recipe.user.brewery
json.avatar full_url_for(@recipe.user.avatar_image)
end
# commontator_thread is the accessor name used by newer commontator versions.
json.comments @recipe.commontator_thread.comments do |comment|
json.body comment.body
json.created_at comment.created_at
json.creator do
json.name comment.creator.name
json.avatar full_url_for(comment.creator.avatar_image)
end
end
|
module Booqmail
# Gem version string, bumped on each release.
VERSION = "1.2.4"
end
bump version
module Booqmail
# Gem version string, bumped on each release.
VERSION = "1.2.5"
end
|
module BrickFTP
class Client
# Login and store authentication session.
# @see https://brickftp.com/ja/docs/rest-api/authentication/
# @param username [String] username of BrickFTP's user.
# @param password [String] password of BrickFTP's user.
# @return the result of BrickFTP::API::Authentication.login.
def login(username, password)
BrickFTP::API::Authentication.login(username, password)
end
# Logout and discard authentication session.
# @see https://brickftp.com/ja/docs/rest-api/authentication/
def logout
BrickFTP::API::Authentication.logout
end
# List all users on the current site.
# @see https://brickftp.com/ja/docs/rest-api/users/
# @return [Array] array of BrickFTP::API::User
def list_users
BrickFTP::API::User.all
end
# Show a single user.
# @see https://brickftp.com/ja/docs/rest-api/users/
# @param id user id.
# @return [BrickFTP::API::User] user object.
def show_user(id)
BrickFTP::API::User.find(id)
end
# Create a new user on the current site.
# @see https://brickftp.com/ja/docs/rest-api/users/
# @param attributes [Hash] User's attributes.
# @return the created user (result of BrickFTP::API::User.create).
def create_user(attributes)
BrickFTP::API::User.create(attributes)
end
# Update an existing user.
# @see https://brickftp.com/ja/docs/rest-api/users/
# @param user_or_id [BrickFTP::API::User, Integer] user object or user id.
# @param attributes [Hash] User's attributes.
# @return [BrickFTP::API::User] user object.
def update_user(user_or_id, attributes)
instantize_user(user_or_id).update(attributes)
end
# Delete a user.
# @see https://brickftp.com/ja/docs/rest-api/users/
# @param user_or_id [BrickFTP::API::User, Integer] user object or user id.
# @return [Boolean] return true.
def delete_user(user_or_id)
instantize_user(user_or_id).destroy
end
# List all groups on the current site.
# @see https://brickftp.com/ja/docs/rest-api/groups/
# @return [Array] array of BrickFTP::API::Group
def list_groups
BrickFTP::API::Group.all
end
# Show a single group.
# @see https://brickftp.com/ja/docs/rest-api/groups/
# @param id group id.
# @return [BrickFTP::API::Group] group object.
def show_group(id)
BrickFTP::API::Group.find(id)
end
# Create a new group on the current site.
# @see https://brickftp.com/ja/docs/rest-api/groups/
# @param attributes [Hash] Group's attributes.
# @return the created group (result of BrickFTP::API::Group.create).
def create_group(attributes)
BrickFTP::API::Group.create(attributes)
end
# Update an existing group.
# @see https://brickftp.com/ja/docs/rest-api/groups/
# @param group_or_id [BrickFTP::API::Group, Integer] group object or group id.
# @param attributes [Hash] Group's attributes.
# @return [BrickFTP::API::Group] group object.
def update_group(group_or_id, attributes)
instantize_group(group_or_id).update(attributes)
end
# Delete a group.
# @see https://brickftp.com/ja/docs/rest-api/groups/
# @param group_or_id [BrickFTP::API::Group, Integer] group object or group id.
# @return [Boolean] return true.
def delete_group(group_or_id)
instantize_group(group_or_id).destroy
end
# List all permissions on the current site.
# @see https://brickftp.com/ja/docs/rest-api/permissions/
# @return [Array] array of BrickFTP::API::Permission
def list_permissions
BrickFTP::API::Permission.all
end
# Create a new permission on the current site.
# @see https://brickftp.com/ja/docs/rest-api/permissions/
# @param attributes [Hash] Permission's attributes.
# @return the created permission (result of BrickFTP::API::Permission.create).
def create_permission(attributes)
BrickFTP::API::Permission.create(attributes)
end
# Delete a permission.
# @see https://brickftp.com/ja/docs/rest-api/permissions/
# @param permission_or_id [BrickFTP::API::Permission, Integer] permission object or permission id.
# @return [Boolean] return true.
def delete_permission(permission_or_id)
instantize_permission(permission_or_id).destroy
end
# List all notifications on the current site.
# @see https://brickftp.com/ja/docs/rest-api/notifications/
# @return [Array] array of BrickFTP::API::Notification
def list_notifications
BrickFTP::API::Notification.all
end
# Create a new notification on the current site.
# @see https://brickftp.com/ja/docs/rest-api/notifications/
# @param attributes [Hash] Notification's attributes.
# @return the created notification (result of BrickFTP::API::Notification.create).
def create_notification(attributes)
BrickFTP::API::Notification.create(attributes)
end
# Delete a notification.
# @see https://brickftp.com/ja/docs/rest-api/notifications/
# @param notification_or_id [BrickFTP::API::Notification, Integer] notification object or notification id.
# @return [Boolean] return true.
def delete_notification(notification_or_id)
instantize_notification(notification_or_id).destroy
end
# Show the entire history for the current site.
# @see https://brickftp.com/ja/docs/rest-api/history/
# @param page [Integer] Page number of items to return in this request.
# @param per_page [Integer] Requested number of items returned per request. Default: 1000, maximum: 10000.
# Leave blank for default (strongly recommended).
# @param start_at [String] Date and time in the history to start from.
# @return [Array] array of `BrickFTP::API::History::Site`
def list_site_history(page: nil, per_page: nil, start_at: nil)
  # Drop unset options so only explicitly-given filters reach the API.
  params = { page: page, per_page: per_page, start_at: start_at }.compact
  BrickFTP::API::History::Site.all(params)
end
# Show login history only.
# @see https://brickftp.com/ja/docs/rest-api/history/
# @param page [Integer] Page number of items to return in this request.
# @param per_page [Integer] Requested number of items returned per request. Default: 1000, maximum: 10000.
# Leave blank for default (strongly recommended).
# @param start_at [String] Date and time in the history to start from.
# @return [Array] array of `BrickFTP::API::History::Login`
def list_login_history(page: nil, per_page: nil, start_at: nil)
  # Drop unset options so only explicitly-given filters reach the API.
  params = { page: page, per_page: per_page, start_at: start_at }.compact
  BrickFTP::API::History::Login.all(params)
end
# Show all history for a specific user.
# @see https://brickftp.com/ja/docs/rest-api/history/
# @param user_id [Integer] User ID.
# @param page [Integer] Page number of items to return in this request.
# @param per_page [Integer] Requested number of items returned per request.
# Default: 1000, maximum: 10000. Leave blank for default (strongly recommended).
# @param start_at [String] Date and time in the history to start from.
# @return [Array] array of `BrickFTP::API::History::User`
def list_user_history(user_id:, page: nil, per_page: nil, start_at: nil)
  # user_id is mandatory; the optional filters are stripped when nil.
  params = { user_id: user_id, page: page, per_page: per_page, start_at: start_at }.compact
  BrickFTP::API::History::User.all(params)
end
# Show all history for a specific folder.
# @see https://brickftp.com/ja/docs/rest-api/history/
# @param path [String] path of folder.
# @param page [Integer] Page number of items to return in this request.
# @param per_page [Integer] Requested number of items returned per request.
# Default: 1000, maximum: 10000. Leave blank for default (strongly recommended).
# @param start_at [String] Date and time in the history to start from.
# @return [Array] array of `BrickFTP::API::History::Folder`
def list_folder_history(path:, page: nil, per_page: nil, start_at: nil)
  # path is mandatory; the optional filters are stripped when nil.
  params = { path: path, page: page, per_page: per_page, start_at: start_at }.compact
  BrickFTP::API::History::Folder.all(params)
end
# Show all history for a specific file.
# @see https://brickftp.com/ja/docs/rest-api/history/
# @param path [String] path of file.
# @param page [Integer] Page number of items to return in this request.
# @param per_page [Integer] Requested number of items returned per request.
# Default: 1000, maximum: 10000. Leave blank for default (strongly recommended).
# @param start_at [String] Date and time in the history to start from.
# @return [Array] array of `BrickFTP::API::History::File`
def list_file_history(path:, page: nil, per_page: nil, start_at: nil)
  # path is mandatory; the optional filters are stripped when nil.
  params = { path: path, page: page, per_page: per_page, start_at: start_at }.compact
  BrickFTP::API::History::File.all(params)
end
# List all bundles on the current site.
# @see https://brickftp.com/ja/docs/rest-api/bundles/
# @return [Array] array of BrickFTP::API::Bundle
def list_bundles
BrickFTP::API::Bundle.all
end
# Show a single bundle.
# @see https://brickftp.com/ja/docs/rest-api/bundles/
# @param id bundle id.
# @return [BrickFTP::API::Bundle] bundle object.
def show_bundle(id)
BrickFTP::API::Bundle.find(id)
end
# Create a new bundle on the current site.
# @see https://brickftp.com/ja/docs/rest-api/bundles/
# @param attributes [Hash] Bundle's attributes.
def create_bundle(attributes)
BrickFTP::API::Bundle.create(attributes)
end
# Delete a bundle.
# @see https://brickftp.com/ja/docs/rest-api/bundles/
# @param bundle_or_id [BrickFTP::API::Bundle, Integer] bundle object or bundle id.
# @return [Boolean] return true.
def delete_bundle(bundle_or_id)
instantize_bundle(bundle_or_id).destroy
end
# List the contents of a bundle.
# @see https://brickftp.com/ja/docs/rest-api/bundles/
# @param path [String]
# @param code [String]
# @param host [String]
# @return [Array] array of `BrickFTP::API::BundleContent`.
def list_bundle_contents(path: nil, code:, host:)
BrickFTP::API::BundleContent.all(path: path, code: code, host: host)
end
# Provides download URLs that will enable you to download the files in a bundle.
# @see https://brickftp.com/ja/docs/rest-api/bundles/
# @param code [String]
# @param host [String]
# @param paths [Array] array of path string.
# @return [Array] array of `BrickFTP::API::BundleDownload`.
def list_bundle_downloads(code:, host:, paths: [])
BrickFTP::API::BundleDownload.all(code: code, host: host, paths: paths)
end
# List all behaviors on the current site.
# @see https://brickftp.com/ja/docs/rest-api/behaviors/
# @return [Array] array of BrickFTP::API::Behavior
def list_behaviors
BrickFTP::API::Behavior.all
end
# Show a single behavior.
# @see https://brickftp.com/ja/docs/rest-api/behaviors/
# @param id behavior id.
# @return [BrickFTP::API::Behavior] behavior object.
def show_behavior(id)
BrickFTP::API::Behavior.find(id)
end
# Create a new behavior on the current site.
# @see https://brickftp.com/ja/docs/rest-api/behaviors/
# @param attributes [Hash] Behavior's attributes.
def create_behavior(attributes)
BrickFTP::API::Behavior.create(attributes)
end
# Update an existing behavior.
# @see https://brickftp.com/ja/docs/rest-api/behaviors/
# @param behavior_or_id [BrickFTP::API::Behavior, Integer] behavior object or behavior id.
# @param attributes [Hash] Behavior's attributes.
# @return [BrickFTP::API::Behavior] behavior object.
def update_behavior(behavior_or_id, attributes)
instantize_behavior(behavior_or_id).update(attributes)
end
# Delete a behavior.
# @see https://brickftp.com/ja/docs/rest-api/behaviors/
# @param behavior_or_id [BrickFTP::API::Behavior, Integer] behavior object or behavior id.
# @return [Boolean] return true.
def delete_behavior(behavior_or_id)
instantize_behavior(behavior_or_id).destroy
end
# shows the behaviors that apply to the given path.
# @see https://brickftp.com/ja/docs/rest-api/behaviors/
# @return [Array] array of BrickFTP::API::FolderBehavior
def list_folder_behaviors(path:)
BrickFTP::API::FolderBehavior.all(path: path)
end
# Lists the contents of the folder provided in the URL.
# @see https://brickftp.com/ja/docs/rest-api/file-operations/
# @param path [String]
# @param page [Integer] Page number of items to return in this request.
# @param per_page [Integer] Requested number of items returned per request.
# Maximum: 5000, leave blank for default (strongly recommended).
# @param search [String] Only return items matching the given search text.
# @param sort_by_path [String] Sort by file name, and value is either asc or desc to indicate normal or reverse sort.
# (Note that sort_by[path] = asc is the default.)
# @param sort_by_size [String] Sort by file size, and value is either asc or desc to indicate smaller files
# first or larger files first, respectively.
# @param sort_by_modified_at_datetime [String] Sort by modification time, and value is either asc or desc to
# indicate older files first or newer files first, respectively.
# @return [Array] array of BrickFTP::API::Folder.
def list_folders(path:, page: nil, per_page: nil, search: nil, sort_by_path: nil, sort_by_size: nil, sort_by_modified_at_datetime: nil) # rubocop:disable Metrics/LineLength
  query = { path: path, page: page, per_page: per_page, search: search }.compact
  # Map each sort keyword onto its bracketed query-string key, skipping unset ones.
  sort_options = { 'sort_by[path]': sort_by_path,
                   'sort_by[size]': sort_by_size,
                   'sort_by[modified_at_datetime]': sort_by_modified_at_datetime }
  sort_options.each { |key, value| query[key] = value if value }
  BrickFTP::API::Folder.all(query)
end
# Create a folder.
# @see https://brickftp.com/ja/docs/rest-api/file-operations/
# @param path [String]
# @return [BrickFTP::API::Folder]
def create_folder(path:)
BrickFTP::API::Folder.create(path: path)
end
# provides a download URL that will enable you to download a file.
# @see https://brickftp.com/ja/docs/rest-api/file-operations/
# @param path [String] path for file.
# @param omit_download_uri [Boolean] If true, omit download_uri. (Add query `action=stat`)
# @return [BrickFTP::API::File] file object.
def show_file(path, omit_download_uri: false)
  # action=stat asks the API to skip generating a download URI.
  query = omit_download_uri ? { action: 'stat' } : {}
  BrickFTP::API::File.find(path, params: query)
end
# Move or renames a file or folder to the destination provided in the move_destination parameter.
# @see https://brickftp.com/ja/docs/rest-api/file-operations/
# @param path [String]
# @param move_destination [String]
# @return [BrickFTP::API::FileMove]
def move_file(path:, move_destination:)
BrickFTP::API::FileOperation::Move.create(path: path, 'move-destination': move_destination)
end
# Copy a file or folder to the destination provided in the copy_destination parameter.
# @see https://brickftp.com/ja/docs/rest-api/file-operations/
# @param path [String]
# @param copy_destination [String]
# @return [BrickFTP::API::FileCopy]
def copy_file(path:, copy_destination:)
BrickFTP::API::FileOperation::Copy.create(path: path, 'copy-destination': copy_destination)
end
# Delete a file.
# @see https://brickftp.com/ja/docs/rest-api/file-operations/
# @param file_or_path [BrickFTP::API::File, String] file object or file(folder) path.
# @param recursive [Boolean]
# @return [Boolean] return true.
def delete_file(file_or_path, recursive: false)
instantize_file(file_or_path).destroy(recursive: recursive)
end
# Upload file.
# @see https://brickftp.com/ja/docs/rest-api/file-uploading/
# @param path [String]
# @param source [IO] source `data` (not `path`) to upload
# @param chunk_size [Integer] Size of chunk to multi-part upload.
# @return [BrickFTP::API::FileUpload]
def upload_file(path:, source:, chunk_size: nil)
BrickFTP::API::FileOperation::Upload.create(path: path, source: source, chunk_size: chunk_size)
end
# Get usage of site.
# @return [BrickFTP::API::SiteUsage]
def site_usage
BrickFTP::API::SiteUsage.find
end
private
def instantize_user(user_or_id)
  # Accept either an already-built User object or a bare id.
  user_or_id.is_a?(BrickFTP::API::User) ? user_or_id : BrickFTP::API::User.new(id: user_or_id)
end
def instantize_group(group_or_id)
return group_or_id if group_or_id.is_a?(BrickFTP::API::Group)
BrickFTP::API::Group.new(id: group_or_id)
end
def instantize_permission(permission_or_id)
return permission_or_id if permission_or_id.is_a?(BrickFTP::API::Permission)
BrickFTP::API::Permission.new(id: permission_or_id)
end
def instantize_notification(notification_or_id)
return notification_or_id if notification_or_id.is_a?(BrickFTP::API::Notification)
BrickFTP::API::Notification.new(id: notification_or_id)
end
def instantize_bundle(bundle_or_id)
return bundle_or_id if bundle_or_id.is_a?(BrickFTP::API::Bundle)
BrickFTP::API::Bundle.new(id: bundle_or_id)
end
def instantize_behavior(behavior_or_id)
return behavior_or_id if behavior_or_id.is_a?(BrickFTP::API::Behavior)
BrickFTP::API::Behavior.new(id: behavior_or_id)
end
def instantize_file(file_or_path)
  # Accept either an already-built File object or a bare path string.
  file_or_path.is_a?(BrickFTP::API::File) ? file_or_path : BrickFTP::API::File.new(path: file_or_path)
end
end
end
Update client documentation links
module BrickFTP
class Client
# Login and store authentication session.
# @see https://developers.brickftp.com/#authentication-with-a-session
# @param username [String] username of BrickFTP's user.
# @param password [String] password of BrickFTP's user.
def login(username, password)
BrickFTP::API::Authentication.login(username, password)
end
# Logout and discard authentication session.
# @see https://developers.brickftp.com/#authentication-with-a-session
def logout
BrickFTP::API::Authentication.logout
end
# List all users on the current site.
# @see https://developers.brickftp.com/#users
# @return [Array] array of BrickFTP::API::User
def list_users
BrickFTP::API::User.all
end
# Show a single user.
# @see https://developers.brickftp.com/#users
# @param id user id.
# @return [BrickFTP::API::User] user object.
def show_user(id)
BrickFTP::API::User.find(id)
end
# Create a new user on the current site.
# @see https://developers.brickftp.com/#users
# @param attributes [Hash] User's attributes.
def create_user(attributes)
BrickFTP::API::User.create(attributes)
end
# Update an existing user.
# @see https://developers.brickftp.com/#users
# @param user_or_id [BrickFTP::API::User, Integer] user object or user id.
# @param attributes [Hash] User's attributes.
# @return [BrickFTP::API::User] user object.
def update_user(user_or_id, attributes)
instantize_user(user_or_id).update(attributes)
end
# Delete a user.
# @see https://developers.brickftp.com/#users
# @param user_or_id [BrickFTP::API::User, Integer] user object or user id.
# @return [Boolean] return true.
def delete_user(user_or_id)
instantize_user(user_or_id).destroy
end
# List all groups on the current site.
# @see https://developers.brickftp.com/#groups
def list_groups
BrickFTP::API::Group.all
end
# Show a single group.
# @see https://developers.brickftp.com/#groups
# @param id group id.
# @return [BrickFTP::API::Group] group object.
def show_group(id)
BrickFTP::API::Group.find(id)
end
# Create a new group on the current site.
# @see https://developers.brickftp.com/#groups
# @param attributes [Hash] Group's attributes.
def create_group(attributes)
BrickFTP::API::Group.create(attributes)
end
# Update an existing group.
# @see https://developers.brickftp.com/#groups
# @param group_or_id [BrickFTP::API::Group, Integer] group object or group id.
# @param attributes [Hash] Group's attributes.
# @return [BrickFTP::API::Group] group object.
def update_group(group_or_id, attributes)
instantize_group(group_or_id).update(attributes)
end
# Delete a group.
# @see https://developers.brickftp.com/#groups
# @param group_or_id [BrickFTP::API::Group, Integer] group object or group id.
# @return [Boolean] return true.
def delete_group(group_or_id)
instantize_group(group_or_id).destroy
end
# List all permissions on the current site.
# @see https://developers.brickftp.com/#permissions
def list_permissions
BrickFTP::API::Permission.all
end
# Create a new permission on the current site.
# @see https://developers.brickftp.com/#permissions
# @param attributes [Hash] Permission's attributes.
def create_permission(attributes)
BrickFTP::API::Permission.create(attributes)
end
# Delete a permission.
# @see https://developers.brickftp.com/#permissions
# @param permission_or_id [BrickFTP::API::Permission, Integer] permission object or permission id.
# @return [Boolean] return true.
def delete_permission(permission_or_id)
instantize_permission(permission_or_id).destroy
end
# List all notifications on the current site.
# @see https://developers.brickftp.com/#notifications
def list_notifications
BrickFTP::API::Notification.all
end
# Create a new notification on the current site.
# @see https://developers.brickftp.com/#notifications
# @param attributes [Hash] Notification's attributes.
def create_notification(attributes)
BrickFTP::API::Notification.create(attributes)
end
# Delete a notification.
# @see https://developers.brickftp.com/#notifications
# @param notification_or_id [BrickFTP::API::Notification, Integer] notification object or notification id.
# @return [Boolean] return true.
def delete_notification(notification_or_id)
instantize_notification(notification_or_id).destroy
end
# Show the entire history for the current site.
# @see https://developers.brickftp.com/#history
# @param page [Integer] Page number of items to return in this request.
# @param per_page [Integer] Requested number of items returned per request. Default: 1000, maximum: 10000.
# Leave blank for default (strongly recommended).
# @param start_at [String] Date and time in the history to start from.
# @return [Array] array of `BrickFTP::API::History::Site`
def list_site_history(page: nil, per_page: nil, start_at: nil)
query = { page: page, per_page: per_page, start_at: start_at }.reject { |_, v| v.nil? }
BrickFTP::API::History::Site.all(query)
end
# Show login history only.
# @see https://developers.brickftp.com/#history
# @param page [Integer] Page number of items to return in this request.
# @param per_page [Integer] Requested number of items returned per request. Default: 1000, maximum: 10000.
# Leave blank for default (strongly recommended).
# @param start_at [String] Date and time in the history to start from.
# @return [Array] array of `BrickFTP::API::History::Login`
def list_login_history(page: nil, per_page: nil, start_at: nil)
query = { page: page, per_page: per_page, start_at: start_at }.reject { |_, v| v.nil? }
BrickFTP::API::History::Login.all(query)
end
# Show all history for a specific user.
# @see https://developers.brickftp.com/#history
# @param user_id [Integer] User ID.
# @param page [Integer] Page number of items to return in this request.
# @param per_page [Integer] Requested number of items returned per request.
# Default: 1000, maximum: 10000. Leave blank for default (strongly recommended).
# @param start_at [String] Date and time in the history to start from.
# @return [Array] array of `BrickFTP::API::History::User`
def list_user_history(user_id:, page: nil, per_page: nil, start_at: nil)
query = { user_id: user_id, page: page, per_page: per_page, start_at: start_at }.reject { |_, v| v.nil? }
BrickFTP::API::History::User.all(query)
end
# Show all history for a specific folder.
# @see https://developers.brickftp.com/#history
# @param path [String] path of folder.
# @param page [Integer] Page number of items to return in this request.
# @param per_page [Integer] Requested number of items returned per request.
# Default: 1000, maximum: 10000. Leave blank for default (strongly recommended).
# @param start_at [String] Date and time in the history to start from.
# @return [Array] array of `BrickFTP::API::History::Folder`
def list_folder_history(path:, page: nil, per_page: nil, start_at: nil)
query = { path: path, page: page, per_page: per_page, start_at: start_at }.reject { |_, v| v.nil? }
BrickFTP::API::History::Folder.all(query)
end
# Show all history for a specific file.
# @see https://developers.brickftp.com/#history
# @param path [String] path of file.
# @param page [Integer] Page number of items to return in this request.
# @param per_page [Integer] Requested number of items returned per request.
# Default: 1000, maximum: 10000. Leave blank for default (strongly recommended).
# @param start_at [String] Date and time in the history to start from.
# @return [Array] array of `BrickFTP::API::History::File`
def list_file_history(path:, page: nil, per_page: nil, start_at: nil)
query = { path: path, page: page, per_page: per_page, start_at: start_at }.reject { |_, v| v.nil? }
BrickFTP::API::History::File.all(query)
end
# List all bundles on the current site.
# @see https://developers.brickftp.com/#bundles
# @return [Array] array of BrickFTP::API::Bundle
def list_bundles
BrickFTP::API::Bundle.all
end
# Show a single bundle.
# @see https://developers.brickftp.com/#bundles
# @param id bundle id.
# @return [BrickFTP::API::Bundle] bundle object.
def show_bundle(id)
BrickFTP::API::Bundle.find(id)
end
# Create a new bundle on the current site.
# @see https://developers.brickftp.com/#bundles
# @param attributes [Hash] Bundle's attributes.
def create_bundle(attributes)
BrickFTP::API::Bundle.create(attributes)
end
# Delete a bundle.
# @see https://developers.brickftp.com/#bundles
# @param bundle_or_id [BrickFTP::API::Bundle, Integer] bundle object or bundle id.
# @return [Boolean] return true.
def delete_bundle(bundle_or_id)
instantize_bundle(bundle_or_id).destroy
end
# List the contents of a bundle.
# @see https://developers.brickftp.com/#bundles
# @param path [String]
# @param code [String]
# @param host [String]
# @return [Array] array of `BrickFTP::API::BundleContent`.
def list_bundle_contents(path: nil, code:, host:)
BrickFTP::API::BundleContent.all(path: path, code: code, host: host)
end
# Provides download URLs that will enable you to download the files in a bundle.
# @see https://developers.brickftp.com/#bundles
# @param code [String]
# @param host [String]
# @param paths [Array] array of path string.
# @return [Array] array of `BrickFTP::API::BundleDownload`.
def list_bundle_downloads(code:, host:, paths: [])
BrickFTP::API::BundleDownload.all(code: code, host: host, paths: paths)
end
# List all behaviors on the current site.
# @see https://developers.brickftp.com/#behaviors
# @return [Array] array of BrickFTP::API::Behavior
def list_behaviors
BrickFTP::API::Behavior.all
end
# Show a single behavior.
# @see https://developers.brickftp.com/#behaviors
# @param id behavior id.
# @return [BrickFTP::API::Behavior] behavior object.
def show_behavior(id)
BrickFTP::API::Behavior.find(id)
end
# Create a new behavior on the current site.
# @see https://developers.brickftp.com/#behaviors
# @param attributes [Hash] Behavior's attributes.
def create_behavior(attributes)
BrickFTP::API::Behavior.create(attributes)
end
# Update an existing behavior.
# @see https://developers.brickftp.com/#behaviors
# @param behavior_or_id [BrickFTP::API::Behavior, Integer] behavior object or behavior id.
# @param attributes [Hash] Behavior's attributes.
# @return [BrickFTP::API::Behavior] behavior object.
def update_behavior(behavior_or_id, attributes)
instantize_behavior(behavior_or_id).update(attributes)
end
# Delete a behavior.
# @see https://developers.brickftp.com/#behaviors
# @param behavior_or_id [BrickFTP::API::Behavior, Integer] behavior object or behavior id.
# @return [Boolean] return true.
def delete_behavior(behavior_or_id)
instantize_behavior(behavior_or_id).destroy
end
# shows the behaviors that apply to the given path.
# @see https://developers.brickftp.com/#behaviors
# @return [Array] array of BrickFTP::API::FolderBehavior
def list_folder_behaviors(path:)
BrickFTP::API::FolderBehavior.all(path: path)
end
# Lists the contents of the folder provided in the URL.
# @see https://developers.brickftp.com/#file-and-folder-operations
# @param path [String]
# @param page [Integer] Page number of items to return in this request.
# @param per_page [Integer] Requested number of items returned per request.
# Maximum: 5000, leave blank for default (strongly recommended).
# @param search [String] Only return items matching the given search text.
# @param sort_by_path [String] Sort by file name, and value is either asc or desc to indicate normal or reverse sort.
# (Note that sort_by[path] = asc is the default.)
# @param sort_by_size [String] Sort by file size, and value is either asc or desc to indicate smaller files
# first or larger files first, respectively.
# @param sort_by_modified_at_datetime [String] Sort by modification time, and value is either asc or desc to
# indicate older files first or newer files first, respectively.
# @return [Array] array of BrickFTP::API::Folder.
def list_folders(path:, page: nil, per_page: nil, search: nil, sort_by_path: nil, sort_by_size: nil, sort_by_modified_at_datetime: nil) # rubocop:disable Metrics/LineLength
  query = { path: path, page: page, per_page: per_page, search: search }.compact
  # Map each sort keyword onto its bracketed query-string key, skipping unset ones.
  sort_options = { 'sort_by[path]': sort_by_path,
                   'sort_by[size]': sort_by_size,
                   'sort_by[modified_at_datetime]': sort_by_modified_at_datetime }
  sort_options.each { |key, value| query[key] = value if value }
  BrickFTP::API::Folder.all(query)
end
# Create a folder.
# @see https://developers.brickftp.com/#file-and-folder-operations
# @param path [String]
# @return [BrickFTP::API::Folder]
def create_folder(path:)
BrickFTP::API::Folder.create(path: path)
end
# provides a download URL that will enable you to download a file.
# @see https://developers.brickftp.com/#file-and-folder-operations
# @param path [String] path for file.
# @param omit_download_uri [Boolean] If true, omit download_uri. (Add query `action=stat`)
# @return [BrickFTP::API::File] file object.
def show_file(path, omit_download_uri: false)
  # action=stat asks the API to skip generating a download URI.
  query = omit_download_uri ? { action: 'stat' } : {}
  BrickFTP::API::File.find(path, params: query)
end
# Move or renames a file or folder to the destination provided in the move_destination parameter.
# @see https://developers.brickftp.com/#file-and-folder-operations
# @param path [String]
# @param move_destination [String]
# @return [BrickFTP::API::FileMove]
def move_file(path:, move_destination:)
BrickFTP::API::FileOperation::Move.create(path: path, 'move-destination': move_destination)
end
# Copy a file or folder to the destination provided in the copy_destination parameter.
# @see https://developers.brickftp.com/#file-and-folder-operations
# @param path [String]
# @param copy_destination [String]
# @return [BrickFTP::API::FileCopy]
def copy_file(path:, copy_destination:)
BrickFTP::API::FileOperation::Copy.create(path: path, 'copy-destination': copy_destination)
end
# Delete a file.
# @see https://developers.brickftp.com/#file-and-folder-operations
# @param file_or_path [BrickFTP::API::File, String] file object or file(folder) path.
# @param recursive [Boolean]
# @return [Boolean] return true.
def delete_file(file_or_path, recursive: false)
instantize_file(file_or_path).destroy(recursive: recursive)
end
# Upload file.
# @see https://developers.brickftp.com/#file-uploading
# @param path [String]
# @param source [IO] source `data` (not `path`) to upload
# @param chunk_size [Integer] Size of chunk to multi-part upload.
# @return [BrickFTP::API::FileUpload]
def upload_file(path:, source:, chunk_size: nil)
BrickFTP::API::FileOperation::Upload.create(path: path, source: source, chunk_size: chunk_size)
end
# Get usage of site.
# @return [BrickFTP::API::SiteUsage]
def site_usage
BrickFTP::API::SiteUsage.find
end
private
def instantize_user(user_or_id)
return user_or_id if user_or_id.is_a?(BrickFTP::API::User)
BrickFTP::API::User.new(id: user_or_id)
end
def instantize_group(group_or_id)
return group_or_id if group_or_id.is_a?(BrickFTP::API::Group)
BrickFTP::API::Group.new(id: group_or_id)
end
def instantize_permission(permission_or_id)
return permission_or_id if permission_or_id.is_a?(BrickFTP::API::Permission)
BrickFTP::API::Permission.new(id: permission_or_id)
end
def instantize_notification(notification_or_id)
return notification_or_id if notification_or_id.is_a?(BrickFTP::API::Notification)
BrickFTP::API::Notification.new(id: notification_or_id)
end
def instantize_bundle(bundle_or_id)
return bundle_or_id if bundle_or_id.is_a?(BrickFTP::API::Bundle)
BrickFTP::API::Bundle.new(id: bundle_or_id)
end
def instantize_behavior(behavior_or_id)
return behavior_or_id if behavior_or_id.is_a?(BrickFTP::API::Behavior)
BrickFTP::API::Behavior.new(id: behavior_or_id)
end
def instantize_file(file_or_path)
return file_or_path if file_or_path.is_a?(BrickFTP::API::File)
BrickFTP::API::File.new(path: file_or_path)
end
end
end
|
class BroadwayNow::CLI
  # Orchestrates the app: scrape, decorate, list, then hand off to the menu.
  def call
    greet
    make_shows
    add_attributes
    list_shows
    menu
    goodbye
  end

  def greet
    puts "Welcome to Broadway Now"
  end

  # Scrape the index page and build Show objects from the results.
  def make_shows
    shows_array = BroadwayNow::Scraper.main_scraper
    BroadwayNow::Show.create_shows(shows_array)
  end

  # Visit each show's own page and merge in the extra details.
  def add_attributes
    BroadwayNow::Show.all.each do |show|
      url = show.url
      extra_info_hash = BroadwayNow::Scraper.additional_scraper(url)
      show.add_info(extra_info_hash)
    end
  end

  # Print a numbered list of all shows; memoizes the list in @shows for menu.
  def list_shows
    puts "-----Shows currently running:-----"
    @shows = BroadwayNow::Show.all
    @shows.each.with_index(1) do |show, i|
      puts "#{i}. #{show.name}"
    end
    puts "----------------------------------"
  end

  # Interactive loop. Accepts a show number, 'all', or 'exit'.
  def menu
    input = nil
    while input != "exit"
      puts "Enter show number for more info, 'all' to see all shows, or 'exit' :"
      raw = gets
      break if raw.nil? # EOF (e.g. piped stdin ran dry) — bail out instead of crashing on nil

      input = raw.strip.downcase
      # Bound the selection by the number of scraped shows rather than the old
      # hard-coded 21, so adding shows no longer requires editing this condition.
      if input.to_i > 0 && input.to_i <= @shows.size
        show = @shows[input.to_i - 1]
        puts "--------------------Details--------------------"
        puts " Show: #{show.name}"
        puts " Theater: #{show.theater}"
        puts " Price: #{show.price}"
        puts " Website: #{show.url}"
        puts " Running Time: #{show.running_time}"
        puts "\nStory: \n"
        puts "#{show.story}"
        puts "---------------------------------------------"
      elsif input == "all"
        list_shows
      elsif input == "exit"
        goodbye
      else
        puts "Oops! incorrect input! Enter show number for more info, 'all' to see all shows, or 'exit' :"
      end
    end
  end

  def goodbye
    puts "Goodbye!"
    exit 0
  end
end
update cli
class BroadwayNow::CLI
  # Orchestrates the app: scrape, decorate, list, then hand off to the menu.
  def call
    greet
    make_shows
    add_attributes
    list_shows
    menu
    goodbye
  end

  def greet
    puts "-----Welcome to Broadway Now!-----"
  end

  # Scrape the index page and build Show objects from the results.
  def make_shows
    shows_array = BroadwayNow::Scraper.main_scraper
    BroadwayNow::Show.create_shows(shows_array)
  end

  # Visit each show's own page and merge in the extra details.
  def add_attributes
    BroadwayNow::Show.all.each do |show|
      url = show.url
      extra_info_hash = BroadwayNow::Scraper.additional_scraper(url)
      show.add_info(extra_info_hash)
    end
  end

  # Print a numbered list of all shows; memoizes the list in @shows for menu.
  def list_shows
    puts "-----Shows currently running:-----"
    @shows = BroadwayNow::Show.all
    @shows.each.with_index(1) do |show, i|
      puts "#{i}. #{show.name}"
    end
    puts "----------------------------------"
  end

  # Interactive loop. Accepts a show number, 'all', or 'exit'.
  def menu
    input = nil
    while input != "exit"
      puts "Enter show number for more info, 'all' to see all shows, or 'exit' :"
      raw = gets
      break if raw.nil? # EOF (e.g. piped stdin ran dry) — bail out instead of crashing on nil

      input = raw.strip.downcase
      # Bound the selection by the number of scraped shows rather than the old
      # hard-coded 21, so adding shows no longer requires editing this condition.
      if input.to_i > 0 && input.to_i <= @shows.size
        show = @shows[input.to_i - 1]
        puts "--------------------Details--------------------"
        puts " Show: #{show.name}"
        puts " Theater: #{show.theater}"
        puts " Price: #{show.price}"
        puts " Website: #{show.url}"
        puts " Running Time: #{show.running_time}"
        puts "\nStory: \n"
        puts "#{show.story}"
        puts "---------------------------------------------"
      elsif input == "all"
        list_shows
      elsif input == "exit"
        goodbye
      else
        puts "Oops! incorrect input! Enter show number for more info, 'all' to see all shows, or 'exit' :"
      end
    end
  end

  def goodbye
    puts "Goodbye!"
    exit 0
  end
end
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with this
# work for additional information regarding copyright ownership. The ASF
# licenses this file to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
require 'rbconfig'
require 'pathname'
autoload :Tempfile, 'tempfile'
autoload :YAML, 'yaml'
autoload :REXML, 'rexml/document'
gem 'xml-simple' ; autoload :XmlSimple, 'xmlsimple'
gem 'builder' ; autoload :Builder, 'builder' # A different kind of buildr, one we use to create XML.
require 'highline/import'
module Buildr
module Util
extend self
def java_platform?
  # JRuby reports RUBY_PLATFORM as 'java'; returns the match offset or nil.
  /java/ =~ RUBY_PLATFORM
end
# In order to determine if we are running on a windows OS,
# prefer this function instead of using Gem.win_platform?.
#
# Gem.win_platform? only checks the RUBY_PLATFORM global,
# which in some cases, like when running on JRuby, is not
# sufficient for our purpose:
#
# For JRuby, the value for RUBY_PLATFORM will always be 'java'
# That's why this function checks on Config::CONFIG['host_os']
def win_os?
  # RbConfig supersedes the bare Config constant (deprecated in 1.9.3, removed
  # in Ruby 2.2); 'rbconfig' is already required at the top of this file.
  # The alternation also drops the duplicated 'cygwin' entry.
  RbConfig::CONFIG['host_os'] =~ /windows|cygwin|bccwin|djgpp|mingw|mswin|wince/i
end
# Runs Ruby with these command line arguments. The last argument may be a hash,
# supporting the following keys:
# :command -- Runs the specified script (e.g., :command=>'gem')
# :sudo -- Run as sudo on operating systems that require it.
# :verbose -- Override Rake's verbose flag.
def ruby(*args)
options = Hash === args.last ? args.pop : {}
cmd = []
ruby_bin = File.expand_path(Config::CONFIG['ruby_install_name'], Config::CONFIG['bindir'])
if options.delete(:sudo) && !(win_os? || Process.uid == File.stat(ruby_bin).uid)
cmd << 'sudo' << '-u' << "##{File.stat(ruby_bin).uid}"
end
cmd << ruby_bin
cmd << '-S' << options.delete(:command) if options[:command]
cmd.concat args.flatten
cmd.push options
sh *cmd do |ok, status|
ok or fail "Command ruby failed with status (#{status ? status.exitstatus : 'unknown'}): [#{cmd.join(" ")}]"
end
end
# Just like File.expand_path, but for windows systems it
# capitalizes the drive name and ensures backslashes are used
def normalize_path(path, *dirs)
path = File.expand_path(path, *dirs)
if win_os?
path.gsub!('/', '\\').gsub!(/^[a-zA-Z]+:/) { |s| s.upcase }
else
path
end
end
# Return the timestamp of file, without having to create a file task
def timestamp(file)
if File.exist?(file)
File.mtime(file)
else
Rake::EARLY
end
end
# Return the path to the first argument, starting from the path provided by the
# second argument.
#
# For example:
# relative_path('foo/bar', 'foo')
# => 'bar'
# relative_path('foo/bar', 'baz')
# => '../foo/bar'
# relative_path('foo/bar')
# => 'foo/bar'
# relative_path('/foo/bar', 'baz')
# => '/foo/bar'
def relative_path(to, from = '.')
to = Pathname.new(to).cleanpath
return to.to_s if from.nil?
to_path = Pathname.new(File.expand_path(to.to_s, "/"))
from_path = Pathname.new(File.expand_path(from.to_s, "/"))
to_path.relative_path_from(from_path).to_s
end
# Generally speaking, it's not a good idea to operate on dot files (files starting with dot).
# These are considered invisible files (.svn, .hg, .irbrc, etc). Dir.glob/FileList ignore them
# on purpose. There are few cases where we do have to work with them (filter, zip), a better
# solution is welcome, maybe being more explicit with include. For now, this will do.
def recursive_with_dot_files(*dirs)
FileList[dirs.map { |dir| File.join(dir, '/**/{*,.*}') }].reject { |file| File.basename(file) =~ /^[.]{1,2}$/ }
end
# Utility methods for running gem commands
module Gems #:nodoc:
extend self
# Install gems specified by each Gem::Dependency if they are missing. This method prompts the user
# for permission before installing anything.
#
# Returns the installed Gem::Dependency objects or fails if permission not granted or when buildr
# is not running interactively (on a tty)
def install(*dependencies)
raise ArgumentError, "Expected at least one argument" if dependencies.empty?
remote = dependencies.map { |dep| Gem::SourceInfoCache.search(dep).last || dep }
not_found_deps, to_install = remote.partition { |gem| gem.is_a?(Gem::Dependency) }
fail Gem::LoadError, "Build requires the gems #{not_found_deps.join(', ')}, which cannot be found in local or remote repository." unless not_found_deps.empty?
uses = "This build requires the gems #{to_install.map(&:full_name).join(', ')}:"
fail Gem::LoadError, "#{uses} to install, run Buildr interactively." unless $stdout.isatty
unless agree("#{uses} do you want me to install them? [Y/n]", true)
fail Gem::LoadError, 'Cannot build without these gems.'
end
to_install.each do |spec|
say "Installing #{spec.full_name} ... " if verbose
command 'install', spec.name, '-v', spec.version.to_s, :verbose => false
Gem.source_index.load_gems_in Gem::SourceIndex.installed_spec_directories
end
to_install
end
# Execute a GemRunner command
def command(cmd, *args)
options = Hash === args.last ? args.pop : {}
gem_home = ENV['GEM_HOME'] || Gem.path.find { |f| File.writable?(f) }
options[:sudo] = :root unless Util.win_os? || gem_home
options[:command] = 'gem'
args << options
args.unshift '-i', gem_home if cmd == 'install' && gem_home && !args.any?{ |a| a[/-i|--install-dir/] }
Util.ruby cmd, *args
end
end # Gems
end # Util
end
# Backport of Object#instance_exec for Ruby < 1.9; the guard makes this a
# no-op on rubies where instance_exec is built in.
class Object #:nodoc:
  unless defined? instance_exec # 1.9
    module InstanceExecMethods #:nodoc:
    end
    include InstanceExecMethods

    # Evaluate the block with the given arguments within the context of
    # this object, so self is set to the method receiver.
    #
    # From Mauricio's http://eigenclass.org/hiki/bounded+space+instance_exec
    def instance_exec(*args, &block)
      begin
        # Thread.critical (1.8-only) guards the name probing below against
        # two threads picking the same temporary method name.
        old_critical, Thread.critical = Thread.critical, true
        n = 0
        # Find an unused __instance_exec<N> name on this object.
        n += 1 while respond_to?(method_name = "__instance_exec#{n}")
        InstanceExecMethods.module_eval { define_method(method_name, &block) }
      ensure
        Thread.critical = old_critical
      end

      begin
        send(method_name, *args)
      ensure
        # Best-effort cleanup; the method may already have been removed.
        InstanceExecMethods.module_eval { remove_method(method_name) } rescue nil
      end
    end
  end
end
# Backport of Kernel#tap for Ruby < 1.9: yields self to the given block and
# returns self, so a value can be observed mid-chain. No-op on 1.9+.
module Kernel #:nodoc:
  unless defined? tap # 1.9
    def tap
      yield(self) if block_given?
      return self
    end
  end
end
# Backport of Symbol#to_proc for Ruby < 1.9, enabling e.g. names.map(&:upcase).
# No-op on 1.9+.
class Symbol #:nodoc:
  unless defined? to_proc # 1.9
    # Borrowed from Ruby 1.9: the first block argument becomes the receiver,
    # any remaining arguments are forwarded to the named method.
    def to_proc
      Proc.new { |*argv| argv.shift.__send__(self, *argv) }
    end
  end
end
# Backport of BasicObject for Ruby < 1.9: a near-blank slate keeping only
# the methods needed for dispatch and identity, so method_missing-based
# proxies intercept everything else. No-op on 1.9+.
unless defined? BasicObject # 1.9
  class BasicObject #:nodoc:
    # Strip every inherited method except the dispatch/identity primitives.
    (instance_methods - ['__send__', '__id__', '==', 'send', 'send!', 'respond_to?', 'equal?', 'object_id']).
      each do |method|
        undef_method method
      end

    def self.ancestors
      [Kernel]
    end
  end
end
# A Hash that also supports attribute-style access: reading obj.foo returns
# self[:foo], and writing obj.foo = v stores self[:foo] = v. Similar to
# OpenStruct, but keeps the full Hash interface.
class OpenObject < Hash
  # source - optional hash of initial entries; block - default-value proc
  # forwarded to Hash#initialize.
  def initialize(source = nil, &block)
    super(&block)
    update(source) if source
  end

  # Messages ending in '=' store the first argument under the symbol key;
  # any other message reads the entry (nil when absent).
  def method_missing(symbol, *args)
    name = symbol.to_s
    if name.end_with?('=')
      self[name[0..-2].to_sym] = args.first
    else
      self[symbol]
    end
  end
end
# Buildr extensions to Hash: Java properties conversion plus key-subset
# helpers (only/except).
class Hash
  class << self
    # :call-seq:
    #   Hash.from_java_properties(string)
    #
    # Returns a hash from a string in the Java properties file format. For example:
    #   str = 'foo=bar\nbaz=fab'
    #   Hash.from_properties(str)
    #   => { 'foo'=>'bar', 'baz'=>'fab' }
    def from_java_properties(string)
      result = {}
      stream = Java.java.io.StringBufferInputStream.new(string)
      props = Java.java.util.Properties.new
      props.load stream
      iterator = props.keySet.iterator
      while iterator.hasNext
        # JRuby hands back keys that already behave as Ruby strings, while
        # MRI's Java bridge returns objects that need an explicit toString;
        # normalize so both work as hash keys.
        key = iterator.next
        key = key.toString unless String === key
        result[key] = props.getProperty(key)
      end
      result
    end
  end

  # :call-seq:
  #   only(keys*) => hash
  #
  # Returns a new hash with only the specified keys.
  #
  # For example:
  #   { :a=>1, :b=>2, :c=>3, :d=>4 }.only(:a, :c)
  #   => { :a=>1, :c=>3 }
  def only(*keys)
    subset = {}
    keys.each { |key| subset[key] = self[key] if has_key?(key) }
    subset
  end

  # :call-seq:
  #   except(keys*) => hash
  #
  # Returns a new hash without the specified keys.
  #
  # For example:
  #   { :a=>1, :b=>2, :c=>3, :d=>4 }.except(:a, :c)
  #   => { :b=>2, :d=>4 }
  def except(*keys)
    remaining = {}
    each { |key, value| remaining[key] = value unless keys.include?(key) }
    remaining
  end

  # :call-seq:
  #   to_java_properties => string
  #
  # Convert hash to string format used for Java properties file, with keys
  # sorted and tab/newline/formfeed/backslash characters escaped. For example:
  #   { 'foo'=>'bar', 'baz'=>'fab' }.to_java_properties
  #   => foo=bar
  #      baz=fab
  def to_java_properties
    escapes = { "\t"=>"t", "\r"=>"r", "\n"=>"n", "\f"=>"f", "\\"=>"\\" }
    keys.sort.map do |key|
      escaped = self[key].gsub(/[\t\r\n\f\\]/) { |char| "\\" + escapes[char] }
      "#{key}=#{escaped}"
    end.join("\n")
  end
end
# JRuby-only workarounds; this entire branch is skipped on MRI.
if Buildr::Util.java_platform?
  require 'ffi'

  # Fix for BUILDR-292.
  # JRuby fails to rename a file on different devices
  # this monkey-patch wont be needed when JRUBY-3381 gets resolved.
  module FileUtils #:nodoc:
    alias_method :__mv_native, :mv
    # Move that copies+deletes when source and destination live on
    # different devices, falling back to the native rename otherwise.
    def mv(from, to, options = nil)
      dir_to = File.directory?(to) ? to : File.dirname(to)
      Array(from).each do |from|
        dir_from = File.dirname(from)
        if File.stat(dir_from).dev != File.stat(dir_to).dev
          cp from, to, options
          rm from, options
        else
          __mv_native from, to, options
        end
      end
    end
    private :mv
  end

  module RakeFileUtils #:nodoc:
    # Merges Rake's default file-utils options into args; the :default
    # verbosity marker is treated as quiet.
    def rake_merge_option(args, defaults)
      defaults[:verbose] = false if defaults[:verbose] == :default
      if Hash === args.last
        defaults.update(args.last)
        args.pop
      end
      args.push defaults
      args
    end
    private :rake_merge_option
  end

  module Buildr
    # Minimal stand-in for Process::Status. The FFI-based system() call in
    # FileUtils#sh below bypasses JRuby's own child-process bookkeeping, so
    # $? is populated with one of these instead.
    class ProcessStatus
      attr_reader :pid, :termsig, :stopsig, :exitstatus

      def initialize(pid, success, exitstatus)
        @pid = pid
        @success = success
        @exitstatus = exitstatus
        # The C system() call has already reaped the child, so the process
        # is never reported as signal-terminated or stopped.
        @termsig = nil
        @stopsig = nil
      end

      def &(num)
        pid & num
      end

      def ==(other)
        pid == other.pid
      end

      def >>(num)
        pid >> num
      end

      def coredump?
        false
      end

      def exited?
        true
      end

      def stopped?
        false
      end

      def success?
        @success
      end

      def to_i
        pid
      end

      def to_int
        pid
      end

      def to_s
        pid.to_s
      end
    end
  end

  module FileUtils
    extend FFI::Library
    # Alias shuffle: keep JRuby's Kernel#system reachable as system, while
    # exposing the C library's system(3) as __native_system__ for sh below.
    alias_method :__jruby_system__, :system
    attach_function :system, [:string], :int
    alias_method :__native_system__, :system
    alias_method :system, :__jruby_system__

    # code "borrowed" directly from Rake
    def sh(*cmd, &block)
      options = (Hash === cmd.last) ? cmd.pop : {}
      unless block_given?
        show_command = cmd.join(" ")
        show_command = show_command[0,42] + "..."

        block = lambda { |ok, status|
          ok or fail "Command failed with status (#{status.exitstatus}): [#{show_command}]"
        }
      end
      if RakeFileUtils.verbose_flag == :default
        options[:verbose] = false
      else
        options[:verbose] ||= RakeFileUtils.verbose_flag
      end
      options[:noop] ||= RakeFileUtils.nowrite_flag
      rake_check_options options, :noop, :verbose
      rake_output_message cmd.join(" ") if options[:verbose]
      unless options[:noop]
        cd = "cd '#{Dir.pwd}' && "
        args = if cmd.size > 1 then cmd[1..cmd.size] else [] end
        # Single command on Windows runs through `call`; otherwise each
        # argument is quoted and the whole line goes to native system(3).
        res = if Buildr::Util.win_os? && cmd.size == 1
          __native_system__("#{cd} call #{cmd.first}")
        else
          arg_str = args.map { |a| "'#{a}'" }
          __native_system__(cd + cmd.first + ' ' + arg_str.join(' '))
        end
        # NOTE(review): $? is normally read-only in Ruby; this assignment
        # appears to rely on JRuby accepting it — confirm before touching.
        $? = Buildr::ProcessStatus.new(0, res == 0, res) # KLUDGE
        block.call(res == 0, $?)
      end
    end
  end
end
Added special monkey-patched FileUtils#sh on MRI to avoid spurious messages
git-svn-id: d8f3215415546ce936cf3b822120ca56e5ebeaa0@917050 13f79535-47bb-0310-9956-ffa450edef68
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with this
# work for additional information regarding copyright ownership. The ASF
# licenses this file to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
require 'rbconfig'
require 'pathname'
autoload :Tempfile, 'tempfile'
autoload :YAML, 'yaml'
autoload :REXML, 'rexml/document'
gem 'xml-simple' ; autoload :XmlSimple, 'xmlsimple'
gem 'builder' ; autoload :Builder, 'builder' # A different kind of buildr, one we use to create XML.
require 'highline/import'
module Buildr

  # Assorted helper methods used throughout Buildr.
  module Util

    extend self

    # Returns truthy when running on JRuby (RUBY_PLATFORM is always 'java' there).
    def java_platform?
      RUBY_PLATFORM =~ /java/
    end

    # In order to determine if we are running on a windows OS,
    # prefer this function instead of using Gem.win_platform?.
    #
    # Gem.win_platform? only checks the RUBY_PLATFORM global, which is not
    # sufficient for our purpose in some cases, e.g. when running on JRuby
    # the value for RUBY_PLATFORM will always be 'java'.
    # That's why this function checks Config::CONFIG['host_os'] instead.
    def win_os?
      Config::CONFIG['host_os'] =~ /windows|cygwin|bccwin|cygwin|djgpp|mingw|mswin|wince/i
    end

    # Runs Ruby with these command line arguments. The last argument may be a hash,
    # supporting the following keys:
    #   :command -- Runs the specified script (e.g., :command=>'gem')
    #   :sudo    -- Run as sudo on operating systems that require it.
    #   :verbose -- Override Rake's verbose flag.
    def ruby(*args)
      options = Hash === args.last ? args.pop : {}
      cmd = []
      ruby_bin = File.expand_path(Config::CONFIG['ruby_install_name'], Config::CONFIG['bindir'])
      # Re-run as the interpreter's owning user when :sudo was requested and
      # we are not already that user (sudo is never used on Windows).
      if options.delete(:sudo) && !(win_os? || Process.uid == File.stat(ruby_bin).uid)
        cmd << 'sudo' << '-u' << "##{File.stat(ruby_bin).uid}"
      end
      cmd << ruby_bin
      cmd << '-S' << options.delete(:command) if options[:command]
      cmd.concat args.flatten
      cmd.push options
      sh *cmd do |ok, status|
        ok or fail "Command ruby failed with status (#{status ? status.exitstatus : 'unknown'}): [#{cmd.join(" ")}]"
      end
    end

    # Just like File.expand_path, but for windows systems it
    # capitalizes the drive name and ensures backslashes are used.
    def normalize_path(path, *dirs)
      path = File.expand_path(path, *dirs)
      if win_os?
        # Fixed: the original chained destructive calls
        # (path.gsub!('/', '\\').gsub!(...)). gsub! returns nil when no
        # substitution takes place, so a path with no '/' raised
        # NoMethodError, and a path with no drive letter returned nil.
        # The non-destructive forms always return the (possibly unchanged)
        # string.
        path.gsub('/', '\\').gsub(/^[a-zA-Z]+:/) { |s| s.upcase }
      else
        path
      end
    end

    # Return the timestamp of file, without having to create a file task
    def timestamp(file)
      if File.exist?(file)
        File.mtime(file)
      else
        Rake::EARLY
      end
    end

    # Return the path to the first argument, starting from the path provided by the
    # second argument.
    #
    # For example:
    #   relative_path('foo/bar', 'foo')
    #   => 'bar'
    #   relative_path('foo/bar', 'baz')
    #   => '../foo/bar'
    #   relative_path('foo/bar')
    #   => 'foo/bar'
    #
    # When +from+ is nil, returns the cleaned +to+ path unchanged.
    def relative_path(to, from = '.')
      to = Pathname.new(to).cleanpath
      return to.to_s if from.nil?
      to_path = Pathname.new(File.expand_path(to.to_s, "/"))
      from_path = Pathname.new(File.expand_path(from.to_s, "/"))
      to_path.relative_path_from(from_path).to_s
    end

    # Generally speaking, it's not a good idea to operate on dot files (files starting with dot).
    # These are considered invisible files (.svn, .hg, .irbrc, etc). Dir.glob/FileList ignore them
    # on purpose. There are few cases where we do have to work with them (filter, zip), a better
    # solution is welcome, maybe being more explicit with include. For now, this will do.
    def recursive_with_dot_files(*dirs)
      FileList[dirs.map { |dir| File.join(dir, '/**/{*,.*}') }].reject { |file| File.basename(file) =~ /^[.]{1,2}$/ }
    end

    # Utility methods for running gem commands
    module Gems #:nodoc:
      extend self

      # Install gems specified by each Gem::Dependency if they are missing. This method prompts the user
      # for permission before installing anything.
      #
      # Returns the installed Gem::Dependency objects or fails if permission not granted or when buildr
      # is not running interactively (on a tty)
      def install(*dependencies)
        raise ArgumentError, "Expected at least one argument" if dependencies.empty?
        remote = dependencies.map { |dep| Gem::SourceInfoCache.search(dep).last || dep }
        # Anything that is still a Gem::Dependency was not found in the cache.
        not_found_deps, to_install = remote.partition { |gem| gem.is_a?(Gem::Dependency) }
        fail Gem::LoadError, "Build requires the gems #{not_found_deps.join(', ')}, which cannot be found in local or remote repository." unless not_found_deps.empty?
        uses = "This build requires the gems #{to_install.map(&:full_name).join(', ')}:"
        fail Gem::LoadError, "#{uses} to install, run Buildr interactively." unless $stdout.isatty
        unless agree("#{uses} do you want me to install them? [Y/n]", true)
          fail Gem::LoadError, 'Cannot build without these gems.'
        end
        to_install.each do |spec|
          say "Installing #{spec.full_name} ... " if verbose
          command 'install', spec.name, '-v', spec.version.to_s, :verbose => false
          # Refresh the local index so the freshly installed gems are visible.
          Gem.source_index.load_gems_in Gem::SourceIndex.installed_spec_directories
        end
        to_install
      end

      # Execute a GemRunner command
      def command(cmd, *args)
        options = Hash === args.last ? args.pop : {}
        gem_home = ENV['GEM_HOME'] || Gem.path.find { |f| File.writable?(f) }
        # Without a writable gem home, escalate to root (never on Windows).
        options[:sudo] = :root unless Util.win_os? || gem_home
        options[:command] = 'gem'
        args << options
        args.unshift '-i', gem_home if cmd == 'install' && gem_home && !args.any?{ |a| a[/-i|--install-dir/] }
        Util.ruby cmd, *args
      end
    end # Gems

  end # Util
end
# Backport of Object#instance_exec for Ruby < 1.9; the guard makes this a
# no-op on rubies where instance_exec is built in.
class Object #:nodoc:
  unless defined? instance_exec # 1.9
    module InstanceExecMethods #:nodoc:
    end
    include InstanceExecMethods

    # Evaluate the block with the given arguments within the context of
    # this object, so self is set to the method receiver.
    #
    # From Mauricio's http://eigenclass.org/hiki/bounded+space+instance_exec
    def instance_exec(*args, &block)
      begin
        # Thread.critical (1.8-only) guards the name probing below against
        # two threads picking the same temporary method name.
        old_critical, Thread.critical = Thread.critical, true
        n = 0
        # Find an unused __instance_exec<N> name on this object.
        n += 1 while respond_to?(method_name = "__instance_exec#{n}")
        InstanceExecMethods.module_eval { define_method(method_name, &block) }
      ensure
        Thread.critical = old_critical
      end

      begin
        send(method_name, *args)
      ensure
        # Best-effort cleanup; the method may already have been removed.
        InstanceExecMethods.module_eval { remove_method(method_name) } rescue nil
      end
    end
  end
end
# Backport of Kernel#tap for Ruby < 1.9: yields self to the given block and
# returns self, so a value can be observed mid-chain. No-op on 1.9+.
module Kernel #:nodoc:
  unless defined? tap # 1.9
    def tap
      yield(self) if block_given?
      return self
    end
  end
end
# Backport of Symbol#to_proc for Ruby < 1.9, enabling e.g. names.map(&:upcase).
# No-op on 1.9+.
class Symbol #:nodoc:
  unless defined? to_proc # 1.9
    # Borrowed from Ruby 1.9: the first block argument becomes the receiver,
    # any remaining arguments are forwarded to the named method.
    def to_proc
      Proc.new { |*argv| argv.shift.__send__(self, *argv) }
    end
  end
end
# Backport of BasicObject for Ruby < 1.9: a near-blank slate keeping only
# the methods needed for dispatch and identity, so method_missing-based
# proxies intercept everything else. No-op on 1.9+.
unless defined? BasicObject # 1.9
  class BasicObject #:nodoc:
    # Strip every inherited method except the dispatch/identity primitives.
    (instance_methods - ['__send__', '__id__', '==', 'send', 'send!', 'respond_to?', 'equal?', 'object_id']).
      each do |method|
        undef_method method
      end

    def self.ancestors
      [Kernel]
    end
  end
end
# A Hash that also supports attribute-style access: reading obj.foo returns
# self[:foo], and writing obj.foo = v stores self[:foo] = v. Similar to
# OpenStruct, but keeps the full Hash interface.
class OpenObject < Hash
  # source - optional hash of initial entries; block - default-value proc
  # forwarded to Hash#initialize.
  def initialize(source = nil, &block)
    super(&block)
    update(source) if source
  end

  # Messages ending in '=' store the first argument under the symbol key;
  # any other message reads the entry (nil when absent).
  def method_missing(symbol, *args)
    name = symbol.to_s
    if name.end_with?('=')
      self[name[0..-2].to_sym] = args.first
    else
      self[symbol]
    end
  end
end
# Buildr extensions to Hash: Java properties conversion plus key-subset
# helpers (only/except).
class Hash
  class << self
    # :call-seq:
    #   Hash.from_java_properties(string)
    #
    # Returns a hash from a string in the Java properties file format. For example:
    #   str = 'foo=bar\nbaz=fab'
    #   Hash.from_properties(str)
    #   => { 'foo'=>'bar', 'baz'=>'fab' }
    def from_java_properties(string)
      result = {}
      stream = Java.java.io.StringBufferInputStream.new(string)
      props = Java.java.util.Properties.new
      props.load stream
      iterator = props.keySet.iterator
      while iterator.hasNext
        # JRuby hands back keys that already behave as Ruby strings, while
        # MRI's Java bridge returns objects that need an explicit toString;
        # normalize so both work as hash keys.
        key = iterator.next
        key = key.toString unless String === key
        result[key] = props.getProperty(key)
      end
      result
    end
  end

  # :call-seq:
  #   only(keys*) => hash
  #
  # Returns a new hash with only the specified keys.
  #
  # For example:
  #   { :a=>1, :b=>2, :c=>3, :d=>4 }.only(:a, :c)
  #   => { :a=>1, :c=>3 }
  def only(*keys)
    subset = {}
    keys.each { |key| subset[key] = self[key] if has_key?(key) }
    subset
  end

  # :call-seq:
  #   except(keys*) => hash
  #
  # Returns a new hash without the specified keys.
  #
  # For example:
  #   { :a=>1, :b=>2, :c=>3, :d=>4 }.except(:a, :c)
  #   => { :b=>2, :d=>4 }
  def except(*keys)
    remaining = {}
    each { |key, value| remaining[key] = value unless keys.include?(key) }
    remaining
  end

  # :call-seq:
  #   to_java_properties => string
  #
  # Convert hash to string format used for Java properties file, with keys
  # sorted and tab/newline/formfeed/backslash characters escaped. For example:
  #   { 'foo'=>'bar', 'baz'=>'fab' }.to_java_properties
  #   => foo=bar
  #      baz=fab
  def to_java_properties
    escapes = { "\t"=>"t", "\r"=>"r", "\n"=>"n", "\f"=>"f", "\\"=>"\\" }
    keys.sort.map do |key|
      escaped = self[key].gsub(/[\t\r\n\f\\]/) { |char| "\\" + escapes[char] }
      "#{key}=#{escaped}"
    end.join("\n")
  end
end
# Platform-specific monkey patches: JRuby gets FFI-backed workarounds,
# MRI gets a Rake-derived FileUtils#sh.
if Buildr::Util.java_platform?
  require 'ffi'

  # Fix for BUILDR-292.
  # JRuby fails to rename a file on different devices
  # this monkey-patch wont be needed when JRUBY-3381 gets resolved.
  module FileUtils #:nodoc:
    alias_method :__mv_native, :mv
    # Move that copies+deletes when source and destination live on
    # different devices, falling back to the native rename otherwise.
    def mv(from, to, options = nil)
      dir_to = File.directory?(to) ? to : File.dirname(to)
      Array(from).each do |from|
        dir_from = File.dirname(from)
        if File.stat(dir_from).dev != File.stat(dir_to).dev
          cp from, to, options
          rm from, options
        else
          __mv_native from, to, options
        end
      end
    end
    private :mv
  end

  module RakeFileUtils #:nodoc:
    # Merges Rake's default file-utils options into args; the :default
    # verbosity marker is treated as quiet.
    def rake_merge_option(args, defaults)
      defaults[:verbose] = false if defaults[:verbose] == :default
      if Hash === args.last
        defaults.update(args.last)
        args.pop
      end
      args.push defaults
      args
    end
    private :rake_merge_option
  end

  module Buildr
    # Minimal stand-in for Process::Status. The FFI-based system() call in
    # FileUtils#sh below bypasses JRuby's own child-process bookkeeping, so
    # $? is populated with one of these instead.
    class ProcessStatus
      attr_reader :pid, :termsig, :stopsig, :exitstatus

      def initialize(pid, success, exitstatus)
        @pid = pid
        @success = success
        @exitstatus = exitstatus
        # The C system() call has already reaped the child, so the process
        # is never reported as signal-terminated or stopped.
        @termsig = nil
        @stopsig = nil
      end

      def &(num)
        pid & num
      end

      def ==(other)
        pid == other.pid
      end

      def >>(num)
        pid >> num
      end

      def coredump?
        false
      end

      def exited?
        true
      end

      def stopped?
        false
      end

      def success?
        @success
      end

      def to_i
        pid
      end

      def to_int
        pid
      end

      def to_s
        pid.to_s
      end
    end
  end

  module FileUtils
    extend FFI::Library
    # Alias shuffle: keep JRuby's Kernel#system reachable as system, while
    # exposing the C library's system(3) as __native_system__ for sh below.
    alias_method :__jruby_system__, :system
    attach_function :system, [:string], :int
    alias_method :__native_system__, :system
    alias_method :system, :__jruby_system__

    # code "borrowed" directly from Rake
    def sh(*cmd, &block)
      options = (Hash === cmd.last) ? cmd.pop : {}
      unless block_given?
        show_command = cmd.join(" ")
        show_command = show_command[0,42] + "..."

        block = lambda { |ok, status|
          ok or fail "Command failed with status (#{status.exitstatus}): [#{show_command}]"
        }
      end
      if RakeFileUtils.verbose_flag == :default
        options[:verbose] = false
      else
        options[:verbose] ||= RakeFileUtils.verbose_flag
      end
      options[:noop] ||= RakeFileUtils.nowrite_flag
      rake_check_options options, :noop, :verbose
      rake_output_message cmd.join(" ") if options[:verbose]
      unless options[:noop]
        cd = "cd '#{Dir.pwd}' && "
        args = if cmd.size > 1 then cmd[1..cmd.size] else [] end
        # Single command on Windows runs through `call`; otherwise each
        # argument is quoted and the whole line goes to native system(3).
        res = if Buildr::Util.win_os? && cmd.size == 1
          __native_system__("#{cd} call #{cmd.first}")
        else
          arg_str = args.map { |a| "'#{a}'" }
          __native_system__(cd + cmd.first + ' ' + arg_str.join(' '))
        end
        # NOTE(review): $? is normally read-only in Ruby; this assignment
        # appears to rely on JRuby accepting it — confirm before touching.
        $? = Buildr::ProcessStatus.new(0, res == 0, res) # KLUDGE
        block.call(res == 0, $?)
      end
    end
  end
else
  module FileUtils
    # code "borrowed" directly from Rake
    # MRI variant: same option handling as the JRuby branch in this file,
    # but uses the regular Kernel#system, whose boolean result and genuine
    # $? are handed straight to the block.
    def sh(*cmd, &block)
      options = (Hash === cmd.last) ? cmd.pop : {}
      unless block_given?
        show_command = cmd.join(" ")
        show_command = show_command[0,42] + "..."

        block = lambda { |ok, status|
          ok or fail "Command failed with status (#{status.exitstatus}): [#{show_command}]"
        }
      end
      if RakeFileUtils.verbose_flag == :default
        options[:verbose] = false
      else
        options[:verbose] ||= RakeFileUtils.verbose_flag
      end
      options[:noop] ||= RakeFileUtils.nowrite_flag
      rake_check_options options, :noop, :verbose
      rake_output_message cmd.join(" ") if options[:verbose]
      unless options[:noop]
        cd = "cd '#{Dir.pwd}' && "
        args = if cmd.size > 1 then cmd[1..cmd.size] else [] end
        res = if Buildr::Util.win_os? && cmd.size == 1
          system("#{cd} call #{cmd.first}")
        else
          arg_str = args.map { |a| "'#{a}'" }
          system(cd + cmd.first + ' ' + arg_str.join(' '))
        end
        block.call(res, $?)
      end
    end
  end
end
|
# frozen_string_literal: true
module Bundler
  # Adds newly requested dependencies to the Gemfile and regenerates the
  # lockfile after verifying that the expanded dependency set still resolves.
  class Injector
    # Convenience entry point: inject new_deps into the default
    # Gemfile/lockfile pair.
    def self.inject(new_deps, options = {})
      injector = new(new_deps, options)
      injector.inject(Bundler.default_gemfile, Bundler.default_lockfile)
    end

    # new_deps - dependencies to add.
    # options  - :conservative_versioning writes "~>" pins for resolved versions.
    def initialize(new_deps, options = {})
      @new_deps = new_deps
      @options = options
    end

    # Evaluates the current Gemfile, checks the new dependencies resolve
    # alongside it, appends them to the Gemfile on disk and writes the
    # updated lockfile. Returns the dependencies actually added.
    def inject(gemfile_path, lockfile_path)
      if Bundler.frozen_bundle?
        # ensure the lock and Gemfile are synced
        Bundler.definition.ensure_equivalent_gemfile_and_lockfile(true)
      end

      # temporarily unfreeze
      Bundler.settings.temporary(:deployment => false, :frozen => false) do
        # evaluate the Gemfile we have now
        builder = Dsl.new
        builder.eval_gemfile(gemfile_path)

        # don't inject any gems that are already in the Gemfile
        @new_deps -= builder.dependencies

        # add new deps to the end of the in-memory Gemfile
        # Set conservative versioning to false because we want to let the resolver resolve the version first
        builder.eval_gemfile("injected gems", build_gem_lines(false)) if @new_deps.any?

        # resolve to see if the new deps broke anything
        @definition = builder.to_definition(lockfile_path, {})
        @definition.resolve_remotely!

        # since nothing broke, we can add those gems to the gemfile
        append_to(gemfile_path, build_gem_lines(@options[:conservative_versioning])) if @new_deps.any?

        # since we resolved successfully, write out the lockfile
        @definition.lock(Bundler.default_lockfile)

        # invalidate the cached Bundler.definition
        Bundler.reset_paths!

        # return an array of the deps that we added
        @new_deps
      end
    end

    private

    # Builds a "~>" requirement locking spec to its current major release
    # (major.minor below 1.0), preserving any prerelease suffix. Returns
    # ">= 0" when the spec has no version.
    def conservative_version(spec)
      version = spec.version
      return ">= 0" if version.nil?
      segments = version.segments
      seg_end_index = version >= Gem::Version.new("1.0") ? 1 : 2
      prerelease_suffix = version.to_s.gsub(version.release.to_s, "") if version.prerelease?
      "~> #{segments[0..seg_end_index].join(".")}#{prerelease_suffix}"
    end

    # Renders one `gem "name", <requirement>[, :group(s)][, :source]` line
    # per new dependency.
    def build_gem_lines(conservative_versioning)
      @new_deps.map do |d|
        name = d.name.dump

        requirement = if conservative_versioning
          ", \"#{conservative_version(@definition.specs[d.name][0])}\""
        else
          ", #{d.requirement.as_list.map(&:dump).join(", ")}"
        end

        if d.groups != Array(:default)
          group = d.groups.size == 1 ? ", :group => #{d.groups.inspect}" : ", :groups => #{d.groups.inspect}"
        end

        source = ", :source => \"#{d.source}\"" unless d.source.nil?

        %(gem #{name}#{requirement}#{group}#{source})
      end.join("\n")
    end

    # Appends the rendered gem lines (preceded by a blank line) to the Gemfile.
    #
    # Fixed: dropped the "# Added at <time> by <whoami>" comment. It keyed
    # off the string "timestamp" option while the rest of the class uses
    # symbol keys (so it could never actually be disabled via options),
    # shelled out to `whoami`, and produced noisy Gemfile diffs; the
    # follow-up revision (bundler PR #6300) removes it.
    def append_to(gemfile_path, new_gem_lines)
      gemfile_path.open("a") do |f|
        f.puts
        f.puts new_gem_lines
      end
    end
  end
end
Auto merge of #6300 - cpgo:remove-add-timestamp-comment, r=colby-swandale
Remove comment with timestamp on `bundle add`
As discussed in #6193, this PR simply removes the timestamp comment when adding a gem via the command line.
# frozen_string_literal: true
module Bundler
  # Injects newly requested dependencies into the Gemfile and regenerates
  # the lockfile after verifying the expanded dependency set still resolves.
  class Injector
    # Injects new_deps into the default Gemfile and lockfile.
    def self.inject(new_deps, options = {})
      new(new_deps, options).inject(Bundler.default_gemfile, Bundler.default_lockfile)
    end

    # new_deps - dependencies to add.
    # options  - :conservative_versioning writes "~>" pins for resolved versions.
    def initialize(new_deps, options = {})
      @new_deps = new_deps
      @options = options
    end

    # Evaluates the current Gemfile, verifies the new dependencies resolve
    # alongside it, appends them to the Gemfile on disk, and writes the
    # updated lockfile. Returns the dependencies actually added.
    def inject(gemfile_path, lockfile_path)
      # With a frozen bundle, first make sure lock and Gemfile agree.
      Bundler.definition.ensure_equivalent_gemfile_and_lockfile(true) if Bundler.frozen_bundle?

      # Lift deployment/frozen restrictions for the duration of the update.
      Bundler.settings.temporary(:deployment => false, :frozen => false) do
        dsl = Dsl.new
        dsl.eval_gemfile(gemfile_path)

        # Skip gems the Gemfile already declares.
        @new_deps -= dsl.dependencies

        if @new_deps.any?
          # Append the new deps to the in-memory Gemfile without conservative
          # pinning, so the resolver is free to pick versions first.
          dsl.eval_gemfile("injected gems", build_gem_lines(false))
        end

        # Resolve to confirm nothing breaks before touching files on disk.
        @definition = dsl.to_definition(lockfile_path, {})
        @definition.resolve_remotely!

        append_to(gemfile_path, build_gem_lines(@options[:conservative_versioning])) if @new_deps.any?

        # Resolution succeeded: persist the lockfile and drop cached state.
        @definition.lock(Bundler.default_lockfile)
        Bundler.reset_paths!

        @new_deps
      end
    end

    private

    # "~> major.minor" (or "~> 0.x.y" below 1.0) for the spec's current
    # version, preserving any prerelease suffix; ">= 0" without a version.
    def conservative_version(spec)
      version = spec.version
      return ">= 0" if version.nil?
      upper = version >= Gem::Version.new("1.0") ? 1 : 2
      suffix = version.prerelease? ? version.to_s.gsub(version.release.to_s, "") : nil
      "~> #{version.segments[0..upper].join(".")}#{suffix}"
    end

    # One `gem "name", <requirement>[, :group(s)][, :source]` line per dep.
    def build_gem_lines(conservative_versioning)
      lines = @new_deps.map do |dep|
        requirement =
          if conservative_versioning
            ", \"#{conservative_version(@definition.specs[dep.name][0])}\""
          else
            ", #{dep.requirement.as_list.map(&:dump).join(", ")}"
          end

        group = nil
        unless dep.groups == Array(:default)
          key = dep.groups.size == 1 ? ":group" : ":groups"
          group = ", #{key} => #{dep.groups.inspect}"
        end

        source = dep.source.nil? ? nil : ", :source => \"#{dep.source}\""

        %(gem #{dep.name.dump}#{requirement}#{group}#{source})
      end
      lines.join("\n")
    end

    # Appends the rendered lines (preceded by a blank line) to the Gemfile.
    def append_to(gemfile_path, new_gem_lines)
      gemfile_path.open("a") do |io|
        io.puts
        io.puts new_gem_lines
      end
    end
  end
end
|
module Bundler
class Resolver
require "bundler/vendored_molinillo"
class Molinillo::VersionConflict
def message
conflicts.values.flatten.reduce("") do |o, conflict|
o << %(Bundler could not find compatible versions for gem "#{conflict.requirement.name}":\n)
if conflict.locked_requirement
o << %( In snapshot (#{Bundler.default_lockfile.basename}):\n)
o << %( #{conflict.locked_requirement}\n)
o << %(\n)
end
o << %( In Gemfile:\n)
o << conflict.requirement_trees.map do |tree|
t = ""
depth = 2
tree.each do |req|
t << " " * depth << req.to_s
t << %( depends on) unless tree.last == req
t << %(\n)
depth += 1
end
t
end.join("\n")
if conflict.requirement.name == "bundler"
o << %(\n Current Bundler version:\n bundler (#{Bundler::VERSION}))
other_bundler_required = !conflict.requirement.requirement.satisfied_by?(Gem::Version.new Bundler::VERSION)
end
if conflict.requirement.name == "bundler" && other_bundler_required
o << "\n"
o << "This Gemfile requires a different version of Bundler.\n"
o << "Perhaps you need to update Bundler by running `gem install bundler`?\n"
end
if conflict.locked_requirement
o << "\n"
o << %(Running `bundle update` will rebuild your snapshot from scratch, using only\n)
o << %(the gems in your Gemfile, which may resolve the conflict.\n)
elsif !conflict.existing
o << "\n"
if conflict.requirement_trees.first.size > 1
o << "Could not find gem '#{conflict.requirement}', which is required by "
o << "gem '#{conflict.requirement_trees.first[-2]}', in any of the sources."
else
o << "Could not find gem '#{conflict.requirement}' in any of the sources\n"
end
end
o
end
end
end
ALL = Bundler::Dependency::PLATFORM_MAP.values.uniq.freeze
class SpecGroup < Array
include GemHelpers
attr_reader :activated, :required_by
def initialize(a)
super
@required_by = []
@activated = []
@dependencies = nil
@specs = {}
ALL.each do |p|
@specs[p] = reverse.find {|s| s.match_platform(p) }
end
end
def initialize_copy(o)
super
@required_by = o.required_by.dup
@activated = o.activated.dup
end
def to_specs
specs = {}
@activated.each do |p|
if s = @specs[p]
platform = generic(Gem::Platform.new(s.platform))
next if specs[platform]
lazy_spec = LazySpecification.new(name, version, platform, source)
lazy_spec.dependencies.replace s.dependencies
specs[platform] = lazy_spec
end
end
specs.values
end
def activate_platform(platform)
unless @activated.include?(platform)
if for?(platform)
@activated << platform
return __dependencies[platform] || []
end
end
[]
end
def name
@name ||= first.name
end
def version
@version ||= first.version
end
def source
@source ||= first.source
end
def for?(platform)
@specs[platform]
end
def to_s
"#{name} (#{version})"
end
def dependencies_for_activated_platforms
@activated.map {|p| __dependencies[p] }.flatten
end
def platforms_for_dependency_named(dependency)
__dependencies.select {|_, deps| deps.map(&:name).include? dependency }.keys
end
private
def __dependencies
@dependencies ||= begin
dependencies = {}
ALL.each do |p|
if spec = @specs[p]
dependencies[p] = []
spec.dependencies.each do |dep|
next if dep.type == :development
dependencies[p] << DepProxy.new(dep, p)
end
end
end
dependencies
end
end
end
# Figures out the best possible configuration of gems that satisfies
# the list of passed dependencies and any child dependencies without
# causing any gem activation errors.
#
# ==== Parameters
# *dependencies<Gem::Dependency>:: The list of dependencies to resolve
#
# ==== Returns
# <GemBundle>,nil:: If the list of dependencies can be resolved, a
# collection of gemspecs is returned. Otherwise, nil is returned.
def self.resolve(requirements, index, source_requirements = {}, base = [])
base = SpecSet.new(base) unless base.is_a?(SpecSet)
resolver = new(index, source_requirements, base)
result = resolver.start(requirements)
SpecSet.new(result)
end
def initialize(index, source_requirements, base)
@index = index
@source_requirements = source_requirements
@base = base
@resolver = Molinillo::Resolver.new(self, self)
@search_for = {}
@base_dg = Molinillo::DependencyGraph.new
@base.each {|ls| @base_dg.add_root_vertex ls.name, Dependency.new(ls.name, ls.version) }
end
# Runs the resolution for +requirements+ and returns the flat list of
# resolved specs. Translates Molinillo's exceptions into Bundler's own
# error types so callers only deal with Bundler errors.
def start(requirements)
verify_gemfile_dependencies_are_found!(requirements)
dg = @resolver.resolve(requirements, @base_dg)
dg.map(&:payload).map(&:to_specs).flatten
rescue Molinillo::VersionConflict => e
raise VersionConflict.new(e.conflicts.keys.uniq, e.message)
rescue Molinillo::CircularDependencyError => e
names = e.dependencies.sort_by(&:name).map {|d| "gem '#{d.name}'" }
raise CyclicDependencyError, "Your bundle requires gems that depend" \
" on each other, creating an infinite loop. Please remove" \
" #{names.count > 1 ? "either " : "" }#{names.join(" or ")}" \
" and try again."
end
include Molinillo::UI
# Conveys debug information to the user.
#
# @param [Integer] depth the current depth of the resolution process.
# @return [void]
def debug(depth = 0)
if debug?
debug_info = yield
debug_info = debug_info.inspect unless debug_info.is_a?(String)
STDERR.puts debug_info.split("\n").map {|s| " " * depth + s }
end
end
# Debug output is opt-in via environment variables.
def debug?
ENV["DEBUG_RESOLVER"] || ENV["DEBUG_RESOLVER_TREE"]
end
# Molinillo UI hook: called once before resolution begins.
def before_resolution
Bundler.ui.info "Resolving dependencies...", false
end
# Molinillo UI hook: called once after resolution finishes.
def after_resolution
Bundler.ui.info ""
end
# Molinillo UI hook: prints one progress dot per resolution step.
def indicate_progress
Bundler.ui.info ".", false
end
private
include Molinillo::SpecificationProvider
# Molinillo hook: the dependencies of a SpecGroup, restricted to the
# platforms that have been activated for it.
def dependencies_for(specification)
specification.dependencies_for_activated_platforms
end
# Molinillo hook: finds every SpecGroup matching +dependency+, grouped by
# version, filtered through any locked requirement, then activates the
# dependency's platform on each result. Memoized per Gem::Dependency.
def search_for(dependency)
platform = dependency.__platform
dependency = dependency.dep unless dependency.is_a? Gem::Dependency
search = @search_for[dependency] ||= begin
# A gem pinned to a specific source searches only that source's index.
index = @source_requirements[dependency.name] || @index
results = index.search(dependency, @base[dependency.name])
if vertex = @base_dg.vertex_named(dependency.name)
locked_requirement = vertex.payload.requirement
end
if results.any?
version = results.first.version
nested = [[]]
# Partition the results into runs of specs sharing the same version;
# assumes the index returns results ordered by version — TODO confirm.
results.each do |spec|
if spec.version != version
nested << []
version = spec.version
end
nested.last << spec
end
groups = nested.map {|a| SpecGroup.new(a) }
# When the gem is locked, only groups satisfying the locked
# requirement remain candidates.
!locked_requirement ? groups : groups.select {|sg| locked_requirement.satisfied_by? sg.version }
else
[]
end
end
search.select {|sg| sg.for?(platform) }.each {|sg| sg.activate_platform(platform) }
end
# Molinillo hook: the name identifying +dependency+ in the graph.
def name_for(dependency)
dependency.name
end
# Label shown for requirements that come from the Gemfile itself.
# NOTE(review): the inline `rescue` modifier swallows all errors; it is a
# deliberate best-effort fallback to the conventional filename.
def name_for_explicit_dependency_source
Bundler.default_gemfile.basename.to_s rescue "Gemfile"
end
# Label shown for requirements that come from the lockfile.
def name_for_locking_dependency_source
Bundler.default_lockfile.basename.to_s rescue "Gemfile.lock"
end
# Molinillo hook: does +spec+ satisfy +requirement+? (+activated+ unused.)
def requirement_satisfied_by?(requirement, activated, spec)
requirement.matches_spec?(spec)
end
# Molinillo hook: orders dependencies most-constrained-first — activated
# gems, then tighter requirements, then gems already in conflict, then
# fewest candidate groups.
def sort_dependencies(dependencies, activated, conflicts)
dependencies.sort_by do |dependency|
name = name_for(dependency)
[
activated.vertex_named(name).payload ? 0 : 1,
amount_constrained(dependency),
conflicts[name] ? 0 : 1,
activated.vertex_named(name).payload ? 0 : search_for(dependency).count,
]
end
end
# Scores how constrained +dependency+ is, from 0 (fully pinned) toward 1
# (barely constrained); lower scores sort earlier in #sort_dependencies.
# Memoized per gem name.
def amount_constrained(dependency)
@amount_constrained ||= {}
@amount_constrained[dependency.name] ||= begin
# NOTE: `(x = y) && cond` replaces the original `x = y and cond`; `and`
# binds looser than `=` so behavior is identical, but `&&` avoids the
# precedence trap and satisfies the style guide.
if (base = @base[dependency.name]) && !base.empty?
# Locked gems are either satisfied by the lock (fully pinned) or not.
dependency.requirement.satisfied_by?(base.first.version) ? 0 : 1
else
# Otherwise: the fraction of all known versions that match.
base_dep = Dependency.new dependency.name, ">= 0.a"
all = search_for(DepProxy.new base_dep, dependency.__platform).size.to_f
if all.zero?
0
elsif (search = search_for(dependency).size.to_f) == all && all == 1
0
else
search / all
end
end
end
end
# Fails fast with GemNotFound when any top-level Gemfile requirement has no
# candidates at all, composing the most helpful message available.
def verify_gemfile_dependencies_are_found!(requirements)
requirements.each do |requirement|
# Bundler itself is not resolved like an ordinary gem.
next if requirement.name == "bundler"
if search_for(requirement).empty?
if base = @base[requirement.name] and !base.empty?
# The gem is locked, but the lock no longer satisfies the Gemfile.
version = base.first.version
message = "You have requested:\n" \
" #{requirement.name} #{requirement.requirement}\n\n" \
"The bundle currently has #{requirement.name} locked at #{version}.\n" \
"Try running `bundle update #{requirement.name}`"
elsif requirement.source
# The gem is pinned to a source; report what that source does hold.
name = requirement.name
versions = @source_requirements[name][name].map(&:version)
message = "Could not find gem '#{requirement}' in #{requirement.source}.\n"
if versions.any?
message << "Source contains '#{name}' at: #{versions.join(", ")}"
else
message << "Source does not contain any versions of '#{requirement}'"
end
else
message = "Could not find gem '#{requirement}' in any of the gem sources " \
"listed in your Gemfile or available on this machine."
end
raise GemNotFound, message
end
end
end
end
end
[Resolver] No need to grab the name off of the conflicting requirement
module Bundler
class Resolver
require "bundler/vendored_molinillo"
# Monkey-patch of Molinillo's VersionConflict to render Bundler-flavored
# conflict messages (lockfile snapshot, Gemfile requirement trees, and
# bundler-version hints).
class Molinillo::VersionConflict
def message
conflicts.reduce("") do |o, (name, conflict)|
o << %(Bundler could not find compatible versions for gem "#{name}":\n)
# Show what the lockfile currently pins, when the gem is locked.
if conflict.locked_requirement
o << %( In snapshot (#{Bundler.default_lockfile.basename}):\n)
o << %( #{conflict.locked_requirement}\n)
o << %(\n)
end
# Render each requirement tree with increasing indentation per level.
o << %( In Gemfile:\n)
o << conflict.requirement_trees.map do |tree|
t = ""
depth = 2
tree.each do |req|
t << " " * depth << req.to_s
t << %( depends on) unless tree.last == req
t << %(\n)
depth += 1
end
t
end.join("\n")
# A conflict on "bundler" itself gets a dedicated hint.
if name == "bundler"
o << %(\n Current Bundler version:\n bundler (#{Bundler::VERSION}))
other_bundler_required = !conflict.requirement.requirement.satisfied_by?(Gem::Version.new Bundler::VERSION)
end
if name == "bundler" && other_bundler_required
o << "\n"
o << "This Gemfile requires a different version of Bundler.\n"
o << "Perhaps you need to update Bundler by running `gem install bundler`?\n"
end
if conflict.locked_requirement
o << "\n"
o << %(Running `bundle update` will rebuild your snapshot from scratch, using only\n)
o << %(the gems in your Gemfile, which may resolve the conflict.\n)
elsif !conflict.existing
# No version of the gem exists at all in the searched sources.
o << "\n"
if conflict.requirement_trees.first.size > 1
o << "Could not find gem '#{conflict.requirement}', which is required by "
o << "gem '#{conflict.requirement_trees.first[-2]}', in any of the sources."
else
o << "Could not find gem '#{conflict.requirement}' in any of the sources\n"
end
end
o
end
end
end
ALL = Bundler::Dependency::PLATFORM_MAP.values.uniq.freeze
class SpecGroup < Array
include GemHelpers
attr_reader :activated, :required_by
def initialize(a)
super
@required_by = []
@activated = []
@dependencies = nil
@specs = {}
ALL.each do |p|
@specs[p] = reverse.find {|s| s.match_platform(p) }
end
end
def initialize_copy(o)
super
@required_by = o.required_by.dup
@activated = o.activated.dup
end
def to_specs
specs = {}
@activated.each do |p|
if s = @specs[p]
platform = generic(Gem::Platform.new(s.platform))
next if specs[platform]
lazy_spec = LazySpecification.new(name, version, platform, source)
lazy_spec.dependencies.replace s.dependencies
specs[platform] = lazy_spec
end
end
specs.values
end
# Marks +platform+ as activated for this spec group and returns the
# dependencies declared for that platform. Returns an empty array when the
# platform is already activated or this group has no spec for it.
def activate_platform(platform)
  return [] if @activated.include?(platform)
  return [] unless for?(platform)
  @activated << platform
  __dependencies[platform] || []
end
def name
@name ||= first.name
end
def version
@version ||= first.version
end
def source
@source ||= first.source
end
def for?(platform)
@specs[platform]
end
def to_s
"#{name} (#{version})"
end
def dependencies_for_activated_platforms
@activated.map {|p| __dependencies[p] }.flatten
end
def platforms_for_dependency_named(dependency)
__dependencies.select {|_, deps| deps.map(&:name).include? dependency }.keys
end
private
def __dependencies
@dependencies ||= begin
dependencies = {}
ALL.each do |p|
if spec = @specs[p]
dependencies[p] = []
spec.dependencies.each do |dep|
next if dep.type == :development
dependencies[p] << DepProxy.new(dep, p)
end
end
end
dependencies
end
end
end
# Figures out the best possible configuration of gems that satisfies
# the list of passed dependencies and any child dependencies without
# causing any gem activation errors.
#
# ==== Parameters
# *dependencies<Gem::Dependency>:: The list of dependencies to resolve
#
# ==== Returns
# <GemBundle>,nil:: If the list of dependencies can be resolved, a
# collection of gemspecs is returned. Otherwise, nil is returned.
def self.resolve(requirements, index, source_requirements = {}, base = [])
base = SpecSet.new(base) unless base.is_a?(SpecSet)
resolver = new(index, source_requirements, base)
result = resolver.start(requirements)
SpecSet.new(result)
end
def initialize(index, source_requirements, base)
@index = index
@source_requirements = source_requirements
@base = base
@resolver = Molinillo::Resolver.new(self, self)
@search_for = {}
@base_dg = Molinillo::DependencyGraph.new
@base.each {|ls| @base_dg.add_root_vertex ls.name, Dependency.new(ls.name, ls.version) }
end
def start(requirements)
verify_gemfile_dependencies_are_found!(requirements)
dg = @resolver.resolve(requirements, @base_dg)
dg.map(&:payload).map(&:to_specs).flatten
rescue Molinillo::VersionConflict => e
raise VersionConflict.new(e.conflicts.keys.uniq, e.message)
rescue Molinillo::CircularDependencyError => e
names = e.dependencies.sort_by(&:name).map {|d| "gem '#{d.name}'" }
raise CyclicDependencyError, "Your bundle requires gems that depend" \
" on each other, creating an infinite loop. Please remove" \
" #{names.count > 1 ? "either " : "" }#{names.join(" or ")}" \
" and try again."
end
include Molinillo::UI
# Conveys debug information to the user.
#
# @param [Integer] depth the current depth of the resolution process.
# @return [void]
def debug(depth = 0)
if debug?
debug_info = yield
debug_info = debug_info.inspect unless debug_info.is_a?(String)
STDERR.puts debug_info.split("\n").map {|s| " " * depth + s }
end
end
def debug?
ENV["DEBUG_RESOLVER"] || ENV["DEBUG_RESOLVER_TREE"]
end
def before_resolution
Bundler.ui.info "Resolving dependencies...", false
end
def after_resolution
Bundler.ui.info ""
end
def indicate_progress
Bundler.ui.info ".", false
end
private
include Molinillo::SpecificationProvider
def dependencies_for(specification)
specification.dependencies_for_activated_platforms
end
def search_for(dependency)
platform = dependency.__platform
dependency = dependency.dep unless dependency.is_a? Gem::Dependency
search = @search_for[dependency] ||= begin
index = @source_requirements[dependency.name] || @index
results = index.search(dependency, @base[dependency.name])
if vertex = @base_dg.vertex_named(dependency.name)
locked_requirement = vertex.payload.requirement
end
if results.any?
version = results.first.version
nested = [[]]
results.each do |spec|
if spec.version != version
nested << []
version = spec.version
end
nested.last << spec
end
groups = nested.map {|a| SpecGroup.new(a) }
!locked_requirement ? groups : groups.select {|sg| locked_requirement.satisfied_by? sg.version }
else
[]
end
end
search.select {|sg| sg.for?(platform) }.each {|sg| sg.activate_platform(platform) }
end
def name_for(dependency)
dependency.name
end
def name_for_explicit_dependency_source
Bundler.default_gemfile.basename.to_s rescue "Gemfile"
end
def name_for_locking_dependency_source
Bundler.default_lockfile.basename.to_s rescue "Gemfile.lock"
end
def requirement_satisfied_by?(requirement, activated, spec)
requirement.matches_spec?(spec)
end
def sort_dependencies(dependencies, activated, conflicts)
dependencies.sort_by do |dependency|
name = name_for(dependency)
[
activated.vertex_named(name).payload ? 0 : 1,
amount_constrained(dependency),
conflicts[name] ? 0 : 1,
activated.vertex_named(name).payload ? 0 : search_for(dependency).count,
]
end
end
# Scores how constrained +dependency+ is, from 0 (fully pinned) toward 1
# (barely constrained); lower scores sort earlier in #sort_dependencies.
# Memoized per gem name.
def amount_constrained(dependency)
@amount_constrained ||= {}
@amount_constrained[dependency.name] ||= begin
# NOTE: `(x = y) && cond` replaces the original `x = y and cond`; `and`
# binds looser than `=` so behavior is identical, but `&&` avoids the
# precedence trap and satisfies the style guide.
if (base = @base[dependency.name]) && !base.empty?
# Locked gems are either satisfied by the lock (fully pinned) or not.
dependency.requirement.satisfied_by?(base.first.version) ? 0 : 1
else
# Otherwise: the fraction of all known versions that match.
base_dep = Dependency.new dependency.name, ">= 0.a"
all = search_for(DepProxy.new base_dep, dependency.__platform).size.to_f
if all.zero?
0
elsif (search = search_for(dependency).size.to_f) == all && all == 1
0
else
search / all
end
end
end
end
def verify_gemfile_dependencies_are_found!(requirements)
requirements.each do |requirement|
next if requirement.name == "bundler"
if search_for(requirement).empty?
if base = @base[requirement.name] and !base.empty?
version = base.first.version
message = "You have requested:\n" \
" #{requirement.name} #{requirement.requirement}\n\n" \
"The bundle currently has #{requirement.name} locked at #{version}.\n" \
"Try running `bundle update #{requirement.name}`"
elsif requirement.source
name = requirement.name
versions = @source_requirements[name][name].map(&:version)
message = "Could not find gem '#{requirement}' in #{requirement.source}.\n"
if versions.any?
message << "Source contains '#{name}' at: #{versions.join(", ")}"
else
message << "Source does not contain any versions of '#{requirement}'"
end
else
message = "Could not find gem '#{requirement}' in any of the gem sources " \
"listed in your Gemfile or available on this machine."
end
raise GemNotFound, message
end
end
end
end
end
|
module Bundleup
# Gem version (semantic versioning); frozen to avoid accidental mutation.
VERSION = "0.7.0".freeze
end
Preparing v0.8.0
module Bundleup
# Gem version (semantic versioning); frozen to avoid accidental mutation.
VERSION = "0.8.0".freeze
end
|
# Background job that runs lifecycle hooks for a persisted classification.
class ClassificationWorker
  include Sidekiq::Worker
  sidekiq_options queue: :high, retry: 3

  # @param id [Integer] Classification primary key
  # @param action [String, Symbol] "create" or "update". Sidekiq round-trips
  #   job arguments through JSON, so a Symbol passed to perform_async arrives
  #   here as a String; normalize so both direct calls and dequeued jobs work.
  def perform(id, action)
    classification = ClassificationLifecycle.new(Classification.find(id))
    # The original Symbol `when` clauses (:update / :create) silently never
    # matched JSON-deserialized String arguments.
    case action.to_s
    when "update"
      classification.transact!
    when "create"
      classification.transact! { create_project_preference }
    end
  end
end
retry classification processing for as long as we can
# Background job that runs lifecycle hooks for a persisted classification.
# Uses Sidekiq's default retry policy so processing is retried as long as possible.
class ClassificationWorker
  include Sidekiq::Worker
  sidekiq_options queue: :high

  # @param id [Integer] Classification primary key
  # @param action [String, Symbol] "create" or "update". Sidekiq round-trips
  #   job arguments through JSON, so a Symbol passed to perform_async arrives
  #   here as a String; normalize so both direct calls and dequeued jobs work.
  def perform(id, action)
    classification = ClassificationLifecycle.new(Classification.find(id))
    # The original Symbol `when` clauses (:update / :create) silently never
    # matched JSON-deserialized String arguments.
    case action.to_s
    when "update"
      classification.transact!
    when "create"
      classification.transact! { create_project_preference }
    end
  end
end
|
module CaChing
  # Semantic version of the gem, assembled from its numeric segments.
  module Version
    MAJOR = 0
    MINOR = 1
    PATCH = 0
    PRE = nil

    # Dotted version string; the PRE segment is appended only when set.
    def self.to_s
      segments = [MAJOR, MINOR, PATCH]
      segments << PRE unless PRE.nil?
      segments.join('.')
    end
  end
end
Bump version number
module CaChing
  # Semantic version of the gem, assembled from its numeric segments.
  module Version
    MAJOR = 0
    MINOR = 1
    PATCH = 1
    PRE = nil

    # Dotted version string; the PRE segment is appended only when set.
    def self.to_s
      segments = [MAJOR, MINOR, PATCH]
      segments << PRE unless PRE.nil?
      segments.join('.')
    end
  end
end
|
module Calvin
class AST
# An arithmetic progression built from either (first, last) — implicit
# step of ±1 — or (first, second, last) — step of second - first.
class Range
include Enumerable
attr_accessor :first, :second, :last
# Accepts 2 or 3 numbers; any other arity raises Core::ImpossibleException.
def initialize(*numbers)
if numbers.size == 2
@first, @last = numbers
elsif numbers.size == 3
@first, @second, @last = numbers
else
raise Core::ImpossibleException.new "Only 2 or 3 parameters are allowed. You passed in #{numbers.size}."
end
end
# Yields elements in order; direction is inferred when no second element
# was given, otherwise Numeric#step is used with the explicit step.
def each
if @second.nil?
method = @first > @last ? :downto : :upto
@first.send(method, @last) do |i|
yield i
end
else
@first.step(@last, @second - @first) do |i|
yield i
end
end
end
# Element count. NOTE(review): relies on integer division of
# (last - first) / step; assumes last lies near the progression —
# verify against each for bounds that are off the step grid.
def size
1 + ((last - first) / step).abs
end
# Effective step: explicit (second - first) or inferred ±1.
def step
second ? second - first : (first > last ? -1 : 1)
end
# Equality: duck-typed first/last match for implicit-step ranges;
# otherwise compares against Arrays and other AST::Ranges.
def ==(other)
if @second.nil?
other.respond_to?(:first) && other.first == @first &&
other.respond_to?(:last) && other.last == @last
else
case other
when Array
to_a == other
when AST::Range
other.first == first && other.step == step &&
other.last == last
else
false
end
end
end
def inspect
to_a.inspect
end
end
end
end
Major refactor of AST::Range.
module Calvin
  class AST
    # An arithmetic progression built from either (first, last) — implicit
    # step of ±1 — or (first, second, last) — step of second - first. The
    # upper bound is snapped onto the progression at construction time.
    class Range
      include Enumerable

      attr_reader :first, :last, :step, :size

      # @param numbers [Array<Numeric>] (first, last) or (first, second, last);
      #   any other arity raises Core::ImpossibleException.
      def initialize(*numbers)
        case numbers.size
        when 2
          @first, @last = numbers
          @step = @first < @last ? 1 : -1
          @size = (@last - @first).abs + 1
        when 3
          @first, second, @last = numbers
          @step = second - @first
          @size = (@last - @first) / @step + 1
          # Snap the bound onto the progression so #each stops exactly on it.
          @last = @first + @step * (@size - 1)
        else
          raise Core::ImpossibleException.new "Only 2 or 3 parameters are allowed. You passed in #{numbers.size}."
        end
      end

      # Yields each element from first to last, advancing by +step+.
      def each
        @first.step(@last, @step) { |value| yield value }
      end

      # Equality against other AST::Ranges (endpoints and step), core Ranges
      # (only for unit steps), and Arrays (element-wise).
      def ==(other)
        if other.is_a?(AST::Range)
          first == other.first && step == other.step && last == other.last
        elsif other.is_a?(::Range)
          step.abs == 1 && first == other.first && last == other.last
        elsif other.is_a?(Array)
          to_a == other
        else
          false
        end
      end

      def inspect
        to_a.inspect
      end
    end
  end
end
|
module Cans
# Sinatra app for browsing loaded Ruby modules: their methods, ancestry,
# and nested modules. May be mounted under an arbitrary prefix (see #link).
class Application < Sinatra::Base
set :views, File.dirname(__FILE__) + '/views'
# Index: every constant under Object that is a Module, sorted by name.
get '/' do
@constants = Object.constants
@modules = @constants.map{ |c| Object.const_get c}.select{ |c| c.kind_of? Module}.sort_by(&:name)
haml :index
end
# Module page: instance/class methods, ancestors, and nested modules of
# the module addressed by the splat path.
get '/module/*' do
@address = Address.new(params[:splat].first)
@module = @address.target_module
@local_instance_methods = @module.instance_methods false
@all_instance_methods = @module.instance_methods true
@super_instance_methods = @all_instance_methods - @local_instance_methods
@class_methods = @module.methods
@ancestors = @module.ancestors
@child_modules = @module.constants.map{ |c| @module.const_get c}.select{ |c| c.kind_of? Module}.sort_by(&:name)
haml :module
end
# Method page: a single method resolved from the splat address.
get '/method/*' do
@address = Address.new(params[:splat].first)
@module = @address.target_module
@method = @address.target_method
haml :method
end
helpers do
# Builds an <a> tag honoring the rack-mount prefix the app is mounted at.
def link(destination, content)
prefix = request.env['rack.mount.prefix'] || ''
href = prefix + destination
"<a href='#{href}'>#{content}</a>"
end
end
end
end
indent
module Cans
# Sinatra app for browsing loaded Ruby modules: their methods, ancestry,
# and nested modules. May be mounted under an arbitrary prefix (see #link).
class Application < Sinatra::Base
set :views, File.dirname(__FILE__) + '/views'
# Index: every constant under Object that is a Module, sorted by name.
get '/' do
@constants = Object.constants
@modules = @constants.map{ |c| Object.const_get c}.select{ |c| c.kind_of? Module}.sort_by(&:name)
haml :index
end
# Module page: instance/class methods, ancestors, and nested modules of
# the module addressed by the splat path.
get '/module/*' do
@address = Address.new(params[:splat].first)
@module = @address.target_module
@local_instance_methods = @module.instance_methods false
@all_instance_methods = @module.instance_methods true
@super_instance_methods = @all_instance_methods - @local_instance_methods
@class_methods = @module.methods
@ancestors = @module.ancestors
@child_modules = @module.constants.map{ |c| @module.const_get c}.select{ |c| c.kind_of? Module}.sort_by(&:name)
haml :module
end
# Method page: a single method resolved from the splat address.
get '/method/*' do
@address = Address.new(params[:splat].first)
@module = @address.target_module
@method = @address.target_method
haml :method
end
helpers do
# Builds an <a> tag honoring the rack-mount prefix the app is mounted at.
def link(destination, content)
prefix = request.env['rack.mount.prefix'] || ''
href = prefix + destination
"<a href='#{href}'>#{content}</a>"
end
end
end
end
|
require "capistrano-rbenv/version"
require "capistrano/configuration"
require "capistrano/recipes/deploy/scm"
module Capistrano
module RbEnv
def self.extended(configuration)
configuration.load {
namespace(:rbenv) {
_cset(:rbenv_path) {
capture("echo $HOME/.rbenv").chomp()
}
_cset(:rbenv_bin) {
File.join(rbenv_path, 'bin', 'rbenv')
}
_cset(:rbenv_cmd) { # to use custom rbenv_path, we use `env` instead of cap's default_environment.
"env RBENV_VERSION=#{rbenv_ruby_version.dump} #{rbenv_bin}"
}
_cset(:rbenv_repository, 'git://github.com/sstephenson/rbenv.git')
_cset(:rbenv_branch, 'master')
_cset(:rbenv_plugins) {{
"ruby-build" => { :repository => "git://github.com/sstephenson/ruby-build.git", :branch => "master" },
}}
_cset(:rbenv_plugins_options, {}) # for backward compatibility. plugin options can be configured from :rbenv_plugins.
_cset(:rbenv_plugins_path) {
File.join(rbenv_path, 'plugins')
}
_cset(:rbenv_ruby_version, "1.9.3-p327")
_cset(:rbenv_use_bundler, true)
set(:bundle_cmd) { # override bundle_cmd in "bundler/capistrano"
rbenv_use_bundler ? "#{rbenv_cmd} exec bundle" : 'bundle'
}
_cset(:rbenv_install_dependencies, true)
desc("Setup rbenv.")
task(:setup, :except => { :no_release => true }) {
dependencies if rbenv_install_dependencies
update
configure
build
setup_bundler if rbenv_use_bundler
}
after 'deploy:setup', 'rbenv:setup'
# Clones or updates a git repository at +destination+, using a scratch
# Capistrano configuration so SCM settings can be overridden per call.
def rbenv_update_repository(destination, options={})
configuration = Capistrano::Configuration.new()
options = {
:source => proc { Capistrano::Deploy::SCM.new(configuration[:scm], configuration) },
:revision => proc { configuration[:source].head },
:real_revision => proc {
configuration[:source].local.query_revision(configuration[:revision]) { |cmd| with_env("LC_ALL", "C") { run_locally(cmd) } }
},
}.merge(options)
variables.merge(options).each do |key, val|
configuration.set(key, val)
end
source = configuration[:source]
revision = configuration[:real_revision]
#
# we cannot use source.sync since it cleans up untracked files in the repository.
# currently we are just calling git sub-commands directly to avoid the problems.
#
verbose = configuration[:scm_verbose] ? nil : "-q"
run((<<-EOS).gsub(/\s+/, ' ').strip)
if [ -d #{destination} ]; then
cd #{destination} &&
#{source.command} fetch #{verbose} #{source.origin} &&
#{source.command} fetch --tags #{verbose} #{source.origin} &&
#{source.command} reset #{verbose} --hard #{revision};
else
#{source.checkout(revision, destination)};
fi
EOS
end
desc("Update rbenv installation.")
task(:update, :except => { :no_release => true }) {
rbenv_update_repository(rbenv_path, :scm => :git, :repository => rbenv_repository, :branch => rbenv_branch)
plugins.update
}
_cset(:rbenv_define_default_environment, true)
def setup_default_environment
if rbenv_define_default_environment
env = fetch(:default_environment)
env['RBENV_ROOT'] = rbenv_path
env['PATH'] = "#{rbenv_path}/shims:#{rbenv_path}/bin:#{env['PATH'] || '$PATH'}"
end
end
on :start do
if top.namespaces.key?(:multistage)
after "multistage:ensure" do
setup_default_environment
end
else
setup_default_environment
end
end
desc("Purge rbenv.")
task(:purge, :except => { :no_release => true }) {
run("rm -rf #{rbenv_path}")
}
namespace(:plugins) {
desc("Update rbenv plugins.")
task(:update, :except => { :no_release => true }) {
rbenv_plugins.each do |name, repository|
# for backward compatibility, obtain plugin options from :rbenv_plugins_options first
options = rbenv_plugins_options.fetch(name, {})
options = options.merge(Hash === repository ? repository : {:repository => repository})
rbenv_update_repository(File.join(rbenv_plugins_path, name), options.merge(:scm => :git))
end
}
}
_cset(:rbenv_configure_home) { capture("echo $HOME").chomp }
_cset(:rbenv_configure_shell) { capture("echo $SHELL").chomp }
_cset(:rbenv_configure_files) {
if fetch(:rbenv_configure_basenames, nil)
[ rbenv_configure_basenames ].flatten.map { |basename|
File.join(rbenv_configure_home, basename)
}
else
bash_profile = File.join(rbenv_configure_home, '.bash_profile')
profile = File.join(rbenv_configure_home, '.profile')
case File.basename(rbenv_configure_shell)
when /bash/
[ capture("test -f #{profile.dump} && echo #{profile.dump} || echo #{bash_profile.dump}").chomp ]
when /zsh/
[ File.join(rbenv_configure_home, '.zshenv') ]
else # other sh compatible shell such like dash
[ profile ]
end
end
}
_cset(:rbenv_configure_script) {
(<<-EOS).gsub(/^\s*/, '')
# Configured by capistrano-rbenv. Do not edit directly.
export PATH="#{rbenv_path}/bin:$PATH"
eval "$(rbenv init -)"
EOS
}
_cset(:rbenv_configure_signature, '##rbenv:configure')
task(:configure, :except => { :no_release => true }) {
if fetch(:rbenv_use_configure, true)
script = File.join('/tmp', "rbenv.#{$$}")
config = [ rbenv_configure_files ].flatten
config_map = Hash[ config.map { |f| [f, File.join('/tmp', "#{File.basename(f)}.#{$$}")] } ]
begin
execute = []
put(rbenv_configure_script, script)
config_map.each { |file, temp|
## (1) copy original config to temporaly file and then modify
execute << "( test -f #{file} || touch #{file} )"
execute << "cp -fp #{file} #{temp}"
execute << "sed -i -e '/^#{Regexp.escape(rbenv_configure_signature)}/,/^#{Regexp.escape(rbenv_configure_signature)}/d' #{temp}"
execute << "echo #{rbenv_configure_signature.dump} >> #{temp}"
execute << "cat #{script} >> #{temp}"
execute << "echo #{rbenv_configure_signature.dump} >> #{temp}"
## (2) update config only if it is needed
execute << "cp -fp #{file} #{file}.orig"
execute << "( diff -u #{file} #{temp} || mv -f #{temp} #{file} )"
}
run(execute.join(' && '))
ensure
remove = [ script ] + config_map.values
run("rm -f #{remove.join(' ')}") rescue nil
end
end
}
_cset(:rbenv_platform) {
capture((<<-EOS).gsub(/\s+/, ' ')).strip
if test -f /etc/debian_version; then
if test -f /etc/lsb-release && grep -i -q DISTRIB_ID=Ubuntu /etc/lsb-release; then
echo ubuntu;
else
echo debian;
fi;
elif test -f /etc/redhat-release; then
echo redhat;
else
echo unknown;
fi;
EOS
}
_cset(:rbenv_ruby_dependencies) {
case rbenv_platform
when /(debian|ubuntu)/i
%w(git-core build-essential libreadline6-dev zlib1g-dev libssl-dev bison)
when /redhat/i
%w(git-core autoconf glibc-devel patch readline readline-devel zlib zlib-devel openssl bison)
else
[]
end
}
task(:dependencies, :except => { :no_release => true }) {
unless rbenv_ruby_dependencies.empty?
case rbenv_platform
when /(debian|ubuntu)/i
begin
run("dpkg-query -s #{rbenv_ruby_dependencies.join(' ')} > /dev/null")
rescue
run("#{sudo} apt-get install -q -y #{rbenv_ruby_dependencies.join(' ')}")
end
when /redhat/i
begin
run("rpm -qi #{rbenv_ruby_dependencies.join(' ')} > /dev/null")
rescue
run("#{sudo} yum install -q -y #{rbenv_ruby_dependencies.join(' ')}")
end
else
# nop
end
end
}
desc("Build ruby within rbenv.")
task(:build, :except => { :no_release => true }) {
ruby = fetch(:rbenv_ruby_cmd, 'ruby')
if rbenv_ruby_version != 'system'
run("#{rbenv_bin} whence #{ruby} | fgrep -q #{rbenv_ruby_version} || #{rbenv_bin} install #{rbenv_ruby_version}")
end
run("#{rbenv_cmd} exec #{ruby} --version && #{rbenv_cmd} global #{rbenv_ruby_version}")
}
_cset(:rbenv_bundler_gem, 'bundler')
task(:setup_bundler, :except => { :no_release => true }) {
gem = "#{rbenv_cmd} exec gem"
if v = fetch(:rbenv_bundler_version, nil)
q = "-n #{rbenv_bundler_gem} -v #{v}"
f = "fgrep #{rbenv_bundler_gem} | fgrep #{v}"
i = "-v #{v} #{rbenv_bundler_gem}"
else
q = "-n #{rbenv_bundler_gem}"
f = "fgrep #{rbenv_bundler_gem}"
i = "#{rbenv_bundler_gem}"
end
run("unset -v GEM_HOME; #{gem} query #{q} 2>/dev/null | #{f} || #{gem} install -q #{i}")
run("#{rbenv_cmd} rehash && #{bundle_cmd} version")
}
}
}
end
end
end
if Capistrano::Configuration.instance
Capistrano::Configuration.instance.extend(Capistrano::RbEnv)
end
# vim:set ft=ruby ts=2 sw=2 :
add `:rbenv_bin_path` and `:rbenv_shims_path` to manage paths for rbenv executables.
require "capistrano-rbenv/version"
require "capistrano/configuration"
require "capistrano/recipes/deploy/scm"
module Capistrano
module RbEnv
def self.extended(configuration)
configuration.load {
namespace(:rbenv) {
_cset(:rbenv_root, "$HOME/.rbenv")
_cset(:rbenv_path) {
# expand to actual path to use this value since rbenv may be executed by users other than `:user`.
capture("echo #{rbenv_root.dump}").strip
}
_cset(:rbenv_bin_path) { File.join(rbenv_path, "bin") }
_cset(:rbenv_shims_path) { File.join(rbenv_path, "shims") }
_cset(:rbenv_bin) {
File.join(rbenv_bin_path, "rbenv")
}
_cset(:rbenv_cmd) {
"env RBENV_VERSION=#{rbenv_ruby_version.dump} #{rbenv_bin}"
}
_cset(:rbenv_repository, 'git://github.com/sstephenson/rbenv.git')
_cset(:rbenv_branch, 'master')
_cset(:rbenv_plugins) {{
"ruby-build" => { :repository => "git://github.com/sstephenson/ruby-build.git", :branch => "master" },
}}
_cset(:rbenv_plugins_options, {}) # for backward compatibility. plugin options can be configured from :rbenv_plugins.
_cset(:rbenv_plugins_path) {
File.join(rbenv_path, 'plugins')
}
_cset(:rbenv_ruby_version, "1.9.3-p327")
_cset(:rbenv_use_bundler, true)
set(:bundle_cmd) { # override bundle_cmd in "bundler/capistrano"
rbenv_use_bundler ? "#{rbenv_cmd} exec bundle" : 'bundle'
}
_cset(:rbenv_install_dependencies, true)
desc("Setup rbenv.")
task(:setup, :except => { :no_release => true }) {
dependencies if rbenv_install_dependencies
update
configure
build
setup_bundler if rbenv_use_bundler
}
after 'deploy:setup', 'rbenv:setup'
# Clones or updates a git repository at +destination+, using a scratch
# Capistrano configuration so SCM settings can be overridden per call.
def rbenv_update_repository(destination, options={})
configuration = Capistrano::Configuration.new()
options = {
:source => proc { Capistrano::Deploy::SCM.new(configuration[:scm], configuration) },
:revision => proc { configuration[:source].head },
:real_revision => proc {
configuration[:source].local.query_revision(configuration[:revision]) { |cmd| with_env("LC_ALL", "C") { run_locally(cmd) } }
},
}.merge(options)
variables.merge(options).each do |key, val|
configuration.set(key, val)
end
source = configuration[:source]
revision = configuration[:real_revision]
#
# we cannot use source.sync since it cleans up untracked files in the repository.
# currently we are just calling git sub-commands directly to avoid the problems.
#
verbose = configuration[:scm_verbose] ? nil : "-q"
run((<<-EOS).gsub(/\s+/, ' ').strip)
if [ -d #{destination} ]; then
cd #{destination} &&
#{source.command} fetch #{verbose} #{source.origin} &&
#{source.command} fetch --tags #{verbose} #{source.origin} &&
#{source.command} reset #{verbose} --hard #{revision};
else
#{source.checkout(revision, destination)};
fi
EOS
end
desc("Update rbenv installation.")
task(:update, :except => { :no_release => true }) {
rbenv_update_repository(rbenv_path, :scm => :git, :repository => rbenv_repository, :branch => rbenv_branch)
plugins.update
}
def setup_default_environment
env = fetch(:default_environment, {}).dup
env["RBENV_ROOT"] = rbenv_path
env["PATH"] = [ rbenv_shims_path, rbenv_bin_path, env.fetch("PATH", "$PATH") ].join(":")
set(:default_environment, env)
end
on :start do
if fetch(:rbenv_define_default_environment, true)
# workaround for `multistage` of capistrano-ext.
# https://github.com/yyuu/capistrano-rbenv/pull/5
if top.namespaces.key?(:multistage)
after "multistage:ensure" do
setup_default_environment
end
else
setup_default_environment
end
end
end
desc("Purge rbenv.")
task(:purge, :except => { :no_release => true }) {
run("rm -rf #{rbenv_path.dump}")
}
namespace(:plugins) {
desc("Update rbenv plugins.")
task(:update, :except => { :no_release => true }) {
rbenv_plugins.each do |name, repository|
# for backward compatibility, obtain plugin options from :rbenv_plugins_options first
options = rbenv_plugins_options.fetch(name, {})
options = options.merge(Hash === repository ? repository : {:repository => repository})
rbenv_update_repository(File.join(rbenv_plugins_path, name), options.merge(:scm => :git))
end
}
}
# Remote account's home directory and login shell, used to decide which shell
# startup file(s) should receive the rbenv initialization snippet.
_cset(:rbenv_configure_home) { capture("echo $HOME").chomp }
_cset(:rbenv_configure_shell) { capture("echo $SHELL").chomp }
# Startup files to modify. Explicit :rbenv_configure_basenames wins; otherwise
# pick by shell: bash uses ~/.profile when it exists (else ~/.bash_profile),
# zsh uses ~/.zshenv, any other sh-compatible shell uses ~/.profile.
_cset(:rbenv_configure_files) {
if fetch(:rbenv_configure_basenames, nil)
[ rbenv_configure_basenames ].flatten.map { |basename|
File.join(rbenv_configure_home, basename)
}
else
bash_profile = File.join(rbenv_configure_home, '.bash_profile')
profile = File.join(rbenv_configure_home, '.profile')
case File.basename(rbenv_configure_shell)
when /bash/
[ capture("test -f #{profile.dump} && echo #{profile.dump} || echo #{bash_profile.dump}").chomp ]
when /zsh/
[ File.join(rbenv_configure_home, '.zshenv') ]
else # other sh compatible shell such like dash
[ profile ]
end
end
}
# Shell snippet appended to the startup file: puts rbenv's bin dir on PATH and
# initializes shims/completion via `rbenv init -`.
_cset(:rbenv_configure_script) {
(<<-EOS).gsub(/^\s*/, '')
# Configured by capistrano-rbenv. Do not edit directly.
export PATH=#{[ rbenv_bin_path, "$PATH"].join(":").dump}
eval "$(rbenv init -)"
EOS
}
# Marker line delimiting the managed region inside the startup file, so the
# snippet can be replaced idempotently on subsequent runs.
_cset(:rbenv_configure_signature, '##rbenv:configure')
# Install the rbenv initialization snippet into the user's shell startup
# file(s). The managed region is delimited by rbenv_configure_signature lines,
# making the task idempotent; a file is only rewritten when content changed.
task(:configure, :except => { :no_release => true }) {
if fetch(:rbenv_use_configure, true)
script = File.join('/tmp', "rbenv.#{$$}")
config = [ rbenv_configure_files ].flatten
config_map = Hash[ config.map { |f| [f, File.join('/tmp', "#{File.basename(f)}.#{$$}")] } ]
begin
execute = []
put(rbenv_configure_script, script)
config_map.each { |file, temp|
## (1) copy original config to temporary file and then modify
execute << "( test -f #{file} || touch #{file} )"
execute << "cp -fp #{file} #{temp}"
# Strip any previously installed managed region before appending a fresh
# one between the signature markers.
execute << "sed -i -e '/^#{Regexp.escape(rbenv_configure_signature)}/,/^#{Regexp.escape(rbenv_configure_signature)}/d' #{temp}"
execute << "echo #{rbenv_configure_signature.dump} >> #{temp}"
execute << "cat #{script} >> #{temp}"
execute << "echo #{rbenv_configure_signature.dump} >> #{temp}"
## (2) update config only if it is needed
execute << "cp -fp #{file} #{file}.orig"
execute << "( diff -u #{file} #{temp} || mv -f #{temp} #{file} )"
}
run(execute.join(' && '))
ensure
# Always clean up the uploaded snippet and the temporary copies.
remove = [ script ] + config_map.values
run("rm -f #{remove.join(' ')}") rescue nil
end
end
}
# Coarse platform detection (ubuntu / debian / redhat / unknown), used below to
# choose the package manager and package names for ruby build dependencies.
_cset(:rbenv_platform) {
capture((<<-EOS).gsub(/\s+/, ' ')).strip
if test -f /etc/debian_version; then
if test -f /etc/lsb-release && grep -i -q DISTRIB_ID=Ubuntu /etc/lsb-release; then
echo ubuntu;
else
echo debian;
fi;
elif test -f /etc/redhat-release; then
echo redhat;
else
echo unknown;
fi;
EOS
}
# Packages required to build ruby from source on the detected platform; empty
# on unknown platforms (dependency installation is then skipped).
_cset(:rbenv_ruby_dependencies) {
case rbenv_platform
when /(debian|ubuntu)/i
%w(git-core build-essential libreadline6-dev zlib1g-dev libssl-dev bison)
when /redhat/i
%w(git-core autoconf glibc-devel patch readline readline-devel zlib zlib-devel openssl bison)
else
[]
end
}
# Install ruby build dependencies with the platform's package manager, but only
# when at least one of the packages is not already present.
task(:dependencies, :except => { :no_release => true }) {
unless rbenv_ruby_dependencies.empty?
case rbenv_platform
when /(debian|ubuntu)/i
begin
# dpkg-query exits non-zero if any package is missing, which raises and
# triggers the apt-get install in the rescue branch.
run("dpkg-query -s #{rbenv_ruby_dependencies.join(' ')} > /dev/null")
rescue
run("#{sudo} apt-get install -q -y #{rbenv_ruby_dependencies.join(' ')}")
end
when /redhat/i
begin
# Same probe-then-install pattern using rpm/yum.
run("rpm -qi #{rbenv_ruby_dependencies.join(' ')} > /dev/null")
rescue
run("#{sudo} yum install -q -y #{rbenv_ruby_dependencies.join(' ')}")
end
else
# nop
end
end
}
desc("Build ruby within rbenv.")
# Install the requested ruby version (unless it is 'system' or already
# installed) and make it the rbenv global default.
task(:build, :except => { :no_release => true }) {
ruby = fetch(:rbenv_ruby_cmd, 'ruby')
if rbenv_ruby_version != 'system'
# `rbenv whence` lists versions providing the command; only run the
# (slow) `rbenv install` when the requested version is absent.
run("#{rbenv_bin} whence #{ruby} | fgrep -q #{rbenv_ruby_version} || #{rbenv_bin} install #{rbenv_ruby_version}")
end
run("#{rbenv_cmd} exec #{ruby} --version && #{rbenv_cmd} global #{rbenv_ruby_version}")
}
_cset(:rbenv_bundler_gem, 'bundler')
# Install bundler into the rbenv-managed ruby — optionally pinned to
# :rbenv_bundler_version — then rehash so the bundle command is on PATH.
# q = `gem query` args, f = filter confirming the install, i = install args.
task(:setup_bundler, :except => { :no_release => true }) {
gem = "#{rbenv_cmd} exec gem"
if v = fetch(:rbenv_bundler_version, nil)
q = "-n #{rbenv_bundler_gem} -v #{v}"
f = "fgrep #{rbenv_bundler_gem} | fgrep #{v}"
i = "-v #{v} #{rbenv_bundler_gem}"
else
q = "-n #{rbenv_bundler_gem}"
f = "fgrep #{rbenv_bundler_gem}"
i = "#{rbenv_bundler_gem}"
end
# GEM_HOME is unset so the query/install target the rbenv gem path.
run("unset -v GEM_HOME; #{gem} query #{q} 2>/dev/null | #{f} || #{gem} install -q #{i}")
run("#{rbenv_cmd} rehash && #{bundle_cmd} version")
}
}
}
end
end
end
# Mix the rbenv recipes into the running Capistrano configuration, if any.
if Capistrano::Configuration.instance
Capistrano::Configuration.instance.extend(Capistrano::RbEnv)
end
# vim:set ft=ruby ts=2 sw=2 :
|
#
# Author:: Xabier de Zuazo (<xabier@onddo.com>)
# Copyright:: Copyright (c) 2013 Onddo Labs, SL.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/handler'
require 'chef/resource/directory'
require 'right_aws'
class Chef
  class Handler
    # Chef report handler that publishes the outcome of a Chef run (success
    # or failure) to an AWS SNS topic through the right_aws gem.
    class Sns < ::Chef::Handler
      attr_writer :access_key, :secret_key, :region, :token, :topic_arn

      # config accepts:
      #   :access_key, :secret_key - AWS credentials (required, String).
      #   :topic_arn               - SNS topic ARN to publish to (required).
      #   :region                  - AWS region (optional; defaults to the
      #                              EC2 node's region at publish time).
      #   :token                   - AWS session token (optional).
      def initialize(config={})
        Chef::Log.debug("#{self.class.to_s} initialized.")
        @access_key = config[:access_key]
        @secret_key = config[:secret_key]
        @region = config[:region] if config.has_key?(:region)
        @token = config[:token] if config.has_key?(:token)
        @topic_arn = config[:topic_arn]
      end

      # Called by Chef at the end of the run: validates the configuration and
      # publishes the run report (body + subject) to the configured topic.
      def report
        check_config
        Chef::Log.debug("#{self.class.to_s} reporting.")
        sns.publish(@topic_arn, sns_body, sns_subject)
      end

      protected

      # Fails fast with a descriptive message when a mandatory option is
      # missing or an optional option has the wrong type.
      def check_config
        Chef::Log.debug("#{self.class.to_s} checking handler configuration.")
        raise "access_key not properly set" unless @access_key.kind_of?(String)
        raise "secret_key not properly set" unless @secret_key.kind_of?(String)
        raise "region not properly set" unless @region.kind_of?(String) or @region.nil?
        raise "topic_arn not properly set" unless @topic_arn.kind_of?(String)
        raise "token not properly set" unless @token.kind_of?(String) or @token.nil?
      end

      # Lazily builds and memoizes the SNS client. The params hash is created
      # inside the memoized block so node.ec2 (used to derive the default
      # region) is only consulted the first time, not on every call.
      def sns
        @sns ||= begin
          params = {
            :logger => Chef::Log,
            # Availability zone (e.g. "us-east-1a") minus its trailing
            # letter is the region name.
            :region => @region || node.ec2.placement_availability_zone.chop
          }
          params[:token] = @token if @token
          RightAws::SnsInterface.new(@access_key, @secret_key, params)
        end
      end

      # Short, human-readable subject: client type, outcome and node name.
      def sns_subject
        chef_client = Chef::Config[:solo] ? 'Chef Solo' : 'Chef Client'
        status = run_status.success? ? 'success' : 'failure'
        "#{chef_client} #{status} in #{node.name}"
      end

      # Plain-text message body with node, run and (on failure) exception
      # details. Always returns the assembled String.
      def sns_body
        message = ''
        message << "Node Name: #{node.name}\n"
        message << "Hostname: #{node.fqdn}\n"
        message << "\n"
        message << "Chef Run List: #{node.run_list.to_s}\n"
        message << "Chef Environment: #{node.chef_environment}\n"
        message << "\n"
        if node.attribute?('ec2')
          message << "Instance Id: #{node.ec2.instance_id}\n"
          message << "Instance Public Hostname: #{node.ec2.public_hostname}\n"
          message << "Instance Hostname: #{node.ec2.hostname}\n"
          message << "Instance Public IPv4: #{node.ec2.public_ipv4}\n"
          message << "Instance Local IPv4: #{node.ec2.local_ipv4}\n"
        end
        message << "\n"
        message << "Chef Client Elapsed Time: #{elapsed_time.to_s}\n"
        message << "Chef Client Start Time: #{start_time.to_s}\n"
        # Fixed label: this line previously repeated "Start Time" while
        # printing end_time.
        message << "Chef Client End Time: #{end_time.to_s}\n"
        message << "\n"
        if exception
          message << "Exception: #{run_status.formatted_exception}\n"
          message << "Stacktrace:\n"
          message << Array(backtrace).join("\n")
          message << "\n"
        end
        # Explicit return: previously the method returned nil when there was
        # no exception because the trailing `if` was the last expression.
        message
      end
    end
  end
end
Move the @sns object configuration and creation inside a begin/end block so the client is built lazily on first use.
#
# Author:: Xabier de Zuazo (<xabier@onddo.com>)
# Copyright:: Copyright (c) 2013 Onddo Labs, SL.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/handler'
require 'chef/resource/directory'
require 'right_aws'
class Chef
  class Handler
    # Chef report handler that publishes the outcome of a Chef run (success
    # or failure) to an AWS SNS topic through the right_aws gem.
    class Sns < ::Chef::Handler
      attr_writer :access_key, :secret_key, :region, :token, :topic_arn

      # config accepts:
      #   :access_key, :secret_key - AWS credentials (required, String).
      #   :topic_arn               - SNS topic ARN to publish to (required).
      #   :region                  - AWS region (optional; defaults to the
      #                              EC2 node's region at publish time).
      #   :token                   - AWS session token (optional).
      def initialize(config={})
        Chef::Log.debug("#{self.class.to_s} initialized.")
        @access_key = config[:access_key]
        @secret_key = config[:secret_key]
        @region = config[:region] if config.has_key?(:region)
        @token = config[:token] if config.has_key?(:token)
        @topic_arn = config[:topic_arn]
      end

      # Called by Chef at the end of the run: validates the configuration and
      # publishes the run report (body + subject) to the configured topic.
      def report
        check_config
        Chef::Log.debug("#{self.class.to_s} reporting.")
        sns.publish(@topic_arn, sns_body, sns_subject)
      end

      protected

      # Fails fast with a descriptive message when a mandatory option is
      # missing or an optional option has the wrong type.
      def check_config
        Chef::Log.debug("#{self.class.to_s} checking handler configuration.")
        raise "access_key not properly set" unless @access_key.kind_of?(String)
        raise "secret_key not properly set" unless @secret_key.kind_of?(String)
        raise "region not properly set" unless @region.kind_of?(String) or @region.nil?
        raise "topic_arn not properly set" unless @topic_arn.kind_of?(String)
        raise "token not properly set" unless @token.kind_of?(String) or @token.nil?
      end

      # Lazily builds and memoizes the SNS client; node.ec2 (used to derive
      # the default region) is only consulted on first use.
      def sns
        @sns ||= begin
          params = {
            :logger => Chef::Log,
            # Availability zone (e.g. "us-east-1a") minus its trailing
            # letter is the region name.
            :region => @region || node.ec2.placement_availability_zone.chop
          }
          params[:token] = @token if @token
          RightAws::SnsInterface.new(@access_key, @secret_key, params)
        end
      end

      # Short, human-readable subject: client type, outcome and node name.
      def sns_subject
        chef_client = Chef::Config[:solo] ? 'Chef Solo' : 'Chef Client'
        status = run_status.success? ? 'success' : 'failure'
        "#{chef_client} #{status} in #{node.name}"
      end

      # Plain-text message body with node, run and (on failure) exception
      # details. Always returns the assembled String.
      def sns_body
        message = ''
        message << "Node Name: #{node.name}\n"
        message << "Hostname: #{node.fqdn}\n"
        message << "\n"
        message << "Chef Run List: #{node.run_list.to_s}\n"
        message << "Chef Environment: #{node.chef_environment}\n"
        message << "\n"
        if node.attribute?('ec2')
          message << "Instance Id: #{node.ec2.instance_id}\n"
          message << "Instance Public Hostname: #{node.ec2.public_hostname}\n"
          message << "Instance Hostname: #{node.ec2.hostname}\n"
          message << "Instance Public IPv4: #{node.ec2.public_ipv4}\n"
          message << "Instance Local IPv4: #{node.ec2.local_ipv4}\n"
        end
        message << "\n"
        message << "Chef Client Elapsed Time: #{elapsed_time.to_s}\n"
        message << "Chef Client Start Time: #{start_time.to_s}\n"
        # Fixed label: this line previously repeated "Start Time" while
        # printing end_time.
        message << "Chef Client End Time: #{end_time.to_s}\n"
        message << "\n"
        if exception
          message << "Exception: #{run_status.formatted_exception}\n"
          message << "Stacktrace:\n"
          message << Array(backtrace).join("\n")
          message << "\n"
        end
        # Explicit return: previously the method returned nil when there was
        # no exception because the trailing `if` was the last expression.
        message
      end
    end
  end
end
|
module Nucleon
module Plugin
# Abstract base for every Nucleon plugin. Keeps plugin metadata in a Config
# object (@meta), registers a GC finalizer that removes the plugin from the
# global registry, and provides status, extension-hook and UI conveniences.
class Base < Core
# All Plugin classes should directly or indirectly extend Base

# namespace   - plugin namespace
# plugin_type - plugin type within the namespace
# provider    - provider name; used as the default plugin name
# options     - Hash/Config of settings; keys :plugin_name/:name, :quiet and
#               :meta are consumed here, the remainder goes to Core#initialize.
def initialize(namespace, plugin_type, provider, options)
config = Util::Data.clean(Config.ensure(options), false)
name = Util::Data.ensure_value(config.delete(:plugin_name), config.delete(:name, provider))
@quiet = config.delete(:quiet, false)
set_meta(config.delete(:meta, Config.new))
# No logging statements above this line!!
super(config.import({ :logger => "#{namespace}->#{plugin_type}->#{plugin_provider}" }), {}, true, false)
myself.plugin_name = name
# Deregister this plugin from the global registry when it is GC'd; the
# finalizer proc captures only plain values (no reference to self).
ObjectSpace.define_finalizer(self, self.class.finalize(namespace, plugin_type, plugin_instance_name))
logger.debug("Normalizing #{namespace} #{plugin_type} plugin #{plugin_name}")
normalize(false)
@initialized = true
end
#---

# Builds the GC finalizer proc used by #initialize.
def self.finalize(namespace, plugin_type, plugin_instance_name)
proc do
Nucleon.remove_plugin_by_name(namespace, plugin_type, plugin_instance_name)
end
end
#---

# NOTE(review): swallows every undefined method call and returns nil —
# presumably to make plugin interfaces forgiving, but it also hides typos;
# respond_to_missing? is not overridden.
def method_missing(method, *args, &block)
return nil
end
#---
def remove_plugin
# Implement in sub classes if needed for cleanup
end
#-----------------------------------------------------------------------------
# Checks

# True when UI output (render/info/warn/...) should be suppressed.
def quiet?
@quiet
end
#-----------------------------------------------------------------------------
# Property accessor / modifiers

# Proxy handle for this plugin obtained from Nucleon.handle.
def myself
Nucleon.handle(self)
end
alias_method :me, :myself
#---
def quiet=quiet
@quiet = quiet
end
#---
# Plugin metadata (a Config object).
def meta
return @meta
end
#---
def set_meta(meta)
@meta = Config.ensure(meta)
end
#---
def plugin_namespace
return meta.get(:namespace)
end
#---
def plugin_type
return meta.get(:type)
end
#---
def plugin_provider
return meta.get(:provider)
end
#---
def plugin_name
return meta.get(:name)
end
def plugin_name=plugin_name
meta.set(:name, string(plugin_name))
end
#---
def plugin_directory
return meta.get(:directory)
end
#---
def plugin_file
return meta.get(:file)
end
#---
def plugin_instance_name
return meta.get(:instance_name)
end
#---
# Parent is only stored when it is itself a plugin.
def plugin_parent=parent
meta.set(:parent, parent) if parent.is_a?(Nucleon::Plugin::Base)
end
def plugin_parent
return meta.get(:parent)
end
#-----------------------------------------------------------------------------
# Status codes
def code
Nucleon.code
end
def codes(*codes)
Nucleon.codes(*codes)
end
#---
def status=status
meta.set(:status, status)
end
def status
meta.get(:status, code.unknown_status)
end
#-----------------------------------------------------------------------------
# Plugin operations

# Called from #initialize (reload = false) to let subclasses prepare state.
def normalize(reload)
# Implement in sub classes
end
#-----------------------------------------------------------------------------
# Extensions

# Name of the extension hook for this plugin: "<type>_<provider>_<hook>".
def hook_method(hook)
"#{plugin_type}_#{plugin_provider}_#{hook}"
end
#---
# The extension_* helpers all dispatch this plugin's hook through Nucleon,
# passing the options plus a :plugin reference to ourselves.
def extension(hook, options = {}, &code)
Nucleon.exec(hook_method(hook), Config.ensure(options).import({ :plugin => myself }), &code)
end
#---
def extended_config(type, options = {})
config = Nucleon.config(type, Config.ensure(options).import({ :plugin => myself }))
config.delete(:plugin)
config
end
#---
def extension_check(hook, options = {})
Nucleon.check(hook_method(hook), Config.ensure(options).import({ :plugin => myself }))
end
#---
def extension_set(hook, value, options = {})
Nucleon.value(hook_method(hook), value, Config.ensure(options).import({ :plugin => myself }))
end
#---
def extension_collect(hook, options = {})
Nucleon.collect(hook_method(hook), Config.ensure(options).import({ :plugin => myself }))
end
#-----------------------------------------------------------------------------
# Output

# Options merged into translated UI messages (see #info / #warn / ...).
def render_options
export
end
protected :render_options
#---
def render(display, options = {})
ui.info(display.strip, options) unless quiet? || display.strip.empty?
end
#---
def info(name, options = {})
ui.info(I18n.t(name, Util::Data.merge([ Config.ensure(render_options).export, options ], true))) unless quiet?
end
#---
def alert(display, options = {})
ui.warn(display.strip, options) unless quiet? || display.strip.empty?
end
#---
def warn(name, options = {})
ui.warn(I18n.t(name, Util::Data.merge([ Config.ensure(render_options).export, options ], true))) unless quiet?
end
#---
def error(name, options = {})
ui.error(I18n.t(name, Util::Data.merge([ Config.ensure(render_options).export, options ], true))) unless quiet?
end
#---
def success(name, options = {})
ui.success(I18n.t(name, Util::Data.merge([ Config.ensure(render_options).export, options ], true))) unless quiet?
end
#-----------------------------------------------------------------------------
# Utilities

# Normalizes raw plugin definitions (Hash or Array of Hashes) into an array
# of info hashes with a guaranteed :provider entry.
def self.build_info(namespace, plugin_type, data)
plugins = []
if data.is_a?(Hash)
data = [ data ]
end
logger.debug("Building plugin list of #{plugin_type}")
if data.is_a?(Array)
data.each do |info|
unless Util::Data.empty?(info)
info = translate(info)
if Util::Data.empty?(info[:provider])
info[:provider] = Nucleon.type_default(namespace, plugin_type)
end
logger.debug("Translated plugin info: #{info.inspect}")
plugins << info
end
end
end
return plugins
end
#---
# Hashes get their keys symbolized; other values pass through untouched.
def self.translate(data)
logger.debug("Translating data to internal plugin structure: #{data.inspect}")
return ( data.is_a?(Hash) ? symbol_map(data) : data )
end
#---
# Mixes the plugin-collection behavior (parallel exec, settings, sub config,
# plugin interface macros) into the calling class.
def self.init_plugin_collection(*external_block_methods)
logger.debug("Initializing plugin collection interface at #{Time.now}")
include Parallel
external_block_exec(*external_block_methods)
include Mixin::Settings
include Mixin::SubConfig
extend Mixin::Macro::PluginInterface
end
#---
# Runs the block, logging and reporting any StandardError instead of raising.
# Returns the block result (or true) on success, false on failure.
def safe_exec(return_result = true, &code)
begin
result = code.call
return result if return_result
return true
rescue => error
logger.error(error.inspect)
logger.error(error.message)
ui.error(error.message, { :prefix => false }) if error.message
end
return false
end
#---
# Like safe_exec but only when running with admin rights; otherwise warns
# and records an access_denied status.
def admin_exec(return_result = true, &block)
if Nucleon.admin?
safe_exec(return_result, &block)
else
ui.warn("The #{plugin_provider} action must be run as a machine administrator")
myself.status = code.access_denied
end
end
end
end
end
Use the myself proxy reference instead of self when registering the finalizer for the base plugin.
module Nucleon
module Plugin
# Abstract base for every Nucleon plugin. Keeps plugin metadata in a Config
# object (@meta), registers a GC finalizer that removes the plugin from the
# global registry, and provides status, extension-hook and UI conveniences.
class Base < Core
# All Plugin classes should directly or indirectly extend Base

# namespace   - plugin namespace
# plugin_type - plugin type within the namespace
# provider    - provider name; used as the default plugin name
# options     - Hash/Config of settings; keys :plugin_name/:name, :quiet and
#               :meta are consumed here, the remainder goes to Core#initialize.
def initialize(namespace, plugin_type, provider, options)
config = Util::Data.clean(Config.ensure(options), false)
name = Util::Data.ensure_value(config.delete(:plugin_name), config.delete(:name, provider))
@quiet = config.delete(:quiet, false)
set_meta(config.delete(:meta, Config.new))
# No logging statements above this line!!
super(config.import({ :logger => "#{namespace}->#{plugin_type}->#{plugin_provider}" }), {}, true, false)
myself.plugin_name = name
# Deregister this plugin from the global registry when it is GC'd. The
# finalizer is attached to the myself proxy (not self); the proc captures
# only plain values.
ObjectSpace.define_finalizer(myself, self.class.finalize(namespace, plugin_type, plugin_instance_name))
logger.debug("Normalizing #{namespace} #{plugin_type} plugin #{plugin_name}")
normalize(false)
@initialized = true
end
#---

# Builds the GC finalizer proc used by #initialize.
def self.finalize(namespace, plugin_type, plugin_instance_name)
proc do
Nucleon.remove_plugin_by_name(namespace, plugin_type, plugin_instance_name)
end
end
#---

# NOTE(review): swallows every undefined method call and returns nil —
# presumably to make plugin interfaces forgiving, but it also hides typos;
# respond_to_missing? is not overridden.
def method_missing(method, *args, &block)
return nil
end
#---
def remove_plugin
# Implement in sub classes if needed for cleanup
end
#-----------------------------------------------------------------------------
# Checks

# True when UI output (render/info/warn/...) should be suppressed.
def quiet?
@quiet
end
#-----------------------------------------------------------------------------
# Property accessor / modifiers

# Proxy handle for this plugin obtained from Nucleon.handle.
def myself
Nucleon.handle(self)
end
alias_method :me, :myself
#---
def quiet=quiet
@quiet = quiet
end
#---
# Plugin metadata (a Config object).
def meta
return @meta
end
#---
def set_meta(meta)
@meta = Config.ensure(meta)
end
#---
def plugin_namespace
return meta.get(:namespace)
end
#---
def plugin_type
return meta.get(:type)
end
#---
def plugin_provider
return meta.get(:provider)
end
#---
def plugin_name
return meta.get(:name)
end
def plugin_name=plugin_name
meta.set(:name, string(plugin_name))
end
#---
def plugin_directory
return meta.get(:directory)
end
#---
def plugin_file
return meta.get(:file)
end
#---
def plugin_instance_name
return meta.get(:instance_name)
end
#---
# Parent is only stored when it is itself a plugin.
def plugin_parent=parent
meta.set(:parent, parent) if parent.is_a?(Nucleon::Plugin::Base)
end
def plugin_parent
return meta.get(:parent)
end
#-----------------------------------------------------------------------------
# Status codes
def code
Nucleon.code
end
def codes(*codes)
Nucleon.codes(*codes)
end
#---
def status=status
meta.set(:status, status)
end
def status
meta.get(:status, code.unknown_status)
end
#-----------------------------------------------------------------------------
# Plugin operations

# Called from #initialize (reload = false) to let subclasses prepare state.
def normalize(reload)
# Implement in sub classes
end
#-----------------------------------------------------------------------------
# Extensions

# Name of the extension hook for this plugin: "<type>_<provider>_<hook>".
def hook_method(hook)
"#{plugin_type}_#{plugin_provider}_#{hook}"
end
#---
# The extension_* helpers all dispatch this plugin's hook through Nucleon,
# passing the options plus a :plugin reference to ourselves.
def extension(hook, options = {}, &code)
Nucleon.exec(hook_method(hook), Config.ensure(options).import({ :plugin => myself }), &code)
end
#---
def extended_config(type, options = {})
config = Nucleon.config(type, Config.ensure(options).import({ :plugin => myself }))
config.delete(:plugin)
config
end
#---
def extension_check(hook, options = {})
Nucleon.check(hook_method(hook), Config.ensure(options).import({ :plugin => myself }))
end
#---
def extension_set(hook, value, options = {})
Nucleon.value(hook_method(hook), value, Config.ensure(options).import({ :plugin => myself }))
end
#---
def extension_collect(hook, options = {})
Nucleon.collect(hook_method(hook), Config.ensure(options).import({ :plugin => myself }))
end
#-----------------------------------------------------------------------------
# Output

# Options merged into translated UI messages (see #info / #warn / ...).
def render_options
export
end
protected :render_options
#---
def render(display, options = {})
ui.info(display.strip, options) unless quiet? || display.strip.empty?
end
#---
def info(name, options = {})
ui.info(I18n.t(name, Util::Data.merge([ Config.ensure(render_options).export, options ], true))) unless quiet?
end
#---
def alert(display, options = {})
ui.warn(display.strip, options) unless quiet? || display.strip.empty?
end
#---
def warn(name, options = {})
ui.warn(I18n.t(name, Util::Data.merge([ Config.ensure(render_options).export, options ], true))) unless quiet?
end
#---
def error(name, options = {})
ui.error(I18n.t(name, Util::Data.merge([ Config.ensure(render_options).export, options ], true))) unless quiet?
end
#---
def success(name, options = {})
ui.success(I18n.t(name, Util::Data.merge([ Config.ensure(render_options).export, options ], true))) unless quiet?
end
#-----------------------------------------------------------------------------
# Utilities

# Normalizes raw plugin definitions (Hash or Array of Hashes) into an array
# of info hashes with a guaranteed :provider entry.
def self.build_info(namespace, plugin_type, data)
plugins = []
if data.is_a?(Hash)
data = [ data ]
end
logger.debug("Building plugin list of #{plugin_type}")
if data.is_a?(Array)
data.each do |info|
unless Util::Data.empty?(info)
info = translate(info)
if Util::Data.empty?(info[:provider])
info[:provider] = Nucleon.type_default(namespace, plugin_type)
end
logger.debug("Translated plugin info: #{info.inspect}")
plugins << info
end
end
end
return plugins
end
#---
# Hashes get their keys symbolized; other values pass through untouched.
def self.translate(data)
logger.debug("Translating data to internal plugin structure: #{data.inspect}")
return ( data.is_a?(Hash) ? symbol_map(data) : data )
end
#---
# Mixes the plugin-collection behavior (parallel exec, settings, sub config,
# plugin interface macros) into the calling class.
def self.init_plugin_collection(*external_block_methods)
logger.debug("Initializing plugin collection interface at #{Time.now}")
include Parallel
external_block_exec(*external_block_methods)
include Mixin::Settings
include Mixin::SubConfig
extend Mixin::Macro::PluginInterface
end
#---
# Runs the block, logging and reporting any StandardError instead of raising.
# Returns the block result (or true) on success, false on failure.
def safe_exec(return_result = true, &code)
begin
result = code.call
return result if return_result
return true
rescue => error
logger.error(error.inspect)
logger.error(error.message)
ui.error(error.message, { :prefix => false }) if error.message
end
return false
end
#---
# Like safe_exec but only when running with admin rights; otherwise warns
# and records an access_denied status.
def admin_exec(return_result = true, &block)
if Nucleon.admin?
safe_exec(return_result, &block)
else
ui.warn("The #{plugin_provider} action must be run as a machine administrator")
myself.status = code.access_denied
end
end
end
end
end
|
module Cosme
  # Rack middleware that injects the cosmeticize markup right after the
  # opening <body> tag of successful HTML responses.
  class Middleware
    include ActionView::Helpers::TagHelper
    include Cosme::Helpers

    def initialize(app)
      @app = app
    end

    # Rack entry point: forwards the request downstream and, when auto
    # cosmeticizing is enabled and the response is renderable HTML, rewrites
    # the body with the cosmeticize tag inserted.
    def call(env)
      @env = env
      response = @app.call(env)
      return response unless Cosme.auto_cosmeticize?
      html = response_to_html(response)
      return response unless html
      new_html = insert_cosmeticize_tag(html)
      new_response(response, new_html)
    end

    private

    # Returns the response body as an HTML string, or nil when the response
    # must pass through untouched (non-200, non-HTML, or binary transfer).
    def response_to_html(response)
      status, headers, body = response
      return if status != 200
      return unless html_headers? headers
      take_html(body)
    end

    # Inserts the cosmeticize markup immediately after the opening
    # <body ...> tag; $~ inside the block is the matched body tag itself.
    def insert_cosmeticize_tag(html)
      cosmeticizer = cosmeticize(controller)
      html.sub(/<body[^>]*>/) { [$~, cosmeticizer].join }
    end

    # Rebuilds the Rack triple with the new body and a matching
    # Content-Length header.
    def new_response(response, new_html)
      status, headers, _ = response
      headers['Content-Length'] = new_html.bytesize.to_s
      [status, headers, [new_html]]
    end

    def html_headers?(headers)
      return false unless headers['Content-Type']
      return false unless headers['Content-Type'].include? 'text/html'
      return false if headers['Content-Transfer-Encoding'] == 'binary'
      true
    end

    # body is one of the following:
    # - Array
    # - ActionDispatch::Response
    # - ActionDispatch::Response::RackBody
    def take_html(body)
      strings = []
      body.each { |buf| strings << buf }
      strings.join
    end

    # Use in Cosme::Helpers#cosmeticize
    def render(options = {})
      view_context = ActionView::Base.new(ActionController::Base.view_paths, assigns, controller)
      view_context.render(options)
    end

    # Fixed: #controller was defined twice; the earlier, non-nil-guarded
    # duplicate was dead code (silently overridden) and has been removed.
    def controller
      return unless @env
      @env['action_controller.instance']
    end

    def assigns
      return {} unless controller
      controller.view_context.assigns
    end
  end
end
Support helper methods inside cosmetic view files.
module Cosme
# Rack middleware that injects the cosmeticize markup right after the opening
# <body> tag of successful HTML responses.
class Middleware
include ActionView::Helpers::TagHelper
include Cosme::Helpers
def initialize(app)
@app = app
end
# Rack entry point: forwards the request downstream and, when auto
# cosmeticizing is enabled and the response is renderable HTML, rewrites the
# body with the cosmeticize tag inserted.
def call(env)
@env = env
response = @app.call(env)
return response unless Cosme.auto_cosmeticize?
html = response_to_html(response)
return response unless html
new_html = insert_cosmeticize_tag(html)
new_response(response, new_html)
end
private
# Returns the response body as an HTML string, or nil when the response must
# pass through untouched (non-200, non-HTML, or binary transfer).
def response_to_html(response)
status, headers, body = response
return if status != 200
return unless html_headers? headers
take_html(body)
end
# Inserts the cosmeticize markup immediately after the opening <body ...>
# tag; $~ inside the block is the matched body tag itself.
def insert_cosmeticize_tag(html)
cosmeticizer = cosmeticize(controller)
html.sub(/<body[^>]*>/) { [$~, cosmeticizer].join }
end
# Rebuilds the Rack triple with the new body and matching Content-Length.
def new_response(response, new_html)
status, headers, _ = response
headers['Content-Length'] = new_html.bytesize.to_s
[status, headers, [new_html]]
end
def html_headers?(headers)
return false unless headers['Content-Type']
return false unless headers['Content-Type'].include? 'text/html'
return false if headers['Content-Transfer-Encoding'] == 'binary'
true
end
# body is one of the following:
# - Array
# - ActionDispatch::Response
# - ActionDispatch::Response::RackBody
def take_html(body)
strings = []
body.each { |buf| strings << buf }
strings.join
end
# Use in Cosme::Helpers#cosmeticize
# Renders with a standalone view context that has the controller's helpers,
# the app's URL helpers, and isolated-engine route proxies mixed in.
# NOTE(review): class_eval is a Module method — confirm view_context responds
# to it here (view_context.singleton_class.class_eval may be intended).
def render(options = {})
_helpers = helpers
view_context = ActionView::Base.new(ActionController::Base.view_paths, assigns, controller)
view_context.class_eval { _helpers.each { |h| include h } }
view_context.render(options)
end
def controller
return unless @env
@env['action_controller.instance']
end
def assigns
return {} unless controller
controller.view_context.assigns
end
# Helper modules to expose to the standalone view context (nil entries,
# e.g. when there is no controller, are compacted away).
def helpers
[
controller.try(:_helpers),
Rails.application.routes.url_helpers,
engines_helpers
].compact
end
# Builds a module defining one method per isolated engine (named after the
# engine) that returns the engine's routes proxy, mirroring what Rails mixes
# into regular views. Local is spelled `wodule` because `module` is reserved.
def engines_helpers
wodule = Module.new
isolated_engine_instances.each do |instance|
routes = instance.routes
name = instance.engine_name
wodule.class_eval do
define_method "_#{name}" do
routes
end
end
wodule.class_eval(<<-RUBY, __FILE__, __LINE__ + 1)
def #{name}
@_#{name} ||= _#{name}
end
RUBY
end
wodule
end
def isolated_engine_instances
Rails::Engine.subclasses.map(&:instance).select(&:isolated?)
end
end
end
|
# Copyright (c) 2012 Bingoentreprenøren AS
# Copyright (c) 2012 Patrick Hanevold
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
module Crayfish
  # Library version string (MAJOR.MINOR.PATCH); frozen so the constant
  # cannot be mutated in place.
  VERSION = "0.2.5".freeze
end
Version bump to 0.2.6.
# Copyright (c) 2012 Bingoentreprenøren AS
# Copyright (c) 2012 Patrick Hanevold
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
module Crayfish
  # Library version string (MAJOR.MINOR.PATCH); frozen so the constant
  # cannot be mutated in place.
  VERSION = "0.2.6".freeze
end
|
require 'base64'
r8_require('../../utils/performance_service')
module XYZ
class ActionsetController < Controller
# Main Ramaze dispatch entry point. The splat route is split into a
# "model/action" route key plus trailing action-set parameters, the route is
# validated/rewritten, the user session is (re)established — optionally
# revived from the encrypted "dtk-user-info" cookie — and the matching action
# set is executed.
def process(*route)
route_key = route[0..1].join("/")
action_set_params = route[2..route.size-1]||[]
model_name = route[0].to_sym
route = R8::ReactorRoute.validate_route(request.request_method, route_key)
#return 404 Resource Not Found if route is not valid
# NOTE(review): respond() is assumed to halt request processing here —
# confirm Ramaze semantics, otherwise execution continues with route == nil.
respond("#{route_key}!", 404) unless route
# we set new model
model_name = route.first.to_sym
# we rewrite route key to new mapped one
route_key = route.join('/')
ramaze_user = user_object()
# DEBUG SNIPPET >>>> REMOVE <<<<
#ramaze_user = User.get_user_by_id( { :model_name => :user, :c => 2 }, 2147483717)
#user_login(ramaze_user.merge(:access_time => Time.now))
unless route.first == "user"
unless logged_in?
unless R8::Config[:session][:cookie][:disabled]
if request.cookies["dtk-user-info"]
#Log.debug "Session cookie is being used to revive this session"
# using cookie to take session information
# composed data consists of user_id, expire timestamp, and tenant id
# URL encoding is transferring + sign to ' ', so we correct that via gsub
cookie_data = Base64.decode64(request.cookies["dtk-user-info"].gsub(' ','+'))
composed_data = ::AESCrypt.decrypt(cookie_data, ENCRYPTION_SALT, ENCRYPTION_SALT)
user_id, time_integer, c = composed_data.split('_')
# make sure that cookie has not expired
if (time_integer.to_i >= Time.now.to_i)
# due to tight coupling between model_handle and user_object we will set
# model handle manually
ramaze_user = User.get_user_by_id( { :model_name => :user, :c => c }, user_id)
# TODO: [Haris] This is workaround to make sure that user is logged in, due to Ramaze design
# this is easiest way to do it. But does feel dirty.
# TODO: [Haris] This does not work since user is not persisted, look into this after cookie bug is resolved
user_login(ramaze_user.merge(:access_time => Time.now))
# we set :last_ts as access time for later check
session.store(:last_ts, Time.now.to_i)
#Log.debug "Session cookie has been used to temporary revive user session"
end
end
end
end
# NOTE(review): this assignment creates a local that shadows Ramaze's
# session helper from this line onward; the session.store call above still
# reaches the helper because the local does not exist before this point.
session = CurrentSession.new
session.set_user_object(ramaze_user)
session.set_auth_filters(:c,:group_ids)
login_first unless R8::Config[:development_test_user]
end
@json_response = true if ajax_request?
#separate route in 'route_key' (e.g., object/action, object) and its params 'action_set_params'
#first two (or single items make up route_key; the rest are params
action_set_def = R8::Routes[route_key] || Hash.new
@action_set_param_map = ret_action_set_param_map(action_set_def,action_set_params)
@layout = (R8::Routes[route_key] ? R8::Routes[route_key][:layout] : nil) || R8::Config[:default_layout]
#if a config is defined for route, use values from config
if action_set_def[:action_set]
run_action_set(action_set_def[:action_set],model_name)
else #create an action set of length one and run it
action_set = compute_singleton_action_set(action_set_def,route_key,action_set_params)
run_action_set(action_set)
end
Log.info("[#{Time.now.strftime("%Y/%m/%d %H:%M:%S")}] USER -- : '#{ramaze_user[:username]}'")
end
private
# Builds a one-element action set for routes that have no configured
# :action_set. The action's route falls back to the route key, any parsed
# query string is appended to the action parameters, and for non-REST
# requests the default rendering panel/assign type are merged in.
def compute_singleton_action_set(action_set_def,route_key,action_set_params)
# NOTE(review): action_params aliases the caller's array, so the << below
# mutates action_set_params in place — confirm this is intended.
action_params = action_set_params
query_string = ret_parsed_query_string_from_uri()
action_params << query_string unless query_string.empty?
action = {
:route => action_set_def[:route] || route_key,
:action_params => action_params
}
unless rest_request?
action.merge!(
:panel => action_set_def[:panel] || :main_body,
:assign_type => action_set_def[:assign_type] || :replace
)
end
[action]
end
#parent_model_name only set when top level action decomposed as opposed to when an action set of length one is created
#parent_model_name only set when top level action decomposed as opposed to when an action set of length one is created
# REST requests must carry exactly one action and are delegated to
# run_rest_action; web requests execute every action, normalizing each
# controller result (panel, assign type, js src path) into @ctrl_results.
def run_action_set(action_set,parent_model_name=nil)
# Amar: PERFORMANCE
PerformanceService.log("OPERATION=#{action_set.first[:route]}")
PerformanceService.log("REQUEST_PARAMS=#{request.params.to_json}")
if rest_request?
unless (action_set||[]).size == 1
raise Error.new("If rest response action set must just have one element")
end
PerformanceService.start("PERF_OPERATION_DUR")
run_rest_action(action_set.first,parent_model_name)
PerformanceService.end("PERF_OPERATION_DUR")
return
end
@ctrl_results = ControllerResultsWeb.new
#Execute each of the actions in the action_set and set the returned content
(action_set||[]).each do |action|
model,method = action[:route].split("/")
method ||= :index
action_namespace = "#{R8::Config[:application_name]}_#{model}_#{method}".to_sym
result = call_action(action,parent_model_name)
ctrl_result = Hash.new
if result and result.length > 0
#if a hash is returned, make result an array list of one
if result.kind_of?(Hash)
ctrl_result[:content] = [result]
else
# NOTE(review): assumes a non-Hash result already responds like a Hash
# with a :content key — confirm against call_action's contract.
ctrl_result = result
end
panel_content_track = {}
#for each piece of content set by controller result,make sure panel and assign type is set
ctrl_result[:content].each_with_index do |item,index|
#set the appropriate panel to render results to
panel_name = (ctrl_result[:content][index][:panel] || action[:panel] || :main_body).to_sym
panel_content_track[panel_name] ? panel_content_track[panel_name] +=1 : panel_content_track[panel_name] = 1
ctrl_result[:content][index][:panel] = panel_name
# First content item for a panel replaces it; subsequent items append.
(panel_content_track[panel_name] == 1) ? dflt_assign_type = :replace : dflt_assign_type = :append
#set the appropriate render assignment type (append | prepend | replace)
ctrl_result[:content][index][:assign_type] = (ctrl_result[:content][index][:assign_type] || action[:assign_type] || dflt_assign_type).to_sym
#set js with base cache uri path
ctrl_result[:content][index][:src] = "#{R8::Config[:base_js_cache_uri]}/#{ctrl_result[:content][index][:src]}" if !ctrl_result[:content][index][:src].nil?
end
end
ctrl_result[:js_includes] = ret_js_includes()
ctrl_result[:css_includes] = ret_css_includes()
ctrl_result[:js_exe_list] = ret_js_exe_list()
@ctrl_results.add(action_namespace,ctrl_result)
end
end
# Runs a single action for a REST request and wraps the outcome (or an error
# response) in ControllerResultsRest. Session errors become an unauthorized
# response; any other exception is logged and converted to a not-ok response.
# NOTE(review): `model` and `method` are computed here but never used.
def run_rest_action(action,parent_model_name=nil)
model, method = action[:route].split("/")
method ||= :index
result = nil
begin
result = call_action(action,parent_model_name)
rescue DTK::SessionError => e
auth_unauthorized_response(e.message)
# TODO: Look into the code so we can return 401 HTTP status
#result = rest_notok_response(:message => e.message)
# Deliberately broad rescue: every failure must yield a REST error payload.
rescue Exception => e
#TODO: put bactrace info in response
if e.kind_of?(ErrorUsage)
Log.error_pp([e,e.backtrace[0]])
else
Log.error_pp([e,e.backtrace[0..20]])
end
result = rest_notok_response(RestError.create(e).hash_form())
end
@ctrl_results = ControllerResultsRest.new(result)
end
# Resolves "<model>/<method>" to <Model>Controller#<method> (preferring a
# rest__<method> variant for REST requests) and invokes it via Ramaze::Action.
def call_action(action,parent_model_name=nil)
model,method = action[:route].split("/")
controller_class = XYZ.const_get("#{model.capitalize}Controller")
method ||= :index
if rest_request?()
rest_variant = "rest__#{method}"
if controller_class.method_defined?(rest_variant)
method = rest_variant
end
end
model_name = model.to_sym
processed_params = process_action_params(action[:action_params])
# NOTE(review): despite the name, :action_set_params holds the search object
# built from Hash-valued params (see ret_search_object), not the raw params.
action_set_params = ret_search_object(processed_params,model_name,parent_model_name)
uri_params = ret_uri_params(processed_params)
variables = {:action_set_params => action_set_params}
unless rest_request?()
variables.merge!(
:js_includes => @js_includes,
:css_includes => @css_includes,
:js_exe_list => @js_exe_list
)
end
a = Ramaze::Action.create(
:node => controller_class,
:method => method.to_sym,
:params => uri_params,
# Identity engine: return the controller's value without template rendering.
# NOTE(review): the lambda's `action` param shadows the outer `action`.
:engine => lambda{|action, value| value },
:variables => variables
)
return a.call
end
# Builds a search object from the Hash-valued params: each {key => value}
# becomes an [:eq, key, value] clause ANDed together; :parent_id placeholders
# are rewritten to the model's actual parent-id column. Returns nil when
# there are no Hash params (i.e., nothing to filter on).
def ret_search_object(processed_params,model_name,parent_model_name=nil)
#TODO: assume everything is just equal
filter_params = processed_params.select{|p|p.kind_of?(Hash)}
return nil if filter_params.empty?
#for processing :parent_id
parent_id_field_name = ModelHandle.new(ret_session_context_id(),model_name,parent_model_name).parent_id_field_name()
filter = [:and] + filter_params.map do |el|
# Only the first key/value of each Hash param is used.
raw_pair = [el.keys.first,el.values.first]
[:eq] + raw_pair.map{|x| x == :parent_id ? parent_id_field_name : x}
end
{"search" => {
"search_pattern" => {
:relation => model_name,
:filter => filter
}
}
}
end
# Return the positional URI params: every processed param that is not a Hash.
# (Hash entries are equality filters consumed by ret_search_object.)
def ret_uri_params(processed_params)
  processed_params.reject { |param| param.kind_of?(Hash) }
end
# Recursively substitute free variables ($name$) in raw_params using
# @action_set_param_map. Arrays and Hash values are processed element-wise;
# strings get every "$name$" token replaced by the mapped value; any other
# object is returned untouched.
def process_action_params(raw_params)
  # Nothing to substitute: hand back the input unchanged.
  return raw_params if @action_set_param_map.empty?
  case raw_params
  when Array
    raw_params.map { |element| process_action_params(element) }
  when Hash
    substituted = Hash.new
    raw_params.each { |key, value| substituted[key] = process_action_params(value) }
    substituted
  when String
    substituted = raw_params.dup
    @action_set_param_map.each do |name, value|
      substituted.gsub!(Regexp.new("\\$#{name}\\$"), value.to_s)
    end
    substituted
  else
    raw_params
  end
end
# Map declared action-set parameter names (action_set_def[:params]) to the
# positional values in action_set_params. Declared params without a supplied
# value map to nil (and are logged).
#
# Improvements: manual index counter replaced with each_with_index; fixed
# "specfied" typo in the log message.
def ret_action_set_param_map(action_set_def,action_set_params)
  ret = Hash.new
  return ret if action_set_def.nil?
  (action_set_def[:params] || []).each_with_index do |param_name, i|
    if i < action_set_params.size
      ret[param_name] = action_set_params[i]
    else
      ret[param_name] = nil
      Log.info("action set param #{param_name} not specified in action set call")
    end
  end
  ret
end
#TODO: lets finally kill off the xyz and move route loading into some sort of initialize or route setup call
#enter the routes defined in config into Ramaze
# Path rewriting:
#   /xyz/**  -> /xyz/actionset/process/**   (except /xyz/devtest)
#   /rest/** -> /xyz/actionset/process/**
# Returning nil (no branch taken) leaves the path untouched.
Ramaze::Route["route_to_actionset"] = lambda{ |path, request|
if path =~ Regexp.new("^/xyz") and not path =~ Regexp.new("^/xyz/devtest")
path.gsub(Regexp.new("^/xyz"),"/xyz/actionset/process")
elsif path =~ Regexp.new("^/rest")
path.gsub(Regexp.new("^/rest"),"/xyz/actionset/process")
end
}
end
end
DTK-859 More descriptive error when db is down no_build
require 'base64'
r8_require('../../utils/performance_service')
module XYZ
class ActionsetController < Controller
# Ramaze entry point for every /xyz and /rest request (see the
# route_to_actionset rewrite at the bottom of this file). Splits the splat
# route into a route_key (first one/two segments) plus positional params,
# validates the route, optionally revives a session from the encrypted
# "dtk-user-info" cookie, then executes the configured action set (or a
# synthesized one-action set) for the route.
def process(*route)
route_key = route[0..1].join("/")
action_set_params = route[2..route.size-1]||[]
model_name = route[0].to_sym
route = R8::ReactorRoute.validate_route(request.request_method, route_key)
#return 404 Resource Not Found if route is not valid
# NOTE(review): assumes Ramaze's respond halts the handler here — confirm.
respond("#{route_key}!", 404) unless route
# we set new model
model_name = route.first.to_sym
# we rewrite route key to new mapped one
route_key = route.join('/')
ramaze_user = user_object()
# DEBUG SNIPPET >>>> REMOVE <<<<
#ramaze_user = User.get_user_by_id( { :model_name => :user, :c => 2 }, 2147483717)
#user_login(ramaze_user.merge(:access_time => Time.now))
unless route.first == "user"
unless logged_in?
unless R8::Config[:session][:cookie][:disabled]
if request.cookies["dtk-user-info"]
#Log.debug "Session cookie is being used to revive this session"
# using cookie to take session information
# composed data consists of user_id, expire timestamp, and tenant id
# URL encoding transfers the + sign to ' ', so we correct that via gsub
cookie_data = Base64.decode64(request.cookies["dtk-user-info"].gsub(' ','+'))
composed_data = ::AESCrypt.decrypt(cookie_data, ENCRYPTION_SALT, ENCRYPTION_SALT)
user_id, time_integer, c = composed_data.split('_')
# make sure that cookie has not expired
if (time_integer.to_i >= Time.now.to_i)
# due to tight coupling between model_handle and user_object we will set
# model handle manually
begin
ramaze_user = User.get_user_by_id( { :model_name => :user, :c => c }, user_id)
# DB down: report 403 with the driver error rather than crashing.
rescue ::Sequel::DatabaseDisconnectError, ::Sequel::DatabaseConnectionError => e
respond(e, 403)
end
# TODO: [Haris] This is workaround to make sure that user is logged in, due to Ramaze design
# this is easiest way to do it. But does feel dirty.
# TODO: [Haris] This does not work since user is not persisted, look into this after cookie bug is resolved
user_login(ramaze_user.merge(:access_time => Time.now))
# we set :last_ts as access time for later check
session.store(:last_ts, Time.now.to_i)
#Log.debug "Session cookie has been used to temporary revive user session"
end
end
end
end
# NOTE(review): local `session` shadows Ramaze's session helper from here on.
session = CurrentSession.new
session.set_user_object(ramaze_user)
session.set_auth_filters(:c,:group_ids)
login_first unless R8::Config[:development_test_user]
end
@json_response = true if ajax_request?
#separate route in 'route_key' (e.g., object/action, object) and its params 'action_set_params'
#first two (or single) items make up route_key; the rest are params
action_set_def = R8::Routes[route_key] || Hash.new
@action_set_param_map = ret_action_set_param_map(action_set_def,action_set_params)
@layout = (R8::Routes[route_key] ? R8::Routes[route_key][:layout] : nil) || R8::Config[:default_layout]
#if a config is defined for route, use values from config
if action_set_def[:action_set]
run_action_set(action_set_def[:action_set],model_name)
else #create an action set of length one and run it
action_set = compute_singleton_action_set(action_set_def,route_key,action_set_params)
run_action_set(action_set)
end
Log.info("[#{Time.now.strftime("%Y/%m/%d %H:%M:%S")}] USER -- : '#{ramaze_user[:username]}'")
end
private
# Wrap a single route invocation in a one-element action set suitable for
# run_action_set. The parsed query string (when non-empty) is appended to the
# positional params; non-REST requests additionally get panel/assign_type
# defaults from the route config.
def compute_singleton_action_set(action_set_def,route_key,action_set_params)
  params = action_set_params
  query_string = ret_parsed_query_string_from_uri()
  params << query_string unless query_string.empty?
  action = {
    :route => action_set_def[:route] || route_key,
    :action_params => params
  }
  if rest_request?
    [action]
  else
    defaults = {
      :panel => action_set_def[:panel] || :main_body,
      :assign_type => action_set_def[:assign_type] || :replace
    }
    [action.merge(defaults)]
  end
end
#parent_model_name only set when top level action decomposed as opposed to when an action set of length one is created
# Executes every action in action_set. REST requests are delegated to
# run_rest_action and must contain exactly one action; web requests collect
# per-panel content, js/css includes and an exe list into @ctrl_results
# (ControllerResultsWeb).
def run_action_set(action_set,parent_model_name=nil)
# Amar: PERFORMANCE
PerformanceService.log("OPERATION=#{action_set.first[:route]}")
PerformanceService.log("REQUEST_PARAMS=#{request.params.to_json}")
if rest_request?
unless (action_set||[]).size == 1
raise Error.new("If rest response action set must just have one element")
end
PerformanceService.start("PERF_OPERATION_DUR")
run_rest_action(action_set.first,parent_model_name)
PerformanceService.end("PERF_OPERATION_DUR")
return
end
@ctrl_results = ControllerResultsWeb.new
#Execute each of the actions in the action_set and set the returned content
(action_set||[]).each do |action|
model,method = action[:route].split("/")
method ||= :index
action_namespace = "#{R8::Config[:application_name]}_#{model}_#{method}".to_sym
result = call_action(action,parent_model_name)
ctrl_result = Hash.new
if result and result.length > 0
#if a hash is returned, turn make result an array list of one
if result.kind_of?(Hash)
ctrl_result[:content] = [result]
else
# NOTE(review): non-Hash results are assumed to support [:content] and []= — confirm.
ctrl_result = result
end
panel_content_track = {}
#for each piece of content set by controller result,make sure panel and assign type is set
# NOTE(review): `item` is unused; the body indexes ctrl_result[:content] directly.
ctrl_result[:content].each_with_index do |item,index|
#set the appropriate panel to render results to
panel_name = (ctrl_result[:content][index][:panel] || action[:panel] || :main_body).to_sym
panel_content_track[panel_name] ? panel_content_track[panel_name] +=1 : panel_content_track[panel_name] = 1
ctrl_result[:content][index][:panel] = panel_name
# First item targeting a panel replaces its content; later ones append.
(panel_content_track[panel_name] == 1) ? dflt_assign_type = :replace : dflt_assign_type = :append
#set the appropriate render assignment type (append | prepend | replace)
ctrl_result[:content][index][:assign_type] = (ctrl_result[:content][index][:assign_type] || action[:assign_type] || dflt_assign_type).to_sym
#set js with base cache uri path
ctrl_result[:content][index][:src] = "#{R8::Config[:base_js_cache_uri]}/#{ctrl_result[:content][index][:src]}" if !ctrl_result[:content][index][:src].nil?
end
end
ctrl_result[:js_includes] = ret_js_includes()
ctrl_result[:css_includes] = ret_css_includes()
ctrl_result[:js_exe_list] = ret_js_exe_list()
@ctrl_results.add(action_namespace,ctrl_result)
end
end
# Runs a single action for a REST request and wraps the outcome (or an error
# response) in ControllerResultsRest. DTK::SessionError becomes an
# unauthorized response; any other exception is logged and converted into a
# not-ok REST response.
# NOTE(review): `model` and `method` are computed here but never used.
def run_rest_action(action,parent_model_name=nil)
model, method = action[:route].split("/")
method ||= :index
result = nil
begin
result = call_action(action,parent_model_name)
rescue DTK::SessionError => e
auth_unauthorized_response(e.message)
# TODO: Look into the code so we can return 401 HTTP status
#result = rest_notok_response(:message => e.message)
# Deliberately broad rescue: every failure must yield a REST error payload.
rescue Exception => e
#TODO: put bactrace info in response
if e.kind_of?(ErrorUsage)
Log.error_pp([e,e.backtrace[0]])
else
Log.error_pp([e,e.backtrace[0..20]])
end
result = rest_notok_response(RestError.create(e).hash_form())
end
@ctrl_results = ControllerResultsRest.new(result)
end
# Resolves "<model>/<method>" to <Model>Controller#<method> (preferring a
# rest__<method> variant for REST requests) and invokes it via Ramaze::Action
# with an identity rendering engine.
def call_action(action,parent_model_name=nil)
model,method = action[:route].split("/")
controller_class = XYZ.const_get("#{model.capitalize}Controller")
method ||= :index
if rest_request?()
rest_variant = "rest__#{method}"
if controller_class.method_defined?(rest_variant)
method = rest_variant
end
end
model_name = model.to_sym
processed_params = process_action_params(action[:action_params])
# NOTE(review): despite the name, :action_set_params holds the search object
# built from Hash-valued params (ret_search_object), not the raw params.
action_set_params = ret_search_object(processed_params,model_name,parent_model_name)
uri_params = ret_uri_params(processed_params)
variables = {:action_set_params => action_set_params}
unless rest_request?()
variables.merge!(
:js_includes => @js_includes,
:css_includes => @css_includes,
:js_exe_list => @js_exe_list
)
end
a = Ramaze::Action.create(
:node => controller_class,
:method => method.to_sym,
:params => uri_params,
# Identity engine: return the controller value without template rendering.
# NOTE(review): the lambda's `action` param shadows the outer `action`.
:engine => lambda{|action, value| value },
:variables => variables
)
return a.call
end
# Build a search object from the Hash-valued params: each {key => value}
# becomes an [:eq, key, value] clause, ANDed together. :parent_id
# placeholders (in key or value position) are rewritten to the model's real
# parent-id column. Returns nil when there are no Hash params to filter on.
#TODO: assume everything is just equal
def ret_search_object(processed_params,model_name,parent_model_name=nil)
  filter_params = processed_params.select { |param| param.kind_of?(Hash) }
  return nil if filter_params.empty?
  # for processing :parent_id
  parent_id_field_name = ModelHandle.new(ret_session_context_id(),model_name,parent_model_name).parent_id_field_name()
  clauses = filter_params.map do |filter_term|
    key, value = filter_term.keys.first, filter_term.values.first
    mapped = [key, value].map { |term| term == :parent_id ? parent_id_field_name : term }
    [:eq] + mapped
  end
  {
    "search" => {
      "search_pattern" => {
        :relation => model_name,
        :filter => [:and] + clauses
      }
    }
  }
end
# Positional URI params are everything that is not a Hash filter term.
def ret_uri_params(processed_params)
  processed_params.reject { |p| p.is_a?(Hash) }
end
#does substitution of free variables in raw_params
# Recursively walks arrays and hash values; in strings every "$name$" token
# is replaced with the value from @action_set_param_map. Other object types
# pass through unchanged.
def process_action_params(raw_params)
#short circuit if no params that need substituting
return raw_params if @action_set_param_map.empty?
if raw_params.kind_of?(Array)
raw_params.map{|p|process_action_params(p)}
elsif raw_params.kind_of?(Hash)
# Only values are substituted; keys are kept verbatim.
ret = Hash.new
raw_params.each{|k,v|ret[k] = process_action_params(v)}
ret
elsif raw_params.kind_of?(String)
# dup before gsub! so the caller's string is never mutated.
ret = raw_params.dup
@action_set_param_map.each{|k,v|ret.gsub!(Regexp.new("\\$#{k}\\$"),v.to_s)}
ret
else
raw_params
end
end
# Map declared action-set parameter names (action_set_def[:params]) to the
# positional values in action_set_params. Declared params without a supplied
# value map to nil (and are logged).
#
# Improvements: manual index counter replaced with each_with_index; fixed
# "specfied" typo in the log message.
def ret_action_set_param_map(action_set_def,action_set_params)
  ret = Hash.new
  return ret if action_set_def.nil?
  (action_set_def[:params] || []).each_with_index do |param_name, i|
    if i < action_set_params.size
      ret[param_name] = action_set_params[i]
    else
      ret[param_name] = nil
      Log.info("action set param #{param_name} not specified in action set call")
    end
  end
  ret
end
#TODO: lets finally kill off the xyz and move route loading into some sort of initialize or route setup call
#enter the routes defined in config into Ramaze
# Path rewriting:
#   /xyz/**  -> /xyz/actionset/process/**   (except /xyz/devtest)
#   /rest/** -> /xyz/actionset/process/**
# Returning nil (no branch taken) leaves the path untouched.
Ramaze::Route["route_to_actionset"] = lambda{ |path, request|
if path =~ Regexp.new("^/xyz") and not path =~ Regexp.new("^/xyz/devtest")
path.gsub(Regexp.new("^/xyz"),"/xyz/actionset/process")
elsif path =~ Regexp.new("^/rest")
path.gsub(Regexp.new("^/rest"),"/xyz/actionset/process")
end
}
end
end
|
module DC
module Search
# Our first stab at a Search::Parser will just use simple regexs to pull out
# fielded queries ... so, no nesting.
#
# All the regex matchers live in the Search module.
#
# We should try to adopt Google conventions, if possible, after:
# http://www.google.com/help/cheatsheet.html
class Parser
include DC::Access
# Parse a raw query_string, returning a DC::Search::Query that knows
# about the text, fields, projects, and attributes it's composed of.
# Quoted fields (type: "value") are extracted first, then bare fields
# (type:value); whatever remains becomes the free-text portion.
def parse(query_string='')
@text, @access, @source_document = nil, nil, nil
@fields, @accounts, @groups, @projects, @project_ids, @doc_ids, @attributes =
[], [], [], [], [], [], []
quoted_fields = query_string.scan(Matchers::QUOTED_FIELD).map {|m| m[0] }
# Strip quoted fields before scanning for bare ones so they aren't matched twice.
bare_fields = query_string.gsub(Matchers::QUOTED_FIELD, '').scan(Matchers::BARE_FIELD)
search_text = query_string.gsub(Matchers::ALL_FIELDS, '').squeeze(' ').strip
process_search_text(search_text)
process_fields_and_projects(bare_fields, quoted_fields)
Query.new(:text => @text, :fields => @fields, :projects => @projects,
:accounts => @accounts, :groups => @groups, :project_ids => @project_ids,
:doc_ids => @doc_ids, :attributes => @attributes, :access => @access,
:source_document => @source_document)
end
# Convert the full-text search into a form that our index can handle.
# Leaves @text nil for empty searches; boolean "OR" is rewritten to the
# index's own operator (QUERY_OR).
def process_search_text(text)
return if text.empty?
@text = text.gsub(Matchers::BOOLEAN_OR, QUERY_OR)
end
# Extract the portions of the query that are fields, attributes,
# and projects.
#
# Bare entries look like `type:value`; quoted entries look like
# `type: "some value"` (surrounding quotes are stripped). Recognized types
# are routed to their accumulators; anything else falls through to
# #process_field.
#
# Fix: group names are downcased when collected, matching the downcasing
# already applied to the field type, so group searches are case-insensitive.
def process_fields_and_projects(bare, quoted)
  bare.map! {|f| f.split(/:\s*/) }
  quoted.map! do |f|
    type = f.match(/(.+?):\s*/)[1]
    value = f.sub(/(.+?):\s*/, '').gsub(/(^['"]|['"]$)/, '')
    [type, value]
  end
  (bare + quoted).each do |pair|
    type, value = *pair
    type = type.downcase
    case type
    when 'account' then @accounts << value.to_i
    when 'group' then @groups << value.downcase
    when 'access' then @access = ACCESS_MAP[value.strip.to_sym]
    when 'project' then @projects << value
    when 'projectid' then @project_ids << value.to_i
    when 'document' then @doc_ids << value.to_i
    when 'related' then @source_document = Document.find(value.to_i)
    else
      process_field(type, value)
    end
  end
end
# Convert an individual field or attribute search into a DC::Search::Field.
# Attributes and entity fields go into their accumulators; anything else is
# appended to the free-text search (@text).
def process_field(kind, value)
field = Field.new(match_kind(kind), value.strip)
return @attributes << field if field.attribute?
return @fields << field if field.entity?
@text ||= ''
@text += " #{field}"
end
# Convert a field kind string into its canonical form, by searching
# through all the valid kinds for a match. Unknown kinds are returned
# unchanged.
def match_kind(kind)
  pattern = Regexp.new(kind.downcase)
  canonical = DC::VALID_KINDS.find { |valid_kind| valid_kind.match(pattern) }
  canonical || kind
end
end
end
end
downcase-ing group searches
module DC
module Search
# Our first stab at a Search::Parser will just use simple regexs to pull out
# fielded queries ... so, no nesting.
#
# All the regex matchers live in the Search module.
#
# We should try to adopt Google conventions, if possible, after:
# http://www.google.com/help/cheatsheet.html
class Parser
include DC::Access
# Parse a raw query_string, returning a DC::Search::Query that knows
# about the text, fields, projects, and attributes it's composed of.
def parse(query_string='')
@text, @access, @source_document = nil, nil, nil
@fields, @accounts, @groups, @projects, @project_ids, @doc_ids, @attributes =
[], [], [], [], [], [], []
quoted_fields = query_string.scan(Matchers::QUOTED_FIELD).map {|m| m[0] }
# Strip quoted fields before scanning for bare ones so they aren't matched twice.
bare_fields = query_string.gsub(Matchers::QUOTED_FIELD, '').scan(Matchers::BARE_FIELD)
search_text = query_string.gsub(Matchers::ALL_FIELDS, '').squeeze(' ').strip
process_search_text(search_text)
process_fields_and_projects(bare_fields, quoted_fields)
Query.new(:text => @text, :fields => @fields, :projects => @projects,
:accounts => @accounts, :groups => @groups, :project_ids => @project_ids,
:doc_ids => @doc_ids, :attributes => @attributes, :access => @access,
:source_document => @source_document)
end
# Convert the full-text search into a form that our index can handle.
# Leaves @text nil for empty searches.
def process_search_text(text)
return if text.empty?
@text = text.gsub(Matchers::BOOLEAN_OR, QUERY_OR)
end
# Extract the portions of the query that are fields, attributes,
# and projects. Recognized types are routed to their accumulators;
# anything else falls through to #process_field.
def process_fields_and_projects(bare, quoted)
bare.map! {|f| f.split(/:\s*/) }
quoted.map! do |f|
type = f.match(/(.+?):\s*/)[1]
value = f.sub(/(.+?):\s*/, '').gsub(/(^['"]|['"]$)/, '')
[type, value]
end
(bare + quoted).each do |pair|
type, value = *pair
type = type.downcase
case type
when 'account' then @accounts << value.to_i
# Group names downcased so group searches are case-insensitive.
when 'group' then @groups << value.downcase
when 'access' then @access = ACCESS_MAP[value.strip.to_sym]
when 'project' then @projects << value
when 'projectid' then @project_ids << value.to_i
when 'document' then @doc_ids << value.to_i
when 'related' then @source_document = Document.find(value.to_i)
else
process_field(type, value)
end
end
end
# Convert an individual field or attribute search into a DC::Search::Field.
# Unrecognized kinds end up appended to the free-text search.
def process_field(kind, value)
field = Field.new(match_kind(kind), value.strip)
return @attributes << field if field.attribute?
return @fields << field if field.entity?
@text ||= ''
@text += " #{field}"
end
# Convert a field kind string into its canonical form, by searching
# through all the valid kinds for a match. Unknown kinds pass through.
def match_kind(kind)
matcher = Regexp.new(kind.downcase)
DC::VALID_KINDS.detect {|canonical| canonical.match(matcher) } || kind
end
end
end
end |
module DataMapper
module Adapters
extend Chainable
extend DataMapper::Assertions
# Set up an adapter for a storage engine
#
# @see DataMapper.setup
#
# @param repository_name the repository the adapter is bound to
# @param [Hash, Addressable::URI, String] options connection options or URI
#
# @return [Object] a new instance of the resolved adapter class
#
# @api private
def self.new(repository_name, options)
options = normalize_options(options)
# :adapter must exist after normalization; fetch raises if it is missing.
adapter_class(options.fetch(:adapter)).new(repository_name, options)
end
# The path used to require the in memory adapter
#
# Set this if you want to register your own adapter
# to be used when you specify an 'in_memory' connection
# during
#
# @see DataMapper.setup
#
# @param [String] path
# the path used to require the desired in memory adapter
#
# @api semipublic
def self.in_memory_adapter_path=(path)
# Overrides the lazily-set default (see in_memory_adapter_path).
@in_memory_adapter_path = path
end
# The path used to require the in memory adapter
#
# @see DataMapper.setup
#
# @return [String]
#   the path used to require the desired in memory adapter
#
# @api semipublic
def self.in_memory_adapter_path
  # Lazily default to the in-memory adapter bundled with dm-core.
  @in_memory_adapter_path = 'dm-core/adapters/in_memory_adapter' unless @in_memory_adapter_path
  @in_memory_adapter_path
end
class << self
private
# Normalize the arguments passed to new()
#
# Turns options hash or connection URI into the options hash used
# by the adapter.
#
# @param [Hash, Addressable::URI, String] options
# the options to be normalized
#
# @return [Mash]
# the options normalized as a Mash
#
# @api private
def normalize_options(options)
# Dispatch on the concrete type; unsupported types raise via assert_kind_of.
case options
when Hash then normalize_options_hash(options)
when Addressable::URI then normalize_options_uri(options)
when String then normalize_options_string(options)
else
assert_kind_of 'options', options, Hash, Addressable::URI, String
end
end
# Normalize Hash options into a Mash
#
# @param [Hash] hash
# the hash to be normalized
#
# @return [Mash]
# the options normalized as a Mash
#
# @api private
def normalize_options_hash(hash)
# to_mash gives indifferent string/symbol key access — provided by a core
# extension elsewhere in dm-core; TODO confirm provider.
hash.to_mash
end
# Normalize Addressable::URI options into a Mash
#
# @param [Addressable::URI] uri
# the uri to be normalized
#
# @return [Mash]
# the options normalized as a Mash
#
# @api private
def normalize_options_uri(uri)
options = normalize_options_hash(uri.to_hash)
# Extract the name/value pairs from the query portion of the
# connection uri, and set them as options directly.
if options.fetch(:query)
options.update(uri.query_values)
end
# The URI scheme (e.g. mysql://) names the adapter.
options[:adapter] = options.fetch(:scheme)
options
end
# Normalize String options into a Mash
#
# @param [String] string
# the string to be normalized
#
# @return [Mash]
# the options normalized as a Mash
#
# @api private
def normalize_options_string(string)
# Parse into an Addressable::URI and reuse the URI normalization path.
normalize_options_uri(Addressable::URI.parse(string))
end
# Return the adapter class constant
#
# @example
# DataMapper::Adapters.send(:adapter_class, 'mysql') # => DataMapper::Adapters::MysqlAdapter
#
# @param [Symbol] name
# the name of the adapter
#
# @return [Class]
# the AbstractAdapter subclass
#
# @api private
def adapter_class(name)
adapter_name = normalize_adapter_name(name)
class_name = (ActiveSupport::Inflector.camelize(adapter_name) << 'Adapter').to_sym
# Only require the adapter library when its constant isn't defined yet.
load_adapter(adapter_name) unless const_defined?(class_name)
const_get(class_name)
end
# Return the name of the adapter
#
# @example
#   DataMapper::Adapters.adapter_name('MysqlAdapter') # => 'mysql'
#
# @param [String] const_name
#   the adapter constant name
#
# @return [String]
#   the name of the adapter
#
# @api semipublic
def adapter_name(const_name)
  stripped = const_name.to_s.sub(/Adapter\z/, '')
  stripped.downcase
end
# Require the adapter library
#
# Prefers the modern "dm-#{name}-adapter" gem name, falling back to the
# in-memory adapter path or the legacy "#{name}_adapter" filename. If the
# fallback also fails to load, the ORIGINAL LoadError is re-raised so the
# user sees the canonical gem name in the error message instead of the
# legacy fallback path.
#
# @param [String, Symbol] name
# the name of the adapter
#
# @return [Boolean]
# true if the adapter is loaded
#
# @api private
def load_adapter(name)
require "dm-#{name}-adapter"
rescue LoadError => original_error
begin
require in_memory_adapter?(name) ? in_memory_adapter_path : legacy_path(name)
rescue LoadError
raise original_error
end
end
# Returns whether or not the given adapter name is considered an in memory
# adapter
#
# @param [String, Symbol] name
#   the name of the adapter
#
# @return [Boolean]
#   true if the adapter is considered to be an in memory adapter
#
# @api private
def in_memory_adapter?(name)
  # String-compare so Symbols and Strings are treated alike.
  'in_memory'.eql?(name.to_s)
end
# Returns the fallback filename that would be used to require the named
# adapter
#
# The fallback format is "#{name}_adapter" and will be phased out in favor
# of the properly 'namespaced' "dm-#{name}-adapter" format.
#
# @param [String, Symbol] name
#   the name of the adapter to require
#
# @return [String]
#   the filename that gets required for the adapter identified by name
#
# @api private
def legacy_path(name)
  [name, 'adapter'].join('_')
end
# Adjust the adapter name to match the name used in the gem providing the
# adapter ('sqlite3' is historically aliased to 'sqlite').
#
# @param [String, Symbol] name
#   the name of the adapter
#
# @return [String]
#   the normalized adapter name
#
# @api private
def normalize_adapter_name(name)
  normalized = name.to_s
  normalized == 'sqlite3' ? 'sqlite' : normalized
end
end
extendable do
# @api private
# Hook invoked when an adapter constant is added; intentionally a no-op here.
def const_added(const_name)
end
end
end # module Adapters
end # module DataMapper
Raise original LoadError when the adapter cannot be required
module DataMapper
module Adapters
extend Chainable
extend DataMapper::Assertions
# Set up an adapter for a storage engine
#
# @see DataMapper.setup
#
# @param repository_name the repository the adapter is bound to
# @param [Hash, Addressable::URI, String] options connection options or URI
#
# @api private
def self.new(repository_name, options)
options = normalize_options(options)
# :adapter must exist after normalization; fetch raises if it is missing.
adapter_class(options.fetch(:adapter)).new(repository_name, options)
end
# The path used to require the in memory adapter
#
# Set this if you want to register your own adapter
# to be used when you specify an 'in_memory' connection
# during
#
# @see DataMapper.setup
#
# @param [String] path
# the path used to require the desired in memory adapter
#
# @api semipublic
def self.in_memory_adapter_path=(path)
# Overrides the lazily-set default (see in_memory_adapter_path).
@in_memory_adapter_path = path
end
# The path used to require the in memory adapter
#
# @see DataMapper.setup
#
# @return [String]
# the path used to require the desired in memory adapter
#
# @api semipublic
def self.in_memory_adapter_path
# Lazily default to the in-memory adapter bundled with dm-core.
@in_memory_adapter_path ||= 'dm-core/adapters/in_memory_adapter'
end
class << self
private
# Normalize the arguments passed to new()
#
# Turns options hash or connection URI into the options hash used
# by the adapter.
#
# @param [Hash, Addressable::URI, String] options
# the options to be normalized
#
# @return [Mash]
# the options normalized as a Mash
#
# @api private
def normalize_options(options)
# Dispatch on the concrete type; unsupported types raise via assert_kind_of.
case options
when Hash then normalize_options_hash(options)
when Addressable::URI then normalize_options_uri(options)
when String then normalize_options_string(options)
else
assert_kind_of 'options', options, Hash, Addressable::URI, String
end
end
# Normalize Hash options into a Mash
#
# @param [Hash] hash
# the hash to be normalized
#
# @return [Mash]
# the options normalized as a Mash
#
# @api private
def normalize_options_hash(hash)
# to_mash gives indifferent string/symbol key access — TODO confirm provider.
hash.to_mash
end
# Normalize Addressable::URI options into a Mash
#
# @param [Addressable::URI] uri
# the uri to be normalized
#
# @return [Mash]
# the options normalized as a Mash
#
# @api private
def normalize_options_uri(uri)
options = normalize_options_hash(uri.to_hash)
# Extract the name/value pairs from the query portion of the
# connection uri, and set them as options directly.
if options.fetch(:query)
options.update(uri.query_values)
end
# The URI scheme (e.g. mysql://) names the adapter.
options[:adapter] = options.fetch(:scheme)
options
end
# Normalize String options into a Mash
#
# @param [String] string
# the string to be normalized
#
# @return [Mash]
# the options normalized as a Mash
#
# @api private
def normalize_options_string(string)
# Parse into an Addressable::URI and reuse the URI normalization path.
normalize_options_uri(Addressable::URI.parse(string))
end
# Return the adapter class constant
#
# @example
# DataMapper::Adapters.send(:adapter_class, 'mysql') # => DataMapper::Adapters::MysqlAdapter
#
# @param [Symbol] name
# the name of the adapter
#
# @return [Class]
# the AbstractAdapter subclass
#
# @api private
def adapter_class(name)
adapter_name = normalize_adapter_name(name)
class_name = (ActiveSupport::Inflector.camelize(adapter_name) << 'Adapter').to_sym
# Only require the adapter library when its constant isn't defined yet.
load_adapter(adapter_name) unless const_defined?(class_name)
const_get(class_name)
end
# Return the name of the adapter
#
# @example
# DataMapper::Adapters.adapter_name('MysqlAdapter') # => 'mysql'
#
# @param [String] const_name
# the adapter constant name
#
# @return [String]
# the name of the adapter
#
# @api semipublic
# Derive the adapter's short name from its constant name,
# e.g. 'MysqlAdapter' => 'mysql'.
def adapter_name(const_name)
  name = const_name.to_s
  name = name.chomp('Adapter')
  name.downcase
end
# Require the adapter library
#
# @param [String, Symbol] name
# the name of the adapter
#
# @return [Boolean]
# true if the adapter is loaded
#
# @api private
def load_adapter(name)
require "dm-#{name}-adapter"
rescue LoadError => original_error
begin
# Fall back to the bundled in-memory adapter or the legacy "<name>_adapter" path.
require in_memory_adapter?(name) ? in_memory_adapter_path : legacy_path(name)
rescue LoadError
# Re-raise the ORIGINAL error so the message names the canonical gem.
raise original_error
end
end
# Returns whether or not the given adapter name is considered an in memory
# adapter
#
# @param [String, Symbol] name
#   the name of the adapter
#
# @return [Boolean]
#   true if the adapter is considered to be an in memory adapter
#
# @api private
def in_memory_adapter?(name)
  # String-compare so Symbols and Strings are treated alike.
  in_memory = 'in_memory'
  in_memory == name.to_s
end
# Returns the fallback filename that would be used to require the named adapter
#
# The fallback format is "#{name}_adapter" and will be phased out in favor of
# the properly 'namespaced' "dm-#{name}-adapter" format.
#
# @param [String, Symbol] name
# the name of the adapter to require
#
# @return [String]
# the filename that gets required for the adapter identified by name
#
# @api private
# Legacy require path: "<name>_adapter" (being phased out in favor of
# the "dm-<name>-adapter" gem naming).
def legacy_path(name)
  format('%s_adapter', name)
end
# Adjust the adapter name to match the name used in the gem providing the adapter
#
# @param [String, Symbol] name
# the name of the adapter
#
# @return [String]
# the normalized adapter name
#
# @api private
# 'sqlite3' is historically aliased to 'sqlite'; everything else passes
# through stringified.
def normalize_adapter_name(name)
  original = name.to_s
  return 'sqlite' if original == 'sqlite3'
  original
end
end
extendable do
# @api private
# Hook invoked when an adapter constant is added; intentionally a no-op here.
def const_added(const_name)
end
end
end # module Adapters
end # module DataMapper
|
## aborts a chef run if /etc/nochef exists. this allows for manual maintenance
## to be performed without worrying about someone (or some *thing*) coming along
## and restarting chef on you.
# File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
# NOTE(review): iso8601 needs 'time' to be loaded — Chef normally loads it,
# but confirm for standalone use.
if ::File.exist?("/etc/nochef")
  ctime = ::File::Stat.new("/etc/nochef").ctime.utc.iso8601
  msg = IO.read("/etc/nochef").strip
  # An empty file still aborts the run, with a placeholder reason.
  msg = "no reason given" if msg.empty?
  raise "/etc/nochef created at #{ctime}: #{msg}"
end
Log on check_norun failure
So it appears in the log; exception messages aren't generally logged
## aborts a chef run if /etc/nochef exists. this allows for manual maintenance
## to be performed without worrying about someone (or some *thing*) coming along
## and restarting chef on you.
# File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
if ::File.exist?("/etc/nochef")
  ctime = ::File::Stat.new("/etc/nochef").ctime.utc.iso8601
  msg = IO.read("/etc/nochef").strip
  # An empty file still aborts the run, with a placeholder reason.
  msg = "no reason given" if msg.empty?
  # Build the message once so the log line and the exception cannot drift apart.
  reason = "/etc/nochef created at #{ctime}: #{msg}"
  # Log explicitly: raised exception messages aren't reliably written to the Chef log.
  Chef::Log.fatal(reason)
  raise reason
end
|
# -*- encoding: utf-8 -*-
$LOAD_PATH.push File.expand_path('../lib', __FILE__)
require 'guard/jasmine/version'
# Gem specification for guard-jasmine: headless Jasmine testing on PhantomJS via Guard.
Gem::Specification.new do |s|
  s.name        = 'guard-jasmine'
  s.version     = Guard::JasmineVersion::VERSION
  s.platform    = Gem::Platform::RUBY
  s.authors     = ['Michael Kessler', 'Nathan Stitt']
  s.email       = ['michi@flinkfinger.com', 'nathan@stitt.org']
  s.homepage    = 'https://github.com/guard/guard-jasmine'
  s.summary     = 'Guard gem for headless testing with Jasmine'
  s.description = 'Guard::Jasmine automatically tests your Jasmine specs on PhantomJS'
  s.license     = 'MIT'

  s.required_rubygems_version = '>= 1.3.6'
  # NOTE(review): rubyforge_project is deprecated in newer RubyGems.
  s.rubyforge_project = 'guard-jasmine'

  # Dependency declarations normalized to a single paren-free style;
  # 'guard-compat' was the only parenthesized call.
  s.add_dependency 'guard', '~> 2.8'
  s.add_dependency 'guard-compat', '~> 1.2'
  s.add_dependency 'jasmine', '~> 2.1'
  s.add_dependency 'multi_json', '~>1.10'
  s.add_dependency 'childprocess', '~>0.5'
  s.add_dependency 'thor', '~>0.19'
  # NOTE(review): tilt has no version constraint — consider pinning.
  s.add_dependency 'tilt'

  s.add_development_dependency 'bundler'

  s.files        = Dir.glob('{bin,lib}/**/*') + %w(LICENSE README.md)
  s.executables  = ['guard-jasmine', 'guard-jasmine-debug']
  s.require_path = 'lib'
end
Upgrade and lock new versions
# -*- encoding: utf-8 -*-
$LOAD_PATH.push File.expand_path('../lib', __FILE__)
require 'guard/jasmine/version'
# Gem specification for guard-jasmine: headless Jasmine testing on PhantomJS via Guard.
Gem::Specification.new do |s|
s.name = 'guard-jasmine'
s.version = Guard::JasmineVersion::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ['Michael Kessler', 'Nathan Stitt']
s.email = ['michi@flinkfinger.com', 'nathan@stitt.org']
s.homepage = 'https://github.com/guard/guard-jasmine'
s.summary = 'Guard gem for headless testing with Jasmine'
s.description = 'Guard::Jasmine automatically tests your Jasmine specs on PhantomJS'
s.license = 'MIT'
s.required_rubygems_version = '>= 1.3.6'
# NOTE(review): rubyforge_project is deprecated in newer RubyGems.
s.rubyforge_project = 'guard-jasmine'
s.add_dependency 'guard', '~> 2.8'
s.add_dependency 'guard-compat', '~> 1.2'
s.add_dependency 'jasmine', '~>2.2'
# NOTE(review): '~>1.1' is looser than the earlier '~>1.10' constraint —
# confirm a digit wasn't dropped here.
s.add_dependency 'multi_json', '~>1.1'
s.add_dependency 'childprocess', '~>0.5'
s.add_dependency 'thor', '~>0.19'
# Coverage uses tilt for spec files
s.add_dependency 'tilt', '~>2.0'
s.add_development_dependency 'bundler'
s.files = Dir.glob('{bin,lib}/**/*') + %w(LICENSE README.md)
s.executables = ['guard-jasmine', 'guard-jasmine-debug']
s.require_path = 'lib'
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem specification for has_inherited 2.1.0.
# Generated by jeweler — regenerate via `rake gemspec` instead of hand-editing.
Gem::Specification.new do |s|
s.name = %q{has_inherited}
s.version = "2.1.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = [%q{Mark Turner}, %q{Steve Burkett}]
s.date = %q{2011-10-11}
s.description = %q{The intention of this library is to make it easy to inherit particular variables between models in rails apps. We start with a parent model that will function as a pseudo-polymorphic association for children objects.}
s.email = %q{mark@amerine.net}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
s.files = [
".document",
".travis.yml",
"Gemfile",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"has_inherited.gemspec",
"lib/has_inherited.rb",
"spec/has_inherited_spec.rb",
"spec/spec_helper.rb"
]
s.homepage = %q{http://github.com/amerine/has_inherited}
s.require_paths = [%q{lib}]
s.rubygems_version = %q{1.8.6}
s.summary = %q{Easily share variables between Rails models with inheritance.}
# Jeweler emits three equivalent dependency branches to support old RubyGems.
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<activerecord>, [">= 0"])
s.add_development_dependency(%q<bacon>, [">= 0"])
s.add_development_dependency(%q<sqlite3>, [">= 0"])
s.add_development_dependency(%q<jeweler>, [">= 0"])
else
s.add_dependency(%q<activerecord>, [">= 0"])
s.add_dependency(%q<bacon>, [">= 0"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<jeweler>, [">= 0"])
end
else
s.add_dependency(%q<activerecord>, [">= 0"])
s.add_dependency(%q<bacon>, [">= 0"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<jeweler>, [">= 0"])
end
end
Regenerate gemspec for version 2.2.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem specification for has_inherited 2.2.0.
# Generated by jeweler — regenerate via `rake gemspec` instead of hand-editing.
Gem::Specification.new do |s|
s.name = %q{has_inherited}
s.version = "2.2.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = [%q{Mark Turner}, %q{Steve Burkett}]
# NOTE(review): version bumped to 2.2.0 but the date still reads 2011-10-11 —
# confirm whether the regeneration should have refreshed it.
s.date = %q{2011-10-11}
s.description = %q{The intention of this library is to make it easy to inherit particular variables between models in rails apps. We start with a parent model that will function as a pseudo-polymorphic association for children objects.}
s.email = %q{mark@amerine.net}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
s.files = [
".document",
".travis.yml",
"Gemfile",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"has_inherited.gemspec",
"lib/has_inherited.rb",
"spec/has_inherited_spec.rb",
"spec/spec_helper.rb"
]
s.homepage = %q{http://github.com/amerine/has_inherited}
s.require_paths = [%q{lib}]
s.rubygems_version = %q{1.8.6}
s.summary = %q{Easily share variables between Rails models with inheritance.}
# Jeweler emits three equivalent dependency branches to support old RubyGems.
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<activerecord>, [">= 0"])
s.add_development_dependency(%q<bacon>, [">= 0"])
s.add_development_dependency(%q<sqlite3>, [">= 0"])
s.add_development_dependency(%q<jeweler>, [">= 0"])
else
s.add_dependency(%q<activerecord>, [">= 0"])
s.add_dependency(%q<bacon>, [">= 0"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<jeweler>, [">= 0"])
end
else
s.add_dependency(%q<activerecord>, [">= 0"])
s.add_dependency(%q<bacon>, [">= 0"])
s.add_dependency(%q<sqlite3>, [">= 0"])
s.add_dependency(%q<jeweler>, [">= 0"])
end
end
|
module Heelspec
  # Heel bot that fetches and posts the OCT Huaxia Theater movie schedule.
  class OctMovie < Heel::Bot
    require "open-uri"
    require "nokogiri"

    OCT_MOVIE_DOMAIN_NAME = "http://www.octeshow.com/".freeze
    OCT_MOVIE_SCHEDULE_PAGE_URL = "http://www.octeshow.com/index.php?a=index&m=News&id=74".freeze

    def initialize
      @name = "OCT Movie"
      @version = "1.0.0"
      @summary = "Schedule of OCT Huaxia Theater"
      @author = "David Zhang"
      @license = "MIT"
      @helptext = ""
      @triggers = ["!octmovie"]
    end

    # rubocop:disable Lint/UnusedMethodArgument
    # CLI entry point: print the schedule image URLs.
    def run(cmd)
      puts get_schedule_img
    end

    # Chat entry point: build the message payload.
    # Fix: previously each image was wrapped in its own attachment with empty
    # title/text; a single attachment now carries the full :images list.
    def serve(request)
      image_refs = get_schedule_img.map { |img| { :url => img } }
      {
        :text => "OCT Movie Schedule",
        :attachments => {
          :title => "",
          :text => "",
          :color => "#666666",
          :images => image_refs
        }
      }
    end

    private

    # Scrape the schedule index for its per-page links, then collect the first
    # schedule image from each linked page. Returns an array of absolute URLs.
    def get_schedule_img
      # URI.open: Kernel#open no longer handles URLs on modern Ruby.
      page = Nokogiri::HTML(URI.open(OCT_MOVIE_SCHEDULE_PAGE_URL))
      page.encoding = "UTF-8"
      urls = page.css("ul.dropdown-menu-right")[0].css("li").map do |item|
        "#{OCT_MOVIE_DOMAIN_NAME}#{item.css("a")[0]["href"]}"
      end
      urls.map do |page_url|
        sub_page = Nokogiri::HTML(URI.open(page_url))
        sub_page.encoding = "UTF-8"
        div = sub_page.css("div.table-responsive").first
        "#{OCT_MOVIE_DOMAIN_NAME}#{div.css("img")[0]["src"]}"
      end
    end
  end
end
Bugfix oct movie
module Heelspec
  # Heel bot that fetches and posts the OCT Huaxia Theater movie schedule.
  class OctMovie < Heel::Bot
    require "open-uri"
    require "nokogiri"

    OCT_MOVIE_DOMAIN_NAME = "http://www.octeshow.com/".freeze
    OCT_MOVIE_SCHEDULE_PAGE_URL = "http://www.octeshow.com/index.php?a=index&m=News&id=74".freeze

    def initialize
      @name = "OCT Movie"
      @version = "1.0.0"
      @summary = "Schedule of OCT Huaxia Theater"
      @author = "David Zhang"
      @license = "MIT"
      @helptext = ""
      @triggers = ["!octmovie"]
    end

    # rubocop:disable Lint/UnusedMethodArgument
    # CLI entry point: print the schedule image URLs.
    def run(cmd)
      puts get_schedule_img
    end

    # Chat entry point: build the message payload (one attachment whose
    # :images list holds every schedule page image).
    def serve(request)
      image_refs = get_schedule_img.map { |img| { :url => img } }
      {
        :text => "OCT Movie Schedule",
        :attachments => {
          :title => "",
          :text => "",
          :color => "#666666",
          :images => image_refs
        }
      }
    end

    private

    # Scrape the schedule index for its per-page links, then collect the first
    # schedule image from each linked page. Returns an array of absolute URLs.
    def get_schedule_img
      # URI.open: Kernel#open no longer handles URLs on modern Ruby.
      page = Nokogiri::HTML(URI.open(OCT_MOVIE_SCHEDULE_PAGE_URL))
      page.encoding = "UTF-8"
      urls = page.css("ul.dropdown-menu-right")[0].css("li").map do |item|
        "#{OCT_MOVIE_DOMAIN_NAME}#{item.css("a")[0]["href"]}"
      end
      urls.map do |page_url|
        sub_page = Nokogiri::HTML(URI.open(page_url))
        sub_page.encoding = "UTF-8"
        div = sub_page.css("div.table-responsive").first
        "#{OCT_MOVIE_DOMAIN_NAME}#{div.css("img")[0]["src"]}"
      end
    end
  end
end
#require 'flickraw'
require 'flickraw-cached'
require 'active_support'
require 'net/http'
require 'open-uri'
require 'digest/sha1'
# Tag every Flickr photo that lacks a hash:sha1 machine tag with the SHA1 of
# its original file. Credentials come from the environment.
api_key = ENV['FLICKR_API_KEY']
secret_key = ENV['FLICKR_SEC_KEY']
access_token = ENV['FLICKR_ACCESS_TOKEN']
access_secret = ENV['FLICKR_ACCESS_SECRET']

FlickRaw.api_key = api_key
FlickRaw.shared_secret = secret_key

login = {}
if access_token
  flickr.access_token = access_token
  flickr.access_secret = access_secret
  # From here on we are logged in.
  login = flickr.test.login
  puts "You are now authenticated as #{login.username}"
else
  begin
    token = flickr.get_request_token
    auth_url = flickr.get_authorize_url(token['oauth_token'], :perms => 'delete')
    # Message typo fixes: "browser" (was "process") and "authentication".
    puts "Open this url in your browser to complete the authentication process : #{auth_url}"
    puts "Copy here the number given when you complete the process."
    verify = gets.strip
    flickr.get_access_token(token['oauth_token'], token['oauth_token_secret'], verify)
    login = flickr.test.login
    puts "You are now authenticated as #{login.username} with token #{flickr.access_token} and secret #{flickr.access_secret}"
  rescue FlickRaw::FailedResponse => e
    puts "Authentication failed : #{e.msg}"
  end
end

per_page = 100
count = 0
all_photos = []

# Page through the photo stream, collecting photos without a hash:sha1 tag.
(1..500).each do |page|
  photos = flickr.people.getPhotos(:user_id => login.id,
                                   :extras => 'tags,machine_tags,url_o',
                                   :page => page,
                                   :per_page => per_page)
  break if photos.size.zero?
  puts("Downloaded data for #{photos.size} photos")
  photos.each do |p|
    hashes = p.machine_tags.split.select { |tag| tag.start_with? 'hash:sha1' }
    if hashes.empty?
      all_photos << p
    else
      puts(hashes)
    end
  end
  count += photos.size
end

puts("Total: #{count} To process: #{all_photos.count}")

all_photos.each do |p|
  puts("Downloading #{p.url_o}")
  # URI.open: Kernel#open no longer handles URLs on modern Ruby.
  URI.open(p.url_o, 'rb') do |read_file|
    hash = Digest::SHA1.hexdigest(read_file.read)
    flickr.photos.addTags(:photo_id => p.id, :tags => "hash:sha1=#{hash}")
    puts("Setting hash tag to #{p.id} as #{hash}")
  end
end
Use 64 threads for downloading
#require 'flickraw'
require 'flickraw-cached'
require 'active_support'
require 'net/http'
require 'open-uri'
require 'digest/sha1'
require 'work_queue'
require 'thread'
# Tag every Flickr photo that lacks a hash:sha1 machine tag with the SHA1 of
# its original file, using a 64-worker download pool.
api_key = ENV['FLICKR_API_KEY']
secret_key = ENV['FLICKR_SEC_KEY']
access_token = ENV['FLICKR_ACCESS_TOKEN']
access_secret = ENV['FLICKR_ACCESS_SECRET']

FlickRaw.api_key = api_key
FlickRaw.shared_secret = secret_key

login = {}
if access_token
  flickr.access_token = access_token
  flickr.access_secret = access_secret
  # From here on we are logged in.
  login = flickr.test.login
  puts "You are now authenticated as #{login.username}"
else
  begin
    token = flickr.get_request_token
    auth_url = flickr.get_authorize_url(token['oauth_token'], :perms => 'delete')
    # Message typo fixes: "browser" (was "process") and "authentication".
    puts "Open this url in your browser to complete the authentication process : #{auth_url}"
    puts "Copy here the number given when you complete the process."
    verify = gets.strip
    flickr.get_access_token(token['oauth_token'], token['oauth_token_secret'], verify)
    login = flickr.test.login
    puts "You are now authenticated as #{login.username} with token #{flickr.access_token} and secret #{flickr.access_secret}"
  rescue FlickRaw::FailedResponse => e
    puts "Authentication failed : #{e.msg}"
  end
end

per_page = 500
count = 0
queued = 0
semaphore = Mutex.new
queue = WorkQueue.new(64)

# One job per untagged photo: download the original, SHA1 it, and write the
# hash back as a machine tag. The mutex serializes Flickr API calls.
DownloadJob = Struct.new(:semaphore, :queue, :photo, :url) do
  def download
    # URI.open: Kernel#open no longer handles URLs on modern Ruby.
    URI.open(url, 'rb') do |read_file|
      hash = Digest::SHA1.hexdigest(read_file.read)
      tags = "hash:sha1=#{hash}"
      semaphore.synchronize do
        flickr.photos.addTags(:photo_id => photo,
                              :tags => tags)
      end
      print("#{queue.cur_tasks} jobs remain on queue. Done #{url} as #{hash}\r")
    end
  end
end

(1..500).each do |page|
  puts("Downloading photo metadata from #{per_page * (page - 1)} to #{page * per_page}")
  photos = semaphore.synchronize do
    flickr.people.getPhotos(:user_id => login.id,
                            :extras => 'tags,machine_tags,url_o',
                            :page => page,
                            :per_page => per_page)
  end
  break if photos.size.zero?
  puts("Downloaded data for #{photos.size} photos")
  photos.each do |p|
    # Skip photos that already carry a hash:sha1 machine tag.
    hashes = p.machine_tags.split.select { |tag| tag.start_with? 'hash:sha1' }
    if hashes.empty?
      queued += 1
      job = DownloadJob.new(semaphore, queue, p.id, p.url_o)
      queue.enqueue_b { job.download }
    end
  end
  count += photos.size
end

puts("Total: #{count} to process: #{queued}")
queue.join
|
geneid 1.4.4: New formula
geneid is a program to predict genes in anonymous genomic sequences
designed with a hierarchical structure.
http://genome.crg.es/software/geneid/
require 'formula'
# Homebrew formula for geneid, a hierarchical gene-prediction program
# (http://genome.crg.es/software/geneid/).
class Geneid < Formula
homepage 'http://genome.crg.es/software/geneid/'
url 'ftp://genome.crg.es/pub/software/geneid/geneid_v1.4.4.Jan_13_2011.tar.gz'
sha1 '9cbed32d0bfb530252f97b83807da2284967379b'
version '1.4.4'
def install
# ENV.deparallelize # if your formula fails when building in parallel
system 'make'
bin.install Dir['bin/*']
doc.install 'README', *Dir['docs/*']
# Parameter files are installed under share/geneid (see caveats below).
(share/'geneid').install Dir['param/*.param']
end
def caveats; <<-EOS.undent
The parameter files are installed in
#{HOMEBREW_PREFIX/'share/geneid'}
EOS
end
test do
# NOTE(review): Homebrew convention prefers system "#{bin}/geneid", "-h".
system 'geneid -h'
end
end
|
# Gem specification for remote_feature: run Cucumber features defined in Writeboards.
Gem::Specification.new do |s|
  s.name = "remote_feature"
  s.version = "0.1.2"
  s.date = "2008-11-15"
  s.summary = "Run Cucumber Features that are defined in Writeboards"
  s.email = "mhennemeyer@gmail.com"
  s.homepage = "http://github.com/mhennemeyer/remote_feature"
  s.description = "Run Cucumber Features that are defined in Writeboards"
  # NOTE(review): has_rdoc is deprecated in modern RubyGems.
  s.has_rdoc = false
  s.authors = ["Matthias Hennemeyer"]
  s.files = [
    "README",
    "remote_feature.gemspec",
    "lib/remote_feature.rb"]
  s.test_files = ["spec/remote_feature_spec.rb"]
  # Fixed dependency name: the gem is published under the GitHub-era
  # "mhennemeyer-rwriteboard" name, not "rwriteboard".
  s.add_dependency("mhennemeyer-rwriteboard", ["> 0.1.1"])
end
Fixed typo in dependency declaration.
# Gem specification for remote_feature: run Cucumber features defined in Writeboards.
Gem::Specification.new do |s|
s.name = "remote_feature"
s.version = "0.1.2"
s.date = "2008-11-15"
s.summary = "Run Cucumber Features that are defined in Writeboards"
s.email = "mhennemeyer@gmail.com"
s.homepage = "http://github.com/mhennemeyer/remote_feature"
s.description = "Run Cucumber Features that are defined in Writeboards"
# NOTE(review): has_rdoc is deprecated in modern RubyGems.
s.has_rdoc = false
s.authors = ["Matthias Hennemeyer"]
s.files = [
"README",
"remote_feature.gemspec",
"lib/remote_feature.rb"]
s.test_files = ["spec/remote_feature_spec.rb"]
# GitHub-era gem name (user-prefixed); was previously the bare "rwriteboard".
s.add_dependency("mhennemeyer-rwriteboard", ["> 0.1.1"])
end
デフォルトコマンドに反応しないようにする (Stop the bot from responding to Ruboty's default commands)
# Unregister Ruboty's built-in default handlers (help / ping / whoami)
# so the bot does not respond to those commands.
Ruboty.handlers.delete(Ruboty::Handlers::Help)
Ruboty.handlers.delete(Ruboty::Handlers::Ping)
Ruboty.handlers.delete(Ruboty::Handlers::Whoami)
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'exhibitionist/version'
# Gem specification for exhibitionist: current NYC art exhibitions by closing date.
Gem::Specification.new do |spec|
  spec.name          = "exhibitionist"
  spec.version       = Exhibitionist::VERSION
  spec.authors       = ["kromoser"]
  spec.email         = ["kevin@kevinromoser.com"]
  spec.summary       = %q{Current art exhibitions in NYC}
  spec.description   = %q{A list of some of the current show in NYC, organized by closing date, so you never miss that must-see show.}
  spec.homepage      = "https://github.com/kromoser/exhibitionist"
  spec.license       = "MIT"

  # The template's allowed_push_host guard was removed: it set a literal
  # "TODO" host, which would block `gem push` to rubygems.org.
  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = "bin"
  #spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.executables   = ["exhibitionist"]
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.11"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "rspec", "~> 3.0"
  spec.add_development_dependency "pry"
  spec.add_dependency "nokogiri"
end
Removed push_host section
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'exhibitionist/version'
# Gem specification for exhibitionist: current NYC art exhibitions by closing date.
Gem::Specification.new do |spec|
spec.name = "exhibitionist"
spec.version = Exhibitionist::VERSION
spec.authors = ["kromoser"]
spec.email = ["kevin@kevinromoser.com"]
spec.summary = %q{Current art exhibitions in NYC}
spec.description = %q{A list of some of the current show in NYC, organized by closing date, so you never miss that must-see show.}
spec.homepage = "https://github.com/kromoser/exhibitionist"
spec.license = "MIT"
# Packaged files come from git, excluding tests.
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.bindir = "bin"
#spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.executables = ["exhibitionist"]
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.11"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
spec.add_development_dependency "pry"
spec.add_dependency "nokogiri"
end
|
require 'mkmf'
# Polyfill for older mkmf versions that lack have_framework (OS X linking).
def have_framework(fw, &b)
  checking_for fw do
    src = cpp_include("#{fw}/#{fw}.h") << "\n" "int main(void){return 0;}"
    if try_link(src, opt = "-ObjC -framework #{fw}", &b)
      $defs.push(format("-DHAVE_FRAMEWORK_%s", fw.tr_cpp))
      $LDFLAGS << " " << opt
      true
    else
      false
    end
  end
end unless respond_to? :have_framework

if ENV['CROSS_COMPILING']
  dir_config("installed")
end

# Check frameworks BEFORE libraries so OS X links against OpenGL.framework /
# Cocoa rather than an X11 libGL that may also be installed (issue #12).
ok =
  (have_framework('OpenGL') && have_framework('Cocoa')) ||
  have_library('opengl32.lib', 'glVertex3d') ||
  have_library('opengl32') ||
  have_library('GL', 'glVertex3d')

ok &&=
  have_header('GL/gl.h') ||
  have_header('OpenGL/gl.h') # OS X

have_header 'GL/glx.h' # *NIX only?
have_header 'dlfcn.h' # OS X dynamic loader
have_header 'windows.h'
have_header 'stdint.h'
have_header 'inttypes.h'

have_func 'wglGetProcAddress', 'wingdi.h' # Windows extension loader

have_struct_member 'struct RFloat', 'float_value'
have_type 'int64_t', 'stdint.h'
have_type 'uint64_t', 'stdint.h'

if ok
  create_header
  create_makefile 'opengl/opengl'
end
Place have_framework before have_library. This hopefully fixes #12.
require 'mkmf'
# mkmf extconf for the ruby-opengl native extension.
# Polyfill for older mkmf versions that lack have_framework (OS X linking).
def have_framework(fw, &b)
checking_for fw do
src = cpp_include("#{fw}/#{fw}.h") << "\n" "int main(void){return 0;}"
if try_link(src, opt = "-ObjC -framework #{fw}", &b)
$defs.push(format("-DHAVE_FRAMEWORK_%s", fw.tr_cpp))
$LDFLAGS << " " << opt
true
else
false
end
end
end unless respond_to? :have_framework
if ENV['CROSS_COMPILING']
dir_config("installed")
end
# Frameworks are probed first so OS X prefers OpenGL.framework over X11 libGL.
ok =
(have_framework('OpenGL') && have_framework('Cocoa')) ||
have_library('opengl32.lib', 'glVertex3d') ||
have_library('opengl32') ||
have_library('GL', 'glVertex3d')
ok &&=
have_header('GL/gl.h') ||
have_header('OpenGL/gl.h') # OS X
have_header 'GL/glx.h' # *NIX only?
have_header 'dlfcn.h' # OS X dynamic loader
have_header 'windows.h'
have_header 'stdint.h'
have_header 'inttypes.h'
have_func 'wglGetProcAddress', 'wingdi.h' # Windows extension loader
have_struct_member 'struct RFloat', 'float_value'
have_type 'int64_t', 'stdint.h'
have_type 'uint64_t', 'stdint.h'
if ok then
create_header
create_makefile 'opengl/opengl'
end
|
require "formula"
# Homebrew formula for yat.sh.
class YatSh < Formula
  homepage "https://github.com/farfanoide/yat.sh"
  # Use the git repository rather than the master.zip archive so HEAD
  # installs track the repository properly.
  head "https://github.com/farfanoide/yat.sh.git"

  # NOTE(review): tmux looks like a runtime requirement, not build-only —
  # confirm whether `=> :build` is intended.
  depends_on "tmux" => :build

  def install
    prefix.install Dir['*']
  end

  test do
    system "yat.sh", "version"
  end
end
brew formula uses git
require "formula"
# Homebrew formula for yat.sh; HEAD builds come straight from the git repo.
class YatSh < Formula
homepage "https://github.com/farfanoide/yat.sh"
head "https://github.com/farfanoide/yat.sh.git"
# NOTE(review): tmux looks like a runtime requirement, not build-only —
# confirm whether `=> :build` is intended.
depends_on "tmux" => :build
def install
prefix.install Dir['*']
end
test do
system "yat.sh", "version"
end
end
|
module DatedModel
  # NOTE(review): ActiveSupport::Concern is normally `extend`ed, not included,
  # and would replace this self.included hook with an `included do ... end`
  # block — confirm intent before changing.
  include ActiveSupport::Concern

  # Hook: register the cross-field date validation on the including model.
  def self.included(model)
    model.validate :stop_date_later_than_start_date
  end

  # Validation: the end timestamp must be strictly after the start.
  # Supports both (start_date, stop_date) and (start_at, end_at) schemas.
  # Fix: skip the comparison when either side is blank so this no longer
  # raises NoMethodError on nil and presence validations can report the
  # missing value instead.
  def stop_date_later_than_start_date
    if self.has_attribute? :stop_date
      if self.start_date.present? && self.stop_date.present?
        errors.add(:stop_date, 'Must be later than start date') unless
          self.start_date < self.stop_date
      end
    elsif self.has_attribute? :end_at
      if self.start_at.present? && self.end_at.present?
        errors.add(:end_at, 'Must be later than start date') unless
          self.start_at < self.end_at
      end
    end
  end
end
Only check start date / end date if both have values [story:48144477]
module DatedModel
# NOTE(review): ActiveSupport::Concern is normally `extend`ed, not included,
# and would replace this self.included hook with an `included do ... end`
# block — confirm intent before changing.
include ActiveSupport::Concern
# Hook: register the cross-field date validation on the including model.
def self.included(model)
model.validate :stop_date_later_than_start_date
end
# Validation: the end timestamp must be strictly after the start.
# Supports both (start_date, stop_date) and (start_at, end_at) schemas and
# skips the comparison when either side is blank, leaving presence
# validations to report missing values.
def stop_date_later_than_start_date
if self.has_attribute? :stop_date
if self.start_date.present? && self.stop_date.present?
errors.add(:stop_date, 'Must be later than start date') unless
self.start_date < self.stop_date
end
elsif self.has_attribute? :end_at
if self.start_at.present? && self.end_at.present?
errors.add(:end_at, 'Must be later than start date') unless
self.start_at < self.end_at
end
end
end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem specification for fastq-factory 0.1.6.
# Generated by jeweler — regenerate via `rake gemspec` instead of hand-editing.
# NOTE(review): "asssessment" typo in the description — fix it in the Rakefile source.
Gem::Specification.new do |s|
s.name = "fastq-factory"
s.version = "0.1.6"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Anthony Underwood"]
s.date = "2012-08-31"
s.description = "This tool can process fastq files, using fastq_quality_trimmer and quake to correct fastq files and then provide a quality asssessment of the data"
s.email = "anthony.underwood@hpa.org.uk"
s.executables = ["fastq-factory"]
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
s.files = [
".document",
"Gemfile",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"VERSION",
"bin/fastq-factory",
"fastq-factory.gemspec",
"lib/fastq-factory.rb",
"lib/fastq-remove-orphans.pl",
"lib/fastq_assessment.rb",
"lib/generate_quality_metrics.rb",
"lib/maths.rb",
"lib/miseq_run_stats.rb",
"lib/trim_and_correct.rb",
"test/helper.rb",
"test/test_fastq-factory.rb"
]
s.homepage = "http://github.com/hpa-bioinformatics/fastq-factory"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.19"
s.summary = "A tool to process and QC fastq files from illumina machines"
# Jeweler emits three equivalent dependency branches to support old RubyGems.
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<trollop>, ["~> 2.0"])
s.add_runtime_dependency(%q<nokogiri>, [">= 0"])
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
s.add_development_dependency(%q<bundler>, ["~> 1.1.5"])
s.add_development_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_development_dependency(%q<simplecov>, [">= 0"])
else
s.add_dependency(%q<trollop>, ["~> 2.0"])
s.add_dependency(%q<nokogiri>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["~> 1.1.5"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<simplecov>, [">= 0"])
end
else
s.add_dependency(%q<trollop>, ["~> 2.0"])
s.add_dependency(%q<nokogiri>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["~> 1.1.5"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<simplecov>, [">= 0"])
end
end
Regenerate gemspec for version 0.1.7
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gem specification for fastq-factory 0.1.7.
# Generated by jeweler — regenerate via `rake gemspec` instead of hand-editing.
# NOTE(review): "asssessment" typo in the description — fix it in the Rakefile source.
Gem::Specification.new do |s|
s.name = "fastq-factory"
s.version = "0.1.7"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Anthony Underwood"]
s.date = "2012-09-07"
s.description = "This tool can process fastq files, using fastq_quality_trimmer and quake to correct fastq files and then provide a quality asssessment of the data"
s.email = "anthony.underwood@hpa.org.uk"
s.executables = ["fastq-factory"]
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
s.files = [
".document",
"Gemfile",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"VERSION",
"bin/fastq-factory",
"fastq-factory.gemspec",
"lib/fastq-factory.rb",
"lib/fastq-remove-orphans.pl",
"lib/fastq_assessment.rb",
"lib/generate_quality_metrics.rb",
"lib/maths.rb",
"lib/miseq_run_stats.rb",
"lib/trim_and_correct.rb",
"test/helper.rb",
"test/test_fastq-factory.rb"
]
s.homepage = "http://github.com/hpa-bioinformatics/fastq-factory"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.19"
s.summary = "A tool to process and QC fastq files from illumina machines"
# Jeweler emits three equivalent dependency branches to support old RubyGems.
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<trollop>, ["~> 2.0"])
s.add_runtime_dependency(%q<nokogiri>, [">= 0"])
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
s.add_development_dependency(%q<bundler>, ["~> 1.1.5"])
s.add_development_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_development_dependency(%q<simplecov>, [">= 0"])
else
s.add_dependency(%q<trollop>, ["~> 2.0"])
s.add_dependency(%q<nokogiri>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["~> 1.1.5"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<simplecov>, [">= 0"])
end
else
s.add_dependency(%q<trollop>, ["~> 2.0"])
s.add_dependency(%q<nokogiri>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["~> 1.1.5"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<simplecov>, [">= 0"])
end
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'acme/pki/version'

# Gem specification for acme-pki, a Ruby client for Let's Encrypt.
Gem::Specification.new do |spec|
  spec.name = 'acme-pki'
  # Version comes from lib/acme/pki/version.rb so releases can't drift
  # from the code (was hard-coded '0.1.3').
  spec.version = Acme::PKI::VERSION
  spec.authors = ['Aeris']
  spec.email = ['aeris@imirhil.fr']
  spec.summary = %q{Ruby client for Let’s Encrypt}
  spec.description = %q{Manage your keys, requests and certificates.}
  spec.homepage = 'https://github.com/aeris/acme-pki/'
  spec.license = 'AGPL-3.0+'

  spec.files = `git ls-files -z`.split("\x0")
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename f }
  spec.test_files = spec.files.grep %r{^(test|spec|features)/}
  spec.require_paths = %w(lib)

  spec.add_development_dependency 'bundler', '~> 1.11'
  spec.add_dependency 'acme-client', '~> 0.3.1'
  spec.add_dependency 'faraday_middleware', '~> 0.10.0'
  spec.add_dependency 'colorize', '~> 0.7.7'
  spec.add_dependency 'simpleidn', '~> 0.0.7'
end
Do not hardcode version number.
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'acme/pki/version'
# Gem specification for acme-pki, a Ruby client for Let's Encrypt.
# Version is sourced from lib/acme/pki/version.rb (required in the preamble).
Gem::Specification.new do |spec|
spec.name = 'acme-pki'
spec.version = Acme::PKI::VERSION
spec.authors = ['Aeris']
spec.email = ['aeris@imirhil.fr']
spec.summary = %q{Ruby client for Let’s Encrypt}
spec.description = %q{Manage your keys, requests and certificates.}
spec.homepage = 'https://github.com/aeris/acme-pki/'
spec.license = 'AGPL-3.0+'
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename f }
spec.test_files = spec.files.grep %r{^(test|spec|features)/}
spec.require_paths = %w(lib)
spec.add_development_dependency 'bundler', '~> 1.11'
spec.add_dependency 'acme-client', '~> 0.3.1'
spec.add_dependency 'faraday_middleware', '~> 0.10.0'
spec.add_dependency 'colorize', '~> 0.7.7'
spec.add_dependency 'simpleidn', '~> 0.0.7'
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'github_changelog_generator/version'
# Gem specification for github_changelog_generator.
Gem::Specification.new do |spec|
  spec.name = "github_changelog_generator"
  spec.version = GitHubChangelogGenerator::VERSION
  # default_executable removed: the attribute is deprecated (its writer was
  # dropped from modern RubyGems); bin/ executables are picked up below.
  spec.required_ruby_version = '>= 1.9.3'
  spec.authors = ["Petr Korolev"]
  spec.email = %q{sky4winder+github_changelog_generator@gmail.com}
  # NOTE(review): shelling out to `date` makes builds non-reproducible;
  # consider Time.now.strftime('%Y-%m-%d').
  spec.date = `date +"%Y-%m-%d"`.strip!
  spec.summary = %q{Script, that automatically generate change-log from your tags and pull-requests.}
  spec.description = %q{Script, that automatically generate change-log from your tags and pull-requests}
  spec.homepage = %q{https://github.com/skywinder/Github-Changelog-Generator}
  spec.license = "MIT"

  spec.files = `git ls-files -z`.split("\x0")
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.7"
  spec.add_development_dependency "rake", "~> 10.0"

  # httparty removed (see the "remove dependency" change): only github_api
  # and colorize are declared runtime dependencies.
  spec.add_runtime_dependency(%q<github_api>, ["~> 0.12"])
  spec.add_runtime_dependency(%q<colorize>, ["~> 0.7"])
end
Remove the httparty runtime dependency
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'github_changelog_generator/version'
# Gem specification for github_changelog_generator.
Gem::Specification.new do |spec|
  spec.name = "github_changelog_generator"
  spec.version = GitHubChangelogGenerator::VERSION
  # default_executable removed: the attribute is deprecated (its writer was
  # dropped from modern RubyGems); bin/ executables are picked up below.
  spec.required_ruby_version = '>= 1.9.3'
  spec.authors = ["Petr Korolev"]
  spec.email = %q{sky4winder+github_changelog_generator@gmail.com}
  # NOTE(review): shelling out to `date` makes builds non-reproducible;
  # consider Time.now.strftime('%Y-%m-%d').
  spec.date = `date +"%Y-%m-%d"`.strip!
  spec.summary = %q{Script, that automatically generate change-log from your tags and pull-requests.}
  spec.description = %q{Script, that automatically generate change-log from your tags and pull-requests}
  spec.homepage = %q{https://github.com/skywinder/Github-Changelog-Generator}
  spec.license = "MIT"

  spec.files = `git ls-files -z`.split("\x0")
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.7"
  spec.add_development_dependency "rake", "~> 10.0"

  spec.add_runtime_dependency(%q<github_api>, ["~> 0.12"])
  spec.add_runtime_dependency(%q<colorize>, ["~> 0.7"])
end
|
#!/usr/bin/env ruby
hq_project_name = "hq-grapher-icinga-perfdata"
hq_project_ver = "0.0.0"
# Typo fix in the human-readable title: "perffdata" -> "perfdata".
hq_project_full = "HQ grapher icinga perfdata"
hq_project_desc = "HQ tool to send icinga perfdata to rrd"
hq_project_dir = File.expand_path "..", __FILE__

$LOAD_PATH.unshift "#{hq_project_dir}/ruby" \
  unless $LOAD_PATH.include? "#{hq_project_dir}/ruby"

# Gem specification built from the project variables above.
Gem::Specification.new do |spec|
  spec.name = hq_project_name
  spec.version = hq_project_ver
  spec.platform = Gem::Platform::RUBY
  spec.authors = [ "James Pharaoh" ]
  spec.email = [ "james@phsys.co.uk" ]
  spec.homepage = "https://github.com/jamespharaoh/#{hq_project_name}"
  spec.summary = hq_project_full
  spec.description = hq_project_desc

  spec.required_rubygems_version = ">= 1.3.6"
  spec.rubyforge_project = hq_project_name

  spec.add_dependency "hq-tools", ">= 0.3.0"
  spec.add_dependency "libxml-ruby", ">= 2.6.0"
  spec.add_dependency "rrd-ffi", ">= 0.2.14"

  spec.add_development_dependency "capybara", ">= 2.0.2"
  spec.add_development_dependency "cucumber", ">= 1.2.1"
  spec.add_development_dependency "rake", ">= 10.0.3"
  spec.add_development_dependency "rspec", ">= 2.12.0"
  spec.add_development_dependency "rspec_junit_formatter"
  spec.add_development_dependency "simplecov"

  spec.files = Dir[
    "lib/**/*.rb",
  ]
  spec.test_files = Dir[
    "features/**/*.feature",
    "features/**/*.rb",
    "spec/**/*-spec.rb",
  ]
  spec.executables = Dir.new("bin").entries - [ ".", ".." ]
  spec.require_paths = [ "lib" ]
end
bump version number to 0.0.1
#!/usr/bin/env ruby
hq_project_name = "hq-grapher-icinga-perfdata"
hq_project_ver = "0.0.1"
# Typo fix in the human-readable title: "perffdata" -> "perfdata".
hq_project_full = "HQ grapher icinga perfdata"
hq_project_desc = "HQ tool to send icinga perfdata to rrd"
hq_project_dir = File.expand_path "..", __FILE__

$LOAD_PATH.unshift "#{hq_project_dir}/ruby" \
  unless $LOAD_PATH.include? "#{hq_project_dir}/ruby"

# Gem specification built from the project variables above.
Gem::Specification.new do |spec|
  spec.name = hq_project_name
  spec.version = hq_project_ver
  spec.platform = Gem::Platform::RUBY
  spec.authors = [ "James Pharaoh" ]
  spec.email = [ "james@phsys.co.uk" ]
  spec.homepage = "https://github.com/jamespharaoh/#{hq_project_name}"
  spec.summary = hq_project_full
  spec.description = hq_project_desc

  spec.required_rubygems_version = ">= 1.3.6"
  spec.rubyforge_project = hq_project_name

  spec.add_dependency "hq-tools", ">= 0.3.0"
  spec.add_dependency "libxml-ruby", ">= 2.6.0"
  spec.add_dependency "rrd-ffi", ">= 0.2.14"

  spec.add_development_dependency "capybara", ">= 2.0.2"
  spec.add_development_dependency "cucumber", ">= 1.2.1"
  spec.add_development_dependency "rake", ">= 10.0.3"
  spec.add_development_dependency "rspec", ">= 2.12.0"
  spec.add_development_dependency "rspec_junit_formatter"
  spec.add_development_dependency "simplecov"

  spec.files = Dir[
    "lib/**/*.rb",
  ]
  spec.test_files = Dir[
    "features/**/*.feature",
    "features/**/*.rb",
    "spec/**/*-spec.rb",
  ]
  spec.executables = Dir.new("bin").entries - [ ".", ".." ]
  spec.require_paths = [ "lib" ]
end
|
# Performs an HTTP call against the API.
#
# method  - HTTP verb as a Symbol (:get, :post, :put, :delete).
# path    - request path appended to the API domain.
# options - optional Hash; :domain and :auth_token override defaults
#           (presumably supplied by dm_api_domain / dm_api_access_token
#           helpers defined elsewhere -- not visible here), :payload is
#           serialized to JSON and sent as the request body.
#
# Returns the raw RestClient response. The block form suppresses
# RestClient's exception-raising on non-2xx statuses, so callers inspect
# response.code themselves.
def call_api(method, path, options={})
domain = options.delete(:domain) || dm_api_domain
auth_token = options.delete(:auth_token) || dm_api_access_token
url = "#{domain}#{path}"
payload = options.delete(:payload)
# Whatever remains in options is sent as request headers.
options.merge!({
:content_type => :json,
:accept => :json,
:authorization => "Bearer #{auth_token}"
})
if payload.nil?
RestClient.send(method, url, options) {|response, request, result| response}
else
# can't send a payload as part of a DELETE request
# http://stackoverflow.com/questions/21104232/delete-method-with-a-payload-using-ruby-restclient
if method == :delete
RestClient::Request.execute(method: :delete, url: url, payload: payload.to_json, headers: options) {|response, request, result| response}
else
RestClient.send(method, url, payload.to_json, options) {|response, request, result| response}
end
end
end
# Builds a failure description from a message plus the response's
# HTTP status code and body, for use as an assertion message.
def _error(response, message)
  details = "#{response.code} - #{response.body}"
  [message, details].join("\n")
end
# Sets a framework's status unless it already has it; clarification
# questions are open only while the framework status is 'open'.
#
# Returns the status the framework had *before* any update, so callers
# can restore it afterwards.
def update_framework_status(framework_slug, status)
response = call_api(:get, "/frameworks/#{framework_slug}")
framework = JSON.parse(response.body)["frameworks"]
if framework['status'] != status
response = call_api(:post, "/frameworks/#{framework_slug}", payload: {
"frameworks" => {"status" => status, "clarificationQuestionsOpen" => status == 'open'},
"updated_by" => "functional tests",
})
response.code.should be(200), _error(response, "Failed to update framework status #{framework_slug} #{status}")
end
return framework['status']
end
# Marks every supplier's agreement for the given framework as not
# returned, so tests start from a clean slate.
def ensure_no_framework_agreements_exist(framework_slug)
  listing = call_api(:get, "/frameworks/#{framework_slug}/suppliers")
  listing.code.should be(200), _error(listing, "Failed to get framework #{framework_slug}")
  JSON.parse(listing.body)["supplierFrameworks"].each do |sf|
    update_framework_agreement_status(framework_slug, sf["supplierId"], false)
  end
end
# Sets whether a supplier has returned their framework agreement.
# NOTE(review): this variant wraps the audit field in "update_details";
# a later revision of these helpers uses a top-level "updated_by" key
# instead -- confirm which the API currently accepts.
def update_framework_agreement_status(framework_slug, supplier_id, status)
response = call_api(:post, "/suppliers/#{supplier_id}/frameworks/#{framework_slug}", payload: {
"frameworkInterest" => {"agreementReturned" => status},
"update_details" => {"updated_by" => "functional tests"},
})
response.code.should be(200), _error(response, "Failed to update agreement status #{supplier_id} #{framework_slug}")
end
# Registers a supplier's interest in a framework, but only if no
# interest record exists yet (GET returning 404 means "not registered").
# NOTE(review): this variant wraps the audit field in "update_details";
# a later revision uses a top-level "updated_by" key -- confirm which
# the API currently accepts.
def register_interest_in_framework(framework_slug, supplier_id)
path = "/suppliers/#{supplier_id}/frameworks/#{framework_slug}"
response = call_api(:get, path)
if response.code == 404
response = call_api(:put, path, payload: {
"update_details" => {"updated_by" => "functional tests"}
})
response.code.should be(200), _error(response, "Failed to register interest in framework #{framework_slug} #{supplier_id}")
end
end
# Stores a supplier's declaration answers for a framework.
# Both 200 (updated) and 201 (created) count as success.
def submit_supplier_declaration(framework_slug, supplier_id, declaration)
path = "/suppliers/#{supplier_id}/frameworks/#{framework_slug}/declaration"
response = call_api(:put, path, payload: {
"declaration" => declaration,
"updated_by" => "functional tests",
})
[200, 201].should include(response.code), _error(response, "Failed to submit supplier declaration #{framework_slug} #{supplier_id}")
end
Remove update_details and fix status checks in API helpers steps
API now expects `updated_by` top-level key for write requests.
Adds 201 as a valid status when registering supplier framework
interest.
# Performs an HTTP call against the API.
#
# method  - HTTP verb as a Symbol (:get, :post, :put, :delete).
# path    - request path appended to the API domain.
# options - optional Hash; :domain and :auth_token override defaults
#           (presumably supplied by dm_api_domain / dm_api_access_token
#           helpers defined elsewhere -- not visible here), :payload is
#           serialized to JSON and sent as the request body.
#
# Returns the raw RestClient response. The block form suppresses
# RestClient's exception-raising on non-2xx statuses, so callers inspect
# response.code themselves.
def call_api(method, path, options={})
domain = options.delete(:domain) || dm_api_domain
auth_token = options.delete(:auth_token) || dm_api_access_token
url = "#{domain}#{path}"
payload = options.delete(:payload)
# Whatever remains in options is sent as request headers.
options.merge!({
:content_type => :json,
:accept => :json,
:authorization => "Bearer #{auth_token}"
})
if payload.nil?
RestClient.send(method, url, options) {|response, request, result| response}
else
# can't send a payload as part of a DELETE request
# http://stackoverflow.com/questions/21104232/delete-method-with-a-payload-using-ruby-restclient
if method == :delete
RestClient::Request.execute(method: :delete, url: url, payload: payload.to_json, headers: options) {|response, request, result| response}
else
RestClient.send(method, url, payload.to_json, options) {|response, request, result| response}
end
end
end
# Builds a failure description from a message plus the response's
# HTTP status code and body, for use as an assertion message.
def _error(response, message)
  status_line = "#{response.code} - #{response.body}"
  message + "\n" + status_line
end
# Sets a framework's status unless it already has it; clarification
# questions are open only while the framework status is 'open'.
#
# Returns the status the framework had *before* any update, so callers
# can restore it afterwards.
def update_framework_status(framework_slug, status)
response = call_api(:get, "/frameworks/#{framework_slug}")
framework = JSON.parse(response.body)["frameworks"]
if framework['status'] != status
response = call_api(:post, "/frameworks/#{framework_slug}", payload: {
"frameworks" => {"status" => status, "clarificationQuestionsOpen" => status == 'open'},
"updated_by" => "functional tests",
})
response.code.should be(200), _error(response, "Failed to update framework status #{framework_slug} #{status}")
end
return framework['status']
end
# Marks every supplier's agreement for the given framework as not
# returned, so tests start from a clean slate.
def ensure_no_framework_agreements_exist(framework_slug)
  suppliers_response = call_api(:get, "/frameworks/#{framework_slug}/suppliers")
  suppliers_response.code.should be(200), _error(suppliers_response, "Failed to get framework #{framework_slug}")
  parsed = JSON.parse(suppliers_response.body)
  parsed["supplierFrameworks"].each do |interest|
    update_framework_agreement_status(framework_slug, interest["supplierId"], false)
  end
end
# Sets whether a supplier has returned their framework agreement.
# The API expects a top-level "updated_by" audit field.
def update_framework_agreement_status(framework_slug, supplier_id, status)
response = call_api(:post, "/suppliers/#{supplier_id}/frameworks/#{framework_slug}", payload: {
"frameworkInterest" => {"agreementReturned" => status},
"updated_by" => "functional tests",
})
response.code.should be(200), _error(response, "Failed to update agreement status #{supplier_id} #{framework_slug}")
end
# Registers a supplier's interest in a framework, but only if no
# interest record exists yet (GET returning 404 means "not registered").
# Accepts 200 (updated) and 201 (created) as success.
def register_interest_in_framework(framework_slug, supplier_id)
  path = "/suppliers/#{supplier_id}/frameworks/#{framework_slug}"
  response = call_api(:get, path)
  if response.code == 404
    response = call_api(:put, path, payload: {
      "updated_by" => "functional tests"
    })
    # response.code is an Integer, so `should match(/20[01]/)` could never
    # pass (regex matching needs a String). Check membership instead, in
    # the same style as submit_supplier_declaration.
    [200, 201].should include(response.code), _error(response, "Failed to register interest in framework #{framework_slug} #{supplier_id}")
  end
end
# Stores a supplier's declaration answers for a framework.
# Both 200 (updated) and 201 (created) count as success.
def submit_supplier_declaration(framework_slug, supplier_id, declaration)
  declaration_path = "/suppliers/#{supplier_id}/frameworks/#{framework_slug}/declaration"
  result = call_api(:put, declaration_path, payload: {
    "declaration" => declaration,
    "updated_by" => "functional tests",
  })
  [200, 201].should include(result.code), _error(result, "Failed to submit supplier declaration #{framework_slug} #{supplier_id}")
end
|
# Gem specification for the `github` command line helper.
Gem::Specification.new do |s|
  s.name = "github"
  s.version = "0.3.4"
  s.specification_version = 2 if s.respond_to? :specification_version=
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  # Each author must be its own array element; the original packed all
  # three names into a single comma-separated string.
  s.authors = ["Chris Wanstrath", "Kevin Ballard", "Scott Chacon"]
  s.date = %q{2008-05-18}
  s.default_executable = %q{gh}
  s.description = %q{The official `github` command line helper for simplifying your GitHub experience.}
  s.email = %q{chris@ozmm.org}
  s.executables = ["github", "gh"]
  s.extra_rdoc_files = ["bin/github", "bin/gh", "lib/github/extensions.rb", "lib/github/command.rb", "lib/github/helper.rb", "lib/github.rb", "LICENSE", "README"]
  s.files = ["bin/github", "lib/commands/network.rb", "lib/commands/commands.rb", "lib/commands/helpers.rb", "lib/github/extensions.rb", "lib/github/command.rb", "lib/github/helper.rb", "lib/github.rb", "LICENSE", "Manifest", "README", "spec/command_spec.rb", "spec/extensions_spec.rb", "spec/github_spec.rb", "spec/helper_spec.rb", "spec/spec_helper.rb", "spec/ui_spec.rb", "spec/windoze_spec.rb", "github-gem.gemspec"]
  s.has_rdoc = true
  s.homepage = %q{http://github.com/}
  s.rdoc_options = ["--line-numbers", "--inline-source", "--title", "Github", "--main", "README"]
  s.require_paths = ["lib"]
  s.rubyforge_project = %q{github}
  s.rubygems_version = %q{1.1.1}
  s.summary = %q{The official `github` command line helper for simplifying your GitHub experience.}

  # s.add_dependency(%q<launchy>, [">= 0"])
  s.add_dependency('json_pure', [">= 0"])
end
Add Text::Format dependency to gemspec.
Never worked with creating gems before; will test post-commit.
# Gem specification for the `github` command line helper.
Gem::Specification.new do |s|
  s.name = "github"
  s.version = "0.3.4"
  s.specification_version = 2 if s.respond_to? :specification_version=
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  # Each author must be its own array element; the original packed all
  # three names into a single comma-separated string.
  s.authors = ["Chris Wanstrath", "Kevin Ballard", "Scott Chacon"]
  s.date = %q{2008-05-18}
  s.default_executable = %q{gh}
  s.description = %q{The official `github` command line helper for simplifying your GitHub experience.}
  s.email = %q{chris@ozmm.org}
  s.executables = ["github", "gh"]
  s.extra_rdoc_files = ["bin/github", "bin/gh", "lib/github/extensions.rb", "lib/github/command.rb", "lib/github/helper.rb", "lib/github.rb", "LICENSE", "README"]
  s.files = ["bin/github", "lib/commands/network.rb", "lib/commands/commands.rb", "lib/commands/helpers.rb", "lib/github/extensions.rb", "lib/github/command.rb", "lib/github/helper.rb", "lib/github.rb", "LICENSE", "Manifest", "README", "spec/command_spec.rb", "spec/extensions_spec.rb", "spec/github_spec.rb", "spec/helper_spec.rb", "spec/spec_helper.rb", "spec/ui_spec.rb", "spec/windoze_spec.rb", "github-gem.gemspec"]
  s.has_rdoc = true
  s.homepage = %q{http://github.com/}
  s.rdoc_options = ["--line-numbers", "--inline-source", "--title", "Github", "--main", "README"]
  s.require_paths = ["lib"]
  s.rubyforge_project = %q{github}
  s.rubygems_version = %q{1.1.1}
  s.summary = %q{The official `github` command line helper for simplifying your GitHub experience.}

  # s.add_dependency(%q<launchy>, [">= 0"])
  s.add_dependency('json_pure', [">= 0"])
  s.add_dependency('text-format', [">= 0"])
end
|
# -*- encoding: utf-8 -*-
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'github_lda/version'

# Gem metadata for github_lda: collaborative topic modeling for GitHub repos.
Gem::Specification.new do |gem|
  gem.name          = 'github_lda'
  gem.version       = GithubLda::VERSION
  gem.authors       = ['Naoki Orii']
  gem.email         = ['mrorii@gmail.com']
  gem.description   = 'Collaborative Topic Modeling for Github Repos'
  gem.summary       = 'Collaborative Topic Modeling for Github Repos'
  gem.homepage      = 'https://github.com/mrorii/github_lda'

  # Package everything git tracks; bin/ scripts become executables.
  gem.files         = `git ls-files`.split($/)
  gem.executables   = gem.files.grep(%r{^bin/}).map { |f| File.basename(f) }
  gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ['lib']
end
Add runtime dependencies
# -*- encoding: utf-8 -*-
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'github_lda/version'
# Gem metadata for github_lda: collaborative topic modeling for GitHub repos.
Gem::Specification.new do |gem|
gem.name = "github_lda"
gem.version = GithubLda::VERSION
gem.authors = ["Naoki Orii"]
gem.email = ["mrorii@gmail.com"]
gem.description = %q{Collaborative Topic Modeling for Github Repos}
gem.summary = %q{Collaborative Topic Modeling for Github Repos}
gem.homepage = "https://github.com/mrorii/github_lda"
# Package everything git tracks; bin/ scripts become executables.
gem.files = `git ls-files`.split($/)
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.require_paths = ["lib"]
# Runtime dependencies for repository access and language detection.
gem.add_runtime_dependency "grit", ">= 2.5.0"
gem.add_runtime_dependency "github-linguist", ">= 2.3.4"
gem.add_runtime_dependency "pygments.rb", ">= 0.3.2"
end
|
# Homebrew formula installing bash completion for SonarQube's `sonar` command.
class SonarCompletion < Formula
homepage "https://github.com/a1dutch/sonarqube-bash-completion"
url "https://raw.githubusercontent.com/a1dutch/sonarqube-bash-completion/1.0/etc/bash_completion.d/sonar"
sha256 "73958c72fb1643e69a134089df2bdc8e6964ba4f9a1c21bcbfad49826e5b0f2d"
# The download is the completion script itself; install it into
# Homebrew's bash_completion directory.
def install
bash_completion.install "sonar"
end
end
sonar-completion: head and test added
Closes #91.
Signed-off-by: Baptiste Fontaine <bfee279af59f3e3f71f7ce1fa037ea7b90f93cbf@yahoo.fr>
# Homebrew formula installing bash completion for SonarQube's `sonar` command.
class SonarCompletion < Formula
homepage "https://github.com/a1dutch/sonarqube-bash-completion"
url "https://github.com/a1dutch/sonarqube-bash-completion/archive/1.0.tar.gz"
sha256 "501bb1c87fab9dd934cdc506f12e74ea21d48be72a9e4321c88187e4a0e0a99a"
head "https://github.com/a1dutch/sonarqube-bash-completion.git"
# The tarball contains the completion script under etc/bash_completion.d/.
def install
bash_completion.install "etc/bash_completion.d/sonar"
end
# Sourcing the script should register the _sonar completion function.
test do
assert_match "-F _sonar",
shell_output("source #{bash_completion}/sonar && complete -p sonar")
end
end
|
#
# Cookbook Name:: base
# Recipe:: default
#
# Copyright 2013, Jacques Marneweck
#
# All rights reserved - Do Not Redistribute
#

# Create the directory tree used for site-local binaries.
%w[/opt/custom /opt/custom/bin].each do |path|
  directory path do
    action :create
  end
end

#include_recipe "base::arcstat"
include_recipe "base::imgadm"
include_recipe "base::mbuffer"
include_recipe "base::pkgsrc"
include_recipe "base::pkgsrc-packages"
include_recipe "base::writekey"
Ensure that the /opt/custom/smf directory exists
Signed-off-by: Jacques Marneweck <381be0093c78dc23f52ac7a2357c8db4d86635b1@powertrip.co.za>
#
# Cookbook Name:: base
# Recipe:: default
#
# Copyright 2013, Jacques Marneweck
#
# All rights reserved - Do Not Redistribute
#
# /opt/custom holds site-local binaries and SMF manifests.
directory "/opt/custom" do
action :create
end
directory "/opt/custom/bin" do
action :create
end
directory "/opt/custom/smf" do
action :create
end
#include_recipe "base::arcstat"
include_recipe "base::imgadm"
include_recipe "base::mbuffer"
include_recipe "base::pkgsrc"
include_recipe "base::pkgsrc-packages"
include_recipe "base::writekey"
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
# Gem metadata for hive-messages: communication between hive components.
Gem::Specification.new do |spec|
spec.name          = "hive-messages"
spec.version       = '1.0.1'
spec.authors       = ["David Buckhurst", "Paul Carey"]
spec.email         = ["david.buckhurst@bbc.co.uk"]
spec.summary       = %q{Hive communication library.}
spec.description   = %q{Hive Messages, communications between hive components.}
spec.homepage      = ""
spec.license       = "MIT"
# Package everything git tracks; bin/ scripts become executables.
spec.files         = `git ls-files -z`.split("\x0")
spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
# Runtime dependencies.
spec.add_dependency "virtus", "~> 1.0"
spec.add_dependency "roar", "~> 1.0"
spec.add_dependency "activemodel", ">= 4.0", "< 4.3"
spec.add_dependency "activesupport", ">= 4.0", "< 4.3"
spec.add_dependency "multipart-post", "~> 2.0"
spec.add_dependency "mimemagic", "~> 0.3"
spec.add_dependency "multi_json", "~> 1.11"
# Development dependencies.
spec.add_development_dependency "rspec", "~> 2.14"
spec.add_development_dependency "bundler", "~> 1.6"
spec.add_development_dependency "rake", "~> 10.4"
spec.add_development_dependency "shoulda-matchers", "~> 2.8"
spec.add_development_dependency "webmock", "~> 1.21"
end
Increment version
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
# Gem metadata for hive-messages: communication between hive components.
Gem::Specification.new do |spec|
spec.name          = "hive-messages"
spec.version       = '1.0.2'
spec.authors       = ["David Buckhurst", "Paul Carey"]
spec.email         = ["david.buckhurst@bbc.co.uk"]
spec.summary       = %q{Hive communication library.}
spec.description   = %q{Hive Messages, communications between hive components.}
spec.homepage      = ""
spec.license       = "MIT"
# Package everything git tracks; bin/ scripts become executables.
spec.files         = `git ls-files -z`.split("\x0")
spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
# Runtime dependencies.
spec.add_dependency "virtus", "~> 1.0"
spec.add_dependency "roar", "~> 1.0"
spec.add_dependency "activemodel", ">= 4.0", "< 4.3"
spec.add_dependency "activesupport", ">= 4.0", "< 4.3"
spec.add_dependency "multipart-post", "~> 2.0"
spec.add_dependency "mimemagic", "~> 0.3"
spec.add_dependency "multi_json", "~> 1.11"
# Development dependencies.
spec.add_development_dependency "rspec", "~> 2.14"
spec.add_development_dependency "bundler", "~> 1.6"
spec.add_development_dependency "rake", "~> 10.4"
spec.add_development_dependency "shoulda-matchers", "~> 2.8"
spec.add_development_dependency "webmock", "~> 1.21"
end
|
# This script pulls translation files from Transifex and ensures they are in the format we need.
# You need the Transifex client installed.
# http://docs.transifex.com/developer/client/setup
#
# Don't use this script to create pull requests. Do translations in Transifex. The Discourse
# team will pull them in.

require 'open3'

if `which tx`.strip.empty?
  puts '', 'The Transifex client needs to be installed to use this script.'
  puts 'Instructions are here: http://docs.transifex.com/developer/client/setup'
  puts '', 'On Mac:', ''
  puts '  curl -O https://raw.github.com/pypa/pip/master/contrib/get-pip.py'
  puts '  sudo python get-pip.py'
  puts '  sudo pip install transifex-client', ''
  exit 1
end

# Every locale we have a client file for, except the English source.
locales = Dir.glob(File.expand_path('../../config/locales/client.*.yml', __FILE__)).map {|x| x.split('.')[-2]}.select {|x| x != 'en'}.sort.join(',')

puts 'Pulling new translations...', ''
command = "tx pull --mode=developer --language=#{locales} #{ARGV.include?('force') ? '-f' : ''}"

# Capture the child's exit status via wait_thr: Open3.popen2e reaps the
# child itself, so $? after the block still refers to the earlier
# `which tx` backtick and would never report a tx failure.
tx_status = nil
Open3.popen2e(command) do |stdin, stdout_err, wait_thr|
  while (line = stdout_err.gets)
    puts line
  end
  tx_status = wait_thr.value
end
puts ''

unless tx_status.success?
  puts 'Something failed. Check the output above.', ''
  exit tx_status.exitstatus
end

YML_FILE_COMMENTS = <<END
# encoding: utf-8
#
# Never edit this file. It will be overwritten when translations are pulled from Transifex.
#
# To work with us on translations, join this project:
# https://www.transifex.com/projects/p/discourse-org/
END

YML_DIRS = ['config/locales',
            'plugins/poll/config/locales',
            'vendor/gems/discourse_imgur/lib/discourse_imgur/locale']

# Add comments to the top of files
['client', 'server'].each do |base|
  YML_DIRS.each do |dir|
    Dir.glob(File.expand_path("../../#{dir}/#{base}.*.yml", __FILE__)).each do |file_name|
      contents = File.readlines(file_name)
      File.open(file_name, 'w+') do |f|
        # Skip files that already start with a comment header.
        f.puts(YML_FILE_COMMENTS, '') unless contents[0][0] == '#'
        f.puts contents
      end
    end
  end
end
Fixes the language codes in the pull_translations script
# This script pulls translation files from Transifex and ensures they are in the format we need.
# You need the Transifex client installed.
# http://docs.transifex.com/developer/client/setup
#
# Don't use this script to create pull requests. Do translations in Transifex. The Discourse
# team will pull them in.

require 'open3'

if `which tx`.strip.empty?
  puts '', 'The Transifex client needs to be installed to use this script.'
  puts 'Instructions are here: http://docs.transifex.com/developer/client/setup'
  puts '', 'On Mac:', ''
  puts '  curl -O https://raw.github.com/pypa/pip/master/contrib/get-pip.py'
  puts '  sudo python get-pip.py'
  puts '  sudo pip install transifex-client', ''
  exit 1
end

# Every locale we have a client file for, except the English source.
locales = Dir.glob(File.expand_path('../../config/locales/client.*.yml', __FILE__)).map {|x| x.split('.')[-2]}.select {|x| x != 'en'}.sort.join(',')

puts 'Pulling new translations...', ''
command = "tx pull --mode=developer --language=#{locales} #{ARGV.include?('force') ? '-f' : ''}"

# Capture the child's exit status via wait_thr: Open3.popen2e reaps the
# child itself, so $? after the block still refers to the earlier
# `which tx` backtick and would never report a tx failure.
tx_status = nil
Open3.popen2e(command) do |stdin, stdout_err, wait_thr|
  while (line = stdout_err.gets)
    puts line
  end
  tx_status = wait_thr.value
end
puts ''

unless tx_status.success?
  puts 'Something failed. Check the output above.', ''
  exit tx_status.exitstatus
end

YML_FILE_COMMENTS = <<END
# encoding: utf-8
#
# Never edit this file. It will be overwritten when translations are pulled from Transifex.
#
# To work with us on translations, join this project:
# https://www.transifex.com/projects/p/discourse-org/
END

YML_DIRS = ['config/locales',
            'plugins/poll/config/locales',
            'vendor/gems/discourse_imgur/lib/discourse_imgur/locale']

# Add comments to the top of files
['client', 'server'].each do |base|
  YML_DIRS.each do |dir|
    Dir.glob(File.expand_path("../../#{dir}/#{base}.*.yml", __FILE__)).each do |file_name|
      # Force the top-level YAML key to the locale taken from the file
      # name (Transifex sometimes exports a mismatched language code).
      language = File.basename(file_name).match(Regexp.new("#{base}\\.([^\\.]*)\\.yml"))[1]
      lines = File.readlines(file_name)
      lines.collect! {|line| line =~ /^[a-z_]+:$/i ? "#{language}:" : line}
      File.open(file_name, 'w+') do |f|
        # Skip files that already start with a comment header.
        f.puts(YML_FILE_COMMENTS, '') unless lines[0][0] == '#'
        f.puts(lines)
      end
    end
  end
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'baw-audio-tools/version'
require 'rbconfig'
# Gem metadata for baw-audio-tools (Bioacoustics Workbench audio tools).
Gem::Specification.new do |spec|
spec.name          = 'baw-audio-tools'
spec.version       = BawAudioTools::VERSION
spec.authors       = ['Mark Cottman-Fields']
spec.email         = ['qut.bioacoustics.research+mark@gmail.com']
spec.summary       = %q{Bioacoustics Workbench audio tools}
spec.description   = %q{Contains the audio, spectrogram, and caching tools for the Bioacoustics Workbench project.}
spec.homepage      = 'https://github.com/QutBioacoustics/baw-audio-tools'
spec.license       = 'MIT'
# Package everything git tracks; bin/ scripts become executables.
spec.files         = `git ls-files -z`.split("\x0")
spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ['lib']
# dev dependencies
spec.add_development_dependency 'bundler', '~> 1.7'
spec.add_development_dependency 'rake', '~> 10.4'
spec.add_development_dependency 'guard', '~> 2.12'
spec.add_development_dependency 'guard-rspec', '~> 4.5'
spec.add_development_dependency 'guard-yard', '~> 2.1'
spec.add_development_dependency 'simplecov', '~> 0.9'
spec.add_development_dependency 'coveralls', '~> 0.8'
spec.add_development_dependency 'codeclimate-test-reporter', '~> 0.4'
spec.add_development_dependency 'zonebie', '~> 0.5'
spec.add_development_dependency 'i18n', '~> 0.7'
spec.add_development_dependency 'tzinfo', '~> 1.2'
# runtime dependencies
spec.add_runtime_dependency 'activesupport', '>= 3.2'
end
Update development dependencies (simplecov ~> 0.10).
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'baw-audio-tools/version'
require 'rbconfig'
# Gem metadata for baw-audio-tools (Bioacoustics Workbench audio tools).
Gem::Specification.new do |spec|
spec.name          = 'baw-audio-tools'
spec.version       = BawAudioTools::VERSION
spec.authors       = ['Mark Cottman-Fields']
spec.email         = ['qut.bioacoustics.research+mark@gmail.com']
spec.summary       = %q{Bioacoustics Workbench audio tools}
spec.description   = %q{Contains the audio, spectrogram, and caching tools for the Bioacoustics Workbench project.}
spec.homepage      = 'https://github.com/QutBioacoustics/baw-audio-tools'
spec.license       = 'MIT'
# Package everything git tracks; bin/ scripts become executables.
spec.files         = `git ls-files -z`.split("\x0")
spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ['lib']
# dev dependencies
spec.add_development_dependency 'bundler', '~> 1.7'
spec.add_development_dependency 'rake', '~> 10.4'
spec.add_development_dependency 'guard', '~> 2.12'
spec.add_development_dependency 'guard-rspec', '~> 4.5'
spec.add_development_dependency 'guard-yard', '~> 2.1'
spec.add_development_dependency 'simplecov', '~> 0.10'
spec.add_development_dependency 'coveralls', '~> 0.8'
spec.add_development_dependency 'codeclimate-test-reporter', '~> 0.4'
spec.add_development_dependency 'zonebie', '~> 0.5'
spec.add_development_dependency 'i18n', '~> 0.7'
spec.add_development_dependency 'tzinfo', '~> 1.2'
# runtime dependencies
spec.add_runtime_dependency 'activesupport', '>= 3.2'
end
|
require "formula"

# Homebrew formula for the Helios command line client, distributed as a
# pre-built shaded jar.
class Helios < Formula
  homepage "https://github.com/spotify/helios"
  url "https://oss.sonatype.org/service/local/repositories/releases/content/com/spotify/helios-tools/0.9.65/helios-tools-0.9.65-shaded.jar"
  sha256 "5f3604daba282f64e4027074bc45fbd172500555568cb5ec1e164167a0787a08"
  version "0.9.65"

  depends_on :java => "1.7+"

  def install
    # Derive the jar name from the declared version instead of repeating
    # the version literal.
    jar = "helios-tools-#{version}-shaded.jar"
    libexec.install jar
    bin.write_jar_script libexec/jar, "helios", "-XX:+TieredCompilation -XX:TieredStopAtLevel=1 -Xverify:none"
  end

  test do
    system "#{bin}/helios", "--version"
  end
end
Update helios to 0.9.66
require "formula"
# Homebrew formula for the Helios command line client, distributed as a
# pre-built shaded jar.
class Helios < Formula
homepage "https://github.com/spotify/helios"
url "https://oss.sonatype.org/service/local/repositories/releases/content/com/spotify/helios-tools/0.9.66/helios-tools-0.9.66-shaded.jar"
sha256 "611213056d417e17a0bf26deb02020de52941f8bdd2121d264da9630f580fa59"
version "0.9.66"
depends_on :java => "1.7+"
# Install the jar and wrap it in a launcher script with fast-startup
# JVM flags.
def install
libexec.install "helios-tools-0.9.66-shaded.jar"
bin.write_jar_script libexec/"helios-tools-0.9.66-shaded.jar", "helios", "-XX:+TieredCompilation -XX:TieredStopAtLevel=1 -Xverify:none"
end
test do
system "#{bin}/helios", "--version"
end
end
|
require "formula"
# Homebrew formula for the Helios command line client, distributed as a
# pre-built shaded jar.
class Helios < Formula
homepage "https://github.com/spotify/helios"
url "https://oss.sonatype.org/service/local/repositories/releases/content/com/spotify/helios-tools/0.9.70/helios-tools-0.9.70-shaded.jar"
sha256 "4cde84b3d20624a7b99b917e25670f7ce6eee00ef14fb5a08d1bafdba791e2f7"
version "0.9.70"
depends_on :java => "1.7+"
# Install the jar and wrap it in a launcher script with fast-startup
# JVM flags.
def install
libexec.install "helios-tools-0.9.70-shaded.jar"
bin.write_jar_script libexec/"helios-tools-0.9.70-shaded.jar", "helios", "-XX:+TieredCompilation -XX:TieredStopAtLevel=1 -Xverify:none"
end
test do
system "#{bin}/helios", "--version"
end
end
Update helios to 0.9.71
require "formula"

# Homebrew formula for the Helios command line client, distributed as a
# pre-built shaded jar.
class Helios < Formula
  homepage "https://github.com/spotify/helios"
  url "https://oss.sonatype.org/service/local/repositories/releases/content/com/spotify/helios-tools/0.9.71/helios-tools-0.9.71-shaded.jar"
  sha256 "f908577611ba9c829c32452395862c3a655f12b6fc17ef577f5828e26d59be6d"
  version "0.9.71"

  depends_on :java => "1.7+"

  def install
    # Derive the jar name from the declared version instead of repeating
    # the version literal.
    jar = "helios-tools-#{version}-shaded.jar"
    libexec.install jar
    bin.write_jar_script libexec/jar, "helios", "-XX:+TieredCompilation -XX:TieredStopAtLevel=1 -Xverify:none"
  end

  test do
    system "#{bin}/helios", "--version"
  end
end
|
# Public-facing controller: article listings, single articles/pages,
# search, archives, redirects from legacy permalinks, and RSS/Atom feeds.
class ArticlesController < ContentController
# Drafts may only be previewed by a logged-in user.
before_action :login_required, only: [:preview, :preview_page]
before_action :auto_discovery_feed, only: [:show, :index]
before_action :verify_config
layout :theme_layout, except: [:trackback]
helper :'admin/base'
# Paginated home page / date archives; also serves Atom and RSS feeds.
def index
# Notes appear in the timeline only when the blog setting enables them.
conditions = this_blog.statuses_in_timeline ? ['type in (?, ?)', 'Article', 'Note'] : ['type = ?', 'Article']
limit = this_blog.per_page(params[:format])
articles_base = if params[:year].blank?
this_blog.contents.published
else
this_blog.contents.published_at(params.values_at(:year, :month, :day))
end
@articles = articles_base.includes(:user, :tags).where(conditions).page(params[:page]).per(limit)
# Title/description templates vary by context: home, date archive, or
# a paginated page beyond the first.
@page_title = this_blog.home_title_template
@description = this_blog.home_desc_template
if params[:year]
@page_title = this_blog.archives_title_template
@description = this_blog.archives_desc_template
elsif params[:page]
@page_title = this_blog.paginated_title_template
@description = this_blog.paginated_desc_template
end
@page_title = @page_title.to_title(@articles, this_blog, params)
@description = @description.to_title(@articles, this_blog, params)
@keywords = this_blog.meta_keywords
respond_to do |format|
format.html { render_paginated_index }
format.atom do
render_articles_feed('atom')
end
format.rss do
auto_discovery_feed(only_path: false)
render_articles_feed('rss')
end
end
end
# Full-text search over articles; renders HTML or a feed.
def search
@articles = this_blog.articles_matching(params[:q], page: params[:page], per_page: this_blog.per_page(params[:format]))
return error! if @articles.empty?
@page_title = this_blog.search_title_template.to_title(@articles, this_blog, params)
@description = this_blog.search_desc_template.to_title(@articles, this_blog, params)
respond_to do |format|
format.html { render 'search' }
format.rss { render_articles_feed 'rss' }
format.atom { render_articles_feed 'atom' }
end
end
# Lightweight search endpoint rendered without the site layout
# (presumably called via AJAX -- confirm against the theme JS).
def live_search
@search = params[:q]
@articles = Article.search(@search)
render :live_search, layout: false
end
# Shows the most recent draft of an article (login required, see
# before_action above).
def preview
@article = Article.last_draft(params[:id])
@page_title = this_blog.article_title_template.to_title(@article, this_blog, params)
render 'read'
end
# XHR-only password gate for protected articles: returns the article
# body on a correct password, otherwise re-renders the password form.
def check_password
return unless request.xhr?
@article = Article.find(params[:article][:id])
if @article.password == params[:article][:password]
render partial: 'articles/full_article_content', locals: { article: @article }
else
render partial: 'articles/password_form', locals: { article: @article }
end
end
# Resolves an arbitrary incoming path: current permalink format, then
# legacy date-based formats (301), then stored Redirect records.
def redirect
from = extract_feed_format(params[:from])
factory = Article::Factory.new(this_blog, current_user)
@article = factory.match_permalink_format(from, this_blog.permalink_format)
return show_article if @article
# Redirect old version with /:year/:month/:day/:title to new format,
# because it's changed
['%year%/%month%/%day%/%title%', 'articles/%year%/%month%/%day%/%title%'].each do |part|
@article = factory.match_permalink_format(from, part)
return redirect_to URI.parse(@article.permalink_url).path, status: 301 if @article
end
# NOTE(review): find_by! raises RecordNotFound when no match, so the
# trailing `if r` guard can never be false -- confirm intent.
r = Redirect.find_by!(from_path: from)
return redirect_to r.full_to_path, status: 301 if r # Let redirection made outside of the blog on purpose (deal with it, Brakeman!)
end
# Paginated list of all published articles.
def archives
limit = this_blog.limit_archives_display
@articles = this_blog.published_articles.page(params[:page]).per(limit)
@page_title = this_blog.archives_title_template.to_title(@articles, this_blog, params)
@keywords = this_blog.meta_keywords
@description = this_blog.archives_desc_template.to_title(@articles, this_blog, params)
end
# Legacy tag URL: permanently redirect to the tags index.
def tag
redirect_to tags_path, status: 301
end
# Preview a page draft (login required, see before_action above).
def preview_page
@page = Page.find(params[:id])
render 'view_page'
end
# Shows a published static page looked up by its (possibly nested) name.
def view_page
@page = Page.published.find_by!(name: Array(params[:name]).join('/'))
@page_title = @page.title
@description = this_blog.meta_description
@keywords = this_blog.meta_keywords
end
# TODO: Move to TextfilterController?
def markup_help
render html: TextFilter.find(params[:id]).commenthelp
end
private
# Redirects to setup or registration until the blog is usable.
def verify_config
if !this_blog.configured?
redirect_to controller: 'setup', action: 'index'
elsif User.count == 0
redirect_to new_user_registration_path
else
true
end
end
# See an article We need define @article before
def show_article
@comment = Comment.new
@page_title = this_blog.article_title_template.to_title(@article, this_blog, params)
@description = this_blog.article_desc_template.to_title(@article, this_blog, params)
groupings = @article.tags
@keywords = groupings.map(&:name).join(', ')
auto_discovery_feed
respond_to do |format|
format.html { render "articles/#{@article.post_type}" }
format.atom { render_feedback_feed('atom') }
format.rss { render_feedback_feed('rss') }
format.xml { render_feedback_feed('atom') }
end
rescue ActiveRecord::RecordNotFound
error!
end
# Renders the cached article-list feed; cache key varies with the
# cache keys of every listed article.
def render_articles_feed(format)
template = "index_#{format}_feed"
key = "articles/#{template}-#{@articles.map(&:cache_key).join('-')}"
feed = Rails.cache.fetch(key) do
render_to_string template, layout: false
end
render xml: feed
end
# Renders the cached per-article feedback (comments) feed.
def render_feedback_feed(format)
template = "feedback_#{format}_feed"
key = "articles/#{template}-#{@article.cache_key}"
feed = Rails.cache.fetch(key) do
@feedback = @article.published_feedback
render_to_string template, layout: false
end
render xml: feed
end
def render_paginated_index
return error! if @articles.empty?
auto_discovery_feed(only_path: false)
render 'index'
end
# Strips a trailing .rss/.atom from the path and records the requested
# feed format on the request.
def extract_feed_format(from)
if from =~ /^.*\.rss$/
request.format = 'rss'
from = from.gsub(/\.rss/, '')
elsif from =~ /^.*\.atom$/
request.format = 'atom'
from = from.gsub(/\.atom$/, '')
end
from
end
# Renders the "no posts found" page; note it deliberately uses
# status 200, not 404.
def error!
@message = I18n.t('errors.no_posts_found')
render 'articles/error', status: 200
end
end
Wrap long line
# Public blog controller: article timelines, search, previews, static
# pages, and the RSS/Atom feeds (including their fragment caching).
class ArticlesController < ContentController
  before_action :login_required, only: [:preview, :preview_page]
  before_action :auto_discovery_feed, only: [:show, :index]
  before_action :verify_config

  layout :theme_layout, except: [:trackback]

  helper :'admin/base'

  # Timeline of published articles (and notes, when enabled), optionally
  # narrowed to a year/month/day archive; renders HTML, Atom or RSS.
  def index
    conditions = this_blog.statuses_in_timeline ? ['type in (?, ?)', 'Article', 'Note'] : ['type = ?', 'Article']
    limit = this_blog.per_page(params[:format])
    articles_base = if params[:year].blank?
                      this_blog.contents.published
                    else
                      this_blog.contents.published_at(params.values_at(:year, :month, :day))
                    end
    @articles = articles_base.includes(:user, :tags).
      where(conditions).page(params[:page]).per(limit)
    # Pick title/description templates: archives beat pagination beats home.
    @page_title = this_blog.home_title_template
    @description = this_blog.home_desc_template
    if params[:year]
      @page_title = this_blog.archives_title_template
      @description = this_blog.archives_desc_template
    elsif params[:page]
      @page_title = this_blog.paginated_title_template
      @description = this_blog.paginated_desc_template
    end
    @page_title = @page_title.to_title(@articles, this_blog, params)
    @description = @description.to_title(@articles, this_blog, params)
    @keywords = this_blog.meta_keywords
    respond_to do |format|
      format.html { render_paginated_index }
      format.atom do
        render_articles_feed('atom')
      end
      format.rss do
        auto_discovery_feed(only_path: false)
        render_articles_feed('rss')
      end
    end
  end

  # Full-text search over articles; renders HTML or a feed of the hits.
  def search
    @articles = this_blog.articles_matching(params[:q], page: params[:page], per_page: this_blog.per_page(params[:format]))
    return error! if @articles.empty?
    @page_title = this_blog.search_title_template.to_title(@articles, this_blog, params)
    @description = this_blog.search_desc_template.to_title(@articles, this_blog, params)
    respond_to do |format|
      format.html { render 'search' }
      format.rss { render_articles_feed 'rss' }
      format.atom { render_articles_feed 'atom' }
    end
  end

  # Layout-less result list backing the live-search widget.
  def live_search
    @search = params[:q]
    @articles = Article.search(@search)
    render :live_search, layout: false
  end

  # Shows the latest draft of an article (login required via before_action).
  def preview
    @article = Article.last_draft(params[:id])
    @page_title = this_blog.article_title_template.to_title(@article, this_blog, params)
    render 'read'
  end

  # XHR endpoint: reveals a password-protected article's body when the
  # submitted password matches, otherwise re-renders the password form.
  def check_password
    return unless request.xhr?
    @article = Article.find(params[:article][:id])
    if @article.password == params[:article][:password]
      render partial: 'articles/full_article_content', locals: { article: @article }
    else
      render partial: 'articles/password_form', locals: { article: @article }
    end
  end

  # Resolves a permalink — current format, legacy formats, or an explicit
  # Redirect record — and either shows the article or 301-redirects.
  def redirect
    from = extract_feed_format(params[:from])
    factory = Article::Factory.new(this_blog, current_user)
    @article = factory.match_permalink_format(from, this_blog.permalink_format)
    return show_article if @article
    # Redirect old version with /:year/:month/:day/:title to new format,
    # because it's changed
    ['%year%/%month%/%day%/%title%', 'articles/%year%/%month%/%day%/%title%'].each do |part|
      @article = factory.match_permalink_format(from, part)
      return redirect_to URI.parse(@article.permalink_url).path, status: 301 if @article
    end
    r = Redirect.find_by!(from_path: from)
    return redirect_to r.full_to_path, status: 301 if r # Let redirection made outside of the blog on purpose (deal with it, Brakeman!)
  end

  # Paginated archive listing of all published articles.
  def archives
    limit = this_blog.limit_archives_display
    @articles = this_blog.published_articles.page(params[:page]).per(limit)
    @page_title = this_blog.archives_title_template.to_title(@articles, this_blog, params)
    @keywords = this_blog.meta_keywords
    @description = this_blog.archives_desc_template.to_title(@articles, this_blog, params)
  end

  # Legacy route: tag listings moved permanently to /tags.
  def tag
    redirect_to tags_path, status: 301
  end

  # Shows an unpublished page (login required via before_action).
  def preview_page
    @page = Page.find(params[:id])
    render 'view_page'
  end

  # Displays a published static page addressed by its (possibly nested) name.
  def view_page
    @page = Page.published.find_by!(name: Array(params[:name]).join('/'))
    @page_title = @page.title
    @description = this_blog.meta_description
    @keywords = this_blog.meta_keywords
  end

  # TODO: Move to TextfilterController?
  def markup_help
    render html: TextFilter.find(params[:id]).commenthelp
  end

  private

  # Forces initial setup when the blog is unconfigured or has no users yet.
  def verify_config
    if !this_blog.configured?
      redirect_to controller: 'setup', action: 'index'
    elsif User.count == 0
      redirect_to new_user_registration_path
    else
      true
    end
  end

  # Shows one article. @article must be set before calling this.
  def show_article
    @comment = Comment.new
    @page_title = this_blog.article_title_template.to_title(@article, this_blog, params)
    @description = this_blog.article_desc_template.to_title(@article, this_blog, params)
    groupings = @article.tags
    @keywords = groupings.map(&:name).join(', ')
    auto_discovery_feed
    respond_to do |format|
      format.html { render "articles/#{@article.post_type}" }
      format.atom { render_feedback_feed('atom') }
      format.rss { render_feedback_feed('rss') }
      format.xml { render_feedback_feed('atom') }
    end
  rescue ActiveRecord::RecordNotFound
    error!
  end

  # Renders the cached article-list feed for the requested format.
  def render_articles_feed(format)
    template = "index_#{format}_feed"
    key = "articles/#{template}-#{@articles.map(&:cache_key).join('-')}"
    feed = Rails.cache.fetch(key) do
      render_to_string template, layout: false
    end
    render xml: feed
  end

  # Renders the cached per-article feedback (comments/trackbacks) feed.
  def render_feedback_feed(format)
    template = "feedback_#{format}_feed"
    key = "articles/#{template}-#{@article.cache_key}"
    feed = Rails.cache.fetch(key) do
      @feedback = @article.published_feedback
      render_to_string template, layout: false
    end
    render xml: feed
  end

  # HTML index, or the shared error page when there is nothing to show.
  def render_paginated_index
    return error! if @articles.empty?
    auto_discovery_feed(only_path: false)
    render 'index'
  end

  # Detects a trailing .rss/.atom on a permalink, switches the request
  # format accordingly, and returns the permalink without the extension.
  def extract_feed_format(from)
    if from =~ /^.*\.rss$/
      request.format = 'rss'
      # Anchor the substitution: the previous unanchored /\.rss/ stripped
      # every ".rss" occurrence, mangling permalinks containing ".rss"
      # in their interior (the .atom branch was already anchored).
      from = from.gsub(/\.rss$/, '')
    elsif from =~ /^.*\.atom$/
      request.format = 'atom'
      from = from.gsub(/\.atom$/, '')
    end
    from
  end

  # Shared "no posts found" page with a localized message.
  # NOTE(review): responds 200 rather than 404 — confirm that is intended.
  def error!
    @message = I18n.t('errors.no_posts_found')
    render 'articles/error', status: 200
  end
end
|
require File.expand_path('../lib/single_table_globalize3/version', __FILE__)

# Gem specification for single_table_globalize3. The ActiveRecord /
# ActiveModel constraints can be pinned to a specific Rails 3.x series
# through the RAILS_3_0 / RAILS_3_1 / RAILS_3_2 environment variables.
Gem::Specification.new do |s|
  s.name = 'single_table_globalize3'
  s.version = SingleTableGlobalize3::VERSION
  s.authors = ['Sven Fuchs', 'Joshua Harvey', 'Clemens Kofler', 'John-Paul Bader', 'Tomasz Stachewicz', 'Philip Arndt', 'Trong Tran']
  s.email = 'trong.v.tran@gmail.com'
  s.homepage = 'http://github.com/trongrg/single_table_globalize3'
  s.summary = 'Rails I18n de-facto standard library for ActiveRecord 3 model/data translation with single table'
  s.description = "#{s.summary}."
  s.files = Dir['{lib/**/*,[A-Z]*}']
  s.platform = Gem::Platform::RUBY
  s.require_path = 'lib'
  s.rubyforge_project = '[none]'

  if ENV['RAILS_3_0']
    s.add_dependency 'activerecord', '~> 3.0.0'
    s.add_dependency 'activemodel', '~> 3.0.0'
  elsif ENV['RAILS_3_1']
    s.add_dependency 'activerecord', '~> 3.1.0'
    s.add_dependency 'activemodel', '~> 3.1.0'
  elsif ENV['RAILS_3_2']
    s.add_dependency 'activerecord', '~> 3.2.0'
    s.add_dependency 'activemodel', '~> 3.2.0'
  else
    # normal case
    s.add_dependency 'activerecord', '>= 3.0.0'
    s.add_dependency 'activemodel', '>= 3.0.0'
  end

  s.add_development_dependency 'database_cleaner', '~> 0.6.0'
  s.add_development_dependency 'mocha'
  s.add_development_dependency 'pathname_local'
  s.add_development_dependency 'test_declarative'
  s.add_development_dependency 'sqlite3'
  # rake is required to run the gem's Rakefile tasks (test suite).
  s.add_development_dependency 'rake'
end
Add rake to the development dependencies.
require File.expand_path('../lib/single_table_globalize3/version', __FILE__)

# Gem specification for single_table_globalize3. The ActiveRecord /
# ActiveModel constraints can be pinned to a specific Rails 3.x series
# through the RAILS_3_0 / RAILS_3_1 / RAILS_3_2 environment variables.
Gem::Specification.new do |spec|
  spec.name        = 'single_table_globalize3'
  spec.version     = SingleTableGlobalize3::VERSION
  spec.platform    = Gem::Platform::RUBY
  spec.authors     = ['Sven Fuchs', 'Joshua Harvey', 'Clemens Kofler', 'John-Paul Bader', 'Tomasz Stachewicz', 'Philip Arndt', 'Trong Tran']
  spec.email       = 'trong.v.tran@gmail.com'
  spec.homepage    = 'http://github.com/trongrg/single_table_globalize3'
  spec.summary     = 'Rails I18n de-facto standard library for ActiveRecord 3 model/data translation with single table'
  spec.description = "#{spec.summary}."
  spec.files       = Dir['{lib/**/*,[A-Z]*}']
  spec.require_path = 'lib'
  spec.rubyforge_project = '[none]'

  if ENV['RAILS_3_0']
    spec.add_dependency 'activerecord', '~> 3.0.0'
    spec.add_dependency 'activemodel', '~> 3.0.0'
  elsif ENV['RAILS_3_1']
    spec.add_dependency 'activerecord', '~> 3.1.0'
    spec.add_dependency 'activemodel', '~> 3.1.0'
  elsif ENV['RAILS_3_2']
    spec.add_dependency 'activerecord', '~> 3.2.0'
    spec.add_dependency 'activemodel', '~> 3.2.0'
  else
    # normal case
    spec.add_dependency 'activerecord', '>= 3.0.0'
    spec.add_dependency 'activemodel', '>= 3.0.0'
  end

  spec.add_development_dependency 'database_cleaner', '~> 0.6.0'
  spec.add_development_dependency 'mocha'
  spec.add_development_dependency 'pathname_local'
  spec.add_development_dependency 'test_declarative'
  spec.add_development_dependency 'sqlite3'
  spec.add_development_dependency 'rake'
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for gnip-rules v1.1.0, a wrapper around the Gnip Rules API.
Gem::Specification.new do |s|
s.name = "gnip-rules"
s.version = "1.1.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Spencer Markowski", "The Able Few"]
s.date = "2012-03-26"
s.description = "Remove, Add and List your Gnip Rules"
s.email = "spencer@theablefew.com"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
s.files = [
".document",
"Gemfile",
"LICENSE.txt",
"README.md",
"Rakefile",
"VERSION",
"gnip-rules.gemspec",
"lib/gnip-rules.rb",
"test/helper.rb",
"test/test_gnip-rules.rb",
"test/test_rule.rb"
]
s.homepage = "http://github.com/theablefew/gnip-rules"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.10"
s.summary = "A simple wrapper for the Gnip Rules API"
# jeweler emits the dependency list three times to stay compatible with
# old RubyGems releases: with specification_version and the runtime /
# development distinction (RubyGems >= 1.2), and two plain fallbacks.
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<httparty>, [">= 0"])
s.add_runtime_dependency(%q<rake>, [">= 0"])
s.add_runtime_dependency(%q<json>, [">= 0"])
s.add_runtime_dependency(%q<activesupport>, [">= 0"])
s.add_development_dependency(%q<shoulda-context>, [">= 0"])
s.add_development_dependency(%q<turn>, [">= 0"])
s.add_development_dependency(%q<assert2>, [">= 0"])
s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_development_dependency(%q<rcov>, [">= 0"])
else
s.add_dependency(%q<httparty>, [">= 0"])
s.add_dependency(%q<rake>, [">= 0"])
s.add_dependency(%q<json>, [">= 0"])
s.add_dependency(%q<activesupport>, [">= 0"])
s.add_dependency(%q<shoulda-context>, [">= 0"])
s.add_dependency(%q<turn>, [">= 0"])
s.add_dependency(%q<assert2>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
end
else
s.add_dependency(%q<httparty>, [">= 0"])
s.add_dependency(%q<rake>, [">= 0"])
s.add_dependency(%q<json>, [">= 0"])
s.add_dependency(%q<activesupport>, [">= 0"])
s.add_dependency(%q<shoulda-context>, [">= 0"])
s.add_dependency(%q<turn>, [">= 0"])
s.add_dependency(%q<assert2>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
end
end
Update gemspec for v1.1.1
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for gnip-rules v1.1.1, a wrapper around the Gnip Rules API.
Gem::Specification.new do |s|
s.name = "gnip-rules"
s.version = "1.1.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Spencer Markowski", "The Able Few"]
s.date = "2012-04-23"
s.description = "Remove, Add and List your Gnip Rules"
s.email = "spencer@theablefew.com"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
s.files = [
".document",
"Gemfile",
"LICENSE.txt",
"README.md",
"Rakefile",
"VERSION",
"gnip-rules.gemspec",
"lib/gnip-rules.rb",
"test/helper.rb",
"test/test_gnip-rules.rb",
"test/test_rule.rb"
]
s.homepage = "http://github.com/theablefew/gnip-rules"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.10"
s.summary = "A simple wrapper for the Gnip Rules API"
# jeweler emits the dependency list three times to stay compatible with
# old RubyGems releases: with specification_version and the runtime /
# development distinction (RubyGems >= 1.2), and two plain fallbacks.
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<httparty>, [">= 0"])
s.add_runtime_dependency(%q<rake>, [">= 0"])
s.add_runtime_dependency(%q<json>, [">= 0"])
s.add_runtime_dependency(%q<activesupport>, [">= 0"])
s.add_development_dependency(%q<shoulda-context>, [">= 0"])
s.add_development_dependency(%q<turn>, [">= 0"])
s.add_development_dependency(%q<assert2>, [">= 0"])
s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_development_dependency(%q<rcov>, [">= 0"])
else
s.add_dependency(%q<httparty>, [">= 0"])
s.add_dependency(%q<rake>, [">= 0"])
s.add_dependency(%q<json>, [">= 0"])
s.add_dependency(%q<activesupport>, [">= 0"])
s.add_dependency(%q<shoulda-context>, [">= 0"])
s.add_dependency(%q<turn>, [">= 0"])
s.add_dependency(%q<assert2>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
end
else
s.add_dependency(%q<httparty>, [">= 0"])
s.add_dependency(%q<rake>, [">= 0"])
s.add_dependency(%q<json>, [">= 0"])
s.add_dependency(%q<activesupport>, [">= 0"])
s.add_dependency(%q<shoulda-context>, [">= 0"])
s.add_dependency(%q<turn>, [">= 0"])
s.add_dependency(%q<assert2>, [">= 0"])
s.add_dependency(%q<bundler>, ["~> 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rcov>, [">= 0"])
end
end
# Homebrew formula for GNU Data Language (GDL), an IDL/PV-WAVE compiler.
# Builds a private plplot (with X11 driver) from a bundled resource before
# building GDL itself.
class Gnudatalanguage < Formula
  desc "Free and open-source IDL/PV-WAVE compiler"
  homepage "https://gnudatalanguage.sourceforge.io/"
  url "https://downloads.sourceforge.net/project/gnudatalanguage/gdl/0.9.7/gdl-0.9.7.tgz"
  sha256 "2b5945d06e4d95f01cb70a3c432ac2fa4c81e1b3ac7c02687a6704ab042a7e21"
  revision 3

  bottle do
    sha256 "2d9af5504ce3a26fb0d113991331595fb35383b0898fa66b463f82ef67e1a509" => :sierra
    sha256 "788434958e6a756ce3dc801dc053e39a1176ba108a0cd5b3e35bf805752b5595" => :el_capitan
    sha256 "a73976464e23bdd080315e885a6ab6998cb5783963e3710a284bc55e54d81367" => :yosemite
    sha256 "93974bea108e1865dd38c588f41d8884b97490e8febf39624817d220c8988f58" => :x86_64_linux
  end

  depends_on "cmake" => :build
  depends_on "pkg-config" => :build
  # NOTE: "gsl" was previously declared twice; the duplicate is removed.
  depends_on "gsl"
  depends_on "readline"
  depends_on "graphicsmagick"
  depends_on "netcdf"
  depends_on "homebrew/science/hdf4" => :optional
  depends_on "hdf5"
  depends_on "libpng"
  depends_on "udunits"
  depends_on "fftw"
  depends_on "eigen"
  depends_on :x11
  depends_on :python => :optional
  # Supplementary dependencies for plplot
  depends_on "cairo"
  depends_on "pango"
  depends_on "freetype"
  depends_on "libtool" => :run

  # Support HDF5 1.10. See https://bugs.debian.org/841971
  patch do
    url "https://gist.githubusercontent.com/sjackman/00fb95e10b7775d16924efb6faf462f6/raw/71ed3e05138a20b824c9e68707e403afc0f92c98/gnudatalanguage-hdf5-1.10.patch"
    sha256 "8400c3c17ac87704540a302673563c1e417801e729e3460f1565b8cd1ef9fc9d"
  end
  # Inline patch (after __END__) only needed when building against HDF4.
  patch :DATA if build.with? "hdf4"

  resource "plplot-x11" do
    url "https://downloads.sourceforge.net/project/plplot/plplot/5.12.0%20Source/plplot-5.12.0.tar.gz"
    sha256 "8dc5da5ef80e4e19993d4c3ef2a84a24cc0e44a5dade83201fca7160a6d352ce"
  end

  def install
    # Build plplot with only the X11 driver enabled; every optional
    # language binding and alternative driver is switched off.
    resource("plplot-x11").stage do
      args = std_cmake_args
      args << "-DPLD_xwin=ON"
      args += %w[
        -DENABLE_ada=OFF
        -DENABLE_d=OFF
        -DENABLE_qt=OFF
        -DENABLE_lua=OFF
        -DENABLE_tk=OFF
        -DENABLE_python=OFF
        -DENABLE_tcl=OFF
        -DPLD_xcairo=OFF
        -DPLD_wxwidgets=OFF
        -DENABLE_wxwidgets=OFF
        -DENABLE_java=OFF
        -DENABLE_f95=OFF
      ]
      mkdir "plplot-build" do
        system "cmake", "..", *args
        system "make"
        system "make", "install"
      end
    end

    mkdir "build" do
      args = std_cmake_args
      args << "-DHDF=OFF" if build.without?("hdf4")
      args << "-DPYTHON=OFF" if build.without?("python")
      args << "-DWXWIDGETS=OFF" << "-DPSLIB=OFF"
      system "cmake", "..", *args
      system "make"
      # The following tests FAILED:
      # 80 - test_execute.pro (Failed)
      # 84 - test_fft_leak.pro (Failed)
      # 108 - test_image_statistics.pro (Failed)
      # 128 - test_obj_isa.pro (Failed)
      # Reported 3 Feb 2017 https://sourceforge.net/p/gnudatalanguage/bugs/716/
      # system "make", "check"
      system "make", "install"
    end
  end

  test do
    system "#{bin}/gdl", "--version"
  end
end
__END__
diff --git a/src/GDLTokenTypes.hpp b/src/GDLTokenTypes.hpp
index 06b9316..a91f226 100644
--- a/src/GDLTokenTypes.hpp
+++ b/src/GDLTokenTypes.hpp
@@ -10,6 +10,10 @@
#ifdef __cplusplus
struct CUSTOM_API GDLTokenTypes {
#endif
+
+#ifdef NOP
+#undef NOP
+#endif
enum {
EOF_ = 1,
ALL = 4,
gnudatalanguage: update 0.9.7_3 bottle.
# Homebrew formula for GNU Data Language (GDL), an IDL/PV-WAVE compiler.
# Builds a private plplot (with X11 driver) from a bundled resource before
# building GDL itself.
class Gnudatalanguage < Formula
  desc "Free and open-source IDL/PV-WAVE compiler"
  homepage "https://gnudatalanguage.sourceforge.io/"
  url "https://downloads.sourceforge.net/project/gnudatalanguage/gdl/0.9.7/gdl-0.9.7.tgz"
  sha256 "2b5945d06e4d95f01cb70a3c432ac2fa4c81e1b3ac7c02687a6704ab042a7e21"
  revision 3

  bottle do
    sha256 "1c6b7b17e491d32a47e775bdeac18614d6c3d5c091514577ca86e18d990cea2f" => :sierra
    sha256 "64fcf771c28e8a9d123f9f6de3be0730506bc888303ca980a17391261f04ebac" => :el_capitan
    sha256 "a295e482e7f5a2a14fe98396292ceeac3c9c2ccdb1e02057c0fc27f7d87f5008" => :yosemite
  end

  depends_on "cmake" => :build
  depends_on "pkg-config" => :build
  # NOTE: "gsl" was previously declared twice; the duplicate is removed.
  depends_on "gsl"
  depends_on "readline"
  depends_on "graphicsmagick"
  depends_on "netcdf"
  depends_on "homebrew/science/hdf4" => :optional
  depends_on "hdf5"
  depends_on "libpng"
  depends_on "udunits"
  depends_on "fftw"
  depends_on "eigen"
  depends_on :x11
  depends_on :python => :optional
  # Supplementary dependencies for plplot
  depends_on "cairo"
  depends_on "pango"
  depends_on "freetype"
  depends_on "libtool" => :run

  # Support HDF5 1.10. See https://bugs.debian.org/841971
  patch do
    url "https://gist.githubusercontent.com/sjackman/00fb95e10b7775d16924efb6faf462f6/raw/71ed3e05138a20b824c9e68707e403afc0f92c98/gnudatalanguage-hdf5-1.10.patch"
    sha256 "8400c3c17ac87704540a302673563c1e417801e729e3460f1565b8cd1ef9fc9d"
  end
  # Inline patch (after __END__) only needed when building against HDF4.
  patch :DATA if build.with? "hdf4"

  resource "plplot-x11" do
    url "https://downloads.sourceforge.net/project/plplot/plplot/5.12.0%20Source/plplot-5.12.0.tar.gz"
    sha256 "8dc5da5ef80e4e19993d4c3ef2a84a24cc0e44a5dade83201fca7160a6d352ce"
  end

  def install
    # Build plplot with only the X11 driver enabled; every optional
    # language binding and alternative driver is switched off.
    resource("plplot-x11").stage do
      args = std_cmake_args
      args << "-DPLD_xwin=ON"
      args += %w[
        -DENABLE_ada=OFF
        -DENABLE_d=OFF
        -DENABLE_qt=OFF
        -DENABLE_lua=OFF
        -DENABLE_tk=OFF
        -DENABLE_python=OFF
        -DENABLE_tcl=OFF
        -DPLD_xcairo=OFF
        -DPLD_wxwidgets=OFF
        -DENABLE_wxwidgets=OFF
        -DENABLE_java=OFF
        -DENABLE_f95=OFF
      ]
      mkdir "plplot-build" do
        system "cmake", "..", *args
        system "make"
        system "make", "install"
      end
    end

    mkdir "build" do
      args = std_cmake_args
      args << "-DHDF=OFF" if build.without?("hdf4")
      args << "-DPYTHON=OFF" if build.without?("python")
      args << "-DWXWIDGETS=OFF" << "-DPSLIB=OFF"
      system "cmake", "..", *args
      system "make"
      # The following tests FAILED:
      # 80 - test_execute.pro (Failed)
      # 84 - test_fft_leak.pro (Failed)
      # 108 - test_image_statistics.pro (Failed)
      # 128 - test_obj_isa.pro (Failed)
      # Reported 3 Feb 2017 https://sourceforge.net/p/gnudatalanguage/bugs/716/
      # system "make", "check"
      system "make", "install"
    end
  end

  test do
    system "#{bin}/gdl", "--version"
  end
end
__END__
diff --git a/src/GDLTokenTypes.hpp b/src/GDLTokenTypes.hpp
index 06b9316..a91f226 100644
--- a/src/GDLTokenTypes.hpp
+++ b/src/GDLTokenTypes.hpp
@@ -10,6 +10,10 @@
#ifdef __cplusplus
struct CUSTOM_API GDLTokenTypes {
#endif
+
+#ifdef NOP
+#undef NOP
+#endif
enum {
EOF_ = 1,
ALL = 4,
|
Gem::Specification.new do |s|
s.name = "bentley_mcilroy"
s.version = "0.0.0"
s.authors = ["Adam Prescott"]
s.email = ["adam@aprescott.com"]
s.homepage = "https://github.com/aprescott/bentley_mcilroy"
s.summary = "Bentley-McIlroy compression scheme implementation in Ruby."
s.description = "A compression scheme using the Bentley-McIlroy data compression technique of finding long common substrings."
s.files = Dir["{lib/**/*,test/**/*}"] + %w[LICENSE README.md bentley_mcilroy.gemspec rakefile]
s.test_files = Dir["test/*"]
s.require_path = "lib"
s.add_development_dependency "rake"
s.add_development_dependency "rspec"
end
Bump to 0.0.1.
Gem::Specification.new do |s|
s.name = "bentley_mcilroy"
s.version = "0.0.1"
s.authors = ["Adam Prescott"]
s.email = ["adam@aprescott.com"]
s.homepage = "https://github.com/aprescott/bentley_mcilroy"
s.summary = "Bentley-McIlroy compression scheme implementation in Ruby."
s.description = "A compression scheme using the Bentley-McIlroy data compression technique of finding long common substrings."
s.files = Dir["{lib/**/*,test/**/*}"] + %w[LICENSE README.md bentley_mcilroy.gemspec rakefile]
s.test_files = Dir["test/*"]
s.require_path = "lib"
s.add_development_dependency "rake"
s.add_development_dependency "rspec"
end
|
# coding: utf-8
require 'securerandom'
require 'uri'
# SAML single-sign-on endpoints: initiates SP-initiated login (#sso),
# consumes IdP assertions (#acs) and serves SP metadata (#metadata).
class SamlController < ApplicationController
# The IdP POSTs the assertion to /saml/acs, so CSRF must be skipped there.
skip_before_action :verify_authenticity_token, :only => [:acs]
# Starts an SSO login: remembers the target subdomain, the IdP key and the
# URL to return to, then redirects the browser to the IdP's login URL.
def sso
session[:redirect_subdomain] = params[:subdomain].downcase
session[:sso_idp] = sso_idp = params[:sso_idp].downcase
# NOTE(review): this guard can never fire — a missing :sso_idp param
# already raises NoMethodError on the .downcase call above.
if !sso_idp
raise 'No SSO IdP specified'
end
if params.has_key?('redirect')
session[:redirect_back_to] = params['redirect']
else
session[:redirect_back_to] = request.referer
end
settings = get_saml_settings(get_url_base, sso_idp)
if settings.nil?
raise "No IdP Settings!"
end
req = OneLogin::RubySaml::Authrequest.new
if session[:sso_idp] == 'dtu'
# link for ADSF for DTU. Some versions of ADFS allow SSO initiated login and some do not.
# Self generating the link for IdP initiated login here to sidestep issue
dtu_adsf = "https://sts.ait.dtu.dk/adfs/ls/idpinitiatedsignon.aspx?loginToRp=https://saml-auth.consider.it/saml/dtu"
redirect_to(dtu_adsf)
else
redirect_to(req.create(settings))
end
end
# Assertion consumer service: validates the SAML response, finds or
# provisions the matching User, then redirects back to the stored URL
# with the user's email and auth token appended to the query string.
def acs
errors = []
settings = get_saml_settings(get_url_base, session[:sso_idp])
# Up to 60 seconds of clock drift tolerated between us and the IdP.
response = OneLogin::RubySaml::Response.new(params[:SAMLResponse], :settings => settings, :allowed_clock_drift => 60.second)
if response.is_valid?
session[:nameid] = response.nameid
session[:attributes] = response.attributes
@attrs = session[:attributes]
log("Sucessfully logged")
log("NAMEID: #{response.nameid}")
# response.attributes.each do |k,v|
# puts "#{k}: #{v}"
# end
# log user. in TODO allow for incorrect login and new user with name field
#TODO: error out gracefully if no email
# Prefer an explicit email attribute; otherwise treat the NameID as email.
if response.attributes.include?(:email)
email = response.attributes[:email]
else
email = response.nameid
end
# NOTE(review): raises NoMethodError when both the email attribute and
# nameid are absent — the nil/format guard below comes too late. Confirm.
email = email.downcase
user = User.find_by_email(email)
if !user || !user.registered
if !email || email.length == 0 || !/\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i.match(email)
raise 'Bad email address'
end
# TODO when IdP Delft gives us assertion statement spec, add name field below if not already present
# Derive a display name from whichever attribute the IdP provided.
name = nil
if response.attributes.include?('name')
name = response.attributes['name']
elsif response.attributes.include?('nickname')
name = response.attributes['nickname']
elsif response.attributes.include?('First Name')
name = response.attributes['First Name']
if response.attributes.include?('Last Name')
name += " #{response.attributes['Last Name']}"
end
elsif email
name = email.split('@')[0]
end
user ||= User.new
# TODO: does SAML sometimes give avatars?
# A random password is set because SSO users never authenticate with one.
user.update_attributes({
:email => email,
:password => SecureRandom.urlsafe_base64(60),
:name => name,
:registered => true,
:verified => true,
:complete_profile => true
})
end
# Import the IdP-supplied avatar once, if the user has none yet.
if response.attributes.include?('picture') && !user.avatar_file_name
user.avatar = URI.parse(response.attributes['picture'])
puts 'avatar...'
puts URI.parse(response.attributes['picture'])
user.avatar_remote_url = response.attributes['picture']
user.save
end
# Rails.logger.info("attributes:")
# response.attributes.each do |k,v|
# Rails.logger.info("\t#{k}: #{v}")
# end
# Hand control back to the originating subdomain with credentials in the query.
token = user.auth_token Subdomain.find_by_name(session[:redirect_subdomain])
uri = URI(session[:redirect_back_to])
uri.query = {:u => user.email, :t => token}.to_query + '&' + uri.query.to_s
redirect_to uri.to_s
else
log("Response Invalid from IdP. Errors: #{response.errors}")
raise "Response Invalid from IdP. Errors: #{response.errors}. "
end
end
# Serves this service provider's SAML metadata XML.
def metadata
# TODO: when is this method called?
# The below assumes that #sso was called in this session
settings = get_saml_settings(get_url_base, session[:sso_idp])
meta = OneLogin::RubySaml::Metadata.new
render :xml => meta.generate(settings, true)
end
# Scheme + host (+ port) of the current request, e.g. "https://example.com".
def get_url_base
"#{request.protocol}#{request.host_with_port}"
end
# Minimal audit-logging helper.
def log (what)
write_to_log({:what => what, :where => request.fullpath, :details => nil})
end
end
# Builds the OneLogin::RubySaml::Settings for the given IdP key. Settings
# come either from the IdP's remote metadata URL (when configured) or
# directly from the static hash in APP_CONFIG[:SSO_domains].
def get_saml_settings(url_base, sso_idp)
  conf = APP_CONFIG[:SSO_domains][sso_idp.to_sym]

  settings =
    if conf[:metadata]
      OneLogin::RubySaml::IdpMetadataParser.new.parse_remote(conf[:metadata])
    else
      OneLogin::RubySaml::Settings.new(conf)
    end

  url_base ||= "http://localhost:3000"

  settings.name_identifier_format = "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress"
  settings.soft = true

  # Some IdPs (e.g. Mozilla's configuration) expect the audience to be
  # /saml/acs rather than /saml/metadata, so the SP entity id deliberately
  # points at the ACS endpoint instead of the metadata endpoint.
  settings.sp_entity_id ||= url_base + "/saml/acs"
  settings.assertion_consumer_service_url ||= url_base + "/saml/acs"
  settings.assertion_consumer_logout_service_url ||= url_base + "/saml/logout"

  settings
end
It appears that some SAML IdPs return an email address in the name field; strip the domain part when deriving the user's display name.
# coding: utf-8
require 'securerandom'
require 'uri'
# SAML single-sign-on endpoints: initiates SP-initiated login (#sso),
# consumes IdP assertions (#acs) and serves SP metadata (#metadata).
class SamlController < ApplicationController
# The IdP POSTs the assertion to /saml/acs, so CSRF must be skipped there.
skip_before_action :verify_authenticity_token, :only => [:acs]
# Starts an SSO login: remembers the target subdomain, the IdP key and the
# URL to return to, then redirects the browser to the IdP's login URL.
def sso
session[:redirect_subdomain] = params[:subdomain].downcase
session[:sso_idp] = sso_idp = params[:sso_idp].downcase
# NOTE(review): this guard can never fire — a missing :sso_idp param
# already raises NoMethodError on the .downcase call above.
if !sso_idp
raise 'No SSO IdP specified'
end
if params.has_key?('redirect')
session[:redirect_back_to] = params['redirect']
else
session[:redirect_back_to] = request.referer
end
settings = get_saml_settings(get_url_base, sso_idp)
if settings.nil?
raise "No IdP Settings!"
end
req = OneLogin::RubySaml::Authrequest.new
if session[:sso_idp] == 'dtu'
# link for ADSF for DTU. Some versions of ADFS allow SSO initiated login and some do not.
# Self generating the link for IdP initiated login here to sidestep issue
dtu_adsf = "https://sts.ait.dtu.dk/adfs/ls/idpinitiatedsignon.aspx?loginToRp=https://saml-auth.consider.it/saml/dtu"
redirect_to(dtu_adsf)
else
redirect_to(req.create(settings))
end
end
# Assertion consumer service: validates the SAML response, finds or
# provisions the matching User, then redirects back to the stored URL
# with the user's email and auth token appended to the query string.
def acs
errors = []
settings = get_saml_settings(get_url_base, session[:sso_idp])
# Up to 60 seconds of clock drift tolerated between us and the IdP.
response = OneLogin::RubySaml::Response.new(params[:SAMLResponse], :settings => settings, :allowed_clock_drift => 60.second)
if response.is_valid?
session[:nameid] = response.nameid
session[:attributes] = response.attributes
@attrs = session[:attributes]
log("Sucessfully logged")
log("NAMEID: #{response.nameid}")
# response.attributes.each do |k,v|
# puts "#{k}: #{v}"
# end
# log user. in TODO allow for incorrect login and new user with name field
#TODO: error out gracefully if no email
# Prefer an explicit email attribute; otherwise treat the NameID as email.
if response.attributes.include?(:email)
email = response.attributes[:email]
else
email = response.nameid
end
# NOTE(review): raises NoMethodError when both the email attribute and
# nameid are absent — the nil/format guard below comes too late. Confirm.
email = email.downcase
user = User.find_by_email(email)
if !user || !user.registered
if !email || email.length == 0 || !/\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i.match(email)
raise 'Bad email address'
end
# TODO when IdP Delft gives us assertion statement spec, add name field below if not already present
# Derive a display name from whichever attribute the IdP provided; some
# IdPs return an email address as the name, so the domain part is
# stripped below.
name = nil
if response.attributes.include?('name')
name = response.attributes['name']
elsif response.attributes.include?('nickname')
name = response.attributes['nickname']
elsif response.attributes.include?('First Name')
name = response.attributes['First Name']
if response.attributes.include?('Last Name')
name += " #{response.attributes['Last Name']}"
end
elsif email
name = email
end
# NOTE(review): name.match would raise if name were nil, but that can
# only happen when email is falsy, which already crashed on .downcase.
if name.match('@')
name = name.split('@')[0]
end
user ||= User.new
# TODO: does SAML sometimes give avatars?
# A random password is set because SSO users never authenticate with one.
user.update_attributes({
:email => email,
:password => SecureRandom.urlsafe_base64(60),
:name => name,
:registered => true,
:verified => true,
:complete_profile => true
})
end
# Import the IdP-supplied avatar once, if the user has none yet.
if response.attributes.include?('picture') && !user.avatar_file_name
user.avatar = URI.parse(response.attributes['picture'])
puts 'avatar...'
puts URI.parse(response.attributes['picture'])
user.avatar_remote_url = response.attributes['picture']
user.save
end
# Rails.logger.info("attributes:")
# response.attributes.each do |k,v|
# Rails.logger.info("\t#{k}: #{v}")
# end
# Hand control back to the originating subdomain with credentials in the query.
token = user.auth_token Subdomain.find_by_name(session[:redirect_subdomain])
uri = URI(session[:redirect_back_to])
uri.query = {:u => user.email, :t => token}.to_query + '&' + uri.query.to_s
redirect_to uri.to_s
else
log("Response Invalid from IdP. Errors: #{response.errors}")
raise "Response Invalid from IdP. Errors: #{response.errors}. "
end
end
# Serves this service provider's SAML metadata XML.
def metadata
# TODO: when is this method called?
# The below assumes that #sso was called in this session
settings = get_saml_settings(get_url_base, session[:sso_idp])
meta = OneLogin::RubySaml::Metadata.new
render :xml => meta.generate(settings, true)
end
# Scheme + host (+ port) of the current request, e.g. "https://example.com".
def get_url_base
"#{request.protocol}#{request.host_with_port}"
end
# Minimal audit-logging helper.
def log (what)
write_to_log({:what => what, :where => request.fullpath, :details => nil})
end
end
# Builds the OneLogin::RubySaml::Settings for the given identity provider.
#
# url_base - scheme+host of this SP, used to derive callback URLs
#            (defaults to http://localhost:3000 when nil).
# sso_idp  - key into APP_CONFIG[:SSO_domains] naming the IdP configuration.
#
# Raises ArgumentError when no configuration exists for sso_idp, instead of
# the opaque NoMethodError the old code produced on an unknown/missing key.
def get_saml_settings(url_base, sso_idp)
  conf = sso_idp && APP_CONFIG[:SSO_domains][sso_idp.to_sym]
  raise ArgumentError, "No SSO configuration found for IdP #{sso_idp.inspect}" if conf.nil?

  if conf[:metadata]
    # Preferred path: let the IdP's published metadata document drive the settings.
    idp_metadata_parser = OneLogin::RubySaml::IdpMetadataParser.new
    settings = idp_metadata_parser.parse_remote(conf[:metadata])
  else
    settings = OneLogin::RubySaml::Settings.new(conf)
  end

  url_base ||= "http://localhost:3000"
  settings.name_identifier_format = "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress"
  # soft = true: validation failures return false rather than raising.
  settings.soft = true
  # mozilla configuration not working with sp_entity_id = /saml/metadata, expecting audience /saml/acs instead.
  # not sure if that is a configuration problem on their end, or a problem with the code
  # here from the start. sp_entity_id used to be issuer, so I would have expected /saml/metadata
  # to be correct.
  # settings.sp_entity_id ||= url_base + "/saml/metadata"
  settings.sp_entity_id ||= url_base + "/saml/acs"
  settings.assertion_consumer_service_url ||= url_base + "/saml/acs"
  settings.assertion_consumer_logout_service_url ||= url_base + "/saml/logout"
  # settings.security[:digest_method] ||= XMLSecurity::Document::SHA1
  # settings.security[:signature_method] ||= XMLSecurity::Document::RSA_SHA1
  # settings.request_attributes ||= [
  #   { :name => 'email', :name_format => 'urn:oasis:names:tc:SAML:2.0:attrname-format:basic', :friendly_name => 'Email address' },
  #   { :name => 'name', :name_format => 'urn:oasis:names:tc:SAML:2.0:attrname-format:basic', :friendly_name => 'Full name' },
  #   { :name => 'first_name', :name_format => 'urn:oasis:names:tc:SAML:2.0:attrname-format:basic', :friendly_name => 'Given name' },
  #   { :name => 'last_name', :name_format => 'urn:oasis:names:tc:SAML:2.0:attrname-format:basic', :friendly_name => 'Family name' }
  # ]
  settings
end
Added Ruby example for the AP Content API.
require "rubygems"
require "json"
require "net/http"
require "uri"
require "open-uri"

# Sign up for your apiKey at http://developer.ap.org
apiKey = "xxxxxx"

# Ensure the download folder exists; create it when missing.
begin
  downloadDir = "~/APContentAPI"
  # File.exist? replaces the deprecated File.exists? (removed in Ruby 3.2).
  unless File.exist?(File.expand_path(downloadDir))
    Dir.mkdir(File.expand_path(downloadDir))
  end
rescue => e
  puts e.message
end
# Enter search criteria. `gets` returns nil at EOF; `to_s` keeps that from
# crashing `.chomp` when input is piped/closed.
puts "What media type are you looking for?"
puts "Valid values are: photo, graphic and video. Please pick one."
mediaType = gets.to_s.chomp
puts "What would you like to search for?"
keywordSearch = gets.to_s.chomp

# Make the AP Content API request. result stays nil when the request fails,
# which the download loop below must tolerate.
result = nil
begin
  # Escape the whole query term, not just spaces: characters such as
  # '&', '#', or '+' in the search text would otherwise corrupt the URL.
  query = URI.encode_www_form_component(keywordSearch)
  apiURL = "http://api.ap.org/v2/search/#{mediaType}?q=#{query}&apiKey=#{apiKey}"
  uri = URI.parse(apiURL)
  puts "Searching #{uri}"
  http = Net::HTTP.new(uri.host, uri.port)
  request = Net::HTTP::Get.new(uri.request_uri, {'Accept' => 'application/json'})
  response = http.request(request)
  puts "Status Code: #{response.code}"
  # The response body begins with a UTF-8 BOM; skip the first three bytes before parsing.
  result = JSON.parse(response.body[3..-1].force_encoding("UTF-8"))
  puts "Total Results: #{result['totalResults']}"
  puts "Items Per Page: #{result['itemsPerPage']}"
rescue => e
  puts e.message
end
# Loop through each item's links and save the preview rendition.
# Guard on result: it is nil when the search request above failed, and the
# old code crashed with NoMethodError here in that case.
if result && result['totalResults'].to_i >= 1
  result['entries'].each do |entry|
    begin
      puts entry['title']
      entry['contentLinks'].each do |contentLink|
        next unless contentLink['rel'] == "preview"
        # Derive a local filename from the link's filename= query parameter.
        fileName = contentLink['href'].sub(/^http.*filename=*/, '').sub(/&sid.*$/, '')
        bapiURL = contentLink['href'] + "&apiKey=" + apiKey
        puts "Downloading #{bapiURL}"
        # open-uri follows redirects automatically, which the AP Content API
        # requires. The old code passed a block to #read; #read never yields,
        # so that block was dead code and has been removed.
        bapiRemoteFile = open(bapiURL).read
        # Write in binary mode ("wb"): previews are images/video and text mode
        # corrupts them on Windows. Block form guarantees the file is closed.
        File.open(File.expand_path(downloadDir) + "/" + fileName, "wb") do |f|
          f.write(bapiRemoteFile)
        end
      end
    rescue => e
      puts e.message
    end
  end
else
  puts "Please try another search..."
end
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.