CombinedText
stringlengths
4
3.42M
module Slingshot module Search class Search attr_reader :indices, :url, :results, :response, :json, :query, :facets, :filters def initialize(*indices, &block) @options = indices.pop if indices.last.is_a?(Hash) @indices = indices raise ArgumentError, 'Please pass index or indices to search' if @indices.empty? if @options Configuration.wrapper @options[:wrapper] if @options[:wrapper] end instance_eval(&block) if block_given? end def query(&block) @query = Query.new(&block) self end def sort(&block) @sort = Sort.new(&block) self end def facet(name, options={}, &block) @facets ||= {} @facets.update Facet.new(name, options, &block).to_hash self end def filter(type, *options) @filters ||= [] @filters << Filter.new(type, *options).to_hash self end def from(value) @from = value self end def size(value) @size = value self end def fields(fields=[]) @fields = fields self end def perform @url = "#{Configuration.url}/#{indices.join(',')}/_search" @response = Configuration.client.post(@url, self.to_json) @json = Yajl::Parser.parse(@response) @results = Results::Collection.new(@json) self rescue Exception STDERR.puts "[REQUEST FAILED]\n#{self.to_curl}\n" raise ensure if Configuration.logger Configuration.logger.log_request '_search', indices, to_curl if Configuration.logger.level == 'debug' Configuration.logger.log_response @response.code, Yajl::Encoder.encode(@json, :pretty => true) end end end def to_curl %Q|curl -X POST "http://localhost:9200/#{indices}/_search?pretty=true" -d '#{self.to_json}'| end def to_json request = {} request.update( { :query => @query } ) request.update( { :sort => @sort } ) if @sort request.update( { :facets => @facets } ) if @facets @filters.each { |filter| request.update( { :filter => filter } ) } if @filters request.update( { :size => @size } ) if @size request.update( { :from => @from } ) if @from request.update( { :fields => @fields } ) if @fields Yajl::Encoder.encode(request) end end end end [LOGGER] [FIX] Fixed error where `localhost:9200` was 
logged instead of correct URL module Slingshot module Search class Search attr_reader :indices, :url, :results, :response, :json, :query, :facets, :filters def initialize(*indices, &block) @options = indices.pop if indices.last.is_a?(Hash) @indices = indices raise ArgumentError, 'Please pass index or indices to search' if @indices.empty? if @options Configuration.wrapper @options[:wrapper] if @options[:wrapper] end instance_eval(&block) if block_given? end def query(&block) @query = Query.new(&block) self end def sort(&block) @sort = Sort.new(&block) self end def facet(name, options={}, &block) @facets ||= {} @facets.update Facet.new(name, options, &block).to_hash self end def filter(type, *options) @filters ||= [] @filters << Filter.new(type, *options).to_hash self end def from(value) @from = value self end def size(value) @size = value self end def fields(fields=[]) @fields = fields self end def perform @url = "#{Configuration.url}/#{indices.join(',')}/_search" @response = Configuration.client.post(@url, self.to_json) @json = Yajl::Parser.parse(@response) @results = Results::Collection.new(@json) self rescue Exception STDERR.puts "[REQUEST FAILED]\n#{self.to_curl}\n" raise ensure if Configuration.logger Configuration.logger.log_request '_search', indices, to_curl if Configuration.logger.level == 'debug' Configuration.logger.log_response @response.code, Yajl::Encoder.encode(@json, :pretty => true) end end end def to_curl %Q|curl -X POST "#{Configuration.url}/#{indices}/_search?pretty=true" -d '#{self.to_json}'| end def to_json request = {} request.update( { :query => @query } ) request.update( { :sort => @sort } ) if @sort request.update( { :facets => @facets } ) if @facets @filters.each { |filter| request.update( { :filter => filter } ) } if @filters request.update( { :size => @size } ) if @size request.update( { :from => @from } ) if @from request.update( { :fields => @fields } ) if @fields Yajl::Encoder.encode(request) end end end end
# frozen_string_literal: true CarrierWave.configure do |config| # make dir and files available only to the user running the servers config.permissions = 0600 config.directory_permissions = 0700 config.storage = :file # avoid uploaded files from saving to public/ config.root = Rails.root.join('system') end use "0o" for octal numbers # frozen_string_literal: true CarrierWave.configure do |config| # make dir and files available only to the user running the servers config.permissions = 0o600 config.directory_permissions = 0o700 config.storage = :file # avoid uploaded files from saving to public/ config.root = Rails.root.join('system') end
# -*- encoding: utf-8 -*- module Smartgen VERSION = "0.6.0" end Bumping version to 0.6.1 # -*- encoding: utf-8 -*- module Smartgen VERSION = "0.6.1" end
CarrierWave.configure do |config| config.root = Rails.root.join('tmp') config.cache_dir = 'carrierwave' config.fog_credentials = { provider: 'AWS', aws_access_key_id: AMAZON[Rails.env]['access_key_id'], aws_secret_access_key: AMAZON[Rails.env]['secret_access_key'], region: AMAZON[Rails.env]['region'] } config.asset_host = 'http://s3-' + AMAZON[Rails.env]['region'] + '.amazonaws.com/' + AMAZON[Rails.env]['bucket'] config.fog_directory = AMAZON[Rails.env]['bucket'] config.fog_public = true config.fog_attributes = { 'Cache-Control' => 'max-age=315576000' } end Move carrierwave config to telework project
# encoding: UTF-8 # The SMPP Transceiver maintains a bidirectional connection to an SMSC. # Provide a config hash with connection options to get started. # See the sample_gateway.rb for examples of config values. # The transceiver accepts a delegate object that may implement # the following (all optional) methods: # # mo_received(transceiver, pdu) # delivery_report_received(transceiver, pdu) # message_accepted(transceiver, mt_message_id, pdu) # message_rejected(transceiver, mt_message_id, pdu) # bound(transceiver) # unbound(transceiver) class Smpp::Transceiver < Smpp::Base # Send an MT SMS message. Delegate will receive message_accepted callback when SMSC # acknowledges, or the message_rejected callback upon error def send_mt(message_id, source_addr, destination_addr, short_message, options={}) logger.debug "Sending MT: #{short_message}" if @state == :bound pdu = Pdu::SubmitSm.new(source_addr, destination_addr, short_message, options) write_pdu pdu # keep the message ID so we can associate the SMSC message ID with our message # when the response arrives. @ack_ids[pdu.sequence_number] = message_id else raise InvalidStateException, "Transceiver is unbound. Cannot send MT messages." end end # Send a concatenated message with a body of > 160 characters as multiple messages. def send_concat_mt(message_id, source_addr, destination_addr, message, options = {}) logger.debug "Sending concatenated MT: #{message}" if @state == :bound # Split the message into parts of 153 characters. (160 - 7 characters for UDH) parts = [] while message.size > 0 do parts << message.slice!(0...(Smpp::Transceiver.get_message_part_size(options) - 1)) end logger.debug "Getting message parts size #{parts.size}, Inspect the parts ! #{parts.inspect} , The message id = #{message_id}" 0.upto(parts.size-1) do |i| # # udh = [] # udh[0] = sprintf("%c", 5) # UDH is 5 bytes. 
# logger.debug "_Step 1 - #{udh}" # udh[1] = sprintf("%c%c", 0, 3) # This is a concatenated message # logger.debug "_Step 2 - #{udh}" # #TODO Figure out why this needs to be an int here, it's a string elsewhere # udh[2] = sprintf("%c", message_id) # The ID for the entire concatenated message # logger.debug "_Step 3 - #{udh}" # udh[3] = sprintf("%c", parts.size) # How many parts this message consists of # logger.debug "_Step 4 - #{udh}" # udh[4] = sprintf("%c", i+1) # This is part i+1 # logger.debug "_Step 5 - #{udh}" # New encoding style taken from # https://github.com/Eloi/ruby-smpp/commit/6c2c20297cde4d3473c4c8362abed6ded6d59c09?diff=unified udh = [ 6, # UDH is 5 bytes. 8, 4, # This is a concatenated message message_id, # Ensure single byte message_id parts.size, # How many parts this message consists of i + 1 # This is part i+1 ].pack('CCCS>CC') # udh = "050003F0030"+(i+1).to_s options[:esm_class] = 64 # This message contains a UDH header. options[:udh] = udh logger.debug "Message sequence_number - #{i} Message UDH - #{udh.inspect} - All the options #{options.inspect}" pdu = Pdu::SubmitSm.new(source_addr, destination_addr, parts[i], options) logger.debug "send_concat_mt_pdu_details #{pdu.inspect}" write_pdu pdu # This is definately a bit hacky - multiple PDUs are being associated with a single # message_id. @ack_ids[pdu.sequence_number] = message_id end else raise InvalidStateException, "Transceiver is unbound. Cannot send MT messages." 
end end # Send MT SMS message for multiple dest_address # Author: Abhishek Parolkar (abhishek[at]parolkar.com) # USAGE: $tx.send_multi_mt(123, "9100000000", ["9199000000000","91990000000001","9199000000002"], "Message here") def send_multi_mt(message_id, source_addr, destination_addr_arr, short_message, options={}) logger.debug "Sending Multiple MT: #{short_message}" if @state == :bound pdu = Pdu::SubmitMulti.new(source_addr, destination_addr_arr, short_message, options) write_pdu pdu # keep the message ID so we can associate the SMSC message ID with our message # when the response arrives. @ack_ids[pdu.sequence_number] = message_id else raise InvalidStateException, "Transceiver is unbound. Cannot send MT messages." end end # Send BindTransceiverResponse PDU. def send_bind raise IOError, 'Receiver already bound.' unless unbound? pdu = Pdu::BindTransceiver.new( @config[:system_id], @config[:password], @config[:system_type], @config[:source_ton], @config[:source_npi], @config[:source_address_range]) write_pdu(pdu) end # Use data_coding to find out what message part size we can use # http://en.wikipedia.org/wiki/SMS#Message_size def self.get_message_part_size options return 153 if options[:data_coding].nil? return 153 if options[:data_coding] == 0 return 134 if options[:data_coding] == 3 return 134 if options[:data_coding] == 5 return 134 if options[:data_coding] == 6 return 134 if options[:data_coding] == 7 return 67 if options[:data_coding] == 8 return 153 end end Update transceiver.rb use hash instead of just mt_message_id # encoding: UTF-8 # The SMPP Transceiver maintains a bidirectional connection to an SMSC. # Provide a config hash with connection options to get started. # See the sample_gateway.rb for examples of config values. 
# The transceiver accepts a delegate object that may implement # the following (all optional) methods: # # mo_received(transceiver, pdu) # delivery_report_received(transceiver, pdu) # message_accepted(transceiver, mt_message_id, pdu) # message_rejected(transceiver, mt_message_id, pdu) # bound(transceiver) # unbound(transceiver) class Smpp::Transceiver < Smpp::Base # Send an MT SMS message. Delegate will receive message_accepted callback when SMSC # acknowledges, or the message_rejected callback upon error def send_mt(message_id, source_addr, destination_addr, short_message, options={}) logger.debug "Sending MT: #{short_message}" if @state == :bound pdu = Pdu::SubmitSm.new(source_addr, destination_addr, short_message, options) write_pdu pdu # keep the message ID so we can associate the SMSC message ID with our message # when the response arrives. @ack_ids[pdu.sequence_number] = {:message_id => message_id } else raise InvalidStateException, "Transceiver is unbound. Cannot send MT messages." end end # Send a concatenated message with a body of > 160 characters as multiple messages. def send_concat_mt(message_id, source_addr, destination_addr, message, options = {}) logger.debug "Sending concatenated MT: #{message}" if @state == :bound # Split the message into parts of 153 characters. (160 - 7 characters for UDH) parts = [] while message.size > 0 do parts << message.slice!(0...(Smpp::Transceiver.get_message_part_size(options) - 1)) end logger.debug "Getting message parts size #{parts.size}, Inspect the parts ! #{parts.inspect} , The message id = #{message_id}" 0.upto(parts.size-1) do |i| # # udh = [] # udh[0] = sprintf("%c", 5) # UDH is 5 bytes. 
# logger.debug "_Step 1 - #{udh}" # udh[1] = sprintf("%c%c", 0, 3) # This is a concatenated message # logger.debug "_Step 2 - #{udh}" # #TODO Figure out why this needs to be an int here, it's a string elsewhere # udh[2] = sprintf("%c", message_id) # The ID for the entire concatenated message # logger.debug "_Step 3 - #{udh}" # udh[3] = sprintf("%c", parts.size) # How many parts this message consists of # logger.debug "_Step 4 - #{udh}" # udh[4] = sprintf("%c", i+1) # This is part i+1 # logger.debug "_Step 5 - #{udh}" # New encoding style taken from # https://github.com/Eloi/ruby-smpp/commit/6c2c20297cde4d3473c4c8362abed6ded6d59c09?diff=unified udh = [ 6, # UDH is 5 bytes. 8, 4, # This is a concatenated message message_id, # Ensure single byte message_id parts.size, # How many parts this message consists of i + 1 # This is part i+1 ].pack('CCCS>CC') # udh = "050003F0030"+(i+1).to_s options[:esm_class] = 64 # This message contains a UDH header. options[:udh] = udh logger.debug "Message sequence_number - #{i} Message UDH - #{udh.inspect} - All the options #{options.inspect}" pdu = Pdu::SubmitSm.new(source_addr, destination_addr, parts[i], options) logger.debug "send_concat_mt_pdu_details #{pdu.inspect}" write_pdu pdu # This is definately a bit hacky - multiple PDUs are being associated with a single # message_id. # @ack_ids[pdu.sequence_number] = message_id @ack_ids[pdu.sequence_number] = {:message_id => message_id, :part_number => i + 1, :parts_size => parts.size } end else raise InvalidStateException, "Transceiver is unbound. Cannot send MT messages." 
end end # Send MT SMS message for multiple dest_address # Author: Abhishek Parolkar (abhishek[at]parolkar.com) # USAGE: $tx.send_multi_mt(123, "9100000000", ["9199000000000","91990000000001","9199000000002"], "Message here") def send_multi_mt(message_id, source_addr, destination_addr_arr, short_message, options={}) logger.debug "Sending Multiple MT: #{short_message}" if @state == :bound pdu = Pdu::SubmitMulti.new(source_addr, destination_addr_arr, short_message, options) write_pdu pdu # keep the message ID so we can associate the SMSC message ID with our message # when the response arrives. @ack_ids[pdu.sequence_number] = message_id else raise InvalidStateException, "Transceiver is unbound. Cannot send MT messages." end end # Send BindTransceiverResponse PDU. def send_bind raise IOError, 'Receiver already bound.' unless unbound? pdu = Pdu::BindTransceiver.new( @config[:system_id], @config[:password], @config[:system_type], @config[:source_ton], @config[:source_npi], @config[:source_address_range]) write_pdu(pdu) end # Use data_coding to find out what message part size we can use # http://en.wikipedia.org/wiki/SMS#Message_size def self.get_message_part_size options return 153 if options[:data_coding].nil? return 153 if options[:data_coding] == 0 return 134 if options[:data_coding] == 3 return 134 if options[:data_coding] == 5 return 134 if options[:data_coding] == 6 return 134 if options[:data_coding] == 7 return 67 if options[:data_coding] == 8 return 153 end end
module Snapshot class Builder BUILD_DIR = '/tmp/snapshot' def initialize end def build_app command = generate_build_command Helper.log.info "Building project... this might take some time...".green Helper.log.debug command.yellow all_lines = [] PTY.spawn(command) do |stdin, stdout, pid| stdin.each do |line| all_lines << line begin parse_build_line(line) if line.length > 2 rescue Exception => ex Helper.log.fatal all_lines.join("\n") raise ex end end end if all_lines.join('\n').include?'** BUILD SUCCEEDED **' Helper.log.info "BUILD SUCCEEDED".green return true else raise "Looks like the build was not successfull." end end private def parse_build_line(line) if line.include?"** BUILD FAILED **" raise line end end def generate_build_command scheme = SnapshotConfig.shared_instance.project_path.split('/').last.split('.').first # TODO [ "xctool", "-sdk iphonesimulator#{SnapshotConfig.shared_instance.ios_version}", "CONFIGURATION_BUILD_DIR='#{BUILD_DIR}/build'", "-workspace '#{SnapshotConfig.shared_instance.project_path}'", "-scheme '#{scheme}'", "-configuration Debug", "DSTROOT='#{BUILD_DIR}'", "OBJROOT='#{BUILD_DIR}'", "SYMROOT='#{BUILD_DIR}'", "ONLY_ACTIVE_ARCH=NO", "clean build" ].join(' ') end end end Added fallback to xcodebuild if xctool is not installed require 'snapshot/dependency_checker' module Snapshot class Builder BUILD_DIR = '/tmp/snapshot' def initialize end def build_app command = generate_build_command Helper.log.info "Building project... this might take some time...".green Helper.log.debug command.yellow all_lines = [] PTY.spawn(command) do |stdin, stdout, pid| stdin.each do |line| all_lines << line begin parse_build_line(line) if line.length > 2 rescue Exception => ex Helper.log.fatal all_lines.join("\n") raise ex end end end if all_lines.join('\n').include?'** BUILD SUCCEEDED **' Helper.log.info "BUILD SUCCEEDED".green return true else raise "Looks like the build was not successfull." 
end end private def parse_build_line(line) if line.include?"** BUILD FAILED **" raise line end end def generate_build_command scheme = SnapshotConfig.shared_instance.project_path.split('/').last.split('.').first # TODO build_command = (DependencyChecker.xctool_installed? ? 'xctool' : 'xcodebuild') [ build_command, "-sdk iphonesimulator#{SnapshotConfig.shared_instance.ios_version}", "CONFIGURATION_BUILD_DIR='#{BUILD_DIR}/build'", "-workspace '#{SnapshotConfig.shared_instance.project_path}'", "-scheme '#{scheme}'", "-configuration Debug", "DSTROOT='#{BUILD_DIR}'", "OBJROOT='#{BUILD_DIR}'", "SYMROOT='#{BUILD_DIR}'", "ONLY_ACTIVE_ARCH=NO", "clean build" ].join(' ') end end end
# RailsAdmin config file. Generated on July 29, 2013 23:52 # See github.com/sferik/rails_admin for more informations RailsAdmin.config do |config| ################ Global configuration ################ # Set the admin name here (optional second array element will appear in red). For example: config.main_app_name = ['QuienManda', 'Admin'] # or for a more dynamic name: # config.main_app_name = Proc.new { |controller| [Rails.application.engine_name.titleize, controller.params['action'].titleize] } # RailsAdmin may need a way to know who the current user is] config.current_user_method { current_user } # auto-generated # Authorization via CanCan RailsAdmin.config do |config| config.authorize_with :cancan end # Extra actions: # - toggle: to toggle booleans from index view, see rails_admin_toggleable config.actions do dashboard index new history_index show edit delete history_show toggle show_in_app end # Add our own custom admin stuff config.navigation_static_label = "Extra Admin" config.navigation_static_links = { 'Import' => '/admin/import' } # If you want to track changes on your models: # config.audit_with :history, 'User' # Or with a PaperTrail: (you need to install it first) config.audit_with :paper_trail, 'User' # Display empty fields in show views: # config.compact_show_view = false # Number of default rows per-page: # config.default_items_per_page = 20 # Exclude specific models (keep the others): # config.excluded_models = ['Entity', 'User'] # Include specific models (exclude the others): # config.included_models = ['Entity', 'User'] # Label methods for model instances: # config.label_methods << :description # Default is [:name, :title] ################ Model configuration ################ # Each model configuration can alternatively: # - stay here in a `config.model 'ModelName' do ... end` block # - go in the model definition file in a `rails_admin do ... 
end` block # This is your choice to make: # - This initializer is loaded once at startup (modifications will show up when restarting the application) but all RailsAdmin configuration would stay in one place. # - Models are reloaded at each request in development mode (when modified), which may smooth your RailsAdmin development workflow. # Now you probably need to tour the wiki a bit: https://github.com/sferik/rails_admin/wiki # Anyway, here is how RailsAdmin saw your application's models when you ran the initializer: config.model 'Entity' do list do field :published, :toggle field :needs_work field :priority field :person field :name field :short_name field :description field :updated_at end edit do group :basic_info do label "Basic info" field :person do default_value true end field :name field :short_name field :description field :priority do default_value Entity::PRIORITY_MEDIUM end field :avatar end group :social_media do label "Social media / web" field :web_page field :twitter_handle field :wikipedia_page field :facebook_page field :open_corporates_page field :flickr_page field :youtube_page field :linkedin_page end group :relations do # Editing the relations through the default RailsAdmin control (moving across # two columns) is very confusing. So disable for now. 
field :relations_as_source do read_only true inverse_of :source end field :relations_as_target do read_only true inverse_of :target end field :related_photos do read_only true inverse_of :related_entities end end group :internal do label "Internal" field :published do default_value false end field :needs_work do default_value true end field :related_posts do read_only true end field :slug do help 'Leave blank for the URL slug to be auto-generated' end field :notes field :updated_at end end object_label_method do :short_or_long_name end end config.model 'Fact' do list do field :importer field :relations field :summary field :updated_at end edit do field :importer field :relations do read_only true end field :summary do read_only true end field :updated_at end end config.model 'Photo' do list do field :published, :toggle field :needs_work field :file field :footer field :tag_list field :updated_at end edit do group :basic_info do label "Content" field :file field :footer field :copyright field :source field :date do strftime_format "%d/%m/%Y" end end group :relations do field :related_entities end group :internal do field :extra_wide do default_value false end field :published do default_value false end field :needs_work do default_value true end field :related_posts do read_only true end field :tag_list do label "Tags" partial 'tag_list_with_suggestions' end field :notes field :posts_as_header field :updated_at end end end # RailsAdmin configuration config.model 'Annotation' do parent Photo end # RailsAdmin configuration config.model 'Post' do list do field :published, :toggle field :needs_work field :title field :author field :updated_at end edit do group :basic_info do label "Content" field :title field :lead field :content, :ck_editor do help 'Puedes insertar códigos como: [dc url="..."] [qm url="..." 
text="..."] [gdocs url="..."]' end field :author do inverse_of :posts end field :mentions_in_content do read_only true end end group :internal do label "Internal" field :photo field :show_photo_as_header do default_value false end field :published do default_value false end field :featured do default_value false end field :needs_work do default_value true end field :related_posts do read_only true end field :slug do help 'Leave blank for the URL slug to be auto-generated' end field :notes field :updated_at end end end # RailsAdmin configuration config.model 'Mention' do parent Post object_label_method do :to_s end end config.model 'Relation' do list do field :published, :toggle field :needs_work field :source field :relation_type field :target field :via field :updated_at end edit do group :basic_info do field :source field :relation_type field :target field :via field :via2 field :via3 end group :timeline do field :from do strftime_format "%d/%m/%Y" end field :to do strftime_format "%d/%m/%Y" end field :at do strftime_format "%d/%m/%Y" end end group :internal do field :published do default_value true end field :needs_work do default_value false end field :facts do read_only true end field :notes field :updated_at end end object_label_method do :to_s end end config.model 'RelationType' do parent Relation list do field :description end edit do field :description field :relations do read_only true end end object_label_method do :description end end config.model 'User' do object_label_method :name end ### User ### # config.model 'User' do # # You can copy this to a 'rails_admin do ... 
end' block inside your user.rb model definition # # Found associations: # # Found columns: # configure :id, :integer # configure :email, :string # configure :password, :password # Hidden # configure :password_confirmation, :password # Hidden # configure :reset_password_token, :string # Hidden # configure :reset_password_sent_at, :datetime # configure :remember_created_at, :datetime # configure :sign_in_count, :integer # configure :current_sign_in_at, :datetime # configure :last_sign_in_at, :datetime # configure :current_sign_in_ip, :string # configure :last_sign_in_ip, :string # configure :created_at, :datetime # configure :updated_at, :datetime # # Cross-section configuration: # # object_label_method :name # Name of the method called for pretty printing an *instance* of ModelName # # label 'My model' # Name of ModelName (smartly defaults to ActiveRecord's I18n API) # # label_plural 'My models' # Same, plural # # weight 0 # Navigation priority. Bigger is higher. # # parent OtherModel # Set parent model for navigation. MyModel will be nested below. OtherModel will be on first position of the dropdown # # navigation_label # Sets dropdown entry's name in navigation. Only for parents! # # Section specific configuration: # list do # # filters [:id, :name] # Array of field names which filters should be shown by default in the table header # # items_per_page 100 # Override default_items_per_page # # sort_by :id # Sort column (default is primary key) # # sort_reverse true # Sort direction (default is true for primary key, last created first) # end # show do; end # edit do; end # export do; end # # also see the create, update, modal and nested sections, which override edit in specific cases (resp. when creating, updating, modifying from another model in a popup modal or modifying from another model nested form) # # you can override a cross-section field configuration in any section with the same syntax `configure :field_name do ... 
end` # # using `field` instead of `configure` will exclude all other fields and force the ordering # end end Fix rails admin config for Fact # RailsAdmin config file. Generated on July 29, 2013 23:52 # See github.com/sferik/rails_admin for more informations RailsAdmin.config do |config| ################ Global configuration ################ # Set the admin name here (optional second array element will appear in red). For example: config.main_app_name = ['QuienManda', 'Admin'] # or for a more dynamic name: # config.main_app_name = Proc.new { |controller| [Rails.application.engine_name.titleize, controller.params['action'].titleize] } # RailsAdmin may need a way to know who the current user is] config.current_user_method { current_user } # auto-generated # Authorization via CanCan RailsAdmin.config do |config| config.authorize_with :cancan end # Extra actions: # - toggle: to toggle booleans from index view, see rails_admin_toggleable config.actions do dashboard index new history_index show edit delete history_show toggle show_in_app end # Add our own custom admin stuff config.navigation_static_label = "Extra Admin" config.navigation_static_links = { 'Import' => '/admin/import' } # If you want to track changes on your models: # config.audit_with :history, 'User' # Or with a PaperTrail: (you need to install it first) config.audit_with :paper_trail, 'User' # Display empty fields in show views: # config.compact_show_view = false # Number of default rows per-page: # config.default_items_per_page = 20 # Exclude specific models (keep the others): # config.excluded_models = ['Entity', 'User'] # Include specific models (exclude the others): # config.included_models = ['Entity', 'User'] # Label methods for model instances: # config.label_methods << :description # Default is [:name, :title] ################ Model configuration ################ # Each model configuration can alternatively: # - stay here in a `config.model 'ModelName' do ... 
end` block # - go in the model definition file in a `rails_admin do ... end` block # This is your choice to make: # - This initializer is loaded once at startup (modifications will show up when restarting the application) but all RailsAdmin configuration would stay in one place. # - Models are reloaded at each request in development mode (when modified), which may smooth your RailsAdmin development workflow. # Now you probably need to tour the wiki a bit: https://github.com/sferik/rails_admin/wiki # Anyway, here is how RailsAdmin saw your application's models when you ran the initializer: config.model 'Entity' do list do field :published, :toggle field :needs_work field :priority field :person field :name field :short_name field :description field :updated_at end edit do group :basic_info do label "Basic info" field :person do default_value true end field :name field :short_name field :description field :priority do default_value Entity::PRIORITY_MEDIUM end field :avatar end group :social_media do label "Social media / web" field :web_page field :twitter_handle field :wikipedia_page field :facebook_page field :open_corporates_page field :flickr_page field :youtube_page field :linkedin_page end group :relations do # Editing the relations through the default RailsAdmin control (moving across # two columns) is very confusing. So disable for now. 
field :relations_as_source do read_only true inverse_of :source end field :relations_as_target do read_only true inverse_of :target end field :related_photos do read_only true inverse_of :related_entities end end group :internal do label "Internal" field :published do default_value false end field :needs_work do default_value true end field :related_posts do read_only true end field :slug do help 'Leave blank for the URL slug to be auto-generated' end field :notes field :updated_at end end object_label_method do :short_or_long_name end end config.model 'Fact' do list do field :importer field :relations field :summary end edit do field :importer field :relations do read_only true end field :summary do read_only true end end end config.model 'Photo' do list do field :published, :toggle field :needs_work field :file field :footer field :tag_list field :updated_at end edit do group :basic_info do label "Content" field :file field :footer field :copyright field :source field :date do strftime_format "%d/%m/%Y" end end group :relations do field :related_entities end group :internal do field :extra_wide do default_value false end field :published do default_value false end field :needs_work do default_value true end field :related_posts do read_only true end field :tag_list do label "Tags" partial 'tag_list_with_suggestions' end field :notes field :posts_as_header field :updated_at end end end # RailsAdmin configuration config.model 'Annotation' do parent Photo end # RailsAdmin configuration config.model 'Post' do list do field :published, :toggle field :needs_work field :title field :author field :updated_at end edit do group :basic_info do label "Content" field :title field :lead field :content, :ck_editor do help 'Puedes insertar códigos como: [dc url="..."] [qm url="..." 
text="..."] [gdocs url="..."]' end field :author do inverse_of :posts end field :mentions_in_content do read_only true end end group :internal do label "Internal" field :photo field :show_photo_as_header do default_value false end field :published do default_value false end field :featured do default_value false end field :needs_work do default_value true end field :related_posts do read_only true end field :slug do help 'Leave blank for the URL slug to be auto-generated' end field :notes field :updated_at end end end # RailsAdmin configuration config.model 'Mention' do parent Post object_label_method do :to_s end end config.model 'Relation' do list do field :published, :toggle field :needs_work field :source field :relation_type field :target field :via field :updated_at end edit do group :basic_info do field :source field :relation_type field :target field :via field :via2 field :via3 end group :timeline do field :from do strftime_format "%d/%m/%Y" end field :to do strftime_format "%d/%m/%Y" end field :at do strftime_format "%d/%m/%Y" end end group :internal do field :published do default_value true end field :needs_work do default_value false end field :facts do read_only true end field :notes field :updated_at end end object_label_method do :to_s end end config.model 'RelationType' do parent Relation list do field :description end edit do field :description field :relations do read_only true end end object_label_method do :description end end config.model 'User' do object_label_method :name end ### User ### # config.model 'User' do # # You can copy this to a 'rails_admin do ... 
end' block inside your user.rb model definition # # Found associations: # # Found columns: # configure :id, :integer # configure :email, :string # configure :password, :password # Hidden # configure :password_confirmation, :password # Hidden # configure :reset_password_token, :string # Hidden # configure :reset_password_sent_at, :datetime # configure :remember_created_at, :datetime # configure :sign_in_count, :integer # configure :current_sign_in_at, :datetime # configure :last_sign_in_at, :datetime # configure :current_sign_in_ip, :string # configure :last_sign_in_ip, :string # configure :created_at, :datetime # configure :updated_at, :datetime # # Cross-section configuration: # # object_label_method :name # Name of the method called for pretty printing an *instance* of ModelName # # label 'My model' # Name of ModelName (smartly defaults to ActiveRecord's I18n API) # # label_plural 'My models' # Same, plural # # weight 0 # Navigation priority. Bigger is higher. # # parent OtherModel # Set parent model for navigation. MyModel will be nested below. OtherModel will be on first position of the dropdown # # navigation_label # Sets dropdown entry's name in navigation. Only for parents! # # Section specific configuration: # list do # # filters [:id, :name] # Array of field names which filters should be shown by default in the table header # # items_per_page 100 # Override default_items_per_page # # sort_by :id # Sort column (default is primary key) # # sort_reverse true # Sort direction (default is true for primary key, last created first) # end # show do; end # edit do; end # export do; end # # also see the create, update, modal and nested sections, which override edit in specific cases (resp. when creating, updating, modifying from another model in a popup modal or modifying from another model nested form) # # you can override a cross-section field configuration in any section with the same syntax `configure :field_name do ... 
end` # # using `field` instead of `configure` will exclude all other fields and force the ordering # end end
require 'mimic' require 'socket' class DataManager class << self def practices_memo { 1 => { id: 1, export_url: "https://optimis.duxware.com", external_id: 3, token: "12345", migrated_at: nil, billing: true, created_at: "2013-07-16T01:16:45Z", updated_at: "2013-07-16T01:16:45Z" } } end def claim_errors_memo { 1 => { error_message: 'Send to optimis.duxware.com failed with ERROR: Did not find this ICD code in DB: 729.90', external_id: 3, id: 1, message_body: '<?xml version="1.0" encoding="UTF-8"?><incomingHeader></incomingHeader>', record_id: 43334, resent_at: nil, created_at: "2013-06-13T16:17:02Z", updated_at: "2013-06-13T16:17:02Z" }, 2 => { error_message: 'Send to optimis.duxware.com failed with ERROR: Did not find this ICD code in DB: 729.90', external_id: 4, id: 1, message_body: '<?xml version="1.0" encoding="UTF-8"?><incomingHeader></incomingHeader>', record_id: 43334, resent_at: nil, created_at: "2013-06-13T16:17:02Z", updated_at: "2013-06-13T16:17:02Z" } } end def practices @practices ||= practices_memo.dup end def reset @claim_errors = claim_errors_memo.dup @practices = practices_memo.dup end def claim_errors @claim_errors ||= claim_errors_memo.dup end end end Mimic.mimic(:port => Comptroller::Configuration::PORT) do if defined?(Rails) p "[NOTICE] Starting fake billing service on port #{Comptroller::Configuration::PORT}" end get '/practices' do [ 200, {}, DataManager.practices.values.to_json ] end get '/practices/:id' do [ 200, {}, DataManager.practices[params[:id].to_i].to_json ] end post '/practices' do practice_params = params[:practice] new_id = DataManager.practices.keys.max + 1 # fake auto incrementing private keys new_practice = { billing: false, created_at: "2013-07-16T01:16:45Z", export_url: practice_params[:export_url], external_id: practice_params[:external_id].to_i, id: new_id, migrated_at: nil, token: practice_params[:token], updated_at: "2013-07-16T01:16:45Z" } DataManager.practices[new_id] = new_practice [ 200, {}, new_practice.to_json ] end 
put '/practices/:id' do practice = DataManager.practices[params[:id].to_i] practice.merge!(params[:practice]) [ 200, {}, practice.to_json ] end delete '/practices/:id' do deleted_practice = DataManager.practices.extract!(params[:id].to_i)[params[:id].to_i] [ 200, {}, deleted_practice.to_json ] end get '/duxware_errors/count' do [ 200, {}, { :count => 1 }.to_json ] end get '/duxware_errors' do [ 200, {}, DataManager.claim_errors.values.to_json ] end get '/duxware_errors/:id' do [ 200, {}, DataManager.claim_errors[params[:id].to_i].to_json ] end put '/duxware_errors/:id' do claim_error = DataManager.claim_errors[params[:id].to_i] claim_error.merge!(params[:claim_error]) [ 200, {}, claim_error.to_json ] end delete '/duxware_errors/:id' do DataManager.claim_errors[params[:id].to_i] deleted_error = DataManager.claim_errors.extract!(params[:id].to_i)[params[:id].to_i] [ 200, {}, deleted_error.to_json ] end get '/resets' do DataManager.reset end end Fix claim error test data require 'mimic' require 'socket' class DataManager class << self def practices_memo { 1 => { id: 1, export_url: "https://optimis.duxware.com", external_id: 3, token: "12345", migrated_at: nil, billing: true, created_at: "2013-07-16T01:16:45Z", updated_at: "2013-07-16T01:16:45Z" } } end def claim_errors_memo { 1 => { error_message: 'Send to optimis.duxware.com failed with ERROR: Did not find this ICD code in DB: 729.90', external_id: 3, id: 1, message_body: '<?xml version="1.0" encoding="UTF-8"?><incomingHeader></incomingHeader>', record_id: 43334, resent_at: nil, created_at: "2013-06-13T16:17:02Z", updated_at: "2013-06-13T16:17:02Z" }, 2 => { error_message: 'Send to optimis.duxware.com failed with ERROR: Did not find this ICD code in DB: 729.91', external_id: 4, id: 2, message_body: '<?xml version="1.0" encoding="UTF-8"?><incomingHeader></incomingHeader>', record_id: 43335, resent_at: nil, created_at: "2013-06-13T16:17:02Z", updated_at: "2013-06-13T16:17:02Z" } } end def practices @practices ||= 
practices_memo.dup end def reset @claim_errors = claim_errors_memo.dup @practices = practices_memo.dup end def claim_errors @claim_errors ||= claim_errors_memo.dup end end end Mimic.mimic(:port => Comptroller::Configuration::PORT) do if defined?(Rails) p "[NOTICE] Starting fake billing service on port #{Comptroller::Configuration::PORT}" end get '/practices' do [ 200, {}, DataManager.practices.values.to_json ] end get '/practices/:id' do [ 200, {}, DataManager.practices[params[:id].to_i].to_json ] end post '/practices' do practice_params = params[:practice] new_id = DataManager.practices.keys.max + 1 # fake auto incrementing private keys new_practice = { billing: false, created_at: "2013-07-16T01:16:45Z", export_url: practice_params[:export_url], external_id: practice_params[:external_id].to_i, id: new_id, migrated_at: nil, token: practice_params[:token], updated_at: "2013-07-16T01:16:45Z" } DataManager.practices[new_id] = new_practice [ 200, {}, new_practice.to_json ] end put '/practices/:id' do practice = DataManager.practices[params[:id].to_i] practice.merge!(params[:practice]) [ 200, {}, practice.to_json ] end delete '/practices/:id' do deleted_practice = DataManager.practices.extract!(params[:id].to_i)[params[:id].to_i] [ 200, {}, deleted_practice.to_json ] end get '/duxware_errors/count' do [ 200, {}, { :count => 1 }.to_json ] end get '/duxware_errors' do [ 200, {}, DataManager.claim_errors.values.to_json ] end get '/duxware_errors/:id' do [ 200, {}, DataManager.claim_errors[params[:id].to_i].to_json ] end put '/duxware_errors/:id' do claim_error = DataManager.claim_errors[params[:id].to_i] claim_error.merge!(params[:claim_error]) [ 200, {}, claim_error.to_json ] end delete '/duxware_errors/:id' do DataManager.claim_errors[params[:id].to_i] deleted_error = DataManager.claim_errors.extract!(params[:id].to_i)[params[:id].to_i] [ 200, {}, deleted_error.to_json ] end get '/resets' do DataManager.reset end end
RailsAdmin.config do |config| ### Popular gems integration ## == Devise == config.authenticate_with do warden.authenticate! scope: :user end config.current_user_method(&:current_user) # Authorization with pundit policies config.authorize_with :pundit ## == PaperTrail == # config.audit_with :paper_trail, 'User', 'PaperTrail::Version' # PaperTrail >= 3.0.0 ### More at https://github.com/sferik/rails_admin/wiki/Base-configuration config.included_models = ['User'] config.actions do dashboard # mandatory index # mandatory new export bulk_delete show edit delete show_in_app ## With an audit adapter, you can add: # history_index # history_show end end Expose models through rails admin. RailsAdmin.config do |config| ### Popular gems integration ## == Devise == config.authenticate_with do warden.authenticate! scope: :user end config.current_user_method(&:current_user) # Authorization with pundit policies config.authorize_with :pundit ## == PaperTrail == # config.audit_with :paper_trail, 'User', 'PaperTrail::Version' # PaperTrail >= 3.0.0 ### More at https://github.com/sferik/rails_admin/wiki/Base-configuration config.included_models = %w(User TempUser Role Membership Project Transaction Competition Membership) config.actions do dashboard # mandatory index # mandatory new export bulk_delete show edit delete show_in_app end end
module Specjour require 'specjour/rspec' require 'specjour/cucumber' class Printer < GServer include Protocol RANDOM_PORT = 0 def self.start(specs_to_run) new(specs_to_run).start end attr_accessor :worker_size, :specs_to_run, :completed_workers, :disconnections, :profiler def initialize(specs_to_run) super( port = RANDOM_PORT, host = "0.0.0.0", max_connections = 100, stdlog = $stderr, audit = true, debug = true ) @completed_workers = 0 @disconnections = 0 @profiler = {} self.specs_to_run = run_order(specs_to_run) end def serve(client) client = Connection.wrap client client.each(TERMINATOR) do |data| process load_object(data), client end end def ready(client) synchronize do client.print specs_to_run.shift client.flush end end def done(client) self.completed_workers += 1 end def exit_status reporters.all? {|r| r.exit_status == true} end def rspec_summary=(client, summary) rspec_report.add(summary) end def cucumber_summary=(client, summary) cucumber_report.add(summary) end def add_to_profiler(client, args) test, time = *args self.profiler[test] = time end protected def disconnecting(client_port) self.disconnections += 1 if disconnections == worker_size shutdown stop unless Specjour.interrupted? 
end end def log(msg) # noop end def error(exception) Specjour.logger.debug "#{exception.inspect}\n#{exception.backtrace.join("\n")}" end def process(message, client) if message.is_a?(String) $stdout.print message $stdout.flush elsif message.is_a?(Array) send(message.first, client, *message[1..-1]) end end def run_order(specs_to_run) if File.exist?('.specjour/performance') ordered_specs = File.readlines('.specjour/performance').map {|l| l.chop.split(':')[1]} (specs_to_run - ordered_specs) | (ordered_specs & specs_to_run) else specs_to_run end end def rspec_report @rspec_report ||= Rspec::FinalReport.new end def cucumber_report @cucumber_report ||= Cucumber::FinalReport.new end def record_performance File.open('.specjour/performance', 'w') do |file| ordered_specs = profiler.to_a.sort_by {|a| -a[1].to_f}.map do |test, time| file.puts "%6f:%s" % [time, test] end end end def reporters [@rspec_report, @cucumber_report].compact end def stopping summarize_reports warn_if_workers_deserted record_performance unless Specjour.interrupted? end def summarize_reports reporters.each {|r| r.summarize} end def synchronize(&block) @connectionsMutex.synchronize &block end def warn_if_workers_deserted if disconnections != completed_workers && !Specjour.interrupted? 
puts puts workers_deserted_message end end def workers_deserted_message data = "* ERROR: NOT ALL WORKERS COMPLETED PROPERLY *" filler = "*" * data.size [filler, data, filler].join "\n" end end end Synchronize threads before mutating disconnections module Specjour require 'specjour/rspec' require 'specjour/cucumber' class Printer < GServer include Protocol RANDOM_PORT = 0 def self.start(specs_to_run) new(specs_to_run).start end attr_accessor :worker_size, :specs_to_run, :completed_workers, :disconnections, :profiler def initialize(specs_to_run) super( port = RANDOM_PORT, host = "0.0.0.0", max_connections = 100, stdlog = $stderr, audit = true, debug = true ) @completed_workers = 0 @disconnections = 0 @profiler = {} self.specs_to_run = run_order(specs_to_run) end def serve(client) client = Connection.wrap client client.each(TERMINATOR) do |data| process load_object(data), client end end def ready(client) synchronize do client.print specs_to_run.shift client.flush end end def done(client) self.completed_workers += 1 end def exit_status reporters.all? {|r| r.exit_status == true} end def rspec_summary=(client, summary) rspec_report.add(summary) end def cucumber_summary=(client, summary) cucumber_report.add(summary) end def add_to_profiler(client, args) test, time = *args self.profiler[test] = time end protected def disconnecting(client_port) synchronize { self.disconnections += 1 } if disconnections == worker_size shutdown stop unless Specjour.interrupted? 
end end def log(msg) # noop end def error(exception) Specjour.logger.debug "#{exception.inspect}\n#{exception.backtrace.join("\n")}" end def process(message, client) if message.is_a?(String) $stdout.print message $stdout.flush elsif message.is_a?(Array) send(message.first, client, *message[1..-1]) end end def run_order(specs_to_run) if File.exist?('.specjour/performance') ordered_specs = File.readlines('.specjour/performance').map {|l| l.chop.split(':')[1]} (specs_to_run - ordered_specs) | (ordered_specs & specs_to_run) else specs_to_run end end def rspec_report @rspec_report ||= Rspec::FinalReport.new end def cucumber_report @cucumber_report ||= Cucumber::FinalReport.new end def record_performance File.open('.specjour/performance', 'w') do |file| ordered_specs = profiler.to_a.sort_by {|a| -a[1].to_f}.map do |test, time| file.puts "%6f:%s" % [time, test] end end end def reporters [@rspec_report, @cucumber_report].compact end def stopping summarize_reports warn_if_workers_deserted record_performance unless Specjour.interrupted? end def summarize_reports reporters.each {|r| r.summarize} end def synchronize(&block) @connectionsMutex.synchronize &block end def warn_if_workers_deserted if disconnections != completed_workers && !Specjour.interrupted? puts puts workers_deserted_message end end def workers_deserted_message data = "* ERROR: NOT ALL WORKERS COMPLETED PROPERLY *" filler = "*" * data.size [filler, data, filler].join "\n" end end end
RailsAdmin.config do |config| config.main_app_name = ['Member Directory', 'Admin'] config.current_user_method { current_admin } config.attr_accessible_role { :admin } config.included_models = ['Member'] end remove old config option for rails_admin RailsAdmin.config do |config| config.main_app_name = ['Member Directory', 'Admin'] config.current_user_method { current_admin } config.included_models = ['Member'] end
[ RailsAdmin::Config::Actions::MemoryUsage, RailsAdmin::Config::Actions::DiskUsage, RailsAdmin::Config::Actions::SendToFlow, RailsAdmin::Config::Actions::LoadModel, RailsAdmin::Config::Actions::ShutdownModel, RailsAdmin::Config::Actions::SwitchNavigation, RailsAdmin::Config::Actions::DataType, RailsAdmin::Config::Actions::Import, #RailsAdmin::Config::Actions::EdiExport, RailsAdmin::Config::Actions::ImportSchema, RailsAdmin::Config::Actions::DeleteAll, RailsAdmin::Config::Actions::TranslatorUpdate, RailsAdmin::Config::Actions::Convert, RailsAdmin::Config::Actions::SimpleShare, RailsAdmin::Config::Actions::BulkShare, RailsAdmin::Config::Actions::Pull, RailsAdmin::Config::Actions::RetryTask, RailsAdmin::Config::Actions::DownloadFile, RailsAdmin::Config::Actions::ProcessFlow, RailsAdmin::Config::Actions::BuildGem, RailsAdmin::Config::Actions::Run, RailsAdmin::Config::Actions::Authorize, RailsAdmin::Config::Actions::SimpleDeleteDataType, RailsAdmin::Config::Actions::BulkDeleteDataType, RailsAdmin::Config::Actions::SimpleGenerate, RailsAdmin::Config::Actions::BulkGenerate, RailsAdmin::Config::Actions::SimpleExpand, RailsAdmin::Config::Actions::BulkExpand, RailsAdmin::Config::Actions::Records, RailsAdmin::Config::Actions::SwitchScheduler, RailsAdmin::Config::Actions::SimpleExport, RailsAdmin::Config::Actions::Schedule, RailsAdmin::Config::Actions::Submit, RailsAdmin::Config::Actions::DeleteCollection, RailsAdmin::Config::Actions::Inspect, RailsAdmin::Config::Actions::Copy, RailsAdmin::Config::Actions::Cancel, RailsAdmin::Config::Actions::Configure, RailsAdmin::Config::Actions::CrossShare, RailsAdmin::Config::Actions::Regist, RailsAdmin::Config::Actions::SharedCollectionIndex ].each { |a| RailsAdmin::Config::Actions.register(a) } RailsAdmin::Config::Actions.register(:export, RailsAdmin::Config::Actions::BulkExport) [ RailsAdmin::Config::Fields::Types::JsonValue, RailsAdmin::Config::Fields::Types::JsonSchema, RailsAdmin::Config::Fields::Types::StorageFile, 
RailsAdmin::Config::Fields::Types::EnumEdit ].each { |f| RailsAdmin::Config::Fields::Types.register(f) } RailsAdmin::Config::Fields::Types::CodeMirror.register_instance_option :js_location do bindings[:view].asset_path('codemirror.js') end RailsAdmin::Config::Fields::Types::CodeMirror.register_instance_option :css_location do bindings[:view].asset_path('codemirror.css') end RailsAdmin::Config::Fields::Types::CodeMirror.register_instance_option :config do { mode: 'css', theme: 'neo', } end RailsAdmin::Config::Fields::Types::CodeMirror.register_instance_option :assets do { mode: bindings[:view].asset_path('codemirror/modes/css.js'), theme: bindings[:view].asset_path('codemirror/themes/neo.css'), } end module RailsAdmin module Config class << self def navigation(label, options) navigation_options[label.to_s] = options end def navigation_options @nav_options ||= {} end end end end RailsAdmin.config do |config| config.total_columns_width = 900 ## == PaperTrail == # config.audit_with :paper_trail, 'User', 'PaperTrail::Version' # PaperTrail >= 3.0.0 ### More at https://github.com/sferik/rails_admin/wiki/Base-configuration config.authenticate_with do warden.authenticate! 
scope: :user unless %w(dashboard shared_collection_index index show).include?(action_name) end config.current_user_method { current_user } config.audit_with :mongoid_audit config.authorize_with :cancan config.excluded_models += [Setup::BaseOauthAuthorization, Setup::AwsAuthorization] config.actions do dashboard # mandatory # memory_usage # disk_usage shared_collection_index index # mandatory new { except [Setup::Event, Setup::DataType, Setup::Authorization, Setup::BaseOauthProvider] } import import_schema translator_update convert export bulk_delete show run edit configure copy simple_share bulk_share cross_share build_gem pull download_file load_model shutdown_model process_flow authorize simple_generate bulk_generate simple_expand bulk_expand records switch_navigation switch_scheduler simple_export schedule retry_task submit inspect cancel regist simple_delete_data_type bulk_delete_data_type delete delete_collection #show_in_app send_to_flow delete_all data_type # history_index do # only [Setup::DataType, Setup::Webhook, Setup::Flow, Setup::Schema, Setup::Event, Setup::Connection, Setup::ConnectionRole] # end # history_show do # only [Setup::DataType, Setup::Webhook, Setup::Flow, Setup::Schema, Setup::Event, Setup::Connection, Setup::ConnectionRole, Setup::Notification] # end end #Collections config.navigation 'Collections', icon: 'fa fa-cubes' config.model Setup::SharedCollection do weight -600 label 'Shared Collection' register_instance_option(:discard_submit_buttons) do !(a = bindings[:action]) || a.key != :edit end navigation_label 'Collections' object_label_method { :versioned_name } public_access true extra_associations do Setup::Collection.reflect_on_all_associations(:has_and_belongs_to_many).collect do |association| association = association.dup association[:name] = "data_#{association.name}".to_sym RailsAdmin::Adapters::Mongoid::Association.new(association, abstract_model.model) end end index_template_name :shared_collection_grid index_link_icon 
'icon-th-large' group :collections group :workflows group :api_connectors do label 'API Connectors' active true end group :data group :security edit do field :image do visible { !bindings[:object].instance_variable_get(:@sharing) } end field :logo_background field :name do required { true } end field :shared_version do required { true } end field :authors field :summary field :source_collection do visible { !((source_collection = bindings[:object].source_collection) && source_collection.new_record?) } inline_edit false inline_add false associated_collection_scope do source_collection = (obj = bindings[:object]).source_collection Proc.new { |scope| if obj.new_record? scope.where(id: source_collection ? source_collection.id : nil) else scope end } end end field :connections do inline_add false read_only do !((v = bindings[:object].instance_variable_get(:@_selecting_connections)).nil? || v) end help do nil end pretty_value do if bindings[:object].connections.present? v = bindings[:view] ids = '' [value].flatten.select(&:present?).collect do |associated| ids += "<option value=#{associated.id} selected=true/>" amc = polymorphic? ? RailsAdmin.config(associated) : associated_model_config am = amc.abstract_model wording = associated.send(amc.object_label_method) can_see = !am.embedded? && (show_action = v.action(:show, am, associated)) can_see ? v.link_to(wording, v.url_for(action: show_action.action_name, model_name: am.to_param, id: associated.id), class: 'pjax') : wording end.to_sentence.html_safe + v.select_tag("#{bindings[:controller].instance_variable_get(:@model_config).abstract_model.param_key}[connection_ids][]", ids.html_safe, multiple: true, style: 'display:none').html_safe else 'No connection selected'.html_safe end end visible do !(obj = bindings[:object]).instance_variable_get(:@_selecting_collection) && obj.source_collection && obj.source_collection.connections.present? 
end associated_collection_scope do source_collection = bindings[:object].source_collection connections = (source_collection && source_collection.connections) || [] Proc.new { |scope| scope.any_in(id: connections.collect { |connection| connection.id }) } end end field :dependencies do inline_add false read_only do !((v = bindings[:object].instance_variable_get(:@_selecting_dependencies)).nil? || v) end help do nil end pretty_value do if bindings[:object].dependencies.present? v = bindings[:view] ids = '' [value].flatten.select(&:present?).collect do |associated| ids += "<option value=#{associated.id} selected=true/>" amc = polymorphic? ? RailsAdmin.config(associated) : associated_model_config am = amc.abstract_model wording = associated.send(amc.object_label_method) can_see = !am.embedded? && (show_action = v.action(:show, am, associated)) can_see ? v.link_to(wording, v.url_for(action: show_action.action_name, model_name: am.to_param, id: associated.id), class: 'pjax') : wording end.to_sentence.html_safe + v.select_tag("#{bindings[:controller].instance_variable_get(:@model_config).abstract_model.param_key}[dependency_ids][]", ids.html_safe, multiple: true, style: 'display:none').html_safe else 'No dependencies selected'.html_safe end end visible do !(obj = bindings[:object]).instance_variable_get(:@_selecting_collection) end end field :pull_parameters do visible do if !(obj = bindings[:object]).instance_variable_get(:@_selecting_collection) && !obj.instance_variable_get(:@_selecting_connections) && (pull_parameters_enum = obj.enum_for_pull_parameters).present? bindings[:controller].instance_variable_set(:@shared_parameter_enum, pull_parameters_enum) true else false end end end field :pull_count do visible { Account.current_super_admin? 
} end field :readme do visible do !(obj = bindings[:object]).instance_variable_get(:@_selecting_collection) && !obj.instance_variable_get(:@_selecting_connections) end end end show do field :image field :name do pretty_value do bindings[:object].versioned_name end end field :summary do pretty_value do value.html_safe end end field :readme do pretty_value do begin template = value.gsub('&lt;%', '<%').gsub('%&gt;', '%>').gsub('%3C%', '<%').gsub('%%3E', '%>') Setup::Transformation::ActionViewTransform.run(transformation: template, style: 'html.erb', base_url: bindings[:controller].request.base_url, user_key: User.current_number, user_token: User.current_token, collection: nil, shared_collection: bindings[:object]) rescue Exception => ex value end.html_safe end end field :authors field :dependencies field :pull_count field :data_namespaces do group :collections label 'Namespaces' list_fields do %w(name slug) end end field :data_flows do group :workflows label 'Flows' list_fields do %w(namespace name) #TODO Inlude a description field on Flow model end end field :data_translators do group :workflows label 'Translators' list_fields do %w(namespace name type style) end end field :data_events do group :workflows label 'Events' list_fields do %w(namespace name _type) end end field :data_algorithms do group :workflows label 'Algorithms' list_fields do %w(namespace name description) end end field :data_connection_roles do group :api_connectors label 'Connection roles' list_fields do %w(namespace name) end end field :data_webhooks do group :api_connectors label 'Webhooks' list_fields do %w(namespace name path method description) end end field :data_connections do group :api_connectors label 'Connections' list_fields do %w(namespace name url) end end field :data_data_types do group :data label 'Data types' list_fields do %w(title name slug _type) end end field :data_schemas do group :data label 'Schemas' list_fields do %w(namespace uri) end end field :data_custom_validators do 
group :data label 'Custom validators' list_fields do %w(namespace name _type) #TODO Include a description field for Custom Validator model end end # field :data_data TODO Include collection data field field :data_authorizations do group :security label 'Authorizations' list_fields do %w(namespace name _type) end end field :data_oauth_providers do group :security label 'OAuth providers' list_fields do %w(namespace name response_type authorization_endpoint token_endpoint token_method _type) end end field :data_oauth_clients do group :security label 'OAuth clients' list_fields do %w(provider name) end end field :data_oauth2_scopes do group :security label 'OAuth 2.0 scopes' list_fields do %w(provider name description) end end field :_id field :created_at field :updated_at end list do field :image do thumb_method :icon end field :name do pretty_value do bindings[:object].versioned_name end end field :authors field :summary field :pull_count field :dependencies end end config.model Setup::CollectionAuthor do visible false object_label_method { :label } fields :name, :email end config.model Setup::CollectionPullParameter do visible false object_label_method { :label } field :label field :parameter, :enum do enum do bindings[:controller].instance_variable_get(:@shared_parameter_enum) || [bindings[:object].parameter] end end edit do field :label field :parameter end show do field :label field :parameter field :created_at #field :creator field :updated_at end list do field :label field :parameter field :created_at field :updated_at end fields :label, :parameter end config.model Setup::CollectionData do visible false object_label_method { :label } end config.model Setup::Collection do navigation_label 'Collections' register_instance_option :label_navigation do 'My Collections' end group :workflows configure :flows do group :workflows end configure :events do group :workflows end configure :translators do group :workflows end configure :algorithms do group :workflows end 
configure :applications do group :workflows end group :api_connectors do label 'API Connectors' active true end configure :connections do group :api_connectors end configure :webhooks do group :api_connectors end configure :connection_roles do group :api_connectors end group :data configure :data_types do group :data end configure :schemas do group :data end configure :data do group :data end configure :custom_validators do group :data end group :security configure :authorizations do group :security end configure :oauth_providers do group :security end configure :oauth_clients do group :security end configure :oauth2_scopes do group :security end edit do field :image field :readme do visible { Account.current_super_admin? } end field :name field :flows field :connection_roles field :translators field :events field :data_types field :schemas field :custom_validators field :algorithms field :applications field :webhooks field :connections field :authorizations field :oauth_providers field :oauth_clients field :oauth2_scopes field :data end show do field :image field :readme do pretty_value do begin template = value.gsub('&lt;%', '<%').gsub('%&gt;', '%>').gsub('%3C%', '<%').gsub('%%3E', '%>') Setup::Transformation::ActionViewTransform.run(transformation: template, style: 'html.erb', base_url: bindings[:controller].request.base_url, user_key: User.current.number, user_token: User.current.token, collection: bindings[:object], shared_collection: nil) rescue Exception => ex value end.html_safe end end field :name field :flows field :connection_roles field :translators field :events field :data_types field :schemas field :custom_validators field :algorithms field :applications field :webhooks field :connections field :authorizations field :oauth_providers field :oauth_clients field :oauth2_scopes field :data field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :image do thumb_method :icon end field :name field :flows do 
pretty_value do value.count > 0 ? value.count : '-' end end field :connection_roles do pretty_value do value.count > 0 ? value.count : '-' end end field :translators do pretty_value do value.count > 0 ? value.count : '-' end end field :events do pretty_value do value.count > 0 ? value.count : '-' end end field :data_types do pretty_value do value.count > 0 ? value.count : '-' end end field :schemas do pretty_value do value.count > 0 ? value.count : '-' end end field :custom_validators do pretty_value do value.count > 0 ? value.count : '-' end end field :algorithms do pretty_value do value.count > 0 ? value.count : '-' end end field :applications do pretty_value do value.count > 0 ? value.count : '-' end end field :webhooks do pretty_value do value.count > 0 ? value.count : '-' end end field :connections do pretty_value do value.count > 0 ? value.count : '-' end end field :authorizations do pretty_value do value.count > 0 ? value.count : '-' end end field :oauth_providers do pretty_value do value.count > 0 ? value.count : '-' end end field :oauth_clients do pretty_value do value.count > 0 ? value.count : '-' end end field :oauth2_scopes do pretty_value do value.count > 0 ? 
value.count : '-' end end field :data field :created_at field :updated_at end end config.model Setup::Namespace do navigation_label 'Collections' list do field :name field :slug field :created_at field :updated_at end fields :name, :slug end #Data config.navigation 'Data', icon: 'fa fa-database' config.model Setup::DataType do navigation_label 'Data' weight -450 label 'Data type' label_plural 'Data types' object_label_method { :custom_title } visible true show_in_dashboard false configure :_type do pretty_value do value.split('::').last.to_title end end group :behavior do label 'Behavior' active false end configure :namespace, :enum_edit configure :title do pretty_value do bindings[:object].custom_title end end configure :slug configure :storage_size, :decimal do pretty_value do if objects = bindings[:controller].instance_variable_get(:@objects) unless max = bindings[:controller].instance_variable_get(:@max_storage_size) bindings[:controller].instance_variable_set(:@max_storage_size, max = objects.collect { |data_type| data_type.storage_size }.max) end (bindings[:view].render partial: 'used_memory_bar', locals: { max: max, value: bindings[:object].records_model.storage_size }).html_safe else bindings[:view].number_to_human_size(value) end end read_only true end configure :before_save_callbacks do group :behavior inline_add false associated_collection_scope do Proc.new { |scope| scope.where(:parameters.with_size => 1) } end end configure :records_methods do group :behavior inline_add false end configure :data_type_methods do group :behavior inline_add false end edit do field :title field :before_save_callbacks field :records_methods field :data_type_methods end list do field :title field :name field :slug field :_type field :used_memory do visible { Cenit.dynamic_model_loading? 
} pretty_value do unless max = bindings[:controller].instance_variable_get(:@max_used_memory) bindings[:controller].instance_variable_set(:@max_used_memory, max = Setup::DataType.fields[:used_memory.to_s].type.new(Setup::DataType.max(:used_memory))) end (bindings[:view].render partial: 'used_memory_bar', locals: { max: max, value: Setup::DataType.fields[:used_memory.to_s].type.new(value) }).html_safe end end field :storage_size field :created_at field :updated_at end show do field :title field :name field :slug field :_type field :storage_size field :activated field :schema do pretty_value do v = if json = JSON.pretty_generate(value) rescue nil "<code class='json'>#{json.gsub('<', '&lt;').gsub('>', '&gt;')}</code>" else value end "<pre>#{v}</pre>".html_safe end end field :_id field :created_at #field :creator field :updated_at #field :updater end fields :namespace, :title, :name, :used_memory end config.model Setup::JsonDataType do navigation_label 'Data' weight -449 label 'JSON Data Type' object_label_method { :custom_title } register_instance_option(:after_form_partials) do %w(shutdown_and_reload) end group :behavior do label 'Behavior' active false end configure :title configure :name do read_only { !bindings[:object].new_record? 
} end configure :schema, :code_mirror do html_attributes do report = bindings[:object].shutdown(report_only: true) reload = (report[:reloaded].collect(&:data_type) + report[:destroyed].collect(&:data_type)).uniq bindings[:object].instance_variable_set(:@_to_reload, reload) { cols: '74', rows: '15' } end # pretty_value do # "<pre><code class='json'>#{JSON.pretty_generate(value)}</code></pre>".html_safe # end end configure :storage_size, :decimal do pretty_value do if objects = bindings[:controller].instance_variable_get(:@objects) unless max = bindings[:controller].instance_variable_get(:@max_storage_size) bindings[:controller].instance_variable_set(:@max_storage_size, max = objects.collect { |data_type| data_type.storage_size }.max) end (bindings[:view].render partial: 'used_memory_bar', locals: { max: max, value: bindings[:object].records_model.storage_size }).html_safe else bindings[:view].number_to_human_size(value) end end read_only true end configure :before_save_callbacks do group :behavior inline_add false associated_collection_scope do Proc.new { |scope| scope.where(:parameters.with_size => 1) } end end configure :records_methods do group :behavior inline_add false end configure :data_type_methods do group :behavior inline_add false end edit do field :namespace, :enum_edit field :title field :name field :slug field :schema, :json_schema do help { 'Required' } end field :before_save_callbacks field :records_methods field :data_type_methods end list do field :namespace field :title field :name field :slug field :used_memory do visible { Cenit.dynamic_model_loading? 
} pretty_value do unless max = bindings[:controller].instance_variable_get(:@max_used_memory) bindings[:controller].instance_variable_set(:@max_used_memory, max = Setup::JsonDataType.fields[:used_memory.to_s].type.new(Setup::JsonDataType.max(:used_memory))) end (bindings[:view].render partial: 'used_memory_bar', locals: { max: max, value: Setup::JsonDataType.fields[:used_memory.to_s].type.new(value) }).html_safe end end field :storage_size field :created_at field :updated_at end show do field :namespace field :title field :name field :slug field :storage_size field :activated field :schema do pretty_value do "<pre><code class='ruby'>#{JSON.pretty_generate(value)}</code></pre>".html_safe end end field :before_save_callbacks field :records_methods field :data_type_methods field :_id field :created_at #field :creator field :updated_at #field :updater end end config.model Setup::FileDataType do navigation_label 'Data' weight -448 object_label_method { :custom_title } configure :namespace, :enum_edit group :content do label 'Content' end group :behavior do label 'Behavior' active false end configure :title do pretty_value do bindings[:object].custom_title end end configure :used_memory do pretty_value do unless max = bindings[:controller].instance_variable_get(:@max_used_memory) bindings[:controller].instance_variable_set(:@max_used_memory, max = Setup::JsonDataType.fields[:used_memory.to_s].type.new(Setup::JsonDataType.max(:used_memory))) end (bindings[:view].render partial: 'used_memory_bar', locals: { max: max, value: Setup::JsonDataType.fields[:used_memory.to_s].type.new(value) }).html_safe end end configure :storage_size, :decimal do pretty_value do if objects = bindings[:controller].instance_variable_get(:@objects) unless max = bindings[:controller].instance_variable_get(:@max_storage_size) bindings[:controller].instance_variable_set(:@max_storage_size, max = objects.collect { |data_type| data_type.records_model.storage_size }.max) end (bindings[:view].render 
partial: 'used_memory_bar', locals: { max: max, value: bindings[:object].records_model.storage_size }).html_safe else bindings[:view].number_to_human_size(value) end end read_only true end configure :validators do group :content inline_add false end configure :schema_data_type do group :content inline_add false inline_edit false end configure :before_save_callbacks do group :behavior inline_add false associated_collection_scope do Proc.new { |scope| scope.where(:parameters.with_size => 1) } end end configure :records_methods do group :behavior inline_add false end configure :data_type_methods do group :behavior inline_add false end edit do field :namespace field :title field :name field :slug field :validators field :schema_data_type field :before_save_callbacks field :records_methods field :data_type_methods end list do field :title field :name field :slug field :validators field :schema_data_type field :used_memory do visible { Cenit.dynamic_model_loading? } pretty_value do unless max = bindings[:controller].instance_variable_get(:@max_used_memory) bindings[:controller].instance_variable_set(:@max_used_memory, max = Setup::JsonDataType.fields[:used_memory.to_s].type.new(Setup::JsonDataType.max(:used_memory))) end (bindings[:view].render partial: 'used_memory_bar', locals: { max: max, value: Setup::JsonDataType.fields[:used_memory.to_s].type.new(value) }).html_safe end end field :storage_size field :created_at field :updated_at end show do field :title field :name field :slug field :activated field :validators field :storage_size field :schema_data_type field :_id field :created_at #field :creator field :updated_at #field :updater end end config.model Setup::Validator do navigation_label 'Data' label 'Schemas & Validators' weight -490 configure :namespace, :enum_edit fields :namespace, :name list do field :namespace field :name field :created_at field :updated_at end show_in_dashboard { false } end config.model Setup::CustomValidator do visible false configure 
:_type do pretty_value do value.split('::').last.to_title end end list do field :namespace field :name field :_type field :created_at field :updated_at end fields :namespace, :name, :_type end config.model Setup::Schema do parent Setup::Validator weight -489 object_label_method { :custom_title } configure :namespace, :enum_edit edit do field :namespace do read_only { !bindings[:object].new_record? } end field :uri do read_only { !bindings[:object].new_record? } html_attributes do { cols: '74', rows: '1' } end end field :schema, :code_mirror do html_attributes do { cols: '74', rows: '15' } end end field :schema_data_type do inline_edit false inline_add false end end show do field :namespace field :uri field :schema do pretty_value do v = if json = JSON.parse(value) rescue nil "<code class='json'>#{JSON.pretty_generate(json).gsub('<', '&lt;').gsub('>', '&gt;')}</code>" elsif (xml = Nokogiri::XML(value)).errors.blank? "<code class='xml'>#{xml.to_xml.gsub('<', '&lt;').gsub('>', '&gt;')}</code>" else "<code>#{value}</code>" end "<pre>#{v}</pre>".html_safe end end field :schema_data_type field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :namespace field :uri field :schema_data_type field :created_at field :updated_at end fields :namespace, :uri, :schema_data_type end config.model Setup::XsltValidator do parent Setup::Validator weight -488 object_label_method { :custom_title } list do field :namespace field :xslt field :created_at field :updated_at end fields :namespace, :name, :xslt end config.model Setup::EdiValidator do parent Setup::Validator weight -487 object_label_method { :custom_title } label 'EDI Validators' configure :namespace, :enum_edit list do field :namespace field :name field :schema_data_type field :content_type field :created_at field :updated_at end fields :namespace, :name, :schema_data_type, :content_type end config.model Setup::AlgorithmValidator do parent Setup::Validator weight -486 
object_label_method { :custom_title } configure :namespace, :enum_edit list do field :namespace field :name field :algorithm field :created_at field :updated_at end fields :namespace, :name, :algorithm end #API Connectors config.navigation 'API Connectors', icon: :api_connectors config.model Setup::Parameter do visible false object_label_method { :to_s } configure :metadata, :json_value edit do field :name field :value field :description field :metadata end list do field :name field :value field :description field :metadata field :created_at field :updated_at end end config.model Setup::Connection do navigation_label 'API Connectors' weight -400 object_label_method { :custom_title } configure :namespace, :enum_edit group :credentials do label 'Credentials' end configure :key, :string do visible { User.current_admin? } html_attributes do { maxlength: 30, size: 30 } end group :credentials end configure :token, :text do visible { User.current_admin? } html_attributes do { cols: '50', rows: '1' } end group :credentials end configure :authorization do group :credentials inline_edit false visible { User.current_admin? } end configure :authorization_handler do group :credentials visible { User.current_admin? } end group :parameters do label 'Parameters & Headers' end configure :parameters do group :parameters visible { User.current_admin? } end configure :headers do group :parameters visible { User.current_admin? } end configure :template_parameters do group :parameters visible { User.current_admin? 
} end edit do field :namespace field :name field :url field :key field :token field :authorization field :authorization_handler field :parameters field :headers field :template_parameters end show do field :namespace field :name field :url field :key field :token field :authorization field :authorization_handler field :parameters field :headers field :template_parameters field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :namespace field :name field :url field :key field :token field :authorization field :created_at field :updated_at end fields :namespace, :name, :url, :key, :token, :authorization end config.model Setup::ConnectionRole do navigation_label 'API Connectors' weight -309 object_label_method { :custom_title } configure :namespace, :enum_edit configure :name, :string do help 'Requiered.' html_attributes do { maxlength: 50, size: 50 } end end configure :webhooks do nested_form false end configure :connections do nested_form false end modal do field :namespace field :name field :webhooks field :connections end show do field :namespace field :name field :webhooks field :connections field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :namespace field :name field :webhooks field :connections field :created_at field :updated_at end fields :namespace, :name, :webhooks, :connections end config.model Setup::Webhook do navigation_label 'API Connectors' weight -308 object_label_method { :custom_title } configure :namespace, :enum_edit configure :metadata, :json_value group :credentials do label 'Credentials' end configure :authorization do group :credentials inline_edit false visible { User.current_admin? } end configure :authorization_handler do group :credentials visible { User.current_admin? } end group :parameters do label 'Parameters & Headers' end configure :path, :string do help 'Requiered. Path of the webhook relative to connection URL.' 
html_attributes do { maxlength: 255, size: 100 } end end configure :parameters do group :parameters end configure :headers do group :parameters end configure :template_parameters do group :parameters end edit do field :namespace field :name field :path field :method field :description field :metadata, :json_value field :authorization field :authorization_handler field :parameters field :headers field :template_parameters end show do field :namespace field :name field :path field :method field :description field :metadata, :json_value field :authorization field :authorization_handler field :parameters field :headers field :template_parameters field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :namespace field :name field :path field :method field :description field :authorization field :created_at field :updated_at end fields :namespace, :name, :path, :method, :description, :authorization end #Workflows config.navigation 'Workflows', icon: 'fa fa-cogs' config.model Setup::Flow do navigation_label 'Workflows' weight -300 object_label_method { :custom_title } register_instance_option(:form_synchronized) do [:custom_data_type, :data_type_scope, :scope_filter, :scope_evaluator, :lot_size, :connection_role, :webhook, :response_translator, :response_data_type] end configure :namespace, :enum_edit edit do field :namespace field :name field :event do inline_edit false inline_add false end field :translator do help 'Required' end field :custom_data_type do inline_edit false inline_add false visible do if (f = bindings[:object]).custom_data_type.present? f.nil_data_type = false end if f.translator.present? && f.translator.data_type.nil? 
&& !f.nil_data_type f.instance_variable_set(:@selecting_data_type, f.custom_data_type = f.event && f.event.try(:data_type)) unless f.data_type f.nil_data_type = f.translator.type == :Export && (params = (controller = bindings[:controller]).params) && (params = params[controller.abstract_model.param_key]) && params[:custom_data_type_id].blank? && params.keys.include?(:custom_data_type_id.to_s) true else false end end label do if (translator = bindings[:object].translator) if [:Export, :Conversion].include?(translator.type) 'Source data type' else 'Target data type' end else 'Data type' end end help do if bindings[:object].nil_data_type '' elsif (translator = bindings[:object].translator) && [:Export, :Conversion].include?(translator.type) 'Optional' else 'Required' end end end field :nil_data_type do visible { bindings[:object].nil_data_type } label do if (translator = bindings[:object].translator) if [:Export, :Conversion].include?(translator.type) 'No source data type' else 'No target data type' end else 'No data type' end end end field :data_type_scope do visible do bindings[:controller].instance_variable_set(:@_data_type, bindings[:object].data_type) bindings[:controller].instance_variable_set(:@_update_field, 'translator_id') (f = bindings[:object]).translator.present? 
&& f.translator.type != :Import && f.data_type && !f.instance_variable_get(:@selecting_data_type) end label do if (translator = bindings[:object].translator) if [:Export, :Conversion].include?(translator.type) 'Source scope' else 'Target scope' end else 'Data type scope' end end help 'Required' end field :scope_filter do visible { bindings[:object].scope_symbol == :filtered } partial 'form_triggers' help false end field :scope_evaluator do inline_add false inline_edit false visible { bindings[:object].scope_symbol == :evaluation } associated_collection_scope do Proc.new { |scope| scope.where(:parameters.with_size => 1) } end help 'Required' end field :lot_size do visible { (f = bindings[:object]).translator.present? && f.translator.type == :Export && !f.nil_data_type && f.data_type_scope && f.scope_symbol != :event_source } end field :webhook do visible { (translator = (f = bindings[:object]).translator) && (translator.type == :Import || (translator.type == :Export && (bindings[:object].data_type_scope.present? || f.nil_data_type))) } help 'Required' end field :connection_role do visible { (translator = (f = bindings[:object]).translator) && (translator.type == :Import || (translator.type == :Export && (bindings[:object].data_type_scope.present? || f.nil_data_type))) } help 'Optional' end field :response_translator do visible { (translator = (f = bindings[:object]).translator) && (translator.type == :Export && (bindings[:object].data_type_scope.present? || f.nil_data_type)) && f.ready_to_save? } associated_collection_scope do Proc.new { |scope| scope.where(type: :Import) } end end field :response_data_type do inline_edit false inline_add false visible { (response_translator = bindings[:object].response_translator) && response_translator.type == :Import && response_translator.data_type.nil? } help '' end field :discard_events do visible { (((obj = bindings[:object]).translator && obj.translator.type == :Import) || obj.response_translator.present?) 
&& obj.ready_to_save? } help "Events won't be fired for created or updated records if checked" end field :active do visible { bindings[:object].ready_to_save? } end field :notify_request do visible { (obj = bindings[:object]).translator && [:Import, :Export].include?(obj.translator.type) && obj.ready_to_save? } help 'Track request via notifications if checked' end field :notify_response do visible { (obj = bindings[:object]).translator && [:Import, :Export].include?(obj.translator.type) && obj.ready_to_save? } help 'Track responses via notification if checked' end field :after_process_callbacks do visible { bindings[:object].ready_to_save? } help 'Algorithms executed after flow processing, execution state is supplied as argument' associated_collection_scope do Proc.new { |scope| scope.where(:parameters.with_size => 1) } end end end show do field :namespace field :name field :active field :event field :translator field :custom_data_type field :data_type_scope field :scope_filter field :scope_evaluator field :lot_size field :webhook field :connection_role field :response_translator field :response_data_type field :discard_events field :notify_request field :notify_response field :after_process_callbacks field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :namespace field :name field :active field :event field :translator field :created_at field :updated_at end fields :namespace, :name, :active, :event, :translator end config.model Setup::Event do navigation_label 'Workflows' weight -209 object_label_method { :custom_title } visible false configure :namespace, :enum_edit configure :_type do pretty_value do value.split('::').last.to_title end end edit do field :namespace field :name end show do field :namespace field :name field :_type field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :namespace field :name field :_type field :created_at field :updated_at end fields :namespace, 
:name, :_type end config.model Setup::Observer do navigation_label 'Workflows' weight -208 label 'Data event' object_label_method { :custom_title } configure :namespace, :enum_edit edit do field :namespace field :name field :data_type do inline_add false inline_edit false associated_collection_scope do data_type = bindings[:object].data_type Proc.new { |scope| if data_type scope.where(id: data_type.id) else scope end } end help 'Required' end field :trigger_evaluator do visible { (obj = bindings[:object]).data_type.blank? || obj.trigger_evaluator.present? || obj.triggers.nil? } associated_collection_scope do Proc.new { |scope| scope.all.or(:parameters.with_size => 1).or(:parameters.with_size => 2) } end end field :triggers do visible do bindings[:controller].instance_variable_set(:@_data_type, data_type = bindings[:object].data_type) bindings[:controller].instance_variable_set(:@_update_field, 'data_type_id') data_type.present? && !bindings[:object].trigger_evaluator.present? end partial 'form_triggers' help false end end show do field :namespace field :name field :data_type field :triggers field :trigger_evaluator field :_id field :created_at #field :creator field :updated_at #field :updater end fields :namespace, :name, :data_type, :triggers, :trigger_evaluator end config.model Setup::Scheduler do navigation_label 'Workflows' weight -207 object_label_method { :custom_title } configure :namespace, :enum_edit edit do field :namespace field :name field :scheduling_method field :expression do visible { bindings[:object].scheduling_method.present? } label do case bindings[:object].scheduling_method when :Once 'Date and time' when :Periodic 'Duration' when :CRON 'CRON Expression' else 'Expression' end end help do case bindings[:object].scheduling_method when :Once 'Select a date and a time' when :Periodic 'Type a time duration' when :CRON 'Type a CRON Expression' else 'Expression' end end partial { bindings[:object].scheduling_method == :Once ? 
'form_datetime_wrapper' : 'form_text' } html_attributes do { rows: '1' } end end end show do field :namespace field :name field :expression field :origin field :_id field :created_at #field :creator field :updated_at #field :updater end fields :namespace, :name, :scheduling_method, :expression, :activated, :origin end config.model Setup::AlgorithmParameter do visible false fields :name, :description end config.model Setup::CallLink do visible false edit do field :name do read_only true help { nil } label 'Call name' end field :link do inline_add false inline_edit false help { nil } end end fields :name, :link end config.model Setup::Translator do navigation_label 'Workflows' weight -206 object_label_method { :custom_title } register_instance_option(:form_synchronized) do [:source_data_type, :target_data_type, :transformation, :target_importer, :source_exporter, :discard_chained_records] end configure :namespace, :enum_edit edit do field :namespace field :name field :type field :source_data_type do inline_edit false inline_add false visible { [:Export, :Conversion].include?(bindings[:object].type) } help { bindings[:object].type == :Conversion ? 'Required' : 'Optional' } end field :target_data_type do inline_edit false inline_add false visible { [:Import, :Update, :Conversion].include?(bindings[:object].type) } help { bindings[:object].type == :Conversion ? 'Required' : 'Optional' } end field :discard_events do visible { [:Import, :Update, :Conversion].include?(bindings[:object].type) } help "Events won't be fired for created or updated records if checked" end field :style do visible { bindings[:object].type.present? } help 'Required' end field :bulk_source do visible { bindings[:object].type == :Export && bindings[:object].style.present? && bindings[:object].source_bulkable? } end field :mime_type do label 'MIME type' visible { bindings[:object].type == :Export && bindings[:object].style.present? 
} end field :file_extension do visible { bindings[:object].type == :Export && !bindings[:object].file_extension_enum.empty? } help { "Extensions for #{bindings[:object].mime_type}" } end field :source_handler do visible { (t = bindings[:object]).style.present? && (t.type == :Update || (t.type == :Conversion && t.style == 'ruby')) } help { 'Handle sources on transformation' } end field :transformation, :code_mirror do visible { bindings[:object].style.present? && bindings[:object].style != 'chain' } help { 'Required' } html_attributes do { cols: '74', rows: '15' } end end field :source_exporter do inline_add { bindings[:object].source_exporter.nil? } visible { bindings[:object].style == 'chain' && bindings[:object].source_data_type && bindings[:object].target_data_type } help 'Required' associated_collection_scope do data_type = bindings[:object].source_data_type Proc.new { |scope| scope.all(type: :Conversion, source_data_type: data_type) } end end field :target_importer do inline_add { bindings[:object].target_importer.nil? 
} visible { bindings[:object].style == 'chain' && bindings[:object].source_data_type && bindings[:object].target_data_type && bindings[:object].source_exporter } help 'Required' associated_collection_scope do translator = bindings[:object] source_data_type = if translator.source_exporter translator.source_exporter.target_data_type else translator.source_data_type end target_data_type = bindings[:object].target_data_type Proc.new { |scope| scope = scope.all(type: :Conversion, source_data_type: source_data_type, target_data_type: target_data_type) } end end field :discard_chained_records do visible { bindings[:object].style == 'chain' && bindings[:object].source_data_type && bindings[:object].target_data_type && bindings[:object].source_exporter } help "Chained records won't be saved if checked" end end show do field :namespace field :name field :type field :source_data_type field :bulk_source field :target_data_type field :discard_events field :style field :mime_type field :file_extension field :transformation do pretty_value do "<pre><code class='ruby'>#{value}</code></pre>".html_safe end end field :source_exporter field :target_importer field :discard_chained_records field :_id field :created_at #field :creator field :updated_at #field :updater end fields :namespace, :name, :type, :style, :transformation end config.model Setup::Algorithm do navigation_label 'Workflows' weight -205 object_label_method { :custom_title } configure :namespace, :enum_edit edit do field :namespace field :name field :description field :parameters field :code, :code_mirror do help { 'Required' } end field :call_links do visible { bindings[:object].call_links.present? 
} end end show do field :namespace field :name field :description field :parameters field :code do pretty_value do v = value.gsub('<', '&lt;').gsub('>', '&gt;') "<pre><code class='ruby'>#{v}</code></pre>".html_safe end end field :call_links field :_id end fields :namespace, :name, :description, :parameters, :call_links end config.model Setup::Action do visible false navigation_label 'Workflows' weight -202 object_label_method { :to_s } fields :method, :path, :algorithm end config.model Setup::Application do navigation_label 'Workflows' weight -201 object_label_method { :custom_title } visible { Account.current_super_admin? } configure :namespace, :enum_edit configure :identifier edit do field :namespace field :name field :slug field :actions field :application_parameters end fields :namespace, :name, :slug, :identifier, :secret_token, :actions, :application_parameters end config.model Setup::ApplicationParameter do visible false navigation_label 'Workflows' configure :group, :enum_edit fields :name, :type, :many, :group, :description end #Security config.navigation 'Security', icon: 'fa fa-shield' config.model Setup::OauthClient do navigation_label 'Security' label 'OAuth client' weight -100 object_label_method { :custom_title } configure :tenant do visible { Account.current_super_admin? } read_only { true } help '' end configure :origin do visible { Account.current_super_admin? 
} end configure :identifier do pretty_value do if Account.current.id == bindings[:object].tenant_id value else '<i class="icon-lock"/>'.html_safe end end end configure :secret do pretty_value do if Account.current && Account.current.id == bindings[:object].tenant_id value else '<i class="icon-lock"/>'.html_safe end end end fields :provider, :name, :identifier, :secret, :tenant, :origin end config.model Setup::BaseOauthProvider do navigation_label 'Security' weight -90 object_label_method { :custom_title } label 'Provider' configure :_type do pretty_value do value.split('::').last.to_title end end configure :tenant do visible { Account.current_super_admin? } read_only { true } help '' end configure :origin do visible { Account.current_super_admin? } end configure :namespace, :enum_edit fields :namespace, :name, :_type, :response_type, :authorization_endpoint, :token_endpoint, :token_method, :tenant, :origin end config.model Setup::OauthProvider do weight -89 label 'OAuth 1.0 provider' register_instance_option :label_navigation do 'OAuth 1.0' end object_label_method { :custom_title } configure :tenant do visible { Account.current_super_admin? } read_only { true } help '' end configure :origin do visible { Account.current_super_admin? } end configure :namespace, :enum_edit configure :refresh_token_algorithm do visible { bindings[:object].refresh_token_strategy == :custom.to_s } end fields :namespace, :name, :response_type, :authorization_endpoint, :token_endpoint, :token_method, :request_token_endpoint, :refresh_token_strategy, :refresh_token_algorithm, :tenant, :origin end config.model Setup::Oauth2Provider do weight -88 label 'OAuth 2.0 provider' register_instance_option :label_navigation do 'OAuth 2.0' end object_label_method { :custom_title } configure :tenant do visible { Account.current_super_admin? } read_only { true } help '' end configure :origin do visible { Account.current_super_admin? 
} end configure :refresh_token_algorithm do visible { bindings[:object].refresh_token_strategy == :custom.to_s } end configure :namespace, :enum_edit fields :namespace, :name, :response_type, :authorization_endpoint, :token_endpoint, :token_method, :scope_separator, :refresh_token_strategy, :refresh_token_algorithm, :tenant, :origin end config.model Setup::Oauth2Scope do navigation_label 'Security' weight -87 label 'OAuth 2.0 scope' object_label_method { :custom_title } configure :tenant do visible { Account.current_super_admin? } read_only { true } help '' end configure :origin do visible { Account.current_super_admin? } end fields :provider, :name, :description, :tenant, :origin end config.model Setup::Authorization do navigation_label 'Security' weight -50 object_label_method { :custom_title } configure :status do pretty_value do "<span class=\"label label-#{bindings[:object].status_class}\">#{value.to_s.capitalize}</span>".html_safe end end configure :namespace, :enum_edit configure :metadata, :json_value configure :_type do pretty_value do value.split('::').last.to_title end end fields :namespace, :name, :status, :_type, :metadata show_in_dashboard { false } end config.model Setup::BasicAuthorization do weight -49 register_instance_option :label_navigation do 'Basic' end object_label_method { :custom_title } configure :status do pretty_value do "<span class=\"label label-#{bindings[:object].status_class}\">#{value.to_s.capitalize}</span>".html_safe end end configure :namespace, :enum_edit configure :metadata, :json_value edit do field :namespace field :name field :username field :password field :metadata end group :credentials do label 'Credentials' end configure :username do group :credentials end configure :password do group :credentials end show do field :namespace field :name field :status field :username field :password field :metadata field :_id end fields :namespace, :name, :status, :username, :password end config.model Setup::OauthAuthorization do 
weight -45 label 'OAuth 1.0 authorization' register_instance_option :label_navigation do 'OAuth 1.0' end object_label_method { :custom_title } parent Setup::Authorization configure :namespace, :enum_edit configure :metadata, :json_value configure :status do pretty_value do "<span class=\"label label-#{bindings[:object].status_class}\">#{value.to_s.capitalize}</span>".html_safe end end edit do field :namespace field :name field :client field :parameters field :metadata end group :credentials do label 'Credentials' end configure :access_token do group :credentials end configure :token_span do group :credentials end configure :authorized_at do group :credentials end configure :access_token_secret do group :credentials end configure :realm_id do group :credentials end show do field :namespace field :name field :status field :client field :parameters field :metadata field :_id field :access_token field :access_token_secret field :realm_id field :token_span field :authorized_at end fields :namespace, :name, :status, :client end config.model Setup::Oauth2Authorization do weight -40 label 'OAuth 2.0 authorization' register_instance_option :label_navigation do 'OAuth 2.0' end object_label_method { :custom_title } parent Setup::Authorization configure :namespace, :enum_edit configure :metadata, :json_value configure :status do pretty_value do "<span class=\"label label-#{bindings[:object].status_class}\">#{value.to_s.capitalize}</span>".html_safe end end configure :expires_in do pretty_value do "#{value}s" if value end end edit do field :namespace field :name field :client field :scopes do visible { bindings[:object].ready_to_save? } associated_collection_scope do provider = ((obj = bindings[:object]) && obj.provider) || nil Proc.new { |scope| if provider scope.where(provider_id: provider.id) else scope end } end end field :parameters do visible { bindings[:object].ready_to_save? 
} end field :metadata end group :credentials do label 'Credentials' end configure :access_token do group :credentials end configure :token_span do group :credentials end configure :authorized_at do group :credentials end configure :refresh_token do group :credentials end configure :token_type do group :credentials end show do field :namespace field :name field :status field :client field :scopes field :parameters field :metadata field :_id field :expires_in field :id_token field :token_type field :access_token field :token_span field :authorized_at field :refresh_token field :_id end list do field :namespace field :name field :status field :client field :scopes end end config.model Setup::AwsAuthorization do weight -35 object_label_method { :custom_title } configure :namespace, :enum_edit configure :metadata, :json_value configure :status do pretty_value do "<span class=\"label label-#{bindings[:object].status_class}\">#{value.to_s.capitalize}</span>".html_safe end end edit do field :namespace field :name field :aws_access_key field :aws_secret_key field :seller field :merchant field :markets field :signature_method field :signature_version field :metadata end group :credentials do label 'Credentials' end configure :aws_access_key do group :credentials end configure :aws_secret_key do group :credentials end show do field :namespace field :name field :aws_access_key field :aws_secret_key field :seller field :merchant field :markets field :signature_method field :signature_version field :metadata end fields :namespace, :name, :aws_access_key, :aws_secret_key, :seller, :merchant, :markets, :signature_method, :signature_version end config.model Setup::OauthAccessGrant do navigation_label 'Security' weight -32 fields :created_at, :application_id, :scope end #Monitors config.navigation 'Monitors', icon: 'fa fa-heartbeat' config.model Setup::Notification do navigation_label 'Monitors' weight -20 object_label_method { :label } show_in_dashboard false configure :created_at 
configure :type do pretty_value do "<label style='color:#{bindings[:object].color}'>#{value.to_s.capitalize}</label>".html_safe end end configure :message do pretty_value do "<label style='color:#{bindings[:object].color}'>#{value}</label>".html_safe end end configure :attachment, :storage_file list do field :created_at do visible do if account = Account.current account.notifications_listed_at = Time.now end true end end field :type field :message field :attachment field :task end end config.model Setup::Task do navigation_label 'Monitors' weight -18 object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end configure :_type do pretty_value do value.split('::').last.to_title end end edit do field :description end list do field :_type field :description field :scheduler field :attempts_succeded field :retries field :progress field :status end end config.model Setup::FlowExecution do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field :description end fields :flow, :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::DataTypeGeneration do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field :description end fields :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::DataTypeExpansion do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field :description end fields :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::Translation do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do 
label 'Attempts/Succedded' end edit do field :description end fields :translator, :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::DataImport do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field :description end fields :translator, :data, :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::SchemasImport do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :namespace, :enum_edit configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field :description end fields :namespace, :base_uri, :data, :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::Deletion do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end configure :deletion_model do label 'Model' pretty_value do if value v = bindings[:view] amc = RailsAdmin.config(value) am = amc.abstract_model wording = amc.navigation_label + ' > ' + amc.label can_see = !am.embedded? && (index_action = v.action(:index, am)) (can_see ? 
v.link_to(amc.contextualized_label(:menu), v.url_for(action: index_action.action_name, model_name: am.to_param), class: 'pjax') : wording).html_safe end end end edit do field :description end fields :deletion_model, :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::AlgorithmExecution do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field :description end fields :algorithm, :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::Submission do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field :description end fields :webhook, :connection, :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::Storage do navigation_label 'Monitors' show_in_dashboard false weight -15 object_label_method { :label } configure :filename do label 'File name' pretty_value { bindings[:object].storage_name } end configure :length do label 'Size' pretty_value do if objects = bindings[:controller].instance_variable_get(:@objects) unless max = bindings[:controller].instance_variable_get(:@max_length) bindings[:controller].instance_variable_set(:@max_length, max = objects.collect { |storage| storage.length }.reject(&:nil?).max) end (bindings[:view].render partial: 'used_memory_bar', locals: { max: max, value: bindings[:object].length }).html_safe else bindings[:view].number_to_human_size(value) end end end configure :storer_model do label 'Model' pretty_value do if value v = bindings[:view] amc = RailsAdmin.config(value) am = amc.abstract_model wording = amc.navigation_label + ' > ' + amc.label can_see = !am.embedded? && (index_action = v.action(:index, am)) (can_see ? 
v.link_to(amc.label, v.url_for(action: index_action.action_name, model_name: am.to_param), class: 'pjax') : wording).html_safe end end end configure :storer_object do label 'Object' pretty_value do if value v = bindings[:view] amc = RailsAdmin.config(value.class) am = amc.abstract_model wording = value.send(amc.object_label_method) can_see = !am.embedded? && (show_action = v.action(:show, am, value)) (can_see ? v.link_to(wording, v.url_for(action: show_action.action_name, model_name: am.to_param, id: value.id), class: 'pjax') : wording).html_safe end end end configure :storer_property do label 'Property' end fields :storer_model, :storer_object, :storer_property, :filename, :contentType, :length end #Administration config.navigation 'Administration', icon: 'fa fa-wrench' config.model User do weight -1 navigation_label 'Administration' visible { User.current_super_admin? } object_label_method { :label } group :credentials do label 'Credentials' active true end group :activity do label 'Activity' active true end configure :name configure :email configure :roles configure :account do read_only { true } end configure :password do group :credentials end configure :password_confirmation do group :credentials end configure :key do group :credentials end configure :authentication_token do group :credentials end configure :confirmed_at do group :activity end configure :sign_in_count do group :activity end configure :current_sign_in_at do group :activity end configure :last_sign_in_at do group :activity end configure :current_sign_in_ip do group :activity end configure :last_sign_in_ip do group :activity end edit do field :picture field :name field :email do visible { Account.current_super_admin? } end field :roles do visible { Account.current_super_admin? } end field :account do label { Account.current_super_admin? ? 'Account' : 'Account settings' } help { nil } end field :password do visible { Account.current_super_admin? 
} end field :password_confirmation do visible { Account.current_super_admin? } end field :key do visible { !bindings[:object].new_record? && Account.current_super_admin? } end field :authentication_token do visible { !bindings[:object].new_record? && Account.current_super_admin? } end field :confirmed_at do visible { !bindings[:object].new_record? && Account.current_super_admin? } end field :sign_in_count do visible { !bindings[:object].new_record? && Account.current_super_admin? } end field :current_sign_in_at do visible { !bindings[:object].new_record? && Account.current_super_admin? } end field :last_sign_in_at do visible { !bindings[:object].new_record? && Account.current_super_admin? } end field :current_sign_in_ip do visible { !bindings[:object].new_record? && Account.current_super_admin? } end field :last_sign_in_ip do visible { !bindings[:object].new_record? && Account.current_super_admin? } end end show do field :picture field :name field :email field :account field :roles field :key field :authentication_token field :sign_in_count field :current_sign_in_at field :last_sign_in_at field :current_sign_in_ip field :last_sign_in_ip end list do field :picture do thumb_method :icon end field :name field :email field :account field :roles field :key field :authentication_token field :sign_in_count end end config.model Account do navigation_label 'Administration' visible { User.current_super_admin? } object_label_method { :label } configure :_id do visible { Account.current_super_admin? } end configure :name do visible { Account.current_super_admin? } end configure :owner do read_only { !Account.current_super_admin? } help { nil } end configure :tenant_account do visible { Account.current_super_admin? } end configure :number do visible { Account.current_super_admin? } end configure :users do visible { Account.current_super_admin? 
} end configure :notification_level fields :_id, :name, :owner, :tenant_account, :number, :users, :notification_level end config.model Role do navigation_label 'Administration' visible { User.current_super_admin? } configure :users do visible { Account.current_super_admin? } end fields :name, :users end config.model Setup::SharedName do visible { false } navigation_label 'Collections' fields :name, :owners end config.model Setup::SharedName do navigation_label 'Administration' visible { User.current_super_admin? } fields :name, :owners end config.model Script do navigation_label 'Administration' visible { User.current_super_admin? } edit do field :name field :description field :code, :code_mirror end show do field :name field :description field :code do pretty_value do v = value.gsub('<', '&lt;').gsub('>', '&gt;') "<pre><code class='ruby'>#{v}</code></pre>".html_safe end end end fields :name, :description, :code end config.model CenitToken do navigation_label 'Administration' visible { User.current_super_admin? } end config.model Setup::DelayedMessage do navigation_label 'Administration' visible { User.current_super_admin? } end config.model Setup::SystemNotification do navigation_label 'Administration' visible { User.current_super_admin? } end config.model RabbitConsumer do navigation_label 'Administration' visible { User.current_super_admin? 
} object_label_method { :to_s } configure :task_id do pretty_value do if (executor = (obj = bindings[:object]).executor) && (task = obj.executing_task) v = bindings[:view] amc = RailsAdmin.config(task.class) am = amc.abstract_model wording = task.send(amc.object_label_method) amc = RailsAdmin.config(Account) am = amc.abstract_model if (inspect_action = v.action(:inspect, am, executor)) task_path = v.show_path(model_name: task.class.to_s.underscore.gsub('/', '~'), id: task.id.to_s) v.link_to(wording, v.url_for(action: inspect_action.action_name, model_name: am.to_param, id: executor.id, params: { return_to: task_path })) else wording end.html_safe end end end fields :created_at, :channel, :tag, :executor, :task_id, :alive end config.model ApplicationId do navigation_label 'Administration' visible { User.current_super_admin? } register_instance_option(:discard_submit_buttons) { bindings[:object].instance_variable_get(:@registering) } configure :name configure :registered, :boolean configure :redirect_uris, :json_value edit do field :oauth_name do visible { bindings[:object].instance_variable_get(:@registering) } end field :redirect_uris do visible { bindings[:object].instance_variable_get(:@registering) } end end fields :created_at, :name, :registered, :account, :identifier end config.model Setup::ScriptExecution do parent { nil } navigation_label 'Administration' object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field :description end fields :script, :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end end added more created_at and updated_at to others index [ RailsAdmin::Config::Actions::MemoryUsage, RailsAdmin::Config::Actions::DiskUsage, RailsAdmin::Config::Actions::SendToFlow, RailsAdmin::Config::Actions::LoadModel, RailsAdmin::Config::Actions::ShutdownModel, RailsAdmin::Config::Actions::SwitchNavigation, RailsAdmin::Config::Actions::DataType, 
RailsAdmin::Config::Actions::Import, #RailsAdmin::Config::Actions::EdiExport, RailsAdmin::Config::Actions::ImportSchema, RailsAdmin::Config::Actions::DeleteAll, RailsAdmin::Config::Actions::TranslatorUpdate, RailsAdmin::Config::Actions::Convert, RailsAdmin::Config::Actions::SimpleShare, RailsAdmin::Config::Actions::BulkShare, RailsAdmin::Config::Actions::Pull, RailsAdmin::Config::Actions::RetryTask, RailsAdmin::Config::Actions::DownloadFile, RailsAdmin::Config::Actions::ProcessFlow, RailsAdmin::Config::Actions::BuildGem, RailsAdmin::Config::Actions::Run, RailsAdmin::Config::Actions::Authorize, RailsAdmin::Config::Actions::SimpleDeleteDataType, RailsAdmin::Config::Actions::BulkDeleteDataType, RailsAdmin::Config::Actions::SimpleGenerate, RailsAdmin::Config::Actions::BulkGenerate, RailsAdmin::Config::Actions::SimpleExpand, RailsAdmin::Config::Actions::BulkExpand, RailsAdmin::Config::Actions::Records, RailsAdmin::Config::Actions::SwitchScheduler, RailsAdmin::Config::Actions::SimpleExport, RailsAdmin::Config::Actions::Schedule, RailsAdmin::Config::Actions::Submit, RailsAdmin::Config::Actions::DeleteCollection, RailsAdmin::Config::Actions::Inspect, RailsAdmin::Config::Actions::Copy, RailsAdmin::Config::Actions::Cancel, RailsAdmin::Config::Actions::Configure, RailsAdmin::Config::Actions::CrossShare, RailsAdmin::Config::Actions::Regist, RailsAdmin::Config::Actions::SharedCollectionIndex ].each { |a| RailsAdmin::Config::Actions.register(a) } RailsAdmin::Config::Actions.register(:export, RailsAdmin::Config::Actions::BulkExport) [ RailsAdmin::Config::Fields::Types::JsonValue, RailsAdmin::Config::Fields::Types::JsonSchema, RailsAdmin::Config::Fields::Types::StorageFile, RailsAdmin::Config::Fields::Types::EnumEdit ].each { |f| RailsAdmin::Config::Fields::Types.register(f) } RailsAdmin::Config::Fields::Types::CodeMirror.register_instance_option :js_location do bindings[:view].asset_path('codemirror.js') end RailsAdmin::Config::Fields::Types::CodeMirror.register_instance_option 
:css_location do bindings[:view].asset_path('codemirror.css') end RailsAdmin::Config::Fields::Types::CodeMirror.register_instance_option :config do { mode: 'css', theme: 'neo', } end RailsAdmin::Config::Fields::Types::CodeMirror.register_instance_option :assets do { mode: bindings[:view].asset_path('codemirror/modes/css.js'), theme: bindings[:view].asset_path('codemirror/themes/neo.css'), } end module RailsAdmin module Config class << self def navigation(label, options) navigation_options[label.to_s] = options end def navigation_options @nav_options ||= {} end end end end RailsAdmin.config do |config| config.total_columns_width = 900 ## == PaperTrail == # config.audit_with :paper_trail, 'User', 'PaperTrail::Version' # PaperTrail >= 3.0.0 ### More at https://github.com/sferik/rails_admin/wiki/Base-configuration config.authenticate_with do warden.authenticate! scope: :user unless %w(dashboard shared_collection_index index show).include?(action_name) end config.current_user_method { current_user } config.audit_with :mongoid_audit config.authorize_with :cancan config.excluded_models += [Setup::BaseOauthAuthorization, Setup::AwsAuthorization] config.actions do dashboard # mandatory # memory_usage # disk_usage shared_collection_index index # mandatory new { except [Setup::Event, Setup::DataType, Setup::Authorization, Setup::BaseOauthProvider] } import import_schema translator_update convert export bulk_delete show run edit configure copy simple_share bulk_share cross_share build_gem pull download_file load_model shutdown_model process_flow authorize simple_generate bulk_generate simple_expand bulk_expand records switch_navigation switch_scheduler simple_export schedule retry_task submit inspect cancel regist simple_delete_data_type bulk_delete_data_type delete delete_collection #show_in_app send_to_flow delete_all data_type # history_index do # only [Setup::DataType, Setup::Webhook, Setup::Flow, Setup::Schema, Setup::Event, Setup::Connection, Setup::ConnectionRole] # end 
# history_show do # only [Setup::DataType, Setup::Webhook, Setup::Flow, Setup::Schema, Setup::Event, Setup::Connection, Setup::ConnectionRole, Setup::Notification] # end end #Collections config.navigation 'Collections', icon: 'fa fa-cubes' config.model Setup::SharedCollection do weight -600 label 'Shared Collection' register_instance_option(:discard_submit_buttons) do !(a = bindings[:action]) || a.key != :edit end navigation_label 'Collections' object_label_method { :versioned_name } public_access true extra_associations do Setup::Collection.reflect_on_all_associations(:has_and_belongs_to_many).collect do |association| association = association.dup association[:name] = "data_#{association.name}".to_sym RailsAdmin::Adapters::Mongoid::Association.new(association, abstract_model.model) end end index_template_name :shared_collection_grid index_link_icon 'icon-th-large' group :collections group :workflows group :api_connectors do label 'API Connectors' active true end group :data group :security edit do field :image do visible { !bindings[:object].instance_variable_get(:@sharing) } end field :logo_background field :name do required { true } end field :shared_version do required { true } end field :authors field :summary field :source_collection do visible { !((source_collection = bindings[:object].source_collection) && source_collection.new_record?) } inline_edit false inline_add false associated_collection_scope do source_collection = (obj = bindings[:object]).source_collection Proc.new { |scope| if obj.new_record? scope.where(id: source_collection ? source_collection.id : nil) else scope end } end end field :connections do inline_add false read_only do !((v = bindings[:object].instance_variable_get(:@_selecting_connections)).nil? || v) end help do nil end pretty_value do if bindings[:object].connections.present? 
v = bindings[:view] ids = '' [value].flatten.select(&:present?).collect do |associated| ids += "<option value=#{associated.id} selected=true/>" amc = polymorphic? ? RailsAdmin.config(associated) : associated_model_config am = amc.abstract_model wording = associated.send(amc.object_label_method) can_see = !am.embedded? && (show_action = v.action(:show, am, associated)) can_see ? v.link_to(wording, v.url_for(action: show_action.action_name, model_name: am.to_param, id: associated.id), class: 'pjax') : wording end.to_sentence.html_safe + v.select_tag("#{bindings[:controller].instance_variable_get(:@model_config).abstract_model.param_key}[connection_ids][]", ids.html_safe, multiple: true, style: 'display:none').html_safe else 'No connection selected'.html_safe end end visible do !(obj = bindings[:object]).instance_variable_get(:@_selecting_collection) && obj.source_collection && obj.source_collection.connections.present? end associated_collection_scope do source_collection = bindings[:object].source_collection connections = (source_collection && source_collection.connections) || [] Proc.new { |scope| scope.any_in(id: connections.collect { |connection| connection.id }) } end end field :dependencies do inline_add false read_only do !((v = bindings[:object].instance_variable_get(:@_selecting_dependencies)).nil? || v) end help do nil end pretty_value do if bindings[:object].dependencies.present? v = bindings[:view] ids = '' [value].flatten.select(&:present?).collect do |associated| ids += "<option value=#{associated.id} selected=true/>" amc = polymorphic? ? RailsAdmin.config(associated) : associated_model_config am = amc.abstract_model wording = associated.send(amc.object_label_method) can_see = !am.embedded? && (show_action = v.action(:show, am, associated)) can_see ? 
v.link_to(wording, v.url_for(action: show_action.action_name, model_name: am.to_param, id: associated.id), class: 'pjax') : wording end.to_sentence.html_safe + v.select_tag("#{bindings[:controller].instance_variable_get(:@model_config).abstract_model.param_key}[dependency_ids][]", ids.html_safe, multiple: true, style: 'display:none').html_safe else 'No dependencies selected'.html_safe end end visible do !(obj = bindings[:object]).instance_variable_get(:@_selecting_collection) end end field :pull_parameters do visible do if !(obj = bindings[:object]).instance_variable_get(:@_selecting_collection) && !obj.instance_variable_get(:@_selecting_connections) && (pull_parameters_enum = obj.enum_for_pull_parameters).present? bindings[:controller].instance_variable_set(:@shared_parameter_enum, pull_parameters_enum) true else false end end end field :pull_count do visible { Account.current_super_admin? } end field :readme do visible do !(obj = bindings[:object]).instance_variable_get(:@_selecting_collection) && !obj.instance_variable_get(:@_selecting_connections) end end end show do field :image field :name do pretty_value do bindings[:object].versioned_name end end field :summary do pretty_value do value.html_safe end end field :readme do pretty_value do begin template = value.gsub('&lt;%', '<%').gsub('%&gt;', '%>').gsub('%3C%', '<%').gsub('%%3E', '%>') Setup::Transformation::ActionViewTransform.run(transformation: template, style: 'html.erb', base_url: bindings[:controller].request.base_url, user_key: User.current_number, user_token: User.current_token, collection: nil, shared_collection: bindings[:object]) rescue Exception => ex value end.html_safe end end field :authors field :dependencies field :pull_count field :data_namespaces do group :collections label 'Namespaces' list_fields do %w(name slug) end end field :data_flows do group :workflows label 'Flows' list_fields do %w(namespace name) #TODO Inlude a description field on Flow model end end field :data_translators do 
group :workflows label 'Translators' list_fields do %w(namespace name type style) end end field :data_events do group :workflows label 'Events' list_fields do %w(namespace name _type) end end field :data_algorithms do group :workflows label 'Algorithms' list_fields do %w(namespace name description) end end field :data_connection_roles do group :api_connectors label 'Connection roles' list_fields do %w(namespace name) end end field :data_webhooks do group :api_connectors label 'Webhooks' list_fields do %w(namespace name path method description) end end field :data_connections do group :api_connectors label 'Connections' list_fields do %w(namespace name url) end end field :data_data_types do group :data label 'Data types' list_fields do %w(title name slug _type) end end field :data_schemas do group :data label 'Schemas' list_fields do %w(namespace uri) end end field :data_custom_validators do group :data label 'Custom validators' list_fields do %w(namespace name _type) #TODO Include a description field for Custom Validator model end end # field :data_data TODO Include collection data field field :data_authorizations do group :security label 'Authorizations' list_fields do %w(namespace name _type) end end field :data_oauth_providers do group :security label 'OAuth providers' list_fields do %w(namespace name response_type authorization_endpoint token_endpoint token_method _type) end end field :data_oauth_clients do group :security label 'OAuth clients' list_fields do %w(provider name) end end field :data_oauth2_scopes do group :security label 'OAuth 2.0 scopes' list_fields do %w(provider name description) end end field :_id field :created_at field :updated_at end list do field :image do thumb_method :icon end field :name do pretty_value do bindings[:object].versioned_name end end field :authors field :summary field :pull_count field :dependencies end end config.model Setup::CollectionAuthor do visible false object_label_method { :label } fields :name, :email end 
config.model Setup::CollectionPullParameter do visible false object_label_method { :label } field :label field :parameter, :enum do enum do bindings[:controller].instance_variable_get(:@shared_parameter_enum) || [bindings[:object].parameter] end end edit do field :label field :parameter end show do field :label field :parameter field :created_at #field :creator field :updated_at end list do field :label field :parameter field :created_at field :updated_at end fields :label, :parameter end config.model Setup::CollectionData do visible false object_label_method { :label } end config.model Setup::Collection do navigation_label 'Collections' register_instance_option :label_navigation do 'My Collections' end group :workflows configure :flows do group :workflows end configure :events do group :workflows end configure :translators do group :workflows end configure :algorithms do group :workflows end configure :applications do group :workflows end group :api_connectors do label 'API Connectors' active true end configure :connections do group :api_connectors end configure :webhooks do group :api_connectors end configure :connection_roles do group :api_connectors end group :data configure :data_types do group :data end configure :schemas do group :data end configure :data do group :data end configure :custom_validators do group :data end group :security configure :authorizations do group :security end configure :oauth_providers do group :security end configure :oauth_clients do group :security end configure :oauth2_scopes do group :security end edit do field :image field :readme do visible { Account.current_super_admin? 
} end field :name field :flows field :connection_roles field :translators field :events field :data_types field :schemas field :custom_validators field :algorithms field :applications field :webhooks field :connections field :authorizations field :oauth_providers field :oauth_clients field :oauth2_scopes field :data end show do field :image field :readme do pretty_value do begin template = value.gsub('&lt;%', '<%').gsub('%&gt;', '%>').gsub('%3C%', '<%').gsub('%%3E', '%>') Setup::Transformation::ActionViewTransform.run(transformation: template, style: 'html.erb', base_url: bindings[:controller].request.base_url, user_key: User.current.number, user_token: User.current.token, collection: bindings[:object], shared_collection: nil) rescue Exception => ex value end.html_safe end end field :name field :flows field :connection_roles field :translators field :events field :data_types field :schemas field :custom_validators field :algorithms field :applications field :webhooks field :connections field :authorizations field :oauth_providers field :oauth_clients field :oauth2_scopes field :data field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :image do thumb_method :icon end field :name field :flows do pretty_value do value.count > 0 ? value.count : '-' end end field :connection_roles do pretty_value do value.count > 0 ? value.count : '-' end end field :translators do pretty_value do value.count > 0 ? value.count : '-' end end field :events do pretty_value do value.count > 0 ? value.count : '-' end end field :data_types do pretty_value do value.count > 0 ? value.count : '-' end end field :schemas do pretty_value do value.count > 0 ? value.count : '-' end end field :custom_validators do pretty_value do value.count > 0 ? value.count : '-' end end field :algorithms do pretty_value do value.count > 0 ? value.count : '-' end end field :applications do pretty_value do value.count > 0 ? 
value.count : '-' end end field :webhooks do pretty_value do value.count > 0 ? value.count : '-' end end field :connections do pretty_value do value.count > 0 ? value.count : '-' end end field :authorizations do pretty_value do value.count > 0 ? value.count : '-' end end field :oauth_providers do pretty_value do value.count > 0 ? value.count : '-' end end field :oauth_clients do pretty_value do value.count > 0 ? value.count : '-' end end field :oauth2_scopes do pretty_value do value.count > 0 ? value.count : '-' end end field :data field :created_at field :updated_at end end config.model Setup::Namespace do navigation_label 'Collections' list do field :name field :slug field :created_at field :updated_at end fields :name, :slug end #Data config.navigation 'Data', icon: 'fa fa-database' config.model Setup::DataType do navigation_label 'Data' weight -450 label 'Data type' label_plural 'Data types' object_label_method { :custom_title } visible true show_in_dashboard false configure :_type do pretty_value do value.split('::').last.to_title end end group :behavior do label 'Behavior' active false end configure :namespace, :enum_edit configure :title do pretty_value do bindings[:object].custom_title end end configure :slug configure :storage_size, :decimal do pretty_value do if objects = bindings[:controller].instance_variable_get(:@objects) unless max = bindings[:controller].instance_variable_get(:@max_storage_size) bindings[:controller].instance_variable_set(:@max_storage_size, max = objects.collect { |data_type| data_type.storage_size }.max) end (bindings[:view].render partial: 'used_memory_bar', locals: { max: max, value: bindings[:object].records_model.storage_size }).html_safe else bindings[:view].number_to_human_size(value) end end read_only true end configure :before_save_callbacks do group :behavior inline_add false associated_collection_scope do Proc.new { |scope| scope.where(:parameters.with_size => 1) } end end configure :records_methods do group :behavior 
inline_add false end configure :data_type_methods do group :behavior inline_add false end edit do field :title field :before_save_callbacks field :records_methods field :data_type_methods end list do field :title field :name field :slug field :_type field :used_memory do visible { Cenit.dynamic_model_loading? } pretty_value do unless max = bindings[:controller].instance_variable_get(:@max_used_memory) bindings[:controller].instance_variable_set(:@max_used_memory, max = Setup::DataType.fields[:used_memory.to_s].type.new(Setup::DataType.max(:used_memory))) end (bindings[:view].render partial: 'used_memory_bar', locals: { max: max, value: Setup::DataType.fields[:used_memory.to_s].type.new(value) }).html_safe end end field :storage_size field :created_at field :updated_at end show do field :title field :name field :slug field :_type field :storage_size field :activated field :schema do pretty_value do v = if json = JSON.pretty_generate(value) rescue nil "<code class='json'>#{json.gsub('<', '&lt;').gsub('>', '&gt;')}</code>" else value end "<pre>#{v}</pre>".html_safe end end field :_id field :created_at #field :creator field :updated_at #field :updater end fields :namespace, :title, :name, :used_memory end config.model Setup::JsonDataType do navigation_label 'Data' weight -449 label 'JSON Data Type' object_label_method { :custom_title } register_instance_option(:after_form_partials) do %w(shutdown_and_reload) end group :behavior do label 'Behavior' active false end configure :title configure :name do read_only { !bindings[:object].new_record? 
} end configure :schema, :code_mirror do html_attributes do report = bindings[:object].shutdown(report_only: true) reload = (report[:reloaded].collect(&:data_type) + report[:destroyed].collect(&:data_type)).uniq bindings[:object].instance_variable_set(:@_to_reload, reload) { cols: '74', rows: '15' } end # pretty_value do # "<pre><code class='json'>#{JSON.pretty_generate(value)}</code></pre>".html_safe # end end configure :storage_size, :decimal do pretty_value do if objects = bindings[:controller].instance_variable_get(:@objects) unless max = bindings[:controller].instance_variable_get(:@max_storage_size) bindings[:controller].instance_variable_set(:@max_storage_size, max = objects.collect { |data_type| data_type.storage_size }.max) end (bindings[:view].render partial: 'used_memory_bar', locals: { max: max, value: bindings[:object].records_model.storage_size }).html_safe else bindings[:view].number_to_human_size(value) end end read_only true end configure :before_save_callbacks do group :behavior inline_add false associated_collection_scope do Proc.new { |scope| scope.where(:parameters.with_size => 1) } end end configure :records_methods do group :behavior inline_add false end configure :data_type_methods do group :behavior inline_add false end edit do field :namespace, :enum_edit field :title field :name field :slug field :schema, :json_schema do help { 'Required' } end field :before_save_callbacks field :records_methods field :data_type_methods end list do field :namespace field :title field :name field :slug field :used_memory do visible { Cenit.dynamic_model_loading? 
} pretty_value do unless max = bindings[:controller].instance_variable_get(:@max_used_memory) bindings[:controller].instance_variable_set(:@max_used_memory, max = Setup::JsonDataType.fields[:used_memory.to_s].type.new(Setup::JsonDataType.max(:used_memory))) end (bindings[:view].render partial: 'used_memory_bar', locals: { max: max, value: Setup::JsonDataType.fields[:used_memory.to_s].type.new(value) }).html_safe end end field :storage_size field :created_at field :updated_at end show do field :namespace field :title field :name field :slug field :storage_size field :activated field :schema do pretty_value do "<pre><code class='ruby'>#{JSON.pretty_generate(value)}</code></pre>".html_safe end end field :before_save_callbacks field :records_methods field :data_type_methods field :_id field :created_at #field :creator field :updated_at #field :updater end end config.model Setup::FileDataType do navigation_label 'Data' weight -448 object_label_method { :custom_title } configure :namespace, :enum_edit group :content do label 'Content' end group :behavior do label 'Behavior' active false end configure :title do pretty_value do bindings[:object].custom_title end end configure :used_memory do pretty_value do unless max = bindings[:controller].instance_variable_get(:@max_used_memory) bindings[:controller].instance_variable_set(:@max_used_memory, max = Setup::JsonDataType.fields[:used_memory.to_s].type.new(Setup::JsonDataType.max(:used_memory))) end (bindings[:view].render partial: 'used_memory_bar', locals: { max: max, value: Setup::JsonDataType.fields[:used_memory.to_s].type.new(value) }).html_safe end end configure :storage_size, :decimal do pretty_value do if objects = bindings[:controller].instance_variable_get(:@objects) unless max = bindings[:controller].instance_variable_get(:@max_storage_size) bindings[:controller].instance_variable_set(:@max_storage_size, max = objects.collect { |data_type| data_type.records_model.storage_size }.max) end (bindings[:view].render 
partial: 'used_memory_bar', locals: { max: max, value: bindings[:object].records_model.storage_size }).html_safe else bindings[:view].number_to_human_size(value) end end read_only true end configure :validators do group :content inline_add false end configure :schema_data_type do group :content inline_add false inline_edit false end configure :before_save_callbacks do group :behavior inline_add false associated_collection_scope do Proc.new { |scope| scope.where(:parameters.with_size => 1) } end end configure :records_methods do group :behavior inline_add false end configure :data_type_methods do group :behavior inline_add false end edit do field :namespace field :title field :name field :slug field :validators field :schema_data_type field :before_save_callbacks field :records_methods field :data_type_methods end list do field :title field :name field :slug field :validators field :schema_data_type field :used_memory do visible { Cenit.dynamic_model_loading? } pretty_value do unless max = bindings[:controller].instance_variable_get(:@max_used_memory) bindings[:controller].instance_variable_set(:@max_used_memory, max = Setup::JsonDataType.fields[:used_memory.to_s].type.new(Setup::JsonDataType.max(:used_memory))) end (bindings[:view].render partial: 'used_memory_bar', locals: { max: max, value: Setup::JsonDataType.fields[:used_memory.to_s].type.new(value) }).html_safe end end field :storage_size field :created_at field :updated_at end show do field :title field :name field :slug field :activated field :validators field :storage_size field :schema_data_type field :_id field :created_at #field :creator field :updated_at #field :updater end end config.model Setup::Validator do navigation_label 'Data' label 'Schemas & Validators' weight -490 configure :namespace, :enum_edit fields :namespace, :name list do field :namespace field :name field :created_at field :updated_at end show_in_dashboard { false } end config.model Setup::CustomValidator do visible false configure 
:_type do pretty_value do value.split('::').last.to_title end end list do field :namespace field :name field :_type field :created_at field :updated_at end fields :namespace, :name, :_type end config.model Setup::Schema do parent Setup::Validator weight -489 object_label_method { :custom_title } configure :namespace, :enum_edit edit do field :namespace do read_only { !bindings[:object].new_record? } end field :uri do read_only { !bindings[:object].new_record? } html_attributes do { cols: '74', rows: '1' } end end field :schema, :code_mirror do html_attributes do { cols: '74', rows: '15' } end end field :schema_data_type do inline_edit false inline_add false end end show do field :namespace field :uri field :schema do pretty_value do v = if json = JSON.parse(value) rescue nil "<code class='json'>#{JSON.pretty_generate(json).gsub('<', '&lt;').gsub('>', '&gt;')}</code>" elsif (xml = Nokogiri::XML(value)).errors.blank? "<code class='xml'>#{xml.to_xml.gsub('<', '&lt;').gsub('>', '&gt;')}</code>" else "<code>#{value}</code>" end "<pre>#{v}</pre>".html_safe end end field :schema_data_type field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :namespace field :uri field :schema_data_type field :created_at field :updated_at end fields :namespace, :uri, :schema_data_type end config.model Setup::XsltValidator do parent Setup::Validator weight -488 object_label_method { :custom_title } list do field :namespace field :xslt field :created_at field :updated_at end fields :namespace, :name, :xslt end config.model Setup::EdiValidator do parent Setup::Validator weight -487 object_label_method { :custom_title } label 'EDI Validators' configure :namespace, :enum_edit list do field :namespace field :name field :schema_data_type field :content_type field :created_at field :updated_at end fields :namespace, :name, :schema_data_type, :content_type end config.model Setup::AlgorithmValidator do parent Setup::Validator weight -486 
object_label_method { :custom_title } configure :namespace, :enum_edit list do field :namespace field :name field :algorithm field :created_at field :updated_at end fields :namespace, :name, :algorithm end #API Connectors config.navigation 'API Connectors', icon: :api_connectors config.model Setup::Parameter do visible false object_label_method { :to_s } configure :metadata, :json_value edit do field :name field :value field :description field :metadata end list do field :name field :value field :description field :metadata field :created_at field :updated_at end end config.model Setup::Connection do navigation_label 'API Connectors' weight -400 object_label_method { :custom_title } configure :namespace, :enum_edit group :credentials do label 'Credentials' end configure :key, :string do visible { User.current_admin? } html_attributes do { maxlength: 30, size: 30 } end group :credentials end configure :token, :text do visible { User.current_admin? } html_attributes do { cols: '50', rows: '1' } end group :credentials end configure :authorization do group :credentials inline_edit false visible { User.current_admin? } end configure :authorization_handler do group :credentials visible { User.current_admin? } end group :parameters do label 'Parameters & Headers' end configure :parameters do group :parameters visible { User.current_admin? } end configure :headers do group :parameters visible { User.current_admin? } end configure :template_parameters do group :parameters visible { User.current_admin? 
} end edit do field :namespace field :name field :url field :key field :token field :authorization field :authorization_handler field :parameters field :headers field :template_parameters end show do field :namespace field :name field :url field :key field :token field :authorization field :authorization_handler field :parameters field :headers field :template_parameters field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :namespace field :name field :url field :key field :token field :authorization field :created_at field :updated_at end fields :namespace, :name, :url, :key, :token, :authorization end config.model Setup::ConnectionRole do navigation_label 'API Connectors' weight -309 object_label_method { :custom_title } configure :namespace, :enum_edit configure :name, :string do help 'Requiered.' html_attributes do { maxlength: 50, size: 50 } end end configure :webhooks do nested_form false end configure :connections do nested_form false end modal do field :namespace field :name field :webhooks field :connections end show do field :namespace field :name field :webhooks field :connections field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :namespace field :name field :webhooks field :connections field :created_at field :updated_at end fields :namespace, :name, :webhooks, :connections end config.model Setup::Webhook do navigation_label 'API Connectors' weight -308 object_label_method { :custom_title } configure :namespace, :enum_edit configure :metadata, :json_value group :credentials do label 'Credentials' end configure :authorization do group :credentials inline_edit false visible { User.current_admin? } end configure :authorization_handler do group :credentials visible { User.current_admin? } end group :parameters do label 'Parameters & Headers' end configure :path, :string do help 'Requiered. Path of the webhook relative to connection URL.' 
html_attributes do { maxlength: 255, size: 100 } end end configure :parameters do group :parameters end configure :headers do group :parameters end configure :template_parameters do group :parameters end edit do field :namespace field :name field :path field :method field :description field :metadata, :json_value field :authorization field :authorization_handler field :parameters field :headers field :template_parameters end show do field :namespace field :name field :path field :method field :description field :metadata, :json_value field :authorization field :authorization_handler field :parameters field :headers field :template_parameters field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :namespace field :name field :path field :method field :description field :authorization field :created_at field :updated_at end fields :namespace, :name, :path, :method, :description, :authorization end #Workflows config.navigation 'Workflows', icon: 'fa fa-cogs' config.model Setup::Flow do navigation_label 'Workflows' weight -300 object_label_method { :custom_title } register_instance_option(:form_synchronized) do [:custom_data_type, :data_type_scope, :scope_filter, :scope_evaluator, :lot_size, :connection_role, :webhook, :response_translator, :response_data_type] end configure :namespace, :enum_edit edit do field :namespace field :name field :event do inline_edit false inline_add false end field :translator do help 'Required' end field :custom_data_type do inline_edit false inline_add false visible do if (f = bindings[:object]).custom_data_type.present? f.nil_data_type = false end if f.translator.present? && f.translator.data_type.nil? 
&& !f.nil_data_type f.instance_variable_set(:@selecting_data_type, f.custom_data_type = f.event && f.event.try(:data_type)) unless f.data_type f.nil_data_type = f.translator.type == :Export && (params = (controller = bindings[:controller]).params) && (params = params[controller.abstract_model.param_key]) && params[:custom_data_type_id].blank? && params.keys.include?(:custom_data_type_id.to_s) true else false end end label do if (translator = bindings[:object].translator) if [:Export, :Conversion].include?(translator.type) 'Source data type' else 'Target data type' end else 'Data type' end end help do if bindings[:object].nil_data_type '' elsif (translator = bindings[:object].translator) && [:Export, :Conversion].include?(translator.type) 'Optional' else 'Required' end end end field :nil_data_type do visible { bindings[:object].nil_data_type } label do if (translator = bindings[:object].translator) if [:Export, :Conversion].include?(translator.type) 'No source data type' else 'No target data type' end else 'No data type' end end end field :data_type_scope do visible do bindings[:controller].instance_variable_set(:@_data_type, bindings[:object].data_type) bindings[:controller].instance_variable_set(:@_update_field, 'translator_id') (f = bindings[:object]).translator.present? 
&& f.translator.type != :Import && f.data_type && !f.instance_variable_get(:@selecting_data_type) end label do if (translator = bindings[:object].translator) if [:Export, :Conversion].include?(translator.type) 'Source scope' else 'Target scope' end else 'Data type scope' end end help 'Required' end field :scope_filter do visible { bindings[:object].scope_symbol == :filtered } partial 'form_triggers' help false end field :scope_evaluator do inline_add false inline_edit false visible { bindings[:object].scope_symbol == :evaluation } associated_collection_scope do Proc.new { |scope| scope.where(:parameters.with_size => 1) } end help 'Required' end field :lot_size do visible { (f = bindings[:object]).translator.present? && f.translator.type == :Export && !f.nil_data_type && f.data_type_scope && f.scope_symbol != :event_source } end field :webhook do visible { (translator = (f = bindings[:object]).translator) && (translator.type == :Import || (translator.type == :Export && (bindings[:object].data_type_scope.present? || f.nil_data_type))) } help 'Required' end field :connection_role do visible { (translator = (f = bindings[:object]).translator) && (translator.type == :Import || (translator.type == :Export && (bindings[:object].data_type_scope.present? || f.nil_data_type))) } help 'Optional' end field :response_translator do visible { (translator = (f = bindings[:object]).translator) && (translator.type == :Export && (bindings[:object].data_type_scope.present? || f.nil_data_type)) && f.ready_to_save? } associated_collection_scope do Proc.new { |scope| scope.where(type: :Import) } end end field :response_data_type do inline_edit false inline_add false visible { (response_translator = bindings[:object].response_translator) && response_translator.type == :Import && response_translator.data_type.nil? } help '' end field :discard_events do visible { (((obj = bindings[:object]).translator && obj.translator.type == :Import) || obj.response_translator.present?) 
&& obj.ready_to_save? } help "Events won't be fired for created or updated records if checked" end field :active do visible { bindings[:object].ready_to_save? } end field :notify_request do visible { (obj = bindings[:object]).translator && [:Import, :Export].include?(obj.translator.type) && obj.ready_to_save? } help 'Track request via notifications if checked' end field :notify_response do visible { (obj = bindings[:object]).translator && [:Import, :Export].include?(obj.translator.type) && obj.ready_to_save? } help 'Track responses via notification if checked' end field :after_process_callbacks do visible { bindings[:object].ready_to_save? } help 'Algorithms executed after flow processing, execution state is supplied as argument' associated_collection_scope do Proc.new { |scope| scope.where(:parameters.with_size => 1) } end end end show do field :namespace field :name field :active field :event field :translator field :custom_data_type field :data_type_scope field :scope_filter field :scope_evaluator field :lot_size field :webhook field :connection_role field :response_translator field :response_data_type field :discard_events field :notify_request field :notify_response field :after_process_callbacks field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :namespace field :name field :active field :event field :translator field :created_at field :updated_at end fields :namespace, :name, :active, :event, :translator end config.model Setup::Event do navigation_label 'Workflows' weight -209 object_label_method { :custom_title } visible false configure :namespace, :enum_edit configure :_type do pretty_value do value.split('::').last.to_title end end edit do field :namespace field :name end show do field :namespace field :name field :_type field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :namespace field :name field :_type field :created_at field :updated_at end fields :namespace, 
:name, :_type end config.model Setup::Observer do navigation_label 'Workflows' weight -208 label 'Data event' object_label_method { :custom_title } configure :namespace, :enum_edit edit do field :namespace field :name field :data_type do inline_add false inline_edit false associated_collection_scope do data_type = bindings[:object].data_type Proc.new { |scope| if data_type scope.where(id: data_type.id) else scope end } end help 'Required' end field :trigger_evaluator do visible { (obj = bindings[:object]).data_type.blank? || obj.trigger_evaluator.present? || obj.triggers.nil? } associated_collection_scope do Proc.new { |scope| scope.all.or(:parameters.with_size => 1).or(:parameters.with_size => 2) } end end field :triggers do visible do bindings[:controller].instance_variable_set(:@_data_type, data_type = bindings[:object].data_type) bindings[:controller].instance_variable_set(:@_update_field, 'data_type_id') data_type.present? && !bindings[:object].trigger_evaluator.present? end partial 'form_triggers' help false end end show do field :namespace field :name field :data_type field :triggers field :trigger_evaluator field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :namespace field :name field :data_type field :triggers field :trigger_evaluator field :created_at field :updated_at end fields :namespace, :name, :data_type, :triggers, :trigger_evaluator end config.model Setup::Scheduler do navigation_label 'Workflows' weight -207 object_label_method { :custom_title } configure :namespace, :enum_edit edit do field :namespace field :name field :scheduling_method field :expression do visible { bindings[:object].scheduling_method.present? 
} label do case bindings[:object].scheduling_method when :Once 'Date and time' when :Periodic 'Duration' when :CRON 'CRON Expression' else 'Expression' end end help do case bindings[:object].scheduling_method when :Once 'Select a date and a time' when :Periodic 'Type a time duration' when :CRON 'Type a CRON Expression' else 'Expression' end end partial { bindings[:object].scheduling_method == :Once ? 'form_datetime_wrapper' : 'form_text' } html_attributes do { rows: '1' } end end end show do field :namespace field :name field :expression field :origin field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :namespace field :name field :scheduling_method field :expression field :activated field :origin field :created_at field :updated_at end fields :namespace, :name, :scheduling_method, :expression, :activated, :origin end config.model Setup::AlgorithmParameter do visible false fields :name, :description end config.model Setup::CallLink do visible false edit do field :name do read_only true help { nil } label 'Call name' end field :link do inline_add false inline_edit false help { nil } end end fields :name, :link end config.model Setup::Translator do navigation_label 'Workflows' weight -206 object_label_method { :custom_title } register_instance_option(:form_synchronized) do [:source_data_type, :target_data_type, :transformation, :target_importer, :source_exporter, :discard_chained_records] end configure :namespace, :enum_edit edit do field :namespace field :name field :type field :source_data_type do inline_edit false inline_add false visible { [:Export, :Conversion].include?(bindings[:object].type) } help { bindings[:object].type == :Conversion ? 'Required' : 'Optional' } end field :target_data_type do inline_edit false inline_add false visible { [:Import, :Update, :Conversion].include?(bindings[:object].type) } help { bindings[:object].type == :Conversion ? 
'Required' : 'Optional' } end field :discard_events do visible { [:Import, :Update, :Conversion].include?(bindings[:object].type) } help "Events won't be fired for created or updated records if checked" end field :style do visible { bindings[:object].type.present? } help 'Required' end field :bulk_source do visible { bindings[:object].type == :Export && bindings[:object].style.present? && bindings[:object].source_bulkable? } end field :mime_type do label 'MIME type' visible { bindings[:object].type == :Export && bindings[:object].style.present? } end field :file_extension do visible { bindings[:object].type == :Export && !bindings[:object].file_extension_enum.empty? } help { "Extensions for #{bindings[:object].mime_type}" } end field :source_handler do visible { (t = bindings[:object]).style.present? && (t.type == :Update || (t.type == :Conversion && t.style == 'ruby')) } help { 'Handle sources on transformation' } end field :transformation, :code_mirror do visible { bindings[:object].style.present? && bindings[:object].style != 'chain' } help { 'Required' } html_attributes do { cols: '74', rows: '15' } end end field :source_exporter do inline_add { bindings[:object].source_exporter.nil? } visible { bindings[:object].style == 'chain' && bindings[:object].source_data_type && bindings[:object].target_data_type } help 'Required' associated_collection_scope do data_type = bindings[:object].source_data_type Proc.new { |scope| scope.all(type: :Conversion, source_data_type: data_type) } end end field :target_importer do inline_add { bindings[:object].target_importer.nil? 
} visible { bindings[:object].style == 'chain' && bindings[:object].source_data_type && bindings[:object].target_data_type && bindings[:object].source_exporter } help 'Required' associated_collection_scope do translator = bindings[:object] source_data_type = if translator.source_exporter translator.source_exporter.target_data_type else translator.source_data_type end target_data_type = bindings[:object].target_data_type Proc.new { |scope| scope = scope.all(type: :Conversion, source_data_type: source_data_type, target_data_type: target_data_type) } end end field :discard_chained_records do visible { bindings[:object].style == 'chain' && bindings[:object].source_data_type && bindings[:object].target_data_type && bindings[:object].source_exporter } help "Chained records won't be saved if checked" end end show do field :namespace field :name field :type field :source_data_type field :bulk_source field :target_data_type field :discard_events field :style field :mime_type field :file_extension field :transformation do pretty_value do "<pre><code class='ruby'>#{value}</code></pre>".html_safe end end field :source_exporter field :target_importer field :discard_chained_records field :_id field :created_at #field :creator field :updated_at #field :updater end list do field :namespace field :name field :type field :style field :mime_type field :file_extension field :transformation field :created_at field :updated_at end fields :namespace, :name, :type, :style, :transformation end config.model Setup::Algorithm do navigation_label 'Workflows' weight -205 object_label_method { :custom_title } configure :namespace, :enum_edit edit do field :namespace field :name field :description field :parameters field :code, :code_mirror do help { 'Required' } end field :call_links do visible { bindings[:object].call_links.present? 
} end end show do field :namespace field :name field :description field :parameters field :code do pretty_value do v = value.gsub('<', '&lt;').gsub('>', '&gt;') "<pre><code class='ruby'>#{v}</code></pre>".html_safe end end field :call_links field :_id end list do field :namespace field :name field :description field :parameters field :call_links field :created_at field :updated_at end fields :namespace, :name, :description, :parameters, :call_links end config.model Setup::Action do visible false navigation_label 'Workflows' weight -202 object_label_method { :to_s } fields :method, :path, :algorithm end config.model Setup::Application do navigation_label 'Workflows' weight -201 object_label_method { :custom_title } visible { Account.current_super_admin? } configure :namespace, :enum_edit configure :identifier edit do field :namespace field :name field :slug field :actions field :application_parameters end list do field :namespace field :name field :slug field :identifier field :secret_token field :actions field :application_parameters field :created_at field :updated_at end fields :namespace, :name, :slug, :identifier, :secret_token, :actions, :application_parameters end config.model Setup::ApplicationParameter do visible false navigation_label 'Workflows' configure :group, :enum_edit list do field :name field :type field :many field :group field :description field :created_at field :updated_at end fields :name, :type, :many, :group, :description end #Security config.navigation 'Security', icon: 'fa fa-shield' config.model Setup::OauthClient do navigation_label 'Security' label 'OAuth client' weight -100 object_label_method { :custom_title } configure :tenant do visible { Account.current_super_admin? } read_only { true } help '' end configure :origin do visible { Account.current_super_admin? 
} end configure :identifier do pretty_value do if Account.current.id == bindings[:object].tenant_id value else '<i class="icon-lock"/>'.html_safe end end end configure :secret do pretty_value do if Account.current && Account.current.id == bindings[:object].tenant_id value else '<i class="icon-lock"/>'.html_safe end end end list do field :provider field :name field :identifier field :secret field :tenant field :origin field :created_at field :updated_at end fields :provider, :name, :identifier, :secret, :tenant, :origin end config.model Setup::BaseOauthProvider do navigation_label 'Security' weight -90 object_label_method { :custom_title } label 'Provider' configure :_type do pretty_value do value.split('::').last.to_title end end configure :tenant do visible { Account.current_super_admin? } read_only { true } help '' end configure :origin do visible { Account.current_super_admin? } end configure :namespace, :enum_edit list do field :namespace field :name field :_type field :response_type field :authorization_endpoint field :token_endpoint field :token_method field :tenant field :origin field :created_at field :updated_at end fields :namespace, :name, :_type, :response_type, :authorization_endpoint, :token_endpoint, :token_method, :tenant, :origin end config.model Setup::OauthProvider do weight -89 label 'OAuth 1.0 provider' register_instance_option :label_navigation do 'OAuth 1.0' end object_label_method { :custom_title } configure :tenant do visible { Account.current_super_admin? } read_only { true } help '' end configure :origin do visible { Account.current_super_admin? 
} end configure :namespace, :enum_edit configure :refresh_token_algorithm do visible { bindings[:object].refresh_token_strategy == :custom.to_s } end list do field :namespace field :name field :response_type field :authorization_endpoint field :token_endpoint field :token_method field :request_token_endpoint field :refresh_token_strategy field :refresh_token_algorithm field :tenant field :origin field :created_at field :updated_at end fields :namespace, :name, :response_type, :authorization_endpoint, :token_endpoint, :token_method, :request_token_endpoint, :refresh_token_strategy, :refresh_token_algorithm, :tenant, :origin end config.model Setup::Oauth2Provider do weight -88 label 'OAuth 2.0 provider' register_instance_option :label_navigation do 'OAuth 2.0' end object_label_method { :custom_title } configure :tenant do visible { Account.current_super_admin? } read_only { true } help '' end configure :origin do visible { Account.current_super_admin? } end configure :refresh_token_algorithm do visible { bindings[:object].refresh_token_strategy == :custom.to_s } end configure :namespace, :enum_edit list do field :namespace field :name field :response_type field :authorization_endpoint field :token_endpoint field :token_method field :scope_separator field :request_token_endpoint field :refresh_token_strategy field :refresh_token_algorithm field :tenant field :origin field :created_at field :updated_at end fields :namespace, :name, :response_type, :authorization_endpoint, :token_endpoint, :token_method, :scope_separator, :refresh_token_strategy, :refresh_token_algorithm, :tenant, :origin end config.model Setup::Oauth2Scope do navigation_label 'Security' weight -87 label 'OAuth 2.0 scope' object_label_method { :custom_title } configure :tenant do visible { Account.current_super_admin? } read_only { true } help '' end configure :origin do visible { Account.current_super_admin? 
} end list do field :provider field :name field :description field :tenant field :origin field :created_at field :updated_at end fields :provider, :name, :description, :tenant, :origin end config.model Setup::Authorization do navigation_label 'Security' weight -50 object_label_method { :custom_title } configure :status do pretty_value do "<span class=\"label label-#{bindings[:object].status_class}\">#{value.to_s.capitalize}</span>".html_safe end end configure :namespace, :enum_edit configure :metadata, :json_value configure :_type do pretty_value do value.split('::').last.to_title end end list do field :namespace field :name field :status field :_type field :metadata field :created_at field :updated_at end fields :namespace, :name, :status, :_type, :metadata show_in_dashboard { false } end config.model Setup::BasicAuthorization do weight -49 register_instance_option :label_navigation do 'Basic' end object_label_method { :custom_title } configure :status do pretty_value do "<span class=\"label label-#{bindings[:object].status_class}\">#{value.to_s.capitalize}</span>".html_safe end end configure :namespace, :enum_edit configure :metadata, :json_value edit do field :namespace field :name field :username field :password field :metadata end group :credentials do label 'Credentials' end configure :username do group :credentials end configure :password do group :credentials end show do field :namespace field :name field :status field :username field :password field :metadata field :_id end list do field :namespace field :name field :status field :username field :password field :created_at field :updated_at end fields :namespace, :name, :status, :username, :password end config.model Setup::OauthAuthorization do weight -45 label 'OAuth 1.0 authorization' register_instance_option :label_navigation do 'OAuth 1.0' end object_label_method { :custom_title } parent Setup::Authorization configure :namespace, :enum_edit configure :metadata, :json_value configure :status do 
pretty_value do "<span class=\"label label-#{bindings[:object].status_class}\">#{value.to_s.capitalize}</span>".html_safe end end edit do field :namespace field :name field :client field :parameters field :metadata end group :credentials do label 'Credentials' end configure :access_token do group :credentials end configure :token_span do group :credentials end configure :authorized_at do group :credentials end configure :access_token_secret do group :credentials end configure :realm_id do group :credentials end show do field :namespace field :name field :status field :client field :parameters field :metadata field :_id field :access_token field :access_token_secret field :realm_id field :token_span field :authorized_at end list do field :namespace field :name field :status field :client field :created_at field :updated_at end fields :namespace, :name, :status, :client end config.model Setup::Oauth2Authorization do weight -40 label 'OAuth 2.0 authorization' register_instance_option :label_navigation do 'OAuth 2.0' end object_label_method { :custom_title } parent Setup::Authorization configure :namespace, :enum_edit configure :metadata, :json_value configure :status do pretty_value do "<span class=\"label label-#{bindings[:object].status_class}\">#{value.to_s.capitalize}</span>".html_safe end end configure :expires_in do pretty_value do "#{value}s" if value end end edit do field :namespace field :name field :client field :scopes do visible { bindings[:object].ready_to_save? } associated_collection_scope do provider = ((obj = bindings[:object]) && obj.provider) || nil Proc.new { |scope| if provider scope.where(provider_id: provider.id) else scope end } end end field :parameters do visible { bindings[:object].ready_to_save? 
} end field :metadata end group :credentials do label 'Credentials' end configure :access_token do group :credentials end configure :token_span do group :credentials end configure :authorized_at do group :credentials end configure :refresh_token do group :credentials end configure :token_type do group :credentials end show do field :namespace field :name field :status field :client field :scopes field :parameters field :metadata field :_id field :expires_in field :id_token field :token_type field :access_token field :token_span field :authorized_at field :refresh_token field :_id end list do field :namespace field :name field :status field :client field :scopes field :created_at field :updated_at end end config.model Setup::AwsAuthorization do weight -35 object_label_method { :custom_title } configure :namespace, :enum_edit configure :metadata, :json_value configure :status do pretty_value do "<span class=\"label label-#{bindings[:object].status_class}\">#{value.to_s.capitalize}</span>".html_safe end end edit do field :namespace field :name field :aws_access_key field :aws_secret_key field :seller field :merchant field :markets field :signature_method field :signature_version field :metadata end group :credentials do label 'Credentials' end configure :aws_access_key do group :credentials end configure :aws_secret_key do group :credentials end show do field :namespace field :name field :aws_access_key field :aws_secret_key field :seller field :merchant field :markets field :signature_method field :signature_version field :metadata end list do field :namespace field :name field :aws_access_key field :aws_secret_key field :seller field :merchant field :markets field :signature_method field :signature_version field :created_at field :updated_at end fields :namespace, :name, :aws_access_key, :aws_secret_key, :seller, :merchant, :markets, :signature_method, :signature_version end config.model Setup::OauthAccessGrant do navigation_label 'Security' weight -32 fields 
:created_at, :application_id, :scope end #Monitors config.navigation 'Monitors', icon: 'fa fa-heartbeat' config.model Setup::Notification do navigation_label 'Monitors' weight -20 object_label_method { :label } show_in_dashboard false configure :created_at configure :type do pretty_value do "<label style='color:#{bindings[:object].color}'>#{value.to_s.capitalize}</label>".html_safe end end configure :message do pretty_value do "<label style='color:#{bindings[:object].color}'>#{value}</label>".html_safe end end configure :attachment, :storage_file list do field :created_at do visible do if account = Account.current account.notifications_listed_at = Time.now end true end end field :type field :message field :attachment field :task field :created_at field :updated_at end end config.model Setup::Task do navigation_label 'Monitors' weight -18 object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end configure :_type do pretty_value do value.split('::').last.to_title end end edit do field :description end list do field :_type field :description field :scheduler field :attempts_succeded field :retries field :progress field :status field :created_at field :updated_at end end config.model Setup::FlowExecution do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field :description end list do field :flow field :description field :scheduler field :attempts_succeded field :retries field :progress field :status field :notifications field :created_at field :updated_at end fields :flow, :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::DataTypeGeneration do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field :description end list do field :description field :scheduler field 
:attempts_succeded field :retries field :progress field :status field :notifications field :created_at field :updated_at end fields :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::DataTypeExpansion do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field :description end list do field :description field :scheduler field :attempts_succeded field :retries field :progress field :status field :notifications field :created_at field :updated_at end fields :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::Translation do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field :description end list do field :translator field :description field :scheduler field :attempts_succeded field :retries field :progress field :status field :notifications field :created_at field :updated_at end fields :translator, :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::DataImport do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field :description end list do field :translator field :data field :description field :scheduler field :attempts_succeded field :retries field :progress field :status field :notifications field :created_at field :updated_at end fields :translator, :data, :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::SchemasImport do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :namespace, :enum_edit configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field 
:description end list do field :namespace field :base_uri field :data field :description field :scheduler field :attempts_succeded field :retries field :progress field :status field :notifications field :created_at field :updated_at end fields :namespace, :base_uri, :data, :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::Deletion do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end configure :deletion_model do label 'Model' pretty_value do if value v = bindings[:view] amc = RailsAdmin.config(value) am = amc.abstract_model wording = amc.navigation_label + ' > ' + amc.label can_see = !am.embedded? && (index_action = v.action(:index, am)) (can_see ? v.link_to(amc.contextualized_label(:menu), v.url_for(action: index_action.action_name, model_name: am.to_param), class: 'pjax') : wording).html_safe end end end edit do field :description end list do field :deletion_model field :description field :scheduler field :attempts_succeded field :retries field :progress field :status field :notifications field :created_at field :updated_at end fields :deletion_model, :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::AlgorithmExecution do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field :description end list do field :algorithm field :description field :scheduler field :attempts_succeded field :retries field :progress field :status field :notifications field :created_at field :updated_at end fields :algorithm, :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::Submission do navigation_label 'Monitors' visible false object_label_method { :to_s } configure :attempts_succeded, :text do label 
'Attempts/Succedded' end edit do field :description end list do field :webhook field :connection field :description field :scheduler field :attempts_succeded field :retries field :progress field :status field :notifications field :created_at field :updated_at end fields :webhook, :connection, :description, :scheduler, :attempts_succeded, :retries, :progress, :status, :notifications end config.model Setup::Storage do navigation_label 'Monitors' show_in_dashboard false weight -15 object_label_method { :label } configure :filename do label 'File name' pretty_value { bindings[:object].storage_name } end configure :length do label 'Size' pretty_value do if objects = bindings[:controller].instance_variable_get(:@objects) unless max = bindings[:controller].instance_variable_get(:@max_length) bindings[:controller].instance_variable_set(:@max_length, max = objects.collect { |storage| storage.length }.reject(&:nil?).max) end (bindings[:view].render partial: 'used_memory_bar', locals: { max: max, value: bindings[:object].length }).html_safe else bindings[:view].number_to_human_size(value) end end end configure :storer_model do label 'Model' pretty_value do if value v = bindings[:view] amc = RailsAdmin.config(value) am = amc.abstract_model wording = amc.navigation_label + ' > ' + amc.label can_see = !am.embedded? && (index_action = v.action(:index, am)) (can_see ? v.link_to(amc.label, v.url_for(action: index_action.action_name, model_name: am.to_param), class: 'pjax') : wording).html_safe end end end configure :storer_object do label 'Object' pretty_value do if value v = bindings[:view] amc = RailsAdmin.config(value.class) am = amc.abstract_model wording = value.send(amc.object_label_method) can_see = !am.embedded? && (show_action = v.action(:show, am, value)) (can_see ? 
v.link_to(wording, v.url_for(action: show_action.action_name, model_name: am.to_param, id: value.id), class: 'pjax') : wording).html_safe end end end configure :storer_property do label 'Property' end list do field :storer_model field :storer_object field :storer_property field :filename field :contentType field :length field :created_at field :updated_at end fields :storer_model, :storer_object, :storer_property, :filename, :contentType, :length end #Administration config.navigation 'Administration', icon: 'fa fa-wrench' config.model User do weight -1 navigation_label 'Administration' visible { User.current_super_admin? } object_label_method { :label } group :credentials do label 'Credentials' active true end group :activity do label 'Activity' active true end configure :name configure :email configure :roles configure :account do read_only { true } end configure :password do group :credentials end configure :password_confirmation do group :credentials end configure :key do group :credentials end configure :authentication_token do group :credentials end configure :confirmed_at do group :activity end configure :sign_in_count do group :activity end configure :current_sign_in_at do group :activity end configure :last_sign_in_at do group :activity end configure :current_sign_in_ip do group :activity end configure :last_sign_in_ip do group :activity end edit do field :picture field :name field :email do visible { Account.current_super_admin? } end field :roles do visible { Account.current_super_admin? } end field :account do label { Account.current_super_admin? ? 'Account' : 'Account settings' } help { nil } end field :password do visible { Account.current_super_admin? } end field :password_confirmation do visible { Account.current_super_admin? } end field :key do visible { !bindings[:object].new_record? && Account.current_super_admin? } end field :authentication_token do visible { !bindings[:object].new_record? && Account.current_super_admin? 
} end field :confirmed_at do visible { !bindings[:object].new_record? && Account.current_super_admin? } end field :sign_in_count do visible { !bindings[:object].new_record? && Account.current_super_admin? } end field :current_sign_in_at do visible { !bindings[:object].new_record? && Account.current_super_admin? } end field :last_sign_in_at do visible { !bindings[:object].new_record? && Account.current_super_admin? } end field :current_sign_in_ip do visible { !bindings[:object].new_record? && Account.current_super_admin? } end field :last_sign_in_ip do visible { !bindings[:object].new_record? && Account.current_super_admin? } end end show do field :picture field :name field :email field :account field :roles field :key field :authentication_token field :sign_in_count field :current_sign_in_at field :last_sign_in_at field :current_sign_in_ip field :last_sign_in_ip end list do field :picture do thumb_method :icon end field :name field :email field :account field :roles field :key field :authentication_token field :sign_in_count field :created_at field :updated_at end end config.model Account do navigation_label 'Administration' visible { User.current_super_admin? } object_label_method { :label } configure :_id do visible { Account.current_super_admin? } end configure :name do visible { Account.current_super_admin? } end configure :owner do read_only { !Account.current_super_admin? } help { nil } end configure :tenant_account do visible { Account.current_super_admin? } end configure :number do visible { Account.current_super_admin? } end configure :users do visible { Account.current_super_admin? } end configure :notification_level list do field :_id field :name field :owner field :tenant_account field :number field :users field :notification_level field :created_at field :updated_at end fields :_id, :name, :owner, :tenant_account, :number, :users, :notification_level end config.model Role do navigation_label 'Administration' visible { User.current_super_admin? 
} configure :users do visible { Account.current_super_admin? } end fields :name, :users end config.model Setup::SharedName do navigation_label 'Administration' visible { User.current_super_admin? } list do field :name field :owners field :created_at field :updated_at end fields :name, :owners end config.model Script do navigation_label 'Administration' visible { User.current_super_admin? } edit do field :name field :description field :code, :code_mirror end show do field :name field :description field :code do pretty_value do v = value.gsub('<', '&lt;').gsub('>', '&gt;') "<pre><code class='ruby'>#{v}</code></pre>".html_safe end end end list do field :name field :description field :code field :created_at field :updated_at end fields :name, :description, :code end config.model CenitToken do navigation_label 'Administration' visible { User.current_super_admin? } end config.model Setup::DelayedMessage do navigation_label 'Administration' visible { User.current_super_admin? } end config.model Setup::SystemNotification do navigation_label 'Administration' visible { User.current_super_admin? } end config.model RabbitConsumer do navigation_label 'Administration' visible { User.current_super_admin? 
} object_label_method { :to_s } configure :task_id do pretty_value do if (executor = (obj = bindings[:object]).executor) && (task = obj.executing_task) v = bindings[:view] amc = RailsAdmin.config(task.class) am = amc.abstract_model wording = task.send(amc.object_label_method) amc = RailsAdmin.config(Account) am = amc.abstract_model if (inspect_action = v.action(:inspect, am, executor)) task_path = v.show_path(model_name: task.class.to_s.underscore.gsub('/', '~'), id: task.id.to_s) v.link_to(wording, v.url_for(action: inspect_action.action_name, model_name: am.to_param, id: executor.id, params: { return_to: task_path })) else wording end.html_safe end end end list do field :channel field :tag field :executor field :task_id field :alive field :created_at field :updated_at end fields :created_at, :channel, :tag, :executor, :task_id, :alive end config.model ApplicationId do navigation_label 'Administration' visible { User.current_super_admin? } register_instance_option(:discard_submit_buttons) { bindings[:object].instance_variable_get(:@registering) } configure :name configure :registered, :boolean configure :redirect_uris, :json_value edit do field :oauth_name do visible { bindings[:object].instance_variable_get(:@registering) } end field :redirect_uris do visible { bindings[:object].instance_variable_get(:@registering) } end end list do field :channel field :name field :registered field :account field :identifier field :created_at field :updated_at end fields :created_at, :name, :registered, :account, :identifier end config.model Setup::ScriptExecution do parent { nil } navigation_label 'Administration' object_label_method { :to_s } configure :attempts_succeded, :text do label 'Attempts/Succedded' end edit do field :description end list do field :script field :description field :scheduler field :attempts_succeded field :retries field :progress field :status field :notifications field :created_at field :updated_at end fields :script, :description, :scheduler, 
:attempts_succeded, :retries, :progress, :status, :notifications end end
# Ensure the Account model is loaded before the RailsAdmin configuration runs.
require 'account'

# Custom RailsAdmin actions provided by this application; each class listed
# here is registered with RailsAdmin so it becomes available as an action.
[
  RailsAdmin::Config::Actions::DiskUsage,
  RailsAdmin::Config::Actions::SendToFlow,
  RailsAdmin::Config::Actions::SwitchNavigation,
  RailsAdmin::Config::Actions::DataType,
  RailsAdmin::Config::Actions::Filters,
  RailsAdmin::Config::Actions::DataEvents,
  RailsAdmin::Config::Actions::Flows,
  RailsAdmin::Config::Actions::Import,
  #RailsAdmin::Config::Actions::EdiExport,
  RailsAdmin::Config::Actions::ImportSchema,
  RailsAdmin::Config::Actions::DeleteAll,
  RailsAdmin::Config::Actions::TranslatorUpdate,
  RailsAdmin::Config::Actions::Convert,
  RailsAdmin::Config::Actions::Pull,
  RailsAdmin::Config::Actions::RetryTask,
  RailsAdmin::Config::Actions::DownloadFile,
  RailsAdmin::Config::Actions::ProcessFlow,
  RailsAdmin::Config::Actions::BuildGem,
  RailsAdmin::Config::Actions::Run,
  RailsAdmin::Config::Actions::Authorize,
  RailsAdmin::Config::Actions::SimpleDeleteDataType,
  RailsAdmin::Config::Actions::BulkDeleteDataType,
  RailsAdmin::Config::Actions::SimpleGenerate,
  RailsAdmin::Config::Actions::BulkGenerate,
  RailsAdmin::Config::Actions::SimpleExpand,
  RailsAdmin::Config::Actions::BulkExpand,
  RailsAdmin::Config::Actions::Records,
  RailsAdmin::Config::Actions::FilterDataType,
  RailsAdmin::Config::Actions::SwitchScheduler,
  RailsAdmin::Config::Actions::SimpleExport,
  RailsAdmin::Config::Actions::Schedule,
  RailsAdmin::Config::Actions::Submit,
  RailsAdmin::Config::Actions::Trash,
  RailsAdmin::Config::Actions::Inspect,
  RailsAdmin::Config::Actions::Copy,
  RailsAdmin::Config::Actions::Cancel,
  RailsAdmin::Config::Actions::Configure,
  RailsAdmin::Config::Actions::SimpleCross,
  RailsAdmin::Config::Actions::BulkCross,
  RailsAdmin::Config::Actions::Regist,
  RailsAdmin::Config::Actions::SharedCollectionIndex,
  RailsAdmin::Config::Actions::EcommerceIndex,
  RailsAdmin::Config::Actions::BulkPull,
  RailsAdmin::Config::Actions::CleanUp,
  RailsAdmin::Config::Actions::ShowRecords,
  RailsAdmin::Config::Actions::RunScript,
  RailsAdmin::Config::Actions::Play,
  RailsAdmin::Config::Actions::PullImport,
  RailsAdmin::Config::Actions::State,
  RailsAdmin::Config::Actions::Documentation,
  RailsAdmin::Config::Actions::Push,
  RailsAdmin::Config::Actions::Share,
  RailsAdmin::Config::Actions::Reinstall,
  RailsAdmin::Config::Actions::Swagger,
  RailsAdmin::Config::Actions::AlgorithmDependencies,
  RailsAdmin::Config::Actions::RestApi1,
  RailsAdmin::Config::Actions::RestApi2,
  RailsAdmin::Config::Actions::LinkDataType,
  RailsAdmin::Config::Actions::ImportApiSpec
].each { |a| RailsAdmin::Config::Actions.register(a) }

# Notebook actions are only registered when Jupyter notebook support is
# enabled for this deployment.
[
  RailsAdmin::Config::Actions::Notebooks,
  RailsAdmin::Config::Actions::NotebooksRoot
].each { |a| RailsAdmin::Config::Actions.register(a) } if Cenit.jupyter_notebooks

# Register the bulk export implementation under the :export action key.
RailsAdmin::Config::Actions.register(:export, RailsAdmin::Config::Actions::BulkExport)

# Custom RailsAdmin field types used by the model configurations below.
[
  RailsAdmin::Config::Fields::Types::JsonValue,
  RailsAdmin::Config::Fields::Types::JsonSchema,
  RailsAdmin::Config::Fields::Types::StorageFile,
  RailsAdmin::Config::Fields::Types::EnumEdit,
  RailsAdmin::Config::Fields::Types::Model,
  RailsAdmin::Config::Fields::Types::Record,
  RailsAdmin::Config::Fields::Types::HtmlErb,
  RailsAdmin::Config::Fields::Types::OptionalBelongsTo,
  RailsAdmin::Config::Fields::Types::Code,
  RailsAdmin::Config::Fields::Types::Tag,
  RailsAdmin::Config::Fields::Types::TimeSpan,
  RailsAdmin::Config::Fields::Types::NonEmptyString,
  RailsAdmin::Config::Fields::Types::NonEmptyText,
  RailsAdmin::Config::Fields::Types::MongoffFileUpload,
  RailsAdmin::Config::Fields::Types::Url
].each { |f| RailsAdmin::Config::Fields::Types.register(f) }

require 'rails_admin/config/fields/factories/tag'

# Extension of RailsAdmin::Config: lets `config.navigation label, options`
# record per-label navigation options (e.g. the icon hashes passed in the
# `config.navigation 'Collections', icon: '...'` calls below) in a
# module-level hash.
module RailsAdmin
  module Config
    class << self
      # Stores the options for the given navigation label (label is
      # stringified so symbols and strings share an entry).
      def navigation(label, options)
        navigation_options[label.to_s] = options
      end

      # Lazily-initialized map of navigation label => options.
      def navigation_options
        @nav_options ||= {}
      end
    end
  end
end

RailsAdmin.config do |config|
  config.total_columns_width = 900

  ## == PaperTrail ==
  # config.audit_with :paper_trail, 'User', 'PaperTrail::Version' # PaperTrail >= 3.0.0

  ### More at https://github.com/sferik/rails_admin/wiki/Base-configuration
# Authentication: require a signed-in user for every admin action except the
# public ones listed below.
# FIX(review): the word list used to read `... index show, notebooks_root` —
# the stray comma inside %w(...) made the literal word "show," so the `show`
# action was never treated as public. Corrected to plain `show`, matching the
# fixed copy of this list that appears later in this file.
config.authenticate_with do
  warden.authenticate! scope: :user unless %w(dashboard shared_collection_index ecommerce_index index show notebooks_root).include?(action_name)
end
config.current_user_method { current_user }
config.audit_with :mongoid_audit
config.authorize_with :cancan
config.excluded_models += [Setup::BaseOauthAuthorization, Setup::AwsAuthorization]

# Enable the RailsAdmin actions available in the admin UI (order here is the
# order actions are evaluated in).
config.actions do
  dashboard # mandatory
  # disk_usage
  shared_collection_index
  ecommerce_index
  link_data_type
  index # mandatory
  # Creation is disabled for abstract/base models that are only instantiated
  # through their concrete subclasses.
  new { except [Setup::Event, Setup::DataType, Setup::Authorization, Setup::BaseOauthProvider] }
  filters
  data_events
  flows
  import
  import_schema
  import_api_spec
  pull_import
  translator_update
  convert
  export
  bulk_delete
  show
  show_records
  run
  run_script
  edit
  swagger { only [Setup::ApiSpec] }
  configure
  play
  copy
  share
  simple_cross
  bulk_cross
  build_gem
  pull
  bulk_pull
  push
  download_file
  process_flow
  authorize
  simple_generate
  bulk_generate
  simple_expand
  bulk_expand
  records
  filter_data_type
  switch_navigation
  switch_scheduler
  simple_export
  schedule
  state
  retry_task
  submit
  inspect
  cancel
  regist
  reinstall
  simple_delete_data_type
  bulk_delete_data_type
  delete
  trash
  notebooks_root if Cenit.jupyter_notebooks
  clean_up
  #show_in_app
  send_to_flow
  delete_all
  data_type
  #history_index
  # History is only exposed for shared records of the model classes below
  # (plus every subclass of the three listed hierarchies).
  history_show do
    only do
      [
        Setup::Algorithm,
        Setup::Connection,
        Setup::PlainWebhook,
        Setup::Operation,
        Setup::Resource,
        Setup::Translator,
        Setup::Flow,
        Setup::OauthClient,
        Setup::Oauth2Scope,
        Setup::Snippet
      ] + Setup::DataType.class_hierarchy + Setup::Validator.class_hierarchy + Setup::BaseOauthProvider.class_hierarchy
    end
    visible { only.include?((obj = bindings[:object]).class) && obj.try(:shared?) }
  end
  algorithm_dependencies do
    only do
      Setup::Algorithm
    end
  end
  rest_api1
  rest_api2
  documentation
  notebooks if Cenit.jupyter_notebooks
end

# Navigation groups: each `config.navigation` call records the icon for a
# group (see the RailsAdmin::Config.navigation patch earlier in this file);
# the bare constant references force the model classes to load so their
# RailsAdmin configs register.
config.navigation 'Collections', icon: 'fa fa-cubes'
Setup::Tag
Setup::CrossCollectionAuthor
Setup::CrossCollectionPullParameter
Setup::CrossSharedCollection
Setup::SharedCollection
Setup::CollectionAuthor
Setup::CollectionPullParameter
Setup::CollectionData
Setup::Collection
#Definitions
config.navigation 'Definitions', icon: 'fa fa-puzzle-piece'
Setup::Validator
Setup::CustomValidator
Setup::Schema
Setup::XsltValidator
Setup::EdiValidator
Setup::AlgorithmValidator
Setup::DataType
Setup::JsonDataType
Setup::FileDataType
Setup::CenitDataType
#Connectors
config.navigation 'Connectors', icon: 'fa fa-plug'
Setup::ApiSpec
Setup::Connection
Setup::ConnectionRole
Setup::Section
Setup::Resource
Setup::Webhook
Setup::Operation
Setup::Representation
Setup::PlainWebhook
#Security
config.navigation 'Security', icon: 'fa fa-shield'
Setup::OauthClient
Setup::RemoteOauthClient
Setup::BaseOauthProvider
Setup::OauthProvider
Setup::Oauth2Provider
Setup::Oauth2Scope
Setup::Authorization
Setup::BasicAuthorization
Setup::OauthAuthorization
Setup::Oauth2Authorization
Setup::AwsAuthorization
Cenit::OauthAccessGrant
#Compute
config.navigation 'Compute', icon: 'fa fa-cog'
Setup::AlgorithmParameter
Setup::CallLink
Setup::Algorithm
Setup::AlgorithmOutput
Setup::Action
Setup::Application
Cenit::ApplicationParameter
Setup::Filter
Setup::Notebook if Cenit.jupyter_notebooks
#Transformations
config.navigation 'Transformations', icon: 'fa fa-random'
Setup::Translator
Setup::Renderer
Setup::Parser
Setup::Converter
Setup::Updater
Setup::AlgorithmOutput
Setup::Action
Setup::Application
Cenit::ApplicationParameter
Setup::Snippet
#Workflows
config.navigation 'Workflows', icon: 'fa fa-cogs'
Setup::Flow
Setup::Event
Setup::Observer
Setup::Scheduler
#Monitors
config.navigation 'Monitors', icon: 'fa fa-heartbeat'
Setup::Notification
Setup::Task
Setup::Execution
Setup::FlowExecution Setup::DataTypeGeneration Setup::DataTypeExpansion Setup::Translation Setup::DataImport Setup::Push Setup::BasePull Setup::PullImport Setup::SharedCollectionPull Setup::ApiPull Setup::SchemasImport Setup::ApiSpecImport Setup::Deletion Setup::AlgorithmExecution Setup::Submission Setup::Crossing Setup::Storage #Configuration config.navigation 'Configuration', icon: 'fa fa-sliders' Setup::Namespace Setup::DataTypeConfig Setup::FlowConfig Setup::ConnectionConfig Setup::Pin Setup::Binding Setup::Parameter #Administration config.navigation 'Administration', icon: 'fa fa-user-secret' User Account Role Setup::SharedName Setup::CrossSharedName Script Cenit::BasicToken Cenit::BasicTenantToken Setup::TaskToken Setup::DelayedMessage Setup::SystemNotification RabbitConsumer Cenit::ApplicationId Setup::ScriptExecution Setup::Category end Fixing typo require 'account' [ RailsAdmin::Config::Actions::DiskUsage, RailsAdmin::Config::Actions::SendToFlow, RailsAdmin::Config::Actions::SwitchNavigation, RailsAdmin::Config::Actions::DataType, RailsAdmin::Config::Actions::Filters, RailsAdmin::Config::Actions::DataEvents, RailsAdmin::Config::Actions::Flows, RailsAdmin::Config::Actions::Import, #RailsAdmin::Config::Actions::EdiExport, RailsAdmin::Config::Actions::ImportSchema, RailsAdmin::Config::Actions::DeleteAll, RailsAdmin::Config::Actions::TranslatorUpdate, RailsAdmin::Config::Actions::Convert, RailsAdmin::Config::Actions::Pull, RailsAdmin::Config::Actions::RetryTask, RailsAdmin::Config::Actions::DownloadFile, RailsAdmin::Config::Actions::ProcessFlow, RailsAdmin::Config::Actions::BuildGem, RailsAdmin::Config::Actions::Run, RailsAdmin::Config::Actions::Authorize, RailsAdmin::Config::Actions::SimpleDeleteDataType, RailsAdmin::Config::Actions::BulkDeleteDataType, RailsAdmin::Config::Actions::SimpleGenerate, RailsAdmin::Config::Actions::BulkGenerate, RailsAdmin::Config::Actions::SimpleExpand, RailsAdmin::Config::Actions::BulkExpand, RailsAdmin::Config::Actions::Records, 
RailsAdmin::Config::Actions::FilterDataType, RailsAdmin::Config::Actions::SwitchScheduler, RailsAdmin::Config::Actions::SimpleExport, RailsAdmin::Config::Actions::Schedule, RailsAdmin::Config::Actions::Submit, RailsAdmin::Config::Actions::Trash, RailsAdmin::Config::Actions::Inspect, RailsAdmin::Config::Actions::Copy, RailsAdmin::Config::Actions::Cancel, RailsAdmin::Config::Actions::Configure, RailsAdmin::Config::Actions::SimpleCross, RailsAdmin::Config::Actions::BulkCross, RailsAdmin::Config::Actions::Regist, RailsAdmin::Config::Actions::SharedCollectionIndex, RailsAdmin::Config::Actions::EcommerceIndex, RailsAdmin::Config::Actions::BulkPull, RailsAdmin::Config::Actions::CleanUp, RailsAdmin::Config::Actions::ShowRecords, RailsAdmin::Config::Actions::RunScript, RailsAdmin::Config::Actions::Play, RailsAdmin::Config::Actions::PullImport, RailsAdmin::Config::Actions::State, RailsAdmin::Config::Actions::Documentation, RailsAdmin::Config::Actions::Push, RailsAdmin::Config::Actions::Share, RailsAdmin::Config::Actions::Reinstall, RailsAdmin::Config::Actions::Swagger, RailsAdmin::Config::Actions::AlgorithmDependencies, RailsAdmin::Config::Actions::RestApi1, RailsAdmin::Config::Actions::RestApi2, RailsAdmin::Config::Actions::LinkDataType, RailsAdmin::Config::Actions::ImportApiSpec ].each { |a| RailsAdmin::Config::Actions.register(a) } [ RailsAdmin::Config::Actions::Notebooks, RailsAdmin::Config::Actions::NotebooksRoot ].each { |a| RailsAdmin::Config::Actions.register(a) } if Cenit.jupyter_notebooks RailsAdmin::Config::Actions.register(:export, RailsAdmin::Config::Actions::BulkExport) [ RailsAdmin::Config::Fields::Types::JsonValue, RailsAdmin::Config::Fields::Types::JsonSchema, RailsAdmin::Config::Fields::Types::StorageFile, RailsAdmin::Config::Fields::Types::EnumEdit, RailsAdmin::Config::Fields::Types::Model, RailsAdmin::Config::Fields::Types::Record, RailsAdmin::Config::Fields::Types::HtmlErb, RailsAdmin::Config::Fields::Types::OptionalBelongsTo, 
RailsAdmin::Config::Fields::Types::Code, RailsAdmin::Config::Fields::Types::Tag, RailsAdmin::Config::Fields::Types::TimeSpan, RailsAdmin::Config::Fields::Types::NonEmptyString, RailsAdmin::Config::Fields::Types::NonEmptyText, RailsAdmin::Config::Fields::Types::MongoffFileUpload, RailsAdmin::Config::Fields::Types::Url ].each { |f| RailsAdmin::Config::Fields::Types.register(f) } require 'rails_admin/config/fields/factories/tag' module RailsAdmin module Config class << self def navigation(label, options) navigation_options[label.to_s] = options end def navigation_options @nav_options ||= {} end end end end RailsAdmin.config do |config| config.total_columns_width = 900 ## == PaperTrail == # config.audit_with :paper_trail, 'User', 'PaperTrail::Version' # PaperTrail >= 3.0.0 ### More at https://github.com/sferik/rails_admin/wiki/Base-configuration config.authenticate_with do warden.authenticate! scope: :user unless %w(dashboard shared_collection_index ecommerce_index index show notebooks_root).include?(action_name) end config.current_user_method { current_user } config.audit_with :mongoid_audit config.authorize_with :cancan config.excluded_models += [Setup::BaseOauthAuthorization, Setup::AwsAuthorization] config.actions do dashboard # mandatory # disk_usage shared_collection_index ecommerce_index link_data_type index # mandatory new { except [Setup::Event, Setup::DataType, Setup::Authorization, Setup::BaseOauthProvider] } filters data_events flows import import_schema import_api_spec pull_import translator_update convert export bulk_delete show show_records run run_script edit swagger { only [Setup::ApiSpec] } configure play copy share simple_cross bulk_cross build_gem pull bulk_pull push download_file process_flow authorize simple_generate bulk_generate simple_expand bulk_expand records filter_data_type switch_navigation switch_scheduler simple_export schedule state retry_task submit inspect cancel regist reinstall simple_delete_data_type bulk_delete_data_type delete 
trash notebooks_root if Cenit.jupyter_notebooks clean_up #show_in_app send_to_flow delete_all data_type #history_index history_show do only do [ Setup::Algorithm, Setup::Connection, Setup::PlainWebhook, Setup::Operation, Setup::Resource, Setup::Translator, Setup::Flow, Setup::OauthClient, Setup::Oauth2Scope, Setup::Snippet ] + Setup::DataType.class_hierarchy + Setup::Validator.class_hierarchy + Setup::BaseOauthProvider.class_hierarchy end visible { only.include?((obj = bindings[:object]).class) && obj.try(:shared?) } end algorithm_dependencies do only do Setup::Algorithm end end rest_api1 rest_api2 documentation notebooks if Cenit.jupyter_notebooks end config.navigation 'Collections', icon: 'fa fa-cubes' Setup::Tag Setup::CrossCollectionAuthor Setup::CrossCollectionPullParameter Setup::CrossSharedCollection Setup::SharedCollection Setup::CollectionAuthor Setup::CollectionPullParameter Setup::CollectionData Setup::Collection #Definitions config.navigation 'Definitions', icon: 'fa fa-puzzle-piece' Setup::Validator Setup::CustomValidator Setup::Schema Setup::XsltValidator Setup::EdiValidator Setup::AlgorithmValidator Setup::DataType Setup::JsonDataType Setup::FileDataType Setup::CenitDataType #Connectors config.navigation 'Connectors', icon: 'fa fa-plug' Setup::ApiSpec Setup::Connection Setup::ConnectionRole Setup::Section Setup::Resource Setup::Webhook Setup::Operation Setup::Representation Setup::PlainWebhook #Security config.navigation 'Security', icon: 'fa fa-shield' Setup::OauthClient Setup::RemoteOauthClient Setup::BaseOauthProvider Setup::OauthProvider Setup::Oauth2Provider Setup::Oauth2Scope Setup::Authorization Setup::BasicAuthorization Setup::OauthAuthorization Setup::Oauth2Authorization Setup::AwsAuthorization Cenit::OauthAccessGrant #Compute config.navigation 'Compute', icon: 'fa fa-cog' Setup::AlgorithmParameter Setup::CallLink Setup::Algorithm Setup::AlgorithmOutput Setup::Action Setup::Application Cenit::ApplicationParameter Setup::Filter 
Setup::Notebook if Cenit.jupyter_notebooks #Transformations config.navigation 'Transformations', icon: 'fa fa-random' Setup::Translator Setup::Renderer Setup::Parser Setup::Converter Setup::Updater Setup::AlgorithmOutput Setup::Action Setup::Application Cenit::ApplicationParameter Setup::Snippet #Workflows config.navigation 'Workflows', icon: 'fa fa-cogs' Setup::Flow Setup::Event Setup::Observer Setup::Scheduler #Monitors config.navigation 'Monitors', icon: 'fa fa-heartbeat' Setup::Notification Setup::Task Setup::Execution Setup::FlowExecution Setup::DataTypeGeneration Setup::DataTypeExpansion Setup::Translation Setup::DataImport Setup::Push Setup::BasePull Setup::PullImport Setup::SharedCollectionPull Setup::ApiPull Setup::SchemasImport Setup::ApiSpecImport Setup::Deletion Setup::AlgorithmExecution Setup::Submission Setup::Crossing Setup::Storage #Configuration config.navigation 'Configuration', icon: 'fa fa-sliders' Setup::Namespace Setup::DataTypeConfig Setup::FlowConfig Setup::ConnectionConfig Setup::Pin Setup::Binding Setup::Parameter #Administration config.navigation 'Administration', icon: 'fa fa-user-secret' User Account Role Setup::SharedName Setup::CrossSharedName Script Cenit::BasicToken Cenit::BasicTenantToken Setup::TaskToken Setup::DelayedMessage Setup::SystemNotification RabbitConsumer Cenit::ApplicationId Setup::ScriptExecution Setup::Category end
unless Rails.env == 'test' require Rails.root.join('lib', 'rails_admin_send_broadcast_email.rb') require Rails.root.join('lib', 'rails_admin_add_to_mailchimp_list.rb') require Rails.root.join('lib', 'rails_admin_list_scope.rb') RailsAdmin.config do |config| module RailsAdmin module Config module Actions class SendBroadcastEmail < RailsAdmin::Config::Actions::Base RailsAdmin::Config::Actions.register(self) end class AddToMailchimpList < RailsAdmin::Config::Actions::Base RailsAdmin::Config::Actions.register(self) end end end end config.current_user_method { current_person } #auto-generated config.authorize_with :cancan config.attr_accessible_role {:admin} config.authenticate_with { unless current_person session[:return_to] = request.url redirect_to '/login', :notice => "You must first log in or sign up before accessing this page." end } config.actions do dashboard index new send_broadcast_email add_to_mailchimp_list show edit delete export end config.included_models = [Account,Address,State,AccountDeactivated,Preference,Exchange,ForumPost,FeedPost,BroadcastEmail,Person,PersonDeactivated,Category,Neighborhood,Req,Offer,BusinessType,ActivityStatus,PlanType, ExchangeDeleted, TimeZone] config.default_items_per_page = 100 config.model State do visible false end config.model Address do visible false configure :person, :belongs_to_association object_label_method do :address_line_1 end list do field :address_line_1 field :city field :zipcode_plus_4 end edit do field :address_line_1 field :address_line_2 field :address_line_3 field :city field :state field :zipcode_plus_4 field :address_privacy do label "Public" end field :primary end end config.model Account do list do scope do joins(:person).where( people: { deactivated:false } ) end field :person do label "Name" end field :offset do label "Starting Balance" end field :balance do formatted_value do (bindings[:object].balance_with_initial_offset).to_s end sortable "accounts.balance + accounts.offset" sort_reverse true end 
field :credit_limit field :updated_at do label "Last Transaction" end end edit do field :name field :person do label "Name" end field :offset do label "Starting Balance" end field :balance do formatted_value do (bindings[:object].balance_with_initial_offset).to_s end end field :credit_limit end export do field :person field :offset do label "Starting Balance" end field :balance do formatted_value do (bindings[:object].balance_with_initial_offset).to_s end end field :credit_limit field :updated_at do label "Last Transaction" end end end config.model AccountDeactivated do label do 'Deactivated account' end list do scope do joins(:person).where( people: { deactivated:true } ) end field :person do label "Name" end field :offset do label "Starting Balance" end field :balance do formatted_value do (bindings[:object].balance_with_initial_offset).to_s end sortable "accounts.balance + accounts.offset" sort_reverse true end field :credit_limit field :updated_at do label "Last Transaction" end end edit do field :name field :person do label "Name" end field :offset do label "Starting Balance" end field :balance do formatted_value do (bindings[:object].balance_with_initial_offset).to_s end end field :credit_limit end export do field :person field :offset do label "Starting Balance" end field :balance do formatted_value do (bindings[:object].balance_with_initial_offset).to_s end end field :credit_limit field :updated_at do label "Last Transaction" end end end config.model Req do label "Request" label_plural "Requests" list do field :name field :person do label "Requested by" end field :created_at end edit do field :group field :person do label "Requested by" end field :name field :estimated_hours field :due_date, :date field :description field :categories field :neighborhoods end end config.model Offer do list do field :name field :person do label "Offered by" end field :created_at end edit do field :group field :person do label "Offered by" end field :name field 
:total_available field :price field :expiration_date, :date field :description field :categories field :neighborhoods end end config.model Preference do configure :default_profile_picture do pretty_value do %{<a href="/photos/default_profile_picture" target="_blank">Change default profile image</a>} end end configure :default_group_picture do pretty_value do %{<a href="/photos/default_group_picture" target="_blank">Change default group image</a>} end end list do field :app_name end edit do group :default do help "*** Some preferences require server restart to take effect after change ***" end field :app_name field :server_name field :groups field :default_group_id do properties[:collection] = Group.all.map {|g| [g.name,g.id]} partial "select" end field :locale do properties[:collection] = [['English','en'],['Spanish','es'],['French','fr'],['Greek','gr']] partial "select" end field :logout_url field :blog_feed_url field :new_member_notification field :googlemap_api_key field :gmail field :email_notifications field :email_verifications field :protected_categories field :whitelist field :openid field :public_uploads field :public_private_bid do label "Public/Private Bids" end field :mailchimp_list_id do label "Mailchimp List ID" end field :mailchimp_send_welcome field :registration_intro field :agreement field :about field :practice field :steps field :questions field :contact field :analytics field :display_orgicon field :default_profile_picture field :default_group_picture end end config.model Exchange do list do field :created_at field :customer field :worker field :amount field :notes do label "Memo" formatted_value do bindings[:object].memo end end end edit do field :worker do label "Credits in" end field :customer do label "Credits out" end field :amount field :group_id, :enum do label "Unit" enum_method do :group_id_enum end end field :notes, :text #field :metadata end end config.model ExchangeDeleted do label do 'Deleted exchange' end list do scope do 
only_deleted end field :created_at field :customer field :worker field :amount field :notes do label "Memo" formatted_value do bindings[:object].memo end end end end config.model FeedPost do list do field :title field :date_published field :created_at field :updated_at end edit do field :title field :content, :text do #ckeditor true end end end config.model ForumPost do list do field :person field :body field :created_at end edit do field :body end end config.model BroadcastEmail do edit do field :subject field :message, :text do #ckeditor true end end end config.model Category do list do field :name end edit do field :name field :parent_id do properties[:collection] = [['',nil]] + Category.by_long_name.map {|c| [c.long_name, c.id]} partial "select" end field :description end end config.model Neighborhood do list do field :name end edit do field :name field :parent_id do properties[:collection] = [['',nil]] + Neighborhood.by_long_name.map {|n| [n.long_name, n.id]} partial "select" end field :description end end config.model BusinessType do list do field :name sort_by :name end edit do field :name field :description end end config.model ActivityStatus do list do field :name sort_by :name end edit do field :name field :description end end config.model PlanType do list do field :name sort_by :name end edit do field :name field :description end end config.model Person do object_label_method do :display_name end list do scope do where deactivated: false end field :last_logged_in_at do label "Last login" end field :name field :business_name field :email field :deactivated do label "Disabled" end field :email_verified field :phone field :admin field :org field :mailchimp_subscribed field :openid_identifier sort_by :last_logged_in_at end export do field :last_logged_in_at do label "Last login" end field :name field :email field :deactivated do label "Disabled" end field :email_verified field :phone field :admin field :org field :web_site_url field :org field :title field 
:business_name field :legal_business_name field :business_type field :activity_status field :plan_type field :support_contact end edit do field :name field :email field :password field :password_confirmation field :deactivated field :email_verified field :phone field :phoneprivacy do label "Share Phone?" end field :admin field :web_site_url field :org field :title field :business_name field :legal_business_name field :business_type field :activity_status field :plan_type field :support_contact field :description, :text do #ckeditor true end field :addresses # generally not appropriate for admin to edit openid since it is an assertion end end config.model PersonDeactivated do object_label_method do :display_name end label do 'Deactivated people' end list do scope do where deactivated: true end field :last_logged_in_at do label "Last login" end field :name field :business_name field :email field :deactivated do label "Disabled" end field :email_verified field :phone field :admin field :org field :openid_identifier sort_by :last_logged_in_at end edit do field :name field :email field :password field :password_confirmation field :deactivated field :email_verified field :phone field :phoneprivacy do label "Share Phone?" end field :admin field :web_site_url field :org field :title field :business_name field :legal_business_name field :business_type field :activity_status field :plan_type field :support_contact field :description, :text do #ckeditor true end field :addresses # generally not appropriate for admin to edit openid since it is an assertion end end config.model TimeZone do label "Time Zone" label_plural "Time Zone" field :time_zone, :enum do enum do ActiveSupport::TimeZone.zones_map.map {|x|[x[1], x[0]]} end end field :date_style, :enum do enum do TimeZone::Date_Style.keys end end end end end search accounts in admin interface by associated person. 
closes gh-424 unless Rails.env == 'test' require Rails.root.join('lib', 'rails_admin_send_broadcast_email.rb') require Rails.root.join('lib', 'rails_admin_add_to_mailchimp_list.rb') require Rails.root.join('lib', 'rails_admin_list_scope.rb') RailsAdmin.config do |config| module RailsAdmin module Config module Actions class SendBroadcastEmail < RailsAdmin::Config::Actions::Base RailsAdmin::Config::Actions.register(self) end class AddToMailchimpList < RailsAdmin::Config::Actions::Base RailsAdmin::Config::Actions.register(self) end end end end config.current_user_method { current_person } #auto-generated config.authorize_with :cancan config.attr_accessible_role {:admin} config.authenticate_with { unless current_person session[:return_to] = request.url redirect_to '/login', :notice => "You must first log in or sign up before accessing this page." end } config.actions do dashboard index new send_broadcast_email add_to_mailchimp_list show edit delete export end config.included_models = [Account,Address,State,AccountDeactivated,Preference,Exchange,ForumPost,FeedPost,BroadcastEmail,Person,PersonDeactivated,Category,Neighborhood,Req,Offer,BusinessType,ActivityStatus,PlanType, ExchangeDeleted, TimeZone] config.default_items_per_page = 100 config.model State do visible false end config.model Address do visible false configure :person, :belongs_to_association object_label_method do :address_line_1 end list do field :address_line_1 field :city field :zipcode_plus_4 end edit do field :address_line_1 field :address_line_2 field :address_line_3 field :city field :state field :zipcode_plus_4 field :address_privacy do label "Public" end field :primary end end config.model Account do list do scope do joins(:person).where( people: { deactivated:false } ) end field :person do label "Name" searchable [{Person => :name}] queryable true end field :offset do label "Starting Balance" end field :balance do formatted_value do (bindings[:object].balance_with_initial_offset).to_s end sortable 
"accounts.balance + accounts.offset" sort_reverse true end field :credit_limit field :updated_at do label "Last Transaction" end end edit do field :name field :person do label "Name" end field :offset do label "Starting Balance" end field :balance do formatted_value do (bindings[:object].balance_with_initial_offset).to_s end end field :credit_limit end export do field :person field :offset do label "Starting Balance" end field :balance do formatted_value do (bindings[:object].balance_with_initial_offset).to_s end end field :credit_limit field :updated_at do label "Last Transaction" end end end config.model AccountDeactivated do label do 'Deactivated account' end list do scope do joins(:person).where( people: { deactivated:true } ) end field :person do label "Name" end field :offset do label "Starting Balance" end field :balance do formatted_value do (bindings[:object].balance_with_initial_offset).to_s end sortable "accounts.balance + accounts.offset" sort_reverse true end field :credit_limit field :updated_at do label "Last Transaction" end end edit do field :name field :person do label "Name" end field :offset do label "Starting Balance" end field :balance do formatted_value do (bindings[:object].balance_with_initial_offset).to_s end end field :credit_limit end export do field :person field :offset do label "Starting Balance" end field :balance do formatted_value do (bindings[:object].balance_with_initial_offset).to_s end end field :credit_limit field :updated_at do label "Last Transaction" end end end config.model Req do label "Request" label_plural "Requests" list do field :name field :person do label "Requested by" end field :created_at end edit do field :group field :person do label "Requested by" end field :name field :estimated_hours field :due_date, :date field :description field :categories field :neighborhoods end end config.model Offer do list do field :name field :person do label "Offered by" end field :created_at end edit do field :group field :person 
do label "Offered by" end field :name field :total_available field :price field :expiration_date, :date field :description field :categories field :neighborhoods end end config.model Preference do configure :default_profile_picture do pretty_value do %{<a href="/photos/default_profile_picture" target="_blank">Change default profile image</a>} end end configure :default_group_picture do pretty_value do %{<a href="/photos/default_group_picture" target="_blank">Change default group image</a>} end end list do field :app_name end edit do group :default do help "*** Some preferences require server restart to take effect after change ***" end field :app_name field :server_name field :groups field :default_group_id do properties[:collection] = Group.all.map {|g| [g.name,g.id]} partial "select" end field :locale do properties[:collection] = [['English','en'],['Spanish','es'],['French','fr'],['Greek','gr']] partial "select" end field :logout_url field :blog_feed_url field :new_member_notification field :googlemap_api_key field :gmail field :email_notifications field :email_verifications field :protected_categories field :whitelist field :openid field :public_uploads field :public_private_bid do label "Public/Private Bids" end field :mailchimp_list_id do label "Mailchimp List ID" end field :mailchimp_send_welcome field :registration_intro field :agreement field :about field :practice field :steps field :questions field :contact field :analytics field :display_orgicon field :default_profile_picture field :default_group_picture end end config.model Exchange do list do field :created_at field :customer field :worker field :amount field :notes do label "Memo" formatted_value do bindings[:object].memo end end end edit do field :worker do label "Credits in" end field :customer do label "Credits out" end field :amount field :group_id, :enum do label "Unit" enum_method do :group_id_enum end end field :notes, :text #field :metadata end end config.model ExchangeDeleted do label do 
'Deleted exchange' end list do scope do only_deleted end field :created_at field :customer field :worker field :amount field :notes do label "Memo" formatted_value do bindings[:object].memo end end end end config.model FeedPost do list do field :title field :date_published field :created_at field :updated_at end edit do field :title field :content, :text do #ckeditor true end end end config.model ForumPost do list do field :person field :body field :created_at end edit do field :body end end config.model BroadcastEmail do edit do field :subject field :message, :text do #ckeditor true end end end config.model Category do list do field :name end edit do field :name field :parent_id do properties[:collection] = [['',nil]] + Category.by_long_name.map {|c| [c.long_name, c.id]} partial "select" end field :description end end config.model Neighborhood do list do field :name end edit do field :name field :parent_id do properties[:collection] = [['',nil]] + Neighborhood.by_long_name.map {|n| [n.long_name, n.id]} partial "select" end field :description end end config.model BusinessType do list do field :name sort_by :name end edit do field :name field :description end end config.model ActivityStatus do list do field :name sort_by :name end edit do field :name field :description end end config.model PlanType do list do field :name sort_by :name end edit do field :name field :description end end config.model Person do object_label_method do :display_name end list do scope do where deactivated: false end field :last_logged_in_at do label "Last login" end field :name field :business_name field :email field :deactivated do label "Disabled" end field :email_verified field :phone field :admin field :org field :mailchimp_subscribed field :openid_identifier sort_by :last_logged_in_at end export do field :last_logged_in_at do label "Last login" end field :name field :email field :deactivated do label "Disabled" end field :email_verified field :phone field :admin field :org field 
:web_site_url field :org field :title field :business_name field :legal_business_name field :business_type field :activity_status field :plan_type field :support_contact end edit do field :name field :email field :password field :password_confirmation field :deactivated field :email_verified field :phone field :phoneprivacy do label "Share Phone?" end field :admin field :web_site_url field :org field :title field :business_name field :legal_business_name field :business_type field :activity_status field :plan_type field :support_contact field :description, :text do #ckeditor true end field :addresses # generally not appropriate for admin to edit openid since it is an assertion end end config.model PersonDeactivated do object_label_method do :display_name end label do 'Deactivated people' end list do scope do where deactivated: true end field :last_logged_in_at do label "Last login" end field :name field :business_name field :email field :deactivated do label "Disabled" end field :email_verified field :phone field :admin field :org field :openid_identifier sort_by :last_logged_in_at end edit do field :name field :email field :password field :password_confirmation field :deactivated field :email_verified field :phone field :phoneprivacy do label "Share Phone?" end field :admin field :web_site_url field :org field :title field :business_name field :legal_business_name field :business_type field :activity_status field :plan_type field :support_contact field :description, :text do #ckeditor true end field :addresses # generally not appropriate for admin to edit openid since it is an assertion end end config.model TimeZone do label "Time Zone" label_plural "Time Zone" field :time_zone, :enum do enum do ActiveSupport::TimeZone.zones_map.map {|x|[x[1], x[0]]} end end field :date_style, :enum do enum do TimeZone::Date_Style.keys end end end end end
require_relative '../../lib/rails_admin_extensions/rails_admin_change_state.rb' RailsAdmin.config do |config| ### Popular gems integration ## == Devise == config.authenticate_with do warden.authenticate! scope: :user end config.current_user_method(&:current_user) ## == Auth == # config.authorize_with do |req| # redirect_to main_app.root_path unless current_user.try(:admin?) # if req.action_name == 'statistics' && current_user.role != 'superuser' # redirect_to dashboard_path # end # end config.authorize_with :cancan, Ability config.current_user_method &:current_user # config.excluded_models = ['AgeFilter', 'FederalState', # 'OrganizationConnection', 'Filter'] ## == PaperTrail == config.audit_with :paper_trail, 'User', 'PaperTrail::Version' # PaperTrail >= 3.0.0 ### More at https://github.com/sferik/rails_admin/wiki/Base-configuration config.included_models = %w( Organization Website Location FederalState Offer Opening Category Email UpdateRequest LanguageFilter User Contact Keyword Definition Note Area OrganizationConnection SearchLocation ContactPerson Subscription ) config.actions do dashboard # mandatory index # mandatory new do except ['User', 'FederalState'] end export bulk_delete do except ['User', 'FederalState'] end show edit delete do except ['User', 'FederalState'] end show_in_app clone # nested_set do # only ['Category'] # end nestable do only ['Category'] end change_state ## With an audit adapter, you can add: history_index history_show end config.model 'Organization' do list do field :offers_count field :name field :renewed field :aasm_state field :creator field :locations_count field :created_by sort_by :offers_count end weight(-3) field :name field :description do css_class 'js-count-character' end field :comment do css_class 'js-count-character' end field :locations field :legal_form field :charitable field :accredited_institution field :founded field :umbrella field :parents field :children field :slug do read_only do bindings[:object].new_record? 
end end field :websites field :mailings_enabled field :renewed field :aasm_state do read_only true help false end # Hidden fields edit do field :created_by, :hidden do visible do bindings[:object].new_record? end default_value do bindings[:view]._current_user.id end end end show do field :offers field :locations field :created_by field :approved_by end clone_config do custom_method :partial_dup end export do field :id end end config.label_methods << :url config.model 'Website' do field :host field :url show do field :offers field :organizations end end config.label_methods << :display_name config.model 'Location' do list do field :name field :organization field :zip field :federal_state field :display_name end weight(-2) field :organization field :name field :street field :addition field :zip field :city field :area_code field :local_number field :email field :federal_state do inline_add false inline_edit false end field :hq field :latitude do read_only true end field :longitude do read_only true end show do field :offers field :display_name end export do field :id end object_label_method :display_name end config.model 'FederalState' do weight 2 list do field :id do sort_reverse false end field :name end end config.model 'Offer' do weight(-1) list do field :name field :section_filters field :location field :renewed field :aasm_state field :creator field :expires_at field :approved_at field :organizations do searchable :name end field :created_by end field :section_filters field :name do css_class 'js-category-suggestions__trigger' end field :description do css_class 'js-count-character' end field :comment do css_class 'js-count-character' end field :notes field :next_steps do css_class 'js-count-character' end field :legal_information field :contact_people field :encounter field :slug do read_only do bindings[:object].new_record? end end field :location field :area field :organizations do help do 'Required. Only approved organizations.' 
end end field :categories do css_class 'js-category-suggestions' end field :language_filters field :target_gender field :target_audience_filters do help do 'Richtet sich das Angebot direkt an das Kind, oder an Erwachsene wie z.B. die Eltern, einen Nachbarn oder einen Lotsen' end end field :age_from field :age_to field :openings field :opening_specification do help do 'Bitte einigt euch auf eine einheitliche Ausdrucksweise. Wie etwa "jeden 1. Montag im Monat" oder "jeden 2. Freitag". Sagt mir (Konstantin) auch gern bescheid, wenn ihr ein einheitliches Format gefunden habt, mit dem alle Fälle abgedeckt werden können.' end end field :websites field :keywords do inverse_of :offers end field :expires_at field :renewed field :aasm_state do read_only true help false end # Hidden fields edit do field :created_by, :hidden do visible do bindings[:object].new_record? end default_value do bindings[:view]._current_user.id end end end show do field :created_at do strftime_format "%d. %B %Y" end field :created_by field :approved_by end clone_config do custom_method :partial_dup end export do field :id end end config.model 'ContactPerson' do object_label_method :display_name list do field :id field :first_name field :last_name field :organization field :offers field :email_address field :operational_name field :local_number_1 field :local_number_2 end field :gender field :academic_title field :first_name field :last_name field :operational_name do help do "Falls es sich nicht um einen persönlichen Ansprechpartner handelt hier z.B. 'Zentrale' eintragen" end end field :responsibility do help do "Z.b. 'Zuständig für alle Anfragen von Menschen deren Nachname mit den Buchstaben A-M anfangen'" end end field :area_code_1 field :local_number_1 field :area_code_2 field :local_number_2 field :fax_area_code field :fax_number field :email field :organization field :offers field :spoc do help do "Single Point of Contact / Zentrale Anlaufstelle." 
end end export do field :id end clone_config do custom_method :partial_dup end show do field :referencing_notes end end config.model 'Opening' do field :day do help do 'Required. Wenn weder "Open" noch "Close" angegeben werden, bedeutet das an diesem Tag "nach Absprache".' end end field :open do help do 'Required if "Close" given.' end end field :close do help do 'Required if "Open" given.' end end field :name do visible false end list do sort_by :sort_value field :sort_value do sort_reverse false visible false end end end config.model 'Category' do field :name field :section_filters field :parent field :sort_order object_label_method :name_with_world_suffix_and_optional_asterisk list do sort_by :name end show do field :offers field :icon end # nested_set(max_depth: 5) nestable_tree(max_depth: 5) end config.model 'Definition' do field :key field :explanation object_label_method :key end config.model 'Email' do field :address field :aasm_state do read_only true help false end list do field :contact_people end show do field :contact_people end object_label_method :address end config.model 'Note' do list do field :text field :topic field :user field :created_at field :notable field :referencable end edit do field :notable field :text field :topic field :referencable field :user_id, :hidden do default_value do bindings[:view]._current_user.id end end end nested do field :notable do visible false end field :text do read_only do !bindings[:object].new_record? end end field :topic do read_only do !bindings[:object].new_record? end end field :referencable do read_only do !bindings[:object].new_record? 
end end end update do field :id do read_only true end field :text do read_only true end field :topic do read_only true end field :user do read_only true end field :notable do read_only true end field :user_id do read_only true visible false end field :referencable end end config.model 'Filter' do weight 1 list do field :id field :name field :identifier field :offers end end config.model 'LanguageFilter' do parent Filter end config.model 'AgeFilter' do parent Filter end config.model 'Target_AudienceFilter' do parent Filter end config.model 'User' do weight 1 list do field :id field :name field :email field :role field :created_at field :updated_at end edit do field :name do read_only do bindings[:object] != bindings[:view].current_user end end field :email do read_only do bindings[:object] != bindings[:view].current_user end end field :password do visible do bindings[:object] == bindings[:view].current_user end end end end config.model 'Keyword' do weight 1 end config.model 'Area' do weight 1 end config.model 'Contact' do weight 2 end config.model 'Subscription' do weight 2 end config.model 'UpdateRequest' do weight 2 end config.model 'ContactPersonOffer' do weight 3 end config.model 'OrganizationConnection' do weight 3 end config.model 'SearchLocation' do weight 3 field :query do read_only true end field :latitude do read_only true end field :longitude do read_only true end end end changes for #420 TODO: carat_base update und dann bundle_install bzw. bundle_update carat_base require_relative '../../lib/rails_admin_extensions/rails_admin_change_state.rb' RailsAdmin.config do |config| ### Popular gems integration ## == Devise == config.authenticate_with do warden.authenticate! scope: :user end config.current_user_method(&:current_user) ## == Auth == # config.authorize_with do |req| # redirect_to main_app.root_path unless current_user.try(:admin?) 
# if req.action_name == 'statistics' && current_user.role != 'superuser' # redirect_to dashboard_path # end # end config.authorize_with :cancan, Ability config.current_user_method &:current_user # config.excluded_models = ['AgeFilter', 'FederalState', # 'OrganizationConnection', 'Filter'] ## == PaperTrail == config.audit_with :paper_trail, 'User', 'PaperTrail::Version' # PaperTrail >= 3.0.0 ### More at https://github.com/sferik/rails_admin/wiki/Base-configuration config.included_models = %w( Organization Website Location FederalState Offer Opening Category Email UpdateRequest LanguageFilter User Contact Keyword Definition Note Area OrganizationConnection SearchLocation ContactPerson Subscription ) config.actions do dashboard # mandatory index # mandatory new do except ['User', 'FederalState'] end export bulk_delete do except ['User', 'FederalState'] end show edit delete do except ['User', 'FederalState'] end show_in_app clone # nested_set do # only ['Category'] # end nestable do only ['Category'] end change_state ## With an audit adapter, you can add: history_index history_show end config.model 'Organization' do list do field :offers_count field :name field :renewed field :aasm_state field :creator field :locations_count field :created_by sort_by :offers_count end weight(-3) field :name field :description do css_class 'js-count-character' end field :comment do css_class 'js-count-character' end field :locations field :legal_form field :charitable field :accredited_institution field :founded field :umbrella field :parents field :children field :slug do read_only do bindings[:object].new_record? end end field :websites field :mailings_enabled field :renewed field :aasm_state do read_only true help false end # Hidden fields edit do field :created_by, :hidden do visible do bindings[:object].new_record? 
end default_value do bindings[:view]._current_user.id end end end show do field :offers field :locations field :created_by field :approved_by end clone_config do custom_method :partial_dup end export do field :id end end config.label_methods << :url config.model 'Website' do field :host field :url show do field :offers field :organizations end end config.label_methods << :display_name config.model 'Location' do list do field :name field :organization field :zip field :federal_state field :display_name end weight(-2) field :organization field :name field :street field :addition field :zip field :city field :area_code field :local_number field :email field :federal_state do inline_add false inline_edit false end field :hq field :latitude do read_only true end field :longitude do read_only true end show do field :offers field :display_name end export do field :id end object_label_method :display_name end config.model 'FederalState' do weight 2 list do field :id do sort_reverse false end field :name end end config.model 'Offer' do weight(-1) list do field :name field :section_filters field :location field :renewed field :aasm_state field :creator field :expires_at field :approved_at field :organizations do searchable :name end field :created_by end field :section_filters field :name do css_class 'js-category-suggestions__trigger' end field :description do css_class 'js-count-character' end field :comment do css_class 'js-count-character' end field :notes field :next_steps do css_class 'js-count-character' end field :legal_information field :contact_people field :encounter field :slug do read_only do bindings[:object].new_record? end end field :location field :area field :organizations do help do 'Required. Only approved organizations.' end end field :categories do css_class 'js-category-suggestions' end field :language_filters field :exclusive_gender do help do 'Optional. Leer bedeutet, dass das Angebot alle Geschlechter bedient.' 
end end field :target_audience_filters do help do 'Richtet sich das Angebot direkt an das Kind, oder an Erwachsene wie z.B. die Eltern, einen Nachbarn oder einen Lotsen' end end field :age_from field :age_to field :openings field :opening_specification do help do 'Bitte einigt euch auf eine einheitliche Ausdrucksweise. Wie etwa "jeden 1. Montag im Monat" oder "jeden 2. Freitag". Sagt mir (Konstantin) auch gern bescheid, wenn ihr ein einheitliches Format gefunden habt, mit dem alle Fälle abgedeckt werden können.' end end field :websites field :keywords do inverse_of :offers end field :expires_at field :renewed field :aasm_state do read_only true help false end # Hidden fields edit do field :created_by, :hidden do visible do bindings[:object].new_record? end default_value do bindings[:view]._current_user.id end end end show do field :created_at do strftime_format "%d. %B %Y" end field :created_by field :approved_by end clone_config do custom_method :partial_dup end export do field :id end end config.model 'ContactPerson' do object_label_method :display_name list do field :id field :first_name field :last_name field :organization field :offers field :email_address field :operational_name field :local_number_1 field :local_number_2 end field :gender field :academic_title field :first_name field :last_name field :operational_name do help do "Falls es sich nicht um einen persönlichen Ansprechpartner handelt hier z.B. 'Zentrale' eintragen" end end field :responsibility do help do "Z.b. 'Zuständig für alle Anfragen von Menschen deren Nachname mit den Buchstaben A-M anfangen'" end end field :area_code_1 field :local_number_1 field :area_code_2 field :local_number_2 field :fax_area_code field :fax_number field :email field :organization field :offers field :spoc do help do "Single Point of Contact / Zentrale Anlaufstelle." 
end end export do field :id end clone_config do custom_method :partial_dup end show do field :referencing_notes end end config.model 'Opening' do field :day do help do 'Required. Wenn weder "Open" noch "Close" angegeben werden, bedeutet das an diesem Tag "nach Absprache".' end end field :open do help do 'Required if "Close" given.' end end field :close do help do 'Required if "Open" given.' end end field :name do visible false end list do sort_by :sort_value field :sort_value do sort_reverse false visible false end end end config.model 'Category' do field :name field :section_filters field :parent field :sort_order object_label_method :name_with_world_suffix_and_optional_asterisk list do sort_by :name end show do field :offers field :icon end # nested_set(max_depth: 5) nestable_tree(max_depth: 5) end config.model 'Definition' do field :key field :explanation object_label_method :key end config.model 'Email' do field :address field :aasm_state do read_only true help false end list do field :contact_people end show do field :contact_people end object_label_method :address end config.model 'Note' do list do field :text field :topic field :user field :created_at field :notable field :referencable end edit do field :notable field :text field :topic field :referencable field :user_id, :hidden do default_value do bindings[:view]._current_user.id end end end nested do field :notable do visible false end field :text do read_only do !bindings[:object].new_record? end end field :topic do read_only do !bindings[:object].new_record? end end field :referencable do read_only do !bindings[:object].new_record? 
end end end update do field :id do read_only true end field :text do read_only true end field :topic do read_only true end field :user do read_only true end field :notable do read_only true end field :user_id do read_only true visible false end field :referencable end end config.model 'Filter' do weight 1 list do field :id field :name field :identifier field :offers end end config.model 'LanguageFilter' do parent Filter end config.model 'AgeFilter' do parent Filter end config.model 'Target_AudienceFilter' do parent Filter end config.model 'User' do weight 1 list do field :id field :name field :email field :role field :created_at field :updated_at end edit do field :name do read_only do bindings[:object] != bindings[:view].current_user end end field :email do read_only do bindings[:object] != bindings[:view].current_user end end field :password do visible do bindings[:object] == bindings[:view].current_user end end end end config.model 'Keyword' do weight 1 end config.model 'Area' do weight 1 end config.model 'Contact' do weight 2 end config.model 'Subscription' do weight 2 end config.model 'UpdateRequest' do weight 2 end config.model 'ContactPersonOffer' do weight 3 end config.model 'OrganizationConnection' do weight 3 end config.model 'SearchLocation' do weight 3 field :query do read_only true end field :latitude do read_only true end field :longitude do read_only true end end end
# CocoaPods spec for UIColor-Hex, v1.1.0.
# NOTE(review): `s.platform = :ios, '7.1'` and `s.ios.deployment_target = '8.0'`
# disagree, and `osx.exclude_files` is moot on an iOS-only pod — confirm intent.
Pod::Spec.new do |s|
  s.name              = "UIColor-Hex"
  s.version           = "1.1.0"
  s.summary           = "Initializes the UIColor using hexadecimal."
  s.homepage          = "http://github.com/nakajijapan"
  s.license           = 'MIT'
  s.author            = { "nakajijapan" => "pp.kupepo.gattyanmo@gmail.com" }
  s.source            = { :git => "https://github.com/nakajijapan/UIColor-Hex.git", :tag => s.version.to_s }
  s.social_media_url  = 'https://twitter.com/nakajijapan'

  s.requires_arc      = true
  s.source_files      = 'Classes'
  s.osx.exclude_files = 'Classes/ios'
  s.platform          = :ios, '7.1'
  s.ios.deployment_target = '8.0'
end

# v1.1.1 — identical to v1.1.0 apart from the version bump.
Pod::Spec.new do |s|
  s.name              = "UIColor-Hex"
  s.version           = "1.1.1"
  s.summary           = "Initializes the UIColor using hexadecimal."
  s.homepage          = "http://github.com/nakajijapan"
  s.license           = 'MIT'
  s.author            = { "nakajijapan" => "pp.kupepo.gattyanmo@gmail.com" }
  s.source            = { :git => "https://github.com/nakajijapan/UIColor-Hex.git", :tag => s.version.to_s }
  s.social_media_url  = 'https://twitter.com/nakajijapan'

  s.requires_arc      = true
  s.source_files      = 'Classes'
  s.osx.exclude_files = 'Classes/ios'
  s.platform          = :ios, '7.1'
  s.ios.deployment_target = '8.0'
end
# CocoaPods spec for UIColorRGBA, v0.4.
Pod::Spec.new do |s|
  s.name         = "UIColorRGBA"
  s.version      = "0.4"
  s.summary      = "UIColor extension"
  s.description  = "Provides convenience UIColor method for setting color from HEX string in Swift"
  s.homepage     = "https://github.com/maximbilan/UIColorRGBA"
  s.license      = { :type => "MIT" }
  s.author       = { "Maxim Bilan" => "maximb.mail@gmail.com" }

  s.platform     = :ios, "8.0"
  s.source       = { :git => "https://github.com/maximbilan/UIColorRGBA.git", :tag => s.version.to_s }
  s.source_files = "Source", "*.{swift}"
  s.requires_arc = true
end

# Updated podspec — v0.4.1 bumps the version and rewords the description.
Pod::Spec.new do |s|
  s.name         = "UIColorRGBA"
  s.version      = "0.4.1"
  s.summary      = "UIColor extension"
  s.description  = "Provides a convenience UIColor method for the setting color from a HEX string in Swift"
  s.homepage     = "https://github.com/maximbilan/UIColorRGBA"
  s.license      = { :type => "MIT" }
  s.author       = { "Maxim Bilan" => "maximb.mail@gmail.com" }

  s.platform     = :ios, "8.0"
  s.source       = { :git => "https://github.com/maximbilan/UIColorRGBA.git", :tag => s.version.to_s }
  s.source_files = "Source", "*.{swift}"
  s.requires_arc = true
end
# encoding: UTF-8 module Spontaneous class Site < Spontaneous::Facet include Plugins::Site::Instance include Plugins::Site::Publishing include Plugins::Site::State include Plugins::Site::Selectors include Plugins::Site::Map include Plugins::Site::Search include Plugins::Site::Features include Plugins::Site::Schema include Plugins::Site::Level include Plugins::Site::Storage include Plugins::Site::URL include Plugins::Site::Hooks include Plugins::Site::Paths include Plugins::Site::Helpers attr_accessor :database attr_reader :environment, :mode def initialize(root, env, mode) super(root) @environment, @mode = env, mode end def initialize! load_config! connect_to_database! find_plugins! load_facets! init_facets! init_indexes! end def init_facets! facets.each do |facet| facet.init! end end def init_indexes! facets.each do |facet| facet.load_indexes! end end def load_facets! load_order.each do |category| facets.each { |facet| facet.load_files(category) } end end def reload! schema.reload! facets.each { |facet| facet.reload_all! } schema.validate! end def connect_to_database! self.database = Sequel.connect(db_settings) self.database.logger = logger if config.log_queries end def db_settings config_dir = paths.expanded(:config).first @db_settings = YAML.load_file(File.join(config_dir, "database.yml")) self.config.db = @db_settings[environment] end def config @config ||= Spontaneous::Config.new(environment, mode) end def find_plugins! paths.expanded(:plugins).each do |glob| Dir[glob].each do |path| load_plugin(path) end end end def plugins @plugins ||= [] end def facets [self] + plugins end def load_plugin(plugin_root) plugin = Spontaneous::Application::Plugin.new(plugin_root) self.plugins << plugin plugin end # used by publishing mechanism to place files into the appropriate subdirectories # in the public folder. 
# Site#file_namespace returns nil so that it's files are placed at the root def file_namespace nil end def revision_root @revision_dir ||= Pathname.new(@root / 'cache/revisions').realpath.to_s end def revision_dir(revision=nil, root = revision_root) root ||= revision_root return root / 'current' if revision.nil? root / Spontaneous::Paths.pad_revision_number(revision) end def media_dir(*path) media_root = root / "cache/media" return media_root if path.empty? File.join(media_root, *path) end def cache_dir(*path) cache_root = root / "cache" return cache_root if path.empty? File.join(cache_root, *path) end end end Make the cache path if it doesn't exist # encoding: UTF-8 module Spontaneous class Site < Spontaneous::Facet include Plugins::Site::Instance include Plugins::Site::Publishing include Plugins::Site::State include Plugins::Site::Selectors include Plugins::Site::Map include Plugins::Site::Search include Plugins::Site::Features include Plugins::Site::Schema include Plugins::Site::Level include Plugins::Site::Storage include Plugins::Site::URL include Plugins::Site::Hooks include Plugins::Site::Paths include Plugins::Site::Helpers attr_accessor :database attr_reader :environment, :mode def initialize(root, env, mode) super(root) @environment, @mode = env, mode end def initialize! load_config! connect_to_database! find_plugins! load_facets! init_facets! init_indexes! end def init_facets! facets.each do |facet| facet.init! end end def init_indexes! facets.each do |facet| facet.load_indexes! end end def load_facets! load_order.each do |category| facets.each { |facet| facet.load_files(category) } end end def reload! schema.reload! facets.each { |facet| facet.reload_all! } schema.validate! end def connect_to_database! 
self.database = Sequel.connect(db_settings) self.database.logger = logger if config.log_queries end def db_settings config_dir = paths.expanded(:config).first @db_settings = YAML.load_file(File.join(config_dir, "database.yml")) self.config.db = @db_settings[environment] end def config @config ||= Spontaneous::Config.new(environment, mode) end def find_plugins! paths.expanded(:plugins).each do |glob| Dir[glob].each do |path| load_plugin(path) end end end def plugins @plugins ||= [] end def facets [self] + plugins end def load_plugin(plugin_root) plugin = Spontaneous::Application::Plugin.new(plugin_root) self.plugins << plugin plugin end # used by publishing mechanism to place files into the appropriate subdirectories # in the public folder. # Site#file_namespace returns nil so that it's files are placed at the root def file_namespace nil end def revision_root @revision_dir ||= begin path = Pathname.new(@root / 'cache/revisions') path.mkpath unless path.exist? path.realpath.to_s end end def revision_dir(revision=nil, root = revision_root) root ||= revision_root return root / 'current' if revision.nil? root / Spontaneous::Paths.pad_revision_number(revision) end def media_dir(*path) media_root = root / "cache/media" return media_root if path.empty? File.join(media_root, *path) end def cache_dir(*path) cache_root = root / "cache" return cache_root if path.empty? File.join(cache_root, *path) end end end
#!/usr/bin/env ruby
# Hyper-parameter sweep driver: optionally runs Matlab preprocessing (-p 1),
# derives the feature count from mnist.train.txt, generates training subsets,
# then trains nn.jar over the cartesian product of the parameter lists.
#
# FIX: the shell command and skip message contained garbled "#(unknown)"
# placeholders where the interpolation "#{filename}" belongs (it is the only
# per-run name in scope and is the file whose existence gates the run);
# restored below so each run's output is redirected to its result file.

require 'optparse'
require_relative 'prepare_training_data.rb'

user_args = {}

opt_parser = OptionParser.new do |opt|
  opt.banner = "Usage example: ruby start.rb -p 1 to run with Matlab preprocessing"
  opt.separator ""
  opt.on("-p", "--preprocess g", Integer, "Preprocessing mode: Runs preprocessing if 1 otherwise not.") do |preprocess|
    user_args[:preprocess] = preprocess
  end
end

begin
  opt_parser.parse!
rescue OptionParser::MissingArgument
  puts "Incorrect input argument(s) passed\n"
  puts opt_parser.help
  exit
end

# Run preprocessing.
matlab_file = "preprocessing.m"
run_matlab = "matlab -nodisplay -nosplash -nodesktop -r \"run('${PWD}/#{matlab_file}'); quit\""
system(run_matlab) if user_args[:preprocess] == 1

# Determine number of features retrieved in preprocessing (columns minus label).
begin
  file = File.open("mnist.train.txt", "r")
  line = file.gets
  feature_size = line.split(',').count - 1
  file.close
rescue
  raise 'Somethings wrong with input file, try to run with argument -p 1 to generate features'
end

training_samples = [20, 100, 1000, 2000, 5000, 10000]
training_samples.each do |samples|
  # NOTE(review): class name is spelled "PrepareTraningData" (sic) in
  # prepare_training_data.rb — kept as-is to match that definition.
  PrepareTraningData.new("mnist.train.txt", samples)
end

number_neurons = [5, 10, 50, 100, 200, 500, 784, 1000]
number_epochs = [5, 10, 50, 100]
learning_rates = [0.0001, 0.001, 0.01, 0.1]
training_data_file_idxs = [6, 12, 30, 60, 600, 3000]
parameters_list = [number_neurons, number_epochs, learning_rates, training_data_file_idxs]
parameters_list = parameters_list.first.product(*parameters_list[1..-1])

# Train neuronal network.
parameters_list.each do |parameters|
  nn = parameters[0]
  e = parameters[1]
  lr = parameters[2]
  training_data_file_idx = parameters[3]
  puts "Parameters: nn=#{nn} e=#{e} lr=#{lr} training_data_file_idx=#{training_data_file_idx}"
  filename = "a_SIGMOID_f_#{feature_size}_n_#{nn}_o_10_l_#{lr}_e_#{e}_td_#{training_data_file_idx}.txt"
  unless File.exist?(filename)
    command = "java -jar -Xmx512m nn.jar -a SIGMOID -f #{feature_size} -n #{nn} -o 10 -l #{lr} -e #{e} mnist.train.#{training_data_file_idx}.txt mnist.test.txt mnist.train.output.txt mnist.test.output.txt > #{filename}"
    system(command)
  else
    puts "Skipping file #{filename} - it already exists!"
  end
end

# Adapted input parameter values
#!/usr/bin/env ruby
# Second revision: widens number_neurons (adds 2*784) and fixes epochs at 100.

require 'optparse'
require_relative 'prepare_training_data.rb'

user_args = {}

opt_parser = OptionParser.new do |opt|
  opt.banner = "Usage example: ruby start.rb -p 1 to run with Matlab preprocessing"
  opt.separator ""
  opt.on("-p", "--preprocess g", Integer, "Preprocessing mode: Runs preprocessing if 1 otherwise not.") do |preprocess|
    user_args[:preprocess] = preprocess
  end
end

begin
  opt_parser.parse!
rescue OptionParser::MissingArgument
  puts "Incorrect input argument(s) passed\n"
  puts opt_parser.help
  exit
end

# Run preprocessing.
matlab_file = "preprocessing.m"
run_matlab = "matlab -nodisplay -nosplash -nodesktop -r \"run('${PWD}/#{matlab_file}'); quit\""
system(run_matlab) if user_args[:preprocess] == 1

# Determine number of features retrieved in preprocessing.
begin
  file = File.open("mnist.train.txt", "r")
  line = file.gets
  feature_size = line.split(',').count - 1
  file.close
rescue
  raise 'Somethings wrong with input file, try to run with argument -p 1 to generate features'
end

training_samples = [20, 100, 1000, 2000, 5000, 10000]
training_samples.each do |samples|
  PrepareTraningData.new("mnist.train.txt", samples)
end

number_neurons = [5, 10, 50, 100, 200, 500, 784, 1000, 2*784]
number_epochs = [100]
learning_rates = [0.0001, 0.001, 0.01, 0.1]
training_data_file_idxs = [6, 12, 30, 60, 600, 3000]
parameters_list = [number_neurons, number_epochs, learning_rates, training_data_file_idxs]
parameters_list = parameters_list.first.product(*parameters_list[1..-1])

# Train neuronal network.
parameters_list.each do |parameters|
  nn = parameters[0]
  e = parameters[1]
  lr = parameters[2]
  training_data_file_idx = parameters[3]
  puts "Parameters: nn=#{nn} e=#{e} lr=#{lr} training_data_file_idx=#{training_data_file_idx}"
  filename = "a_SIGMOID_f_#{feature_size}_n_#{nn}_o_10_l_#{lr}_e_#{e}_td_#{training_data_file_idx}.txt"
  unless File.exist?(filename)
    # FIX: restored "#{filename}" where the source had garbled "#(unknown)".
    command = "java -jar -Xmx512m nn.jar -a SIGMOID -f #{feature_size} -n #{nn} -o 10 -l #{lr} -e #{e} mnist.train.#{training_data_file_idx}.txt mnist.test.txt mnist.train.output.txt mnist.test.output.txt > #{filename}"
    system(command)
  else
    puts "Skipping file #{filename} - it already exists!"
  end
end
#!/usr/bin/env ruby
# Hyper-parameter sweep driver (third revision — identical to the second).
#
# FIX: restored the garbled "#(unknown)" placeholders to the "#{filename}"
# interpolation they clearly stood for (the only per-run name in scope and the
# file whose existence gates the run).

require 'optparse'
require_relative 'prepare_training_data.rb'

user_args = {}

opt_parser = OptionParser.new do |opt|
  opt.banner = "Usage example: ruby start.rb -p 1 to run with Matlab preprocessing"
  opt.separator ""
  opt.on("-p", "--preprocess g", Integer, "Preprocessing mode: Runs preprocessing if 1 otherwise not.") do |preprocess|
    user_args[:preprocess] = preprocess
  end
end

begin
  opt_parser.parse!
rescue OptionParser::MissingArgument
  puts "Incorrect input argument(s) passed\n"
  puts opt_parser.help
  exit
end

# Run preprocessing.
matlab_file = "preprocessing.m"
run_matlab = "matlab -nodisplay -nosplash -nodesktop -r \"run('${PWD}/#{matlab_file}'); quit\""
system(run_matlab) if user_args[:preprocess] == 1

# Determine number of features retrieved in preprocessing (columns minus label).
begin
  file = File.open("mnist.train.txt", "r")
  line = file.gets
  feature_size = line.split(',').count - 1
  file.close
rescue
  raise 'Somethings wrong with input file, try to run with argument -p 1 to generate features'
end

training_samples = [20, 100, 1000, 2000, 5000, 10000]
training_samples.each do |samples|
  # NOTE(review): class name is spelled "PrepareTraningData" (sic) in
  # prepare_training_data.rb — kept as-is to match that definition.
  PrepareTraningData.new("mnist.train.txt", samples)
end

number_neurons = [5, 10, 50, 100, 200, 500, 784, 1000, 2*784]
number_epochs = [100]
learning_rates = [0.0001, 0.001, 0.01, 0.1]
training_data_file_idxs = [6, 12, 30, 60, 600, 3000]
parameters_list = [number_neurons, number_epochs, learning_rates, training_data_file_idxs]
parameters_list = parameters_list.first.product(*parameters_list[1..-1])

# Train neuronal network.
parameters_list.each do |parameters|
  nn = parameters[0]
  e = parameters[1]
  lr = parameters[2]
  training_data_file_idx = parameters[3]
  puts "Parameters: nn=#{nn} e=#{e} lr=#{lr} training_data_file_idx=#{training_data_file_idx}"
  filename = "a_SIGMOID_f_#{feature_size}_n_#{nn}_o_10_l_#{lr}_e_#{e}_td_#{training_data_file_idx}.txt"
  unless File.exist?(filename)
    command = "java -jar -Xmx512m nn.jar -a SIGMOID -f #{feature_size} -n #{nn} -o 10 -l #{lr} -e #{e} mnist.train.#{training_data_file_idx}.txt mnist.test.txt mnist.train.output.txt mnist.test.output.txt > #{filename}"
    system(command)
  else
    puts "Skipping file #{filename} - it already exists!"
  end
end

# storing generated output files in output folder
#!/usr/bin/env ruby
# Fourth revision: result files now live under output/.

require 'optparse'
require_relative 'prepare_training_data.rb'

user_args = {}

opt_parser = OptionParser.new do |opt|
  opt.banner = "Usage example: ruby start.rb -p 1 to run with Matlab preprocessing"
  opt.separator ""
  opt.on("-p", "--preprocess g", Integer, "Preprocessing mode: Runs preprocessing if 1 otherwise not.") do |preprocess|
    user_args[:preprocess] = preprocess
  end
end

begin
  opt_parser.parse!
rescue OptionParser::MissingArgument
  puts "Incorrect input argument(s) passed\n"
  puts opt_parser.help
  exit
end

# Run preprocessing.
matlab_file = "preprocessing.m"
run_matlab = "matlab -nodisplay -nosplash -nodesktop -r \"run('${PWD}/#{matlab_file}'); quit\""
system(run_matlab) if user_args[:preprocess] == 1

# Determine number of features retrieved in preprocessing.
begin
  file = File.open("mnist.train.txt", "r")
  line = file.gets
  feature_size = line.split(',').count - 1
  file.close
rescue
  raise 'Somethings wrong with input file, try to run with argument -p 1 to generate features'
end

training_samples = [20, 100, 1000, 2000, 5000, 10000]
training_samples.each do |samples|
  PrepareTraningData.new("mnist.train.txt", samples)
end

number_neurons = [5, 10, 50, 100, 200, 500, 784, 1000, 2*784]
number_epochs = [100]
learning_rates = [0.0001, 0.001, 0.01, 0.1]
training_data_file_idxs = [6, 12, 30, 60, 600, 3000]
parameters_list = [number_neurons, number_epochs, learning_rates, training_data_file_idxs]
parameters_list = parameters_list.first.product(*parameters_list[1..-1])

# Train neuronal network.
parameters_list.each do |parameters|
  nn = parameters[0]
  e = parameters[1]
  lr = parameters[2]
  training_data_file_idx = parameters[3]
  puts "Parameters: nn=#{nn} e=#{e} lr=#{lr} training_data_file_idx=#{training_data_file_idx}"
  filename = "a_SIGMOID_f_#{feature_size}_n_#{nn}_o_10_l_#{lr}_e_#{e}_td_#{training_data_file_idx}.txt"
  # FIX: restored "#{filename}" / "#{file_path_name}" where the source had
  # garbled "#(unknown)" placeholders.
  file_path_name = "output/#{filename}"
  unless File.exist?(file_path_name)
    command = "java -jar -Xmx512m nn.jar -a SIGMOID -f #{feature_size} -n #{nn} -o 10 -l #{lr} -e #{e} mnist.train.#{training_data_file_idx}.txt mnist.test.txt mnist.train.output.txt mnist.test.output.txt > #{file_path_name}"
    system(command)
  else
    puts "Skipping file #{file_path_name} - it already exists!"
  end
end
# BioCatalogue: app/helpers/api_helper.rb # # Copyright (c) 2008-2009, University of Manchester, The European Bioinformatics # Institute (EMBL-EBI) and the University of Southampton. # See license.txt for details. module ApiHelper def xml_root_attributes { "xmlns" => "http://www.sysmo-db.org/2009/xml/rest", "xmlns:xsi" => "http://www.w3.org/2001/XMLSchema-instance", "xsi:schemaLocation" => "http://www.sysmo-db.org/2009/xml/rest/schema-v1.xsd", "xmlns:xlink" => "http://www.w3.org/1999/xlink", "xmlns:dc" => "http://purl.org/dc/elements/1.1/", "xmlns:dcterms" => "http://purl.org/dc/terms/" } end def uri_for_path(path, *args) Sysmo::Api.uri_for_path(path, *args) end def uri_for_collection(resource_name, *args) Sysmo::Api.uri_for_collection(resource_name, *args) end def uri_for_object(resource_obj, *args) Sysmo::Api.uri_for_object(resource_obj, *args) end # def xml_for_filters(builder, filters, filter_key, results_resource_type) # return nil if builder.nil? or filters.blank? # # filter_key_humanised = Sysmo::Filtering.filter_type_to_display_name(filter_key).singularize.downcase # # filters.each do |f| # # attribs = xlink_attributes(generate_include_filter_url(filter_key, f["id"], results_resource_type.underscore), :title => xlink_title("Filter by #{filter_key_humanised}: '#{f['name']}'")) # attribs.update({ # :urlValue => f["id"], # :name => f["name"], # :count => f['count'], # :resourceType => results_resource_type # }) # # builder.filter attribs do # # xml_for_filters(builder, f["children"], filter_key, results_resource_type) # # end # # end # end def previous_link_xml_attributes(resource_uri) xlink_attributes(resource_uri, :title => xlink_title("Previous page of results")) end def next_link_xml_attributes(resource_uri) xlink_attributes(resource_uri, :title => xlink_title("Next page of results")) end def xlink_attributes(resource_uri, *args) attribs = { } attribs_in = args.extract_options! 
attribs["xlink:href"] = resource_uri attribs_in.each do |k,v| attribs["xlink:#{k.to_s}"] = v end return attribs end def xlink_title(item, item_type_name=nil) case item when String return item else if item_type_name.blank? item_type_name = case item when User "User" else item.class.name.titleize end end return "#{item_type_name} - #{display_name(item, false)}" end end def display_name item,escape_html=false result = item.title if item.respond_to?("title") result = item.name if item.respond_to?("name") && result.nil? result = h(result) if escape_html return result end def dc_xml_tag(builder, term, value, *attributes) builder.tag! "dc:#{term}", value, attributes end def dcterms_xml_tag(builder, term, value, *attributes) # For dates... if [ :created, :modified, "created", "modified" ].include?(term) value = value.iso8601 end builder.tag! "dcterms:#{term}", value, attributes end def core_xml builder,object dc_core_xml builder,object builder.tag! "uuid",object.uuid if object.respond_to?("uuid") end def dc_core_xml builder,object dc_xml_tag builder,:title,object.title if object.respond_to?("title") dc_xml_tag builder,:description,object.description if object.respond_to?("description") dcterms_xml_tag builder,:created,object.created_at if object.respond_to?("created_at") dcterms_xml_tag builder,:modified,object.updated_at if object.respond_to?("updated_at") end end added creator/submitter info to core_xml # BioCatalogue: app/helpers/api_helper.rb # # Copyright (c) 2008-2009, University of Manchester, The European Bioinformatics # Institute (EMBL-EBI) and the University of Southampton. # See license.txt for details. 
# NOTE(review): superseded revision of ApiHelper — a later revision of this
# module (2010 namespace, Seek::Api) appears further down in this dump.
# Helpers for rendering the REST API's XML responses (Builder-based views).
module ApiHelper

  # Namespace attributes placed on every XML document root.
  def xml_root_attributes
    { "xmlns" => "http://www.sysmo-db.org/2009/xml/rest",
      "xmlns:xsi" => "http://www.w3.org/2001/XMLSchema-instance",
      "xsi:schemaLocation" => "http://www.sysmo-db.org/2009/xml/rest/schema-v1.xsd",
      "xmlns:xlink" => "http://www.w3.org/1999/xlink",
      "xmlns:dc" => "http://purl.org/dc/elements/1.1/",
      "xmlns:dcterms" => "http://purl.org/dc/terms/" }
  end

  # Thin delegations to Sysmo::Api URI builders.
  def uri_for_path(path, *args)
    Sysmo::Api.uri_for_path(path, *args)
  end

  def uri_for_collection(resource_name, *args)
    Sysmo::Api.uri_for_collection(resource_name, *args)
  end

  def uri_for_object(resource_obj, *args)
    Sysmo::Api.uri_for_object(resource_obj, *args)
  end

  # def xml_for_filters(builder, filters, filter_key, results_resource_type)
  #   return nil if builder.nil? or filters.blank?
  #
  #   filter_key_humanised = Sysmo::Filtering.filter_type_to_display_name(filter_key).singularize.downcase
  #
  #   filters.each do |f|
  #
  #     attribs = xlink_attributes(generate_include_filter_url(filter_key, f["id"], results_resource_type.underscore), :title => xlink_title("Filter by #{filter_key_humanised}: '#{f['name']}'"))
  #     attribs.update({
  #       :urlValue => f["id"],
  #       :name => f["name"],
  #       :count => f['count'],
  #       :resourceType => results_resource_type
  #     })
  #
  #     builder.filter attribs do
  #
  #       xml_for_filters(builder, f["children"], filter_key, results_resource_type)
  #
  #     end
  #
  #   end
  # end

  # xlink attribute hashes for pagination links.
  def previous_link_xml_attributes(resource_uri)
    xlink_attributes(resource_uri, :title => xlink_title("Previous page of results"))
  end

  def next_link_xml_attributes(resource_uri)
    xlink_attributes(resource_uri, :title => xlink_title("Next page of results"))
  end

  # Builds a hash of xlink:* attributes for +resource_uri+; extra options
  # from +args+ are each prefixed with "xlink:".
  def xlink_attributes(resource_uri, *args)
    attribs = { }

    attribs_in = args.extract_options!

    attribs["xlink:href"] = resource_uri

    attribs_in.each do |k,v|
      attribs["xlink:#{k.to_s}"] = v
    end

    return attribs
  end

  # Human-readable xlink:title for +item+. Strings pass through untouched;
  # other objects are rendered as "<TypeName> - <display name>".
  def xlink_title(item, item_type_name=nil)
    case item
      when String
        return item
      else
        if item_type_name.blank?
          item_type_name = case item
            when User
              "User"
            else
              item.class.name.titleize
          end
        end
        return "#{item_type_name} - #{display_name(item, false)}"
    end
  end

  # Preferred label for +item+: title, falling back to name;
  # optionally HTML-escaped via Rails' h().
  def display_name item,escape_html=false
    result = item.title if item.respond_to?("title")
    result = item.name if item.respond_to?("name") && result.nil?
    result = h(result) if escape_html
    return result
  end

  # Emits a Dublin Core element, e.g. <dc:title>.
  def dc_xml_tag(builder, term, value, *attributes)
    builder.tag! "dc:#{term}", value, attributes
  end

  # Emits a DC terms element; date-like terms are ISO-8601 formatted.
  def dcterms_xml_tag(builder, term, value, *attributes)
    # For dates...
    if [ :created, :modified, "created", "modified" ].include?(term)
      value = value.iso8601
    end
    builder.tag! "dcterms:#{term}", value, attributes
  end

  # DC metadata plus uuid and a <submitter> link for +object+.
  def core_xml builder,object
    dc_core_xml builder,object
    builder.tag! "uuid",object.uuid if object.respond_to?("uuid")
    submitter = determine_submitter object
    builder.tag! "submitter",submitter.name,xlink_attributes(uri_for_object(submitter),:resourceType => submitter.class.name) if submitter
  end

  # Dublin Core title/description/creator/created/modified, each emitted
  # only when the object exposes the corresponding attribute.
  def dc_core_xml builder,object
    submitter = determine_submitter object
    dc_xml_tag builder,:title,object.title if object.respond_to?("title")
    dc_xml_tag builder,:description,object.description if object.respond_to?("description")
    dc_xml_tag builder,:creator,submitter.name if submitter
    dcterms_xml_tag builder,:created,object.created_at if object.respond_to?("created_at")
    dcterms_xml_tag builder,:modified,object.updated_at if object.respond_to?("updated_at")
  end

  # Owner takes precedence over contributor; nil when neither exists.
  def determine_submitter object
    return object.owner if object.respond_to?("owner")
    return object.contributor if object.respond_to?("contributor")
    return nil
  end

end
# BioCatalogue: app/helpers/api_helper.rb # # Copyright (c) 2008-2009, University of Manchester, The European Bioinformatics # Institute (EMBL-EBI) and the University of Southampton. # See license.txt for details. module ApiHelper def xml_root_attributes { "xmlns" => "http://www.sysmo-db.org/2010/xml/rest", "xmlns:xsi" => "http://www.w3.org/2001/XMLSchema-instance", "xsi:schemaLocation" => "http://www.sysmo-db.org/2010/xml/rest/schema-v1.xsd", "xmlns:xlink" => "http://www.w3.org/1999/xlink", "xmlns:dc" => "http://purl.org/dc/elements/1.1/", "xmlns:dcterms" => "http://purl.org/dc/terms/" } end def uri_for_path(path, *args) Seek::Api.uri_for_path(path, *args) end def api_partial_path_for_item object Seek::Api.api_partial_path_for_item(object) end def uri_for_collection(resource_name, *args) Seek::Api.uri_for_collection(resource_name, *args) end def uri_for_object(resource_obj, *args) Seek::Api.uri_for_object(resource_obj, *args) end def previous_link_xml_attributes(resource_uri) xlink_attributes(resource_uri, :title => xlink_title("Previous page of results")) end def next_link_xml_attributes(resource_uri) xlink_attributes(resource_uri, :title => xlink_title("Next page of results")) end def core_xlink object,include_title=true if (object.class.name.include?("::Version")) xlink=xlink_attributes(uri_for_object(object.parent,{:params=>{:version=>object.version}})) else xlink=xlink_attributes(uri_for_object(object)) end xlink["xlink:title"]=xlink_title(object) unless !include_title || display_name(object,false).nil? xlink["id"]=object.id xlink["uuid"]=object.uuid if object.respond_to?("uuid") xlink["resourceType"] = object.class.name.include?("::Version") ? object.parent.class.name : object.class.name return xlink end #requires a slightly different handling to core_xlink because the route is nested def avatar_xlink avatar return {"xsi:nil"=>"true"} if avatar.nil? 
uri=uri_for_object(avatar.owner) uri="#{uri}/avatars/#{avatar.id}" xlink=xlink_attributes(uri) xlink["id"]=avatar.id xlink["resourceType"]=avatar.class.name return xlink end def xlink_attributes(resource_uri, *args) attribs = { } attribs_in = args.extract_options! attribs["xlink:href"] = resource_uri attribs_in.each do |k,v| attribs["xlink:#{k.to_s}"] = v end return attribs end def xlink_title(item, item_type_name=nil) case item when String return item else if item_type_name.blank? item_type_name = case item when User "User" else if (item.class.name.include?("::Version")) item.parent.class.name else item.class.name end end end #return "#{item_type_name} - #{display_name(item, false)}" return "#{display_name(item, false)}" end end def display_name item,escape_html=false result = nil result = item.title if item.respond_to?("title") result = item.name if item.respond_to?("name") && result.nil? result = h(result) if escape_html && !result.nil? return result end def dc_xml_tag(builder, term, value, *attributes) builder.tag! "dc:#{term}", value, attributes end def dcterms_xml_tag(builder, term, value, *attributes) # For dates... if [ :created, :modified, "created", "modified" ].include?(term) value = value.iso8601 end builder.tag! "dcterms:#{term}", value, attributes end def core_xml builder,object builder.tag! "id",object.id dc_core_xml builder,object builder.tag! "uuid",object.uuid if object.respond_to?("uuid") end def extended_xml builder,object submitter = determine_submitter object builder.tag! "submitter" do api_partial(builder,submitter) end if submitter builder.tag! "organisms" do organisms=[] organisms=object.organisms if object.respond_to?("organisms") organisms << object.organism if object.respond_to?("organism") && object.organism api_partial_collection builder,organisms end if object.respond_to?("organism") || object.respond_to?("organisms") builder.tag! 
"creators" do api_partial_collection builder,(object.creators || []) end if object.respond_to?("creators") unless hide_contact_details? builder.tag! "email",object.email if object.respond_to?("email") builder.tag! "webpage",object.webpage if object.respond_to?("webpage") builder.tag! "internal_webpage",object.internal_webpage if object.respond_to?("internal_webpage") builder.tag! "phone",object.phone if object.respond_to?("phone") end builder.tag! "bioportal_concepts" do concepts=[] concepts=object.bioportal_concepts if object.respond_to?("bioportal_concepts") concepts << object.bioportal_concept if object.respond_to?("bioportal_concept") concepts.compact.each do |concept| builder.tag! "bioportal_concept" do builder.tag! "ontology_id",concept.ontology_id builder.tag! "ontology_version_id",concept.ontology_version_id builder.tag! "concept_id",concept.concept_uri end end end if object.respond_to?("bioportal_concept") || object.respond_to?("bioportal_concepts") builder.tag! "content_type",object.content_type if object.respond_to?("content_type") builder.tag! "version",object.version if object.respond_to?("version") builder.tag! "revision_comments",object.revision_comments if object.respond_to?("revision_comments") builder.tag! "latest_version",core_xlink(object.latest_version) if object.respond_to?("latest_version") if (object.respond_to?("versions")) builder.tag! "versions" do object.versions.each do |v| builder.tag! "version",core_xlink(v) end end end policy_xml builder,object if try_block{current_user.person.is_admin?} && object.respond_to?("policy") blob_xml builder,object.content_blob if object.respond_to?("content_blob") if object.respond_to?("avatar") builder.tag! "avatars" do builder.tag! "avatar",avatar_xlink(object.avatar) unless object.avatar.nil? end end end def policy_xml builder,asset policy = asset.policy unless policy.nil? builder.tag! "policy" do dc_core_xml builder,policy builder.tag! "sharing_scope",policy.sharing_scope builder.tag! 
"access_type",policy.access_type builder.tag! "use_blacklist",policy.use_blacklist ? policy.use_blacklist : false builder.tag! "use_whitelist",policy.use_whitelist ? policy.use_whitelist : false builder.tag! "permissions" do policy.permissions.select{|p| p.contributor_type!="FavouriteGroup"}.each do |permission| builder.tag! "permission" do dc_core_xml builder,permission builder.tag! "contributor",core_xlink(permission.contributor) builder.tag! "access_type",permission.access_type end end end end else builder.tag! "policy",{"xsi:nil"=>"true"} end end def resource_xml builder,resource builder.tag! "resource",core_xlink(resource) end def blob_xml builder,blob builder.tag! "blob",core_xlink(blob) do builder.tag! "uuid",blob.uuid builder.tag! "md5sum",blob.md5sum builder.tag! "url",blob.url builder.tag! "is_remote",!blob.file_exists? end end def assets_list_xml builder,assets,tag="assets",include_core=true,include_resource=true builder.tag! tag do assets.each do |asset| asset_xml builder,asset,include_core,include_resource end end end def associated_resources_xml builder, object #FIXME: this needs fixing, with some refactoring of the version->asset linkage - see http://www.mygrid.org.uk/dev/issues/browse/SYSMO-362 object=object.parent if (object.class.name.include?("::Version")) associated = get_related_resources object builder.tag! "associated" do associated.keys.each do |key| attr={} attr[:total]=associated[key][:items].count if (associated[key][:hidden_count]) attr[:total]=attr[:total]+associated[key][:hidden_count] attr[:hidden_count]=associated[key][:hidden_count] end generic_list_xml(builder, associated[key][:items],key.underscore.pluralize,attr) end end end def generic_list_xml builder,list,tag,attr={} builder.tag! tag,attr do list.each do |item| if (item.class.name.include?("::Version")) #versioned items need to be handled slightly differently. parent=item.parent builder.tag! parent.class.name.underscore,core_xlink(item) else builder.tag! 
item.class.name.underscore,core_xlink(item) end end end end def api_partial builder,object, is_root=false parent_object = object.class.name.include?("::Version") ? object.parent : object path=api_partial_path_for_item(parent_object) classname=parent_object.class.name.underscore render :partial=>path,:locals=>{:parent_xml => builder,:is_root=>is_root,classname.to_sym=>object} end def api_partial_collection builder,objects,is_root=false objects.each{|o| api_partial builder,o,is_root} end def parent_child_elements builder,object builder.tag! "parents" do api_partial_collection(builder, object.parents, is_root = false) end builder.tag! "children" do api_partial_collection(builder, object.children, is_root = false) end end def dc_core_xml builder,object submitter = determine_submitter object dc_xml_tag builder,:title,object.title if object.respond_to?("title") dc_xml_tag builder,:description,object.description if object.respond_to?("description") dcterms_xml_tag builder,:created,object.created_at if object.respond_to?("created_at") dcterms_xml_tag builder,:modified,object.updated_at if object.respond_to?("updated_at") dc_xml_tag builder,:creator,submitter.name if submitter end def determine_submitter object #FIXME: needs to be the creators for assets return object.owner if object.respond_to?("owner") result = object.contributor if object.respond_to?("contributor") && !object.kind_of?(Permission) if (result) return result if result.instance_of?(Person) return result.person if result.instance_of?(User) end return nil end def assay_data_relationships_xml builder,assay relationships={} assay.assay_assets.each do |aa| if aa.relationship_type relationships[aa.relationship_type.title] ||= [] relationships[aa.relationship_type.title] << aa.asset end end builder.tag! "data_relationships" do relationships.keys.each do |k| generic_list_xml(builder, relationships[k],"data_relationship",{:type=>k}) end end end end no content type attribute for publications. 
# BioCatalogue: app/helpers/api_helper.rb
#
# Copyright (c) 2008-2009, University of Manchester, The European Bioinformatics
# Institute (EMBL-EBI) and the University of Southampton.
# See license.txt for details.

# Helpers used by the Builder-based XML views of the REST API.
# Conventions in this revision: URIs come from Seek::Api; versioned items
# (classes whose name contains "::Version") link through their parent asset.
module ApiHelper

  # Namespace attributes placed on every XML document root.
  def xml_root_attributes
    { "xmlns" => "http://www.sysmo-db.org/2010/xml/rest",
      "xmlns:xsi" => "http://www.w3.org/2001/XMLSchema-instance",
      "xsi:schemaLocation" => "http://www.sysmo-db.org/2010/xml/rest/schema-v1.xsd",
      "xmlns:xlink" => "http://www.w3.org/1999/xlink",
      "xmlns:dc" => "http://purl.org/dc/elements/1.1/",
      "xmlns:dcterms" => "http://purl.org/dc/terms/" }
  end

  # Thin delegations to Seek::Api URI/partial-path builders.
  def uri_for_path(path, *args)
    Seek::Api.uri_for_path(path, *args)
  end

  def api_partial_path_for_item object
    Seek::Api.api_partial_path_for_item(object)
  end

  def uri_for_collection(resource_name, *args)
    Seek::Api.uri_for_collection(resource_name, *args)
  end

  def uri_for_object(resource_obj, *args)
    Seek::Api.uri_for_object(resource_obj, *args)
  end

  # xlink attribute hashes for pagination links.
  def previous_link_xml_attributes(resource_uri)
    xlink_attributes(resource_uri, :title => xlink_title("Previous page of results"))
  end

  def next_link_xml_attributes(resource_uri)
    xlink_attributes(resource_uri, :title => xlink_title("Next page of results"))
  end

  # xlink attributes (href/title/id/uuid/resourceType) for +object+.
  # Versioned items link to their parent's URI with a ?version param,
  # and report the parent's class as resourceType.
  def core_xlink object,include_title=true
    if (object.class.name.include?("::Version"))
      xlink=xlink_attributes(uri_for_object(object.parent,{:params=>{:version=>object.version}}))
    else
      xlink=xlink_attributes(uri_for_object(object))
    end
    xlink["xlink:title"]=xlink_title(object) unless !include_title || display_name(object,false).nil?
    xlink["id"]=object.id
    xlink["uuid"]=object.uuid if object.respond_to?("uuid")
    xlink["resourceType"] = object.class.name.include?("::Version") ? object.parent.class.name : object.class.name
    return xlink
  end

  #requires a slightly different handling to core_xlink because the route is nested
  # (avatar URIs are nested under their owner's URI).
  def avatar_xlink avatar
    return {"xsi:nil"=>"true"} if avatar.nil?
    uri=uri_for_object(avatar.owner)
    uri="#{uri}/avatars/#{avatar.id}"
    xlink=xlink_attributes(uri)
    xlink["id"]=avatar.id
    xlink["resourceType"]=avatar.class.name
    return xlink
  end

  # Builds a hash of xlink:* attributes for +resource_uri+; extra options
  # from +args+ are each prefixed with "xlink:".
  def xlink_attributes(resource_uri, *args)
    attribs = { }
    attribs_in = args.extract_options!
    attribs["xlink:href"] = resource_uri
    attribs_in.each do |k,v|
      attribs["xlink:#{k.to_s}"] = v
    end
    return attribs
  end

  # Human-readable xlink:title for +item+. Strings pass through untouched.
  # Note: the computed item_type_name is currently unused in the return value
  # (the "#{item_type_name} - ..." form is commented out below).
  def xlink_title(item, item_type_name=nil)
    case item
      when String
        return item
      else
        if item_type_name.blank?
          item_type_name = case item
            when User
              "User"
            else
              if (item.class.name.include?("::Version"))
                item.parent.class.name
              else
                item.class.name
              end
          end
        end
        #return "#{item_type_name} - #{display_name(item, false)}"
        return "#{display_name(item, false)}"
    end
  end

  # Preferred label for +item+: title, falling back to name; optionally
  # HTML-escaped via Rails' h(). Returns nil when neither is available.
  def display_name item,escape_html=false
    result = nil
    result = item.title if item.respond_to?("title")
    result = item.name if item.respond_to?("name") && result.nil?
    result = h(result) if escape_html && !result.nil?
    return result
  end

  # Emits a Dublin Core element, e.g. <dc:title>.
  def dc_xml_tag(builder, term, value, *attributes)
    builder.tag! "dc:#{term}", value, attributes
  end

  # Emits a DC terms element; date-like terms are ISO-8601 formatted.
  def dcterms_xml_tag(builder, term, value, *attributes)
    # For dates...
    if [ :created, :modified, "created", "modified" ].include?(term)
      value = value.iso8601
    end
    builder.tag! "dcterms:#{term}", value, attributes
  end

  # id, DC metadata and uuid — the minimal common element set.
  def core_xml builder,object
    builder.tag! "id",object.id
    dc_core_xml builder,object
    builder.tag! "uuid",object.uuid if object.respond_to?("uuid")
  end

  # The full element set for a resource's detail view. Each section is
  # emitted only when +object+ exposes the relevant attribute(s).
  def extended_xml builder,object
    submitter = determine_submitter object
    builder.tag! "submitter" do
      api_partial(builder,submitter)
    end if submitter
    # Merge singular `organism` and plural `organisms` into one list.
    builder.tag! "organisms" do
      organisms=[]
      organisms=object.organisms if object.respond_to?("organisms")
      organisms << object.organism if object.respond_to?("organism") && object.organism
      api_partial_collection builder,organisms
    end if object.respond_to?("organism") || object.respond_to?("organisms")
    builder.tag! "creators" do
      api_partial_collection builder,(object.creators || [])
    end if object.respond_to?("creators")
    # Contact details are suppressed when the app hides them.
    unless hide_contact_details?
      builder.tag! "email",object.email if object.respond_to?("email")
      builder.tag! "webpage",object.webpage if object.respond_to?("webpage")
      builder.tag! "internal_webpage",object.internal_webpage if object.respond_to?("internal_webpage")
      builder.tag! "phone",object.phone if object.respond_to?("phone")
    end
    # Merge singular/plural bioportal concept associations, skipping nils.
    builder.tag! "bioportal_concepts" do
      concepts=[]
      concepts=object.bioportal_concepts if object.respond_to?("bioportal_concepts")
      concepts << object.bioportal_concept if object.respond_to?("bioportal_concept")
      concepts.compact.each do |concept|
        builder.tag! "bioportal_concept" do
          builder.tag! "ontology_id",concept.ontology_id
          builder.tag! "ontology_version_id",concept.ontology_version_id
          builder.tag! "concept_id",concept.concept_uri
        end
      end
    end if object.respond_to?("bioportal_concept") || object.respond_to?("bioportal_concepts")
    # Publications deliberately get no content_type element.
    builder.tag! "content_type",object.content_type if object.respond_to?("content_type") && !object.is_a?(Publication)
    builder.tag! "version",object.version if object.respond_to?("version")
    builder.tag! "revision_comments",object.revision_comments if object.respond_to?("revision_comments")
    builder.tag! "latest_version",core_xlink(object.latest_version) if object.respond_to?("latest_version")
    if (object.respond_to?("versions"))
      builder.tag! "versions" do
        object.versions.each do |v|
          builder.tag! "version",core_xlink(v)
        end
      end
    end
    # Policy details are admin-only; try_block guards against a nil user/person.
    policy_xml builder,object if try_block{current_user.person.is_admin?} && object.respond_to?("policy")
    blob_xml builder,object.content_blob if object.respond_to?("content_blob")
    if object.respond_to?("avatar")
      builder.tag! "avatars" do
        builder.tag! "avatar",avatar_xlink(object.avatar) unless object.avatar.nil?
      end
    end
  end

  # Sharing policy of +asset+, including its non-FavouriteGroup permissions;
  # emits an xsi:nil policy element when the asset has no policy.
  def policy_xml builder,asset
    policy = asset.policy
    unless policy.nil?
      builder.tag! "policy" do
        dc_core_xml builder,policy
        builder.tag! "sharing_scope",policy.sharing_scope
        builder.tag! "access_type",policy.access_type
        builder.tag! "use_blacklist",policy.use_blacklist ? policy.use_blacklist : false
        builder.tag! "use_whitelist",policy.use_whitelist ? policy.use_whitelist : false
        builder.tag! "permissions" do
          policy.permissions.select{|p| p.contributor_type!="FavouriteGroup"}.each do |permission|
            builder.tag! "permission" do
              dc_core_xml builder,permission
              builder.tag! "contributor",core_xlink(permission.contributor)
              builder.tag! "access_type",permission.access_type
            end
          end
        end
      end
    else
      builder.tag! "policy",{"xsi:nil"=>"true"}
    end
  end

  # A single <resource> xlink element.
  def resource_xml builder,resource
    builder.tag! "resource",core_xlink(resource)
  end

  # Content blob metadata; is_remote is the inverse of local file existence.
  def blob_xml builder,blob
    builder.tag! "blob",core_xlink(blob) do
      builder.tag! "uuid",blob.uuid
      builder.tag! "md5sum",blob.md5sum
      builder.tag! "url",blob.url
      builder.tag! "is_remote",!blob.file_exists?
    end
  end

  # Wraps a list of assets in a container tag (default <assets>).
  # NOTE(review): include_core/include_resource are forwarded to asset_xml,
  # which is defined elsewhere — verify its signature matches.
  def assets_list_xml builder,assets,tag="assets",include_core=true,include_resource=true
    builder.tag! tag do
      assets.each do |asset|
        asset_xml builder,asset,include_core,include_resource
      end
    end
  end

  # Emits the <associated> section: related resources grouped by type, with
  # total/hidden counts as attributes.
  def associated_resources_xml builder, object
    #FIXME: this needs fixing, with some refactoring of the version->asset linkage - see http://www.mygrid.org.uk/dev/issues/browse/SYSMO-362
    object=object.parent if (object.class.name.include?("::Version"))
    associated = get_related_resources object
    builder.tag! "associated" do
      associated.keys.each do |key|
        attr={}
        attr[:total]=associated[key][:items].count
        if (associated[key][:hidden_count])
          attr[:total]=attr[:total]+associated[key][:hidden_count]
          attr[:hidden_count]=associated[key][:hidden_count]
        end
        generic_list_xml(builder, associated[key][:items],key.underscore.pluralize,attr)
      end
    end
  end

  # A container tag holding one xlink element per item; versioned items are
  # tagged with their parent's (underscored) class name.
  def generic_list_xml builder,list,tag,attr={}
    builder.tag! tag,attr do
      list.each do |item|
        if (item.class.name.include?("::Version")) #versioned items need to be handled slightly differently.
          parent=item.parent
          builder.tag! parent.class.name.underscore,core_xlink(item)
        else
          builder.tag! item.class.name.underscore,core_xlink(item)
        end
      end
    end
  end

  # Renders the api/… partial for +object+, passing the builder through as
  # :parent_xml; versioned objects use their parent's partial and local name.
  def api_partial builder,object, is_root=false
    parent_object =  object.class.name.include?("::Version") ? object.parent : object
    path=api_partial_path_for_item(parent_object)
    classname=parent_object.class.name.underscore
    render :partial=>path,:locals=>{:parent_xml => builder,:is_root=>is_root,classname.to_sym=>object}
  end

  def api_partial_collection builder,objects,is_root=false
    objects.each{|o| api_partial builder,o,is_root}
  end

  # <parents> and <children> sections for hierarchical objects.
  def parent_child_elements builder,object
    builder.tag! "parents" do
      api_partial_collection(builder, object.parents, is_root = false)
    end
    builder.tag! "children" do
      api_partial_collection(builder, object.children, is_root = false)
    end
  end

  # Dublin Core title/description/created/modified/creator, each emitted
  # only when +object+ exposes the corresponding attribute.
  def dc_core_xml builder,object
    submitter = determine_submitter object
    dc_xml_tag builder,:title,object.title if object.respond_to?("title")
    dc_xml_tag builder,:description,object.description if object.respond_to?("description")
    dcterms_xml_tag builder,:created,object.created_at if object.respond_to?("created_at")
    dcterms_xml_tag builder,:modified,object.updated_at if object.respond_to?("updated_at")
    dc_xml_tag builder,:creator,submitter.name if submitter
  end

  # Resolves the submitting Person: owner first, then contributor (unwrapping
  # a User to its person). Permissions are excluded. Returns nil if unknown.
  def determine_submitter object
    #FIXME: needs to be the creators for assets
    return object.owner if object.respond_to?("owner")
    result = object.contributor if object.respond_to?("contributor") && !object.kind_of?(Permission)
    if (result)
      return result if result.instance_of?(Person)
      return result.person if result.instance_of?(User)
    end
    return nil
  end

  # Groups an assay's assets by relationship-type title and emits them under
  # <data_relationships>, one list per relationship type.
  def assay_data_relationships_xml builder,assay
    relationships={}
    assay.assay_assets.each do |aa|
      if aa.relationship_type
        relationships[aa.relationship_type.title] ||= []
        relationships[aa.relationship_type.title] << aa.asset
      end
    end
    builder.tag! "data_relationships" do
      relationships.keys.each do |k|
        generic_list_xml(builder, relationships[k],"data_relationship",{:type=>k})
      end
    end
  end
end
# Original revision: the fill ratio is completely unclamped, so now > max
# overflows the bar and negative now produces negative CSS widths.
module GuiHelper
  def progress_bar(now, max, classes='green', width=200)
    ratio = 1.0*now/max
    "<div class='progressbar #{classes}' style='width: #{width}px'>
    <div class='bar1' style='width:#{width*ratio}px'></div>
    <div class='bar2' style='width:#{width*ratio}px'></div>
    <div class='bar3' style='width:#{width*ratio}px'></div>
    </div>".html_safe
  end

  def warning_progress_bar(now, max, width=200)
    ratio = 1.0*now/max
    classes = case ratio
              when 0..0.6 then :green
              when 0.6..0.8 then :yellow
              else :red
              end
    progress_bar now, max, classes, width
  end
end

# Fix invalid width for progress bars
# (review note: the upstream fix clamped only the upper bound; this revision
# also clamps negative ratios, which previously produced negative CSS widths
# and misclassified the warning colour.)
module GuiHelper
  # Renders a three-layered CSS progress bar as an HTML-safe string.
  #
  # now     - the current value.
  # max     - the value representing a 100% full bar.
  # classes - extra CSS class(es) controlling the bar colour.
  # width   - total bar width in pixels.
  #
  # The fill ratio is clamped to [0, 1] so out-of-range input (now > max or
  # now < 0) can never emit an invalid CSS width.
  def progress_bar(now, max, classes = 'green', width = 200)
    ratio = bounded_ratio(now, max)
    "<div class='progressbar #{classes}' style='width: #{width}px'>
    <div class='bar1' style='width:#{width * ratio}px'></div>
    <div class='bar2' style='width:#{width * ratio}px'></div>
    <div class='bar3' style='width:#{width * ratio}px'></div>
    </div>".html_safe
  end

  # As progress_bar, but the colour is derived from how full the bar is:
  # green up to 60%, yellow up to 80%, red beyond.
  def warning_progress_bar(now, max, width = 200)
    ratio = bounded_ratio(now, max)
    classes = case ratio
              when 0..0.6 then :green
              when 0.6..0.8 then :yellow
              else :red
              end
    progress_bar now, max, classes, width
  end

  private

  # Fraction of the bar to fill, clamped to the [0.0, 1.0] range.
  # Shared by both public helpers so the clamping logic exists once.
  def bounded_ratio(now, max)
    [[1.0 * now / max, 1.0].min, 0.0].max
  end
end
require 'squid/base'

# NOTE(review): superseded revision of Squid::Grid — this version spaces
# gridlines as bounds.height / data.size, which leaves the proportions off;
# the corrected revision follows below.
module Squid
  # Adds all the gridlines and axis values to the graph.
  class Grid < Base
    # Draws one horizontal gridline per row of labels, walking down from the
    # top of the bounding box. The bottom-most row gets labels but no line.
    def draw
      y = bounds.top
      data.each.with_index do |labels, index|
        y -= bounds.height / data.size
        draw_gridline y unless index == data.size - 1
        labels.each{|position, label| draw_label label, y, position}
      end
    end

  private

    # Strokes a faint (25% opacity, 0.5pt) horizontal line at height +y+,
    # inset past the axis-label areas on both sides.
    def draw_gridline(y)
      transparent(0.25) do
        with(line_width: 0.5) do
          left = width(:left) + label_padding(:left)
          right = bounds.right - width(:right) - label_padding(:right)
          stroke_horizontal_line left, right, at: y
        end
      end
    end

    # Renders an axis value next to a gridline; +position+ is :left or :right.
    def draw_label(label, y, position)
      label_options = {height: text_height, size: font_size}
      label_options[:width] = width position
      label_options[:align] = align position
      label_options[:at] = [delta(position), y + text_height / 2]
      text_box label, text_options.merge(label_options)
    end

    # If labels are wider than this, they will be shrunk to fit
    def max_width
      100
    end

    # Left-hand labels are right-aligned (toward the line) and vice versa.
    def align(position)
      position == :left ? :right : :left
    end

    # x-coordinate where the label box starts for the given side.
    def delta(position)
      position == :left ? bounds.left : bounds.right - width(position)
    end

    # Configured label-area width for a side, capped at max_width.
    def width(position)
      [@settings.fetch(position, 0), max_width].min
    end

    # The horizontal space between the label and the gridline
    def label_padding(position)
      @settings.key?(position) ? padding : 0
    end
  end
end

# Fix proportions on grid

require 'squid/base'

module Squid
  # Adds all the gridlines and axis values to the graph.
  class Grid < Base
    # Draws gridlines top-down. Unlike the earlier revision, the line is
    # drawn before stepping down, and the step divides @settings[:height] by
    # (data.size - 1) so the first and last rows land on the box edges.
    # NOTE(review): data.size == 1 makes the divisor zero — confirm callers
    # always supply at least two rows of labels.
    def draw
      y = bounds.top
      data.each.with_index do |labels, index|
        draw_gridline y unless index == data.size - 1
        labels.each{|position, label| draw_label label, y, position}
        y -= @settings[:height] / (data.size - 1)
      end
    end

  private

    # Strokes a faint (25% opacity, 0.5pt) horizontal line at height +y+,
    # inset past the axis-label areas on both sides.
    def draw_gridline(y)
      transparent(0.25) do
        with(line_width: 0.5) do
          left = width(:left) + label_padding(:left)
          right = bounds.right - width(:right) - label_padding(:right)
          stroke_horizontal_line left, right, at: y
        end
      end
    end

    # Renders an axis value next to a gridline; +position+ is :left or :right.
    def draw_label(label, y, position)
      label_options = {height: text_height, size: font_size}
      label_options[:width] = width position
      label_options[:align] = align position
      label_options[:at] = [delta(position), y + text_height / 2]
      text_box label, text_options.merge(label_options)
    end

    # If labels are wider than this, they will be shrunk to fit
    def max_width
      100
    end

    # Left-hand labels are right-aligned (toward the line) and vice versa.
    def align(position)
      position == :left ? :right : :left
    end

    # x-coordinate where the label box starts for the given side.
    def delta(position)
      position == :left ? bounds.left : bounds.right - width(position)
    end

    # Configured label-area width for a side, capped at max_width.
    def width(position)
      [@settings.fetch(position, 0), max_width].min
    end

    # The horizontal space between the label and the gridline
    def label_padding(position)
      @settings.key?(position) ? padding : 0
    end
  end
end
# frozen_string_literal: true

require "rubocops/extend/formula"

# Original revision (pre ssh-copy-id), kept as in the dump.
module RuboCop
  module Cop
    module FormulaAudit
      # This cop audits `uses_from_macos` dependencies in formulae
      class UsesFromMacos < FormulaCop
        ALLOWED_USES_FROM_MACOS_DEPS = %w[
          bison bzip2 curl expat expect flex groff icu4c krb5 libedit
          libffi libpcap libxml2 libxslt llvm ncurses m4 openldap openssl
          perl php ruby sqlite tcl-tk texinfo unzip vim xz zlib zip zsh
        ].freeze

        def audit_formula(_node, _class_node, _parent_class_node, body_node)
          find_method_with_args(body_node, :uses_from_macos, /^"(.+)"/).each do |method|
            dep = if parameters(method).first.class == RuboCop::AST::StrNode
              parameters(method).first
            elsif parameters(method).first.class == RuboCop::AST::HashNode
              parameters(method).first.keys.first
            end

            next if ALLOWED_USES_FROM_MACOS_DEPS.include?(string_content(dep))

            problem "`uses_from_macos` should only be used for macOS dependencies, not #{string_content(dep)}."
          end
        end
      end
    end
  end
end

# Library/Homebrew/rubocops/uses_from_macos: add ssh-copy-id

# frozen_string_literal: true

require "rubocops/extend/formula"

module RuboCop
  module Cop
    module FormulaAudit
      # This cop audits `uses_from_macos` dependencies in formulae.
      # A formula may only declare `uses_from_macos` on software that ships
      # with macOS; everything else must be a regular dependency.
      class UsesFromMacos < FormulaCop
        # Alphabetised allow-list of software provided by macOS.
        ALLOWED_USES_FROM_MACOS_DEPS = %w[
          bison bzip2 curl expat expect flex groff icu4c krb5 libedit
          libffi libpcap libxml2 libxslt llvm m4 ncurses openldap openssl
          perl php ruby sqlite ssh-copy-id tcl-tk texinfo unzip vim xz
          zip zlib zsh
        ].freeze

        # Flags any `uses_from_macos "foo"` (or `uses_from_macos "foo" => …`)
        # whose dependency name is not in the allow-list.
        def audit_formula(_node, _class_node, _parent_class_node, body_node)
          find_method_with_args(body_node, :uses_from_macos, /^"(.+)"/).each do |method|
            first_param = parameters(method).first
            # Use rubocop-ast's node-type predicates instead of comparing
            # classes with `==` (which is non-idiomatic and misses nothing
            # here: str_type?/hash_type? match exactly :str/:hash nodes).
            dep = if first_param.str_type?
              first_param
            elsif first_param.hash_type?
              first_param.keys.first
            end

            # Guard: an argument of any other node type leaves dep nil;
            # previously this fell through into string_content(nil).
            next if dep.nil?
            next if ALLOWED_USES_FROM_MACOS_DEPS.include?(string_content(dep))

            problem "`uses_from_macos` should only be used for macOS dependencies, not #{string_content(dep)}."
          end
        end
      end
    end
  end
end
require 'json' require 'optparse' require 'bundler' require 'pathname' require 'set' module Srclib class Scan def self.summary "discover Ruby gems/apps in a dir tree" end def option_parser OptionParser.new do |opts| opts.banner = "Usage: scan [options]" opts.on("--repo URI", "URI of repository") do |v| @opt[:repo] = v end opts.on("--subdir DIR", "path of current dir relative to repo root") do |v| @opt[:repo_subdir] = v end end end def run(args) option_parser.order! raise "no args may be specified to scan (got #{args.inspect}); it only scans the current directory" if args.length != 0 pre_wd = Pathname.pwd # Keep track of already discovered files in a set discovered_files = Set.new source_units = find_gems('.').map do |gemspec, gem| Dir.chdir(File.dirname(gemspec)) if File.exist?("Gemfile") deps = Bundler.definition.dependencies.map{ |d| [d.name, d.requirement.to_s] } end gem_dir = Pathname.new(gemspec).relative_path_from(pre_wd).parent gem.delete(:date) # Add set of all now accounted for files, using absolute paths discovered_files.merge(gem[:files].sort.map { |x| File.expand_path(x) } ) { 'Name' => gem[:name], 'Type' => 'rubygem', 'Dir' => gem_dir, 'Files' => gem[:files].sort.map { |f| gem_dir == "." ? f : File.join(gem_dir, f) }, 'Dependencies' => (deps and deps.sort), #gem[:dependencies], # TODO(sqs): what to do with the gemspec deps? 'Data' => gem, 'Ops' => {'depresolve' => nil, 'graph' => nil}, } end Dir.chdir(pre_wd) # Reset working directory to initial root scripts = find_scripts('.', source_units).map do |script_path| Pathname.new(script_path).relative_path_from(pre_wd) end # Filter out scripts that are already accounted for in the existing Source Units scripts = scripts.select do |script_file| script_absolute = File.expand_path(script_file) member = discovered_files.member? script_absolute !member end scripts.sort! 
# For testing consistency if scripts.length > 0 source_units << { 'Name' => '.', 'Type' => 'rubyprogram', 'Dir' => '.', 'Files' => scripts, 'Dependencies' => nil, #TODO(rameshvarun): Aggregate dependencies from all of the scripts 'Data' => { 'name' => 'rubyscripts', 'files' => scripts, }, 'Ops' => {'depresolve' => nil, 'graph' => nil}, } end puts JSON.generate(source_units.sort_by { |a| a['Name'] }) end def initialize @opt = {} end private # Finds all scripts that are not accounted for in the existing set of found gems # @param dir [String] The directory in which to search for scripts # @param gem_units [Array] The source units that have already been found. def find_scripts(dir, gem_units) scripts = [] dir = File.expand_path(dir) Dir.glob(File.join(dir, "**/*.rb")).map do |script_file| scripts << script_file end scripts end # Given the content of a script, finds all of its dependant gems # @param script_code [String] Content of the script # @return [Array] The dependency array. def script_deps(script_code) # Get a list of all installed gems installed_gems = `gem list`.split(/\n/).map do |line| line.split.first.strip #TODO: Extract version number end deps = [] script_code.scan(/require\W["'](.*)["']/) do |required| if installed_gems.include? required[0].strip deps << [ required[0].strip, ">= 0" #TODO: Should use the currently installed version number ] end end return deps end def find_gems(dir) dir = File.expand_path(dir) gemspecs = {} spec_files = Dir.glob(File.join(dir, "**/*.gemspec")).sort spec_files.each do |spec_file| Dir.chdir(File.expand_path(File.dirname(spec_file), dir)) spec = Gem::Specification.load(spec_file) if spec spec.normalize o = {} spec.class.attribute_names.find_all do |name| v = spec.instance_variable_get("@#{name}") o[name] = v if v end if o[:files] o[:files].sort! end if o[:metadata] && o[:metadata].empty? 
o.delete(:metadata) end o.delete(:rubygems_version) o.delete(:specification_version) gemspecs[spec_file] = o end end gemspecs end end end Ignore stdlib when looking for loose scripts require 'json' require 'optparse' require 'bundler' require 'pathname' require 'set' module Srclib class Scan def self.summary "discover Ruby gems/apps in a dir tree" end def option_parser OptionParser.new do |opts| opts.banner = "Usage: scan [options]" opts.on("--repo URI", "URI of repository") do |v| @opt[:repo] = v end opts.on("--subdir DIR", "path of current dir relative to repo root") do |v| @opt[:repo_subdir] = v end end end def run(args) option_parser.order! raise "no args may be specified to scan (got #{args.inspect}); it only scans the current directory" if args.length != 0 pre_wd = Pathname.pwd # Keep track of already discovered files in a set discovered_files = Set.new source_units = find_gems('.').map do |gemspec, gem| Dir.chdir(File.dirname(gemspec)) if File.exist?("Gemfile") deps = Bundler.definition.dependencies.map{ |d| [d.name, d.requirement.to_s] } end gem_dir = Pathname.new(gemspec).relative_path_from(pre_wd).parent gem.delete(:date) # Add set of all now accounted for files, using absolute paths discovered_files.merge(gem[:files].sort.map { |x| File.expand_path(x) } ) { 'Name' => gem[:name], 'Type' => 'rubygem', 'Dir' => gem_dir, 'Files' => gem[:files].sort.map { |f| gem_dir == "." ? f : File.join(gem_dir, f) }, 'Dependencies' => (deps and deps.sort), #gem[:dependencies], # TODO(sqs): what to do with the gemspec deps? 
'Data' => gem, 'Ops' => {'depresolve' => nil, 'graph' => nil}, } end # Ignore standard library if @opt[:repo] != "github.com/ruby/ruby" Dir.chdir(pre_wd) # Reset working directory to initial root scripts = find_scripts('.', source_units).map do |script_path| Pathname.new(script_path).relative_path_from(pre_wd) end # Filter out scripts that are already accounted for in the existing Source Units scripts = scripts.select do |script_file| script_absolute = File.expand_path(script_file) member = discovered_files.member? script_absolute !member end scripts.sort! # For testing consistency # If scripts were found, append to the list of source units if scripts.length > 0 source_units << { 'Name' => '.', 'Type' => 'rubyprogram', 'Dir' => '.', 'Files' => scripts, 'Dependencies' => nil, #TODO(rameshvarun): Aggregate dependencies from all of the scripts 'Data' => { 'name' => 'rubyscripts', 'files' => scripts, }, 'Ops' => {'depresolve' => nil, 'graph' => nil}, } end end puts JSON.generate(source_units.sort_by { |a| a['Name'] }) end def initialize @opt = {} end private # Finds all scripts that are not accounted for in the existing set of found gems # @param dir [String] The directory in which to search for scripts # @param gem_units [Array] The source units that have already been found. def find_scripts(dir, gem_units) scripts = [] dir = File.expand_path(dir) Dir.glob(File.join(dir, "**/*.rb")).map do |script_file| scripts << script_file end scripts end # Given the content of a script, finds all of its dependant gems # @param script_code [String] Content of the script # @return [Array] The dependency array. def script_deps(script_code) # Get a list of all installed gems installed_gems = `gem list`.split(/\n/).map do |line| line.split.first.strip #TODO: Extract version number end deps = [] script_code.scan(/require\W["'](.*)["']/) do |required| if installed_gems.include? 
required[0].strip deps << [ required[0].strip, ">= 0" #TODO: Should use the currently installed version number ] end end return deps end def find_gems(dir) dir = File.expand_path(dir) gemspecs = {} spec_files = Dir.glob(File.join(dir, "**/*.gemspec")).sort spec_files.each do |spec_file| Dir.chdir(File.expand_path(File.dirname(spec_file), dir)) spec = Gem::Specification.load(spec_file) if spec spec.normalize o = {} spec.class.attribute_names.find_all do |name| v = spec.instance_variable_get("@#{name}") o[name] = v if v end if o[:files] o[:files].sort! end if o[:metadata] && o[:metadata].empty? o.delete(:metadata) end o.delete(:rubygems_version) o.delete(:specification_version) gemspecs[spec_file] = o end end gemspecs end end end
module Stalkerr VERSION = '0.1.1' end bump version to 0.2.0 module Stalkerr VERSION = '0.2.0' end
require 'socket' require 'timeout' module Stomp # Low level connection which maps commands and supports # synchronous receives class Connection attr_reader :connection_frame attr_reader :disconnect_receipt #alias :obj_send :send def self.default_port(ssl) ssl ? 61612 : 61613 end # A new Connection object accepts the following parameters: # # login (String, default : '') # passcode (String, default : '') # host (String, default : 'localhost') # port (Integer, default : 61613) # reliable (Boolean, default : false) # reconnect_delay (Integer, default : 5) # # e.g. c = Connection.new("username", "password", "localhost", 61613, true) # # Hash: # # hash = { # :hosts => [ # {:login => "login1", :passcode => "passcode1", :host => "localhost", :port => 61616, :ssl => false}, # {:login => "login2", :passcode => "passcode2", :host => "remotehost", :port => 61617, :ssl => false} # ], # :initial_reconnect_delay => 0.01, # :max_reconnect_delay => 30.0, # :use_exponential_back_off => true, # :back_off_multiplier => 2, # :max_reconnect_attempts => 0, # :randomize => false, # :backup => false, # :timeout => -1, # :parse_timeout => 5, # } # # e.g. 
c = Connection.new(hash) # # TODO # Stomp URL : # A Stomp URL must begin with 'stomp://' and can be in one of the following forms: # # stomp://host:port # stomp://host.domain.tld:port # stomp://user:pass@host:port # stomp://user:pass@host.domain.tld:port # def initialize(login = '', passcode = '', host = 'localhost', port = 61613, reliable = false, reconnect_delay = 5, connect_headers = {}) @received_messages = [] if login.is_a?(Hash) hashed_initialize(login) else @host = host @port = port @login = login @passcode = passcode @reliable = reliable @reconnect_delay = reconnect_delay @connect_headers = connect_headers @ssl = false @parameters = nil @parse_timeout = 5 # To override, use hashed parameters end # Use Mutexes: only one lock per each thread # Revert to original implementation attempt @transmit_semaphore = Mutex.new @read_semaphore = Mutex.new @socket_semaphore = Mutex.new @subscriptions = {} @failure = nil @connection_attempts = 0 socket end def hashed_initialize(params) @parameters = refine_params(params) @reliable = true @reconnect_delay = @parameters[:initial_reconnect_delay] @connect_headers = @parameters[:connect_headers] @parse_timeout = @parameters[:parse_timeout] #sets the first host to connect change_host end # Syntactic sugar for 'Connection.new' See 'initialize' for usage. def Connection.open(login = '', passcode = '', host = 'localhost', port = 61613, reliable = false, reconnect_delay = 5, connect_headers = {}) Connection.new(login, passcode, host, port, reliable, reconnect_delay, connect_headers) end def socket @socket_semaphore.synchronize do used_socket = @socket used_socket = nil if closed? while used_socket.nil? || !@failure.nil? @failure = nil begin used_socket = open_socket # Open complete connect(used_socket) @connection_attempts = 0 rescue @failure = $! 
used_socket = nil raise unless @reliable $stderr.print "connect to #{@host} failed: #{$!} will retry(##{@connection_attempts}) in #{@reconnect_delay}\n" raise Stomp::Error::MaxReconnectAttempts if max_reconnect_attempts? sleep(@reconnect_delay) @connection_attempts += 1 if @parameters change_host increase_reconnect_delay end end end @socket = used_socket end end def refine_params(params) params = params.uncamelize_and_symbolize_keys default_params = { :connect_headers => {}, # Failover parameters :initial_reconnect_delay => 0.01, :max_reconnect_delay => 30.0, :use_exponential_back_off => true, :back_off_multiplier => 2, :max_reconnect_attempts => 0, :randomize => false, :backup => false, :timeout => -1, # Parse Timeout :parse_timeout => 5 } default_params.merge(params) end def change_host @parameters[:hosts] = @parameters[:hosts].sort_by { rand } if @parameters[:randomize] # Set first as master and send it to the end of array current_host = @parameters[:hosts].shift @parameters[:hosts] << current_host @ssl = current_host[:ssl] @host = current_host[:host] @port = current_host[:port] || Connection::default_port(@ssl) @login = current_host[:login] || "" @passcode = current_host[:passcode] || "" end def max_reconnect_attempts? !(@parameters.nil? || @parameters[:max_reconnect_attempts].nil?) && @parameters[:max_reconnect_attempts] != 0 && @connection_attempts >= @parameters[:max_reconnect_attempts] end def increase_reconnect_delay @reconnect_delay *= @parameters[:back_off_multiplier] if @parameters[:use_exponential_back_off] @reconnect_delay = @parameters[:max_reconnect_delay] if @reconnect_delay > @parameters[:max_reconnect_delay] @reconnect_delay end # Is this connection open? def open? !@closed end # Is this connection closed? def closed? 
@closed end # Begin a transaction, requires a name for the transaction def begin(name, headers = {}) headers[:transaction] = name transmit("BEGIN", headers) end # Acknowledge a message, used when a subscription has specified # client acknowledgement ( connection.subscribe "/queue/a", :ack => 'client'g # # Accepts a transaction header ( :transaction => 'some_transaction_id' ) def ack(message_id, headers = {}) headers['message-id'] = message_id transmit("ACK", headers) end # Commit a transaction by name def commit(name, headers = {}) headers[:transaction] = name transmit("COMMIT", headers) end # Abort a transaction by name def abort(name, headers = {}) headers[:transaction] = name transmit("ABORT", headers) end # Subscribe to a destination, must specify a name def subscribe(name, headers = {}, subId = nil) headers[:destination] = name transmit("SUBSCRIBE", headers) # Store the sub so that we can replay if we reconnect. if @reliable subId = name if subId.nil? @subscriptions[subId] = headers end end # Unsubscribe from a destination, must specify a name def unsubscribe(name, headers = {}, subId = nil) headers[:destination] = name transmit("UNSUBSCRIBE", headers) if @reliable subId = name if subId.nil? @subscriptions.delete(subId) end end # Publish message to destination # # To disable content length header ( :suppress_content_length => true ) # Accepts a transaction header ( :transaction => 'some_transaction_id' ) def publish(destination, message, headers = {}) headers[:destination] = destination transmit("SEND", headers, message) end def obj_send(*args) __send__(*args) end def send(*args) warn("This method is deprecated and will be removed on the next release. 
Use 'publish' instead") publish(*args) end # Send a message back to the source or to the dead letter queue # # Accepts a dead letter queue option ( :dead_letter_queue => "/queue/DLQ" ) # Accepts a limit number of redeliveries option ( :max_redeliveries => 6 ) # Accepts a force client acknowledgement option (:force_client_ack => true) def unreceive(message, options = {}) options = { :dead_letter_queue => "/queue/DLQ", :max_redeliveries => 6 }.merge options # Lets make sure all keys are symbols message.headers = message.headers.symbolize_keys retry_count = message.headers[:retry_count].to_i || 0 message.headers[:retry_count] = retry_count + 1 transaction_id = "transaction-#{message.headers[:'message-id']}-#{retry_count}" message_id = message.headers.delete(:'message-id') begin self.begin transaction_id if client_ack?(message) || options[:force_client_ack] self.ack(message_id, :transaction => transaction_id) end if retry_count <= options[:max_redeliveries] self.publish(message.headers[:destination], message.body, message.headers.merge(:transaction => transaction_id)) else # Poison ack, sending the message to the DLQ self.publish(options[:dead_letter_queue], message.body, message.headers.merge(:transaction => transaction_id, :original_destination => message.headers[:destination], :persistent => true)) end self.commit transaction_id rescue Exception => exception self.abort transaction_id raise exception end end def client_ack?(message) headers = @subscriptions[message.headers[:destination]] !headers.nil? && headers[:ack] == "client" end # Close this connection def disconnect(headers = {}) transmit("DISCONNECT", headers) headers = headers.symbolize_keys @disconnect_receipt = receive if headers[:receipt] close_socket end # Return a pending message if one is available, otherwise # return nil def poll # No need for a read lock here. The receive method eventually fullfills # that requirement. return nil if @socket.nil? || !@socket.ready? 
receive end # Receive a frame, block until the frame is received def __old_receive # The recive my fail so we may need to retry. while TRUE begin used_socket = socket return _receive(used_socket) rescue @failure = $! raise unless @reliable $stderr.print "receive failed: #{$!}" end end end def receive super_result = __old_receive if super_result.nil? && @reliable $stderr.print "connection.receive returning EOF as nil - resetting connection.\n" @socket = nil super_result = __old_receive end return super_result end private def _receive( read_socket ) @read_semaphore.synchronize do line = read_socket.gets return nil if line.nil? # If the reading hangs for more than X seconds, abort the parsing process. # X defaults to 5. Override allowed in connection hash parameters. Timeout::timeout(@parse_timeout, Stomp::Error::PacketParsingTimeout) do # Reads the beginning of the message until it runs into a empty line message_header = '' begin message_header += line line = read_socket.gets end until line =~ /^\s?\n$/ # Checks if it includes content_length header content_length = message_header.match /content-length\s?:\s?(\d+)\s?\n/ message_body = '' # If it does, reads the specified amount of bytes char = '' if content_length message_body = read_socket.read content_length[1].to_i raise Stomp::Error::InvalidMessageLength unless parse_char(read_socket.getc) == "\0" # Else reads, the rest of the message until the first \0 else message_body += char while read_socket.ready? && (char = parse_char(read_socket.getc)) != "\0" end # If the buffer isn't empty, reads the next char and returns it to the buffer # unless it's a \n if read_socket.ready? last_char = read_socket.getc read_socket.ungetc(last_char) if parse_char(last_char) != "\n" end # Adds the excluded \n and \0 and tries to create a new message with it Message.new(message_header + "\n" + message_body + "\0") end end end def parse_char(char) RUBY_VERSION > '1.9' ? 
char : char.chr end def transmit(command, headers = {}, body = '') # The transmit may fail so we may need to retry. while TRUE begin used_socket = socket _transmit(used_socket, command, headers, body) return rescue @failure = $! raise unless @reliable $stderr.print "transmit to #{@host} failed: #{$!}\n" end end end def _transmit(used_socket, command, headers = {}, body = '') @transmit_semaphore.synchronize do # ActiveMQ interprets every message as a BinaryMessage # if content_length header is included. # Using :suppress_content_length => true will suppress this behaviour # and ActiveMQ will interpret the message as a TextMessage. # For more information refer to http://juretta.com/log/2009/05/24/activemq-jms-stomp/ # Lets send this header in the message, so it can maintain state when using unreceive headers['content-length'] = "#{body.length}" unless headers[:suppress_content_length] used_socket.puts command headers.each {|k,v| used_socket.puts "#{k}:#{v}" } used_socket.puts "content-type: text/plain; charset=UTF-8" used_socket.puts used_socket.write body used_socket.write "\0" end end def open_tcp_socket tcp_socket = TCPSocket.open @host, @port def tcp_socket.ready? r,w,e = IO.select([self],nil,nil,0) ! r.nil? end tcp_socket end def open_ssl_socket require 'openssl' unless defined?(OpenSSL) ctx = OpenSSL::SSL::SSLContext.new # For client certificate authentication: # key_path = ENV["STOMP_KEY_PATH"] || "~/stomp_keys" # ctx.cert = OpenSSL::X509::Certificate.new("#{key_path}/client.cer") # ctx.key = OpenSSL::PKey::RSA.new("#{key_path}/client.keystore") # For server certificate authentication: # truststores = OpenSSL::X509::Store.new # truststores.add_file("#{key_path}/client.ts") # ctx.verify_mode = OpenSSL::SSL::VERIFY_PEER # ctx.cert_store = truststores ctx.verify_mode = OpenSSL::SSL::VERIFY_NONE ssl = OpenSSL::SSL::SSLSocket.new(open_tcp_socket, ctx) def ssl.ready? ! @rbuffer.empty? || @io.ready? 
end ssl.connect ssl end def close_socket begin @socket.close rescue #Ignoring if already closed end @closed = true end def open_socket used_socket = @ssl ? open_ssl_socket : open_tcp_socket # try to close the old connection if any close_socket @closed = false used_socket end def connect(used_socket) headers = @connect_headers.clone headers[:login] = @login headers[:passcode] = @passcode _transmit(used_socket, "CONNECT", headers) @connection_frame = _receive(used_socket) @disconnect_receipt = nil # replay any subscriptions. @subscriptions.each { |k,v| _transmit(used_socket, "SUBSCRIBE", v) } end end end Update hashed login usage comment. require 'socket' require 'timeout' module Stomp # Low level connection which maps commands and supports # synchronous receives class Connection attr_reader :connection_frame attr_reader :disconnect_receipt #alias :obj_send :send def self.default_port(ssl) ssl ? 61612 : 61613 end # A new Connection object accepts the following parameters: # # login (String, default : '') # passcode (String, default : '') # host (String, default : 'localhost') # port (Integer, default : 61613) # reliable (Boolean, default : false) # reconnect_delay (Integer, default : 5) # # e.g. c = Connection.new("username", "password", "localhost", 61613, true) # # Hash: # # hash = { # :hosts => [ # {:login => "login1", :passcode => "passcode1", :host => "localhost", :port => 61616, :ssl => false}, # {:login => "login2", :passcode => "passcode2", :host => "remotehost", :port => 61617, :ssl => false} # ], # :initial_reconnect_delay => 0.01, # :max_reconnect_delay => 30.0, # :use_exponential_back_off => true, # :back_off_multiplier => 2, # :max_reconnect_attempts => 0, # :randomize => false, # :backup => false, # :timeout => -1, # :connect_headers => {}, # :parse_timeout => 5, # } # # e.g. 
c = Connection.new(hash) # # TODO # Stomp URL : # A Stomp URL must begin with 'stomp://' and can be in one of the following forms: # # stomp://host:port # stomp://host.domain.tld:port # stomp://user:pass@host:port # stomp://user:pass@host.domain.tld:port # def initialize(login = '', passcode = '', host = 'localhost', port = 61613, reliable = false, reconnect_delay = 5, connect_headers = {}) @received_messages = [] if login.is_a?(Hash) hashed_initialize(login) else @host = host @port = port @login = login @passcode = passcode @reliable = reliable @reconnect_delay = reconnect_delay @connect_headers = connect_headers @ssl = false @parameters = nil @parse_timeout = 5 # To override, use hashed parameters end # Use Mutexes: only one lock per each thread # Revert to original implementation attempt @transmit_semaphore = Mutex.new @read_semaphore = Mutex.new @socket_semaphore = Mutex.new @subscriptions = {} @failure = nil @connection_attempts = 0 socket end def hashed_initialize(params) @parameters = refine_params(params) @reliable = true @reconnect_delay = @parameters[:initial_reconnect_delay] @connect_headers = @parameters[:connect_headers] @parse_timeout = @parameters[:parse_timeout] #sets the first host to connect change_host end # Syntactic sugar for 'Connection.new' See 'initialize' for usage. def Connection.open(login = '', passcode = '', host = 'localhost', port = 61613, reliable = false, reconnect_delay = 5, connect_headers = {}) Connection.new(login, passcode, host, port, reliable, reconnect_delay, connect_headers) end def socket @socket_semaphore.synchronize do used_socket = @socket used_socket = nil if closed? while used_socket.nil? || !@failure.nil? @failure = nil begin used_socket = open_socket # Open complete connect(used_socket) @connection_attempts = 0 rescue @failure = $! 
used_socket = nil raise unless @reliable $stderr.print "connect to #{@host} failed: #{$!} will retry(##{@connection_attempts}) in #{@reconnect_delay}\n" raise Stomp::Error::MaxReconnectAttempts if max_reconnect_attempts? sleep(@reconnect_delay) @connection_attempts += 1 if @parameters change_host increase_reconnect_delay end end end @socket = used_socket end end def refine_params(params) params = params.uncamelize_and_symbolize_keys default_params = { :connect_headers => {}, # Failover parameters :initial_reconnect_delay => 0.01, :max_reconnect_delay => 30.0, :use_exponential_back_off => true, :back_off_multiplier => 2, :max_reconnect_attempts => 0, :randomize => false, :backup => false, :timeout => -1, # Parse Timeout :parse_timeout => 5 } default_params.merge(params) end def change_host @parameters[:hosts] = @parameters[:hosts].sort_by { rand } if @parameters[:randomize] # Set first as master and send it to the end of array current_host = @parameters[:hosts].shift @parameters[:hosts] << current_host @ssl = current_host[:ssl] @host = current_host[:host] @port = current_host[:port] || Connection::default_port(@ssl) @login = current_host[:login] || "" @passcode = current_host[:passcode] || "" end def max_reconnect_attempts? !(@parameters.nil? || @parameters[:max_reconnect_attempts].nil?) && @parameters[:max_reconnect_attempts] != 0 && @connection_attempts >= @parameters[:max_reconnect_attempts] end def increase_reconnect_delay @reconnect_delay *= @parameters[:back_off_multiplier] if @parameters[:use_exponential_back_off] @reconnect_delay = @parameters[:max_reconnect_delay] if @reconnect_delay > @parameters[:max_reconnect_delay] @reconnect_delay end # Is this connection open? def open? !@closed end # Is this connection closed? def closed? 
@closed end # Begin a transaction, requires a name for the transaction def begin(name, headers = {}) headers[:transaction] = name transmit("BEGIN", headers) end # Acknowledge a message, used when a subscription has specified # client acknowledgement ( connection.subscribe "/queue/a", :ack => 'client'g # # Accepts a transaction header ( :transaction => 'some_transaction_id' ) def ack(message_id, headers = {}) headers['message-id'] = message_id transmit("ACK", headers) end # Commit a transaction by name def commit(name, headers = {}) headers[:transaction] = name transmit("COMMIT", headers) end # Abort a transaction by name def abort(name, headers = {}) headers[:transaction] = name transmit("ABORT", headers) end # Subscribe to a destination, must specify a name def subscribe(name, headers = {}, subId = nil) headers[:destination] = name transmit("SUBSCRIBE", headers) # Store the sub so that we can replay if we reconnect. if @reliable subId = name if subId.nil? @subscriptions[subId] = headers end end # Unsubscribe from a destination, must specify a name def unsubscribe(name, headers = {}, subId = nil) headers[:destination] = name transmit("UNSUBSCRIBE", headers) if @reliable subId = name if subId.nil? @subscriptions.delete(subId) end end # Publish message to destination # # To disable content length header ( :suppress_content_length => true ) # Accepts a transaction header ( :transaction => 'some_transaction_id' ) def publish(destination, message, headers = {}) headers[:destination] = destination transmit("SEND", headers, message) end def obj_send(*args) __send__(*args) end def send(*args) warn("This method is deprecated and will be removed on the next release. 
Use 'publish' instead") publish(*args) end # Send a message back to the source or to the dead letter queue # # Accepts a dead letter queue option ( :dead_letter_queue => "/queue/DLQ" ) # Accepts a limit number of redeliveries option ( :max_redeliveries => 6 ) # Accepts a force client acknowledgement option (:force_client_ack => true) def unreceive(message, options = {}) options = { :dead_letter_queue => "/queue/DLQ", :max_redeliveries => 6 }.merge options # Lets make sure all keys are symbols message.headers = message.headers.symbolize_keys retry_count = message.headers[:retry_count].to_i || 0 message.headers[:retry_count] = retry_count + 1 transaction_id = "transaction-#{message.headers[:'message-id']}-#{retry_count}" message_id = message.headers.delete(:'message-id') begin self.begin transaction_id if client_ack?(message) || options[:force_client_ack] self.ack(message_id, :transaction => transaction_id) end if retry_count <= options[:max_redeliveries] self.publish(message.headers[:destination], message.body, message.headers.merge(:transaction => transaction_id)) else # Poison ack, sending the message to the DLQ self.publish(options[:dead_letter_queue], message.body, message.headers.merge(:transaction => transaction_id, :original_destination => message.headers[:destination], :persistent => true)) end self.commit transaction_id rescue Exception => exception self.abort transaction_id raise exception end end def client_ack?(message) headers = @subscriptions[message.headers[:destination]] !headers.nil? && headers[:ack] == "client" end # Close this connection def disconnect(headers = {}) transmit("DISCONNECT", headers) headers = headers.symbolize_keys @disconnect_receipt = receive if headers[:receipt] close_socket end # Return a pending message if one is available, otherwise # return nil def poll # No need for a read lock here. The receive method eventually fullfills # that requirement. return nil if @socket.nil? || !@socket.ready? 
receive end # Receive a frame, block until the frame is received def __old_receive # The recive my fail so we may need to retry. while TRUE begin used_socket = socket return _receive(used_socket) rescue @failure = $! raise unless @reliable $stderr.print "receive failed: #{$!}" end end end def receive super_result = __old_receive if super_result.nil? && @reliable $stderr.print "connection.receive returning EOF as nil - resetting connection.\n" @socket = nil super_result = __old_receive end return super_result end private def _receive( read_socket ) @read_semaphore.synchronize do line = read_socket.gets return nil if line.nil? # If the reading hangs for more than X seconds, abort the parsing process. # X defaults to 5. Override allowed in connection hash parameters. Timeout::timeout(@parse_timeout, Stomp::Error::PacketParsingTimeout) do # Reads the beginning of the message until it runs into a empty line message_header = '' begin message_header += line line = read_socket.gets end until line =~ /^\s?\n$/ # Checks if it includes content_length header content_length = message_header.match /content-length\s?:\s?(\d+)\s?\n/ message_body = '' # If it does, reads the specified amount of bytes char = '' if content_length message_body = read_socket.read content_length[1].to_i raise Stomp::Error::InvalidMessageLength unless parse_char(read_socket.getc) == "\0" # Else reads, the rest of the message until the first \0 else message_body += char while read_socket.ready? && (char = parse_char(read_socket.getc)) != "\0" end # If the buffer isn't empty, reads the next char and returns it to the buffer # unless it's a \n if read_socket.ready? last_char = read_socket.getc read_socket.ungetc(last_char) if parse_char(last_char) != "\n" end # Adds the excluded \n and \0 and tries to create a new message with it Message.new(message_header + "\n" + message_body + "\0") end end end def parse_char(char) RUBY_VERSION > '1.9' ? 
char : char.chr end def transmit(command, headers = {}, body = '') # The transmit may fail so we may need to retry. while TRUE begin used_socket = socket _transmit(used_socket, command, headers, body) return rescue @failure = $! raise unless @reliable $stderr.print "transmit to #{@host} failed: #{$!}\n" end end end def _transmit(used_socket, command, headers = {}, body = '') @transmit_semaphore.synchronize do # ActiveMQ interprets every message as a BinaryMessage # if content_length header is included. # Using :suppress_content_length => true will suppress this behaviour # and ActiveMQ will interpret the message as a TextMessage. # For more information refer to http://juretta.com/log/2009/05/24/activemq-jms-stomp/ # Lets send this header in the message, so it can maintain state when using unreceive headers['content-length'] = "#{body.length}" unless headers[:suppress_content_length] used_socket.puts command headers.each {|k,v| used_socket.puts "#{k}:#{v}" } used_socket.puts "content-type: text/plain; charset=UTF-8" used_socket.puts used_socket.write body used_socket.write "\0" end end def open_tcp_socket tcp_socket = TCPSocket.open @host, @port def tcp_socket.ready? r,w,e = IO.select([self],nil,nil,0) ! r.nil? end tcp_socket end def open_ssl_socket require 'openssl' unless defined?(OpenSSL) ctx = OpenSSL::SSL::SSLContext.new # For client certificate authentication: # key_path = ENV["STOMP_KEY_PATH"] || "~/stomp_keys" # ctx.cert = OpenSSL::X509::Certificate.new("#{key_path}/client.cer") # ctx.key = OpenSSL::PKey::RSA.new("#{key_path}/client.keystore") # For server certificate authentication: # truststores = OpenSSL::X509::Store.new # truststores.add_file("#{key_path}/client.ts") # ctx.verify_mode = OpenSSL::SSL::VERIFY_PEER # ctx.cert_store = truststores ctx.verify_mode = OpenSSL::SSL::VERIFY_NONE ssl = OpenSSL::SSL::SSLSocket.new(open_tcp_socket, ctx) def ssl.ready? ! @rbuffer.empty? || @io.ready? 
end ssl.connect ssl end def close_socket begin @socket.close rescue #Ignoring if already closed end @closed = true end def open_socket used_socket = @ssl ? open_ssl_socket : open_tcp_socket # try to close the old connection if any close_socket @closed = false used_socket end def connect(used_socket) headers = @connect_headers.clone headers[:login] = @login headers[:passcode] = @passcode _transmit(used_socket, "CONNECT", headers) @connection_frame = _receive(used_socket) @disconnect_receipt = nil # replay any subscriptions. @subscriptions.each { |k,v| _transmit(used_socket, "SUBSCRIBE", v) } end end end
class IgnitionCmake0 < Formula desc "CMake helper functions for building robotic applications" homepage "http://ignitionrobotics.org" url "https://bitbucket.org/ignitionrobotics/ign-cmake/get/ecc284a8d4c8.tar.gz" version "0.1.0~20171006~ecc284a" sha256 "4eba15c53553fb3f546db3b1e1db6e2c2f614a7918ae379fab9f697cef32846d" head "https://bitbucket.org/ignitionrobotics/ign-cmake", :branch => "default", :using => :hg bottle do root_url "http://gazebosim.org/distributions/ign-cmake/releases" cellar :any_skip_relocation sha256 "70c642fff4e68d0c319f05be1bf26b0a9f36d901c5f74d51de2ee5dc295aeffe" => :el_capitan_or_later end depends_on "cmake" => :run def install system "cmake", ".", *std_cmake_args system "make", "install" end test do (testpath/"CMakeLists.txt").write <<-EOS.undent cmake_minimum_required(VERSION 3.5.1 FATAL_ERROR) find_package(ignition-cmake0 REQUIRED) ign_configure_project(test 0.1.0) ign_configure_build(QUIT_IF_BUILD_ERRORS) #ign_create_packages() EOS %w[doc include src test].each do |dir| mkdir dir do touch "CMakeLists.txt" end end mkdir "build" do system "cmake", ".." 
end end end ignition-cmake 0.1.0 (#294) * ignition-cmake 0.1.0 * Update ignition-cmake0.rb * ignition-cmake0 el_capitan bottle class IgnitionCmake0 < Formula desc "CMake helper functions for building robotic applications" homepage "http://ignitionrobotics.org" url "http://gazebosim.org/distributions/ign-cmake/releases/ignition-cmake-0.1.0.tar.bz2" sha256 "9b79655b925d733333d944f36905c0ecd9971482c04e041feb5cef6184b16a72" head "https://bitbucket.org/ignitionrobotics/ign-cmake", :branch => "default", :using => :hg bottle do root_url "http://gazebosim.org/distributions/ign-cmake/releases" cellar :any_skip_relocation sha256 "9d8b7c6a1ffcfe1d023d200865c54b194f6e7d8047ed106c447baaaac2198d3c" => :el_capitan_or_later end depends_on "cmake" => :run def install system "cmake", ".", *std_cmake_args system "make", "install" end test do (testpath/"CMakeLists.txt").write <<-EOS.undent cmake_minimum_required(VERSION 3.5.1 FATAL_ERROR) find_package(ignition-cmake0 REQUIRED) ign_configure_project(test 0.1.0) ign_configure_build(QUIT_IF_BUILD_ERRORS) #ign_create_packages() EOS %w[doc include src test].each do |dir| mkdir dir do touch "CMakeLists.txt" end end mkdir "build" do system "cmake", ".." end end end
# Dataset record: two snapshots of the Homebrew formula `IgnitionCommon`
# separated by the commit message "New snapshot of ignition-common:
# 0.4.0~20170906~ebf4b53 (#271)". The commit advances the Bitbucket source
# tarball (2071c29 -> ebf4b53), the `version` string, the source `sha256`,
# and the three bottle sha256s (sierra / el_capitan / yosemite).
# Dependency list, install steps, and the pkg-config/compile smoke test
# are identical in both snapshots.
class IgnitionCommon < Formula desc "Common libraries for robotics applications" homepage "https://bitbucket.org/ignitionrobotics/ign-common" # url "http://gazebosim.org/distributions/ign-common/releases/ignition-common-0.4.0.tar.bz2" url "https://bitbucket.org/ignitionrobotics/ign-common/get/2071c29973ed10a53275c875199be36fb3dc8196.tar.gz" version "0.4.0~20170720~2071c29" sha256 "cb2bfbd60cfb353c28778f391bb65a13dc31168d9be4478e13f66ba0b9232a88" head "https://bitbucket.org/ignitionrobotics/ign-common", :branch => "default", :using => :hg bottle do root_url "http://gazebosim.org/distributions/ign-common/releases" sha256 "bf56bd002a3bedae78e7369b51d483d370ef67e9df54f5a0c113c17c9e95053b" => :sierra sha256 "d452ceab673157a6b6e8bf0a5ed01a45b66453b9e9c93babb7361a108426b0b8" => :el_capitan sha256 "4e739efdeeb59ca08f8f62307dda205dfc9800ac899bd4e73ef07696ac4e7c69" => :yosemite end depends_on "cmake" => :build depends_on "ffmpeg" depends_on "freeimage" depends_on "gts" depends_on "ignition-math3" depends_on "ossp-uuid" depends_on "tinyxml2" depends_on "pkg-config" => :run def install system "cmake", ".", *std_cmake_args system "make", "install" end test do (testpath/"test.cpp").write <<-EOS.undent #include <iostream> #include <ignition/common.hh> int main() { igndbg << "debug" << std::endl; ignwarn << "warn" << std::endl; ignerr << "error" << std::endl; // // this example code doesn't compile // try { // ignthrow("An example exception that is caught."); // } // catch(const ignition::common::exception &_e) { // std::cerr << "Caught a runtime error " << _e.what() << std::endl; // } // ignassert(0 == 0); return 0; } EOS system "pkg-config", "ignition-common0" cflags = `pkg-config --cflags ignition-common0`.split(" ") system ENV.cc, "test.cpp", *cflags, "-L#{lib}", "-lignition-common0", "-lc++", "-o", "test" system "./test" end end New snapshot of ignition-common: 0.4.0~20170906~ebf4b53 (#271) * New snapshot of ignition-common: 0.4.0~20170906~ebf4b53 * ignition-common 
0.4.0~20170906~ebf4b53 sierra bottle * ignition-common 0.4.0~20170906~ebf4b53 el_capitan bottle * ignition-common 0.4.0~20170906~ebf4b53 yosemite bottle class IgnitionCommon < Formula desc "Common libraries for robotics applications" homepage "https://bitbucket.org/ignitionrobotics/ign-common" # url "http://gazebosim.org/distributions/ign-common/releases/ignition-common-0.4.0.tar.bz2" url "https://bitbucket.org/ignitionrobotics/ign-common/get/ebf4b53ee7633d541172953a213ca4009fc122f2.tar.gz" version "0.4.0~20170906~ebf4b53" sha256 "c28f27c50fad98360b143c1bed270183f7260105b7a21669463400c80efc7ed2" head "https://bitbucket.org/ignitionrobotics/ign-common", :branch => "default", :using => :hg bottle do root_url "http://gazebosim.org/distributions/ign-common/releases" sha256 "25483747704aaa04f1d86423e67c195f6f50b57297049d751e7d966a388b6f82" => :sierra sha256 "167ab634b6f3afeb0ae72c53c5c6752f3f5d81248b6a1762909b20dc5f00265d" => :el_capitan sha256 "ca64d8eca4af6b1a5ff3588c6a58e8c091a9c3d45bf38edd5e336064b79dfc13" => :yosemite end depends_on "cmake" => :build depends_on "ffmpeg" depends_on "freeimage" depends_on "gts" depends_on "ignition-math3" depends_on "ossp-uuid" depends_on "tinyxml2" depends_on "pkg-config" => :run def install system "cmake", ".", *std_cmake_args system "make", "install" end test do (testpath/"test.cpp").write <<-EOS.undent #include <iostream> #include <ignition/common.hh> int main() { igndbg << "debug" << std::endl; ignwarn << "warn" << std::endl; ignerr << "error" << std::endl; // // this example code doesn't compile // try { // ignthrow("An example exception that is caught."); // } // catch(const ignition::common::exception &_e) { // std::cerr << "Caught a runtime error " << _e.what() << std::endl; // } // ignassert(0 == 0); return 0; } EOS system "pkg-config", "ignition-common0" cflags = `pkg-config --cflags ignition-common0`.split(" ") system ENV.cc, "test.cpp", *cflags, "-L#{lib}", "-lignition-common0", "-lc++", "-o", "test" system "./test" end 
end
# Dataset record: two snapshots of `Striuct::Subclass` (a strict-struct mixin
# with per-member conditions and "flavor" transform hooks) separated by the
# commit message "modify private #__set__ (not change behavior)".
# Visible diff between the snapshots: snapshot 1 has `__set__` branch on
# `restrict? name` and delegate storage to a separate `__set__!` helper;
# snapshot 2 removes `__set__!` and inlines the accept-check / flavor-apply /
# store sequence directly in `__set__`. All other methods are identical.
# NOTE(review): the commit message claims no behavior change; whether the
# `restrict?` guard removal is truly equivalent depends on `accept?` semantics
# defined in ClassUtil, which is not visible here — cannot confirm from this chunk.
require_relative 'classutil' class Striuct # @author Kenichi Kamiya module Subclass extend ClassUtil include Enumerable def initialize(*values) @db, @lock = {}, false if values.size <= size values.each_with_index do |v, idx| self[idx] = v end excess = members.last(size - values.size) excess.each do |name| self[name] = default_for name if has_default? name end else raise ArgumentError, "struct size differs (max: #{size})" end end # @return [Boolean] def ==(other) if self.class.equal? other.class each_pair.all?{|k, v|v == other[k]} else false end end def eql?(other) if self.class.equal? other.class each_pair.all?{|k, v|v.eql? other[k]} else false end end # @return [Integer] def hash values.map(&:hash).hash end # @return [String] def inspect "#<#{self.class} (StrictStruct)".tap do |s| members.each_with_index do |name, idx| s << " #{idx}.#{name}: #{self[name].inspect}" end s << ">\n" end end # @return [String] def to_s "#<struct #{self.class}".tap do |s| members.each_with_index do |m, idx| s << " #{m}=#{self[m].inspect}" end s << '>' end end delegate_class_methods( :members, :keys, :has_member?, :member?, :has_key?, :key?, :length, :size, :convert_cname, :restrict?, :has_default?, :default_for, :names, :has_flavor?, :flavor_for, :has_conditions? ) private :convert_cname, :flavor_for # @param [Symbol, String, Fixnum] key def [](key) __subscript__(key){|name|__get__ name} end # @param [Symbol, String, Fixnum] key # @param [Object] value def []=(key, value) __subscript__(key){|name|__set__ name, value} end # @yield [name] # @yieldparam [Symbol] name - sequential under defined # @yieldreturn [self] # @return [Enumerator] def each_name return to_enum(__method__) unless block_given? 
self.class.each_name{|name|yield name} self end alias_method :each_member, :each_name alias_method :each_key, :each_name # @yield [value] # @yieldparam [Object] value - sequential under defined (see #each_name) # @yieldreturn [self] # @return [Enumerator] def each_value return to_enum(__method__) unless block_given? each_member{|member|yield self[member]} end alias_method :each, :each_value # @yield [name, value] # @yieldparam [Symbol] name (see #each_name) # @yieldparam [Object] value (see #each_value) # @yieldreturn [self] # @return [Enumerator] def each_pair return to_enum(__method__) unless block_given? each_name{|name|yield name, self[name]} end # @return [Array] def values [].tap do |r| each_value do |v| r << v end end end alias_method :to_a, :values # @param [Fixnum, Range] *keys # @return [Array] def values_at(*keys) [].tap do |r| keys.each do |key| case key when Fixnum r << self[key] when Range key.each do |n| r << self[n] end else raise TypeError end end end end # @param [Symbol, String] name def assign?(name) name = convert_cname name raise NameError unless member? name @db.has_key? name end # @param [Symbol, String] name def unassign(name) raise LockError if lock? name = convert_cname name raise NameError unless member? name @db.delete name end # @param [Symbol, String] name # @param [Object] *values - no argument and use own def sufficent?(name, value=self[name]) self.class.__send__(__method__, name, value, self) end alias_method :accept?, :sufficent? def strict? each_pair.all?{|name, value|self.class.sufficent? name, value} end # @return [self] def lock @lock = true self end def lock? @lock end def secure? lock? && self.class.lock? && strict? end def freeze @db.freeze super end private def initialize_copy(org) @db, @lock = @db.clone, false end def __get__(name) name = convert_cname name raise NameError unless member? name @db[name] end def __set__(name, value) raise LockError if lock? name = convert_cname name raise NameError unless member? 
name if restrict? name if accept? name, value __set__! name, value else raise ConditionError, 'deficent value for all conditions' end else __set__! name, value end end alias_method :assign, :__set__ public :assign def __set__!(name, value) raise LockError if lock? name = convert_cname name if has_flavor? name value = instance_exec value, &flavor_for(name) end @db[name] = value end def __subscript__(key) case key when Symbol, String name = convert_cname key if member? name yield name else raise NameError end when Fixnum if name = members[key] yield name else raise IndexError end else raise ArgumentError end end def unlock @lock = false self end end end modify private #__set__ (not change behavior) require_relative 'classutil' class Striuct # @author Kenichi Kamiya module Subclass extend ClassUtil include Enumerable def initialize(*values) @db, @lock = {}, false if values.size <= size values.each_with_index do |v, idx| self[idx] = v end excess = members.last(size - values.size) excess.each do |name| self[name] = default_for name if has_default? name end else raise ArgumentError, "struct size differs (max: #{size})" end end # @return [Boolean] def ==(other) if self.class.equal? other.class each_pair.all?{|k, v|v == other[k]} else false end end def eql?(other) if self.class.equal? other.class each_pair.all?{|k, v|v.eql? other[k]} else false end end # @return [Integer] def hash values.map(&:hash).hash end # @return [String] def inspect "#<#{self.class} (StrictStruct)".tap do |s| members.each_with_index do |name, idx| s << " #{idx}.#{name}: #{self[name].inspect}" end s << ">\n" end end # @return [String] def to_s "#<struct #{self.class}".tap do |s| members.each_with_index do |m, idx| s << " #{m}=#{self[m].inspect}" end s << '>' end end delegate_class_methods( :members, :keys, :has_member?, :member?, :has_key?, :key?, :length, :size, :convert_cname, :restrict?, :has_default?, :default_for, :names, :has_flavor?, :flavor_for, :has_conditions? 
) private :convert_cname, :flavor_for # @param [Symbol, String, Fixnum] key def [](key) __subscript__(key){|name|__get__ name} end # @param [Symbol, String, Fixnum] key # @param [Object] value def []=(key, value) __subscript__(key){|name|__set__ name, value} end # @yield [name] # @yieldparam [Symbol] name - sequential under defined # @yieldreturn [self] # @return [Enumerator] def each_name return to_enum(__method__) unless block_given? self.class.each_name{|name|yield name} self end alias_method :each_member, :each_name alias_method :each_key, :each_name # @yield [value] # @yieldparam [Object] value - sequential under defined (see #each_name) # @yieldreturn [self] # @return [Enumerator] def each_value return to_enum(__method__) unless block_given? each_member{|member|yield self[member]} end alias_method :each, :each_value # @yield [name, value] # @yieldparam [Symbol] name (see #each_name) # @yieldparam [Object] value (see #each_value) # @yieldreturn [self] # @return [Enumerator] def each_pair return to_enum(__method__) unless block_given? each_name{|name|yield name, self[name]} end # @return [Array] def values [].tap do |r| each_value do |v| r << v end end end alias_method :to_a, :values # @param [Fixnum, Range] *keys # @return [Array] def values_at(*keys) [].tap do |r| keys.each do |key| case key when Fixnum r << self[key] when Range key.each do |n| r << self[n] end else raise TypeError end end end end # @param [Symbol, String] name def assign?(name) name = convert_cname name raise NameError unless member? name @db.has_key? name end # @param [Symbol, String] name def unassign(name) raise LockError if lock? name = convert_cname name raise NameError unless member? name @db.delete name end # @param [Symbol, String] name # @param [Object] *values - no argument and use own def sufficent?(name, value=self[name]) self.class.__send__(__method__, name, value, self) end alias_method :accept?, :sufficent? def strict? each_pair.all?{|name, value|self.class.sufficent? 
name, value} end # @return [self] def lock @lock = true self end def lock? @lock end def secure? lock? && self.class.lock? && strict? end def freeze @db.freeze super end private def initialize_copy(org) @db, @lock = @db.clone, false end def __get__(name) name = convert_cname name raise NameError unless member? name @db[name] end def __set__(name, value) raise LockError if lock? name = convert_cname name raise NameError unless member? name if accept? name, value if has_flavor? name value = instance_exec value, &flavor_for(name) end @db[name] = value else raise ConditionError, 'deficent value for all conditions' end end alias_method :assign, :__set__ public :assign def __subscript__(key) case key when Symbol, String name = convert_cname key if member? name yield name else raise NameError end when Fixnum if name = members[key] yield name else raise IndexError end else raise ArgumentError end end def unlock @lock = false self end end end
# Dataset record: two snapshots of `SubjectSelector` separated by the commit
# message "Use subject_set_id instead of group_id Closes #810".
# The only code diff: `needs_set_id?` changes its guard from
# `!params.has_key?(:group_id)` to `!params.has_key?(:subject_set_id)`,
# aligning the check with the `:subject_set_id` key the rest of the class
# already uses (in `retrieve_subject_queue` and the `group_id_error` message).
class SubjectSelector class MissingParameter < StandardError; end class MissingSubjectQueue < StandardError; end attr_reader :user, :params, :workflow def initialize(user, workflow, params, scope) @user, @workflow, @params, @scope = user, workflow, params, scope end def queued_subjects raise workflow_id_error unless workflow raise group_id_error if needs_set_id? if queue = retrieve_subject_queue selected_subjects(queue.next_subjects(default_page_size)) else raise MissingSubjectQueue.new("No queue defined for user. Building one now, please try again.") end end def selected_subjects(sms_ids, selector_context={}) subjects = @scope.joins(:set_member_subjects) .where(set_member_subjects: {id: sms_ids}) [subjects, selector_context] end private def needs_set_id? workflow.grouped && !params.has_key?(:group_id) end def workflow_id_error MissingParameter.new("workflow_id parameter missing") end def group_id_error MissingParameter.new("subject_set_id parameter missing for grouped workflow") end def default_page_size params[:page_size] ||= 10 end def retrieve_subject_queue queue = SubjectQueue.scoped_to_set(params[:subject_set_id]) .find_by(user: user.user, workflow: workflow) case when queue.nil? queue = SubjectQueue.create_for_user(workflow, user.user, set: params[:subject_set_id]) when queue.below_minimum? SubjectQueueWorker.perform_async(workflow.id, user.id) end queue end end Use subject_set_id instead of group_id Closes #810 class SubjectSelector class MissingParameter < StandardError; end class MissingSubjectQueue < StandardError; end attr_reader :user, :params, :workflow def initialize(user, workflow, params, scope) @user, @workflow, @params, @scope = user, workflow, params, scope end def queued_subjects raise workflow_id_error unless workflow raise group_id_error if needs_set_id? if queue = retrieve_subject_queue selected_subjects(queue.next_subjects(default_page_size)) else raise MissingSubjectQueue.new("No queue defined for user. 
Building one now, please try again.") end end def selected_subjects(sms_ids, selector_context={}) subjects = @scope.joins(:set_member_subjects) .where(set_member_subjects: {id: sms_ids}) [subjects, selector_context] end private def needs_set_id? workflow.grouped && !params.has_key?(:subject_set_id) end def workflow_id_error MissingParameter.new("workflow_id parameter missing") end def group_id_error MissingParameter.new("subject_set_id parameter missing for grouped workflow") end def default_page_size params[:page_size] ||= 10 end def retrieve_subject_queue queue = SubjectQueue.scoped_to_set(params[:subject_set_id]) .find_by(user: user.user, workflow: workflow) case when queue.nil? queue = SubjectQueue.create_for_user(workflow, user.user, set: params[:subject_set_id]) when queue.below_minimum? SubjectQueueWorker.perform_async(workflow.id, user.id) end queue end end
# Dataset record: two snapshots of the Subspace gem version file separated by
# the commit message "Bump version" — VERSION goes from "0.4.11" to "0.4.13".
module Subspace VERSION = "0.4.11" end Bump version module Subspace VERSION = "0.4.13" end
# Dataset record: two snapshots of `SubtitleShifter` (a SubRip .srt parser /
# time-shifter) separated by the commit message "YARD Documentation added".
# Visible diffs in snapshot 2: extensive YARD doc comments are added to the
# class and its public API, Version::MINOR is bumped 0 -> 1, and the
# constructor now initializes `@parsed_ok = false` explicitly.
# Parsing, shifting, and SRT time conversion logic are otherwise identical.
# encoding: UTF-8 class SubtitleShifter module Version MAJOR = 1 MINOR = 0 PATCH = 0 BUILD = 0 STRING = [MAJOR, MINOR, PATCH, BUILD].compact.join('.') end attr_reader :subtitles, :parsed_ok TIME_SEPERATOR = '-->' def initialize(file, linebreak = "\r\n") @sub_file = file @linebreak = linebreak end def parse raw_text = File.open(@sub_file, 'r').read.force_encoding('UTF-8') raw_text.gsub!("\xEF\xBB\xBF".force_encoding("UTF-8"), '') #Remove stupid BOM that was causing me so much grief! #raw_text = IO.read @sub_file subtitle_parts = raw_text.split "#{@linebreak}#{@linebreak}" @subtitles = {} subtitle_parts.each do |subtitle| @subtitles.update extract_sub_data subtitle end # No longer needed due to removal of BOM #fix_first_index # What a hack :( @parsed_ok = true # Not very useful, but will help when error checking is added end def shift(args) first = args[:index] # used for checking first go round. index = first shift = args[:time] if shift < 0 # backward shift check time1 = @subtitles[first][:start] + shift time2 = @subtitles[first-1][:end] raise RuntimeError, 'Cannot overlap backward shift' if time2 > time1 end loop do break unless @subtitles.has_key?(index) @subtitles[index][:start] += shift @subtitles[index][:end] += shift index += 1 end end def to_s raise RuntimeError, 'File has not been parsed yet' unless @parsed_ok output = '' @subtitles.sort.map do |index, sub| start = ms_to_srt_time sub[:start] fin = ms_to_srt_time sub[:end] output += "#{index}#{@linebreak}#{start} #{TIME_SEPERATOR} #{fin}#{@linebreak}#{sub[:subtitle]}#{@linebreak}#{@linebreak}" end output.chomp end private def extract_sub_data(subtitle) s = subtitle.split @linebreak times = s[1].split " #{TIME_SEPERATOR} " {s[0].to_i => { start: srt_time_to_ms(times[0]), end: srt_time_to_ms(times[1]), subtitle: s[2..-1].join(@linebreak) } } end def srt_time_to_ms(srt_time) time_parts = parse_srt_time srt_time hours_ms = time_parts[:hours] * 60 * 60 * 1000 mins_ms = time_parts[:mins] * 60 * 1000 secs_ms = 
time_parts[:secs] * 1000 hours_ms + mins_ms + secs_ms + time_parts[:ms] end def ms_to_srt_time(ms) hours = (ms / (1000 * 60 *60)) % 60 minutes = (ms / (1000 * 60)) % 60 seconds = (ms / 1000) % 60 adj_ms = ms.to_s[-3..-1].to_i "%02d:%02d:%02d,%03d" % [hours, minutes, seconds, adj_ms] end def parse_srt_time (srt_time) # Time looks like: hh:mm:ss,ms # ... 10:09:08,756 /^(\d+):(\d+):(\d+),(\d+)$/ =~ srt_time {hours: $1.to_i, mins: $2.to_i, secs: $3.to_i, ms: $4.to_i } end # No longer needed due to fixing the BOM issue. But I'm leaving it in. def fix_first_index # This makes me feel *so* dirty :/ sub_arr = @subtitles.to_a idx1 = sub_arr[0][0] idx2 = sub_arr[1][0] @subtitles[idx2 - 1] = @subtitles.delete idx1 # At least I learnt this trick :) How to rename a hash key end end YARD Documentation added Project should be fully documented now. # encoding: UTF-8 #@example Manipulate subtitles from the command line # $ subtitle_shifter --help # $ subtitle_shifter --operation add --index 12 --time 2,345 source.srt dest.srt #@example Manipulate subtitles from within a ruby program # # This will shift all subtitles from index 12 onward by 2.345 seconds # # or 2345 milliseconds # subs = SubtitleShifter.new('mysubs.srt') # subs.parse # subs.shift(:index => 12, :time => 2345) #@example Shift subtitles backward # # This will shift subtitles backward, beware - you cannot shift # # subtitles backward so that they overlap the preceding subtitles. # # A RuntimeError exception will be raised if this occurs. 
# subs.shift(:index => 12, :time => -2345) # Simply provide a negative time value #@example Output subtitles once they've been parsed and/or manipulated # puts subs # # -- or -- # subs.to_s #@see http://en.wikipedia.org/wiki/Subrip Wikipedia's article on the SubRip format class SubtitleShifter module Version MAJOR = 1 MINOR = 1 PATCH = 0 BUILD = 0 STRING = [MAJOR, MINOR, PATCH, BUILD].compact.join('.') end # A boolean flag highlighting whether or not subtitles have been parsed yet attr_reader :parsed_ok # A hash of the parsed subtitles. You normally wouldn't need to access this directly # @example The format of the hash is as follows # {1 => {:start => 107, # :end => 5762, # :subtitle => 'This is the first subtitle' # }, # {2 => {:start => 5890, # :end => 10553, # :subtitle => 'This is the second subtitle' # } # @note I chose to implement internal representation of subtitle files as a hash # and not an array, which would've been more efficient, as subtitles cannot be # guaranteed to start at index 1 # That being said, I can already think of a way to do this using an array and offset attribute attr_reader :subtitles # The delimiter used for separating SubRip time stamps TIME_SEPERATOR = '-->' # @param [String] file A string of the file name # @param [String] linebreak A string of the linebreak pattern. def initialize(file, linebreak = "\r\n") @sub_file = file @linebreak = linebreak @parsed_ok = false end # Parses the subtitles # @note Always call only after initialising. # @note If your subtitle file is UTF-8 encoded, and has a Byte Order Mark as its first few bytes, # the BOM will not be preserved when outputing the parsed and shifted subtitles. 
You probably don't # need it anyway # @example # sub.parse if sub.parsed_ok # @see http://en.wikipedia.org/wiki/UTF-8#Byte_order_mark Wikipedia's article on UTF-8 and BOM def parse raw_text = File.open(@sub_file, 'r').read.force_encoding('UTF-8') raw_text.gsub!("\xEF\xBB\xBF".force_encoding("UTF-8"), '') #Remove stupid BOM that was causing me so much grief! #raw_text = IO.read @sub_file subtitle_parts = raw_text.split "#{@linebreak}#{@linebreak}" @subtitles = {} subtitle_parts.each do |subtitle| @subtitles.update extract_sub_data subtitle end # No longer needed due to removal of BOM #fix_first_index # What a hack :( @parsed_ok = true # Not very useful, but will help when error checking is added end # Shifts subtitles forward (or backward) by a number of ms from an index # @param [Integer] :index The index of the subtitle # @param [Integer] :time The time (in ms) by which you wish to shift the subtitles. A negative value will shift backwards. # @example # sub.shift(:index => 42, :time => 10000) # Shift subs from index 42 onwards by 10 seconds. # @raise [RuntimeError] Raises this exception when shifting backwards if index and index-1 time's overlap def shift(args) first = args[:index] # used for checking first go round. 
index = first shift = args[:time] if shift < 0 # backward shift check time1 = @subtitles[first][:start] + shift time2 = @subtitles[first-1][:end] raise RuntimeError, 'Cannot overlap backward shift' if time2 > time1 end loop do break unless @subtitles.has_key?(index) @subtitles[index][:start] += shift @subtitles[index][:end] += shift index += 1 end end # Outputs parsed subtitles # @raise [RuntimeError] Will raise this exception if an attempt is made to output the subs before parsing has taken place # @see SubtitleShifter#parsed_ok def to_s raise RuntimeError, 'File has not been parsed yet' unless @parsed_ok output = '' @subtitles.sort.map do |index, sub| start = ms_to_srt_time sub[:start] fin = ms_to_srt_time sub[:end] output += "#{index}#{@linebreak}#{start} #{TIME_SEPERATOR} #{fin}#{@linebreak}#{sub[:subtitle]}#{@linebreak}#{@linebreak}" end output.chomp end private def extract_sub_data(subtitle) s = subtitle.split @linebreak times = s[1].split " #{TIME_SEPERATOR} " {s[0].to_i => { start: srt_time_to_ms(times[0]), end: srt_time_to_ms(times[1]), subtitle: s[2..-1].join(@linebreak) } } end def srt_time_to_ms(srt_time) time_parts = parse_srt_time srt_time hours_ms = time_parts[:hours] * 60 * 60 * 1000 mins_ms = time_parts[:mins] * 60 * 1000 secs_ms = time_parts[:secs] * 1000 hours_ms + mins_ms + secs_ms + time_parts[:ms] end def ms_to_srt_time(ms) hours = (ms / (1000 * 60 *60)) % 60 minutes = (ms / (1000 * 60)) % 60 seconds = (ms / 1000) % 60 adj_ms = ms.to_s[-3..-1].to_i "%02d:%02d:%02d,%03d" % [hours, minutes, seconds, adj_ms] end def parse_srt_time (srt_time) # Time looks like: hh:mm:ss,ms # ... 10:09:08,756 /^(\d+):(\d+):(\d+),(\d+)$/ =~ srt_time {hours: $1.to_i, mins: $2.to_i, secs: $3.to_i, ms: $4.to_i } end # No longer needed due to fixing the BOM issue. But I'm leaving it in. 
def fix_first_index # This makes me feel *so* dirty :/ sub_arr = @subtitles.to_a idx1 = sub_arr[0][0] idx2 = sub_arr[1][0] @subtitles[idx2 - 1] = @subtitles.delete idx1 # At least I learnt this trick :) How to rename a hash key end end
# Dataset record: two snapshots of `SugarCRM::Session` separated by the commit
# message "add Session#namespace_const virtual attribute".
# The only code diff: snapshot 2 adds the public reader
# `def namespace_const; SugarCRM.const_get(@namespace); end`, returning the
# per-session anonymous module that `initialize` registers via
# `SugarCRM.const_set(@namespace, namespace_module)`. Everything else
# (config loading, connection setup, extension loading) is identical.
# This class hold an individual connection to a SugarCRM server. # There can be several such simultaneous connections module SugarCRM; class Session attr_reader :config, :connection, :namespace attr_accessor :modules def initialize(url=nil, user=nil, pass=nil, opts={}) options = { :debug => false, :register_modules => true }.merge(opts) @modules = [] @namespace = "Namespace#{SugarCRM.sessions.size}" @config = { :base_url => url, :username => user, :password => pass } unless connection_info_loaded? # see README for reasoning behind the priorization config_file_paths.each{|path| load_config path if File.exists? path } end raise MissingCredentials, "Missing login credentials. Make sure you provide the SugarCRM URL, username, and password" unless connection_info_loaded? @connection = SugarCRM::Connection.new(url, user, pass, opts) @connection.session_instance = self extensions_folder = File.join(File.dirname(__FILE__), 'extensions') # Create a new module to have a separate namespace in which to register the SugarCRM modules. # This will prevent issues with modules from separate SugarCRM instances colliding within the same namespace # (e.g. 
2 SugarCRM instances where only one has custom fields on the Account module) namespace_module = Object::Module.new do @session = nil def self.session @session end def self.session=(sess) @session = sess end def self.current_user SugarCRM.const_get(@session.namespace)::User.find_by_user_name(@session.config[:username]) end end # set the session: will be needed in SugarCRM::Base to call the API methods on the correct session namespace_module.session = self SugarCRM.const_set(@namespace, namespace_module) Module.register_all(self) if options[:register_modules] SugarCRM.sessions << self end # create a new session from the credentials present in a file def self.new_from_file(path, opts={}) config = load_and_parse_config(path) begin self.new(config[:base_url], config[:username], config[:password], opts) rescue MissingCredentials => e return false end end # load all the monkey patch extension files in the provided folder def extensions_folder=(folder, dirstring=nil) self.class.validate_path folder path = File.expand_path(folder, dirstring) Dir[File.join(path, '**', '*.rb').to_s].each { |f| load(f) } end # load credentials from file, and (re)connect to SugarCRM def load_config(path) @config = self.class.load_and_parse_config(path) @connection = SugarCRM::Connection.new(@config[:base_url], @config[:username], @config[:password]) if connection_info_loaded? @config end def update_config(params) params.each{|k,v| @config[k.to_sym] = v } @config end # lazy load the SugarCRM version we're connecting to def sugar_version @version ||= @connection.get_server_info["version"] end private def self.load_and_parse_config(path) validate_path path hash = {} config = YAML.load_file(path) if config && config["config"] config["config"].each{|k,v| hash[k.to_sym] = v } end hash end def self.validate_path(path) raise "Invalid path: #{path}" unless File.exists? 
path end def config_file_paths # see README for reasoning behind the priorization paths = ['/etc/sugarcrm.yaml', File.expand_path('~/.sugarcrm.yaml'), File.join(File.dirname(__FILE__), 'config', 'sugarcrm.yaml')] paths.insert(1, File.join(ENV['USERPROFILE'], 'sugarcrm.yaml')) if ENV['USERPROFILE'] paths end def connection_info_loaded? @config[:base_url] && @config[:username] && @config[:password] end end; end add Session#namespace_const virtual attribute # This class hold an individual connection to a SugarCRM server. # There can be several such simultaneous connections module SugarCRM; class Session attr_reader :config, :connection, :namespace attr_accessor :modules def initialize(url=nil, user=nil, pass=nil, opts={}) options = { :debug => false, :register_modules => true }.merge(opts) @modules = [] @namespace = "Namespace#{SugarCRM.sessions.size}" @config = { :base_url => url, :username => user, :password => pass } unless connection_info_loaded? # see README for reasoning behind the priorization config_file_paths.each{|path| load_config path if File.exists? path } end raise MissingCredentials, "Missing login credentials. Make sure you provide the SugarCRM URL, username, and password" unless connection_info_loaded? @connection = SugarCRM::Connection.new(url, user, pass, opts) @connection.session_instance = self extensions_folder = File.join(File.dirname(__FILE__), 'extensions') # Create a new module to have a separate namespace in which to register the SugarCRM modules. # This will prevent issues with modules from separate SugarCRM instances colliding within the same namespace # (e.g. 
2 SugarCRM instances where only one has custom fields on the Account module) namespace_module = Object::Module.new do @session = nil def self.session @session end def self.session=(sess) @session = sess end def self.current_user SugarCRM.const_get(@session.namespace)::User.find_by_user_name(@session.config[:username]) end end # set the session: will be needed in SugarCRM::Base to call the API methods on the correct session namespace_module.session = self SugarCRM.const_set(@namespace, namespace_module) Module.register_all(self) if options[:register_modules] SugarCRM.sessions << self end # create a new session from the credentials present in a file def self.new_from_file(path, opts={}) config = load_and_parse_config(path) begin self.new(config[:base_url], config[:username], config[:password], opts) rescue MissingCredentials => e return false end end def namespace_const SugarCRM.const_get(@namespace) end # load all the monkey patch extension files in the provided folder def extensions_folder=(folder, dirstring=nil) self.class.validate_path folder path = File.expand_path(folder, dirstring) Dir[File.join(path, '**', '*.rb').to_s].each { |f| load(f) } end # load credentials from file, and (re)connect to SugarCRM def load_config(path) @config = self.class.load_and_parse_config(path) @connection = SugarCRM::Connection.new(@config[:base_url], @config[:username], @config[:password]) if connection_info_loaded? @config end def update_config(params) params.each{|k,v| @config[k.to_sym] = v } @config end # lazy load the SugarCRM version we're connecting to def sugar_version @version ||= @connection.get_server_info["version"] end private def self.load_and_parse_config(path) validate_path path hash = {} config = YAML.load_file(path) if config && config["config"] config["config"].each{|k,v| hash[k.to_sym] = v } end hash end def self.validate_path(path) raise "Invalid path: #{path}" unless File.exists? 
path end def config_file_paths # see README for reasoning behind the priorization paths = ['/etc/sugarcrm.yaml', File.expand_path('~/.sugarcrm.yaml'), File.join(File.dirname(__FILE__), 'config', 'sugarcrm.yaml')] paths.insert(1, File.join(ENV['USERPROFILE'], 'sugarcrm.yaml')) if ENV['USERPROFILE'] paths end def connection_info_loaded? @config[:base_url] && @config[:username] && @config[:password] end end; end
# Copyright (c) 2010-2012 The University of Manchester, UK. # # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the names of The University of Manchester nor the names of its # contributors may be used to endorse or promote products derived from this # software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # # Author: Robert Haines require 'base64' require 'uri' module T2Server # An interface for directly communicating with one or more Taverna 2 Server # instances. class Server include XML::Methods # The version of the remote Taverna Server instance. 
attr_reader :version # :stopdoc: XPaths = { # Server top-level XPath queries :server => XML::Methods.xpath_compile("//nsr:serverDescription"), :policy => XML::Methods.xpath_compile("//nsr:policy"), :run => XML::Methods.xpath_compile("//nsr:run"), :runs => XML::Methods.xpath_compile("//nsr:runs"), # Server policy XPath queries :runlimit => XML::Methods.xpath_compile("//nsr:runLimit"), :permwkf => XML::Methods.xpath_compile("//nsr:permittedWorkflows"), :permlstn => XML::Methods.xpath_compile("//nsr:permittedListeners"), :permlstt => XML::Methods.xpath_compile("//nsr:permittedListenerTypes"), :notify => XML::Methods.xpath_compile("//nsr:enabledNotificationFabrics") } # :startdoc: # :call-seq: # new(uri, connection_parameters = nil) -> Server # new(uri, connection_parameters = nil) {|self| ...} # # Create a new Server instance that represents the real server at _uri_. # If _connection_parameters_ are supplied they will be used to set up the # network connection to the server. # # It will _yield_ itself if a block is given. def initialize(uri, params = nil) # we want to use URIs here but strings can be passed in unless uri.is_a? URI uri = URI.parse(Util.strip_path_slashes(uri)) end # strip username and password from the URI if present if uri.user != nil uri = URI::HTTP.new(uri.scheme, nil, uri.host, uri.port, nil, uri.path, nil, nil, nil); end # setup connection @connection = ConnectionFactory.connect(uri, params) # add a slash to the end of this address to work around this bug: # http://www.mygrid.org.uk/dev/issues/browse/TAVSERV-113 server_description = xml_document(get_attribute("#{uri.path}/rest/", "application/xml")) @version = get_version(server_description) @links = get_description(server_description) @links[:admin] = "#{uri.path}/admin" # initialize run object cache @runs = {} yield(self) if block_given? end # :stopdoc: def Server.connect(uri, username="", password="") warn "[DEPRECATION] 'Server#connect' is deprecated and will be " + "removed in 1.0." 
new(uri) end # :startdoc: # :call-seq: # administrator(credentials = nil) -> Administrator # administrator(credentials = nil) {|admin| ...} # # Return an instance of the Taverna Server administrator interface. This # method will _yield_ the newly created administrator if a block is given. def administrator(credentials = nil) admin = Administrator.new(self, credentials) yield(admin) if block_given? admin end # :call-seq: # create_run(workflow, credentials = nil) -> run # create_run(workflow, credentials = nil) {|run| ...} # # Create a run on this server using the specified _workflow_. # This method will _yield_ the newly created Run if a block is given. def create_run(workflow, credentials = nil) id = initialize_run(workflow, credentials) run = Run.create(self, "", credentials, id) # cache newly created run object - this must be done per user user = credentials.nil? ? :all : credentials.username @runs[user] = {} unless @runs[user] @runs[user][id] = run yield(run) if block_given? run end # :call-seq: # initialize_run(workflow, credentials = nil) -> string # # Create a run on this server using the specified _workflow_ but do not # return it as a Run instance. Return its identifier instead. def initialize_run(workflow, credentials = nil) # set up the run object cache - this must be done per user user = credentials.nil? ? :all : credentials.username @runs[user] = {} unless @runs[user] @connection.POST_run("#{@links[:runs]}", XML::Fragments::WORKFLOW % workflow, credentials) end # :call-seq: # uri -> URI # # The URI of the connection to the remote Taverna Server. def uri @connection.uri end # :call-seq: # run_limit(credentials = nil) -> num # # The maximum number of runs that this server will allow at any one time. # Runs in any state (+Initialized+, +Running+ and +Finished+) are counted # against this maximum. 
def run_limit(credentials = nil) get_attribute(@links[:runlimit], "text/plain", credentials).to_i end # :call-seq: # runs(credentials = nil) -> [runs] # # Return the set of runs on this server. def runs(credentials = nil) get_runs(credentials).values end # :call-seq: # run(identifier, credentials = nil) -> run # # Return the specified run. def run(identifier, credentials = nil) get_runs(credentials)[identifier] end # :call-seq: # delete_run(run, credentials = nil) -> bool # # Delete the specified run from the server, discarding all of its state. # _run_ can be either a Run instance or a identifier. def delete_run(run, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? Run run = run.identifier end if delete_attribute("#{@links[:runs]}/#{run}", credentials) # delete cached run object - this must be done per user user = credentials.nil? ? :all : credentials.username @runs[user].delete(run) true end end # :call-seq: # delete_all_runs(credentials = nil) # # Delete all runs on this server, discarding all of their state. def delete_all_runs(credentials = nil) # first refresh run list runs(credentials).each {|run| run.delete} end # :stopdoc: def set_run_input(run, input, value, credentials = nil) warn "[DEPRECATION] 'Server#set_run_input' is deprecated and will be " + "removed in 1.0. Input ports are set directly instead. The most " + "direct replacement for this method is: " + "'Run#input_port(input).value = value'" # get the run from the identifier if that is what is passed in if not run.instance_of? Run run = run(run, credentials) end run.input_port(input).value = value end def set_run_input_file(run, input, filename, credentials = nil) warn "[DEPRECATION] 'Server#set_run_input_file' is deprecated and " + "will be removed in 1.0. Input ports are set directly instead. 
The " + "most direct replacement for this method is: " + "'Run#input_port(input).remote_file = filename'" # get the run from the identifier if that is what is passed in if not run.instance_of? Run run = run(run, credentials) end run.input_port(input).remote_file = filename end def create_dir(run, root, dir, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? Run run = run.identifier end raise AccessForbiddenError.new("subdirectories (#{dir})") if dir.include? ?/ @connection.POST_dir("#{@links[:runs]}/#{run}/#{root}", XML::Fragments::MKDIR % dir, run, dir, credentials) end def make_run_dir(run, root, dir, credentials = nil) warn "[DEPRECATION] 'Server#make_run_dir' is deprecated and will be " + "removed in 1.0. Please use 'Run#mkdir' instead." create_dir(run, root, dir, credentials) end def upload_file(run, filename, location, rename, credentials = nil) contents = IO.read(filename) rename = filename.split('/')[-1] if rename == "" if upload_data(run, contents, rename, location, credentials) rename end end def upload_data(run, data, remote_name, location, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? Run run = run.identifier end contents = Base64.encode64(data) @connection.POST_file("#{@links[:runs]}/#{run}/#{location}", XML::Fragments::UPLOAD % [remote_name, contents], run, credentials) end def upload_run_file(run, filename, location, rename, credentials = nil) warn "[DEPRECATION] 'Server#upload_run_file' is deprecated and will " + "be removed in 1.0. Please use 'Run#upload_file' or " + "'Run#input_port(input).file = filename' instead." upload_file(run, filename, location, rename, credentials) end def create_run_attribute(run, path, value, type, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? 
Run run = run.identifier end create_attribute("#{@links[:runs]}/#{run}/#{path}", value, type, credentials) rescue AttributeNotFoundError => e if get_runs(credentials).has_key? run raise e else raise RunNotFoundError.new(run) end end def get_run_attribute(run, path, type, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? Run run = run.identifier end get_attribute("#{@links[:runs]}/#{run}/#{path}", type, credentials) rescue AttributeNotFoundError => e if get_runs(credentials).has_key? run raise e else raise RunNotFoundError.new(run) end end def set_run_attribute(run, path, value, type, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? Run run = run.identifier end set_attribute("#{@links[:runs]}/#{run}/#{path}", value, type, credentials) rescue AttributeNotFoundError => e if get_runs(credentials).has_key? run raise e else raise RunNotFoundError.new(run) end end def delete_run_attribute(run, path, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? Run run = run.identifier end delete_attribute("#{@links[:runs]}/#{run}/#{path}", credentials) rescue AttributeNotFoundError => e if get_runs(credentials).has_key? run raise e else raise RunNotFoundError.new(run) end end def download_run_file(run, path, range, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? Run run = run.identifier end get_attribute("#{@links[:runs]}/#{run}/#{path}", "application/octet-stream", range, credentials) rescue AttributeNotFoundError => e if get_runs(credentials).has_key? 
run raise e else raise RunNotFoundError.new(run) end end def get_admin_attribute(path, credentials = nil) get_attribute("#{@links[:admin]}/#{path}", "*/*", credentials) end def set_admin_attribute(path, value, credentials = nil) set_attribute("#{@links[:admin]}/#{path}", value, "text/plain", credentials) end def admin_resource_writable?(path, credentials = nil) headers = @connection.OPTIONS("#{@links[:admin]}/#{path}", credentials) headers["allow"][0].split(",").include? "PUT" end # :startdoc: private def create_attribute(path, value, type, credentials = nil) @connection.POST(path, value, type, credentials) end def get_attribute(path, type, *rest) credentials = nil range = nil rest.each do |param| case param when HttpCredentials credentials = param when Range range = param when Array range = param[0]..param[1] end end begin @connection.GET(path, type, range, credentials) rescue ConnectionRedirectError => cre @connection = cre.redirect retry end end def set_attribute(path, value, type, credentials = nil) @connection.PUT(path, value, type, credentials) end def delete_attribute(path, credentials = nil) @connection.DELETE(path, credentials) end def get_version(doc) version = xpath_attr(doc, XPaths[:server], "serverVersion") if version == nil raise RuntimeError.new("Taverna Servers prior to version 2.3 " + "are no longer supported.") else return version.to_f end end def get_description(doc) links = {} links[:runs] = URI.parse(xpath_attr(doc, XPaths[:runs], "href")).path links[:policy] = URI.parse(xpath_attr(doc, XPaths[:policy], "href")).path doc = xml_document(get_attribute(links[:policy], "application/xml")) links[:permlisteners] = URI.parse(xpath_attr(doc, XPaths[:permlstt], "href")).path links[:notifications] = URI.parse(xpath_attr(doc, XPaths[:notify], "href")).path links[:runlimit] = URI.parse(xpath_attr(doc, XPaths[:runlimit], "href")).path links[:permworkflows] = URI.parse(xpath_attr(doc, XPaths[:permwkf], "href")).path links end def get_runs(credentials = nil) 
run_list = get_attribute("#{@links[:runs]}", "application/xml", credentials) doc = xml_document(run_list) # get list of run identifiers ids = [] xpath_find(doc, XPaths[:run]).each do |run| ids << xml_node_attribute(run, "href").split('/')[-1] end # cache run objects - this must be done per user user = credentials.nil? ? :all : credentials.username @runs[user] = {} unless @runs[user] # add new runs ids.each do |id| if !@runs[user].has_key? id @runs[user][id] = Run.create(self, "", credentials, id) end end # clear out the expired runs if @runs[user].length > ids.length @runs[user].delete_if {|key, val| !ids.member? key} end @runs[user] end end end Fix Run caching deletion issue in Server. When a Run is deleted then its cached object must be deleted, but only if it has been cached! # Copyright (c) 2010-2012 The University of Manchester, UK. # # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the names of The University of Manchester nor the names of its # contributors may be used to endorse or promote products derived from this # software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # # Author: Robert Haines require 'base64' require 'uri' module T2Server # An interface for directly communicating with one or more Taverna 2 Server # instances. class Server include XML::Methods # The version of the remote Taverna Server instance. attr_reader :version # :stopdoc: XPaths = { # Server top-level XPath queries :server => XML::Methods.xpath_compile("//nsr:serverDescription"), :policy => XML::Methods.xpath_compile("//nsr:policy"), :run => XML::Methods.xpath_compile("//nsr:run"), :runs => XML::Methods.xpath_compile("//nsr:runs"), # Server policy XPath queries :runlimit => XML::Methods.xpath_compile("//nsr:runLimit"), :permwkf => XML::Methods.xpath_compile("//nsr:permittedWorkflows"), :permlstn => XML::Methods.xpath_compile("//nsr:permittedListeners"), :permlstt => XML::Methods.xpath_compile("//nsr:permittedListenerTypes"), :notify => XML::Methods.xpath_compile("//nsr:enabledNotificationFabrics") } # :startdoc: # :call-seq: # new(uri, connection_parameters = nil) -> Server # new(uri, connection_parameters = nil) {|self| ...} # # Create a new Server instance that represents the real server at _uri_. # If _connection_parameters_ are supplied they will be used to set up the # network connection to the server. # # It will _yield_ itself if a block is given. def initialize(uri, params = nil) # we want to use URIs here but strings can be passed in unless uri.is_a? 
URI uri = URI.parse(Util.strip_path_slashes(uri)) end # strip username and password from the URI if present if uri.user != nil uri = URI::HTTP.new(uri.scheme, nil, uri.host, uri.port, nil, uri.path, nil, nil, nil); end # setup connection @connection = ConnectionFactory.connect(uri, params) # add a slash to the end of this address to work around this bug: # http://www.mygrid.org.uk/dev/issues/browse/TAVSERV-113 server_description = xml_document(get_attribute("#{uri.path}/rest/", "application/xml")) @version = get_version(server_description) @links = get_description(server_description) @links[:admin] = "#{uri.path}/admin" # initialize run object cache @runs = {} yield(self) if block_given? end # :stopdoc: def Server.connect(uri, username="", password="") warn "[DEPRECATION] 'Server#connect' is deprecated and will be " + "removed in 1.0." new(uri) end # :startdoc: # :call-seq: # administrator(credentials = nil) -> Administrator # administrator(credentials = nil) {|admin| ...} # # Return an instance of the Taverna Server administrator interface. This # method will _yield_ the newly created administrator if a block is given. def administrator(credentials = nil) admin = Administrator.new(self, credentials) yield(admin) if block_given? admin end # :call-seq: # create_run(workflow, credentials = nil) -> run # create_run(workflow, credentials = nil) {|run| ...} # # Create a run on this server using the specified _workflow_. # This method will _yield_ the newly created Run if a block is given. def create_run(workflow, credentials = nil) id = initialize_run(workflow, credentials) run = Run.create(self, "", credentials, id) # cache newly created run object - this must be done per user user = credentials.nil? ? :all : credentials.username @runs[user] = {} unless @runs[user] @runs[user][id] = run yield(run) if block_given? 
run end # :call-seq: # initialize_run(workflow, credentials = nil) -> string # # Create a run on this server using the specified _workflow_ but do not # return it as a Run instance. Return its identifier instead. def initialize_run(workflow, credentials = nil) # set up the run object cache - this must be done per user user = credentials.nil? ? :all : credentials.username @runs[user] = {} unless @runs[user] @connection.POST_run("#{@links[:runs]}", XML::Fragments::WORKFLOW % workflow, credentials) end # :call-seq: # uri -> URI # # The URI of the connection to the remote Taverna Server. def uri @connection.uri end # :call-seq: # run_limit(credentials = nil) -> num # # The maximum number of runs that this server will allow at any one time. # Runs in any state (+Initialized+, +Running+ and +Finished+) are counted # against this maximum. def run_limit(credentials = nil) get_attribute(@links[:runlimit], "text/plain", credentials).to_i end # :call-seq: # runs(credentials = nil) -> [runs] # # Return the set of runs on this server. def runs(credentials = nil) get_runs(credentials).values end # :call-seq: # run(identifier, credentials = nil) -> run # # Return the specified run. def run(identifier, credentials = nil) get_runs(credentials)[identifier] end # :call-seq: # delete_run(run, credentials = nil) -> bool # # Delete the specified run from the server, discarding all of its state. # _run_ can be either a Run instance or a identifier. def delete_run(run, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? Run run = run.identifier end if delete_attribute("#{@links[:runs]}/#{run}", credentials) # delete cached run object - this must be done per user user = credentials.nil? ? :all : credentials.username @runs[user].delete(run) if @runs[user] true end end # :call-seq: # delete_all_runs(credentials = nil) # # Delete all runs on this server, discarding all of their state. 
def delete_all_runs(credentials = nil) # first refresh run list runs(credentials).each {|run| run.delete} end # :stopdoc: def set_run_input(run, input, value, credentials = nil) warn "[DEPRECATION] 'Server#set_run_input' is deprecated and will be " + "removed in 1.0. Input ports are set directly instead. The most " + "direct replacement for this method is: " + "'Run#input_port(input).value = value'" # get the run from the identifier if that is what is passed in if not run.instance_of? Run run = run(run, credentials) end run.input_port(input).value = value end def set_run_input_file(run, input, filename, credentials = nil) warn "[DEPRECATION] 'Server#set_run_input_file' is deprecated and " + "will be removed in 1.0. Input ports are set directly instead. The " + "most direct replacement for this method is: " + "'Run#input_port(input).remote_file = filename'" # get the run from the identifier if that is what is passed in if not run.instance_of? Run run = run(run, credentials) end run.input_port(input).remote_file = filename end def create_dir(run, root, dir, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? Run run = run.identifier end raise AccessForbiddenError.new("subdirectories (#{dir})") if dir.include? ?/ @connection.POST_dir("#{@links[:runs]}/#{run}/#{root}", XML::Fragments::MKDIR % dir, run, dir, credentials) end def make_run_dir(run, root, dir, credentials = nil) warn "[DEPRECATION] 'Server#make_run_dir' is deprecated and will be " + "removed in 1.0. Please use 'Run#mkdir' instead." create_dir(run, root, dir, credentials) end def upload_file(run, filename, location, rename, credentials = nil) contents = IO.read(filename) rename = filename.split('/')[-1] if rename == "" if upload_data(run, contents, rename, location, credentials) rename end end def upload_data(run, data, remote_name, location, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? 
Run run = run.identifier end contents = Base64.encode64(data) @connection.POST_file("#{@links[:runs]}/#{run}/#{location}", XML::Fragments::UPLOAD % [remote_name, contents], run, credentials) end def upload_run_file(run, filename, location, rename, credentials = nil) warn "[DEPRECATION] 'Server#upload_run_file' is deprecated and will " + "be removed in 1.0. Please use 'Run#upload_file' or " + "'Run#input_port(input).file = filename' instead." upload_file(run, filename, location, rename, credentials) end def create_run_attribute(run, path, value, type, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? Run run = run.identifier end create_attribute("#{@links[:runs]}/#{run}/#{path}", value, type, credentials) rescue AttributeNotFoundError => e if get_runs(credentials).has_key? run raise e else raise RunNotFoundError.new(run) end end def get_run_attribute(run, path, type, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? Run run = run.identifier end get_attribute("#{@links[:runs]}/#{run}/#{path}", type, credentials) rescue AttributeNotFoundError => e if get_runs(credentials).has_key? run raise e else raise RunNotFoundError.new(run) end end def set_run_attribute(run, path, value, type, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? Run run = run.identifier end set_attribute("#{@links[:runs]}/#{run}/#{path}", value, type, credentials) rescue AttributeNotFoundError => e if get_runs(credentials).has_key? run raise e else raise RunNotFoundError.new(run) end end def delete_run_attribute(run, path, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? Run run = run.identifier end delete_attribute("#{@links[:runs]}/#{run}/#{path}", credentials) rescue AttributeNotFoundError => e if get_runs(credentials).has_key? 
run raise e else raise RunNotFoundError.new(run) end end def download_run_file(run, path, range, credentials = nil) # get the identifier from the run if that is what is passed in if run.instance_of? Run run = run.identifier end get_attribute("#{@links[:runs]}/#{run}/#{path}", "application/octet-stream", range, credentials) rescue AttributeNotFoundError => e if get_runs(credentials).has_key? run raise e else raise RunNotFoundError.new(run) end end def get_admin_attribute(path, credentials = nil) get_attribute("#{@links[:admin]}/#{path}", "*/*", credentials) end def set_admin_attribute(path, value, credentials = nil) set_attribute("#{@links[:admin]}/#{path}", value, "text/plain", credentials) end def admin_resource_writable?(path, credentials = nil) headers = @connection.OPTIONS("#{@links[:admin]}/#{path}", credentials) headers["allow"][0].split(",").include? "PUT" end # :startdoc: private def create_attribute(path, value, type, credentials = nil) @connection.POST(path, value, type, credentials) end def get_attribute(path, type, *rest) credentials = nil range = nil rest.each do |param| case param when HttpCredentials credentials = param when Range range = param when Array range = param[0]..param[1] end end begin @connection.GET(path, type, range, credentials) rescue ConnectionRedirectError => cre @connection = cre.redirect retry end end def set_attribute(path, value, type, credentials = nil) @connection.PUT(path, value, type, credentials) end def delete_attribute(path, credentials = nil) @connection.DELETE(path, credentials) end def get_version(doc) version = xpath_attr(doc, XPaths[:server], "serverVersion") if version == nil raise RuntimeError.new("Taverna Servers prior to version 2.3 " + "are no longer supported.") else return version.to_f end end def get_description(doc) links = {} links[:runs] = URI.parse(xpath_attr(doc, XPaths[:runs], "href")).path links[:policy] = URI.parse(xpath_attr(doc, XPaths[:policy], "href")).path doc = 
xml_document(get_attribute(links[:policy], "application/xml")) links[:permlisteners] = URI.parse(xpath_attr(doc, XPaths[:permlstt], "href")).path links[:notifications] = URI.parse(xpath_attr(doc, XPaths[:notify], "href")).path links[:runlimit] = URI.parse(xpath_attr(doc, XPaths[:runlimit], "href")).path links[:permworkflows] = URI.parse(xpath_attr(doc, XPaths[:permwkf], "href")).path links end def get_runs(credentials = nil) run_list = get_attribute("#{@links[:runs]}", "application/xml", credentials) doc = xml_document(run_list) # get list of run identifiers ids = [] xpath_find(doc, XPaths[:run]).each do |run| ids << xml_node_attribute(run, "href").split('/')[-1] end # cache run objects - this must be done per user user = credentials.nil? ? :all : credentials.username @runs[user] = {} unless @runs[user] # add new runs ids.each do |id| if !@runs[user].has_key? id @runs[user][id] = Run.create(self, "", credentials, id) end end # clear out the expired runs if @runs[user].length > ids.length @runs[user].delete_if {|key, val| !ids.member? key} end @runs[user] end end end
module Talkable VERSION = "1.0.4" end Bump version to 1.0.5 module Talkable VERSION = "1.0.5" end
require 'cucumber/rake/task' namespace :cucumber do ::Cucumber::Rake::Task.new({ ok: 'test:prepare' }, 'Run features that should pass') do |t| t.fork = true # You may get faster startup if you set this to false t.profile = 'default' end ::Cucumber::Rake::Task.new({ wip: 'test:prepare' }, 'Run features that are being worked on') do |t| t.fork = true # You may get faster startup if you set this to false t.profile = 'wip' end ::Cucumber::Rake::Task.new({ rerun: 'test:prepare' }, 'Record failing features and run only them if any exist') do |t| t.fork = true # You may get faster startup if you set this to false t.profile = 'rerun' end desc 'Run all features' task :all => [:ok, :wip] end desc 'Alias for cucumber:ok' task cucumber: 'cucumber:ok' Don't define Cucumber tasks in production mode Cucumber is only a member of the development and test groups in the Gemfile begin require 'cucumber/rake/task' namespace :cucumber do ::Cucumber::Rake::Task.new({ ok: 'test:prepare' }, 'Run features that should pass') do |t| t.fork = true # You may get faster startup if you set this to false t.profile = 'default' end ::Cucumber::Rake::Task.new({ wip: 'test:prepare' }, 'Run features that are being worked on') do |t| t.fork = true # You may get faster startup if you set this to false t.profile = 'wip' end ::Cucumber::Rake::Task.new({ rerun: 'test:prepare' }, 'Record failing features and run only them if any exist') do |t| t.fork = true # You may get faster startup if you set this to false t.profile = 'rerun' end desc 'Run all features' task :all => [:ok, :wip] end desc 'Alias for cucumber:ok' task cucumber: 'cucumber:ok' rescue LoadError end
namespace :pictures do desc 'Find missing attachments' task :find_missing_attachments do require_relative '../../config/environment' if ENV['LOG'].present? ActiveRecord::Base.logger = Logger.new($stderr) end Picture.find_each do |picture| next if picture.image_file.attachment.present? p picture end end namespace :refresh do desc 'Refresh all metadata' task :metadata do require_relative '../../config/environment' if ENV['LOG'].present? ActiveRecord::Base.logger = Logger.new($stderr) end Picture.find_each do |picture| next unless picture.image_file.attachment.present? begin picture.image_file.blob.analyze rescue Errno::ENOENT => err p err end end end desc 'Refresh order_date attributes' task :order_date do require_relative '../../config/environment' if ENV['LOG'].present? ActiveRecord::Base.logger = Logger.new($stderr) end Picture.find_each do |p| if p.photographed_at.nil? p.update_column(:order_date, p.created_at) else p.update_column(:order_date, p.photographed_at) end end end end end Add rake task stubs for old migrations namespace :pictures do desc 'Find missing attachments' task :find_missing_attachments do require_relative '../../config/environment' if ENV['LOG'].present? ActiveRecord::Base.logger = Logger.new($stderr) end Picture.find_each do |picture| next if picture.image_file.attachment.present? p picture end end namespace :refresh do desc 'Refresh all metadata' task :metadata do require_relative '../../config/environment' if ENV['LOG'].present? ActiveRecord::Base.logger = Logger.new($stderr) end Picture.find_each do |picture| next unless picture.image_file.attachment.present? begin picture.image_file.blob.analyze rescue Errno::ENOENT => err p err end end end desc 'Refresh order_date attributes' task :order_date do require_relative '../../config/environment' if ENV['LOG'].present? ActiveRecord::Base.logger = Logger.new($stderr) end Picture.find_each do |p| if p.photographed_at.nil? 
p.update_column(:order_date, p.created_at) else p.update_column(:order_date, p.photographed_at) end end end [:exif_data, :dimensions].each do |name| desc '[OBSOLETE] Stub for old migration' task name do end end end end
require 'csv'
require 'rest-client'

namespace :populate do
  desc 'Export metadata'
  task export_metadata: :environment do
    # NOTE(review): this task reads from `Metadata` while the import task below
    # writes to `Meta` -- confirm both model names are intended.
    metas = Metadata.all
    CSV.open("#{Rails.root}/lib/metadata_export.csv", 'wb') do |csv|
      # Header cells must be strings; the bare identifiers previously used
      # (name, display_name, ...) raise NameError at runtime.
      csv << %w[name display_name description units datatype user_defined]
      metas.each do |meta|
        csv << [meta.name, meta.display_name, meta.description,
                meta.unit.machine_name, meta.datatype, meta.user_defined]
      end
    end
  end

  desc 'Import metadata from CSV'
  task import_metadata: :environment do
    # fail 'Populating is only intended for sample data in development' unless Rails.env == 'development'
    puts 'deleting and importing metadata from metadata.csv'
    Meta.delete_all

    # metadata.csv = real data, metadata_test.csv = test data
    CSV.foreach("#{Rails.root}/lib/metadata.csv", headers: true, header_converters: :symbol) do |r|
      next unless r[:name]

      # Resolve and validate the unit reference first; skip the row when it
      # cannot be resolved to an allowable unit.
      if r[:unit].nil?
        puts "No unit specified. If no units are applicable, set unit to 'none', metadata #{r[:name]} was not saved"
        next
      end

      units = Unit.where(name: r[:unit])
      if units.count == 0
        puts "No match for unit #{r[:unit]}, metadata #{r[:name]} was not saved"
        next
      elsif !units.first.allowable
        puts "Unit #{r[:unit]} is not allowable, metadata #{r[:name]} was not saved"
        next
      end

      # All the meta get deleted every time; find_or_create_by keeps the door
      # open to preserving user defined data once delete_all above is removed.
      m = Meta.find_or_create_by(name: r[:name])
      m.name = r[:name]
      m.display_name = r[:display_name]
      m.short_name = r[:short_name]
      m.description = r[:description]
      m.unit = units.first
      m.datatype = r[:datatype]
      m.user_defined = r[:user_defined] == 'true'
      m.save!
    end
  end

  # Import Project Haystack units
  desc 'import units from haystack excel file'
  task units: :environment do
    require 'roo'

    puts 'Deleting and reimporting units'
    Unit.delete_all

    mapping_file = Rails.root.join('lib/project_haystack_units.xlsx')
    puts "opening #{mapping_file}"
    xls = Roo::Spreadsheet.open(mapping_file.to_s)

    units = xls.sheet('haystack_definitions').parse
    units.each_with_index do |row, index|
      next if index.zero? # skip the header row

      puts row.inspect
      unit = Unit.find_or_create_by(name: row[1])
      unit.type = row[0]
      unit.display_name = row[2]
      unit.symbol = row[3]
      unit.symbol_alt = row[4] unless row[4].nil?
      # to_s guards against nil / non-string spreadsheet cells (e.g. Excel
      # booleans), which made a plain row[6].downcase raise NoMethodError;
      # casecmp accepts both 'TRUE' and 'true'.
      unit.allowable = row[6].to_s.casecmp('true').zero?
      unit.save!
    end

    # now go through the other sheet and add the "NREL mapped variables"
    maps = xls.sheet('nrel_units').parse
    maps.each_with_index do |row, index|
      next if index.zero? # skip the header row

      unit = Unit.where(name: row[3])
      if unit.count == 0
        fail("no nrel_unit found in database for machine_name: '#{row[3]}' and map of #{row[0]}")
      elsif unit.count > 1
        fail("found multiple machine names for: '#{row[3]}'")
      else
        unit = unit.first
        if unit.mapped.nil?
          puts "adding #{row[0]} to unit map for #{row[3]}"
          unit.mapped = [row[0]]
        else
          unit.mapped << row[0] unless unit.mapped.include?(row[0])
        end
        unit.save!
      end
    end

    # map a special case of "" to undefined; guard against the 'undefined'
    # unit being absent or having no mappings yet (previously a NoMethodError)
    u = Unit.where(name: 'undefined').first
    if u
      u.mapped ||= []
      u.mapped << '' unless u.mapped.include?('')
      u.save!
    end
  end

  desc 'reset cache counters on analysis/structures relations'
  task reset_counters: :environment do
    Analysis.all.each do |a|
      Analysis.reset_counters(a.id, :structures)
    end
  end
end
require 'rubygems'
require 'rake'
require 'active_record/fixtures'
require 'uuidtools'
require 'colorize'

namespace :seek_dev do
  desc 'A simple task for quickly setting up a project and institution, and assigning the first user to it. This is useful for quickly setting up the database when testing. Need to create a default user before running this task'
  task(:initial_membership => :environment) do
    p = Person.first
    # RuntimeError rather than a bare Exception, so a standard rescue catches it.
    raise 'Need to register a person first' if p.nil? || p.user.nil?

    User.with_current_user p.user do
      project = Project.new :title => 'Project X'
      institution = Institution.new :title => 'The Institute'
      project.save!
      institution.projects << project
      institution.save!
      p.update_attributes('work_group_ids' => [project.work_groups.first.id.to_s])
    end
  end

  desc 'finds duplicate create activity records for the same item'
  task(:duplicate_activity_creates => :environment) do
    duplicates = ActivityLog.duplicates('create')
    if duplicates.empty?
      puts 'No duplicates found'
    else
      puts "Found #{duplicates.length} duplicated entries:"
      duplicates.each do |duplicate|
        # where() takes no finder-options hash; ordering must be chained.
        matches = ActivityLog.where(:activity_loggable_id => duplicate.activity_loggable_id,
                                    :activity_loggable_type => duplicate.activity_loggable_type,
                                    :action => 'create').order('created_at ASC')
        puts "ID:#{duplicate.id}\tLoggable ID:#{duplicate.activity_loggable_id}\tLoggable Type:#{duplicate.activity_loggable_type}\tCount:#{matches.count}\tCreated ats:#{matches.collect { |m| m.created_at }.join(', ')}"
      end
    end
  end

  desc 'create 50 randomly named unlinked projects'
  task(:random_projects => :environment) do
    50.times do
      title = ('A'..'Z').to_a[rand(26)] + " #{UUIDTools::UUID.random_create}"
      p = Project.create :title => title
      p.save!
    end
  end

  desc 'Lists all publicly available assets'
  task :list_public_assets => :environment do
    [Investigation, Study, Assay, DataFile, Model, Sop, Publication].each do |assets|
      assets.all.each do |asset|
        puts "#{asset.title} - #{asset.id}" if asset.can_view?
      end
    end
  end

  # Re-derives each ContentBlob's content type from its filename extension.
  task(:refresh_content_types => :environment) do
    ContentBlob.all.each do |cb|
      filename = cb.original_filename
      next if filename.nil?

      file_format = filename.split('.').last.try(:strip)
      possible_mime_types = cb.mime_types_for_extension file_format
      type = possible_mime_types.sort.first || 'application/octet-stream'
      # Normalise the non-standard image/jpg alias.
      type = type.gsub('image/jpg', 'image/jpeg')
      cb.content_type = type
      cb.save
    end
  end

  desc 'display contributor types'
  task(:contributor_types => :environment) do
    types = Seek::Util.user_creatable_types.collect do |type|
      type.all.collect do |thing|
        if thing.respond_to?(:contributor)
          "#{type.name} - #{thing.contributor.class.name}" unless thing.contributor.nil?
        else
          pp "No contributor for #{type}"
          nil
        end
      end.flatten.compact.uniq
    end.flatten.uniq
    pp types
  end

  desc 'display user contributors without people'
  task(:contributors_without_people => :environment) do
    # NOTE(review): the guard probes respond_to?(:contributor_type) but the
    # predicate then calls #contributor -- confirm :contributor_type is the
    # intended capability check.
    matches = Seek::Util.user_creatable_types.collect do |type|
      type.all.select do |thing|
        thing.respond_to?(:contributor_type) && thing.contributor.is_a?(User) && thing.contributor.person.nil?
      end
    end.flatten
    pp "#{matches.size} items found with a user contributor and no person"
    matches.each do |match|
      pp "\t#{match.class.name} - #{match.id}"
    end
  end

  desc 'Generate an XMI db/schema.xml file describing the current DB as seen by AR. Produces XMI 1.1 for UML 1.3 Rose Extended, viewable e.g. by StarUML'
  task :xmi => :environment do
    require 'uml_dumper.rb'
    File.open('doc/data_models/schema.xmi', 'w') do |file|
      ActiveRecord::UmlDumper.dump(ActiveRecord::Base.connection, file)
    end
    puts 'Done. Schema XMI created as doc/data_models/schema.xmi.'
  end

  desc 'removes any data that is not authorized to be viewed by the first User'
  task(:remove_private_data => :environment) do
    first_user = User.first

    private_sops = Sop.find(:all).select { |s| !s.can_view? first_user }
    puts "#{private_sops.size} private Sops being removed"
    private_sops.each { |s| s.destroy }

    private_models = Model.find(:all).select { |m| !m.can_view? first_user }
    puts "#{private_models.size} private Models being removed"
    private_models.each { |m| m.destroy }

    private_data = DataFile.find(:all).select { |d| !d.can_view? first_user }
    puts "#{private_data.size} private Data files being removed"
    private_data.each { |d| d.destroy }
  end

  desc 'Dumps help documents and attachments/images'
  task :dump_help_docs => :environment do
    format_class = 'YamlDb::Helper'
    dir = 'help_dump_tmp'

    # Clear path
    puts 'Clearing existing backup directories'
    FileUtils.rm_r('config/default_data/help', :force => true)
    FileUtils.rm_r('config/default_data/help_images', :force => true)
    FileUtils.rm_r('db/help_dump_tmp/', :force => true)

    # Dump DB
    puts 'Dumping database'
    SerializationHelper::Base.new(format_class.constantize).dump_to_dir dump_dir("/#{dir}")

    # Copy relevant yaml files
    puts 'Copying files'
    # mkdir_p replaces the old `mkdir ... rescue ()`, which silently hid every
    # error instead of only tolerating an already-existing directory.
    FileUtils.mkdir_p('config/default_data/help')
    FileUtils.copy('db/help_dump_tmp/help_documents.yml', 'config/default_data/help/')
    FileUtils.copy('db/help_dump_tmp/help_attachments.yml', 'config/default_data/help/')
    FileUtils.copy('db/help_dump_tmp/help_images.yml', 'config/default_data/help/')
    FileUtils.copy('db/help_dump_tmp/db_files.yml', 'config/default_data/help/')

    # Delete everything else
    puts 'Cleaning up'
    FileUtils.rm_r('db/help_dump_tmp/')

    # Copy image folder
    puts 'Copying images'
    FileUtils.mkdir_p('public/help_images')
    FileUtils.cp_r('public/help_images', 'config/default_data/')
  end

  desc 'Dumps current compounds and synonyms to a yaml file for the seed process'
  task :dump_compounds_and_synonyms => :environment do
    format_class = 'YamlDb::Helper'
    dir = 'compound_dump_tmp'
    puts 'Dumping database'
    SerializationHelper::Base.new(format_class.constantize).dump_to_dir dump_dir("/#{dir}")
    puts 'Copying compound and synonym files'
    FileUtils.copy("db/#{dir}/compounds.yml", 'config/default_data/')
    FileUtils.copy("db/#{dir}/synonyms.yml", 'config/default_data/')
    puts 'Cleaning up'
    FileUtils.rm_r("db/#{dir}/")
  end

  # Reports whether each AssayType's ontology URI (or, failing that, its
  # label) resolves against the assay-type / modelling-analysis ontologies.
  task :analyse_assay_types => :environment do
    uri_hash = Seek::Ontologies::AssayTypeReader.new.class_hierarchy.hash_by_uri
    uri_hash = uri_hash.merge(Seek::Ontologies::ModellingAnalysisTypeReader.new.class_hierarchy.hash_by_uri)
    label_hash = Seek::Ontologies::AssayTypeReader.new.class_hierarchy.hash_by_label
    label_hash = label_hash.merge(Seek::Ontologies::ModellingAnalysisTypeReader.new.class_hierarchy.hash_by_label)
    AssayType.all.each do |type|
      uri = type.term_uri
      label = type.title
      if label.nil? && uri.nil?
        puts "assay type #{type.id} has no label or uri".red
        next # previously fell through and crashed on label.downcase below
      end
      if uri.nil?
        if label_hash[label.to_s.downcase].nil?
          puts "URI is nil for #{type.id} and cannot be resolved from the label - label is #{label} (#{type.assays.count} assays)".red
        else
          puts "URI is nil for #{type.id} but can be resolved from the label - label is #{label}".green
        end
      elsif uri_hash[uri].nil?
        if label_hash[label.to_s.downcase].nil?
          if label_hash[label.to_s.gsub('_', ' ').downcase].nil?
            puts "URI is unrecognised for #{type.id} and cannot be resolved from the label - label is #{label} (#{type.assays.count} assays)".red
          else
            puts "URI is unrecognised for #{type.id} but can be resolved from the label, if underscores are replaced with spaces - label is #{label}".yellow
          end
        else
          puts "URI is unrecognised for #{type.id} but can be resolved from the label - label is #{label}".green
        end
      end
    end
  end

  # Same analysis as analyse_assay_types, but for technology types.
  task :analyse_technology_types => :environment do
    uri_hash = Seek::Ontologies::TechnologyTypeReader.new.class_hierarchy.hash_by_uri
    label_hash = Seek::Ontologies::TechnologyTypeReader.new.class_hierarchy.hash_by_label
    TechnologyType.all.each do |type|
      uri = type.term_uri
      label = type.title
      if label.nil? && uri.nil?
        puts "technology type #{type.id} has no label or uri".red
        next # previously fell through and crashed on label.downcase below
      end
      if uri.nil?
        if label_hash[label.to_s.downcase].nil?
          puts "URI is nil for #{type.id} and cannot be resolved from the label - label is #{label} (#{type.assays.count} assays)".red
        else
          puts "URI is nil for #{type.id} but can be resolved from the label - label is #{label}".green
        end
      elsif uri_hash[uri].nil?
        if label_hash[label.to_s.downcase].nil?
          if label_hash[label.to_s.gsub('_', ' ').downcase].nil?
            puts "URI is unrecognised for #{type.id} and cannot be resolved from the label - label is #{label} (#{type.assays.count} assays)".red
          else
            puts "URI is unrecognised for #{type.id} but can be resolved from the label, if underscores are replaced with spaces - label is #{label}".yellow
          end
        else
          puts "URI is unrecognised for #{type.id} but can be resolved from the label - label is #{label}".green
        end
      end
    end
  end

  # Per-assay analysis: checks whether each assay's assay-type and
  # technology-type URI/label pair resolves against the ontologies, flagging
  # values that only resolve via the *other* ontology (likely mis-assigned).
  task :analyse_assays_and_their_types => :environment do
    assay_type_uri_hash = Seek::Ontologies::AssayTypeReader.new.class_hierarchy.hash_by_uri
    assay_type_uri_hash = assay_type_uri_hash.merge(Seek::Ontologies::ModellingAnalysisTypeReader.new.class_hierarchy.hash_by_uri)
    assay_type_label_hash = Seek::Ontologies::AssayTypeReader.new.class_hierarchy.hash_by_label
    assay_type_label_hash = assay_type_label_hash.merge(Seek::Ontologies::ModellingAnalysisTypeReader.new.class_hierarchy.hash_by_label)
    technology_type_uri_hash = Seek::Ontologies::TechnologyTypeReader.new.class_hierarchy.hash_by_uri
    technology_type_label_hash = Seek::Ontologies::TechnologyTypeReader.new.class_hierarchy.hash_by_label

    # Labels already known to have a manual mapping fix.
    assay_type_label_known_mapping = ['generic experimental assay', 'generic modelling analysis', 'modelling analysis type']
    technology_type_label_known_mapping = ['technology', 'enzymatic activity experiments']

    Assay.all.each do |assay|
      id = assay.id
      assay_type_label = assay.assay_type_label
      assay_type_uri = assay.assay_type_uri
      technology_type_uri = assay.technology_type_uri
      technology_type_label = assay.technology_type_label
      technology_type_label = technology_type_label.downcase.gsub('_', ' ') unless technology_type_label.nil?
      assay_type_label = assay_type_label.downcase.gsub('_', ' ') unless assay_type_label.nil?

      if assay_type_label.blank? && assay_type_uri.blank?
        puts "No assay type uri or label defined, will be reset to the root class - #{id}".green
      elsif assay_type_uri.blank? || assay_type_uri_hash[assay_type_uri].nil?
        if assay_type_label_known_mapping.include?(assay_type_label)
          puts "Invalid assay type uri but the label is recognised as a known mapping to fix '#{assay_type_label}' - #{id}".green
        elsif assay_type_label_hash[assay_type_label].nil?
          if technology_type_label_hash[assay_type_label].nil?
            if technology_type_uri_hash[assay_type_uri].nil?
              puts "Invalid assay type uri and it cannot be resolved by the label '#{assay_type_label}' - #{id}".red
              puts "\t uri is #{assay_type_uri}".red
            else
              # colorize has no String#orange; light_red is the closest colour.
              puts "Invalid assay type uri and it cannot be resolved by the label '#{assay_type_label}', but does match a technology type uri - #{id}".light_red
              puts "\t the assay type uri is #{assay_type_uri}"
              puts "\t this assays technology type label is currently #{technology_type_label}".light_red
            end
          else
            puts "Invalid assay type uri and it cannot be resolved by the label, but does match a technology type '#{assay_type_label}' - #{id}".light_red
            puts "\t this assays technology type label is currently #{technology_type_label}".light_red
          end
        else
          puts "Invalid assay type uri but it can be resolved by the label '#{assay_type_label}' - #{id}".green
        end
      end

      # Modelling assays carry no technology type.
      unless assay.is_modelling?
        if technology_type_label.blank? && technology_type_uri.blank?
          puts "No technology type uri or label defined, will be reset to the root class - #{id}".green
        elsif technology_type_uri.blank? || technology_type_uri_hash[technology_type_uri].nil?
          if technology_type_label_known_mapping.include?(technology_type_label)
            puts "Invalid technology type uri but the label is recognised as a known mapping to fix '#{technology_type_label}' - #{id}".green
          elsif technology_type_label_hash[technology_type_label].nil?
            if assay_type_label_hash[technology_type_label].nil?
              if assay_type_uri_hash[technology_type_uri].nil?
                puts "Invalid technology type uri and it cannot be resolved by the label '#{technology_type_label}' - #{id}".red
                puts "\t uri is #{technology_type_uri}".red
              else
                puts "Invalid technology type uri and it cannot be resolved by the label '#{technology_type_label}', but does match a assay type uri - #{id}".light_red
                puts "\t the technology type uri is #{technology_type_uri}"
                puts "\t this assays assay type label is currently #{assay_type_label}".light_red
              end
            else
              puts "Invalid technology type uri and it cannot be resolved by the label, but does match an assay type '#{technology_type_label}' - #{id}".light_red
              puts "\t this assays assay type label is currently #{assay_type_label}".light_red
            end
          else
            puts "Invalid technology type uri but it can be resolved by the label '#{technology_type_label}' - #{id}".green
          end
        end
      end
    end
  end
end
require 'httparty'
require 'testrail/command_helper'

module Testrail
  # Thin HTTParty wrapper around the TestRail HTTP API.
  #
  # GET/POST requests are retried up to MAX_ATTEMPTS times on timeout; every
  # terminal failure is logged (when a logger is configured) and re-raised.
  class Request
    include HTTParty
    extend Testrail::CommandHelper

    base_uri Testrail.config.server
    format :json

    # Maximum attempts per request before giving up on timeouts.
    MAX_ATTEMPTS = 3

    def self.get(*args)
      command, ids, opts = parse_args(*args)
      url = build_command(command, ids)
      attempts = 0
      begin
        attempts += 1
        response = Testrail::Response.new(super(url, opts))
      rescue TimeoutError => error
        retry if attempts < MAX_ATTEMPTS
        log_failure("Timeout connecting to GET #{url}", error)
        raise error
      rescue Exception => error # intentional: log absolutely everything, then re-raise
        log_failure("Unexpected exception intercepted calling TestRail", error)
        raise error
      end
      response
    end

    def self.post(*args)
      command, ids, opts = parse_args(*args)
      url = build_command(command, ids)
      attempts = 0
      begin
        attempts += 1
        response = Testrail::Response.new(super(url, opts))
      rescue TimeoutError => error
        retry if attempts < MAX_ATTEMPTS
        log_failure("Timeout connecting to POST #{url}", error)
        raise error
      rescue Exception => error # intentional: log absolutely everything, then re-raise
        log_failure("Unexpected exception intercepted calling TestRail", error)
        raise error
      end
      response
    end

    # NOTE: `private` does not hide singleton (def self.) methods; kept for
    # documentation of intent, as in the original.
    private

    # Logs a failure message plus the exception, when a logger is configured.
    def self.log_failure(message, error)
      return if Testrail.logger.nil?
      Testrail.logger.error message
      Testrail.logger.error error
    end

    # Splits variadic call args into [command, ids, opts], merging the
    # configured default headers into any caller-supplied ones.
    def self.parse_args(*args)
      opts = args.last.instance_of?(Hash) ? args.pop : {}
      opts[:headers] = opts[:headers] ? Testrail.config.headers.merge(opts[:headers]) : Testrail.config.headers
      command = args.shift
      ids = args.empty? ? nil : args
      [command, ids, opts]
    end
  end
end
module TidyFFI
  # Released gem version (bumped from 0.1.2).
  VERSION = "0.1.3"
end
module ToParam
  # First stable release.
  VERSION = "1.0.0"
end
module Togglate
  # Released gem version (bumped from 0.1.4).
  VERSION = "0.1.5"
end
module TotalIn
  # Initial public release (bumped from 0.0.1).
  VERSION = "0.5.0"
end
module Totrello
  # Released gem version (bumped from 0.1.6).
  VERSION = "0.1.7"
end
module Trackman
  # Released gem version (bumped from 0.1.0).
  VERSION = "0.1.1"
end
# encoding: utf-8 module Mongoid #:nodoc: module Tracking # This internal class handles all interaction for a track field. class Tracker include Readers def initialize(owner, field, aggregate_data) @owner, @for = owner, field @for_data = @owner.internal_track_name(@for) @data = @owner.read_attribute(@for_data) # The following is needed if the "field" Mongoid definition for our # internal tracking field does not include option ":default => {}" if @data.nil? @owner.write_attribute(@for_data, {}) @data = @owner.read_attribute(@for_data) end @aggregate_data = aggregate_data.first if aggregate_data.first end # Delegate all missing methods to the aggregate accessors. This enables # us to call an aggregation token after the tracking field. # # Example: # # <tt>@object.visits.browsers ...</tt> # def method_missing(name, *args, &block) super unless @owner.aggregate_fields.member?(name) @owner.send("#{name}_with_track".to_sym, @for, *args, &block) end # Update methods def add(how_much = 1, date = Time.now) raise Errors::ModelNotSaved, "Can't update a new record. Save first!" if @owner.new_record? return if how_much == 0 # Note that the following #update_data method updates our local data # and the current value might differ from the actual value on the # database. Basically, what we do is update our own copy as a cache # but send the command to atomically update the database: we don't # read the actual value in return so that we save round trip delays. # update_data(data_for(date) + how_much, date) @owner.inc(store_key(date), how_much.abs) return unless @owner.aggregated? 
@owner.aggregate_fields.each do |k, v| next unless token = v.call(@aggregate_data) fk = @owner.class.name.to_s.foreign_key.to_sym selector = { fk => @owner.id, ns: k, key: token.to_s } criteria = @owner.aggregate_klass.collection.find(selector) criteria.upsert("$inc" => update_hash(how_much.abs, date)) end end def inc(date = Time.now) add(1, date) end def dec(date = Time.now) add(-1, date) end def set(how_much, date = Time.now) raise Errors::ModelNotSaved, "Can't update a new record" if @owner.new_record? update_data(how_much, date) @owner.set(store_key(date), how_much) return unless @owner.aggregated? @owner.aggregate_fields.each do |(k,v)| next unless token = v.call(@aggregate_data) fk = @owner.class.name.to_s.foreign_key.to_sym selector = { fk => @owner.id, ns: k, key: token.to_s } criteria = @owner.aggregate_klass.collection.find(selector) criteria.upsert("$set" => update_hash(how_much.abs, date)) end end def reset(how_much, date = Time.now) return erase(date) if how_much.nil? # First, we use the default "set" for the tracking field # This will also update one aggregate but... oh well... set(how_much, date) # Need to iterate over all aggregates and send an update or delete # operations over all mongo records for this aggregate field @owner.aggregate_fields.each do |(k,v)| fk = @owner.class.name.to_s.foreign_key.to_sym selector = { fk => @owner.id, ns: k } criteria = @owner.aggregate_klass.collection.find(selector) criteria.upsert("$set" => update_hash(how_much.abs, date)) end end def erase(date = Time.now) raise Errors::ModelNotSaved, "Can't update a new record" if @owner.new_record? remove_data(date) @owner.unset(store_key(date)) return unless @owner.aggregated? 
# Need to iterate over all aggregates and send an update or delete # operations over all mongo records @owner.aggregate_fields.each do |(k,v)| fk = @owner.class.name.to_s.foreign_key.to_sym selector = { fk => @owner.id, ns: k } criteria = @owner.aggregate_klass.collection.find(selector) criteria.upsert("$unset" => update_hash(1, date)) end end private def data_for(date) unless date.nil? date = normalize_date(date) @data.try(:[], date.to_i_timestamp.to_s).try(:[], date.to_i_hour.to_s) || 0 end end def whole_data_for(date) unless date.nil? date = normalize_date(date) if date.utc? d = expand_hash @data[date.to_key_timestamp] ReaderExtender.new(d.sum, d) else r = date.whole_day d1 = expand_hash @data[r.first.to_key_timestamp] d2 = expand_hash @data[r.last.to_key_timestamp] t = d1[r.first.to_i_hour, 24] + d2[0, r.first.to_i_hour] ReaderExtender.new(t.sum, t) end end end def whole_data_for_range(date) date = normalize_date(date) if date.first.utc? keys = date.map(&:to_key_timestamp) keys.inject([]) do |r, e| d = expand_hash(@data[e]) r << ReaderExtender.new(d.sum, d) end else first = date.first.whole_day.first.to_key_timestamp last = date.last.whole_day.last.to_key_timestamp pivot = date.first.whole_day.first.to_i_hour acc = expand_hash(@data[first.to_s]) data = [] first.succ.upto(last) do |n| d = expand_hash(@data[n]) t = acc[pivot, 24] + d[0, pivot] acc = d data << ReaderExtender.new(t.sum, t) end data end end def expand_hash(h) d = Array.new(24, 0) h.inject(d) { |d, e| d[e.first.to_i] = e.last; d } if h d end def update_data(value, date) unless date.nil? return remove_data(date) unless value date = normalize_date(date) dk, hk = date.to_i_timestamp.to_s, date.to_i_hour.to_s unless ts = @data[dk] ts = (@data[dk] = {}) end ts[hk] = value end end def remove_data(date) unless date.nil? 
date = normalize_date(date) dk, hk = date.to_i_timestamp.to_s, date.to_i_hour.to_s if ts = @data[dk] ts.delete(hk) unless ts.count > 0 @data.delete(dk) end end end end def update_hash(num, date) date = normalize_date(date) { "#{@for_data}.#{date.to_key}" => num } end # Allow for dates to be different types. def normalize_date(date) case date when String Time.parse(date) when Date date.to_time when Range normalize_date(date.first)..normalize_date(date.last) else date end end # WARNING: This is +only+ for debugging purposes (rspec y tal) def _original_hash @data end end end end Add store_key to Tracker. # encoding: utf-8 module Mongoid #:nodoc: module Tracking # This internal class handles all interaction for a track field. class Tracker include Readers def initialize(owner, field, aggregate_data) @owner, @for = owner, field @for_data = @owner.internal_track_name(@for) @data = @owner.read_attribute(@for_data) # The following is needed if the "field" Mongoid definition for our # internal tracking field does not include option ":default => {}" if @data.nil? @owner.write_attribute(@for_data, {}) @data = @owner.read_attribute(@for_data) end @aggregate_data = aggregate_data.first if aggregate_data.first end # Delegate all missing methods to the aggregate accessors. This enables # us to call an aggregation token after the tracking field. # # Example: # # <tt>@object.visits.browsers ...</tt> # def method_missing(name, *args, &block) super unless @owner.aggregate_fields.member?(name) @owner.send("#{name}_with_track".to_sym, @for, *args, &block) end # Update methods def add(how_much = 1, date = Time.now) raise Errors::ModelNotSaved, "Can't update a new record. Save first!" if @owner.new_record? return if how_much == 0 # Note that the following #update_data method updates our local data # and the current value might differ from the actual value on the # database. 
Basically, what we do is update our own copy as a cache # but send the command to atomically update the database: we don't # read the actual value in return so that we save round trip delays. # update_data(data_for(date) + how_much, date) @owner.inc(store_key(date), how_much.abs) return unless @owner.aggregated? @owner.aggregate_fields.each do |k, v| next unless token = v.call(@aggregate_data) fk = @owner.class.name.to_s.foreign_key.to_sym selector = { fk => @owner.id, ns: k, key: token.to_s } criteria = @owner.aggregate_klass.collection.find(selector) criteria.upsert("$inc" => update_hash(how_much.abs, date)) end end def inc(date = Time.now) add(1, date) end def dec(date = Time.now) add(-1, date) end def set(how_much, date = Time.now) raise Errors::ModelNotSaved, "Can't update a new record" if @owner.new_record? update_data(how_much, date) @owner.set(store_key(date), how_much) return unless @owner.aggregated? @owner.aggregate_fields.each do |(k,v)| next unless token = v.call(@aggregate_data) fk = @owner.class.name.to_s.foreign_key.to_sym selector = { fk => @owner.id, ns: k, key: token.to_s } criteria = @owner.aggregate_klass.collection.find(selector) criteria.upsert("$set" => update_hash(how_much.abs, date)) end end def reset(how_much, date = Time.now) return erase(date) if how_much.nil? # First, we use the default "set" for the tracking field # This will also update one aggregate but... oh well... set(how_much, date) # Need to iterate over all aggregates and send an update or delete # operations over all mongo records for this aggregate field @owner.aggregate_fields.each do |(k,v)| fk = @owner.class.name.to_s.foreign_key.to_sym selector = { fk => @owner.id, ns: k } criteria = @owner.aggregate_klass.collection.find(selector) criteria.upsert("$set" => update_hash(how_much.abs, date)) end end def erase(date = Time.now) raise Errors::ModelNotSaved, "Can't update a new record" if @owner.new_record? 
remove_data(date) @owner.unset(store_key(date)) return unless @owner.aggregated? # Need to iterate over all aggregates and send an update or delete # operations over all mongo records @owner.aggregate_fields.each do |(k,v)| fk = @owner.class.name.to_s.foreign_key.to_sym selector = { fk => @owner.id, ns: k } criteria = @owner.aggregate_klass.collection.find(selector) criteria.upsert("$unset" => update_hash(1, date)) end end private def data_for(date) unless date.nil? date = normalize_date(date) @data.try(:[], date.to_i_timestamp.to_s).try(:[], date.to_i_hour.to_s) || 0 end end def whole_data_for(date) unless date.nil? date = normalize_date(date) if date.utc? d = expand_hash @data[date.to_key_timestamp] ReaderExtender.new(d.sum, d) else r = date.whole_day d1 = expand_hash @data[r.first.to_key_timestamp] d2 = expand_hash @data[r.last.to_key_timestamp] t = d1[r.first.to_i_hour, 24] + d2[0, r.first.to_i_hour] ReaderExtender.new(t.sum, t) end end end def whole_data_for_range(date) date = normalize_date(date) if date.first.utc? keys = date.map(&:to_key_timestamp) keys.inject([]) do |r, e| d = expand_hash(@data[e]) r << ReaderExtender.new(d.sum, d) end else first = date.first.whole_day.first.to_key_timestamp last = date.last.whole_day.last.to_key_timestamp pivot = date.first.whole_day.first.to_i_hour acc = expand_hash(@data[first.to_s]) data = [] first.succ.upto(last) do |n| d = expand_hash(@data[n]) t = acc[pivot, 24] + d[0, pivot] acc = d data << ReaderExtender.new(t.sum, t) end data end end def expand_hash(h) d = Array.new(24, 0) h.inject(d) { |d, e| d[e.first.to_i] = e.last; d } if h d end def update_data(value, date) unless date.nil? return remove_data(date) unless value date = normalize_date(date) dk, hk = date.to_i_timestamp.to_s, date.to_i_hour.to_s unless ts = @data[dk] ts = (@data[dk] = {}) end ts[hk] = value end end def remove_data(date) unless date.nil? 
date = normalize_date(date) dk, hk = date.to_i_timestamp.to_s, date.to_i_hour.to_s if ts = @data[dk] ts.delete(hk) unless ts.count > 0 @data.delete(dk) end end end end # Returns a store key for passed date. def store_key(date) "#{@for_data}.#{normalize_date(date).to_key}" end def update_hash(num, date) { store_key(date) => num } end # Allow for dates to be different types. def normalize_date(date) case date when String Time.parse(date) when Date date.to_time when Range normalize_date(date.first)..normalize_date(date.last) else date end end # WARNING: This is +only+ for debugging purposes (rspec y tal) def _original_hash @data end end end end
# Copyright (c) 2013 Red Hat # # MIT License # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. require 'active_support/core_ext/string/inflections' #required for .camelize require 'active_support/core_ext/hash' #with_indifferent_access module Trebuchet class Runner # Run all operations, or a specific operation # # @param [Hash] config config hash to pass to operations (currently :host, :user, :password) # @param [String] operation_name the single operation to run, otherwise all def run(config, operation_name=nil) config = config.with_indifferent_access if operation_name get_operation(operation_name).new(config).run else gather_operations.each do |operation| if operation_name.nil? 
|| operation_name == operation.name operation.new(config).run end end end end # List all operations # # @return [Array] list of available operations to deploy def list_operations gather_operations.collect{|o| o.new} end private def gather_operations files = Dir.glob(File.dirname(__FILE__) + '/operation/*.rb') files.collect do |file| file = File.basename(file, '.rb') get_operation(file) end end def get_operation(name) eval('Trebuchet::Operation::' + name.camelize) end end end Removing unneeded line left from splitting up runner logic. # Copyright (c) 2013 Red Hat # # MIT License # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
require 'active_support/core_ext/string/inflections' #required for .camelize require 'active_support/core_ext/hash' #with_indifferent_access module Trebuchet class Runner # Run all operations, or a specific operation # # @param [Hash] config config hash to pass to operations (currently :host, :user, :password) # @param [String] operation_name the single operation to run, otherwise all def run(config, operation_name=nil) config = config.with_indifferent_access if operation_name get_operation(operation_name).new(config).run else gather_operations.each do |operation| operation.new(config).run end end end # List all operations # # @return [Array] list of available operations to deploy def list_operations gather_operations.collect{|o| o.new} end private def gather_operations files = Dir.glob(File.dirname(__FILE__) + '/operation/*.rb') files.collect do |file| file = File.basename(file, '.rb') get_operation(file) end end def get_operation(name) eval('Trebuchet::Operation::' + name.camelize) end end end
def trim_and_correct_fastqs(sample_map, directory, forward_reads_suffix, forward_reads_file_extension, reverse_reads_suffix, reverse_reads_file_extension, quality_scale, fastq_quality_trimmer_path, quake_path,trim_point_fraction, trim_quality_cutoff) Dir.chdir(directory) # trimming sample_map.each do |sample_file_prefix, sample_name| next if File.exists?("#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") || File.exists?("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension}") puts "Trimming files for #{sample_name}" #determine read length read_length = calculate_read_length("#{directory}/#{sample_file_prefix}#{forward_reads_suffix}.#{forward_reads_file_extension}") trim_point = (trim_point_fraction * read_length).to_i `#{fastq_quality_trimmer_path} -i #{directory}/#{sample_file_prefix}#{forward_reads_suffix}.#{forward_reads_file_extension} -o #{directory}/#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension} -t #{trim_quality_cutoff} -l #{trim_point} -Q #{quality_scale} -v` `#{fastq_quality_trimmer_path} -i #{directory}/#{sample_file_prefix}#{reverse_reads_suffix}.#{reverse_reads_file_extension} -o #{directory}/#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension} -t #{trim_quality_cutoff} -l #{trim_point} -Q #{quality_scale} -v` `perl /tmp/fastq-remove-orphans.pl -1 #{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension} -2 #{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}` end # quake correction # write file for quake sample_map.each do |sample_file_prefix, sample_name| next if File.exists?("#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") || File.exists?(("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}")) puts "Error correcting files for #{sample_name}" output_file = 
File.open("quake_file_list.txt","w") output_file.puts "paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension} paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}" output_file.close # run quake `#{quake_path} -f quake_file_list.txt -k 15 -q #{quality_scale}` end sample_map.each do |sample_file_prefix, sample_name| next if File.exists?("#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") # remove orphans `perl /tmp/fastq-remove-orphans.pl -1 paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension} -2 paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor.#{reverse_reads_file_extension}` end # cleanup and rename files sample_map.each do |sample_file_prefix, sample_name| File.delete("#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension}") File.delete("#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}") File.delete("orphaned_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension}") File.delete("orphaned_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}") if File.exists?("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") File.delete("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension}") File.delete("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}") File.delete("error_model.paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.txt") File.delete("error_model.paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.txt") File.delete("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.stats.txt") File.delete("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor_single.#{forward_reads_file_extension}") 
File.delete("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.stats.txt") File.delete("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor_single.#{forward_reads_file_extension}") File.delete("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") File.delete("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor.#{reverse_reads_file_extension}") File.delete("orphaned_paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") File.delete("orphaned_paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor.#{reverse_reads_file_extension}") File.rename("paired_paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}", "#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") File.rename("paired_paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor.#{reverse_reads_file_extension}", "#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor.#{reverse_reads_file_extension}") else File.rename("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension}", "#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension}") File.rename("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}", "#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}") end end end def calculate_read_length(filename) read_length = nil File.open(filename) do |f| f.each do |line| line.chomp! 
if line =~ /^[GATCgatc]/ read_length = line.size break end end end return read_length - 1 end additional file checks def trim_and_correct_fastqs(sample_map, directory, forward_reads_suffix, forward_reads_file_extension, reverse_reads_suffix, reverse_reads_file_extension, quality_scale, fastq_quality_trimmer_path, quake_path,trim_point_fraction, trim_quality_cutoff) Dir.chdir(directory) # trimming sample_map.each do |sample_file_prefix, sample_name| next if File.exists?("#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") || File.exists?("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension}") puts "Trimming files for #{sample_name}" #determine read length read_length = calculate_read_length("#{directory}/#{sample_file_prefix}#{forward_reads_suffix}.#{forward_reads_file_extension}") trim_point = (trim_point_fraction * read_length).to_i `#{fastq_quality_trimmer_path} -i #{directory}/#{sample_file_prefix}#{forward_reads_suffix}.#{forward_reads_file_extension} -o #{directory}/#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension} -t #{trim_quality_cutoff} -l #{trim_point} -Q #{quality_scale} -v` `#{fastq_quality_trimmer_path} -i #{directory}/#{sample_file_prefix}#{reverse_reads_suffix}.#{reverse_reads_file_extension} -o #{directory}/#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension} -t #{trim_quality_cutoff} -l #{trim_point} -Q #{quality_scale} -v` `perl /tmp/fastq-remove-orphans.pl -1 #{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension} -2 #{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}` end # quake correction # write file for quake sample_map.each do |sample_file_prefix, sample_name| next if File.exists?("#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") || 
File.exists?(("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}")) puts "Error correcting files for #{sample_name}" output_file = File.open("quake_file_list.txt","w") output_file.puts "paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension} paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}" output_file.close # run quake `#{quake_path} -f quake_file_list.txt -k 15 -q #{quality_scale}` end sample_map.each do |sample_file_prefix, sample_name| next if File.exists?("#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") # remove orphans `perl /tmp/fastq-remove-orphans.pl -1 paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension} -2 paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor.#{reverse_reads_file_extension}` end # cleanup and rename files sample_map.each do |sample_file_prefix, sample_name| File.delete("#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension}") if File.exists?("#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension}") File.delete("#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}") if File.exists?("#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}") File.delete("orphaned_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension}") if File.exists?("orphaned_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension}") File.delete("orphaned_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}") if File.exists?("orphaned_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}") if File.exists?("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") 
File.delete("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension}") if File.exists?("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension}") File.delete("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}") if File.exists?("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}") File.delete("error_model.paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.txt") if File.exists?("error_model.paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.txt") File.delete("error_model.paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.txt") if File.exists?("error_model.paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.txt") File.delete("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.stats.txt") if File.exists?("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.stats.txt") File.delete("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor_single.#{forward_reads_file_extension}") if File.exists?("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor_single.#{forward_reads_file_extension}") File.delete("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.stats.txt") if File.exists?("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.stats.txt") File.delete("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor_single.#{forward_reads_file_extension}") if File.exists?("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor_single.#{forward_reads_file_extension}") File.delete("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") if File.exists?("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") File.delete("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor.#{reverse_reads_file_extension}") if 
File.exists?("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor.#{reverse_reads_file_extension}") File.delete("orphaned_paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") if File.exists?("orphaned_paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") File.delete("orphaned_paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor.#{reverse_reads_file_extension}") if File.exists?("orphaned_paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor.#{reverse_reads_file_extension}") File.rename("paired_paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}", "#{sample_file_prefix}#{forward_reads_suffix}.trimmed.cor.#{forward_reads_file_extension}") File.rename("paired_paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor.#{reverse_reads_file_extension}", "#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.cor.#{reverse_reads_file_extension}") else File.rename("paired_#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension}", "#{sample_file_prefix}#{forward_reads_suffix}.trimmed.#{forward_reads_file_extension}") File.rename("paired_#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}", "#{sample_file_prefix}#{reverse_reads_suffix}.trimmed.#{reverse_reads_file_extension}") end end end def calculate_read_length(filename) read_length = nil File.open(filename) do |f| f.each do |line| line.chomp! if line =~ /^[GATCgatc]/ read_length = line.size break end end end return read_length - 1 end
require "json" require "rest_client" module Tweetwine class Client COMMANDS = %w{friends user msg} MAX_NUM_STATUSES = 20 MAX_STATUS_LENGTH = 140 def initialize(options) @username, password = options[:username].to_s, options[:password].to_s @base_url = "https://#{@username}:#{password}@twitter.com/" @colorize = options[:colorize] || false @num_statuses = options[:num_statuses] || MAX_NUM_STATUSES end def friends print_statuses JSON.parse(get("statuses/friends_timeline.json?count=#{@num_statuses}")) end def user(user = @username) print_statuses JSON.parse(get("statuses/user_timeline/#{user}.json?count=#{@num_statuses}")) end def msg(status = nil) unless status printf "New status: " status = $stdin.gets end if confirm_user_action("Really send?") msg = status[0...MAX_STATUS_LENGTH] body = {:status => msg } status = JSON.parse(post("statuses/update.json", body)) puts "Sent status update.\n\n" print_statuses([status]) else puts "Cancelled." end end private def get(rest_uri) RestClient.get @base_url + rest_uri end def post(rest_url, body) RestClient.post @base_url + rest_url, body end def confirm_user_action(msg) printf "#{msg} [yn] " confirmation = $stdin.gets.strip confirmation.downcase[0,1] == "y" end def print_statuses(statuses) statuses.each do |status| time_diff_value, time_diff_unit = Util.humanize_time_diff(Time.now, status["created_at"]) from_user = status["user"]["screen_name"] from_user = Util.colorize(:green, from_user) if @colorize text = status["text"] text = Util.colorize(:red, text, /@\w+/) if @colorize puts <<-END #{from_user}, #{time_diff_value} #{time_diff_unit} ago: #{text} END end end end end Emphasize that "no" is the default answer. 
require "json" require "rest_client" module Tweetwine class Client COMMANDS = %w{friends user msg} MAX_NUM_STATUSES = 20 MAX_STATUS_LENGTH = 140 def initialize(options) @username, password = options[:username].to_s, options[:password].to_s @base_url = "https://#{@username}:#{password}@twitter.com/" @colorize = options[:colorize] || false @num_statuses = options[:num_statuses] || MAX_NUM_STATUSES end def friends print_statuses JSON.parse(get("statuses/friends_timeline.json?count=#{@num_statuses}")) end def user(user = @username) print_statuses JSON.parse(get("statuses/user_timeline/#{user}.json?count=#{@num_statuses}")) end def msg(status = nil) unless status printf "New status: " status = $stdin.gets end if confirm_user_action("Really send?") msg = status[0...MAX_STATUS_LENGTH] body = {:status => msg } status = JSON.parse(post("statuses/update.json", body)) puts "Sent status update.\n\n" print_statuses([status]) else puts "Cancelled." end end private def get(rest_uri) RestClient.get @base_url + rest_uri end def post(rest_url, body) RestClient.post @base_url + rest_url, body end def confirm_user_action(msg) printf "#{msg} [yN] " confirmation = $stdin.gets.strip confirmation.downcase[0,1] == "y" end def print_statuses(statuses) statuses.each do |status| time_diff_value, time_diff_unit = Util.humanize_time_diff(Time.now, status["created_at"]) from_user = status["user"]["screen_name"] from_user = Util.colorize(:green, from_user) if @colorize text = status["text"] text = Util.colorize(:red, text, /@\w+/) if @colorize puts <<-END #{from_user}, #{time_diff_value} #{time_diff_unit} ago: #{text} END end end end end
# frozen_string_literal: true

require 'active_model'
require 'validated_object/version'

module ValidatedObject
  # @abstract Subclass and add `attr_accessor` and validations
  #   to create custom validating objects.
  #
  # Uses [ActiveModel::Validations](http://api.rubyonrails.org/classes/ActiveModel/Validations/ClassMethods.html#method-i-validates)
  # to create self-validating Plain Old Ruby objects. This is especially
  # useful when importing data from one system into another. This class also
  # creates very readable error messages.
  #
  # @example Writing a self-validating object
  #   class Dog < ValidatedObject::Base
  #     attr_accessor :name, :birthday
  #
  #     validates :name, presence: true
  #     validates :birthday, type: Date, allow_nil: true
  #   end
  #
  # @example Instantiating and automatically validating
  #   # The dog1 instance validates itself at the end of instantiation.
  #   # Here, it succeeds and so doesn't raise an exception.
  #   dog1 = Dog.new name: 'Spot'
  #
  #   # We can also explicitly test for validity
  #   dog1.valid?  # => true
  #
  #   dog1.birthday = Date.new(2015, 1, 23)
  #   dog1.valid?  # => true
  #
  # @example Making an instance invalid
  #   dog1.birthday = '2015-01-23'
  #   dog1.valid?  # => false
  #   dog1.check_validations!  # => ArgumentError: Birthday is class String, not Date
  #
  # @see ValidatedObject::Base::TypeValidator
  # @see http://yehudakatz.com/2010/01/10/activemodel-make-any-ruby-object-feel-like-activerecord/ ActiveModel: Make Any Ruby Object Feel Like ActiveRecord, Yehuda Katz
  # @see http://www.rubyinside.com/rails-3-0s-activemodel-how-to-give-ruby-classes-some-activerecord-magic-2937.html Rails 3.0′s ActiveModel: How To Give Ruby Classes Some ActiveRecord Magic, Peter Cooper
  class Base
    include ActiveModel::Validations

    EMPTY_HASH = {}.freeze

    # Implements a pseudo-boolean class.
    class Boolean
    end

    # Instantiate and validate a new object.
    # @example
    #   maru = Dog.new(birthday: Date.today, name: 'Maru')
    #
    # @raise [ArgumentError] if the object is not valid at the
    #   end of initialization or `attributes` is not a Hash.
    def initialize(attributes=EMPTY_HASH)
      raise ArgumentError, "#{attributes} is not a Hash" unless attributes.is_a?(Hash)

      set_instance_variables from_hash: attributes
      check_validations!
      self
    end

    # Run any validations and raise an error if invalid.
    # @raise [ArgumentError] if any validations fail.
    def check_validations!
      raise ArgumentError, errors.full_messages.join('; ') if invalid?
      self
    end

    # A custom validator which ensures an object is an instance of a class
    # or a subclass. It supports a pseudo-boolean class for convenient
    # validation. (Ruby doesn't have a built-in Boolean.)
    #
    # @example Ensure that weight is a number
    #   class Dog < ValidatedObject::Base
    #     attr_accessor :weight, :neutered
    #     validates :weight, type: Numeric           # Typed and required
    #     validates :neutered, type: Boolean, allow_nil: true  # Typed but optional
    #   end
    class TypeValidator < ActiveModel::EachValidator
      # @return [nil]
      def validate_each(record, attribute, value)
        expected_class = options[:with]
        return if pseudo_boolean?(expected_class, value) ||
                  expected_class?(expected_class, value)
        save_error(record, attribute, value, options)
      end

      private

      def pseudo_boolean?(expected_class, value)
        expected_class == Boolean && boolean?(value)
      end

      def expected_class?(expected_class, value)
        value.is_a?(expected_class)
      end

      def boolean?(value)
        value.is_a?(TrueClass) || value.is_a?(FalseClass)
      end

      def save_error(record, attribute, value, options)
        record.errors.add attribute,
                          options[:message] || "is a #{value.class}, not a #{options[:with]}"
      end
    end

    private

    def set_instance_variables(from_hash:)
      from_hash.each do |variable_name, variable_value|
        self.instance_variable_set "@#{variable_name}".to_sym, variable_value
      end
    end
  end
end

# fix: Invalid attr names disallowed as docs describe

# frozen_string_literal: true

require 'active_model'
require 'validated_object/version'

module ValidatedObject
  # @abstract Subclass and add `attr_accessor` and validations
  #   to create custom validating objects.
  #
  # Uses [ActiveModel::Validations](http://api.rubyonrails.org/classes/ActiveModel/Validations/ClassMethods.html#method-i-validates)
  # to create self-validating Plain Old Ruby objects. This is especially
  # useful when importing data from one system into another. This class also
  # creates very readable error messages.
  #
  # @example Writing a self-validating object
  #   class Dog < ValidatedObject::Base
  #     attr_accessor :name, :birthday
  #
  #     validates :name, presence: true
  #     validates :birthday, type: Date, allow_nil: true
  #   end
  #
  # @example Instantiating and automatically validating
  #   # The dog1 instance validates itself at the end of instantiation.
  #   # Here, it succeeds and so doesn't raise an exception.
  #   dog1 = Dog.new name: 'Spot'
  #
  #   # We can also explicitly test for validity
  #   dog1.valid?  # => true
  #
  #   dog1.birthday = Date.new(2015, 1, 23)
  #   dog1.valid?  # => true
  #
  # @example Making an instance invalid
  #   dog1.birthday = '2015-01-23'
  #   dog1.valid?  # => false
  #   dog1.check_validations!  # => ArgumentError: Birthday is class String, not Date
  #
  # @see ValidatedObject::Base::TypeValidator
  # @see http://yehudakatz.com/2010/01/10/activemodel-make-any-ruby-object-feel-like-activerecord/ ActiveModel: Make Any Ruby Object Feel Like ActiveRecord, Yehuda Katz
  # @see http://www.rubyinside.com/rails-3-0s-activemodel-how-to-give-ruby-classes-some-activerecord-magic-2937.html Rails 3.0′s ActiveModel: How To Give Ruby Classes Some ActiveRecord Magic, Peter Cooper
  class Base
    include ActiveModel::Validations

    EMPTY_HASH = {}.freeze

    # Implements a pseudo-boolean class.
    class Boolean
    end

    # Instantiate and validate a new object.
    # @example
    #   maru = Dog.new(birthday: Date.today, name: 'Maru')
    #
    # @raise [ArgumentError] if the object is not valid at the
    #   end of initialization or `attributes` is not a Hash.
    def initialize(attributes=EMPTY_HASH)
      raise ArgumentError, "#{attributes} is not a Hash" unless attributes.is_a?(Hash)

      set_instance_variables from_hash: attributes
      check_validations!
      return self
    end

    # Run any validations and raise an error if invalid.
    # @raise [ArgumentError] if any validations fail.
    def check_validations!
      raise ArgumentError, errors.full_messages.join('; ') if invalid?
      self
    end

    # A custom validator which ensures an object is an instance of a class
    # or a subclass. It supports a pseudo-boolean class for convenient
    # validation. (Ruby doesn't have a built-in Boolean.)
    #
    # @example Ensure that weight is a number
    #   class Dog < ValidatedObject::Base
    #     attr_accessor :weight, :neutered
    #     validates :weight, type: Numeric           # Typed and required
    #     validates :neutered, type: Boolean, allow_nil: true  # Typed but optional
    #   end
    class TypeValidator < ActiveModel::EachValidator
      # @return [nil]
      def validate_each(record, attribute, value)
        expected_class = options[:with]
        return if pseudo_boolean?(expected_class, value) ||
                  expected_class?(expected_class, value)
        save_error(record, attribute, value, options)
      end

      private

      def pseudo_boolean?(expected_class, value)
        expected_class == Boolean && boolean?(value)
      end

      def expected_class?(expected_class, value)
        value.is_a?(expected_class)
      end

      def boolean?(value)
        value.is_a?(TrueClass) || value.is_a?(FalseClass)
      end

      def save_error(record, attribute, value, options)
        record.errors.add attribute,
                          options[:message] || "is a #{value.class}, not a #{options[:with]}"
      end
    end

    private

    def set_instance_variables(from_hash:)
      from_hash.each do |variable_name, variable_value|
        # Test for the attribute reader: raises NoMethodError for attribute
        # names the class does not declare, as the docs describe.
        self.send variable_name.to_sym
        # Set value in a way that succeeds even if attr is read-only
        self.instance_variable_set "@#{variable_name}".to_sym, variable_value
      end
    end
  end
end
module VncTools
  VERSION = "0.0.8"
end

# Bump to 0.0.9.pre1 (debug build)
module VncTools
  # NOTE(review): this chunk captures a before/after pair, so VERSION is
  # reassigned here; Ruby emits an "already initialized constant" warning.
  VERSION = "0.0.9.pre1"
end
require 'json'
require 'httparty'

# Thin client for WireMock's admin HTTP API: list, create, and reset
# stub mappings on a running WireMock server.
class WireMockCaller
  include HTTParty

  def initialize(base_uri = "http://localhost:9999/__admin")
    WireMockCaller.base_uri base_uri
  end

  # @return [Array<Hash>] all stub mappings currently registered.
  def get_mappings
    response = self.class.get("")
    response["mappings"]
  end

  # Register a new stub: requests matching +method+ on +url+ answer
  # 200 with +body+ as text/plain.
  def create_mapping(url, method, body)
    mapping_request = build_mapping_request(body, method, url)
    self.class.post("/mappings/new", body: mapping_request.to_json)
  end

  # Remove all registered stub mappings.
  def reset_mappings
    self.class.post("/mappings/reset")
  end

  private

  def build_mapping_request(body, method, url)
    {
      request: {
        method: method,
        url: url
      },
      response: {
        status: 200,
        body: body,
        headers: {
          # have to use this notation because the dash in Content-Type is
          # not a legal character in a ruby symbol
          :'Content-Type' => "text/plain"
        }
      }
    }
  end
end

# change method signature

require 'json'
require 'httparty'

class WireMockCaller
  include HTTParty

  def initialize(base_uri = "http://localhost:9999/__admin")
    WireMockCaller.base_uri base_uri
  end

  def get_mappings
    response = self.class.get("")
    response["mappings"]
  end

  # NOTE: the private helper now takes (method, body, url); the call site
  # was updated to match the new parameter order.
  def create_mapping(url, method, body)
    mapping_request = build_mapping_request(method, body, url)
    self.class.post("/mappings/new", body: mapping_request.to_json)
  end

  def reset_mappings
    self.class.post("/mappings/reset")
  end

  private

  def build_mapping_request(method, body, url)
    # key name is computed so it could later switch to "urlPattern"
    url_or_url_pattern = "url"
    {
      request: {
        method: method,
        url_or_url_pattern => url
      },
      response: {
        status: 200,
        body: body,
        headers: {
          # have to use this notation because the dash in Content-Type is
          # not a legal character in a ruby symbol
          :'Content-Type' => "text/plain"
        }
      }
    }
  end
end
require 'net/http'

# Before version: top-level script that POSTs cropped cell images to the
# Azure Computer Vision OCR endpoint and dumps the JSON response.
@directory = File.expand_path("cell-files", File.dirname(__FILE__))

uri = URI('https://westcentralus.api.cognitive.microsoft.com/vision/v1.0/ocr')
uri.query = URI.encode_www_form({
  'language' => 'unk',
  # NOTE(review): trailing space in the key name — Azure will ignore this
  # parameter (fixed in the version below).
  'detectOrientation ' => 'true'
})

request = Net::HTTP::Post.new(uri.request_uri)
request['Content-Type'] = 'application/octet-stream'
# NOTE(review): hardcoded subscription key — move this secret to an
# environment variable before committing.
request['Ocp-Apim-Subscription-Key'] = '43f80a0ab4d5441d8e0d292e19e5d3c9'

# NOTE(review): "{jpg.png.gif}" uses dots where glob alternation needs
# commas, so this matched nothing (fixed in the version below). Only the
# last file's bytes survive because request.body is overwritten each pass.
Dir.glob("#{@directory}/*.{jpg.png.gif}") do |crop_image|
  request.body = File.binread(crop_image)
end

response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http|
  http.request(request)
end

json_file = File.open('json_data.tsv', 'w')
json_file.puts(response.body)

# Request Azure CV API

require 'net/http'

# Send each cropped cell image to the Azure CV OCR endpoint, writing the
# JSON response for the most recent image to json_data.tsv.
def request_API
  @directory = File.expand_path("cell-files", File.dirname(__FILE__))

  uri = URI('https://westcentralus.api.cognitive.microsoft.com/vision/v1.0/ocr')
  uri.query = URI.encode_www_form({
    'language' => 'unk',
    # FIX: removed the trailing space from the parameter name.
    'detectOrientation' => 'true'
  })

  request = Net::HTTP::Post.new(uri.request_uri)
  request['Content-Type'] = 'application/octet-stream'
  # NOTE(review): hardcoded subscription key — read from ENV instead.
  request['Ocp-Apim-Subscription-Key'] = '43f80a0ab4d5441d8e0d292e19e5d3c9'

  # FIX: glob alternation uses commas, not dots.
  Dir.glob("#{@directory}/*.{jpg,png,gif}") do |crop_image|
    request.body = File.binread(crop_image)
    response = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http|
      http.request(request)
    end
    # NOTE(review): the file is reopened with 'w' per image, so only the
    # last response is kept — presumably intentional; confirm with caller.
    json_file = File.open('json_data.tsv', 'w')
    json_file.puts(response.body)
    json_file.close
  end
end
require 'json'

module ZAWS
  # Queries AWS CloudTrail via the aws CLI and reads trail logs synced
  # from S3 into a local cache directory.
  class CloudTrail

    DEFAULT_DAYS_TO_FETCH = 7
    ZAWS_S3_CACHE = "~/.zaws/s3-cache"

    def initialize(shellout, aws)
      @shellout = shellout
      @aws = aws
    end

    # Sync the bucket into the cache, parse every file as JSON, and return
    # (and print) a combined {:Results => [...]} JSON document.
    def get_cloud_trail_by_bucket(region, bucket_name)
      dir_name = @aws.s3.bucket.sync(region, bucket_name, "#{ZAWS_S3_CACHE}/#{bucket_name}")
      results = []
      Dir.open(dir_name) { |dir|
        Dir.glob(File.join(dir, '**', '*')) { |filename|
          File.open(filename) { |file| results.push JSON.parse file.read } if File.file? filename
        }
      }
      json = {:Results => results}.to_json
      puts json
      json
    end

    # Resolve a trail name to its S3 bucket, then fetch by bucket.
    def get_cloud_trail_by_name(region, trail_name)
      available_cloud_trails = get_cloud_trails(region)
      bucket_name = available_cloud_trails.find { |available_cloud_trail|
        available_cloud_trail['Name'] === trail_name }['S3BucketName']
      get_cloud_trail_by_bucket(region, bucket_name)
    end

    # @return [Array<Hash>] trail descriptions from `aws cloudtrail describe-trails`.
    def get_cloud_trails(region, verbose=nil)
      comLine = "aws cloudtrail describe-trails --region #{region}"
      cloud_trails = JSON.parse @shellout.cli(comLine, verbose)
      cloud_trails['trailList']
    end

    def exists(name, region)
      get_cloud_trails(region).any? { |trail| trail['Name'] === name }
    end

    # Create the trail subscription unless one with this name already exists,
    # reusing the bucket when it is already present.
    def declare(name, region, bucket_name, verbose=nil)
      if exists(name, region)
        puts "CloudTrail already exists. Creation skipped.\n"
      else
        bucket_exists = @aws.s3.bucket().exists(bucket_name, region)
        cmdline = "aws --region #{region} cloudtrail create-subscription " <<
                  "--name #{name} --s3-#{bucket_exists ? 'use' : 'new'}-bucket #{bucket_name}"
        puts @shellout.cli(cmdline, verbose)
      end
    end
  end
end

# Kyle - Added support for reading gzipped files from cloud trail
# * Also fixed a number of bugs such as the need to hash bucket names for
#   caching etc.

require 'json'
require 'digest/sha1'
require 'fileutils'
require 'zlib'

module ZAWS
  class CloudTrail

    DEFAULT_DAYS_TO_FETCH = 7
    # Expanded explicitly since "~" is not resolved by the shelled-out sync.
    ZAWS_S3_CACHE = "#{Dir.home}/.zaws/s3-cache"

    def initialize(shellout, aws)
      @shellout = shellout
      @aws = aws
    end

    # Sync the bucket into a hashed cache directory, gunzip-parse every file,
    # and return (and print) a combined {:Results => [...]} JSON document.
    def get_cloud_trail_by_bucket(region, bucket_name)
      bucket_name = "s3://#{bucket_name}" if !bucket_name.match('s3://.*')
      # Hash region+bucket so any bucket name is a safe directory name.
      bucket_hash = Digest::SHA1.hexdigest("#{region}#{bucket_name}")
      dir_name = "#{ZAWS_S3_CACHE}/#{bucket_hash}"
      FileUtils.mkdir_p(dir_name)
      dir_name = @aws.s3.bucket.sync(region, bucket_name, dir_name)
      results = []
      Dir.open(dir_name) { |dir|
        Dir.glob(File.join(dir, '**', '*')) { |filename|
          # CloudTrail delivers logs gzip-compressed.
          Zlib::GzipReader.open(filename) { |file| results.push JSON.parse file.read } if File.file? filename
        }
      }
      json = {:Results => results}.to_json
      puts json
      json
    end

    def get_cloud_trail_by_name(region, trail_name)
      available_cloud_trails = get_cloud_trails(region)
      bucket_name = available_cloud_trails.find { |available_cloud_trail|
        available_cloud_trail['Name'] === trail_name }['S3BucketName']
      get_cloud_trail_by_bucket(region, bucket_name)
    end

    def get_cloud_trails(region, verbose=nil)
      comLine = "aws cloudtrail describe-trails --region #{region}"
      cloud_trails = JSON.parse @shellout.cli(comLine, verbose)
      cloud_trails['trailList']
    end

    def exists(name, region)
      get_cloud_trails(region).any? { |trail| trail['Name'] === name }
    end

    def declare(name, region, bucket_name, verbose=nil)
      if exists(name, region)
        puts "CloudTrail already exists. Creation skipped.\n"
      else
        bucket_exists = @aws.s3.bucket().exists(bucket_name, region)
        cmdline = "aws --region #{region} cloudtrail create-subscription " <<
                  "--name #{name} --s3-#{bucket_exists ? 'use' : 'new'}-bucket #{bucket_name}"
        puts @shellout.cli(cmdline, verbose)
      end
    end
  end
end
# Generated by jeweler
# DO NOT EDIT THIS FILE
# Instead, edit Jeweler::Tasks in Rakefile, and run `rake gemspec`
# -*- encoding: utf-8 -*-

Gem::Specification.new do |s|
  s.name = %q{imdb_party}
  s.version = "0.1.1"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Jon Maddox", "Michael Mesicek"]
  s.date = %q{2010-10-11}
  s.description = %q{IMDB client using the IMDB API that their iPhone app uses}
  s.email = %q{jon@mustacheinc.com mastermike14@gmail.com}
  s.extra_rdoc_files = [
    "LICENSE",
    "README.md"
  ]
  s.files = [
    ".document",
    ".gitignore",
    "LICENSE",
    "README.md",
    "Rakefile",
    "VERSION",
    "imdb_party.gemspec",
    "lib/imdb_party.rb",
    "lib/imdb_party/httparty_icebox.rb",
    "lib/imdb_party/imdb.rb",
    "lib/imdb_party/movie.rb",
    "lib/imdb_party/person.rb",
    "test/movie_test.rb",
    "test/person_test.rb",
    "test/search_test.rb",
    "test/test_helper.rb"
  ]
  s.homepage = %q{http://github.com/mastermike14/imdb_party}
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.7}
  s.summary = %q{IMDB client using the IMDB API that their iPhone app uses}
  s.test_files = [
    "test/movie_test.rb",
    "test/person_test.rb",
    "test/search_test.rb",
    "test/test_helper.rb"
  ]

  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<shoulda>, [">= 0"])
      s.add_runtime_dependency(%q<httparty>, [">= 0"])
    else
      s.add_dependency(%q<shoulda>, [">= 0"])
      s.add_dependency(%q<httparty>, [">= 0"])
    end
  else
    s.add_dependency(%q<shoulda>, [">= 0"])
    s.add_dependency(%q<httparty>, [">= 0"])
  end
end

# gemfile updated

# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

Gem::Specification.new do |gem|
  gem.name          = "imdb-party"
  gem.version       = "1.0"
  gem.authors       = ["John Maddox", "Mike Mesicek"]
  gem.email         = ["jon@mustacheinc.com"]
  gem.description   = %q{Imdb JSON client used IMDB to serve information to the IMDB iPhone app via the IMDB API}
  gem.summary       = "IMDB API for Rails"
  gem.homepage      = 'https://github.com/mastermike14/imdb-party'
  gem.license       = "MIT"

  gem.files         = `git ls-files`.split($/)
  gem.executables   = gem.files.grep(%r{^bin/}) { |f| File.basename(f) }
  gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ["lib"]

  # FIX: File.exists? was deprecated and removed in Ruby 3.2; use File.exist?.
  if File.exist?('UPGRADING')
    gem.post_install_message = File.read('UPGRADING')
  end

  gem.add_runtime_dependency 'rails', ['>= 3', '< 5']
  gem.add_development_dependency 'rspec-rails', '2.13.0' # 2.13.1 is broken
  gem.add_development_dependency 'rspec', '~> 2.6'
  gem.add_development_dependency 'shoulda'
  gem.add_development_dependency 'httparty'
  gem.add_development_dependency 'hpricot'
  gem.add_development_dependency 'guard'
  gem.add_development_dependency 'guard-rspec'
end
# Sidekiq worker that extracts external links from a tweet entry, saves a
# parsed copy of the first linked page, and re-renders the entry content.
class HarvestLinks
  include Sidekiq::Worker
  sidekiq_options retry: false

  def perform(entry_id)
    entry = Entry.find(entry_id)
    tweets = [entry.main_tweet]
    tweets.push(entry.main_tweet.quoted_status) if entry.main_tweet.quoted_status?
    urls = find_urls(tweets)
    if url = urls.first
      page = MercuryParser.parse(url)
      entry.data["saved_pages"] = {url => page.to_h}
      entry.save!
      TwitterLinkImage.perform_async(entry.id, url) if entry.link_tweet?
    end
    entry.content = ApplicationController.render template: "entries/_tweet_default.html.erb", locals: {entry: entry}, layout: nil
    entry.save!
  end

  # Collect expanded URLs from the tweets, skipping twitter.com links.
  def find_urls(tweets)
    tweets.each_with_object([]) do |tweet, array|
      tweet.urls.each do |url|
        url = url.expanded_url
        if url_valid?(url)
          array.push(url.to_s)
        end
      end
    end
  end

  def url_valid?(url)
    !(url.host == "twitter.com")
  end
end

# Queue priorities.

class HarvestLinks
  include Sidekiq::Worker
  # Link harvesting is best-effort, so route it to the low-priority queue.
  sidekiq_options retry: false, queue: :low

  def perform(entry_id)
    entry = Entry.find(entry_id)
    tweets = [entry.main_tweet]
    tweets.push(entry.main_tweet.quoted_status) if entry.main_tweet.quoted_status?
    urls = find_urls(tweets)
    if url = urls.first
      page = MercuryParser.parse(url)
      entry.data["saved_pages"] = {url => page.to_h}
      entry.save!
      TwitterLinkImage.perform_async(entry.id, url) if entry.link_tweet?
    end
    entry.content = ApplicationController.render template: "entries/_tweet_default.html.erb", locals: {entry: entry}, layout: nil
    entry.save!
  end

  def find_urls(tweets)
    tweets.each_with_object([]) do |tweet, array|
      tweet.urls.each do |url|
        url = url.expanded_url
        if url_valid?(url)
          array.push(url.to_s)
        end
      end
    end
  end

  def url_valid?(url)
    !(url.host == "twitter.com")
  end
end
# Mailer notifying a user that another user tipped them coins.
class TipMailer < ActionMailer::Base
  layout 'email'

  def tipped(tip_id)
    @tip = Tip.find(tip_id)
    @user = @tip.to
    mail to: @user.email_address,
         subject: "#{@tip.from.username} tipped you #{@tip.cents} coins"
  end
end

# Fix subject lines on Tip Emails.

class TipMailer < ActionMailer::Base
  layout 'email'

  # FIX: `pluralize` is an ActionView helper and is not available inside a
  # mailer class by default; include TextHelper so the subject renders.
  include ActionView::Helpers::TextHelper

  def tipped(tip_id)
    @tip = Tip.find(tip_id)
    @user = @tip.to
    # FIX: pass the singular form — pluralize derives the plural itself
    # ("1 coin" / "2 coins"); passing 'coins' would produce "1 coins".
    # NOTE(review): integer division cents / 100 truncates — confirm the
    # tip amount is always a whole number of coins.
    mail to: @user.email_address,
         subject: "#{@tip.from.username} tipped you #{pluralize(@tip.cents / 100, 'coin')}"
  end
end
# discovery.rb # # LEGAL NOTICE # ------------- # # OSS Discovery is a tool that finds installed open source software. # Copyright (C) 2007 OpenLogic, Inc. # # OSS Discovery is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License version 3 as # published by the Free Software Foundation. # # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License version 3 (discovery2-client/license/OSSDiscoveryLicense.txt) # for more details. # # You should have received a copy of the GNU Affero General Public License along with this program. # If not, see http://www.gnu.org/licenses/ # # You can learn more about OSSDiscovery, report bugs and get the latest versions at www.ossdiscovery.org. # You can contact the OSS Discovery team at info@ossdiscovery.org. # You can contact OpenLogic at info@openlogic.com. # -------------------------------------------------------------------------------------------------- # # discovery.rb is the main CLI framework. It's purpose is to: # # a) process command line arguments # b) instantiate the major subsystems such as the walker and rule engine # c) kick off the scan # d) produce the reports # # Every property from the config.yml file is loaded as an instance variable of global self. # This is done so that this file can have default values for all of these properties, and then # change them if necessary based on a cli option that was specified. So, if a default value in # the config.yml file is ever modified, this file will receive that modified value by default. # The same will happen if a new value is ever added to the config.yml. 
# # Quick and dirty code architecture discussion: # 1) A Walker is a class which traverses the disk looking for files that match a given set of # of regular expressions # a) The Walker derives the list of file matches it should be looking for from the RuleEngine # 2) The RuleEngine is initialized through reading the set of project rules xml files found in # the lib/rules directory and subdirectories. A project rule and its match rules defines # filename regular expressions which could indicate a project is installed # 3) The Walker looks for any file which matches one of rule's "files of interest" (FOI) # a) A file of interest is really a regular expression that could match any number of possible # files the rule could apply to. # b) You can see the list of patterns that make up the "files of interest" by running: # ./discovery --list-foi # 4) When the Walker finds a matching file, it calls the RuleEngine with the found file # 5) The RuleEngine will evaluate the file and apply any rule which matches that filename # a) There are currently 4 types of match rules depending upon the rule writer's judgement for # the best way to detect the project's key files. # 6) The RuleEngine will track the match state for each of the project rules and match rules # 7) After the Walker has completed the disk traverse, the RuleEngine contains the match states # of everything found # 8) The Discovery framework then dumps a list of the match states to the console in a sorted order # a) optionally, the results will be delivered to an open source census server for inclusion in # the open source census. 
# # For more details on how the project rules work, please see the "Rule Writing for Discovery" document # on the project web site: http://www.ossdiscovery.org # $:.unshift File.join(File.dirname(__FILE__)) require 'date' require 'getoptlong' require 'parsedate' require 'pp' require 'walker.rb' require 'cliutils.rb' require 'rule_engine.rb' require 'scan_rules_updater' #--------------- global defaults --------------------------------------------- # maintain these in alphabetical order, please @basedir = File.expand_path(File.dirname(__FILE__)) @config = 'conf/config.rb' @copyright = "Copyright (C) 2007 OpenLogic, Inc." @discovery_version = "2.0-alpha-4" @discovery_name = "discovery" @discovery_license = "GNU Affero General Public License version 3" @discovery_license_shortname = "Affero GPLv3" @dir_exclusion_filters = Hash.new @distro = "Unknown: Unrecognized" @file_exclusion_filters = Hash.new @census_code = "" @inclusion_filters = Hash.new @@log = Config.prop(:log) # walker configuration parameter defaults @list_files = false @list_foi = false @list_exclusions = false @os = "Unknown" # distro major name "ubuntu" @os_family = "Unknown" # linux, windows, etc @os_architecture = "Unknown" # i386, x86_64, sparc, etc @os_version = "Unknown" # 5.04, 10.4, etc @show_every = 1000 @show_progress = false @show_verbose = false # used to help validate speed values in various subsystems @valid_speeds = 1 SPEEDHINT = 1 unless defined?(SPEEDHINT) # important global objects @rule_engine = nil @walker = nil # configuration file can override any of the parameters above require "#{@basedir}/#{@config}" require "#{@basedir}/#{Config.prop(:generic_filters)}" # Load any plugins, meaning any file named 'init.rb' found somewhere # under the 'plugins' directory. 
def load_plugins plugin_files = File.join(File.dirname(__FILE__), "plugins", "**", "init.rb") Dir.glob(plugin_files) { |path| require path } end load_plugins if Config.prop(:load_plugins) =begin rdoc This is the main executive controller of discovery a) assumes processing command line arguments has occurred b) instantiates the major subsystems such as the walker and rule engine c) kicks off the scan =end def execute() # mark the beginning of a scan @starttime = Time.new @universal_rules_md5 = ScanRulesReader.generate_aggregate_md5(File.dirname(@rules_openlogic)) @universal_rules_version = ScanRulesReader.get_universal_rules_version() # create the application's Walker instance - @list_files is boolean for whether to dump files as encountered @walker = Walker::new( ) if ( @walker == nil ) printf("FATAL - walker cannot be created\n") exit 1 end # setup all the walker behavior based on CLI flags # # exclusion filters is a hash of descriptions/regexs, so just pass the criteria to the walker @walker.add_dir_exclusions( @dir_exclusion_filters.values ) @walker.add_file_exclusions( @file_exclusion_filters.values ) @walker.list_exclusions = @list_exclusions @walker.list_files = @list_files @walker.show_permission_denied = @show_permission_denied @walker.show_every = @show_every.to_i @walker.show_progress = @show_progress @walker.show_verbose = @show_verbose @walker.symlink_depth = @symlink_depth @walker.follow_symlinks = @follow_symlinks @walker.throttling_enabled = @throttling_enabled @walker.throttle_number_of_files = @throttle_number_of_files @walker.throttle_seconds_to_pause = @throttle_seconds_to_pause # create the applications RuleEngine instance # in the process of constructing the object, the rule engine # will register with the walker and set up the list of files of interest # after this object is created, the machine is ready to scan puts "Reading project rules....\n" @rule_engine = RuleEngine.new( @rules_dirs, @walker, SPEEDHINT ) # @rule_engine = RuleEngine.new( 
@rules_dirs, @walker, @speedhint ) - future, whenever 'speedhint' gets added back to config.yml # obey the command line parameter to list the files of interest. this can't be done until # the rule engine has parsed the scan rules file so that we know all the actual files of # interest determined by scan rules expressions if ( @list_foi ) printf("Files of interest:\n") @walker.get_files_of_interest.each { | foi | printf("%s\n", foi.source) } exit 0 end # This is the main call to start scanning a machine @directory_to_scan = File.expand_path(@directory_to_scan) puts "Scanning #{@directory_to_scan}\n" @walker.walk_dir( @directory_to_scan ) # mark the end of a scan @endtime = Time.new end def update_scan_rules() updater = ScanRulesUpdater.new(@server_base_url) updater.proxy_host = @proxy_host updater.proxy_port = @proxy_port updater.proxy_username = @proxy_user updater.proxy_password = @proxy_password begin updater.update_scanrules(@rules_openlogic, @rules_files_url_path) rescue Exception => e @@log.error("Discovery: " << e.inspect + e.backtrace.inspect.gsub("[\"", "\n\t[\"").gsub(", ", ",\n\t ")) # if there's a better/easier way to get a readable exception trace, I don't know what it is printf("#{e.to_s}\n") end end def validate_directory_to_scan( dir ) # Some versions of ruby have trouble when expanding a path with backslashes. # In windows, replace all backslashes with forward slashes. if major_platform =~ /windows/ dir=dir.gsub!('\\','/') end @directory_to_scan = File.expand_path( dir ) @directory_to_scan.gsub!('//','/') dir_exists=true if ( !File.exist?(@directory_to_scan ) ) # If it doesn't exist, it may be a weirdism with ruby turning c:\ into /c:/. 
So # make that change and try again if ( @directory_to_scan =~ /:/ ) @directory_to_scan = @directory_to_scan[1..@directory_to_scan.length] if ( !File.exist?(@directory_to_scan) ) dir_exists=false else dir_exists=true end else dir_exists=false end end if not dir_exists printf("The given path to scan does not exist: %s\n", dir ) # printf("Expanded path does not exist: %s\n", @directory_to_scan ) return false else return true end end #----------------------------- command line parsing ------------------------------------------ options = GetoptLong.new( # please maintain these in alphabetical order [ "--conf", "-c", GetoptLong::REQUIRED_ARGUMENT ], # specific conf file [ "--deliver-results", "-d", GetoptLong::OPTIONAL_ARGUMENT ],# existence says 'yes' deliver results to server, followed by a filename sends that file to the server [ "--deliver-batch", "-D", GetoptLong::REQUIRED_ARGUMENT ], # argument points to a directory of scan results files to submit [ "--help", "-h", GetoptLong::NO_ARGUMENT ], # get help, then exit [ "--geography", "-Y", GetoptLong::REQUIRED_ARGUMENT ], # geography code [ "--census-code","-C", GetoptLong::REQUIRED_ARGUMENT ], # identifier representing the census code [ "--human-results","-u", GetoptLong::REQUIRED_ARGUMENT ], # path to results file [ "--list-os","-o", GetoptLong::NO_ARGUMENT ], # returns the same os string that will be reported with machine scan results [ "--list-excluded", "-e", GetoptLong::NO_ARGUMENT], # show excluded filenames during scan [ "--list-files", "-l", GetoptLong::NO_ARGUMENT ], # show encountered filenames during scan [ "--list-filters", "-g", GetoptLong::NO_ARGUMENT ], # show list of filters, then exit [ "--list-foi", "-i", GetoptLong::NO_ARGUMENT ], # show a list of files of interest derived from scan rules, then exit [ "--list-projects", "-j", GetoptLong::OPTIONAL_ARGUMENT ], # show a list projects discovery is capable of finding [ "--list-md5-dupes", "-M", GetoptLong::NO_ARGUMENT ], # [ "--list-tag", "-t", 
GetoptLong::NO_ARGUMENT ], # dump the MD5 hash which is the machine id tag [ "--machine-results","-m", GetoptLong::REQUIRED_ARGUMENT ], # path to results file [ "--nofollow", "-S", GetoptLong::NO_ARGUMENT ], # follow symlinks? presence of this flag says "No" don't follow [ "--inc-path", "-I", GetoptLong::NO_ARGUMENT ], # existence of this flag says to include location (path) in results [ "--path", "-p", GetoptLong::REQUIRED_ARGUMENT ], # scan explicit path [ "--progress", "-x", GetoptLong::OPTIONAL_ARGUMENT ], # show a progress indication every X files scanned [ "--preview-results","-R", GetoptLong::OPTIONAL_ARGUMENT ], # the existence of this flag will cause discovery to print to stdout the machine results file when scan is completed [ "--production-scan","-P", GetoptLong::NO_ARGUMENT ], # This flag identifies the scan you run as a scan of a production machine in the results. # future [ "--speed", "-s", GetoptLong::REQUIRED_ARGUMENT ], # speed hint - how much analysis to do, which rules to use [ "--rule-version", "-V", GetoptLong::NO_ARGUMENT ], # print out rule version info and do nothing else (no scan performed) [ "--throttle", "-T", GetoptLong::NO_ARGUMENT ], # enable production throttling (by default it is disabled) [ "--update-rules", "-r", GetoptLong::OPTIONAL_ARGUMENT ], # get update scan rules, and optionally perform the scan after getting them [ "--verbose", "-b", GetoptLong::OPTIONAL_ARGUMENT ], # be verbose while scanning - every X files scanned [ "--version", "-v", GetoptLong::OPTIONAL_ARGUMENT ] # print version, then exit # TODO - would be nice to override the filter-list.rb file from the CLI # TODO - need to be able to throttle the scan rate so it doesn't soak CPU cycles on production boxes ) # begin # Every property from the config.yml file is loaded as an instance variable of self. # This is done so that this file can have default values for all of these properties, and then # change them if necessary based on a cli option that was specified. 
configs = Config.configs configs.each_pair {|key, value| self.instance_variable_set("@" + key.to_s, value) } @distro = get_os_version_str # generate a unique and static machine id @machine_id = make_machine_id options.each do | opt, arg | case opt when "--conf" if ( File.exist?(arg) && File.file?(arg) ) @config = arg else printf("The given configuration path does not exist or is not a file: %s\n", arg ) exit 1 end when "--deliver-batch" if ( !File.directory?(arg) ) printf("#{arg} does not exist, please recheck the directory name\n") exit 1 end deliver_batch( arg ) exit 0 # existence says 'yes' deliver the machine readable results to the server # optional arg will either immediately deliver results if the file already exists # or will scan the machine and use that filename as the results file and then deliver it # if no results filename is given, the machine will be rescanned and results placed in the # default results file and then posted. when "--deliver-results" @send_results = true if ( arg != nil && arg != "" ) # results file was given, see if it exists. # if it exists, post it immediately, exit once the status code is received from the server # if it does not exist, scan the machine normally except use the given filename as the # the results file to post when the scan is complete if ( File.exists?(arg) ) printf("Immediately delivering the results file: #{arg} ...\n") # don't need to enforce geography check on cli because by delivering files, that geography would # have already been validated. Also, if the scan_results geography is invalid, the server # will reject the scan deliver_results( arg ) exit 0 else puts "The file you specified to be delivered to the census server does not exist." 
puts File.expand_path(arg) exit 1 end end # if deliverying anonymous results (no group passcode), then the geography option is required if ( (@census_code == nil || @census_code == "") && (@geography == nil || (@geography.to_i < 1 || @geography.to_i > 9)) ) printf("\nScan not completed\n") printf("\nWhen delivering anonymous results to the OSSCensus server, the geography must be defined\n") printf(" use --geography to specify the geography code or \n") printf(" modify the geography property in the config.yml file\n") printf(" Geography codes for the --geography option are:\n") printf( show_geographies() ) printf("\n --geography is an order dependent parameter and must be used before the --deliver-results parameter\n") printf("If you are registered with the OSSCensus site and have a group passcode or token, you should set that \n") printf("on the command line or add it to your config.yml file.\n") exit 1 elsif ( @census_code != "" && @geography.to_i == 100 ) # default the geography to "" if group passcode is supplied but geography was not overridden # geography will be associated on the server side using the census-code @geography = "" end begin File.open(@machine_results, "w") {|file|} rescue Exception => e puts "ERROR: Unable to access file: '#{@machine_results}'" exit 1 end when "--help" help() exit 0 when "--inc-path" @include_paths = true when "--human-results" # Test access to the results directory/filename before performing # any scan. 
This meets one of the requirements for disco 2 which is to not perform # a huge scan and then bomb at the end because the results can't be written # need to do a test file create/write - if it succeeds, proceed # if it fails, bail now so you don't end up running a scan when there's no place # to put the results @results = arg begin # Issue 34: only open as append in this test so we do not blow away an existing results file File.open(@results, "a") {|file|} rescue Exception => e puts "ERROR: Unable to write to file: '#{@results}'\n" if ( !(File.directory?( File.dirname(@results) ) ) ) puts "The directory " + File.dirname( @results ) + " does not exist\n" end exit 0 end when "--geography" @geography = arg if ( @geography.to_i < 1 || @geography.to_i > 9 ) printf("Invalid geography #{@geography}\n") printf(show_geographies()) exit 1 end when "--census-code" @census_code = arg # TODO - validation of census code format # if geography is undefined and a census_code is supplied, geography should be empty if ( @geography.to_i < 1 || @geography.to_i > 9 ) @geography = "" end when "--list-os" printf("%s, arch: %s, kernel: %s\n", get_os_version_str(), @os_architecture, @kernel ) exit 0 when "--list-excluded" @list_exclusions = true when "--list-filters" dump_filters() exit 0 when "--list-files" @list_files = true when "--list-foi" @list_foi = true when "--list-md5-dupes" ScanRulesReader.find_duplicated_md5_match_rules(@rules_dirs) exit 0 when "--list-projects" projects = ScanRulesReader.discoverable_projects(@rules_dirs) if (arg == "verbose") then puts "number,name,from,platforms,description" projects.each_with_index do |p, i| puts "#{i+1},#{p.name},#{p.from},#{p.operating_systems.to_a.inspect.gsub(", ", "|")},#{p.desc}" end else names = projects.collect{|p| p.name}.to_set.sort names.each_with_index do |name, i| puts "#{i+1},#{name}" end end exit 0 when "--list-tag" printf("Unique Machine Tag (ID): %s\n", @machine_id ) exit 0 when "--machine-results" # Test access to the 
results directory/filename before performing # any scan. This meets one of the requirements for disco 2 which is to not perform # a huge scan and then bomb at the end because the results can't be written # need to do a test file create/write - if it succeeds, proceed # if it fails, bail now so you don't end up running a scan when there's no place # to put the results @machine_results = arg begin File.open(@machine_results, "a") {|file|} rescue Exception => e puts "ERROR: Unable to write to file: '#{@machine_results}'" if ( !(File.directory?( File.dirname(@machine_results) ) ) ) puts"The directory " + File.dirname( @machine_results ) + " does not exist\n" end exit 0 end when "--nofollow" @follow_symlinks = false when "--path" if ( !validate_directory_to_scan( arg ) ) exit 1 end when "--progress" @show_progress = true if ( arg != "" ) # TODO validate argument to be a positive integer > 50 @show_every = arg.to_i end when "--preview-results" @preview_results = true when "--production-scan" @production_scan = true @@log.info('Discovery') {'This scan will be identified as a production scan.'} when "--rule-version" print_rule_version_info exit 0 when "--throttle" @throttling_enabled = true @@log.info('Discovery') {'Throttling has been enabled.'} when "--update-rules" if (arg == nil || arg == "") then @update_rules = true @update_rules_and_do_scan = false elsif (arg == "scan") @update_rules = true @update_rules_and_do_scan = true else puts "The only valid arg for the '--update-rules' option is 'scan'. You provided an arg of '#{arg}'." exit 1 end when "--verbose" @show_verbose = true if ( arg != "" ) # TODO validate argument to be a positive integer > 50 @show_every = arg.to_i end when "--version" printf("%s\n", version() ) exit 0 end # case end # options do # rescue Exception => e # printf("Unsupported option. Please review the list of supported options and usage:\n") # @@log.error('Discovery') {"Unsupported option. 
Please review the list of supported options and usage: #{$!}"} # @@log.error('Discovery') {"#{e.message}\n#{e.backtrace}"} # puts "#{e.message}\n#{e.backtrace}" # help() # exit 1 # end # interpret any leftover arguments as the override path if ( ARGV.size > 0 ) if ( ARGV[0] != "" ) validate_directory_to_scan( ARGV[0] ) end end #----------------------------- do the business ------------------------------------- # If this is running under jruby, we ignore the --nofollow preference and manually set # symlinks to not be followed. Jruby has a lot of problems with symlinks, so we have to # completely ignore them unless running in native ruby. @follow_symlinks = false if RUBY_PLATFORM =~ /java/ #if RUBY_PLATFORM =~ /java/ # require 'java' # puts "Java Version: #{java.lang.System.getProperty('java.version')}" #end # Immediately check to see if the machine results output file is writeable. If it is not, don't be a hack and do the scan anyway. begin File.open(@machine_results, "w") {|file|} rescue Exception => e puts "ERROR: Unable to write to machine results file: '#{@machine_results}'. This file must be writeable before a scan can be performed." 
exit 1 end if (@update_rules) then do_a_scan = "Finished getting the updated rules, going on to perform a scan.\n" just_update_rules = "Finished getting the updated rules, no scan being performed.\n" # get the updated rules from the server begin printf("Getting the updated scan rules from the server.\n") update_scan_rules() rescue => e error_msg = "An error occured while attempting to get the updated scan rules.\n" error_msg << " error: #{e.message}\n" error_msg << " The original scan rules should still be in affect.\n" printf(error_msg) @@log.error(e.inspect + e.backtrace.inspect.gsub("[\"", "\n\t[\"").gsub(", ", ",\n\t ")) # if there's a better/easier way to get a readable exception trace, I don't know what it is do_a_scan = "Going on to perform a scan using the original scan rules.\n" just_update_rules = "No scan being performed.\n" end if (@update_rules_and_do_scan) then # go on and do the business below starting with 'execute()' printf(do_a_scan) else printf(just_update_rules) exit 0 end end # execute a scan execute # scan is complete, do a simple report based projects evaluated by the rule engine - this 'report' method is in cliutils.rb @packages = @rule_engine.scan_complete def make_reports # human readable report report @packages if @produce_match_audit_records report_audit_records @rule_engine.audit_records end if ( @geography.to_i < 1 || @geography.to_i > 9 ) @geography = "" end # machine_report method is no longer defined in cliutils.rb -- see the 'TODO technical debt' in census_utils.rb if (Object.respond_to?(:machine_report, true)) then # deal with machine reports and sending results if allowed machine_report(@machine_results, @packages, version, @machine_id, @walker.dir_ct, @walker.file_ct, @walker.sym_link_ct, @walker.permission_denied_ct, @walker.foi_ct, @starttime, @endtime, @distro, @os_family, @os, @os_version, @os_architecture, @kernel, @production_scan, @include_paths, @preview_results, @census_code, @universal_rules_md5, 
@universal_rules_version, @geography ) end end make_reports if @send_results deliver_results @machine_results end puts "Scan complete" exit 0 fix for gsub error git-svn-id: 36b044a1d4e5f74e123a9788437846fc1f2f0dec@305 fd6d9157-2c40-0410-9642-e7077f30ef04 # discovery.rb # # LEGAL NOTICE # ------------- # # OSS Discovery is a tool that finds installed open source software. # Copyright (C) 2007 OpenLogic, Inc. # # OSS Discovery is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License version 3 as # published by the Free Software Foundation. # # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License version 3 (discovery2-client/license/OSSDiscoveryLicense.txt) # for more details. # # You should have received a copy of the GNU Affero General Public License along with this program. # If not, see http://www.gnu.org/licenses/ # # You can learn more about OSSDiscovery, report bugs and get the latest versions at www.ossdiscovery.org. # You can contact the OSS Discovery team at info@ossdiscovery.org. # You can contact OpenLogic at info@openlogic.com. # -------------------------------------------------------------------------------------------------- # # discovery.rb is the main CLI framework. It's purpose is to: # # a) process command line arguments # b) instantiate the major subsystems such as the walker and rule engine # c) kick off the scan # d) produce the reports # # Every property from the config.yml file is loaded as an instance variable of global self. # This is done so that this file can have default values for all of these properties, and then # change them if necessary based on a cli option that was specified. So, if a default value in # the config.yml file is ever modified, this file will receive that modified value by default. 
# The same will happen if a new value is ever added to the config.yml.
#
# Quick and dirty code architecture discussion:
# 1) A Walker is a class which traverses the disk looking for files that match a given set of
#    of regular expressions
#    a) The Walker derives the list of file matches it should be looking for from the RuleEngine
# 2) The RuleEngine is initialized through reading the set of project rules xml files found in
#    the lib/rules directory and subdirectories. A project rule and its match rules defines
#    filename regular expressions which could indicate a project is installed
# 3) The Walker looks for any file which matches one of rule's "files of interest" (FOI)
#    a) A file of interest is really a regular expression that could match any number of possible
#       files the rule could apply to.
#    b) You can see the list of patterns that make up the "files of interest" by running:
#       ./discovery --list-foi
# 4) When the Walker finds a matching file, it calls the RuleEngine with the found file
# 5) The RuleEngine will evaluate the file and apply any rule which matches that filename
#    a) There are currently 4 types of match rules depending upon the rule writer's judgement for
#       the best way to detect the project's key files.
# 6) The RuleEngine will track the match state for each of the project rules and match rules
# 7) After the Walker has completed the disk traverse, the RuleEngine contains the match states
#    of everything found
# 8) The Discovery framework then dumps a list of the match states to the console in a sorted order
#    a) optionally, the results will be delivered to an open source census server for inclusion in
#       the open source census.
#
# For more details on how the project rules work, please see the "Rule Writing for Discovery" document
# on the project web site: http://www.ossdiscovery.org
#

$:.unshift File.join(File.dirname(__FILE__))

require 'date'
require 'getoptlong'
require 'parsedate'
require 'pp'

require 'walker.rb'
require 'cliutils.rb'
require 'rule_engine.rb'
require 'scan_rules_updater'

#--------------- global defaults ---------------------------------------------
# maintain these in alphabetical order, please

@basedir = File.expand_path(File.dirname(__FILE__))
@config = 'conf/config.rb'
@copyright = "Copyright (C) 2007 OpenLogic, Inc."
@discovery_version = "2.0-alpha-4"
@discovery_name = "discovery"
@discovery_license = "GNU Affero General Public License version 3"
@discovery_license_shortname = "Affero GPLv3"
@dir_exclusion_filters = Hash.new
@distro = "Unknown: Unrecognized"
@file_exclusion_filters = Hash.new
@census_code = ""
@inclusion_filters = Hash.new
@@log = Config.prop(:log)

# walker configuration parameter defaults
@list_files = false
@list_foi = false
@list_exclusions = false
@os = "Unknown"               # distro major name "ubuntu"
@os_family = "Unknown"        # linux, windows, etc
@os_architecture = "Unknown"  # i386, x86_64, sparc, etc
@os_version = "Unknown"       # 5.04, 10.4, etc
@show_every = 1000
@show_progress = false
@show_verbose = false

# used to help validate speed values in various subsystems
@valid_speeds = 1
SPEEDHINT = 1 unless defined?(SPEEDHINT)

# important global objects
@rule_engine = nil
@walker = nil

# configuration file can override any of the parameters above
require "#{@basedir}/#{@config}"
require "#{@basedir}/#{Config.prop(:generic_filters)}"

# Load any plugins, meaning any file named 'init.rb' found somewhere
# under the 'plugins' directory.
def load_plugins
  plugin_files = File.join(File.dirname(__FILE__), "plugins", "**", "init.rb")
  Dir.glob(plugin_files) { |path| require path }
end

load_plugins if Config.prop(:load_plugins)

=begin rdoc
  This is the main executive controller of discovery
    a) assumes processing command line arguments has occurred
    b) instantiates the major subsystems such as the walker and rule engine
    c) kicks off the scan
=end
def execute()
  # mark the beginning of a scan
  @starttime = Time.new

  @universal_rules_md5 = ScanRulesReader.generate_aggregate_md5(File.dirname(@rules_openlogic))
  @universal_rules_version = ScanRulesReader.get_universal_rules_version()

  # create the application's Walker instance - @list_files is boolean for whether to dump files as encountered
  @walker = Walker::new( )
  if ( @walker == nil )
    printf("FATAL - walker cannot be created\n")
    exit 1
  end

  # setup all the walker behavior based on CLI flags
  #
  # exclusion filters is a hash of descriptions/regexs, so just pass the criteria to the walker
  @walker.add_dir_exclusions( @dir_exclusion_filters.values )
  @walker.add_file_exclusions( @file_exclusion_filters.values )
  @walker.list_exclusions = @list_exclusions
  @walker.list_files = @list_files
  @walker.show_permission_denied = @show_permission_denied
  @walker.show_every = @show_every.to_i
  @walker.show_progress = @show_progress
  @walker.show_verbose = @show_verbose
  @walker.symlink_depth = @symlink_depth
  @walker.follow_symlinks = @follow_symlinks
  @walker.throttling_enabled = @throttling_enabled
  @walker.throttle_number_of_files = @throttle_number_of_files
  @walker.throttle_seconds_to_pause = @throttle_seconds_to_pause

  # create the applications RuleEngine instance
  # in the process of constructing the object, the rule engine
  # will register with the walker and set up the list of files of interest
  # after this object is created, the machine is ready to scan
  puts "Reading project rules....\n"
  @rule_engine = RuleEngine.new( @rules_dirs, @walker, SPEEDHINT )
  # @rule_engine = RuleEngine.new( @rules_dirs, @walker, @speedhint ) - future, whenever 'speedhint' gets added back to config.yml

  # obey the command line parameter to list the files of interest. this can't be done until
  # the rule engine has parsed the scan rules file so that we know all the actual files of
  # interest determined by scan rules expressions
  if ( @list_foi )
    printf("Files of interest:\n")
    @walker.get_files_of_interest.each { | foi | printf("%s\n", foi.source) }
    exit 0
  end

  # This is the main call to start scanning a machine
  @directory_to_scan = File.expand_path(@directory_to_scan)
  puts "Scanning #{@directory_to_scan}\n"
  @walker.walk_dir( @directory_to_scan )

  # mark the end of a scan
  @endtime = Time.new
end

def update_scan_rules()
  updater = ScanRulesUpdater.new(@server_base_url)
  updater.proxy_host = @proxy_host
  updater.proxy_port = @proxy_port
  updater.proxy_username = @proxy_user
  updater.proxy_password = @proxy_password
  begin
    updater.update_scanrules(@rules_openlogic, @rules_files_url_path)
  rescue StandardError => e  # BUGFIX: was `rescue Exception`, which also swallowed SystemExit/SignalException
    @@log.error("Discovery: " << e.inspect + e.backtrace.inspect.gsub("[\"", "\n\t[\"").gsub(", ", ",\n\t ")) # if there's a better/easier way to get a readable exception trace, I don't know what it is
    printf("#{e.to_s}\n")
  end
end

def validate_directory_to_scan( dir )
  # Some versions of ruby have trouble when expanding a path with backslashes.
  # In windows, replace all backslashes with forward slashes.
  if major_platform =~ /windows/
    dir = dir.gsub('\\','/')
  end

  @directory_to_scan = File.expand_path( dir )
  @directory_to_scan.gsub!('//','/')

  dir_exists = true

  if ( !File.exist?(@directory_to_scan ) )
    # If it doesn't exist, it may be a weirdism with ruby turning c:\ into /c:/. So
    # make that change and try again
    if ( @directory_to_scan =~ /:/ )
      @directory_to_scan = @directory_to_scan[1..@directory_to_scan.length]
      if ( !File.exist?(@directory_to_scan) )
        dir_exists = false
      else
        dir_exists = true
      end
    else
      dir_exists = false
    end
  end

  if not dir_exists
    printf("The given path to scan does not exist: %s\n", dir )
    # printf("Expanded path does not exist: %s\n", @directory_to_scan )
    return false
  else
    return true
  end
end

#----------------------------- command line parsing ------------------------------------------

options = GetoptLong.new(
  # please maintain these in alphabetical order
  [ "--conf", "-c", GetoptLong::REQUIRED_ARGUMENT ],      # specific conf file
  [ "--deliver-results", "-d", GetoptLong::OPTIONAL_ARGUMENT ], # existence says 'yes' deliver results to server, followed by a filename sends that file to the server
  [ "--deliver-batch", "-D", GetoptLong::REQUIRED_ARGUMENT ],   # argument points to a directory of scan results files to submit
  [ "--help", "-h", GetoptLong::NO_ARGUMENT ],            # get help, then exit
  [ "--geography", "-Y", GetoptLong::REQUIRED_ARGUMENT ], # geography code
  [ "--census-code","-C", GetoptLong::REQUIRED_ARGUMENT ],# identifier representing the census code
  [ "--human-results","-u", GetoptLong::REQUIRED_ARGUMENT ],    # path to results file
  [ "--list-os","-o", GetoptLong::NO_ARGUMENT ],          # returns the same os string that will be reported with machine scan results
  [ "--list-excluded", "-e", GetoptLong::NO_ARGUMENT],    # show excluded filenames during scan
  [ "--list-files", "-l", GetoptLong::NO_ARGUMENT ],      # show encountered filenames during scan
  [ "--list-filters", "-g", GetoptLong::NO_ARGUMENT ],    # show list of filters, then exit
  [ "--list-foi", "-i", GetoptLong::NO_ARGUMENT ],        # show a list of files of interest derived from scan rules, then exit
  [ "--list-projects", "-j", GetoptLong::OPTIONAL_ARGUMENT ],   # show a list projects discovery is capable of finding
  [ "--list-md5-dupes", "-M", GetoptLong::NO_ARGUMENT ],
  [ "--list-tag", "-t", GetoptLong::NO_ARGUMENT ],        # dump the MD5 hash which is the machine id tag
  [ "--machine-results","-m", GetoptLong::REQUIRED_ARGUMENT ],  # path to results file
  [ "--nofollow", "-S", GetoptLong::NO_ARGUMENT ],        # follow symlinks? presence of this flag says "No" don't follow
  [ "--inc-path", "-I", GetoptLong::NO_ARGUMENT ],        # existence of this flag says to include location (path) in results
  [ "--path", "-p", GetoptLong::REQUIRED_ARGUMENT ],      # scan explicit path
  [ "--progress", "-x", GetoptLong::OPTIONAL_ARGUMENT ],  # show a progress indication every X files scanned
  [ "--preview-results","-R", GetoptLong::OPTIONAL_ARGUMENT ],  # the existence of this flag will cause discovery to print to stdout the machine results file when scan is completed
  [ "--production-scan","-P", GetoptLong::NO_ARGUMENT ],  # This flag identifies the scan you run as a scan of a production machine in the results.
  # future [ "--speed", "-s", GetoptLong::REQUIRED_ARGUMENT ],  # speed hint - how much analysis to do, which rules to use
  [ "--rule-version", "-V", GetoptLong::NO_ARGUMENT ],    # print out rule version info and do nothing else (no scan performed)
  [ "--throttle", "-T", GetoptLong::NO_ARGUMENT ],        # enable production throttling (by default it is disabled)
  [ "--update-rules", "-r", GetoptLong::OPTIONAL_ARGUMENT ],    # get update scan rules, and optionally perform the scan after getting them
  [ "--verbose", "-b", GetoptLong::OPTIONAL_ARGUMENT ],   # be verbose while scanning - every X files scanned
  [ "--version", "-v", GetoptLong::OPTIONAL_ARGUMENT ]    # print version, then exit
  # TODO - would be nice to override the filter-list.rb file from the CLI
  # TODO - need to be able to throttle the scan rate so it doesn't soak CPU cycles on production boxes
)

# begin
# Every property from the config.yml file is loaded as an instance variable of self.
# This is done so that this file can have default values for all of these properties, and then
# change them if necessary based on a cli option that was specified.
# Load every property from config.yml as an instance variable on the top-level
# object so the CLI options processed below can override the configured defaults.
configs = Config.configs
configs.each_pair { |key, value| self.instance_variable_set("@" + key.to_s, value) }

@distro = get_os_version_str

# generate a unique and static machine id
@machine_id = make_machine_id

options.each do |opt, arg|
  case opt
  when "--conf"
    if ( File.exist?(arg) && File.file?(arg) )
      @config = arg
    else
      printf("The given configuration path does not exist or is not a file: %s\n", arg )
      exit 1
    end

  when "--deliver-batch"
    if ( !File.directory?(arg) )
      printf("#{arg} does not exist, please recheck the directory name\n")
      exit 1
    end
    deliver_batch( arg )
    exit 0

  # existence says 'yes' deliver the machine readable results to the server
  # optional arg will either immediately deliver results if the file already exists
  # or will scan the machine and use that filename as the results file and then deliver it
  # if no results filename is given, the machine will be rescanned and results placed in the
  # default results file and then posted.
  when "--deliver-results"
    @send_results = true

    if ( arg != nil && arg != "" )
      # results file was given, see if it exists.
      # if it exists, post it immediately, exit once the status code is received from the server
      # if it does not exist, scan the machine normally except use the given filename as the
      # the results file to post when the scan is complete
      # BUGFIX: File.exists? is deprecated (removed in Ruby 3.2); use File.exist?
      if ( File.exist?(arg) )
        printf("Immediately delivering the results file: #{arg} ...\n")
        # don't need to enforce geography check on cli because by delivering files, that geography would
        # have already been validated. Also, if the scan_results geography is invalid, the server
        # will reject the scan
        deliver_results( arg )
        exit 0
      else
        puts "The file you specified to be delivered to the census server does not exist."
        puts File.expand_path(arg)
        exit 1
      end
    end

    # if delivering anonymous results (no group passcode), then the geography option is required
    if ( (@census_code == nil || @census_code == "") && (@geography == nil || (@geography.to_i < 1 || @geography.to_i > 9)) )
      printf("\nScan not completed\n")
      printf("\nWhen delivering anonymous results to the OSSCensus server, the geography must be defined\n")
      printf(" use --geography to specify the geography code or \n")
      printf(" modify the geography property in the config.yml file\n")
      printf(" Geography codes for the --geography option are:\n")
      printf( show_geographies() )
      printf("\n --geography is an order dependent parameter and must be used before the --deliver-results parameter\n")
      printf("If you are registered with the OSSCensus site and have a group passcode or token, you should set that \n")
      printf("on the command line or add it to your config.yml file.\n")
      exit 1
    elsif ( @census_code != "" && @geography.to_i == 100 )
      # default the geography to "" if group passcode is supplied but geography was not overridden
      # geography will be associated on the server side using the census-code
      @geography = ""
    end

    begin
      File.open(@machine_results, "w") {|file|}
    rescue StandardError => e  # narrowed from Exception so signals/SystemExit still propagate
      puts "ERROR: Unable to access file: '#{@machine_results}'"
      exit 1
    end

  when "--help"
    help()
    exit 0

  when "--inc-path"
    @include_paths = true

  when "--human-results"
    # Test access to the results directory/filename before performing any scan.
    # This meets one of the requirements for disco 2 which is to not perform
    # a huge scan and then bomb at the end because the results can't be written.
    @results = arg
    begin
      # Issue 34: only open as append in this test so we do not blow away an existing results file
      File.open(@results, "a") {|file|}
    rescue StandardError => e
      puts "ERROR: Unable to write to file: '#{@results}'\n"
      if ( !(File.directory?( File.dirname(@results) ) ) )
        puts "The directory " + File.dirname( @results ) + " does not exist\n"
      end
      # BUGFIX: this is a failure path, so exit non-zero (was exit 0, which reported success)
      exit 1
    end

  when "--geography"
    @geography = arg
    if ( @geography.to_i < 1 || @geography.to_i > 9 )
      printf("Invalid geography #{@geography}\n")
      printf(show_geographies())
      exit 1
    end

  when "--census-code"
    @census_code = arg
    # TODO - validation of census code format
    # if geography is undefined and a census_code is supplied, geography should be empty
    if ( @geography.to_i < 1 || @geography.to_i > 9 )
      @geography = ""
    end

  when "--list-os"
    printf("%s, arch: %s, kernel: %s\n", get_os_version_str(), @os_architecture, @kernel )
    exit 0

  when "--list-excluded"
    @list_exclusions = true

  when "--list-filters"
    dump_filters()
    exit 0

  when "--list-files"
    @list_files = true

  when "--list-foi"
    @list_foi = true

  when "--list-md5-dupes"
    ScanRulesReader.find_duplicated_md5_match_rules(@rules_dirs)
    exit 0

  when "--list-projects"
    projects = ScanRulesReader.discoverable_projects(@rules_dirs)
    if (arg == "verbose") then
      puts "number,name,from,platforms,description"
      projects.each_with_index do |p, i|
        puts "#{i+1},#{p.name},#{p.from},#{p.operating_systems.to_a.inspect.gsub(", ", "|")},#{p.desc}"
      end
    else
      names = projects.collect{|p| p.name}.to_set.sort
      names.each_with_index do |name, i|
        puts "#{i+1},#{name}"
      end
    end
    exit 0

  when "--list-tag"
    printf("Unique Machine Tag (ID): %s\n", @machine_id )
    exit 0

  when "--machine-results"
    # Test access to the results directory/filename before performing any scan
    # (same rationale as --human-results above).
    @machine_results = arg
    begin
      File.open(@machine_results, "a") {|file|}
    rescue StandardError => e
      puts "ERROR: Unable to write to file: '#{@machine_results}'"
      if ( !(File.directory?( File.dirname(@machine_results) ) ) )
        puts "The directory " + File.dirname( @machine_results ) + " does not exist\n"
      end
      # BUGFIX: failure path exits non-zero (was exit 0)
      exit 1
    end

  when "--nofollow"
    @follow_symlinks = false

  when "--path"
    if ( !validate_directory_to_scan( arg ) )
      exit 1
    end

  when "--progress"
    @show_progress = true
    if ( arg != "" )
      # TODO validate argument to be a positive integer > 50
      @show_every = arg.to_i
    end

  when "--preview-results"
    @preview_results = true

  when "--production-scan"
    @production_scan = true
    @@log.info('Discovery') {'This scan will be identified as a production scan.'}

  when "--rule-version"
    print_rule_version_info
    exit 0

  when "--throttle"
    @throttling_enabled = true
    @@log.info('Discovery') {'Throttling has been enabled.'}

  when "--update-rules"
    if (arg == nil || arg == "") then
      @update_rules = true
      @update_rules_and_do_scan = false
    elsif (arg == "scan")
      @update_rules = true
      @update_rules_and_do_scan = true
    else
      puts "The only valid arg for the '--update-rules' option is 'scan'. You provided an arg of '#{arg}'."
      exit 1
    end

  when "--verbose"
    @show_verbose = true
    if ( arg != "" )
      # TODO validate argument to be a positive integer > 50
      @show_every = arg.to_i
    end

  when "--version"
    printf("%s\n", version() )
    exit 0
  end # case
end # options do

# rescue Exception => e
#   printf("Unsupported option. Please review the list of supported options and usage:\n")
#   @@log.error('Discovery') {"Unsupported option. Please review the list of supported options and usage: #{$!}"}
#   @@log.error('Discovery') {"#{e.message}\n#{e.backtrace}"}
#   puts "#{e.message}\n#{e.backtrace}"
#   help()
#   exit 1
# end

# interpret any leftover arguments as the override path
if ( ARGV.size > 0 )
  if ( ARGV[0] != "" )
    validate_directory_to_scan( ARGV[0] )
  end
end

#----------------------------- do the business -------------------------------------

# If this is running under jruby, we ignore the --nofollow preference and manually set
# symlinks to not be followed. Jruby has a lot of problems with symlinks, so we have to
# completely ignore them unless running in native ruby.
@follow_symlinks = false if RUBY_PLATFORM =~ /java/

#if RUBY_PLATFORM =~ /java/
#  require 'java'
#  puts "Java Version: #{java.lang.System.getProperty('java.version')}"
#end

# Immediately check to see if the machine results output file is writeable. If it is not, don't be a hack and do the scan anyway.
begin
  File.open(@machine_results, "w") {|file|}
rescue StandardError => e
  puts "ERROR: Unable to write to machine results file: '#{@machine_results}'. This file must be writeable before a scan can be performed."
  exit 1
end

if (@update_rules) then
  do_a_scan = "Finished getting the updated rules, going on to perform a scan.\n"
  just_update_rules = "Finished getting the updated rules, no scan being performed.\n"

  # get the updated rules from the server
  begin
    printf("Getting the updated scan rules from the server.\n")
    update_scan_rules()
  rescue => e
    error_msg = "An error occured while attempting to get the updated scan rules.\n"
    error_msg << " error: #{e.message}\n"
    error_msg << " The original scan rules should still be in affect.\n"
    printf(error_msg)
    @@log.error(e.inspect + e.backtrace.inspect.gsub("[\"", "\n\t[\"").gsub(", ", ",\n\t ")) # if there's a better/easier way to get a readable exception trace, I don't know what it is
    do_a_scan = "Going on to perform a scan using the original scan rules.\n"
    just_update_rules = "No scan being performed.\n"
  end

  if (@update_rules_and_do_scan) then
    # go on and do the business below starting with 'execute()'
    printf(do_a_scan)
  else
    printf(just_update_rules)
    exit 0
  end
end

# execute a scan
execute

# scan is complete, do a simple report based projects evaluated by the rule engine - this 'report' method is in cliutils.rb
@packages = @rule_engine.scan_complete

def make_reports
  # human readable report
  report @packages

  if @produce_match_audit_records
    report_audit_records @rule_engine.audit_records
  end

  if ( @geography.to_i < 1 || @geography.to_i > 9 )
    @geography = ""
  end

  # machine_report method is no longer defined in cliutils.rb -- see the 'TODO technical debt' in census_utils.rb
  if (Object.respond_to?(:machine_report, true)) then
    # deal with machine reports and sending results if allowed
    machine_report(@machine_results, @packages, version, @machine_id, @walker.dir_ct, @walker.file_ct,
                   @walker.sym_link_ct, @walker.permission_denied_ct, @walker.foi_ct, @starttime, @endtime,
                   @distro, @os_family, @os, @os_version, @os_architecture, @kernel, @production_scan,
                   @include_paths, @preview_results, @census_code, @universal_rules_md5,
                   @universal_rules_version, @geography )
  end
end

make_reports

if @send_results
  deliver_results @machine_results
end

puts "Scan complete"
exit 0
moved sensors to under api/
##
## Copyright [2013-2015] [Megam Systems]
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
module Api
  # Thin API wrapper that fetches sensor readings via APIDispatch and exposes
  # them as an array of plain hashes sorted chronologically.
  class Sensors < APIDispatch
    attr_reader :sensors

    def initialize
      @sensors = []
    end

    # Fetches the sensor list from the API, caches the hash representation in
    # @sensors, yields self when a block is given, and returns self for chaining.
    def list(api_params, &_block)
      raw = api_request(api_params, SENSORS, LIST)
      @sensors = to_hash(raw[:body])
      yield self if block_given?
      self
    end

    # Converts the API sensor collection into hashes ordered by creation time.
    #
    # BUGFIX: the previous implementation sorted by the already-formatted
    # RFC 822 string ("Fri, ..." sorts before "Mon, ..."), which is
    # lexicographic, not chronological. Sort on the raw timestamp first,
    # then format for display.
    def to_hash(sensors_collection)
      sensors_collection
        .sort_by { |sensor| sensor.created_at.to_time }
        .map do |sensor|
          {
            sensor_type: sensor.sensor_type,
            payload: sensor.payload,
            created_at: sensor.created_at.to_time.to_formatted_s(:rfc822)
          }
        end
    end
  end
end
# Bot that publishes a translation to Twitter as a tweet with an attached
# screenshot of the Bridge Reader embed.
#
# NOTE(review): this class appeared twice in the file (pre/post the "Higher
# timeout for the screenshot" change); only the final revision is kept.
class Bot::Twitter < ActiveRecord::Base
  include Bot::SocialBot

  attr_accessor :twitter_client

  # The canonical bot record used for background delivery.
  def self.default
    Bot::Twitter.where(name: 'Twitter Bot').last
  end

  # Enqueues delivery of the annotation via the SocialBot background machinery.
  def send_to_twitter_in_background(annotation)
    self.send_to_social_network_in_background(:send_to_twitter, annotation)
  end

  # Background-job entry point: resolves the translation annotation by id and
  # delivers it through the default bot.
  def self.send_to_twitter(annotation_id)
    translation = Dynamic.where(id: annotation_id, annotation_type: 'translation').last
    Bot::Twitter.default.send_to_twitter(translation)
  end

  # Posts the translation text plus an embed screenshot to Twitter and
  # returns the tweet URL as a String.
  def send_to_twitter(translation)
    send_to_social_network 'twitter', translation do
      auth = self.get_auth('twitter')

      self.twitter_client = Twitter::REST::Client.new do |config|
        config.consumer_key = CONFIG['twitter_consumer_key']
        config.consumer_secret = CONFIG['twitter_consumer_secret']
        config.access_token = auth['token']
        config.access_token_secret = auth['secret']
      end

      text = self.format_for_twitter(self.text)
      image = self.get_screenshot_for_twitter
      tweet = self.twitter_client.update_with_media(text, File.new(image))
      FileUtils.rm(image)
      tweet.url.to_s
    end
  end

  protected

  # Downloads a PNG screenshot of the private embed into a temp file and
  # returns its path. Falls back to the bundled default image if the Reader
  # is unreachable (generous 60s read timeout for slow screenshot renders).
  def get_screenshot_for_twitter
    require 'open-uri'
    require 'tmpdir' # Dir::tmpdir is defined by the tmpdir stdlib
    url = self.embed_url(:private, :png)
    path = File.join(Dir::tmpdir, "#{Time.now.to_i}_#{rand(100000)}.png")
    # Try to get screenshot from Reader... if it doesn't work, use a default image
    begin
      IO.copy_stream(open(url, { ssl_verify_mode: OpenSSL::SSL::VERIFY_NONE, read_timeout: 60 }), path)
    rescue
      FileUtils.cp File.join(Rails.root, 'public', 'images', 'bridge.png'), path
    end
    path
  end

  # Length Twitter reserves for a t.co https link (plus one for the space).
  # FIX: the cache option is `expires_in`; the previous `expire_in` was
  # silently ignored by Rails.cache.fetch, so the value was cached forever.
  def twitter_url_size
    Rails.cache.fetch('twitter_short_url_length', expires_in: 24.hours) do
      self.twitter_client.configuration.short_url_length_https + 1
    end
  end

  # Truncates the text so that it, the Reader URL and the media URL fit in
  # the 140-character tweet limit.
  def format_for_twitter(text)
    url = self.embed_url
    size = 140 - self.twitter_url_size * 2 # one URL for Bridge Reader and another one for the attached image
    text.truncate(size) + ' ' + url.to_s
  end
end
#
# Copyright (C) 2011 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#

# A ContentTag links a piece of content (assignment, quiz, page, outcome, ...)
# into a context such as a course module or a learning-outcome group, and keeps
# the tag's title/workflow state in sync with the underlying asset.
class ContentTag < ActiveRecord::Base
  # Raised by #destroy when the tag is the last link to an outcome that still
  # has alignments.
  class LastLinkToOutcomeNotDestroyed < StandardError
  end

  TABLED_CONTENT_TYPES = ['Attachment', 'Assignment', 'WikiPage', 'Quizzes::Quiz', 'LearningOutcome', 'DiscussionTopic', 'Rubric', 'ContextExternalTool', 'LearningOutcomeGroup', 'AssessmentQuestionBank', 'LiveAssessments::Assessment', 'Lti::MessageHandler'].freeze
  # Content types with no backing table row (pure module items).
  TABLELESS_CONTENT_TYPES = ['ContextModuleSubHeader', 'ExternalUrl'].freeze
  CONTENT_TYPES = (TABLED_CONTENT_TYPES + TABLELESS_CONTENT_TYPES).freeze

  include Workflow
  include SearchTermHelper

  include MasterCourses::Restrictor
  restrict_columns :state, [:workflow_state]

  belongs_to :content, polymorphic: [], exhaustive: false
  validates_inclusion_of :content_type, :allow_nil => true, :in => CONTENT_TYPES
  belongs_to :context, polymorphic: [:course, :learning_outcome_group, :assignment, :account, { quiz: 'Quizzes::Quiz' }]
  belongs_to :associated_asset, polymorphic: [:learning_outcome_group], polymorphic_prefix: true
  belongs_to :context_module
  belongs_to :learning_outcome
  # This allows doing a has_many_through relationship on ContentTags for linked LearningOutcomes. (see LearningOutcomeContext)
  belongs_to :learning_outcome_content, :class_name => 'LearningOutcome', :foreign_key => :content_id
  has_many :learning_outcome_results

  # This allows bypassing loading context for validation if we have
  # context_id and context_type set, but still allows validating when
  # context is not yet saved.
  validates_presence_of :context, :unless => proc { |tag| tag.context_id && tag.context_type }
  validates_presence_of :workflow_state
  validates_length_of :comments, :maximum => maximum_text_length, :allow_nil => true, :allow_blank => true
  before_save :associate_external_tool
  before_save :default_values
  after_save :update_could_be_locked
  after_save :touch_context_module_after_transaction
  after_save :touch_context_if_learning_outcome

  include CustomValidations
  validates_as_url :url

  validate :check_for_restricted_content_changes

  acts_as_list :scope => :context_module

  set_policy do
    given { |user, session| self.context && self.context.grants_right?(user, session, :manage_content) }
    can :delete
  end

  workflow do
    state :active do
      event :unpublish, :transitions_to => :unpublished
    end
    state :unpublished do
      event :publish, :transitions_to => :active
    end
    state :deleted
  end

  alias_method :published?, :active?

  scope :active, -> { where(:workflow_state => 'active') }
  scope :not_deleted, -> { where("content_tags.workflow_state<>'deleted'") }

  # When truthy, suppresses the module/context touch callbacks (bulk updates).
  attr_accessor :skip_touch

  def touch_context_module
    return true if skip_touch.present?
    ContentTag.touch_context_modules([self.context_module_id])
  end

  def touch_context_module_after_transaction
    self.class.connection.after_transaction_commit { touch_context_module }
  end
  private :touch_context_module_after_transaction

  # Bumps updated_at on the given modules; uses update_all for the single-id
  # fast path and touch_all for batches.
  def self.touch_context_modules(ids=[])
    if ids.length == 1
      ContextModule.where(id: ids).update_all(updated_at: Time.now.utc)
    elsif ids.empty?
      # do nothing
    else
      ContextModule.where(id: ids).touch_all
    end
    true
  end

  # Outcome links/alignments also invalidate their context's caches.
  def touch_context_if_learning_outcome
    if (self.tag_type == 'learning_outcome_association' || self.tag_type == 'learning_outcome') && skip_touch.blank?
      self.context_type.constantize.where(:id => self.context_id).update_all(:updated_at => Time.now.utc)
    end
  end

  # before_save: resolve an external-tool tag's content from its URL when the
  # content association wasn't set explicitly.
  def associate_external_tool
    return if content.present? || content_type != 'ContextExternalTool' || context.blank? || url.blank?
    content = ContextExternalTool.find_external_tool(url, context)
    self.content = content if content
  end

  # before_save: derive title/comments/context_code defaults from the asset.
  def default_values
    self.title ||= self.content.title rescue nil
    self.title ||= self.content.name rescue nil
    self.title ||= self.content.display_name rescue nil
    self.title ||= t(:no_title, "No title")
    self.comments ||= ""
    self.comments = "" if self.comments == "Comments"
    self.context_code = "#{self.context_type.to_s.underscore}_#{self.context_id}"
  end
  protected :default_values

  def context_code
    read_attribute(:context_code) || "#{self.context_type.to_s.underscore}_#{self.context_id}" rescue nil
  end

  def context_name
    self.context.name rescue ""
  end

  def update_could_be_locked
    ContentTag.update_could_be_locked([self]) unless skip_touch.present?
    true
  end

  # Flags the tagged assets as possibly locked so lock checks re-run for them.
  def self.update_could_be_locked(tags=[])
    content_ids = {}
    tags.each do |t|
      (content_ids[t.content_type] ||= []) << t.content_id if t.content_type && t.content_id
    end
    content_ids.each do |type, ids|
      klass = type.constantize
      next unless klass < ActiveRecord::Base
      next if klass < Tableless
      if klass.new.respond_to?(:could_be_locked=)
        klass.where(:id => ids).update_all(:could_be_locked => true)
      end
    end
  end

  def confirm_valid_module_requirements
    self.context_module && self.context_module.confirm_valid_requirements
  end

  def scoreable?
    self.content_type_quiz? || self.graded?
  end

  def graded?
    return true if self.content_type == 'Assignment'
    return false if self.content_type == 'WikiPage'
    return false unless self.can_have_assignment?
    return content && !content.assignment_id.nil?
  end

  def duplicate_able?
    case self.content_type_class
    when 'assignment'
      content&.can_duplicate?
    when 'discussion_topic', 'wiki_page'
      true
    else
      false
    end
  end

  # Underscored UI class for the tag; assignments masquerading as quizzes or
  # graded discussions are reported as their effective type.
  def content_type_class
    if self.content_type == 'Assignment'
      if self.content && self.content.submission_types == 'online_quiz'
        'quiz'
      elsif self.content && self.content.submission_types == 'discussion_topic'
        'discussion_topic'
      else
        'assignment'
      end
    elsif self.content_type == 'Quizzes::Quiz'
      'quiz'
    else
      self.content_type.underscore
    end
  rescue
    (self.content_type || "").underscore
  end

  def item_class
    (self.content_type || "").gsub(/\A[A-Za-z]+::/, '') + '_' + self.content_id.to_s
  end

  def can_have_assignment?
    ['Assignment', 'DiscussionTopic', 'Quizzes::Quiz', 'WikiPage'].include?(self.content_type)
  end

  # The assignment backing this tag, directly or via the tagged asset.
  def assignment
    if self.content_type == 'Assignment'
      self.content
    elsif can_have_assignment?
      self.content&.assignment
    else
      nil
    end
  end

  alias_method :old_content, :content
  # Tableless types (sub-headers, external URLs) have no content record.
  def content
    TABLELESS_CONTENT_TYPES.include?(self.content_type) ? nil : old_content
  end

  def content_or_self
    content || self
  end

  # Truncates the tag title to fit the asset's column limit (multibyte-safe).
  def asset_safe_title(column)
    name = self.title.to_s
    if (limit = self.content.class.try(:columns_hash)[column].try(:limit)) && name.length > limit
      name = name[0, limit][/.{0,#{limit}}/mu]
    end
    name
  end

  # Maps an arbitrary asset's publication status onto tag workflow states.
  def self.asset_workflow_state(asset)
    if asset.respond_to?(:published?)
      if asset.respond_to?(:deleted?) && asset.deleted?
        'deleted'
      elsif asset.published?
        'active'
      else
        'unpublished'
      end
    else
      if asset.respond_to?(:workflow_state)
        workflow_state = asset.workflow_state.to_s
        if ['active', 'available', 'published'].include?(workflow_state)
          'active'
        elsif ['unpublished', 'deleted'].include?(workflow_state)
          workflow_state
        end
      else
        nil
      end
    end
  end

  def asset_workflow_state
    ContentTag.asset_workflow_state(self.content)
  end

  def asset_context_matches?
    self.content && self.content.respond_to?(:context) && self.content.context == context
  end

  # Pushes the tag's title down onto the tagged asset.
  def update_asset_name!(user=nil)
    return unless self.sync_title_to_asset_title?
    return unless self.asset_context_matches?
    # Assignment proxies name= and name to title= and title, which breaks the asset_safe_title logic
    if content.respond_to?("name=") && content.respond_to?("name") && !content.is_a?(Assignment)
      content.name = asset_safe_title('name')
    elsif content.respond_to?("title=")
      content.title = asset_safe_title('title')
    elsif content.respond_to?("display_name=")
      content.display_name = asset_safe_title('display_name')
    end
    if content.changed?
      content.user = user if user && content.is_a?(WikiPage)
      content.save
    end
  end

  # Pushes the tag's published/unpublished state down onto the asset.
  def update_asset_workflow_state!
    return unless self.sync_workflow_state_to_asset?
    return unless self.asset_context_matches?
    return unless self.content && self.content.respond_to?(:publish!)
    # update the asset and also update _other_ content tags that point at it
    if self.unpublished? && self.content.published? && self.content.can_unpublish?
      self.content.unpublish!
      self.class.update_for(self.content, exclude_tag: self)
    elsif self.active? && !self.content.published?
      self.content.publish!
      self.class.update_for(self.content, exclude_tag: self)
    end
  end

  def self.delete_for(asset)
    ContentTag.where(content_id: asset, content_type: asset.class.to_s).each { |t| t.destroy }
    ContentTag.where(context_id: asset, context_type: asset.class.to_s).each { |t| t.destroy }
  end

  # A learning-outcome link may only be destroyed when it is not the last link
  # to an outcome that still has alignments. As a side effect, marks whether
  # the (native, now-unlinked) outcome itself should be destroyed.
  def can_destroy?
    # if it's a learning outcome link...
    if self.tag_type == 'learning_outcome_association'
      # and there are no other links to the same outcome in the same context...
      outcome = self.content
      other_link = ContentTag.learning_outcome_links.active.
        where(:context_type => self.context_type, :context_id => self.context_id, :content_id => outcome).
        where("id<>?", self).first
      if !other_link
        # and there are alignments to the outcome (in the link's context for
        # foreign links, in any context for native links)
        alignment_conditions = { :learning_outcome_id => outcome.id }
        native = outcome.context_type == self.context_type && outcome.context_id == self.context_id
        if native
          @should_destroy_outcome = true
        else
          alignment_conditions[:context_id] = self.context_id
          alignment_conditions[:context_type] = self.context_type
        end
        if ContentTag.learning_outcome_alignments.active.where(alignment_conditions).exists?
          # then don't let them delete the link
          return false
        end
      end
    end
    true
  end

  alias_method :destroy_permanently!, :destroy
  # Soft-deletes the tag (workflow_state => 'deleted') and removes any module
  # completion requirement that referenced it.
  def destroy
    unless can_destroy?
      raise LastLinkToOutcomeNotDestroyed.new('Link is the last link to an aligned outcome. Remove the alignment and then try again')
    end
    context_module.remove_completion_requirement(id) if context_module
    self.workflow_state = 'deleted'
    self.save!
    # after deleting the last native link to an unaligned outcome, delete the
    # outcome. we do this here instead of in LearningOutcome#destroy because
    # (a) LearningOutcome#destroy *should* only ever be called from here, and
    # (b) we've already determined other_link and native
    if @should_destroy_outcome
      self.content.destroy
    end
    true
  end

  # FIX (ADMIN-1539): items in deleted modules shouldn't lock content — ignore
  # the module's lock when the module itself has been deleted.
  def locked_for?(user, opts={})
    return unless self.context_module && !self.context_module.deleted?
    self.context_module.locked_for?(user, opts.merge({ :tag => self }))
  end

  # NOTE(review): assumes context_module is present — confirm callers.
  def available_for?(user, opts={})
    self.context_module.available_for?(user, opts.merge({ :tag => self }))
  end

  # Syncs title and workflow_state from the asset to all of its (non-deleted)
  # tags, then touches the affected modules.
  def self.update_for(asset, exclude_tag: nil)
    tags = ContentTag.where(:content_id => asset, :content_type => asset.class.to_s).not_deleted
    tags = tags.where('content_tags.id<>?', exclude_tag.id) if exclude_tag
    tags = tags.select([:id, :tag_type, :content_type, :context_module_id]).to_a
    return if tags.empty?
    module_ids = tags.map(&:context_module_id).compact

    # update title
    tag_ids = tags.select { |t| t.sync_title_to_asset_title? }.map(&:id)
    attr_hash = { :updated_at => Time.now.utc }
    { :display_name => :title, :name => :title, :title => :title }.each do |attr, val|
      attr_hash[val] = asset.send(attr) if asset.respond_to?(attr)
    end
    ContentTag.where(:id => tag_ids).update_all(attr_hash) unless tag_ids.empty?

    # update workflow_state
    tag_ids = tags.select { |t| t.sync_workflow_state_to_asset? }.map(&:id)
    attr_hash = { :updated_at => Time.now.utc }
    workflow_state = asset_workflow_state(asset)
    attr_hash[:workflow_state] = workflow_state if workflow_state
    ContentTag.where(:id => tag_ids).update_all(attr_hash) if attr_hash[:workflow_state] && !tag_ids.empty?

    # update the module timestamp
    ContentTag.touch_context_modules(module_ids)
  end

  def sync_title_to_asset_title?
    self.tag_type != "learning_outcome_association" && !['ContextExternalTool', 'Attachment'].member?(self.content_type)
  end

  def sync_workflow_state_to_asset?
    self.content_type_quiz? || ['Attachment', 'Assignment', 'WikiPage', 'DiscussionTopic'].include?(self.content_type)
  end

  def content_type_quiz?
    Quizzes::Quiz.class_names.include?(self.content_type)
  end

  def content_type_discussion?
    'DiscussionTopic' == self.content_type
  end

  def context_module_action(user, action, points=nil)
    self.context_module.update_for(user, action, self, points) if self.context_module
  end

  def progression_for_user(user)
    context_module.context_module_progressions.where(user_id: user.id).first
  end

  def content_asset_string
    @content_asset_string ||= "#{self.content_type.underscore}_#{self.content_id}"
  end

  def associated_asset_string
    @associated_asset_string ||= "#{self.associated_asset_type.underscore}_#{self.associated_asset_id}"
  end

  # Parses an asset string like "assignment_123" into content_type/content_id.
  def content_asset_string=(val)
    vals = val.split("_")
    id = vals.pop
    type = Context::asset_type_for_string(vals.join("_").classify)
    if type && id && id.to_i > 0
      self.content_type = type.to_s
      self.content_id = id
    end
  end

  def has_rubric_association?
    content.respond_to?(:rubric_association) && content.rubric_association
  end

  scope :for_tagged_url, lambda { |url, tag| where(:url => url, :tag => tag) }
  scope :for_context, lambda { |context|
    case context
    when Account
      select("content_tags.*").
        joins("INNER JOIN ( SELECT DISTINCT ct.id AS content_tag_id FROM #{ContentTag.quoted_table_name} AS ct INNER JOIN #{CourseAccountAssociation.quoted_table_name} AS caa ON caa.course_id = ct.context_id AND ct.context_type = 'Course' WHERE caa.account_id = #{context.id} UNION SELECT ct.id AS content_tag_id FROM #{ContentTag.quoted_table_name} AS ct WHERE ct.context_id = #{context.id} AND context_type = 'Account') AS related_content_tags ON related_content_tags.content_tag_id = content_tags.id")
    else
      where(:context_type => context.class.to_s, :context_id => context)
    end
  }
  scope :learning_outcome_alignments, -> { where(:tag_type => 'learning_outcome') }
  scope :learning_outcome_links, -> { where(:tag_type => 'learning_outcome_association', :associated_asset_type => 'LearningOutcomeGroup', :content_type => 'LearningOutcome') }

  # Scopes For Differentiated Assignment Filtering:
  scope :visible_to_students_in_course_with_da, lambda { |user_ids, course_ids|
    differentiable_classes = ['Assignment','DiscussionTopic', 'Quiz','Quizzes::Quiz', 'WikiPage']
    scope = for_non_differentiable_classes(course_ids, differentiable_classes)
    non_cyoe_courses = Course.where(id: course_ids).reject { |course| ConditionalRelease::Service.enabled_in_context?(course) }
    if non_cyoe_courses
      scope = scope.union(where(context_id: non_cyoe_courses, context_type: 'Course', content_type: 'WikiPage'))
    end
    scope.union(
      for_non_differentiable_wiki_pages(course_ids),
      for_non_differentiable_discussions(course_ids),
      for_differentiable_assignments(user_ids, course_ids),
      for_differentiable_wiki_pages(user_ids, course_ids),
      for_differentiable_discussions(user_ids, course_ids),
      for_differentiable_quizzes(user_ids, course_ids)
    )
  }

  scope :for_non_differentiable_classes, lambda { |course_ids, differentiable_classes|
    where(context_id: course_ids, context_type: 'Course').where.not(content_type: differentiable_classes)
  }

  scope :for_non_differentiable_discussions, lambda { |course_ids|
    joins("JOIN #{DiscussionTopic.quoted_table_name} as dt ON dt.id = content_tags.content_id").
    where("content_tags.context_id IN (?) AND content_tags.context_type = 'Course' AND content_tags.content_type = 'DiscussionTopic' AND dt.assignment_id IS NULL",course_ids)
  }

  scope :for_non_differentiable_wiki_pages, lambda { |course_ids|
    joins("JOIN #{WikiPage.quoted_table_name} as wp ON wp.id = content_tags.content_id").
    where("content_tags.context_id IN (?) AND content_tags.context_type = 'Course' AND content_tags.content_type = 'WikiPage' AND wp.assignment_id IS NULL", course_ids)
  }

  scope :for_differentiable_quizzes, lambda { |user_ids, course_ids|
    joins("JOIN #{Quizzes::QuizStudentVisibility.quoted_table_name} as qsv ON qsv.quiz_id = content_tags.content_id").
    where("content_tags.context_id IN (?) AND content_tags.context_type = 'Course' AND qsv.course_id IN (?) AND content_tags.content_type in ('Quiz', 'Quizzes::Quiz') AND qsv.user_id = ANY( '{?}'::INT8[] ) ",course_ids,course_ids,user_ids)
  }

  scope :for_differentiable_assignments, lambda { |user_ids, course_ids|
    joins("JOIN #{AssignmentStudentVisibility.quoted_table_name} as asv ON asv.assignment_id = content_tags.content_id").
    where("content_tags.context_id IN (?) AND content_tags.context_type = 'Course' AND asv.course_id IN (?) AND content_tags.content_type = 'Assignment' AND asv.user_id = ANY( '{?}'::INT8[] ) ",course_ids,course_ids,user_ids)
  }

  scope :for_differentiable_discussions, lambda { |user_ids, course_ids|
    joins("JOIN #{DiscussionTopic.quoted_table_name} as dt ON dt.id = content_tags.content_id AND content_tags.content_type = 'DiscussionTopic'").
    joins("JOIN #{AssignmentStudentVisibility.quoted_table_name} as asv ON asv.assignment_id = dt.assignment_id").
    where("content_tags.context_id IN (?) AND content_tags.context_type = 'Course' AND asv.course_id IN (?) AND content_tags.content_type = 'DiscussionTopic' AND dt.assignment_id IS NOT NULL AND asv.user_id = ANY( '{?}'::INT8[] ) ",course_ids,course_ids,user_ids)
  }

  scope :for_differentiable_wiki_pages, lambda { |user_ids, course_ids|
    joins("JOIN #{WikiPage.quoted_table_name} as wp on wp.id = content_tags.content_id AND content_tags.content_type = 'WikiPage'").
    joins("JOIN #{AssignmentStudentVisibility.quoted_table_name} as asv on asv.assignment_id = wp.assignment_id").
    where("content_tags.context_id IN (?) AND content_tags.context_type = 'Course' AND asv.course_id in (?) AND content_tags.content_type = 'WikiPage' AND wp.assignment_id IS NOT NULL AND asv.user_id = ANY( '{?}'::INT8[] ) ",course_ids,course_ids,user_ids)
  }

  # only intended for learning outcome links
  def self.outcome_title_order_by_clause
    best_unicode_collation_key("learning_outcomes.short_description")
  end

  def self.order_by_outcome_title
    eager_load(:learning_outcome_content).order(outcome_title_order_by_clause)
  end

  # Whether the given user can see this module item, honoring module
  # visibility, publication state, and per-asset visibility rules.
  def visible_to_user?(user, opts=nil)
    return unless self.context_module
    opts ||= self.context_module.visibility_for_user(user)
    return false unless opts[:can_read]
    return true if opts[:can_read_as_admin]
    return false unless self.published?
    if self.assignment
      self.assignment.visible_to_user?(user, opts)
    elsif self.content_type_quiz?
      self.content.visible_to_user?(user, opts)
    else
      true
    end
  end

  def mark_as_importing!(migration)
    @importing_migration = migration
  end

  # Blocks title edits on child-course copies when the Master Course has
  # locked the associated content (unless we're inside a migration import).
  def check_for_restricted_content_changes
    if !self.new_record? && self.title_changed? && !@importing_migration && self.content &&
       self.content.respond_to?(:is_child_content?) && self.content.is_child_content? &&
       self.content.editing_restricted?(:content)
      self.errors.add(:title, "cannot change title - associated content locked by Master Course")
    end
  end
end
# class ContentTag < ActiveRecord::Base class LastLinkToOutcomeNotDestroyed < StandardError end TABLED_CONTENT_TYPES = ['Attachment', 'Assignment', 'WikiPage', 'Quizzes::Quiz', 'LearningOutcome', 'DiscussionTopic', 'Rubric', 'ContextExternalTool', 'LearningOutcomeGroup', 'AssessmentQuestionBank', 'LiveAssessments::Assessment', 'Lti::MessageHandler'].freeze TABLELESS_CONTENT_TYPES = ['ContextModuleSubHeader', 'ExternalUrl'].freeze CONTENT_TYPES = (TABLED_CONTENT_TYPES + TABLELESS_CONTENT_TYPES).freeze include Workflow include SearchTermHelper include MasterCourses::Restrictor restrict_columns :state, [:workflow_state] belongs_to :content, polymorphic: [], exhaustive: false validates_inclusion_of :content_type, :allow_nil => true, :in => CONTENT_TYPES belongs_to :context, polymorphic: [:course, :learning_outcome_group, :assignment, :account, { quiz: 'Quizzes::Quiz' }] belongs_to :associated_asset, polymorphic: [:learning_outcome_group], polymorphic_prefix: true belongs_to :context_module belongs_to :learning_outcome # This allows doing a has_many_through relationship on ContentTags for linked LearningOutcomes. (see LearningOutcomeContext) belongs_to :learning_outcome_content, :class_name => 'LearningOutcome', :foreign_key => :content_id has_many :learning_outcome_results # This allows bypassing loading context for validation if we have # context_id and context_type set, but still allows validating when # context is not yet saved. 
validates_presence_of :context, :unless => proc { |tag| tag.context_id && tag.context_type } validates_presence_of :workflow_state validates_length_of :comments, :maximum => maximum_text_length, :allow_nil => true, :allow_blank => true before_save :associate_external_tool before_save :default_values after_save :update_could_be_locked after_save :touch_context_module_after_transaction after_save :touch_context_if_learning_outcome include CustomValidations validates_as_url :url validate :check_for_restricted_content_changes acts_as_list :scope => :context_module set_policy do given {|user, session| self.context && self.context.grants_right?(user, session, :manage_content)} can :delete end workflow do state :active do event :unpublish, :transitions_to => :unpublished end state :unpublished do event :publish, :transitions_to => :active end state :deleted end alias_method :published?, :active? scope :active, -> { where(:workflow_state => 'active') } scope :not_deleted, -> { where("content_tags.workflow_state<>'deleted'") } attr_accessor :skip_touch def touch_context_module return true if skip_touch.present? ContentTag.touch_context_modules([self.context_module_id]) end def touch_context_module_after_transaction self.class.connection.after_transaction_commit { touch_context_module } end private :touch_context_module_after_transaction def self.touch_context_modules(ids=[]) if ids.length == 1 ContextModule.where(id: ids).update_all(updated_at: Time.now.utc) elsif ids.empty? # do nothing else ContextModule.where(id: ids).touch_all end true end def touch_context_if_learning_outcome if (self.tag_type == 'learning_outcome_association' || self.tag_type == 'learning_outcome') && skip_touch.blank? self.context_type.constantize.where(:id => self.context_id).update_all(:updated_at => Time.now.utc) end end def associate_external_tool return if content.present? || content_type != 'ContextExternalTool' || context.blank? || url.blank? 
content = ContextExternalTool.find_external_tool(url, context) self.content = content if content end def default_values self.title ||= self.content.title rescue nil self.title ||= self.content.name rescue nil self.title ||= self.content.display_name rescue nil self.title ||= t(:no_title, "No title") self.comments ||= "" self.comments = "" if self.comments == "Comments" self.context_code = "#{self.context_type.to_s.underscore}_#{self.context_id}" end protected :default_values def context_code read_attribute(:context_code) || "#{self.context_type.to_s.underscore}_#{self.context_id}" rescue nil end def context_name self.context.name rescue "" end def update_could_be_locked ContentTag.update_could_be_locked([self]) unless skip_touch.present? true end def self.update_could_be_locked(tags=[]) content_ids = {} tags.each do |t| (content_ids[t.content_type] ||= []) << t.content_id if t.content_type && t.content_id end content_ids.each do |type, ids| klass = type.constantize next unless klass < ActiveRecord::Base next if klass < Tableless if klass.new.respond_to?(:could_be_locked=) klass.where(:id => ids).update_all(:could_be_locked => true) end end end def confirm_valid_module_requirements self.context_module && self.context_module.confirm_valid_requirements end def scoreable? self.content_type_quiz? || self.graded? end def graded? return true if self.content_type == 'Assignment' return false if self.content_type == 'WikiPage' return false unless self.can_have_assignment? return content && !content.assignment_id.nil? end def duplicate_able? case self.content_type_class when 'assignment' content&.can_duplicate? 
when 'discussion_topic', 'wiki_page' true else false end end def content_type_class if self.content_type == 'Assignment' if self.content && self.content.submission_types == 'online_quiz' 'quiz' elsif self.content && self.content.submission_types == 'discussion_topic' 'discussion_topic' else 'assignment' end elsif self.content_type == 'Quizzes::Quiz' 'quiz' else self.content_type.underscore end rescue (self.content_type || "").underscore end def item_class (self.content_type || "").gsub(/\A[A-Za-z]+::/, '') + '_' + self.content_id.to_s end def can_have_assignment? ['Assignment', 'DiscussionTopic', 'Quizzes::Quiz', 'WikiPage'].include?(self.content_type) end def assignment if self.content_type == 'Assignment' self.content elsif can_have_assignment? self.content&.assignment else nil end end alias_method :old_content, :content def content TABLELESS_CONTENT_TYPES.include?(self.content_type) ? nil : old_content end def content_or_self content || self end def asset_safe_title(column) name = self.title.to_s if (limit = self.content.class.try(:columns_hash)[column].try(:limit)) && name.length > limit name = name[0, limit][/.{0,#{limit}}/mu] end name end def self.asset_workflow_state(asset) if asset.respond_to?(:published?) if asset.respond_to?(:deleted?) && asset.deleted? 'deleted' elsif asset.published? 'active' else 'unpublished' end else if asset.respond_to?(:workflow_state) workflow_state = asset.workflow_state.to_s if ['active', 'available', 'published'].include?(workflow_state) 'active' elsif ['unpublished', 'deleted'].include?(workflow_state) workflow_state end else nil end end end def asset_workflow_state ContentTag.asset_workflow_state(self.content) end def asset_context_matches? self.content && self.content.respond_to?(:context) && self.content.context == context end def update_asset_name!(user=nil) return unless self.sync_title_to_asset_title? return unless self.asset_context_matches? 
# Assignment proxies name= and name to title= and title, which breaks the asset_safe_title logic if content.respond_to?("name=") && content.respond_to?("name") && !content.is_a?(Assignment) content.name = asset_safe_title('name') elsif content.respond_to?("title=") content.title = asset_safe_title('title') elsif content.respond_to?("display_name=") content.display_name = asset_safe_title('display_name') end if content.changed? content.user = user if user && content.is_a?(WikiPage) content.save end end def update_asset_workflow_state! return unless self.sync_workflow_state_to_asset? return unless self.asset_context_matches? return unless self.content && self.content.respond_to?(:publish!) # update the asset and also update _other_ content tags that point at it if self.unpublished? && self.content.published? && self.content.can_unpublish? self.content.unpublish! self.class.update_for(self.content, exclude_tag: self) elsif self.active? && !self.content.published? self.content.publish! self.class.update_for(self.content, exclude_tag: self) end end def self.delete_for(asset) ContentTag.where(content_id: asset, content_type: asset.class.to_s).each{|t| t.destroy } ContentTag.where(context_id: asset, context_type: asset.class.to_s).each{|t| t.destroy } end def can_destroy? # if it's a learning outcome link... if self.tag_type == 'learning_outcome_association' # and there are no other links to the same outcome in the same context... outcome = self.content other_link = ContentTag.learning_outcome_links.active. where(:context_type => self.context_type, :context_id => self.context_id, :content_id => outcome). 
where("id<>?", self).first if !other_link # and there are alignments to the outcome (in the link's context for # foreign links, in any context for native links) alignment_conditions = { :learning_outcome_id => outcome.id } native = outcome.context_type == self.context_type && outcome.context_id == self.context_id if native @should_destroy_outcome = true else alignment_conditions[:context_id] = self.context_id alignment_conditions[:context_type] = self.context_type end if ContentTag.learning_outcome_alignments.active.where(alignment_conditions).exists? # then don't let them delete the link return false end end end true end alias_method :destroy_permanently!, :destroy def destroy unless can_destroy? raise LastLinkToOutcomeNotDestroyed.new('Link is the last link to an aligned outcome. Remove the alignment and then try again') end context_module.remove_completion_requirement(id) if context_module self.workflow_state = 'deleted' self.save! # after deleting the last native link to an unaligned outcome, delete the # outcome. we do this here instead of in LearningOutcome#destroy because # (a) LearningOutcome#destroy *should* only ever be called from here, and # (b) we've already determined other_link and native if @should_destroy_outcome self.content.destroy end true end def locked_for?(user, opts={}) return unless self.context_module && !self.context_module.deleted? self.context_module.locked_for?(user, opts.merge({:tag => self})) end def available_for?(user, opts={}) self.context_module.available_for?(user, opts.merge({:tag => self})) end def self.update_for(asset, exclude_tag: nil) tags = ContentTag.where(:content_id => asset, :content_type => asset.class.to_s).not_deleted tags = tags.where('content_tags.id<>?', exclude_tag.id) if exclude_tag tags = tags.select([:id, :tag_type, :content_type, :context_module_id]).to_a return if tags.empty? module_ids = tags.map(&:context_module_id).compact # update title tag_ids = tags.select{|t| t.sync_title_to_asset_title? 
}.map(&:id) attr_hash = {:updated_at => Time.now.utc} {:display_name => :title, :name => :title, :title => :title}.each do |attr, val| attr_hash[val] = asset.send(attr) if asset.respond_to?(attr) end ContentTag.where(:id => tag_ids).update_all(attr_hash) unless tag_ids.empty? # update workflow_state tag_ids = tags.select{|t| t.sync_workflow_state_to_asset? }.map(&:id) attr_hash = {:updated_at => Time.now.utc} workflow_state = asset_workflow_state(asset) attr_hash[:workflow_state] = workflow_state if workflow_state ContentTag.where(:id => tag_ids).update_all(attr_hash) if attr_hash[:workflow_state] && !tag_ids.empty? # update the module timestamp ContentTag.touch_context_modules(module_ids) end def sync_title_to_asset_title? self.tag_type != "learning_outcome_association" && !['ContextExternalTool', 'Attachment'].member?(self.content_type) end def sync_workflow_state_to_asset? self.content_type_quiz? || ['Attachment', 'Assignment', 'WikiPage', 'DiscussionTopic'].include?(self.content_type) end def content_type_quiz? Quizzes::Quiz.class_names.include?(self.content_type) end def content_type_discussion? 'DiscussionTopic' == self.content_type end def context_module_action(user, action, points=nil) self.context_module.update_for(user, action, self, points) if self.context_module end def progression_for_user(user) context_module.context_module_progressions.where(user_id: user.id).first end def content_asset_string @content_asset_string ||= "#{self.content_type.underscore}_#{self.content_id}" end def associated_asset_string @associated_asset_string ||= "#{self.associated_asset_type.underscore}_#{self.associated_asset_id}" end def content_asset_string=(val) vals = val.split("_") id = vals.pop type = Context::asset_type_for_string(vals.join("_").classify) if type && id && id.to_i > 0 self.content_type = type.to_s self.content_id = id end end def has_rubric_association? 
content.respond_to?(:rubric_association) && content.rubric_association end scope :for_tagged_url, lambda { |url, tag| where(:url => url, :tag => tag) } scope :for_context, lambda { |context| case context when Account select("content_tags.*"). joins("INNER JOIN ( SELECT DISTINCT ct.id AS content_tag_id FROM #{ContentTag.quoted_table_name} AS ct INNER JOIN #{CourseAccountAssociation.quoted_table_name} AS caa ON caa.course_id = ct.context_id AND ct.context_type = 'Course' WHERE caa.account_id = #{context.id} UNION SELECT ct.id AS content_tag_id FROM #{ContentTag.quoted_table_name} AS ct WHERE ct.context_id = #{context.id} AND context_type = 'Account') AS related_content_tags ON related_content_tags.content_tag_id = content_tags.id") else where(:context_type => context.class.to_s, :context_id => context) end } scope :learning_outcome_alignments, -> { where(:tag_type => 'learning_outcome') } scope :learning_outcome_links, -> { where(:tag_type => 'learning_outcome_association', :associated_asset_type => 'LearningOutcomeGroup', :content_type => 'LearningOutcome') } # Scopes For Differentiated Assignment Filtering: scope :visible_to_students_in_course_with_da, lambda { |user_ids, course_ids| differentiable_classes = ['Assignment','DiscussionTopic', 'Quiz','Quizzes::Quiz', 'WikiPage'] scope = for_non_differentiable_classes(course_ids, differentiable_classes) non_cyoe_courses = Course.where(id: course_ids).reject{|course| ConditionalRelease::Service.enabled_in_context?(course)} if non_cyoe_courses scope = scope.union(where(context_id: non_cyoe_courses, context_type: 'Course', content_type: 'WikiPage')) end scope.union( for_non_differentiable_wiki_pages(course_ids), for_non_differentiable_discussions(course_ids), for_differentiable_assignments(user_ids, course_ids), for_differentiable_wiki_pages(user_ids, course_ids), for_differentiable_discussions(user_ids, course_ids), for_differentiable_quizzes(user_ids, course_ids) ) } scope :for_non_differentiable_classes, lambda 
{|course_ids, differentiable_classes| where(context_id: course_ids, context_type: 'Course').where.not(content_type: differentiable_classes) } scope :for_non_differentiable_discussions, lambda {|course_ids| joins("JOIN #{DiscussionTopic.quoted_table_name} as dt ON dt.id = content_tags.content_id"). where("content_tags.context_id IN (?) AND content_tags.context_type = 'Course' AND content_tags.content_type = 'DiscussionTopic' AND dt.assignment_id IS NULL",course_ids) } scope :for_non_differentiable_wiki_pages, lambda {|course_ids| joins("JOIN #{WikiPage.quoted_table_name} as wp ON wp.id = content_tags.content_id"). where("content_tags.context_id IN (?) AND content_tags.context_type = 'Course' AND content_tags.content_type = 'WikiPage' AND wp.assignment_id IS NULL", course_ids) } scope :for_differentiable_quizzes, lambda {|user_ids, course_ids| joins("JOIN #{Quizzes::QuizStudentVisibility.quoted_table_name} as qsv ON qsv.quiz_id = content_tags.content_id"). where("content_tags.context_id IN (?) AND content_tags.context_type = 'Course' AND qsv.course_id IN (?) AND content_tags.content_type in ('Quiz', 'Quizzes::Quiz') AND qsv.user_id = ANY( '{?}'::INT8[] ) ",course_ids,course_ids,user_ids) } scope :for_differentiable_assignments, lambda {|user_ids, course_ids| joins("JOIN #{AssignmentStudentVisibility.quoted_table_name} as asv ON asv.assignment_id = content_tags.content_id"). where("content_tags.context_id IN (?) AND content_tags.context_type = 'Course' AND asv.course_id IN (?) AND content_tags.content_type = 'Assignment' AND asv.user_id = ANY( '{?}'::INT8[] ) ",course_ids,course_ids,user_ids) } scope :for_differentiable_discussions, lambda {|user_ids, course_ids| joins("JOIN #{DiscussionTopic.quoted_table_name} as dt ON dt.id = content_tags.content_id AND content_tags.content_type = 'DiscussionTopic'"). joins("JOIN #{AssignmentStudentVisibility.quoted_table_name} as asv ON asv.assignment_id = dt.assignment_id"). where("content_tags.context_id IN (?) 
AND content_tags.context_type = 'Course' AND asv.course_id IN (?) AND content_tags.content_type = 'DiscussionTopic' AND dt.assignment_id IS NOT NULL AND asv.user_id = ANY( '{?}'::INT8[] ) ",course_ids,course_ids,user_ids) } scope :for_differentiable_wiki_pages, lambda{|user_ids, course_ids| joins("JOIN #{WikiPage.quoted_table_name} as wp on wp.id = content_tags.content_id AND content_tags.content_type = 'WikiPage'"). joins("JOIN #{AssignmentStudentVisibility.quoted_table_name} as asv on asv.assignment_id = wp.assignment_id"). where("content_tags.context_id IN (?) AND content_tags.context_type = 'Course' AND asv.course_id in (?) AND content_tags.content_type = 'WikiPage' AND wp.assignment_id IS NOT NULL AND asv.user_id = ANY( '{?}'::INT8[] ) ",course_ids,course_ids,user_ids) } # only intended for learning outcome links def self.outcome_title_order_by_clause best_unicode_collation_key("learning_outcomes.short_description") end def self.order_by_outcome_title eager_load(:learning_outcome_content).order(outcome_title_order_by_clause) end def visible_to_user?(user, opts=nil) return unless self.context_module opts ||= self.context_module.visibility_for_user(user) return false unless opts[:can_read] return true if opts[:can_read_as_admin] return false unless self.published? if self.assignment self.assignment.visible_to_user?(user, opts) elsif self.content_type_quiz? self.content.visible_to_user?(user, opts) else true end end def mark_as_importing!(migration) @importing_migration = migration end def check_for_restricted_content_changes if !self.new_record? && self.title_changed? && !@importing_migration && self.content && self.content.respond_to?(:is_child_content?) && self.content.is_child_content? && self.content.editing_restricted?(:content) self.errors.add(:title, "cannot change title - associated content locked by Master Course") end end end
module EmsRefresh extend EmsRefresh::SaveInventory extend EmsRefresh::SaveInventoryBlockStorage extend EmsRefresh::SaveInventoryCloud extend EmsRefresh::SaveInventoryInfra extend EmsRefresh::SaveInventoryContainer extend EmsRefresh::SaveInventoryMiddleware extend EmsRefresh::SaveInventoryDatawarehouse extend EmsRefresh::SaveInventoryNetwork extend EmsRefresh::SaveInventoryObjectStorage extend EmsRefresh::SaveInventoryHelper extend EmsRefresh::SaveInventoryProvisioning extend EmsRefresh::SaveInventoryConfiguration extend EmsRefresh::SaveInventoryAutomation extend EmsRefresh::SaveInventoryOrchestrationStacks extend EmsRefresh::LinkInventory extend EmsRefresh::MetadataRelats extend EmsRefresh::VcUpdates def self.debug_trace # TODO: Replace with configuration option false end # If true, Refreshers will raise any exceptions encountered, instead # of quietly recording them as failures and continuing. mattr_accessor :debug_failures # Development helper method for setting up the selector specs for VC def self.init_console(use_vim_broker = false) ManageIQ::Providers::Vmware::InfraManager::Refresher.init_console(use_vim_broker) end cache_with_timeout(:queue_timeout) { MiqEmsRefreshWorker.worker_settings[:queue_timeout] || 60.minutes } def self.queue_refresh(target, id = nil) # Handle targets passed as a single class/id pair, an array of class/id pairs, or an array of references targets = get_ar_objects(target, id) # Group the target refs by zone and role targets_by_ems = targets.each_with_object(Hash.new { |h, k| h[k] = [] }) do |t, h| e = if t.kind_of?(EmsRefresh::Manager) t elsif t.respond_to?(:ext_management_system) && t.ext_management_system t.ext_management_system elsif t.respond_to?(:manager) && t.manager t.manager elsif t.kind_of?(Host) && t.acts_as_ems? t end h[e] << t unless e.nil? 
end # Queue the refreshes targets_by_ems.collect do |ems, ts| ts = ts.collect { |t| [t.class.to_s, t.id] }.uniq queue_merge(ts, ems) end end def self.queue_refresh_new_target(target_hash, ems) MiqQueue.put( :queue_name => MiqEmsRefreshWorker.queue_name_for_ems(ems), :class_name => name, :method_name => 'refresh_new_target', :role => "ems_inventory", :zone => ems.my_zone, :args => [target_hash, ems.id], :msg_timeout => queue_timeout, :task_id => nil ) end def self.refresh(target, id = nil) EmsRefresh.init_console if defined?(Rails::Console) # Handle targets passed as a single class/id pair, an array of class/id pairs, or an array of references targets = get_ar_objects(target, id) # Split the targets into refresher groups groups = targets.group_by do |t| ems = case when t.respond_to?(:ext_management_system) then t.ext_management_system when t.respond_to?(:manager) then t.manager else t end ems.refresher if ems.respond_to?(:refresher) end # Do the refreshes groups.each do |refresher, group_targets| refresher.refresh(group_targets) if refresher end end def self.refresh_new_target(target_hash, ems_id) ems = ExtManagementSystem.find(ems_id) target = save_new_target(target_hash) if target.nil? _log.warn "Unknown target for event data: #{target_hash}." return end ems.refresher.refresh(get_ar_objects(target)) end def self.get_ar_objects(target, single_id = nil) # Handle targets passed as a single class/id pair, an array of class/id pairs, an array of references target = [[target, single_id]] unless single_id.nil? return [target] unless target.kind_of?(Array) return target unless target[0].kind_of?(Array) # Group by type for a more optimized search targets_by_type = target.each_with_object(Hash.new { |h, k| h[k] = [] }) do |(c, id), h| # Take care of both String or Class type being passed in c = c.to_s.constantize unless c.kind_of?(Class) if [VmOrTemplate, Host, ExtManagementSystem].none? { |k| c <= k } _log.warn "Unknown target type: [#{c}]." 
next end h[c] << id end # Do lookups to get ActiveRecord objects targets_by_type.each_with_object([]) do |(c, ids), a| ids.uniq! recs = c.where(:id => ids) recs = recs.includes(:ext_management_system) unless c <= ExtManagementSystem if recs.length != ids.length missing = ids - recs.collect(&:id) _log.warn "Unable to find a record for [#{c}] ids: #{missing.inspect}." end a.concat(recs) end end def self.queue_merge(targets, ems) task_options = { :action => "EmsRefresh(#{ems.name}) [#{targets}]", :userid => "system" } task = MiqTask.create( :name => task_options[:action], :userid => task_options[:userid], :state => MiqTask::STATE_QUEUED, :status => MiqTask::STATUS_OK, :message => "Queued the action: [#{task_options[:action]}] being run for user: [#{task_options[:userid]}]" ) queue_options = { :queue_name => MiqEmsRefreshWorker.queue_name_for_ems(ems), :class_name => name, :method_name => 'refresh', :role => "ems_inventory", :zone => ems.my_zone, } # Items will be naturally serialized since there is a dedicated worker. MiqQueue.put_or_update(queue_options) do |msg, item| targets = msg.nil? ? targets : (msg.args[0] | targets) task_id = msg && msg.task_id ? msg.task_id.to_i : task.id item.merge( :args => [targets], :task_id => task_id, :msg_timeout => queue_timeout, :miq_callback => { :class_name => task.class.name, :method_name => :queue_callback, :instance_id => task_id, :args => ['Finished'] } ) end task.id end # # Helper methods for advanced debugging # def self.log_inv_debug_trace(inv, log_header, depth = 1) return unless debug_trace inv.each do |k, v| if depth == 1 $log.debug "#{log_header} #{k.inspect}=>#{v.inspect}" else $log.debug "#{log_header} #{k.inspect}=>" log_inv_debug_trace(v, "#{log_header} ", depth - 1) end end end def self.log_format_deletes(deletes) ret = deletes.collect do |d| s = "id: [#{d.id}]" [:name, :product_name, :device_name].each do |k| next unless d.respond_to?(k) v = d.send(k) next if v.nil? 
s << " #{k}: [#{v}]" break end s end ret.join(", ") end end Return the task_id that queued the first refresh if merging module EmsRefresh extend EmsRefresh::SaveInventory extend EmsRefresh::SaveInventoryBlockStorage extend EmsRefresh::SaveInventoryCloud extend EmsRefresh::SaveInventoryInfra extend EmsRefresh::SaveInventoryContainer extend EmsRefresh::SaveInventoryMiddleware extend EmsRefresh::SaveInventoryDatawarehouse extend EmsRefresh::SaveInventoryNetwork extend EmsRefresh::SaveInventoryObjectStorage extend EmsRefresh::SaveInventoryHelper extend EmsRefresh::SaveInventoryProvisioning extend EmsRefresh::SaveInventoryConfiguration extend EmsRefresh::SaveInventoryAutomation extend EmsRefresh::SaveInventoryOrchestrationStacks extend EmsRefresh::LinkInventory extend EmsRefresh::MetadataRelats extend EmsRefresh::VcUpdates def self.debug_trace # TODO: Replace with configuration option false end # If true, Refreshers will raise any exceptions encountered, instead # of quietly recording them as failures and continuing. mattr_accessor :debug_failures # Development helper method for setting up the selector specs for VC def self.init_console(use_vim_broker = false) ManageIQ::Providers::Vmware::InfraManager::Refresher.init_console(use_vim_broker) end cache_with_timeout(:queue_timeout) { MiqEmsRefreshWorker.worker_settings[:queue_timeout] || 60.minutes } def self.queue_refresh(target, id = nil) # Handle targets passed as a single class/id pair, an array of class/id pairs, or an array of references targets = get_ar_objects(target, id) # Group the target refs by zone and role targets_by_ems = targets.each_with_object(Hash.new { |h, k| h[k] = [] }) do |t, h| e = if t.kind_of?(EmsRefresh::Manager) t elsif t.respond_to?(:ext_management_system) && t.ext_management_system t.ext_management_system elsif t.respond_to?(:manager) && t.manager t.manager elsif t.kind_of?(Host) && t.acts_as_ems? t end h[e] << t unless e.nil? 
end # Queue the refreshes targets_by_ems.collect do |ems, ts| ts = ts.collect { |t| [t.class.to_s, t.id] }.uniq queue_merge(ts, ems) end end def self.queue_refresh_new_target(target_hash, ems) MiqQueue.put( :queue_name => MiqEmsRefreshWorker.queue_name_for_ems(ems), :class_name => name, :method_name => 'refresh_new_target', :role => "ems_inventory", :zone => ems.my_zone, :args => [target_hash, ems.id], :msg_timeout => queue_timeout, :task_id => nil ) end def self.refresh(target, id = nil) EmsRefresh.init_console if defined?(Rails::Console) # Handle targets passed as a single class/id pair, an array of class/id pairs, or an array of references targets = get_ar_objects(target, id) # Split the targets into refresher groups groups = targets.group_by do |t| ems = case when t.respond_to?(:ext_management_system) then t.ext_management_system when t.respond_to?(:manager) then t.manager else t end ems.refresher if ems.respond_to?(:refresher) end # Do the refreshes groups.each do |refresher, group_targets| refresher.refresh(group_targets) if refresher end end def self.refresh_new_target(target_hash, ems_id) ems = ExtManagementSystem.find(ems_id) target = save_new_target(target_hash) if target.nil? _log.warn "Unknown target for event data: #{target_hash}." return end ems.refresher.refresh(get_ar_objects(target)) end def self.get_ar_objects(target, single_id = nil) # Handle targets passed as a single class/id pair, an array of class/id pairs, an array of references target = [[target, single_id]] unless single_id.nil? return [target] unless target.kind_of?(Array) return target unless target[0].kind_of?(Array) # Group by type for a more optimized search targets_by_type = target.each_with_object(Hash.new { |h, k| h[k] = [] }) do |(c, id), h| # Take care of both String or Class type being passed in c = c.to_s.constantize unless c.kind_of?(Class) if [VmOrTemplate, Host, ExtManagementSystem].none? { |k| c <= k } _log.warn "Unknown target type: [#{c}]." 
next end h[c] << id end # Do lookups to get ActiveRecord objects targets_by_type.each_with_object([]) do |(c, ids), a| ids.uniq! recs = c.where(:id => ids) recs = recs.includes(:ext_management_system) unless c <= ExtManagementSystem if recs.length != ids.length missing = ids - recs.collect(&:id) _log.warn "Unable to find a record for [#{c}] ids: #{missing.inspect}." end a.concat(recs) end end def self.queue_merge(targets, ems) task_options = { :action => "EmsRefresh(#{ems.name}) [#{targets}]", :userid => "system" } task = MiqTask.create( :name => task_options[:action], :userid => task_options[:userid], :state => MiqTask::STATE_QUEUED, :status => MiqTask::STATUS_OK, :message => "Queued the action: [#{task_options[:action]}] being run for user: [#{task_options[:userid]}]" ) queue_options = { :queue_name => MiqEmsRefreshWorker.queue_name_for_ems(ems), :class_name => name, :method_name => 'refresh', :role => "ems_inventory", :zone => ems.my_zone, } # If this is the only refresh then we will use the task we just created, # if we merge with another queue item then we will return its task_id task_id = nil # Items will be naturally serialized since there is a dedicated worker. MiqQueue.put_or_update(queue_options) do |msg, item| targets = msg.nil? ? targets : (msg.args[0] | targets) task_id = msg && msg.task_id ? 
msg.task_id.to_i : task.id item.merge( :args => [targets], :task_id => task_id, :msg_timeout => queue_timeout, :miq_callback => { :class_name => task.class.name, :method_name => :queue_callback, :instance_id => task_id, :args => ['Finished'] } ) end # If we merged with an existing queue item we don't need a new # task, just use the original one task.delete if task_id != task.id task_id end # # Helper methods for advanced debugging # def self.log_inv_debug_trace(inv, log_header, depth = 1) return unless debug_trace inv.each do |k, v| if depth == 1 $log.debug "#{log_header} #{k.inspect}=>#{v.inspect}" else $log.debug "#{log_header} #{k.inspect}=>" log_inv_debug_trace(v, "#{log_header} ", depth - 1) end end end def self.log_format_deletes(deletes) ret = deletes.collect do |d| s = "id: [#{d.id}]" [:name, :product_name, :device_name].each do |k| next unless d.respond_to?(k) v = d.send(k) next if v.nil? s << " #{k}: [#{v}]" break end s end ret.join(", ") end end
require "flex_commerce_api/api_base" module FlexCommerce # # A flex commerce variant model # # This model is used by the Product model as an association so is not # usable directly on the API as there is no corresponding URL # # class EwisOptIn < FlexCommerceApi::ApiBase end end updates comments require "flex_commerce_api/api_base" module FlexCommerce # A flex commerce Email When In Stock (EWIS) model class EwisOptIn < FlexCommerceApi::ApiBase end end
class Fulfillment include Mongoid::Document include Mongoid::Timestamps include Mongoid::History::Trackable include Mongoid::Userstamp # Relationships embedded_in :patient field :pledge_fulfilled, type: Boolean field :procedure_date, type: Time field :weeks_along, type: String field :abortion_care_cost, type: Integer field :check_number, type: Integer field :date_of_check, type: Time field :check_issued_to, type: String field :date_check_cleared, type: Time # Validations validates :created_by, presence: true # History and auditing track_history on: fields.keys + [:updated_by_id], version_field: :version, track_create: true, track_update: true, track_destroy: true mongoid_userstamp user_model: 'User' end Updated Fulfillment fields Removed check_issued_to and date_check_cleared, updated Time fields to proper Date type. Bumps colinxfleming/dcaf_case_management#732 class Fulfillment include Mongoid::Document include Mongoid::Timestamps include Mongoid::History::Trackable include Mongoid::Userstamp # Relationships embedded_in :patient field :pledge_fulfilled, type: Boolean field :procedure_date, type: Date field :weeks_along, type: String field :abortion_care_cost, type: Integer field :check_number, type: Integer field :date_of_check, type: Date # Validations validates :created_by, presence: true # History and auditing track_history on: fields.keys + [:updated_by_id], version_field: :version, track_create: true, track_update: true, track_destroy: true mongoid_userstamp user_model: 'User' end
require 'securerandom' class Googletrend include ActiveModel::Model include Rss URL_GOOGLE = "https://trends.google.co.jp/trends/hottrends/atom/feed?pn=p4" def self.get trendList = getRssContent(URL_GOOGLE) output = { "value" => { "id" => SecureRandom.uuid, "title" => "google trend", "link" => URL_GOOGLE, "description" => "", "items" => trendList } } output end def hello print "Hello ", @name, ".\n" end end fix id require 'securerandom' class Googletrend include ActiveModel::Model include Rss URL_GOOGLE = "https://trends.google.co.jp/trends/hottrends/atom/feed?pn=p4" def self.get trendList = getRssContent(URL_GOOGLE) output = { "value" => { "title" => "google trend", "link" => URL_GOOGLE, "description" => "", "items" => trendList } } output end def hello print "Hello ", @name, ".\n" end end
class Instruction < ActiveRecord::Base NAMES = { RemoteApp::CLIENT_APP_CREATOR => "Create New App", RemoteApp::CLIENT_HUB_DEPLOYER => "Update Client Hub", RemoteApp::CLIENT_HUB => "Update Client Hub Deployer", RemoteApp::CLIENT_APP_CREATOR_DEPLOYER => "Update Client App Creator", RemoteApp::CLIENT_LEADS_SERVICE => "Placeholder" #FIXME } attr_accessible :target_app_kind, :target_app_ids, :remote_app_id, :body # the apps that should perform the instruction # explicit habtm has_many :instructions_target_apps has_many :target_apps, through: :instructions_target_apps, source: :target_app # the app that will be effected by the instruction # only needed for g5-client-app-creator instructions belongs_to :remote_app validates :target_app_kind, presence: true, inclusion: { in: RemoteApp::KINDS }, allow_blank: true validates :remote_app_id, presence: true, if: :client_app_creator_kind? validate :has_at_least_one_target_app # webhooks make things speedy after_save :async_webhook_target_apps def created_at_computer_readable created_at.utc.to_s(:computer) end def created_at_human_readable created_at.to_s(:human) end def updated_at_computer_readable updated_at.utc.to_s(:computer) end def updated_at_human_readable updated_at.to_s(:human) end def name NAMES[target_app_kind] end def async_webhook_target_apps Resque.enqueue(InstructionWebhooker, self.id) end def webhook_target_apps target_apps.pluck(:heroku_app_name).each do |heroku_app_name| begin Webhook.post("http://#{heroku_app_name}.herokuapp.com/webhooks/g5-configurator") rescue ArgumentError => e logger.error e end end end def client_app_creator_kind? 
target_app_kind == RemoteApp::CLIENT_APP_CREATOR end private def has_at_least_one_target_app unless target_app_ids.length >= 1 errors[:target_app_ids] = "can't be blank" end end end fixes target webhook spec class Instruction < ActiveRecord::Base NAMES = { RemoteApp::CLIENT_APP_CREATOR => "Create New App", RemoteApp::CLIENT_HUB_DEPLOYER => "Update Client Hub", RemoteApp::CLIENT_HUB => "Update Client Hub Deployer", RemoteApp::CLIENT_APP_CREATOR_DEPLOYER => "Update Client App Creator", RemoteApp::CLIENT_LEADS_SERVICE => "Placeholder" #FIXME } attr_accessible :target_app_kind, :target_app_ids, :remote_app_id, :body # the apps that should perform the instruction # explicit habtm has_many :instructions_target_apps has_many :target_apps, through: :instructions_target_apps, source: :target_app # the app that will be effected by the instruction # only needed for g5-client-app-creator instructions belongs_to :remote_app validates :target_app_kind, presence: true, inclusion: { in: RemoteApp::KINDS }, allow_blank: true validates :remote_app_id, presence: true, if: :client_app_creator_kind? validate :has_at_least_one_target_app # webhooks make things speedy after_save :async_webhook_target_apps def created_at_computer_readable created_at.utc.to_s(:computer) end def created_at_human_readable created_at.to_s(:human) end def updated_at_computer_readable updated_at.utc.to_s(:computer) end def updated_at_human_readable updated_at.to_s(:human) end def name NAMES[target_app_kind] end def async_webhook_target_apps Resque.enqueue(InstructionWebhooker, self.id) end def webhook_target_apps target_apps.map(&:heroku_app_name).each do |heroku_app_name| begin Webhook.post("http://#{heroku_app_name}.herokuapp.com/webhooks/g5-configurator") rescue ArgumentError => e logger.error e end end end def client_app_creator_kind? 
target_app_kind == RemoteApp::CLIENT_APP_CREATOR end private def has_at_least_one_target_app unless target_app_ids.length >= 1 errors[:target_app_ids] = "can't be blank" end end end
# coding: UTF-8 module Kpi class Semanal < ActiveRecord::Base validates :semana, uniqueness: { scope: [:anyo, :modelo, :scope] } def self.dame_ultimas_n_semanas(n_semanas) semana_mas_antigua = dia_de_referencia.cweek - n_semanas + 1 semana_mas_reciente = dia_de_referencia.cweek semana_mas_antigua..semana_mas_reciente end def self.calcula_ultima_semana ultima_semana = dia_de_referencia.cweek self.where(anyo: dia_de_referencia.year, semana: ultima_semana).delete_all Kpi::modelos_y_scopes.each do |modelo_y_scope| dato = encadena_modelo_y_scopes(modelo_y_scope).count self.create(anyo: dia_de_referencia.year, semana: ultima_semana, modelo: modelo_y_scope[:modelo], scope: modelo_y_scope[:scopes].to_s, dato: dato) end end def self.encadena_modelo_y_scopes(modelo_y_scope) q = modelo_y_scope[:modelo].constantize modelo_y_scope[:scopes].each { |scope| q = q.send(scope) } q end def self.dia_de_referencia Date.yesterday end end end Change .count to .size * count doesn't work with select (... as ...) https://github.com/rails/rails/issues/2541 module Kpi class Semanal < ActiveRecord::Base validates :semana, uniqueness: { scope: [:anyo, :modelo, :scope] } def self.dame_ultimas_n_semanas(n_semanas) semana_mas_antigua = dia_de_referencia.cweek - n_semanas + 1 semana_mas_reciente = dia_de_referencia.cweek semana_mas_antigua..semana_mas_reciente end def self.calcula_ultima_semana ultima_semana = dia_de_referencia.cweek self.where(anyo: dia_de_referencia.year, semana: ultima_semana).delete_all Kpi::modelos_y_scopes.each do |modelo_y_scope| dato = encadena_modelo_y_scopes(modelo_y_scope).size self.create(anyo: dia_de_referencia.year, semana: ultima_semana, modelo: modelo_y_scope[:modelo], scope: modelo_y_scope[:scopes].to_s, dato: dato) end end def self.encadena_modelo_y_scopes(modelo_y_scope) q = modelo_y_scope[:modelo].constantize modelo_y_scope[:scopes].each { |scope| q = q.send(scope) } q end def self.dia_de_referencia Date.yesterday end end end
require_relative '../../lib/shared/properties' require 'set' class LdapConfig attr_accessor :config def initialize(root_dir=nil) set_root_dir(root_dir) ldap_config = {} @config = Properties.load_file(config_file_path) if config_file_exists? check_configuration_validity end def self.exists? config = self.instance.config config && config['ldap'] && config['ldap']['enable'].present? end def config @config end def [](key_string) keys = key_string.split('.') keys.inject(@config) do |hash, key| hash.fetch(key) end rescue IndexError nil end def with_temporary_config(new_config_hash) old_config = @config.deep_dup @config.deep_merge! new_config_hash.stringify_keys yield ensure @config = old_config end def self.config_file_path(root_dir=nil) root_dir = Rails.root unless root_dir File.join root_dir, 'config/ldap.properties' end def config_file_path self.class.config_file_path(@root_dir) end def self.deep_merge(hash, other_hash) deep_merge!(hash.dup, other_hash) end def self.deep_merge!(hash, other_hash) other_hash.each_pair do |k,v| tv = hash[k] hash[k] = tv.is_a?(Hash) && v.is_a?(Hash) ? deep_merge(tv, v) : v end hash end def self.instance @instance ||= LdapConfig.new end private def config_file_exists? if File.symlink?(config_file_path) File.exists?(File.readlink(config_file_path)) else File.exists?(config_file_path) end end def set_root_dir(root_dir) @root_dir = root_dir || Rails.root end def check_configuration_validity return if @config.nil? || @config['ldap'].nil? || @config['ldap']['enable'].nil? || @config['ldap']['enable'] == false # will raise error if one of these properties is missing required_properties = [ 'enable', 'host', 'port', { 'bind' => ['username', 'password' ] }, { 'user' => ['search_base', 'filter'] } ] mutually_dependent_properties = [ { 'group' => ['names', 'search_base', 'filter'] } ] required_properties.each do |prop| if prop.is_a? 
String raise LdapClient::LdapNotCorrectlyConfigured.new("Missing value for property ldap.#{prop} in ldap.properties file") if @config["ldap"][prop].nil? end if prop.is_a? Hash attribute = prop.keys.first nested_props = prop.values.first nested_props.each do |nested_prop| raise LdapClient::LdapNotCorrectlyConfigured.new("Missing value for property ldap.#{attribute}.#{nested_prop} in ldap.properties file") if @config["ldap"][attribute][nested_prop].nil? end end end mutually_dependent_properties.each do |prop| if prop.is_a? Hash attribute = prop.keys.first nested_props = prop.values.first # If the whole group is missing, it's ok. return if @config['ldap'][attribute].nil? # Otherwise, make sure all of the properties exist is_valid = nested_props.all? do |nested_prop| @config['ldap'][attribute].key?(nested_prop) && !@config["ldap"][attribute][nested_prop].nil? end all_props_string = nested_props.inject("") {|result, nested_prop| "#{result} #{attribute}.#{nested_prop}"} raise LdapClient::LdapNotCorrectlyConfigured.new("One or more of these LDAP settings are mis-configured: (#{all_props_string}). Please contact your system administrator.") unless is_valid end end end end bug fix in ldap configuration check require_relative '../../lib/shared/properties' require 'set' class LdapConfig attr_accessor :config def initialize(root_dir=nil) set_root_dir(root_dir) ldap_config = {} @config = Properties.load_file(config_file_path) if config_file_exists? check_configuration_validity end def self.exists? config = self.instance.config config && config['ldap'] && config['ldap']['enable'].present? end def config @config end def [](key_string) keys = key_string.split('.') keys.inject(@config) do |hash, key| hash.fetch(key) end rescue IndexError nil end def with_temporary_config(new_config_hash) old_config = @config.deep_dup @config.deep_merge! 
new_config_hash.stringify_keys yield ensure @config = old_config end def self.config_file_path(root_dir=nil) root_dir = Rails.root unless root_dir File.join root_dir, 'config/ldap.properties' end def config_file_path self.class.config_file_path(@root_dir) end def self.deep_merge(hash, other_hash) deep_merge!(hash.dup, other_hash) end def self.deep_merge!(hash, other_hash) other_hash.each_pair do |k,v| tv = hash[k] hash[k] = tv.is_a?(Hash) && v.is_a?(Hash) ? deep_merge(tv, v) : v end hash end def self.instance @instance ||= LdapConfig.new end private def config_file_exists? if File.symlink?(config_file_path) File.exists?(File.readlink(config_file_path)) else File.exists?(config_file_path) end end def set_root_dir(root_dir) @root_dir = root_dir || Rails.root end def check_configuration_validity return if @config.nil? || @config['ldap'].nil? || @config['ldap']['enable'].nil? || @config['ldap']['enable'] == false # will raise error if one of these properties is missing required_properties = [ 'enable', 'host', 'port', { 'bind' => ['username', 'password' ] }, { 'user' => ['search_base', 'filter'] } ] mutually_dependent_properties = [ { 'group' => ['names', 'search_base', 'filter'] } ] required_properties.each do |prop| if prop.is_a? String raise LdapClient::LdapNotCorrectlyConfigured.new("Missing value for property ldap.#{prop} in ldap.properties file") if @config["ldap"][prop].nil? end if prop.is_a? Hash attribute = prop.keys.first nested_props = prop.values.first nested_props.each do |nested_prop| if @config["ldap"][attribute].nil? || @config["ldap"][attribute][nested_prop].nil? raise LdapClient::LdapNotCorrectlyConfigured.new("Missing value for property ldap.#{attribute}.#{nested_prop} in ldap.properties file") end end end end mutually_dependent_properties.each do |prop| if prop.is_a? Hash attribute = prop.keys.first nested_props = prop.values.first # If the whole group is missing, it's ok. return if @config['ldap'][attribute].nil? 
# Otherwise, make sure all of the properties exist is_valid = nested_props.all? do |nested_prop| @config['ldap'][attribute].key?(nested_prop) && !@config["ldap"][attribute][nested_prop].nil? end all_props_string = nested_props.inject("") {|result, nested_prop| "#{result} #{attribute}.#{nested_prop}"} raise LdapClient::LdapNotCorrectlyConfigured.new("One or more of these LDAP settings are mis-configured: (#{all_props_string}). Please contact your system administrator.") unless is_valid end end end end
# frozen_string_literal: true class Name < AbstractModel # Short-hand for calling Name.find_names with +fill_in_authors+ set to +true+. def self.find_names_filling_in_authors(in_str, rank = nil, ignore_deprecated: false) find_names(in_str, rank, ignore_deprecated: ignore_deprecated, fill_in_authors: true) end # Look up Name's with a given name. By default tries to weed out deprecated # Name's, but if that results in an empty set, then it returns the deprecated # ones. Returns an Array of zero or more Name instances. # # +in_str+:: String to parse. # +rank+:: Accept only names of this rank (optional). # +ignore_deprecated+:: If +true+, return all matching names, # even if deprecated. # +fill_in_authors+:: If +true+, will fill in author for Name's missing # authors # if +in_str+ supplies one. # # names = Name.find_names('Letharia vulpina') # def self.find_names(in_str, rank = nil, ignore_deprecated: false, fill_in_authors: false) return [] unless parse = parse_name(in_str) finder = Name.with_rank(rank) # Nimmo note: This syntax is gross but it's the only way to avoid a string # Replaces the much more elegant "search_name = :name" finder_in_search_name = finder.where( Name[:search_name].eq(Name[:name]).to_sql, { name: parse.search_name } ) results = name_search(finder_in_search_name, ignore_deprecated) return results if results.present? finder_in_text_name = finder.where( Name[:text_name].eq(Name[:name]).to_sql, { name: parse.text_name } ) results = name_search(finder_in_text_name, ignore_deprecated) return results if parse.author.blank? return [] if results.any? { |n| n.author.present? } set_author(results, parse.author, fill_in_authors) results end def self.name_search(finder, ignore_deprecated) unless ignore_deprecated results = finder.where(deprecated: 0) return results.to_a if results.present? end finder.to_a end def self.set_author(names, author, fill_in_authors) return unless author.present? 
&& fill_in_authors && names.length == 1 names.first.change_author(author) names.first.save end # Parses a String, creates a Name for it and all its ancestors (if any don't # already exist), returns it in an Array (genus first, then species, etc. If # there is ambiguity at any level (due to different authors), then +nil+ is # returned in that slot. Check last slot especially. Returns an Array of # Name instances, *UNSAVED*!! # # names = Name.find_or_create_name_and_parents('Letharia vulpina (L.) Hue') # raise "Name is ambiguous!" if !names.last # names.each do |name| # name.save if name and name.new_record? # end # def self.find_or_create_name_and_parents(in_str) result = [] if (parsed_name = parse_name(in_str)) result = find_or_create_parsed_name_and_parents(parsed_name) end result end def self.find_or_create_parsed_name_and_parents(parsed_name) result = [] if names_for_unknown.member?(parsed_name.search_name.downcase) result << Name.unknown else if parsed_name.parent_name result = find_or_create_name_and_parents(parsed_name.parent_name) end deprecate = result.any? && result.last && result.last.deprecated result << find_or_create_parsed_name(parsed_name, deprecate) end result end def self.find_or_create_parsed_name(parsed_name, deprecate) result = nil matches = find_matching_names(parsed_name) if matches.empty? result = Name.make_name(parsed_name.params) result.change_deprecated(true) if deprecate elsif matches.length == 1 result = matches.first # Fill in author automatically if we can. if result.author.blank? && parsed_name.author.present? result.change_author(parsed_name.author) end else # Try to resolve ambiguity by taking the one with author. matches.reject! { |name| name.author.blank? } result = matches.first if matches.length == 1 end result end def self.find_matching_names(parsed_name) result = [] if parsed_name.author.blank? result = Name.where(text_name: parsed_name.text_name) else result = Name.where(search_name: parsed_name.search_name) if result.empty? 
result = Name.where(text_name: parsed_name.text_name, author: "") end end result.to_a end # Look up a Name, creating it as necessary. Requires +rank+ and +text_name+, # at least, supplying defaults for +search_name+, +display_name+, and # +sort_name+, and leaving +author+ blank by default. Requires an # exact match of both +text_name+ and +author+. Returns: # # zero or one matches:: a Name instance, *UNSAVED*!! # multiple matches:: nil # # Used by +make_species+, +make_genus+, and +find_or_create_name_and_parents+. # def self.make_name(params) result = nil search_name = params[:search_name] matches = Name.where(search_name: search_name) if matches.empty? result = Name.new_name(params) elsif matches.length == 1 result = matches.first end result end # make a Name given all the various name formats, etc. # Used only by +make_name+, +new_name_from_parsed_name+, and # +create_test_name+ in unit test. # Returns a Name instance, *UNSAVED*!! def self.new_name(params) result = Name.new(params) result.created_at = now = Time.zone.now result.updated_at = now result end # Make a Name instance from a ParsedName # Used by NameController#create_new_name # Returns a Name instance, *UNSAVED*!! def self.new_name_from_parsed_name(parsed_name) new_name(parsed_name.params) end # Get list of Names that are potential matches when creating a new name. # Takes results of Name.parse_name. Used by NameController#create_name. # Three cases: # # 1. group with author - only accept exact matches # 2. nongroup with author - match names with correct author or no author # 3. any name without author - ignore authors completely when matching names # # If the user provides an author, but the only match has no author, then we # just need to add an author to the existing Name. If the user didn't give # an author, but there are matches with an author, then it already exists # and we should just ignore the request. 
# def self.names_matching_desired_new_name(parsed_name) if parsed_name.rank == :Group Name.where(search_name: parsed_name.search_name) elsif parsed_name.author.empty? Name.where(text_name: parsed_name.text_name) else Name.where(text_name: parsed_name.text_name). where(author: [parsed_name.author, ""]) end end ############################################################################## end Revert changes in name/create Turns out I don't understand what the where clause was doing! # frozen_string_literal: true class Name < AbstractModel # Short-hand for calling Name.find_names with +fill_in_authors+ set to +true+. def self.find_names_filling_in_authors(in_str, rank = nil, ignore_deprecated: false) find_names(in_str, rank, ignore_deprecated: ignore_deprecated, fill_in_authors: true) end # Look up Name's with a given name. By default tries to weed out deprecated # Name's, but if that results in an empty set, then it returns the deprecated # ones. Returns an Array of zero or more Name instances. # # +in_str+:: String to parse. # +rank+:: Accept only names of this rank (optional). # +ignore_deprecated+:: If +true+, return all matching names, # even if deprecated. # +fill_in_authors+:: If +true+, will fill in author for Name's missing # authors # if +in_str+ supplies one. # # names = Name.find_names('Letharia vulpina') # def self.find_names(in_str, rank = nil, ignore_deprecated: false, fill_in_authors: false) return [] unless parse = parse_name(in_str) finder = Name.with_rank(rank) results = name_search(finder.where("search_name = :name", { name: parse.search_name }), ignore_deprecated) return results if results.present? results = name_search(finder.where("text_name = :name", { name: parse.text_name }), ignore_deprecated) return results if parse.author.blank? return [] if results.any? { |n| n.author.present? 
} set_author(results, parse.author, fill_in_authors) results end def self.name_search(finder, ignore_deprecated) unless ignore_deprecated results = finder.where(deprecated: 0) return results.to_a if results.present? end finder.to_a end def self.set_author(names, author, fill_in_authors) return unless author.present? && fill_in_authors && names.length == 1 names.first.change_author(author) names.first.save end # Parses a String, creates a Name for it and all its ancestors (if any don't # already exist), returns it in an Array (genus first, then species, etc. If # there is ambiguity at any level (due to different authors), then +nil+ is # returned in that slot. Check last slot especially. Returns an Array of # Name instances, *UNSAVED*!! # # names = Name.find_or_create_name_and_parents('Letharia vulpina (L.) Hue') # raise "Name is ambiguous!" if !names.last # names.each do |name| # name.save if name and name.new_record? # end # def self.find_or_create_name_and_parents(in_str) result = [] if (parsed_name = parse_name(in_str)) result = find_or_create_parsed_name_and_parents(parsed_name) end result end def self.find_or_create_parsed_name_and_parents(parsed_name) result = [] if names_for_unknown.member?(parsed_name.search_name.downcase) result << Name.unknown else if parsed_name.parent_name result = find_or_create_name_and_parents(parsed_name.parent_name) end deprecate = result.any? && result.last && result.last.deprecated result << find_or_create_parsed_name(parsed_name, deprecate) end result end def self.find_or_create_parsed_name(parsed_name, deprecate) result = nil matches = find_matching_names(parsed_name) if matches.empty? result = Name.make_name(parsed_name.params) result.change_deprecated(true) if deprecate elsif matches.length == 1 result = matches.first # Fill in author automatically if we can. if result.author.blank? && parsed_name.author.present? result.change_author(parsed_name.author) end else # Try to resolve ambiguity by taking the one with author. 
matches.reject! { |name| name.author.blank? } result = matches.first if matches.length == 1 end result end def self.find_matching_names(parsed_name) result = [] if parsed_name.author.blank? result = Name.where(text_name: parsed_name.text_name) else result = Name.where(search_name: parsed_name.search_name) if result.empty? result = Name.where(text_name: parsed_name.text_name, author: "") end end result.to_a end # Look up a Name, creating it as necessary. Requires +rank+ and +text_name+, # at least, supplying defaults for +search_name+, +display_name+, and # +sort_name+, and leaving +author+ blank by default. Requires an # exact match of both +text_name+ and +author+. Returns: # # zero or one matches:: a Name instance, *UNSAVED*!! # multiple matches:: nil # # Used by +make_species+, +make_genus+, and +find_or_create_name_and_parents+. # def self.make_name(params) result = nil search_name = params[:search_name] matches = Name.where(search_name: search_name) if matches.empty? result = Name.new_name(params) elsif matches.length == 1 result = matches.first end result end # make a Name given all the various name formats, etc. # Used only by +make_name+, +new_name_from_parsed_name+, and # +create_test_name+ in unit test. # Returns a Name instance, *UNSAVED*!! def self.new_name(params) result = Name.new(params) result.created_at = now = Time.zone.now result.updated_at = now result end # Make a Name instance from a ParsedName # Used by NameController#create_new_name # Returns a Name instance, *UNSAVED*!! def self.new_name_from_parsed_name(parsed_name) new_name(parsed_name.params) end # Get list of Names that are potential matches when creating a new name. # Takes results of Name.parse_name. Used by NameController#create_name. # Three cases: # # 1. group with author - only accept exact matches # 2. nongroup with author - match names with correct author or no author # 3. 
any name without author - ignore authors completely when matching names # # If the user provides an author, but the only match has no author, then we # just need to add an author to the existing Name. If the user didn't give # an author, but there are matches with an author, then it already exists # and we should just ignore the request. # def self.names_matching_desired_new_name(parsed_name) if parsed_name.rank == :Group Name.where(search_name: parsed_name.search_name) elsif parsed_name.author.empty? Name.where(text_name: parsed_name.text_name) else Name.where(text_name: parsed_name.text_name). where(author: [parsed_name.author, ""]) end end ############################################################################## end
cask 'font-source-code-pro-for-powerline' do version :latest sha256 :no_check url 'https://github.com/powerline/fonts/trunk/SourceCodePro', using: :svn, trust_cert: true name 'Source Code Pro for Powerline' homepage 'https://github.com/powerline/fonts/tree/master/SourceCodePro' depends_on macos: '>= :sierra' font 'Source Code Pro Black for Powerline.otf' font 'Source Code Pro Bold for Powerline.otf' font 'Source Code Pro ExtraLight for Powerline.otf' font 'Source Code Pro Light for Powerline.otf' font 'Source Code Pro Medium for Powerline.otf' font 'Source Code Pro Powerline BlackItalic.otf' font 'Source Code Pro Powerline BoldItalic.otf' font 'Source Code Pro Powerline ExtraLightItalic.otf' font 'Source Code Pro Powerline Italic.otf' font 'Source Code Pro Powerline LightItalic.otf' font 'Source Code Pro Powerline MediumItalic.otf' font 'Source Code Pro Powerline SemiboldItalic.otf' font 'Source Code Pro Semibold for Powerline.otf' font 'Source Code Pro for Powerline.otf' end font-source-code-pro-for-powerline.rb: fix for new style cask "font-source-code-pro-for-powerline" do version :latest sha256 :no_check url "https://github.com/powerline/fonts/trunk/SourceCodePro", using: :svn, trust_cert: true name "Source Code Pro for Powerline" homepage "https://github.com/powerline/fonts/tree/master/SourceCodePro" depends_on macos: ">= :sierra" font "Source Code Pro Black for Powerline.otf" font "Source Code Pro Bold for Powerline.otf" font "Source Code Pro ExtraLight for Powerline.otf" font "Source Code Pro Light for Powerline.otf" font "Source Code Pro Medium for Powerline.otf" font "Source Code Pro Powerline BlackItalic.otf" font "Source Code Pro Powerline BoldItalic.otf" font "Source Code Pro Powerline ExtraLightItalic.otf" font "Source Code Pro Powerline Italic.otf" font "Source Code Pro Powerline LightItalic.otf" font "Source Code Pro Powerline MediumItalic.otf" font "Source Code Pro Powerline SemiboldItalic.otf" font "Source Code Pro Semibold for Powerline.otf" font 
"Source Code Pro for Powerline.otf" end
class Observation < ActiveRecord::Base acts_as_activity_streamable :batch_window => 30.minutes, :batch_partial => "observations/activity_stream_batch" acts_as_taggable acts_as_flaggable include Ambidextrous # Set to true if you want to skip the expensive updating of all the user's # lists after saving. Useful if you're saving many observations at once and # you want to update lists in a batch attr_accessor :skip_refresh_lists, :skip_identifications # Set if you need to set the taxon from a name separate from the species # guess attr_accessor :taxon_name # licensing extras attr_accessor :make_license_default attr_accessor :make_licenses_same MASS_ASSIGNABLE_ATTRIBUTES = [:make_license_default, :make_licenses_same] M_TO_OBSCURE_THREATENED_TAXA = 10000 OUT_OF_RANGE_BUFFER = 5000 # meters PLANETARY_RADIUS = 6370997.0 DEGREES_PER_RADIAN = 57.2958 FLOAT_REGEX = /[-+]?[0-9]*\.?[0-9]+/ COORDINATE_REGEX = /[^\d\,]*?(#{FLOAT_REGEX})[^\d\,]*?/ LAT_LON_SEPARATOR_REGEX = /[\,\s]\s*/ LAT_LON_REGEX = /#{COORDINATE_REGEX}#{LAT_LON_SEPARATOR_REGEX}#{COORDINATE_REGEX}/ PRIVATE = "private" OBSCURED = "obscured" GEOPRIVACIES = [OBSCURED, PRIVATE] GEOPRIVACY_DESCRIPTIONS = { nil => "Everyone can see the coordinates unless the taxon is threatened.", OBSCURED => "Public coordinates shown as a random point within " + "#{M_TO_OBSCURE_THREATENED_TAXA / 1000}KM of the true coordinates. " + "True coordinates are only visible to you and the curators of projects " + "to which you add the observation.", PRIVATE => "Coordinates completely hidden from public maps, true " + "coordinates only visible to you and the curators of projects to " + "which you add the observation.", } CASUAL_GRADE = "casual" RESEARCH_GRADE = "research" QUALITY_GRADES = [CASUAL_GRADE, RESEARCH_GRADE] LICENSES = [ ["CC-BY", "Attribution", "This license lets others distribute, remix, tweak, and build upon your work, even commercially, as long as they credit you for the original creation. 
This is the most accommodating of licenses offered. Recommended for maximum dissemination and use of licensed materials."], ["CC-BY-NC", "Attribution-NonCommercial", "This license lets others remix, tweak, and build upon your work non-commercially, and although their new works must also acknowledge you and be non-commercial, they don’t have to license their derivative works on the same terms."], ["CC-BY-SA", "Attribution-ShareAlike", "This license lets others remix, tweak, and build upon your work even for commercial purposes, as long as they credit you and license their new creations under the identical terms. All new works based on yours will carry the same license, so any derivatives will also allow commercial use."], ["CC-BY-ND", "Attribution-NoDerivs", "This license allows for redistribution, commercial and non-commercial, as long as it is passed along unchanged and in whole, with credit to you."], ["CC-BY-NC-SA", "Attribution-NonCommercial-ShareAlike", "This license lets others remix, tweak, and build upon your work non-commercially, as long as they credit you and license their new creations under the identical terms."], ["CC-BY-NC-ND", "Attribution-NonCommercial-NoDerivs", "This license is the most restrictive of the six main licenses, only allowing others to download your works and share them with others as long as they credit you, but they can’t change them in any way or use them commercially."] ] LICENSE_CODES = LICENSES.map{|row| row.first} LICENSES.each do |code, name, description| const_set code.gsub(/\-/, '_'), code end PREFERRED_LICENSES = [CC_BY, CC_BY_NC] belongs_to :user, :counter_cache => true belongs_to :taxon, :counter_cache => true belongs_to :iconic_taxon, :class_name => 'Taxon', :foreign_key => 'iconic_taxon_id' has_many :observation_photos, :dependent => :destroy, :order => "id asc" has_many :photos, :through => :observation_photos # note last_observation and first_observation on listed taxa will get reset # by 
CheckList.refresh_with_observation has_many :listed_taxa, :foreign_key => 'last_observation_id' has_many :goal_contributions, :as => :contribution, :dependent => :destroy has_many :comments, :as => :parent, :dependent => :destroy has_many :identifications, :dependent => :delete_all has_many :project_observations, :dependent => :destroy has_many :project_invitations, :dependent => :destroy has_many :projects, :through => :project_observations has_many :quality_metrics, :dependent => :destroy has_many :observation_field_values, :dependent => :destroy, :order => "id asc" has_many :observation_fields, :through => :observation_field_values define_index do indexes taxon.taxon_names.name, :as => :names indexes tags.name, :as => :tags indexes :species_guess, :sortable => true, :as => :species_guess indexes :description, :as => :description indexes :place_guess, :as => :place, :sortable => true indexes user.login, :as => :user, :sortable => true indexes :observed_on_string, :as => :observed_on_string has :user_id has :taxon_id # Sadly, the following doesn't work, because self_and_ancestors is not an # association. I'm not entirely sure if there's a way to work the ancestry # query in as col in a SQL query on observations. If at some point we # need to have the ancestor ids in the Sphinx index, though, we can always # add a col to the taxa table holding the ancestor IDs. Kind of a # redundant, and it would slow down moves, but it might be worth it for # the snappy searches. 
--KMU 2009-04-4 # has taxon.self_and_ancestors(:id), :as => :taxon_self_and_ancestors_ids has photos(:id), :as => :has_photos, :type => :boolean has :created_at, :sortable => true has :observed_on, :sortable => true has :iconic_taxon_id has :id_please, :as => :has_id_please has "latitude IS NOT NULL AND longitude IS NOT NULL", :as => :has_geo, :type => :boolean has 'RADIANS(latitude)', :as => :latitude, :type => :float has 'RADIANS(longitude)', :as => :longitude, :type => :float # HACK: TS doesn't seem to include attributes in the GROUP BY correctly # for Postgres when using custom SQL attr definitions. It may or may not # be fixed in more up-to-date versions, but the issue has been raised: # http://groups.google.com/group/thinking-sphinx/browse_thread/thread/e8397477b201d1e4 has :latitude, :as => :fake_latitude has :longitude, :as => :fake_longitude has :num_identification_agreements has :num_identification_disagreements # END HACK has "num_identification_agreements > num_identification_disagreements", :as => :identifications_most_agree, :type => :boolean has "num_identification_agreements > 0", :as => :identifications_some_agree, :type => :boolean has "num_identification_agreements < num_identification_disagreements", :as => :identifications_most_disagree, :type => :boolean has project_observations(:project_id), :as => :projects, :type => :multi set_property :delta => :delayed end SPHINX_FIELD_NAMES = %w(names tags species_guess description place user observed_on_string) SPHINX_ATTRIBUTE_NAMES = %w(user_id taxon_id has_photos created_at observed_on iconic_taxon_id id_please has_geo latitude longitude fake_latitude fake_longitude num_identification_agreements num_identification_disagreements identifications_most_agree identifications_some_agree identifications_most_disagree projects) accepts_nested_attributes_for :observation_field_values, :allow_destroy => true, :reject_if => lambda { |attrs| attrs[:value].blank? 
} ## # Validations # validates_presence_of :user_id validate :must_be_in_the_past, :must_not_be_a_range validates_numericality_of :latitude, :allow_blank => true, :less_than_or_equal_to => 90, :greater_than_or_equal_to => -90 validates_numericality_of :longitude, :allow_blank => true, :less_than_or_equal_to => 180, :greater_than_or_equal_to => -180 before_validation :munge_observed_on_with_chronic, :set_time_zone, :set_time_in_time_zone, :cast_lat_lon before_save :strip_species_guess, :set_taxon_from_species_guess, :set_taxon_from_taxon_name, :set_iconic_taxon, :keep_old_taxon_id, :set_latlon_from_place_guess, :reset_private_coordinates_if_coordinates_changed, :obscure_coordinates_for_geoprivacy, :obscure_coordinates_for_threatened_taxa, :set_geom_from_latlon, :set_license before_update :set_quality_grade after_save :refresh_lists, :update_identifications_after_save, :refresh_check_lists, :update_out_of_range_later, :update_default_license, :update_all_licenses before_destroy :keep_old_taxon_id after_destroy :refresh_lists_after_destroy, :refresh_check_lists # Activity updates # after_save :update_activity_update # before_destroy :delete_activity_update ## # Named scopes # # Area scopes named_scope :in_bounding_box, lambda { |swlat, swlng, nelat, nelng| if swlng.to_f > 0 && nelng.to_f < 0 {:conditions => ['latitude > ? AND latitude < ? AND (longitude > ? OR longitude < ?)', swlat.to_f, nelat.to_f, swlng.to_f, nelng.to_f]} else {:conditions => ['latitude > ? AND latitude < ? AND longitude > ? AND longitude < ?', swlat.to_f, nelat.to_f, swlng.to_f, nelng.to_f]} end } do def distinct_taxon all(:group => "taxon_id", :conditions => "taxon_id IS NOT NULL", :include => :taxon) end end named_scope :in_place, lambda {|place| place_id = place.is_a?(Place) ? 
place.id : place.to_i { :joins => "JOIN place_geometries ON place_geometries.place_id = #{place_id}", :conditions => [ "(observations.private_latitude IS NULL AND ST_Intersects(place_geometries.geom, observations.geom)) OR " + "(observations.private_latitude IS NOT NULL AND ST_Intersects(place_geometries.geom, ST_Point(observations.private_longitude, observations.private_latitude)))" ] } } # possibly radius in kilometers named_scope :near_point, Proc.new { |lat, lng, radius| lat = lat.to_f lng = lng.to_f radius = radius.to_f radius = 10.0 if radius == 0 planetary_radius = PLANETARY_RADIUS / 1000 # km radius_degrees = radius / (2*Math::PI*planetary_radius) * 360.0 {:conditions => ["ST_Distance(ST_Point(?,?), geom) <= ?", lng.to_f, lat.to_f, radius_degrees]} # # The following attempts to utilize the spatial index by restricting to a # # bounding box. It doesn't seem to be a speed improvement given the # # current number of obs, but maybe later... Note that it's messed up # # around the poles # box_xmin = lng - radius_degrees # box_ymin = lat - radius_degrees # box_xmax = lng + radius_degrees # box_ymax = lat + radius_degrees # box_xmin = 180 - (box_xmin * -1 - 180) if box_xmin < -180 # box_ymin = -90 if box_ymin < -90 # box_xmax = -180 + box_max - 180 if box_xmax > 180 # box_ymax = 90 if box_ymin > 90 # # {:conditions => [ # "geom && 'BOX3D(? ?, ? ?)'::box3d AND ST_Distance(ST_Point(?,?), geom) <= ?", # box_xmin, box_ymin, box_xmax, box_ymax, # lng.to_f, lat.to_f, radius_degrees]} } # Has_property scopes named_scope :has_taxon, lambda { |taxon_id| if taxon_id.nil? 
then return {:conditions => "taxon_id IS NOT NULL"} else {:conditions => ["taxon_id IN (?)", taxon_id]} end } named_scope :has_iconic_taxa, lambda { |iconic_taxon_ids| iconic_taxon_ids = [iconic_taxon_ids].flatten # make array if single if iconic_taxon_ids.include?(nil) {:conditions => [ "observations.iconic_taxon_id IS NULL OR observations.iconic_taxon_id IN (?)", iconic_taxon_ids]} elsif !iconic_taxon_ids.empty? {:conditions => [ "observations.iconic_taxon_id IN (?)", iconic_taxon_ids]} end } named_scope :has_geo, :conditions => ["latitude IS NOT NULL AND longitude IS NOT NULL"] named_scope :has_id_please, :conditions => ["id_please IS TRUE"] named_scope :has_photos, :select => "DISTINCT observations.*", :joins => "JOIN observation_photos AS _op ON _op.observation_id = observations.id ", :conditions => ['_op.id IS NOT NULL'] named_scope :has_quality_grade, lambda {|quality_grade| quality_grade = '' unless QUALITY_GRADES.include?(quality_grade) {:conditions => ["quality_grade = ?", quality_grade]} } # Find observations by a taxon object. Querying on taxa columns forces # massive joins, it's a bit sluggish named_scope :of, lambda { |taxon| taxon = Taxon.find_by_id(taxon.to_i) unless taxon.is_a? Taxon return {:conditions => "1 = 2"} unless taxon { :joins => :taxon, :conditions => [ "observations.taxon_id = ? 
OR taxa.ancestry LIKE '#{taxon.ancestry}/#{taxon.id}%'", taxon ] } } named_scope :at_or_below_rank, lambda {|rank| rank_level = Taxon::RANK_LEVELS[rank] {:joins => [:taxon], :conditions => ["taxa.rank_level <= ?", rank_level]} } # Find observations by user named_scope :by, lambda { |user| {:conditions => ["observations.user_id = ?", user]} } # Order observations by date and time observed named_scope :latest, :order => "observed_on DESC NULLS LAST, time_observed_at DESC NULLS LAST" named_scope :recently_added, :order => "observations.id DESC" # TODO: Make this work for any SQL order statement, including multiple cols named_scope :order_by, lambda { |order| pieces = order.split order_by = pieces[0] order = pieces[1] || 'ASC' extra = [pieces[2..-1]].flatten.join(' ') extra = "NULLS LAST" if extra.blank? options = {} case order_by when 'observed_on' options[:order] = "observed_on #{order} #{extra}, " + "time_observed_at #{order} #{extra}" when 'user' options[:include] = [:user] options[:order] = "users.login #{order} #{extra}" when 'place' options[:order] = "place_guess #{order} #{extra}" when 'created_at' options[:order] = "observations.created_at #{order} #{extra}" else options[:order] = "#{order_by} #{order} #{extra}" end options } named_scope :identifications, lambda { |agreement| limited_scope = {:include => :identifications} case agreement when 'most_agree' limited_scope[:conditions] = "num_identification_agreements > num_identification_disagreements" when 'some_agree' limited_scope[:conditions] = "num_identification_agreements > 0" when 'most_disagree' limited_scope[:conditions] = "num_identification_agreements < num_identification_disagreements" end limited_scope } # Time based named scopes named_scope :created_after, lambda { |time| {:conditions => ['created_at >= ?', time]} } named_scope :created_before, lambda { |time| {:conditions => ['created_at <= ?', time]} } named_scope :updated_after, lambda { |time| {:conditions => ['updated_at >= ?', time]} } 
named_scope :updated_before, lambda { |time| {:conditions => ['updated_at <= ?', time]} } named_scope :observed_after, lambda { |time| {:conditions => ['time_observed_at >= ?', time]} } named_scope :observed_before, lambda { |time| {:conditions => ['time_observed_at <= ?', time]} } named_scope :in_month, lambda {|month| {:conditions => ["EXTRACT(MONTH FROM observed_on) = ?", month]} } named_scope :in_projects, lambda { |projects| projects = projects.split(',') if projects.is_a?(String) # NOTE using :include seems to trigger an erroneous eager load of # observations that screws up sorting kueda 2011-07-22 { :joins => [:project_observations], :conditions => ["project_observations.project_id IN (?)", projects] } } named_scope :on, lambda {|date| {:conditions => Observation.conditions_for_date(:observed_on, date)} } named_scope :created_on, lambda {|date| {:conditions => Observation.conditions_for_date("observations.created_at", date)} } named_scope :out_of_range, :conditions => {:out_of_range => true} named_scope :in_range, :conditions => {:out_of_range => false} def self.conditions_for_date(column, date) year, month, day = date.to_s.split('-').map do |d| d = d.blank? ? nil : d.to_i d == 0 ? nil : d end if date.to_s =~ /^\d{4}/ && year && month && day ["#{column}::DATE = ?", "#{year}-#{month}-#{day}"] elsif year || month || day conditions, values = [[],[]] if year conditions << "EXTRACT(YEAR FROM #{column}) = ?" values << year end if month conditions << "EXTRACT(MONTH FROM #{column}) = ?" values << month end if day conditions << "EXTRACT(DAY FROM #{column}) = ?" values << day end [conditions.join(' AND '), *values] else ["1 = 2"] end end def self.near_place(place) place = Place.find_by_id(place) unless place.is_a?(Place) if place.swlat Observation.in_bounding_box(place.swlat, place.swlng, place.nelat, place.nelng).scoped({}) else Observation.near_point(place.latitude, place.longitude).scoped({}) end end # # Uses scopes to perform a conditional search. 
# May be worth looking into squirrel or some other rails friendly search add on
#
# Compose the named scopes above into a single scope based on a params
# hash (typically request params).  Recognized keys include bounding box
# (:swlat/:swlng/:nelat/:nelng), point (:lat/:lng/:radius), :has,
# :identifications, :iconic_taxa, :order_by, :quality_grade, :taxon /
# :taxon_id / :taxon_name, :user_id, :projects, :place_id, :on,
# :created_on, and :out_of_range.  Returns the scope unexecuted, so it can
# be chained, e.g. Observation.query(params).paginate.
def self.query(params = {})
  scope = self.scoped({})

  # support bounding box queries
  if (!params[:swlat].blank? && !params[:swlng].blank? &&
      !params[:nelat].blank? && !params[:nelng].blank?)
    scope = scope.in_bounding_box(params[:swlat], params[:swlng], params[:nelat], params[:nelng])
  elsif params[:lat] && params[:lng]
    scope = scope.near_point(params[:lat], params[:lng], params[:radius])
  end

  # has (boolean) selectors
  if params[:has]
    params[:has] = params[:has].split(',') if params[:has].is_a? String
    params[:has].each do |prop|
      scope = case prop
        when 'geo' then scope.has_geo
        when 'id_please' then scope.has_id_please
        when 'photos' then scope.has_photos
        else
          # NOTE(review): prop is bound as a quoted value, not a column
          # name, so this condition likely never tests the intended
          # column -- confirm before relying on arbitrary :has values.
          scope.conditions "? IS NOT NULL OR ? != ''", prop, prop # hmmm... this seems less than ideal
      end
    end
  end
  scope = scope.identifications(params[:identifications]) if (params[:identifications])
  scope = scope.has_iconic_taxa(params[:iconic_taxa]) if params[:iconic_taxa]
  scope = scope.order_by(params[:order_by]) if params[:order_by]
  scope = scope.has_quality_grade( params[:quality_grade]) if QUALITY_GRADES.include?(params[:quality_grade])

  # Taxon filters in priority order: explicit taxon, then id, then name.
  if taxon = params[:taxon]
    scope = scope.of(taxon.is_a?(Taxon) ? taxon : taxon.to_i)
  elsif !params[:taxon_id].blank?
    scope = scope.of(params[:taxon_id].to_i)
  elsif !params[:taxon_name].blank?
    taxon_name = TaxonName.find_single(params[:taxon_name], :iconic_taxa => params[:iconic_taxa])
    scope = scope.of(taxon_name.try(:taxon))
  end
  scope = scope.by(params[:user_id]) if params[:user_id]
  scope = scope.in_projects(params[:projects]) if params[:projects]
  scope = scope.in_place(params[:place_id]) if params[:place_id]
  scope = scope.on(params[:on]) if params[:on]
  scope = scope.created_on(params[:created_on]) if params[:created_on]
  scope = scope.out_of_range if params[:out_of_range] == 'true'
  scope = scope.in_range if params[:out_of_range] == 'false'

  # return the scope, we can use this for will_paginate calls like:
  # Observation.query(params).paginate()
  scope
end

# help_txt_for :species_guess, <<-DESC
#   Type a name for what you saw.  It can be common or scientific, accurate
#   or just a placeholder. When you enter it, we'll try to look it up and find
#   the matching species of higher level taxon.
# DESC
#
# instruction_for :place_guess, "Type the name of a place"
# help_txt_for :place_guess, <<-DESC
#   Enter the name of a place and we'll try to find where it is. If we find
#   it, you can drag the map marker around to get more specific.
# DESC

# Debug-style representation including the id and the plain description.
def to_s
  "<Observation #{self.id}: #{to_plain_s}>"
end

# Human-readable one-line description, e.g.
# "Anna's Hummingbird in Berkeley on May 5, 2011 at 10:00 by kueda".
# Options: :verb (true for " observed", or a custom string),
# :no_place_guess, :no_time, :no_user suppress the respective pieces.
def to_plain_s(options = {})
  s = self.species_guess.blank? ? 'something' : self.species_guess
  if options[:verb]
    s += options[:verb] == true ? " observed" : " #{options[:verb]}"
  end
  unless self.place_guess.blank? || options[:no_place_guess]
    s += " in #{self.place_guess}"
  end
  s += " on #{self.observed_on.to_s(:long)}" unless self.observed_on.blank?
  unless self.time_observed_at.blank? || options[:no_time]
    s += " at #{self.time_observed_at_in_zone.to_s(:plain_time)}"
  end
  s += " by #{self.user.try(:login)}" unless options[:no_user]
  s
end

# time_observed_at converted to UTC, or nil; exposed to serializers via
# options[:methods] in to_json below.
def time_observed_at_utc
  time_observed_at.try(:utc)
end

# JSON serialization that always hides user_agent and, unless the viewer
# is the observer (or :force_coordinate_visibility is set), also hides the
# private coordinate fields.  Also strips <script> tags from the output.
def to_json(options = {})
  # don't use delete here, it will just remove the option for all
  # subsequent records in an array
  options[:methods] ||= []
  options[:methods] << :time_observed_at_utc
  viewer = options[:viewer]
  viewer_id = viewer.is_a?(User) ? viewer.id : viewer.to_i
  options[:except] ||= []
  options[:except] += [:user_agent]
  if viewer_id != user_id && !options[:force_coordinate_visibility]
    options[:except] ||= []
    options[:except] += [:private_latitude, :private_longitude, :private_positional_accuracy, :geom]
    options[:except].uniq!
    options[:methods] << :coordinates_obscured
    options[:methods].uniq!
  end
  # NOTE(review): /<script.*script>/i is greedy -- it removes everything
  # from the first "<script" to the last "script>" in the payload.
  super(options).gsub(/<script.*script>/i, "")
end

# XML serialization; private coordinate fields are always excluded.
def to_xml(options = {})
  options[:except] ||= []
  options[:except] += [:private_latitude, :private_longitude, :private_positional_accuracy, :geom]
  super(options)
end

#
# Return a time from observed_on and time_observed_at
#
def datetime
  # Only build a time when observed_on is present and valid.
  if observed_on && errors.on(:observed_on).blank?
    if time_observed_at
      # NOTE(review): Time.mktime's 7th positional argument is
      # microseconds, not a zone -- passing time_observed_at.zone here
      # looks wrong; confirm intended behavior.
      Time.mktime(observed_on.year,
                  observed_on.month,
                  observed_on.day,
                  time_observed_at.hour,
                  time_observed_at.min,
                  time_observed_at.sec,
                  time_observed_at.zone)
    else
      Time.mktime(observed_on.year,
                  observed_on.month,
                  observed_on.day)
    end
  end
end

# Return time_observed_at in the observation's time zone
def time_observed_at_in_zone
  self.time_observed_at.in_time_zone(self.time_zone)
end

#
# Set all the time fields based on the contents of observed_on_string
#
# Parses observed_on_string with Chronic, extracting a trailing zone code
# (e.g. "PST") or numeric offset (e.g. "-0800") first.  Temporarily swaps
# the process-wide Time.zone so Chronic interprets the string in the
# observation's zone, and restores it before returning.  Adds a validation
# error on observed_on when parsing blows up.
def munge_observed_on_with_chronic
  if observed_on_string.blank?
    self.observed_on = nil
    self.time_observed_at = nil
    return true
  end
  date_string = observed_on_string.strip

  # Pull a trailing zone abbreviation or +-HHMM offset off the string and
  # use it to set the record's time_zone.
  if parsed_time_zone = ActiveSupport::TimeZone::CODES[date_string[/\s([A-Z]{3,})$/, 1]]
    date_string = observed_on_string.sub(/\s([A-Z]{3,})$/, '')
    self.time_zone = parsed_time_zone.name if observed_on_string_changed?
  elsif (offset = date_string[/([+-]\d{4})$/, 1]) && (parsed_time_zone = ActiveSupport::TimeZone[offset.to_f / 100])
    date_string = observed_on_string.sub(/([+-]\d{4})$/, '')
    self.time_zone = parsed_time_zone.name if observed_on_string_changed?
  end

  # Normalize ISO-8601-ish input ("T" separator, fractional seconds) into
  # something Chronic handles.
  date_string.sub!('T', ' ') if date_string =~ /\d{4}-\d{2}-\d{2}T/
  date_string.sub!(/(\d{2}:\d{2}:\d{2})\.\d+/, '\\1')

  # Set the time zone appropriately
  old_time_zone = Time.zone
  Time.zone = time_zone || user.try(:time_zone)
  Chronic.time_class = Time.zone

  begin
    # Start parsing...
    return true unless t = Chronic.parse(date_string)

    # Re-interpret future dates as being in the past
    if t > Time.now
      t = Chronic.parse(date_string, :context => :past)
    end

    self.observed_on = t.to_date

    # try to determine if the user specified a time by ask Chronic to return
    # a time range. Time ranges less than a day probably specified a time.
    if tspan = Chronic.parse(date_string, :context => :past, :guess => false)
      # If tspan is less than a day and the string wasn't 'today', set time
      if tspan.width < 86400 && date_string.strip.downcase != 'today'
        self.time_observed_at = t
      else
        self.time_observed_at = nil
      end
    end
  rescue RuntimeError
    errors.add(:observed_on,
      "was not recognized, some working examples are: yesterday, 3 years " +
      "ago, 5/27/1979, 1979-05-27 05:00. " +
      "(<a href='http://chronic.rubyforge.org/'>others</a>)")
    return
  end

  # don't store relative observed_on_strings, or they will change
  # every time you save an observation!
  if date_string =~ /today|yesterday|ago|last|this|now|monday|tuesday|wednesday|thursday|friday|saturday|sunday/i
    self.observed_on_string = self.observed_on.to_s
    if self.time_observed_at
      self.observed_on_string = self.time_observed_at.strftime("%Y-%m-%d %H:%M:%S")
    end
  end

  # Set the time zone back the way it was
  Time.zone = old_time_zone
  true
end

#
# Adds, updates, or destroys the identification corresponding to the taxon
# the user selected.
#
# after_save hook; no-op unless the taxon changed (or
# @skip_identifications is set).
def update_identifications_after_save
  return true if @skip_identifications
  return true unless taxon_id_changed?
  owners_ident = identifications.first(:conditions => {:user_id => self.user_id})
  # skip_observation prevents the identification callbacks from updating
  # this observation again.
  owners_ident.skip_observation = true if owners_ident

  # If there's a taxon we need to make sure the owner's ident agrees
  if taxon
    # If the owner doesn't have an identification for this obs, make one
    unless owners_ident
      owners_ident = identifications.build(:user => user, :taxon => taxon)
      owners_ident.skip_observation = true
      owners_ident.skip_update = true
      owners_ident.save
    end

    # If the obs taxon and the owner's ident don't agree, make them
    if owners_ident.taxon_id != taxon_id
      owners_ident.update_attributes(:taxon_id => taxon_id)
    end

  # If there's no taxon, we should destroy the owner's ident
  elsif owners_ident
    owners_ident.destroy
  end

  true
end

#
# Update the user's lists with changes to this observation's taxon
#
# If the observation is the last_observation in any of the user's lists,
# then the last_observation should be reset to another observation.
#
# after_save hook; enqueues delayed jobs rather than refreshing inline.
def refresh_lists
  return true if @skip_refresh_lists
  return true unless taxon_id_changed?

  # Update the observation's current taxon and/or a previous one that was
  # just removed/changed
  target_taxa = [
    taxon,
    Taxon.find_by_id(@old_observation_taxon_id)
  ].compact.uniq

  # Don't refresh all the lists if nothing changed
  return if target_taxa.empty?

  # project_observations.each do |po|
  #   Project.send_later(:refresh_project_list, po.project_id,
  #     :taxa => target_taxa.map(&:id), :add_new_taxa => true)
  # end

  List.send_later(:refresh_with_observation, id, :taxon_id => taxon_id,
    :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at,
    :dj_priority => 1)
  ProjectList.send_later(:refresh_with_observation, id, :taxon_id => taxon_id,
    :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at,
    :dj_priority => 1)
  # ProjectList.send_later(:refresh_with_observation, id, :taxon_id => taxon_id, :skip_update => true)

  # Reset the instance var so it doesn't linger around
  @old_observation_taxon_id = nil
  true
end

# Enqueue a CheckList refresh when something relevant to place check lists
# changed: the observation is (or was) georeferenced, has (or had) a
# taxon, and its quality grade, taxon, coordinates, or date changed.
# Passes the *_was values so the job can clean up the old state.
def refresh_check_lists
  refresh_needed = (georeferenced? || was_georeferenced?) &&
    (taxon_id || taxon_id_was) &&
    (quality_grade_changed? || taxon_id_changed? || latitude_changed? || longitude_changed? || observed_on_changed?)
  return true unless refresh_needed
  CheckList.send_later(:refresh_with_observation, id, :taxon_id => taxon_id,
    :taxon_id_was => taxon_id_changed? ? taxon_id_was : nil,
    :latitude_was => (latitude_changed? || longitude_changed?) ? latitude_was : nil,
    :longitude_was => (latitude_changed? || longitude_changed?) ? longitude_was : nil,
    :new => id_was.blank?, :skip_update => true, :dj_priority => 1)
  true
end

# Because it has to be slightly different, in that the taxon of a destroyed
# obs shouldn't be removed by default from life lists (maybe you've seen it
# in the past, but you don't have any other obs), but those listed_taxa of
# this taxon should have their last_observation reset.
# def refresh_lists_after_destroy return if @skip_refresh_lists return unless taxon List.send_later(:refresh_with_observation, id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at, :dj_priority => 1) true end # # Preserve the old taxon id if the taxon has changed so we know to update # that taxon in the user's lists after_save # def keep_old_taxon_id @old_observation_taxon_id = taxon_id_was if taxon_id_changed? end # # This is the hook used to check each observation to see if it may apply # to a system based goal. It does so by collecting all of the user's # current goals, including global goals and checking to see if the # observation passes each rule established by the goal. If it does, the # goal is recorded as a contribution in the goal_contributions table. # def update_goal_contributions user.goal_participants_for_incomplete_goals.each do |participant| participant.goal.validate_and_add_contribution(self, participant) end true end # # Remove any instructional text that may have been submitted with the form. # def scrub_instructions_before_save self.attributes.each do |attr_name, value| if Observation.instructions[attr_name.to_sym] and value and Observation.instructions[attr_name.to_sym] == value write_attribute(attr_name.to_sym, nil) end end end # # Set the iconic taxon if it hasn't been set # def set_iconic_taxon return unless self.taxon_id_changed? if taxon self.iconic_taxon_id ||= taxon.iconic_taxon_id else self.iconic_taxon_id = nil end end # # Trim whitespace around species guess # def strip_species_guess self.species_guess.strip! unless species_guess.nil? true end # # Set the time_zone of this observation if not already set # def set_time_zone self.time_zone = nil if time_zone.blank? self.time_zone ||= user.time_zone if user && !user.time_zone.blank? self.time_zone ||= Time.zone.try(:name) unless time_observed_at.blank? 
self.time_zone ||= 'UTC' true end # # Cast lat and lon so they will (hopefully) pass the numericallity test # def cast_lat_lon # self.latitude = latitude.to_f unless latitude.blank? # self.longitude = longitude.to_f unless longitude.blank? true end # # Force time_observed_at into the time zone # def set_time_in_time_zone return if time_observed_at.blank? || time_zone.blank? return unless time_observed_at_changed? || time_zone_changed? # Render the time as a string time_s = time_observed_at_before_type_cast unless time_s.is_a? String time_s = time_observed_at_before_type_cast.strftime("%Y-%m-%d %H:%M:%S") end # Get the time zone offset as a string and append it offset_s = Time.parse(time_s).in_time_zone(time_zone).formatted_offset(false) time_s += " #{offset_s}" self.time_observed_at = Time.parse(time_s) end def lsid "lsid:inaturalist.org:observations:#{id}" end def component_cache_key(options = {}) Observation.component_cache_key(id, options) end def self.component_cache_key(id, options = {}) key = "obs_comp_#{id}" key += "_"+options.map{|k,v| "#{k}-#{v}"}.join('_') unless options.blank? key end def num_identifications_by_others identifications.select{|i| i.user_id != user_id}.size end ##### Rules ############################################################### # # This section contains all of the rules that can be used for list creation # or goal completion class << self # this just prevents me from having to write def self.* # Written for the Goals framework. # Accepts two parameters, the first is 'thing' from GoalRule, # the second is an array created when the GoalRule splits on pipes "|" def within_the_first_n_contributions?(observation, args) return false unless observation.instance_of? self return true if count <= args[0].to_i find(:all, :select => "id", :order => "created_at ASC", :limit => args[0]).include?(observation) end end # # Checks whether this observation has been flagged # def flagged? self.flags.select { |f| not f.resolved? 
}.size > 0 end def georeferenced? (latitude? && longitude?) || (private_latitude? && private_longitude?) end def was_georeferenced? (latitude_was && longitude_was) || (private_latitude_was && private_longitude_was) end def quality_metric_score(metric) quality_metrics.all unless quality_metrics.loaded? metrics = quality_metrics.select{|qm| qm.metric == metric} return nil if metrics.blank? metrics.select{|qm| qm.agree?}.size.to_f / metrics.size end def community_supported_id? num_identification_agreements.to_i > 0 && num_identification_agreements > num_identification_disagreements end def quality_metrics_pass? QualityMetric::METRICS.each do |metric| score = quality_metric_score(metric) return false if score && score < 0.5 end true end def research_grade? georeferenced? && community_supported_id? && quality_metrics_pass? && observed_on? && photos? end def photos? observation_photos.exists? end def casual_grade? !research_grade? end def set_quality_grade(options = {}) if options[:force] || quality_grade_changed? || latitude_changed? || longitude_changed? || observed_on_changed? || taxon_id_changed? self.quality_grade = get_quality_grade end true end def get_quality_grade research_grade? ? RESEARCH_GRADE : CASUAL_GRADE end def coordinates_obscured? !private_latitude.blank? || !private_longitude.blank? end alias :coordinates_obscured :coordinates_obscured? def geoprivacy_private? geoprivacy == PRIVATE end def geoprivacy_obscured? geoprivacy == OBSCURED end def coordinates_viewable_by?(user) return true unless coordinates_obscured? user = User.find_by_id(user) unless user.is_a?(User) return false unless user return true if user_id == user.id return true if user.project_users.curators.exists?(["project_id IN (?)", project_ids]) false end def reset_private_coordinates_if_coordinates_changed if (latitude_changed? || longitude_changed?) 
self.private_latitude = nil self.private_longitude = nil end true end def obscure_coordinates_for_geoprivacy self.geoprivacy = nil if geoprivacy.blank? return true if geoprivacy.blank? && !geoprivacy_changed? case geoprivacy when PRIVATE obscure_coordinates(M_TO_OBSCURE_THREATENED_TAXA) unless coordinates_obscured? self.latitude, self.longitude = [nil, nil] when OBSCURED obscure_coordinates(M_TO_OBSCURE_THREATENED_TAXA) unless coordinates_obscured? else unobscure_coordinates end true end def obscure_coordinates_for_threatened_taxa if !taxon.blank? && taxon.species_or_lower? && georeferenced? && !coordinates_obscured? && (taxon.threatened? || (taxon.parent && taxon.parent.threatened?)) obscure_coordinates(M_TO_OBSCURE_THREATENED_TAXA) elsif geoprivacy.blank? unobscure_coordinates end true end def obscure_coordinates(distance = M_TO_OBSCURE_THREATENED_TAXA) self.place_guess = obscured_place_guess return if latitude.blank? || longitude.blank? if latitude_changed? || longitude_changed? self.private_latitude = latitude self.private_longitude = longitude else self.private_latitude ||= latitude self.private_longitude ||= longitude end self.latitude, self.longitude = random_neighbor_lat_lon(private_latitude, private_longitude, distance) end def lat_lon_in_place_guess? !place_guess.blank? && place_guess !~ /[a-cf-mo-rt-vx-z]/i && !place_guess.scan(COORDINATE_REGEX).blank? end def obscured_place_guess return place_guess if place_guess.blank? return nil if lat_lon_in_place_guess? place_guess.sub(/^[\d\-]+\s+/, '') end def unobscure_coordinates return unless coordinates_obscured? return unless geoprivacy.blank? 
self.latitude = private_latitude self.longitude = private_longitude self.private_latitude = nil self.private_longitude = nil end def iconic_taxon_name Taxon::ICONIC_TAXA_BY_ID[iconic_taxon_id].try(:name) end def self.obscure_coordinates_for_observations_of(taxon) taxon = Taxon.find_by_id(taxon) unless taxon.is_a?(Taxon) return unless taxon Observation.find_observations_of(taxon) do |o| o.obscure_coordinates Observation.update_all({ :place_guess => o.place_guess, :latitude => o.latitude, :longitude => o.longitude, :private_latitude => o.private_latitude, :private_longitude => o.private_longitude, }, {:id => o.id}) end end def self.unobscure_coordinates_for_observations_of(taxon) taxon = Taxon.find_by_id(taxon) unless taxon.is_a?(Taxon) return unless taxon Observation.find_observations_of(taxon) do |o| o.unobscure_coordinates Observation.update_all({ :latitude => o.latitude, :longitude => o.longitude, :private_latitude => o.private_latitude, :private_longitude => o.private_longitude, }, {:id => o.id}) end end def self.find_observations_of(taxon) options = { :include => :taxon, :conditions => [ "observations.taxon_id = ? OR taxa.ancestry LIKE '#{taxon.ancestry}/#{taxon.id}%'", taxon ] } Observation.find_each(options) do |o| yield(o) end end ##### Validations ######################################################### # # Make sure the observation is not in the future. # def must_be_in_the_past unless observed_on.nil? || observed_on <= Date.today errors.add(:observed_on, "can't be in the future") end true end # # Make sure the observation resolves to a single day. Right now we don't # store ambiguity... # def must_not_be_a_range return if observed_on_string.blank? 
is_a_range = false begin if tspan = Chronic.parse(observed_on_string, :context => :past, :guess => false) is_a_range = true if tspan.width.seconds > 1.day.seconds end rescue RuntimeError errors.add(:observed_on, "was not recognized, some working examples are: yesterday, 3 years " + "ago, 5/27/1979, 1979-05-27 05:00. " + "(<a href='http://chronic.rubyforge.org/'>others</a>)" ) return end # Special case: dates like '2004', which ordinarily resolve to today at # 8:04pm observed_on_int = observed_on_string.gsub(/[^\d]/, '').to_i if observed_on_int > 1900 && observed_on_int <= Date.today.year is_a_range = true end if is_a_range errors.add(:observed_on, "must be a single day, not a range") end end def set_taxon_from_taxon_name return true if @taxon_name.blank? return true if taxon_id self.taxon_id = single_taxon_id_for_name(@taxon_name) true end def set_taxon_from_species_guess return true unless species_guess_changed? && taxon_id.blank? return true if species_guess.blank? self.taxon_id = single_taxon_id_for_name(species_guess) true end def single_taxon_for_name(name) Taxon.single_taxon_for_name(name) end def single_taxon_id_for_name(name) Taxon.single_taxon_for_name(name).try(:id) end def set_latlon_from_place_guess return true unless latitude.blank? && longitude.blank? return true if place_guess.blank? return true if place_guess =~ /[a-cf-mo-rt-vx-z]/i # ignore anything with word chars other than NSEW return true unless place_guess.strip =~ /[.+,\s.+]/ # ignore anything without a legit separator matches = place_guess.strip.scan(COORDINATE_REGEX).flatten return true if matches.blank? 
case matches.size when 2 # decimal degrees self.latitude, self.longitude = matches when 4 # decimal minutes self.latitude = matches[0].to_i + matches[1].to_f/60.0 self.longitude = matches[3].to_i + matches[4].to_f/60.0 when 6 # degrees / minutes / seconds self.latitude = matches[0].to_i + matches[1].to_i/60.0 + matches[2].to_f/60/60 self.longitude = matches[3].to_i + matches[4].to_i/60.0 + matches[5].to_f/60/60 end self.latitude *= -1 if latitude.to_f > 0 && place_guess =~ /s/i self.longitude *= -1 if longitude.to_f > 0 && place_guess =~ /w/i true end def set_geom_from_latlon if longitude.blank? || latitude.blank? self.geom = nil elsif longitude_changed? || latitude_changed? self.geom = Point.from_x_y(longitude, latitude) end true end def set_license return true if license_changed? && license.blank? self.license ||= user.preferred_observation_license self.license = nil unless LICENSE_CODES.include?(license) true end def update_out_of_range_later if taxon_id_changed? && taxon.blank? update_out_of_range elsif latitude_changed? || private_latitude_changed? || taxon_id_changed? send_later(:update_out_of_range) end true end def update_out_of_range set_out_of_range Observation.update_all(["out_of_range = ?", out_of_range], ["id = ?", id]) end def set_out_of_range if taxon_id.blank? || !georeferenced? || !TaxonRange.exists?(["taxon_id = ?", taxon_id]) self.out_of_range = nil return end # buffer the point to accomodate simplified or slightly inaccurate ranges buffer_degrees = OUT_OF_RANGE_BUFFER / (2*Math::PI*Observation::PLANETARY_RADIUS) * 360.0 self.out_of_range = if coordinates_obscured? TaxonRange.exists?([ "taxon_ranges.taxon_id = ? AND ST_Distance(taxon_ranges.geom, ST_Point(?,?)) > ?", taxon_id, private_longitude, private_latitude, buffer_degrees ]) else TaxonRange.count( :from => "taxon_ranges, observations", :conditions => [ "taxon_ranges.taxon_id = ? AND observations.id = ? 
AND ST_Distance(taxon_ranges.geom, observations.geom) > ?", taxon_id, id, buffer_degrees] ) > 0 end end def update_default_license return true unless [true, "1", "true"].include?(@make_license_default) user.update_attribute(:preferred_observation_license, license) true end def update_all_licenses return true unless [true, "1", "true"].include?(@make_licenses_same) Observation.update_all(["license = ?", license], ["user_id = ?", user_id]) true end def update_attributes(attributes) # hack around a weird android bug, should be removeable after any release post-March 2012 attributes.delete(:iconic_taxon_name) MASS_ASSIGNABLE_ATTRIBUTES.each do |a| self.send("#{a}=", attributes.delete(a.to_s)) if attributes.has_key?(a.to_s) self.send("#{a}=", attributes.delete(a)) if attributes.has_key?(a) end super(attributes) end def license_name return nil if license.blank? s = "Creative Commons " s += LICENSES.detect{|row| row.first == license}.try(:[], 1).to_s s end # I'm not psyched about having this stuff here, but it makes generating # more compact JSON a lot easier. include ObservationsHelper include ActionView::Helpers::SanitizeHelper include ActionView::Helpers::TextHelper include ActionController::UrlWriter def image_url observation_image_url(self) end def obs_image_url image_url end def short_description short_observation_description(self) end def scientific_name taxon.scientific_name.name if taxon && taxon.scientific_name end def common_name taxon.common_name.name if taxon && taxon.common_name end def url observation_url(self, ActionMailer::Base.default_url_options) end def user_login user.login end def update_stats if taxon_id.blank? 
num_agreements = 0 num_disagreements = 0 else idents = identifications.all(:include => [:observation, :taxon]) num_agreements = idents.select(&:is_agreement?).size num_disagreements = idents.select(&:is_disagreement?).size end # Kinda lame, but Observation#get_quality_grade relies on these numbers self.num_identification_agreements = num_agreements self.num_identification_disagreements = num_disagreements new_quality_grade = get_quality_grade self.quality_grade = new_quality_grade Observation.update_all( ["num_identification_agreements = ?, num_identification_disagreements = ?, quality_grade = ?", num_agreements, num_disagreements, new_quality_grade], "id = #{id}") refresh_check_lists end def random_neighbor_lat_lon(lat, lon, max_distance, radius = PLANETARY_RADIUS) latrads = lat.to_f / DEGREES_PER_RADIAN lonrads = lon.to_f / DEGREES_PER_RADIAN max_distance = max_distance / radius random_distance = Math.acos(rand * (Math.cos(max_distance) - 1) + 1) random_bearing = 2 * Math::PI * rand new_latrads = Math.asin( Math.sin(latrads)*Math.cos(random_distance) + Math.cos(latrads)*Math.sin(random_distance)*Math.cos(random_bearing) ) new_lonrads = lonrads + Math.atan2( Math.sin(random_bearing)*Math.sin(random_distance)*Math.cos(latrads), Math.cos(random_distance)-Math.sin(latrads)*Math.sin(latrads) ) [new_latrads * DEGREES_PER_RADIAN, new_lonrads * DEGREES_PER_RADIAN] end def places return nil unless georeferenced? Place.containing_lat_lng( private_latitude || latitude, private_longitude || longitude).sort_by(&:bbox_area) end def mobile? 
return false unless user_agent MOBILE_APP_USER_AGENT_PATTERNS.each do |pattern| return true if user_agent =~ pattern end false end def device_name return "unknown" unless user_agent if user_agent =~ ANDROID_APP_USER_AGENT_PATTERN "iNaturalist Android App" elsif user_agent =~ IPHONE_APP_USER_AGENT_PATTERN "iNaturalist iPhone App" else "web browser" end end def device_url return unless user_agent if user_agent =~ IPHONE_APP_USER_AGENT_PATTERN "http://itunes.apple.com/us/app/inaturalist/id421397028?mt=8" elsif user_agent =~ ANDROID_APP_USER_AGENT_PATTERN "https://market.android.com/details?id=org.inaturalist.android" else "/" end end def owners_identification if identifications.loaded? identifications.detect {|ident| ident.user_id == user_id} else identifications.first(:conditions => {:user_id => user_id}) end end # Required for use of the sanitize method in # ObservationsHelper#short_observation_description def self.white_list_sanitizer @white_list_sanitizer ||= HTML::WhiteListSanitizer.new end def self.expire_components_for(taxon) taxon = Taxon.find_by_id(taxon) unless taxon.is_a?(Taxon) Observation.of(taxon).find_each do |o| ctrl = ActionController::Base.new ctrl.expire_fragment(o.component_cache_key) ctrl.expire_fragment(o.component_cache_key(:for_owner => true)) end end end Made observed_on_date parsing more accomodating for JS-style dates. class Observation < ActiveRecord::Base acts_as_activity_streamable :batch_window => 30.minutes, :batch_partial => "observations/activity_stream_batch" acts_as_taggable acts_as_flaggable include Ambidextrous # Set to true if you want to skip the expensive updating of all the user's # lists after saving. 
Useful if you're saving many observations at once and # you want to update lists in a batch attr_accessor :skip_refresh_lists, :skip_identifications # Set if you need to set the taxon from a name separate from the species # guess attr_accessor :taxon_name # licensing extras attr_accessor :make_license_default attr_accessor :make_licenses_same MASS_ASSIGNABLE_ATTRIBUTES = [:make_license_default, :make_licenses_same] M_TO_OBSCURE_THREATENED_TAXA = 10000 OUT_OF_RANGE_BUFFER = 5000 # meters PLANETARY_RADIUS = 6370997.0 DEGREES_PER_RADIAN = 57.2958 FLOAT_REGEX = /[-+]?[0-9]*\.?[0-9]+/ COORDINATE_REGEX = /[^\d\,]*?(#{FLOAT_REGEX})[^\d\,]*?/ LAT_LON_SEPARATOR_REGEX = /[\,\s]\s*/ LAT_LON_REGEX = /#{COORDINATE_REGEX}#{LAT_LON_SEPARATOR_REGEX}#{COORDINATE_REGEX}/ PRIVATE = "private" OBSCURED = "obscured" GEOPRIVACIES = [OBSCURED, PRIVATE] GEOPRIVACY_DESCRIPTIONS = { nil => "Everyone can see the coordinates unless the taxon is threatened.", OBSCURED => "Public coordinates shown as a random point within " + "#{M_TO_OBSCURE_THREATENED_TAXA / 1000}KM of the true coordinates. " + "True coordinates are only visible to you and the curators of projects " + "to which you add the observation.", PRIVATE => "Coordinates completely hidden from public maps, true " + "coordinates only visible to you and the curators of projects to " + "which you add the observation.", } CASUAL_GRADE = "casual" RESEARCH_GRADE = "research" QUALITY_GRADES = [CASUAL_GRADE, RESEARCH_GRADE] LICENSES = [ ["CC-BY", "Attribution", "This license lets others distribute, remix, tweak, and build upon your work, even commercially, as long as they credit you for the original creation. This is the most accommodating of licenses offered. 
Recommended for maximum dissemination and use of licensed materials."], ["CC-BY-NC", "Attribution-NonCommercial", "This license lets others remix, tweak, and build upon your work non-commercially, and although their new works must also acknowledge you and be non-commercial, they don’t have to license their derivative works on the same terms."], ["CC-BY-SA", "Attribution-ShareAlike", "This license lets others remix, tweak, and build upon your work even for commercial purposes, as long as they credit you and license their new creations under the identical terms. All new works based on yours will carry the same license, so any derivatives will also allow commercial use."], ["CC-BY-ND", "Attribution-NoDerivs", "This license allows for redistribution, commercial and non-commercial, as long as it is passed along unchanged and in whole, with credit to you."], ["CC-BY-NC-SA", "Attribution-NonCommercial-ShareAlike", "This license lets others remix, tweak, and build upon your work non-commercially, as long as they credit you and license their new creations under the identical terms."], ["CC-BY-NC-ND", "Attribution-NonCommercial-NoDerivs", "This license is the most restrictive of the six main licenses, only allowing others to download your works and share them with others as long as they credit you, but they can’t change them in any way or use them commercially."] ] LICENSE_CODES = LICENSES.map{|row| row.first} LICENSES.each do |code, name, description| const_set code.gsub(/\-/, '_'), code end PREFERRED_LICENSES = [CC_BY, CC_BY_NC] belongs_to :user, :counter_cache => true belongs_to :taxon, :counter_cache => true belongs_to :iconic_taxon, :class_name => 'Taxon', :foreign_key => 'iconic_taxon_id' has_many :observation_photos, :dependent => :destroy, :order => "id asc" has_many :photos, :through => :observation_photos # note last_observation and first_observation on listed taxa will get reset # by CheckList.refresh_with_observation has_many :listed_taxa, :foreign_key => 
'last_observation_id' has_many :goal_contributions, :as => :contribution, :dependent => :destroy has_many :comments, :as => :parent, :dependent => :destroy has_many :identifications, :dependent => :delete_all has_many :project_observations, :dependent => :destroy has_many :project_invitations, :dependent => :destroy has_many :projects, :through => :project_observations has_many :quality_metrics, :dependent => :destroy has_many :observation_field_values, :dependent => :destroy, :order => "id asc" has_many :observation_fields, :through => :observation_field_values define_index do indexes taxon.taxon_names.name, :as => :names indexes tags.name, :as => :tags indexes :species_guess, :sortable => true, :as => :species_guess indexes :description, :as => :description indexes :place_guess, :as => :place, :sortable => true indexes user.login, :as => :user, :sortable => true indexes :observed_on_string, :as => :observed_on_string has :user_id has :taxon_id # Sadly, the following doesn't work, because self_and_ancestors is not an # association. I'm not entirely sure if there's a way to work the ancestry # query in as col in a SQL query on observations. If at some point we # need to have the ancestor ids in the Sphinx index, though, we can always # add a col to the taxa table holding the ancestor IDs. Kind of a # redundant, and it would slow down moves, but it might be worth it for # the snappy searches. 
--KMU 2009-04-4 # has taxon.self_and_ancestors(:id), :as => :taxon_self_and_ancestors_ids has photos(:id), :as => :has_photos, :type => :boolean has :created_at, :sortable => true has :observed_on, :sortable => true has :iconic_taxon_id has :id_please, :as => :has_id_please has "latitude IS NOT NULL AND longitude IS NOT NULL", :as => :has_geo, :type => :boolean has 'RADIANS(latitude)', :as => :latitude, :type => :float has 'RADIANS(longitude)', :as => :longitude, :type => :float # HACK: TS doesn't seem to include attributes in the GROUP BY correctly # for Postgres when using custom SQL attr definitions. It may or may not # be fixed in more up-to-date versions, but the issue has been raised: # http://groups.google.com/group/thinking-sphinx/browse_thread/thread/e8397477b201d1e4 has :latitude, :as => :fake_latitude has :longitude, :as => :fake_longitude has :num_identification_agreements has :num_identification_disagreements # END HACK has "num_identification_agreements > num_identification_disagreements", :as => :identifications_most_agree, :type => :boolean has "num_identification_agreements > 0", :as => :identifications_some_agree, :type => :boolean has "num_identification_agreements < num_identification_disagreements", :as => :identifications_most_disagree, :type => :boolean has project_observations(:project_id), :as => :projects, :type => :multi set_property :delta => :delayed end SPHINX_FIELD_NAMES = %w(names tags species_guess description place user observed_on_string) SPHINX_ATTRIBUTE_NAMES = %w(user_id taxon_id has_photos created_at observed_on iconic_taxon_id id_please has_geo latitude longitude fake_latitude fake_longitude num_identification_agreements num_identification_disagreements identifications_most_agree identifications_some_agree identifications_most_disagree projects) accepts_nested_attributes_for :observation_field_values, :allow_destroy => true, :reject_if => lambda { |attrs| attrs[:value].blank? 
} ## # Validations # validates_presence_of :user_id validate :must_be_in_the_past, :must_not_be_a_range validates_numericality_of :latitude, :allow_blank => true, :less_than_or_equal_to => 90, :greater_than_or_equal_to => -90 validates_numericality_of :longitude, :allow_blank => true, :less_than_or_equal_to => 180, :greater_than_or_equal_to => -180 before_validation :munge_observed_on_with_chronic, :set_time_zone, :set_time_in_time_zone, :cast_lat_lon before_save :strip_species_guess, :set_taxon_from_species_guess, :set_taxon_from_taxon_name, :set_iconic_taxon, :keep_old_taxon_id, :set_latlon_from_place_guess, :reset_private_coordinates_if_coordinates_changed, :obscure_coordinates_for_geoprivacy, :obscure_coordinates_for_threatened_taxa, :set_geom_from_latlon, :set_license before_update :set_quality_grade after_save :refresh_lists, :update_identifications_after_save, :refresh_check_lists, :update_out_of_range_later, :update_default_license, :update_all_licenses before_destroy :keep_old_taxon_id after_destroy :refresh_lists_after_destroy, :refresh_check_lists # Activity updates # after_save :update_activity_update # before_destroy :delete_activity_update ## # Named scopes # # Area scopes named_scope :in_bounding_box, lambda { |swlat, swlng, nelat, nelng| if swlng.to_f > 0 && nelng.to_f < 0 {:conditions => ['latitude > ? AND latitude < ? AND (longitude > ? OR longitude < ?)', swlat.to_f, nelat.to_f, swlng.to_f, nelng.to_f]} else {:conditions => ['latitude > ? AND latitude < ? AND longitude > ? AND longitude < ?', swlat.to_f, nelat.to_f, swlng.to_f, nelng.to_f]} end } do def distinct_taxon all(:group => "taxon_id", :conditions => "taxon_id IS NOT NULL", :include => :taxon) end end named_scope :in_place, lambda {|place| place_id = place.is_a?(Place) ? 
place.id : place.to_i { :joins => "JOIN place_geometries ON place_geometries.place_id = #{place_id}", :conditions => [ "(observations.private_latitude IS NULL AND ST_Intersects(place_geometries.geom, observations.geom)) OR " + "(observations.private_latitude IS NOT NULL AND ST_Intersects(place_geometries.geom, ST_Point(observations.private_longitude, observations.private_latitude)))" ] } } # possibly radius in kilometers named_scope :near_point, Proc.new { |lat, lng, radius| lat = lat.to_f lng = lng.to_f radius = radius.to_f radius = 10.0 if radius == 0 planetary_radius = PLANETARY_RADIUS / 1000 # km radius_degrees = radius / (2*Math::PI*planetary_radius) * 360.0 {:conditions => ["ST_Distance(ST_Point(?,?), geom) <= ?", lng.to_f, lat.to_f, radius_degrees]} # # The following attempts to utilize the spatial index by restricting to a # # bounding box. It doesn't seem to be a speed improvement given the # # current number of obs, but maybe later... Note that it's messed up # # around the poles # box_xmin = lng - radius_degrees # box_ymin = lat - radius_degrees # box_xmax = lng + radius_degrees # box_ymax = lat + radius_degrees # box_xmin = 180 - (box_xmin * -1 - 180) if box_xmin < -180 # box_ymin = -90 if box_ymin < -90 # box_xmax = -180 + box_max - 180 if box_xmax > 180 # box_ymax = 90 if box_ymin > 90 # # {:conditions => [ # "geom && 'BOX3D(? ?, ? ?)'::box3d AND ST_Distance(ST_Point(?,?), geom) <= ?", # box_xmin, box_ymin, box_xmax, box_ymax, # lng.to_f, lat.to_f, radius_degrees]} } # Has_property scopes named_scope :has_taxon, lambda { |taxon_id| if taxon_id.nil? 
then return {:conditions => "taxon_id IS NOT NULL"} else {:conditions => ["taxon_id IN (?)", taxon_id]} end } named_scope :has_iconic_taxa, lambda { |iconic_taxon_ids| iconic_taxon_ids = [iconic_taxon_ids].flatten # make array if single if iconic_taxon_ids.include?(nil) {:conditions => [ "observations.iconic_taxon_id IS NULL OR observations.iconic_taxon_id IN (?)", iconic_taxon_ids]} elsif !iconic_taxon_ids.empty? {:conditions => [ "observations.iconic_taxon_id IN (?)", iconic_taxon_ids]} end } named_scope :has_geo, :conditions => ["latitude IS NOT NULL AND longitude IS NOT NULL"] named_scope :has_id_please, :conditions => ["id_please IS TRUE"] named_scope :has_photos, :select => "DISTINCT observations.*", :joins => "JOIN observation_photos AS _op ON _op.observation_id = observations.id ", :conditions => ['_op.id IS NOT NULL'] named_scope :has_quality_grade, lambda {|quality_grade| quality_grade = '' unless QUALITY_GRADES.include?(quality_grade) {:conditions => ["quality_grade = ?", quality_grade]} } # Find observations by a taxon object. Querying on taxa columns forces # massive joins, it's a bit sluggish named_scope :of, lambda { |taxon| taxon = Taxon.find_by_id(taxon.to_i) unless taxon.is_a? Taxon return {:conditions => "1 = 2"} unless taxon { :joins => :taxon, :conditions => [ "observations.taxon_id = ? 
OR taxa.ancestry LIKE '#{taxon.ancestry}/#{taxon.id}%'", taxon ] } } named_scope :at_or_below_rank, lambda {|rank| rank_level = Taxon::RANK_LEVELS[rank] {:joins => [:taxon], :conditions => ["taxa.rank_level <= ?", rank_level]} } # Find observations by user named_scope :by, lambda { |user| {:conditions => ["observations.user_id = ?", user]} } # Order observations by date and time observed named_scope :latest, :order => "observed_on DESC NULLS LAST, time_observed_at DESC NULLS LAST" named_scope :recently_added, :order => "observations.id DESC" # TODO: Make this work for any SQL order statement, including multiple cols named_scope :order_by, lambda { |order| pieces = order.split order_by = pieces[0] order = pieces[1] || 'ASC' extra = [pieces[2..-1]].flatten.join(' ') extra = "NULLS LAST" if extra.blank? options = {} case order_by when 'observed_on' options[:order] = "observed_on #{order} #{extra}, " + "time_observed_at #{order} #{extra}" when 'user' options[:include] = [:user] options[:order] = "users.login #{order} #{extra}" when 'place' options[:order] = "place_guess #{order} #{extra}" when 'created_at' options[:order] = "observations.created_at #{order} #{extra}" else options[:order] = "#{order_by} #{order} #{extra}" end options } named_scope :identifications, lambda { |agreement| limited_scope = {:include => :identifications} case agreement when 'most_agree' limited_scope[:conditions] = "num_identification_agreements > num_identification_disagreements" when 'some_agree' limited_scope[:conditions] = "num_identification_agreements > 0" when 'most_disagree' limited_scope[:conditions] = "num_identification_agreements < num_identification_disagreements" end limited_scope } # Time based named scopes named_scope :created_after, lambda { |time| {:conditions => ['created_at >= ?', time]} } named_scope :created_before, lambda { |time| {:conditions => ['created_at <= ?', time]} } named_scope :updated_after, lambda { |time| {:conditions => ['updated_at >= ?', time]} } 
named_scope :updated_before, lambda { |time| {:conditions => ['updated_at <= ?', time]} } named_scope :observed_after, lambda { |time| {:conditions => ['time_observed_at >= ?', time]} } named_scope :observed_before, lambda { |time| {:conditions => ['time_observed_at <= ?', time]} } named_scope :in_month, lambda {|month| {:conditions => ["EXTRACT(MONTH FROM observed_on) = ?", month]} } named_scope :in_projects, lambda { |projects| projects = projects.split(',') if projects.is_a?(String) # NOTE using :include seems to trigger an erroneous eager load of # observations that screws up sorting kueda 2011-07-22 { :joins => [:project_observations], :conditions => ["project_observations.project_id IN (?)", projects] } } named_scope :on, lambda {|date| {:conditions => Observation.conditions_for_date(:observed_on, date)} } named_scope :created_on, lambda {|date| {:conditions => Observation.conditions_for_date("observations.created_at", date)} } named_scope :out_of_range, :conditions => {:out_of_range => true} named_scope :in_range, :conditions => {:out_of_range => false} def self.conditions_for_date(column, date) year, month, day = date.to_s.split('-').map do |d| d = d.blank? ? nil : d.to_i d == 0 ? nil : d end if date.to_s =~ /^\d{4}/ && year && month && day ["#{column}::DATE = ?", "#{year}-#{month}-#{day}"] elsif year || month || day conditions, values = [[],[]] if year conditions << "EXTRACT(YEAR FROM #{column}) = ?" values << year end if month conditions << "EXTRACT(MONTH FROM #{column}) = ?" values << month end if day conditions << "EXTRACT(DAY FROM #{column}) = ?" values << day end [conditions.join(' AND '), *values] else ["1 = 2"] end end def self.near_place(place) place = Place.find_by_id(place) unless place.is_a?(Place) if place.swlat Observation.in_bounding_box(place.swlat, place.swlng, place.nelat, place.nelng).scoped({}) else Observation.near_point(place.latitude, place.longitude).scoped({}) end end # # Uses scopes to perform a conditional search. 
# May be worth looking into squirrel or some other rails friendly search add on # def self.query(params = {}) scope = self.scoped({}) # support bounding box queries if (!params[:swlat].blank? && !params[:swlng].blank? && !params[:nelat].blank? && !params[:nelng].blank?) scope = scope.in_bounding_box(params[:swlat], params[:swlng], params[:nelat], params[:nelng]) elsif params[:lat] && params[:lng] scope = scope.near_point(params[:lat], params[:lng], params[:radius]) end # has (boolean) selectors if params[:has] params[:has] = params[:has].split(',') if params[:has].is_a? String params[:has].each do |prop| scope = case prop when 'geo' then scope.has_geo when 'id_please' then scope.has_id_please when 'photos' then scope.has_photos else scope.conditions "? IS NOT NULL OR ? != ''", prop, prop # hmmm... this seems less than ideal end end end scope = scope.identifications(params[:identifications]) if (params[:identifications]) scope = scope.has_iconic_taxa(params[:iconic_taxa]) if params[:iconic_taxa] scope = scope.order_by(params[:order_by]) if params[:order_by] scope = scope.has_quality_grade( params[:quality_grade]) if QUALITY_GRADES.include?(params[:quality_grade]) if taxon = params[:taxon] scope = scope.of(taxon.is_a?(Taxon) ? taxon : taxon.to_i) elsif !params[:taxon_id].blank? scope = scope.of(params[:taxon_id].to_i) elsif !params[:taxon_name].blank? 
taxon_name = TaxonName.find_single(params[:taxon_name], :iconic_taxa => params[:iconic_taxa]) scope = scope.of(taxon_name.try(:taxon)) end scope = scope.by(params[:user_id]) if params[:user_id] scope = scope.in_projects(params[:projects]) if params[:projects] scope = scope.in_place(params[:place_id]) if params[:place_id] scope = scope.on(params[:on]) if params[:on] scope = scope.created_on(params[:created_on]) if params[:created_on] scope = scope.out_of_range if params[:out_of_range] == 'true' scope = scope.in_range if params[:out_of_range] == 'false' # return the scope, we can use this for will_paginate calls like: # Observation.query(params).paginate() scope end # help_txt_for :species_guess, <<-DESC # Type a name for what you saw. It can be common or scientific, accurate # or just a placeholder. When you enter it, we'll try to look it up and find # the matching species of higher level taxon. # DESC # # instruction_for :place_guess, "Type the name of a place" # help_txt_for :place_guess, <<-DESC # Enter the name of a place and we'll try to find where it is. If we find # it, you can drag the map marker around to get more specific. # DESC def to_s "<Observation #{self.id}: #{to_plain_s}>" end def to_plain_s(options = {}) s = self.species_guess.blank? ? 'something' : self.species_guess if options[:verb] s += options[:verb] == true ? " observed" : " #{options[:verb]}" end unless self.place_guess.blank? || options[:no_place_guess] s += " in #{self.place_guess}" end s += " on #{self.observed_on.to_s(:long)}" unless self.observed_on.blank? unless self.time_observed_at.blank? 
|| options[:no_time] s += " at #{self.time_observed_at_in_zone.to_s(:plain_time)}" end s += " by #{self.user.try(:login)}" unless options[:no_user] s end def time_observed_at_utc time_observed_at.try(:utc) end def to_json(options = {}) # don't use delete here, it will just remove the option for all # subsequent records in an array options[:methods] ||= [] options[:methods] << :time_observed_at_utc viewer = options[:viewer] viewer_id = viewer.is_a?(User) ? viewer.id : viewer.to_i options[:except] ||= [] options[:except] += [:user_agent] if viewer_id != user_id && !options[:force_coordinate_visibility] options[:except] ||= [] options[:except] += [:private_latitude, :private_longitude, :private_positional_accuracy, :geom] options[:except].uniq! options[:methods] << :coordinates_obscured options[:methods].uniq! end super(options).gsub(/<script.*script>/i, "") end def to_xml(options = {}) options[:except] ||= [] options[:except] += [:private_latitude, :private_longitude, :private_positional_accuracy, :geom] super(options) end # # Return a time from observed_on and time_observed_at # def datetime if observed_on && errors.on(:observed_on).blank? if time_observed_at Time.mktime(observed_on.year, observed_on.month, observed_on.day, time_observed_at.hour, time_observed_at.min, time_observed_at.sec, time_observed_at.zone) else Time.mktime(observed_on.year, observed_on.month, observed_on.day) end end end # Return time_observed_at in the observation's time zone def time_observed_at_in_zone self.time_observed_at.in_time_zone(self.time_zone) end # # Set all the time fields based on the contents of observed_on_string # def munge_observed_on_with_chronic if observed_on_string.blank? 
self.observed_on = nil self.time_observed_at = nil return true end date_string = observed_on_string.strip tz_abbrev_pattern = /\s\(?([A-Z]{3,})\)?$/ tz_offset_pattern = /([+-]\d{4})$/ tz_js_offset_pattern = /(GMT)?[+-]\d{4}/ if parsed_time_zone = ActiveSupport::TimeZone::CODES[date_string[tz_abbrev_pattern, 1]] date_string = observed_on_string.sub(tz_abbrev_pattern, '') date_string = date_string.sub(tz_js_offset_pattern, '').strip self.time_zone = parsed_time_zone.name if observed_on_string_changed? elsif (offset = date_string[tz_offset_pattern, 1]) && (parsed_time_zone = ActiveSupport::TimeZone[offset.to_f / 100]) date_string = observed_on_string.sub(tz_offset_pattern, '') self.time_zone = parsed_time_zone.name if observed_on_string_changed? end date_string.sub!('T', ' ') if date_string =~ /\d{4}-\d{2}-\d{2}T/ date_string.sub!(/(\d{2}:\d{2}:\d{2})\.\d+/, '\\1') # strip leading month if present date_string.sub!(/^[A-z]{3} ([A-z]{3})/, '\\1') # Set the time zone appropriately old_time_zone = Time.zone Time.zone = time_zone || user.try(:time_zone) Chronic.time_class = Time.zone begin # Start parsing... return true unless t = Chronic.parse(date_string) # Re-interpret future dates as being in the past if t > Time.now t = Chronic.parse(date_string, :context => :past) end self.observed_on = t.to_date # try to determine if the user specified a time by ask Chronic to return # a time range. Time ranges less than a day probably specified a time. if tspan = Chronic.parse(date_string, :context => :past, :guess => false) # If tspan is less than a day and the string wasn't 'today', set time if tspan.width < 86400 && date_string.strip.downcase != 'today' self.time_observed_at = t else self.time_observed_at = nil end end rescue RuntimeError errors.add(:observed_on, "was not recognized, some working examples are: yesterday, 3 years " + "ago, 5/27/1979, 1979-05-27 05:00. 
" + "(<a href='http://chronic.rubyforge.org/'>others</a>)") return end # don't store relative observed_on_strings, or they will change # every time you save an observation! if date_string =~ /today|yesterday|ago|last|this|now|monday|tuesday|wednesday|thursday|friday|saturday|sunday/i self.observed_on_string = self.observed_on.to_s if self.time_observed_at self.observed_on_string = self.time_observed_at.strftime("%Y-%m-%d %H:%M:%S") end end # Set the time zone back the way it was Time.zone = old_time_zone true end # # Adds, updates, or destroys the identification corresponding to the taxon # the user selected. # def update_identifications_after_save return true if @skip_identifications return true unless taxon_id_changed? owners_ident = identifications.first(:conditions => {:user_id => self.user_id}) owners_ident.skip_observation = true if owners_ident # If there's a taxon we need to make ure the owner's ident agrees if taxon # If the owner doesn't have an identification for this obs, make one unless owners_ident owners_ident = identifications.build(:user => user, :taxon => taxon) owners_ident.skip_observation = true owners_ident.skip_update = true owners_ident.save end # If the obs taxon and the owner's ident don't agree, make them if owners_ident.taxon_id != taxon_id owners_ident.update_attributes(:taxon_id => taxon_id) end # If there's no taxon, we should destroy the owner's ident elsif owners_ident owners_ident.destroy end true end # # Update the user's lists with changes to this observation's taxon # # If the observation is the last_observation in any of the user's lists, # then the last_observation should be reset to another observation. # def refresh_lists return true if @skip_refresh_lists return true unless taxon_id_changed? 
# Update the observation's current taxon and/or a previous one that was # just removed/changed target_taxa = [ taxon, Taxon.find_by_id(@old_observation_taxon_id) ].compact.uniq # Don't refresh all the lists if nothing changed return if target_taxa.empty? # project_observations.each do |po| # Project.send_later(:refresh_project_list, po.project_id, # :taxa => target_taxa.map(&:id), :add_new_taxa => true) # end List.send_later(:refresh_with_observation, id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at, :dj_priority => 1) ProjectList.send_later(:refresh_with_observation, id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at, :dj_priority => 1) # ProjectList.send_later(:refresh_with_observation, id, :taxon_id => taxon_id, :skip_update => true) # Reset the instance var so it doesn't linger around @old_observation_taxon_id = nil true end def refresh_check_lists refresh_needed = (georeferenced? || was_georeferenced?) && (taxon_id || taxon_id_was) && (quality_grade_changed? || taxon_id_changed? || latitude_changed? || longitude_changed? || observed_on_changed?) return true unless refresh_needed CheckList.send_later(:refresh_with_observation, id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_changed? ? taxon_id_was : nil, :latitude_was => (latitude_changed? || longitude_changed?) ? latitude_was : nil, :longitude_was => (latitude_changed? || longitude_changed?) ? longitude_was : nil, :new => id_was.blank?, :skip_update => true, :dj_priority => 1) true end # Because it has to be slightly different, in that the taxon of a destroyed # obs shouldn't be removed by default from life lists (maybe you've seen it # in the past, but you don't have any other obs), but those listed_taxa of # this taxon should have their last_observation reset. 
# def refresh_lists_after_destroy return if @skip_refresh_lists return unless taxon List.send_later(:refresh_with_observation, id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at, :dj_priority => 1) true end # # Preserve the old taxon id if the taxon has changed so we know to update # that taxon in the user's lists after_save # def keep_old_taxon_id @old_observation_taxon_id = taxon_id_was if taxon_id_changed? end # # This is the hook used to check each observation to see if it may apply # to a system based goal. It does so by collecting all of the user's # current goals, including global goals and checking to see if the # observation passes each rule established by the goal. If it does, the # goal is recorded as a contribution in the goal_contributions table. # def update_goal_contributions user.goal_participants_for_incomplete_goals.each do |participant| participant.goal.validate_and_add_contribution(self, participant) end true end # # Remove any instructional text that may have been submitted with the form. # def scrub_instructions_before_save self.attributes.each do |attr_name, value| if Observation.instructions[attr_name.to_sym] and value and Observation.instructions[attr_name.to_sym] == value write_attribute(attr_name.to_sym, nil) end end end # # Set the iconic taxon if it hasn't been set # def set_iconic_taxon return unless self.taxon_id_changed? if taxon self.iconic_taxon_id ||= taxon.iconic_taxon_id else self.iconic_taxon_id = nil end end # # Trim whitespace around species guess # def strip_species_guess self.species_guess.strip! unless species_guess.nil? true end # # Set the time_zone of this observation if not already set # def set_time_zone self.time_zone = nil if time_zone.blank? self.time_zone ||= user.time_zone if user && !user.time_zone.blank? self.time_zone ||= Time.zone.try(:name) unless time_observed_at.blank? 
self.time_zone ||= 'UTC' true end # # Cast lat and lon so they will (hopefully) pass the numericallity test # def cast_lat_lon # self.latitude = latitude.to_f unless latitude.blank? # self.longitude = longitude.to_f unless longitude.blank? true end # # Force time_observed_at into the time zone # def set_time_in_time_zone return if time_observed_at.blank? || time_zone.blank? return unless time_observed_at_changed? || time_zone_changed? # Render the time as a string time_s = time_observed_at_before_type_cast unless time_s.is_a? String time_s = time_observed_at_before_type_cast.strftime("%Y-%m-%d %H:%M:%S") end # Get the time zone offset as a string and append it offset_s = Time.parse(time_s).in_time_zone(time_zone).formatted_offset(false) time_s += " #{offset_s}" self.time_observed_at = Time.parse(time_s) end def lsid "lsid:inaturalist.org:observations:#{id}" end def component_cache_key(options = {}) Observation.component_cache_key(id, options) end def self.component_cache_key(id, options = {}) key = "obs_comp_#{id}" key += "_"+options.map{|k,v| "#{k}-#{v}"}.join('_') unless options.blank? key end def num_identifications_by_others identifications.select{|i| i.user_id != user_id}.size end ##### Rules ############################################################### # # This section contains all of the rules that can be used for list creation # or goal completion class << self # this just prevents me from having to write def self.* # Written for the Goals framework. # Accepts two parameters, the first is 'thing' from GoalRule, # the second is an array created when the GoalRule splits on pipes "|" def within_the_first_n_contributions?(observation, args) return false unless observation.instance_of? self return true if count <= args[0].to_i find(:all, :select => "id", :order => "created_at ASC", :limit => args[0]).include?(observation) end end # # Checks whether this observation has been flagged # def flagged? self.flags.select { |f| not f.resolved? 
}.size > 0 end def georeferenced? (latitude? && longitude?) || (private_latitude? && private_longitude?) end def was_georeferenced? (latitude_was && longitude_was) || (private_latitude_was && private_longitude_was) end def quality_metric_score(metric) quality_metrics.all unless quality_metrics.loaded? metrics = quality_metrics.select{|qm| qm.metric == metric} return nil if metrics.blank? metrics.select{|qm| qm.agree?}.size.to_f / metrics.size end def community_supported_id? num_identification_agreements.to_i > 0 && num_identification_agreements > num_identification_disagreements end def quality_metrics_pass? QualityMetric::METRICS.each do |metric| score = quality_metric_score(metric) return false if score && score < 0.5 end true end def research_grade? georeferenced? && community_supported_id? && quality_metrics_pass? && observed_on? && photos? end def photos? observation_photos.exists? end def casual_grade? !research_grade? end def set_quality_grade(options = {}) if options[:force] || quality_grade_changed? || latitude_changed? || longitude_changed? || observed_on_changed? || taxon_id_changed? self.quality_grade = get_quality_grade end true end def get_quality_grade research_grade? ? RESEARCH_GRADE : CASUAL_GRADE end def coordinates_obscured? !private_latitude.blank? || !private_longitude.blank? end alias :coordinates_obscured :coordinates_obscured? def geoprivacy_private? geoprivacy == PRIVATE end def geoprivacy_obscured? geoprivacy == OBSCURED end def coordinates_viewable_by?(user) return true unless coordinates_obscured? user = User.find_by_id(user) unless user.is_a?(User) return false unless user return true if user_id == user.id return true if user.project_users.curators.exists?(["project_id IN (?)", project_ids]) false end def reset_private_coordinates_if_coordinates_changed if (latitude_changed? || longitude_changed?) 
self.private_latitude = nil self.private_longitude = nil end true end def obscure_coordinates_for_geoprivacy self.geoprivacy = nil if geoprivacy.blank? return true if geoprivacy.blank? && !geoprivacy_changed? case geoprivacy when PRIVATE obscure_coordinates(M_TO_OBSCURE_THREATENED_TAXA) unless coordinates_obscured? self.latitude, self.longitude = [nil, nil] when OBSCURED obscure_coordinates(M_TO_OBSCURE_THREATENED_TAXA) unless coordinates_obscured? else unobscure_coordinates end true end def obscure_coordinates_for_threatened_taxa if !taxon.blank? && taxon.species_or_lower? && georeferenced? && !coordinates_obscured? && (taxon.threatened? || (taxon.parent && taxon.parent.threatened?)) obscure_coordinates(M_TO_OBSCURE_THREATENED_TAXA) elsif geoprivacy.blank? unobscure_coordinates end true end def obscure_coordinates(distance = M_TO_OBSCURE_THREATENED_TAXA) self.place_guess = obscured_place_guess return if latitude.blank? || longitude.blank? if latitude_changed? || longitude_changed? self.private_latitude = latitude self.private_longitude = longitude else self.private_latitude ||= latitude self.private_longitude ||= longitude end self.latitude, self.longitude = random_neighbor_lat_lon(private_latitude, private_longitude, distance) end def lat_lon_in_place_guess? !place_guess.blank? && place_guess !~ /[a-cf-mo-rt-vx-z]/i && !place_guess.scan(COORDINATE_REGEX).blank? end def obscured_place_guess return place_guess if place_guess.blank? return nil if lat_lon_in_place_guess? place_guess.sub(/^[\d\-]+\s+/, '') end def unobscure_coordinates return unless coordinates_obscured? return unless geoprivacy.blank? 
self.latitude = private_latitude self.longitude = private_longitude self.private_latitude = nil self.private_longitude = nil end def iconic_taxon_name Taxon::ICONIC_TAXA_BY_ID[iconic_taxon_id].try(:name) end def self.obscure_coordinates_for_observations_of(taxon) taxon = Taxon.find_by_id(taxon) unless taxon.is_a?(Taxon) return unless taxon Observation.find_observations_of(taxon) do |o| o.obscure_coordinates Observation.update_all({ :place_guess => o.place_guess, :latitude => o.latitude, :longitude => o.longitude, :private_latitude => o.private_latitude, :private_longitude => o.private_longitude, }, {:id => o.id}) end end def self.unobscure_coordinates_for_observations_of(taxon) taxon = Taxon.find_by_id(taxon) unless taxon.is_a?(Taxon) return unless taxon Observation.find_observations_of(taxon) do |o| o.unobscure_coordinates Observation.update_all({ :latitude => o.latitude, :longitude => o.longitude, :private_latitude => o.private_latitude, :private_longitude => o.private_longitude, }, {:id => o.id}) end end def self.find_observations_of(taxon) options = { :include => :taxon, :conditions => [ "observations.taxon_id = ? OR taxa.ancestry LIKE '#{taxon.ancestry}/#{taxon.id}%'", taxon ] } Observation.find_each(options) do |o| yield(o) end end ##### Validations ######################################################### # # Make sure the observation is not in the future. # def must_be_in_the_past unless observed_on.nil? || observed_on <= Date.today errors.add(:observed_on, "can't be in the future") end true end # # Make sure the observation resolves to a single day. Right now we don't # store ambiguity... # def must_not_be_a_range return if observed_on_string.blank? 
is_a_range = false begin if tspan = Chronic.parse(observed_on_string, :context => :past, :guess => false) is_a_range = true if tspan.width.seconds > 1.day.seconds end rescue RuntimeError errors.add(:observed_on, "was not recognized, some working examples are: yesterday, 3 years " + "ago, 5/27/1979, 1979-05-27 05:00. " + "(<a href='http://chronic.rubyforge.org/'>others</a>)" ) return end # Special case: dates like '2004', which ordinarily resolve to today at # 8:04pm observed_on_int = observed_on_string.gsub(/[^\d]/, '').to_i if observed_on_int > 1900 && observed_on_int <= Date.today.year is_a_range = true end if is_a_range errors.add(:observed_on, "must be a single day, not a range") end end def set_taxon_from_taxon_name return true if @taxon_name.blank? return true if taxon_id self.taxon_id = single_taxon_id_for_name(@taxon_name) true end def set_taxon_from_species_guess return true unless species_guess_changed? && taxon_id.blank? return true if species_guess.blank? self.taxon_id = single_taxon_id_for_name(species_guess) true end def single_taxon_for_name(name) Taxon.single_taxon_for_name(name) end def single_taxon_id_for_name(name) Taxon.single_taxon_for_name(name).try(:id) end def set_latlon_from_place_guess return true unless latitude.blank? && longitude.blank? return true if place_guess.blank? return true if place_guess =~ /[a-cf-mo-rt-vx-z]/i # ignore anything with word chars other than NSEW return true unless place_guess.strip =~ /[.+,\s.+]/ # ignore anything without a legit separator matches = place_guess.strip.scan(COORDINATE_REGEX).flatten return true if matches.blank? 
case matches.size when 2 # decimal degrees self.latitude, self.longitude = matches when 4 # decimal minutes self.latitude = matches[0].to_i + matches[1].to_f/60.0 self.longitude = matches[3].to_i + matches[4].to_f/60.0 when 6 # degrees / minutes / seconds self.latitude = matches[0].to_i + matches[1].to_i/60.0 + matches[2].to_f/60/60 self.longitude = matches[3].to_i + matches[4].to_i/60.0 + matches[5].to_f/60/60 end self.latitude *= -1 if latitude.to_f > 0 && place_guess =~ /s/i self.longitude *= -1 if longitude.to_f > 0 && place_guess =~ /w/i true end def set_geom_from_latlon if longitude.blank? || latitude.blank? self.geom = nil elsif longitude_changed? || latitude_changed? self.geom = Point.from_x_y(longitude, latitude) end true end def set_license return true if license_changed? && license.blank? self.license ||= user.preferred_observation_license self.license = nil unless LICENSE_CODES.include?(license) true end def update_out_of_range_later if taxon_id_changed? && taxon.blank? update_out_of_range elsif latitude_changed? || private_latitude_changed? || taxon_id_changed? send_later(:update_out_of_range) end true end def update_out_of_range set_out_of_range Observation.update_all(["out_of_range = ?", out_of_range], ["id = ?", id]) end def set_out_of_range if taxon_id.blank? || !georeferenced? || !TaxonRange.exists?(["taxon_id = ?", taxon_id]) self.out_of_range = nil return end # buffer the point to accomodate simplified or slightly inaccurate ranges buffer_degrees = OUT_OF_RANGE_BUFFER / (2*Math::PI*Observation::PLANETARY_RADIUS) * 360.0 self.out_of_range = if coordinates_obscured? TaxonRange.exists?([ "taxon_ranges.taxon_id = ? AND ST_Distance(taxon_ranges.geom, ST_Point(?,?)) > ?", taxon_id, private_longitude, private_latitude, buffer_degrees ]) else TaxonRange.count( :from => "taxon_ranges, observations", :conditions => [ "taxon_ranges.taxon_id = ? AND observations.id = ? 
        AND ST_Distance(taxon_ranges.geom, observations.geom) > ?",
        taxon_id, id, buffer_degrees]
    ) > 0
  end
end

# Callback: when the "make this my default license" checkbox was set, copy
# this observation's license to the user's preference.
def update_default_license
  return true unless [true, "1", "true"].include?(@make_license_default)
  user.update_attribute(:preferred_observation_license, license)
  true
end

# Callback: when "apply to all my observations" was set, bulk-update every
# observation this user owns to the same license.
def update_all_licenses
  return true unless [true, "1", "true"].include?(@make_licenses_same)
  Observation.update_all(["license = ?", license], ["user_id = ?", user_id])
  true
end

# Override: restrict mass assignment to MASS_ASSIGNABLE_ATTRIBUTES by
# assigning those individually before delegating the rest to super.
def update_attributes(attributes)
  # hack around a weird android bug, should be removable after any release post-March 2012
  attributes.delete(:iconic_taxon_name)
  MASS_ASSIGNABLE_ATTRIBUTES.each do |a|
    # Accept both string and symbol keys for each whitelisted attribute.
    self.send("#{a}=", attributes.delete(a.to_s)) if attributes.has_key?(a.to_s)
    self.send("#{a}=", attributes.delete(a)) if attributes.has_key?(a)
  end
  super(attributes)
end

# Human-readable license name, e.g. "Creative Commons Attribution", or nil
# when no license is set. Looks the code up in the LICENSES table.
def license_name
  return nil if license.blank?
  s = "Creative Commons "
  s += LICENSES.detect{|row| row.first == license}.try(:[], 1).to_s
  s
end

# I'm not psyched about having this stuff here, but it makes generating
# more compact JSON a lot easier.
include ObservationsHelper
include ActionView::Helpers::SanitizeHelper
include ActionView::Helpers::TextHelper
include ActionController::UrlWriter

# URL of this observation's image, via the ObservationsHelper mixin above.
def image_url
  observation_image_url(self)
end

# Alias kept for JSON consumers that expect the obs_image_url key.
def obs_image_url
  image_url
end

# Truncated description suitable for feeds / JSON (helper mixin above).
def short_description
  short_observation_description(self)
end

# Scientific name of the associated taxon, or nil when unavailable.
def scientific_name
  taxon.scientific_name.name if taxon && taxon.scientific_name
end

# Common name of the associated taxon, or nil when unavailable.
def common_name
  taxon.common_name.name if taxon && taxon.common_name
end

# Absolute URL for this observation, using the mailer's host settings since
# there is no request context here.
def url
  observation_url(self, ActionMailer::Base.default_url_options)
end

# Login of the observing user (flattened for JSON output).
def user_login
  user.login
end

# Recount identification agreements/disagreements and refresh the cached
# quality grade. (Body continues in the next chunk.)
def update_stats
  if taxon_id.blank?
num_agreements = 0 num_disagreements = 0 else idents = identifications.all(:include => [:observation, :taxon]) num_agreements = idents.select(&:is_agreement?).size num_disagreements = idents.select(&:is_disagreement?).size end # Kinda lame, but Observation#get_quality_grade relies on these numbers self.num_identification_agreements = num_agreements self.num_identification_disagreements = num_disagreements new_quality_grade = get_quality_grade self.quality_grade = new_quality_grade Observation.update_all( ["num_identification_agreements = ?, num_identification_disagreements = ?, quality_grade = ?", num_agreements, num_disagreements, new_quality_grade], "id = #{id}") refresh_check_lists end def random_neighbor_lat_lon(lat, lon, max_distance, radius = PLANETARY_RADIUS) latrads = lat.to_f / DEGREES_PER_RADIAN lonrads = lon.to_f / DEGREES_PER_RADIAN max_distance = max_distance / radius random_distance = Math.acos(rand * (Math.cos(max_distance) - 1) + 1) random_bearing = 2 * Math::PI * rand new_latrads = Math.asin( Math.sin(latrads)*Math.cos(random_distance) + Math.cos(latrads)*Math.sin(random_distance)*Math.cos(random_bearing) ) new_lonrads = lonrads + Math.atan2( Math.sin(random_bearing)*Math.sin(random_distance)*Math.cos(latrads), Math.cos(random_distance)-Math.sin(latrads)*Math.sin(latrads) ) [new_latrads * DEGREES_PER_RADIAN, new_lonrads * DEGREES_PER_RADIAN] end def places return nil unless georeferenced? Place.containing_lat_lng( private_latitude || latitude, private_longitude || longitude).sort_by(&:bbox_area) end def mobile? 
  # (Body of mobile?, whose `def` closes the previous chunk.) True when the
  # request's user agent matches any known mobile-app pattern.
  return false unless user_agent
  MOBILE_APP_USER_AGENT_PATTERNS.each do |pattern|
    return true if user_agent =~ pattern
  end
  false
end

# Human-readable label for the client that created this observation, based
# on the recorded user agent string.
def device_name
  return "unknown" unless user_agent
  if user_agent =~ ANDROID_APP_USER_AGENT_PATTERN
    "iNaturalist Android App"
  elsif user_agent =~ IPHONE_APP_USER_AGENT_PATTERN
    "iNaturalist iPhone App"
  else
    "web browser"
  end
end

# Store/landing URL for the client app that created this observation, or "/"
# for web browsers. Returns nil when no user agent was recorded.
def device_url
  return unless user_agent
  if user_agent =~ IPHONE_APP_USER_AGENT_PATTERN
    "http://itunes.apple.com/us/app/inaturalist/id421397028?mt=8"
  elsif user_agent =~ ANDROID_APP_USER_AGENT_PATTERN
    "https://market.android.com/details?id=org.inaturalist.android"
  else
    "/"
  end
end

# The observer's own identification, if any. Avoids a second query when the
# identifications association is already loaded.
def owners_identification
  if identifications.loaded?
    identifications.detect {|ident| ident.user_id == user_id}
  else
    identifications.first(:conditions => {:user_id => user_id})
  end
end

# Required for use of the sanitize method in
# ObservationsHelper#short_observation_description
def self.white_list_sanitizer
  @white_list_sanitizer ||= HTML::WhiteListSanitizer.new
end

# Expire cached view fragments for every observation of the given taxon
# (accepts a Taxon or an id). Used when taxon data changes.
def self.expire_components_for(taxon)
  taxon = Taxon.find_by_id(taxon) unless taxon.is_a?(Taxon)
  Observation.of(taxon).find_each do |o|
    ctrl = ActionController::Base.new
    ctrl.expire_fragment(o.component_cache_key)
    ctrl.expire_fragment(o.component_cache_key(:for_owner => true))
  end
end
end
# frozen_string_literal: true # = Observation Model # # An Observation is a mushroom seen at a certain Location and time, as # recorded by a User. This is at the core of the site. It can have any # number of Image's, Naming's, Comment's, Interest's. # # == Voting # # Voting is still in a state of flux. At the moment User's create Naming's # and other User's Vote on them. We combine the Vote's for each Naming, cache # the Vote for each Naming in the Naming. However no Naming necessarily wins # -- instead Vote's are tallied for each Synonym (see calc_consensus for full # details). Thus the accepted Name of the winning Synonym is cached in the # Observation along with its winning Vote score. # # == Location # # An Observation can belong to either a defined Location (+location+, a # Location instance) or an undefined one (+where+, just a String), and even # occasionally both (see below). To make this a little easier, you can refer # to +place_name+ instead, which returns the name of whichever is present. # # *NOTE*: We were clearly having trouble making up our mind whether or not to # set +where+ when +location+ was present. The only safe heuristic is to use # +location+ if it's present, then fall back on +where+ -- +where+ may or may # not be set (or even accurate?) if +location+ is present. # # *NOTE*: If a mushroom is seen at a mushroom fair or an herbarium, we don't # necessarily know where the mushroom actually grew. In this case, we enter # the mushroom fair / herbarium as the +place_name+ and set the special flag # +is_collection_location+ to false. # # == Attributes # # id:: Locally unique numerical id, starting at 1. # created_at:: Date/time it was first created. # updated_at:: Date/time it was last updated. # user_id:: User that created it. # when:: Date it was seen. # where:: Where it was seen (just a String). # location:: Where it was seen (Location). # lat:: Exact latitude of location. # long:: Exact longitude of location. 
# alt:: Exact altitude of location. (meters) # is_collection_location:: Is this where it was growing? # gps_hidden:: Hide exact lat/long? # name:: Consensus Name (never deprecated, never nil). # vote_cache:: Cache Vote score for the winning Name. # thumb_image:: Image to use as thumbnail (if any). # specimen:: Does User have a specimen available? # notes:: Arbitrary text supplied by User and serialized. # num_views:: Number of times it has been viewed. # last_view:: Last time it was viewed. # # ==== "Fake" attributes # place_name:: Wrapper on top of +where+ and +location+. # Handles location_format. # # == Class methods # # refresh_vote_cache:: Refresh cache for all Observation's. # define_a_location:: Update any observations using the old "where" name. # --- # no_notes:: value of observation.notes if there are no notes # no_notes_persisted:: no_notes persisted in the db # other_notes_key:: key used for general Observation notes # other_notes_part:: other_notes_key as a String # notes_part_id:: id of textarea for a Notes heading # notes_area_id_prefix prefix for id of textarea for a Notes heading # notes_part_name:: name of textarea for a Notes heading # export_formatted:: notes (or any hash) to string with marked up # captions (keys) # show_formatted:: notes (or any hash) to string with plain # captions (keys) # # == Instance methods # # comments:: List of Comment's attached to this Observation. # interests:: List of Interest's attached to this Observation. # sequences:: List of Sequences which belong to this Observation. # species_lists:: List of SpeciesList's that contain this Observation. 
# other_notes_key::            key used for general Observation notes
# other_notes_part::           other_notes_key as a String
# notes_part_id::              id of textarea for a Notes heading
# notes_part_name::            name of textarea for a Notes heading
# notes_part_value::           value for textarea for a Notes heading
# form_notes_parts::           note parts to display in create & edit form
# notes_export_formatted::     notes to string with marked up captions (keys)
# notes_show_formatted::       notes to string with plain captions (keys)
#
# ==== Name Formats
# text_name::                  Plain text.
# format_name::                Textilized. (uses name.observation_name)
# unique_text_name::           Plain text, with id added to make unique.
# unique_format_name::         Textilized, with id added to make unique.
#
# ==== Namings and Votes
# name::                       Consensus Name instance. (never nil)
# namings::                    List of Naming's proposed for this Observation.
# name_been_proposed?::        Has someone proposed this Name already?
# owner_voted?::               Has the owner voted on a given Naming?
# user_voted?::                Has a given User voted on a given Naming?
# owners_vote::                Owner's Vote on a given Naming.
# users_vote::                 A given User's Vote on a given Naming
# owners_votes::               Get all of the owner's Vote's for this Observation.
# owners_favorite?::           Is a given Naming one of the owner's favorite(s)
#                              for this Observation?
# users_favorite?::            Is a given Naming one of the given user's
#                              favorites for this Observation?
# owner_preference::           owner's unique preferred Name (if any) for this Obs
# change_vote::                Change a given User's Vote for a given Naming.
# consensus_naming::           Guess which Naming is responsible for consensus.
# calc_consensus::             Calculate and cache the consensus naming/name.
# review_status::              Decide what the review status is for this Obs.
# lookup_naming::              Return corresponding Naming instance from this
#                              Observation's namings association.
# dump_votes::                 Dump all the Naming and Vote info as known by this
#                              Observation and its associations.
#
# ==== Images
# images::                     List of Image's attached to this Observation.
# add_image:: Attach an Image. # remove_image:: Remove an Image. # # ==== Projects # can_edit?:: Check if user has permission to edit this obs. # # ==== Callbacks # add_spl_callback:: After add: update contribution. # remove_spl_callback:: After remove: update contribution. # notify_species_lists:: Before destroy: log destruction on spls. # destroy_dependents:: After destroy: destroy Naming's. # notify_users_after_change:: After save: call notify_users (if important). # notify_users_before_destroy:: Before destroy: call notify_users. # notify_users:: After save/destroy/image: send email. # announce_consensus_change:: After consensus changes: send email. # class Observation < AbstractModel belongs_to :thumb_image, class_name: "Image", foreign_key: "thumb_image_id" belongs_to :name # (used to cache consensus name) belongs_to :location belongs_to :rss_log belongs_to :user # Has to go before "has many interests" or interests will be destroyed # before it has a chance to notify the interested users of the destruction. before_destroy :notify_users_before_destroy has_many :votes has_many :comments, as: :target, dependent: :destroy has_many :interests, as: :target, dependent: :destroy has_many :sequences, dependent: :destroy has_many :external_links, dependent: :destroy # DO NOT use :dependent => :destroy -- this causes it to recalc the # consensus several times and send bogus emails!! 
has_many :namings has_many :observation_images, dependent: :destroy has_many :images, through: :observation_images has_many :project_observations, dependent: :destroy has_many :projects, through: :project_observations has_many :species_list_observations, dependent: :destroy has_many :species_lists, through: :species_list_observations, after_add: :add_spl_callback, before_remove: :remove_spl_callback has_many :observation_collection_numbers, dependent: :destroy has_many :collection_numbers, through: :observation_collection_numbers has_many :observation_herbarium_records, dependent: :destroy has_many :herbarium_records, through: :observation_herbarium_records has_many :observation_views, dependent: :destroy has_many :viewers, class_name: "User", through: :observation_views, source: :user # rubocop:disable Rails/ActiveRecordCallbacksOrder # else Rubocop says: "before_save is supposed to appear before before_destroy" # because a before_destroy must precede the has_many's before_save :cache_content_filter_data # rubocop:enable Rails/ActiveRecordCallbacksOrder after_update :notify_users_after_change before_destroy :destroy_orphaned_collection_numbers before_destroy :notify_species_lists after_destroy :destroy_dependents # Automatically (but silently) log destruction. self.autolog_events = [:destroyed] include ScopesForTimestamps # Current goal is to accept either a string or a Name instance as the first # argument. Other args: # # include_synonyms: boolean # include_subtaxa: boolean # include_all_name_proposals: boolean # of_look_alikes: boolean # # NOTE: Experimental. Tests written & commented out in PatternSearchTest. 
scope :of_name, lambda { |name, **args| # First, get a name record if string submitted name_record = Name.find_by(text_name: name) if name.is_a?(String) return unless name_record.is_a?(Name) # Filter args may add to an array of names to collect Observations names_array = [name_record] # Maybe add synonyms: #synonyms includes original name names_array = name_record.synonyms if args[:include_synonyms] # I'm thinking it's easier to pass an array of ids to the Observation query name_ids = names_array.map(&:id) # Add subtaxa to name_ids array, possibly also subtaxa of synonyms # (without modifying names_array we're iterating over) if args[:include_subtaxa] names_array.each do |n| # |= don't add duplicates name_ids |= Name.subtaxa_of(n).map(&:id) end end # Query, possibly with join to Naming. These are mutually exclusive: if args[:include_all_name_proposals] joins(:namings).where(namings: { name_id: name_ids }) elsif args[:of_look_alikes] joins(:namings).where(namings: { name_id: name_ids }). 
where.not(name: name_ids) else where(name_id: name_ids) end } scope :of_name_like, ->(name) { where(name: Name.text_name_includes(name)) } scope :with_name, -> { where.not(name: Name.unknown) } scope :without_name, -> { where(name: Name.unknown) } scope :by_user, ->(user) { where(user: user) } scope :at_location, ->(location) { where(location: location) } scope :in_region, ->(where) { where(Observation[:where].matches("%#{where}")) } scope :is_collection_location, -> { where(is_collection_location: true) } scope :not_collection_location, -> { where(is_collection_location: false) } scope :with_image, -> { where.not(thumb_image: nil) } scope :without_image, -> { where(thumb_image: nil) } scope :with_location, -> { where.not(location: nil) } scope :without_location, -> { where(location: nil) } scope :notes_include, ->(notes) { where(Observation[:notes].matches("%#{notes}%")) } scope :with_notes, -> { where.not(notes: Observation.no_notes) } scope :without_notes, -> { where(notes: Observation.no_notes) } scope :with_specimen, -> { where(specimen: true) } scope :without_specimen, -> { where(specimen: false) } scope :with_sequence, -> { joins(:sequences).distinct } scope :without_sequence, -> { missing(:sequences) } scope :comments_include, lambda { |summary| joins(:comments).where(Comment[:summary].matches("%#{summary}%")).distinct } scope :with_comments, -> { joins(:comments).distinct } scope :without_comments, -> { missing(:comments) } scope :for_project, lambda { |project| joins(:project_observations). where(ProjectObservation[:project_id] == project.id).distinct } scope :in_herbarium, lambda { |herbarium| joins(:herbarium_records). where(HerbariumRecord[:herbarium_id] == herbarium.id).distinct } scope :on_species_list, lambda { |species_list| joins(:species_list_observations). where(SpeciesListObservation[:species_list_id] == species_list.id). distinct } scope :on_species_list_of_project, lambda { |project| joins(species_lists: :project_species_lists). 
where(ProjectSpeciesList[:project_id] == project.id).distinct } # Override the default show_controller def self.show_controller "/observations" end # Override the default show_action def self.show_action "show" end def is_location? false end def is_observation? true end def can_edit?(user = User.current) Project.can_edit?(self, user) end # There is no value to keeping a collection number record after all its # observations are destroyed or removed from it. def destroy_orphaned_collection_numbers collection_numbers.each do |col_num| col_num.destroy_without_callbacks if col_num.observations == [self] end end # Cache location and name data used by content filters. def cache_content_filter_data if name && name_id_changed? self.lifeform = name.lifeform self.text_name = name.text_name self.classification = name.classification end self.where = location.name if location && location_id_changed? end # This is meant to be run nightly to ensure that the cached name # and location data used by content filters is kept in sync. def self.refresh_content_filter_caches refresh_cached_column("name", "lifeform") + refresh_cached_column("name", "text_name") + refresh_cached_column("name", "classification") + refresh_cached_column("location", "name", "where") end # Refresh a column which is a mirror of a foreign column. Fixes all the # errors, and reports which ids were broken. def self.refresh_cached_column(type, foreign, local = foreign) tbl = type.camelize.constantize.arel_table broken_caches = get_broken_caches(type, tbl, foreign, local) broken_caches.map do |id| "Fixing #{type} #{foreign} for obs ##{id}." end # Refresh the mirror of a foreign table's column in the observations table. broken_caches.update_all( Observation[local.to_sym].eq(tbl[foreign.to_sym]).to_sql ) end private_class_method def self.get_broken_caches(type, tbl, foreign, local) Observation.joins(type.to_sym). 
where(Observation[local.to_sym].not_eq(tbl[foreign.to_sym])) end # Used by Name and Location to update the observation cache when a cached # field value is changed. def self.update_cache(type, field, id, val) Observation.where("#{type}_id": id).update_all("#{field}": val) end # Check for any observations whose consensus is a misspelled name. This can # mess up the mirrors because misspelled names are "invisible", so their # classification and lifeform and such will not necessarily be kept up to # date. Fixes and returns a messages for each one that was wrong. def self.make_sure_no_observations_are_misspelled misspellings = Observation.joins(:name). where(Name[:correct_spelling_id].not_eq(nil)) misspellings. pluck(Observation[:id], Name[:text_name]).map do |id, search_name| "Observation ##{id} was misspelled: #{search_name.inspect}" end misspellings.update_all( Observation[:name_id].eq(Name[:correct_spelling_id]).to_sql ) end def update_view_stats super return if User.current.blank? @old_last_viewed_by ||= {} @old_last_viewed_by[User.current_id] = last_viewed_by(User.current) ObservationView.update_view_stats(self, User.current) end def last_viewed_by(user) observation_views.find_by(user: user)&.last_view end def old_last_viewed_by(user) @old_last_viewed_by && @old_last_viewed_by[user&.id] end ############################################################################## # # :section: Location Stuff # ############################################################################## # Abstraction over +where+ and +location.display_name+. Returns Location # name as a string, preferring +location+ over +where+ wherever both exist. # Also applies the location_format of the current user (defaults to "postal"). def place_name if location location.display_name elsif User.current_location_format == "scientific" Location.reverse_name(where) else where end end # Set +where+ or +location+, depending on whether a Location is defined with # the given +display_name+. 
(Fills the other in with +nil+.) # Adjusts for the current user's location_format as well. def place_name=(place_name) place_name = place_name.strip_squeeze where = if User.current_location_format == "scientific" Location.reverse_name(place_name) else place_name end loc = Location.find_by_name(where) if loc self.where = loc.name self.location = loc else self.where = where self.location = nil end end # Useful for forms in which date is entered in YYYYMMDD format: When form tag # helper creates input field, it reads obs.when_str and gets date in # YYYYMMDD. When form submits, assigning string to obs.when_str saves string # verbatim in @when_str, and if it is valid, sets the actual when field. # When you go to save the observation, it detects invalid format and prevents # save. When it renders form again, it notes the error, populates the input # field with the old invalid string for editing, and colors it red. def when_str @when_str || self.when.strftime("%Y-%m-%d") end def when_str=(val) @when_str = val begin self.when = val if Date.parse(val) rescue ArgumentError end val end def lat=(val) lat = Location.parse_latitude(val) lat = val if lat.nil? && val.present? self[:lat] = lat end def long=(val) long = Location.parse_longitude(val) long = val if long.nil? && val.present? self[:long] = long end def alt=(val) alt = Location.parse_altitude(val) alt = val if alt.nil? && val.present? self[:alt] = alt end # Is lat/long more than 10% outside of location extents? def lat_long_dubious? lat && location && !location.lat_long_close?(lat, long) end def place_name_and_coordinates if lat.present? && long.present? lat_string = format_coordinate(lat, "N", "S") long_string = format_coordinate(long, "E", "W") "#{place_name} (#{lat_string} #{long_string})" else place_name end end # Returns latitude if public or if the current user owns the observation. 
# The user should also be able to see hidden latitudes if they are an admin # or they are members of a project that the observation belongs to, but # those are harder to determine. This catches the majority of cases. def public_lat gps_hidden && user_id != User.current_id ? nil : lat end def public_long gps_hidden && user_id != User.current_id ? nil : long end def display_lat_long return "" unless lat "#{lat.abs}°#{lat.negative? ? "S" : "N"} " \ "#{long.abs}°#{long.negative? ? "W" : "E"}" end def display_alt return "" unless alt "#{alt.round}m" end def saved_change_to_place? saved_change_to_location_id? || saved_change_to_where? end ############################################################################## # # :section: Notes # ############################################################################## # # Notes are arbitrary text supplied by the User. # They are read and written as a serialized Hash. # # The Hash keys are: # - key(s) from the User's notes template, and # - a general Other key supplied by the system. # # Keys with empty values are not saved. # # The notes template is a comma-separated list of arbitrary keys (except for # the following which are reserved for the system: "Other", "other", etc., and # translations thereof. # Sample observation.notes # { } no notes # { Other: "rare" } generalized notes # { Cap: "red", stem: "white" } With only user-supplied keys # { Cap: "red", stem: "white", Other: rare } both user-supplied and general # # The create Observation form displays separate fields for the keys in the # following order: # - each key in the notes template, in the order listed in the template; and # - Other. # # The edit Observation form displays separate fields in the following order: # - each key in the notes template, in the order listed in the template; # - each "orphaned" key -- one which is neither in the template nor Other; # - Other. 
# # The show Observation view displays notes as follows, with Other underscored: # no notes - nothing shown # only generalized notes: # Notes: value # only user-supplied keys: # Notes: # First user key: value # Second user key: value # ... # both user-supplied and general Other keys: # Notes: # First user key: value # Second user key: value # ... # Other: value # Because keys with empty values are not saved in observation.notes, they are # not displayed with show Observaation. # # Notes are exported as shown, except that the intial "Notes:" caption is # omitted, and any markup is stripped from the keys. serialize :notes # value of observation.notes if there are no notes def self.no_notes {} end # no_notes persisted in the db def self.no_notes_persisted no_notes.to_yaml end # Key used for general Observation.notes # (notes which were not entered in a notes_template field) def self.other_notes_key :Other end # convenience wrapper around class method of same name def other_notes_key Observation.other_notes_key end # other_notes_key as a String # Makes it easy to combine with notes_template def self.other_notes_part other_notes_key.to_s end def other_notes_part Observation.other_notes_part end def other_notes notes ? notes[other_notes_key] : nil end def other_notes=(val) self.notes ||= {} notes[other_notes_key] = val end # id of view textarea for a Notes heading def self.notes_part_id(part) "#{notes_area_id_prefix}#{part.tr(" ", "_")}" end def notes_part_id(part) Observation.notes_part_id(part) end # prefix for id of textarea def self.notes_area_id_prefix "observation_notes_" end # name of view textarea for a Notes heading def self.notes_part_name(part) "observation[notes][#{part.tr(" ", "_")}]" end def notes_part_name(part) Observation.notes_part_name(part) end # value of notes part # notes: { Other: abc } # observation.notes_part_value("Other") #=> "abc" # observation.notes_part_value(:Other) #=> "abc" def notes_part_value(part) notes.blank? ? 
"" : notes[notes_normalized_key(part)] end # Change spaces to underscores in keys # notes_normalized_key("Nearby trees") #=> :Nearby_trees # notes_normalized_key(:Other) #=> :Other def notes_normalized_key(part) part.to_s.tr(" ", "_").to_sym end # Array of note parts (Strings) to display in create & edit form, # in following (display) order. Used by views. # notes_template fields # orphaned fields (field in obs, but not in notes_template, not "Other") # "Other" # Example outputs: # ["Other"] # ["orphaned_part", "Other"] # ["template_1st_part", "template_2nd_part", "Other"] # ["template_1st_part", "template_2nd_part", "orphaned_part", "Other"] def form_notes_parts(user) return user.notes_template_parts + [other_notes_part] if notes.blank? user.notes_template_parts + notes_orphaned_parts(user) + [other_notes_part] end # Array of notes parts (Strings) which are # neither in the notes_template nor the caption for other notes def notes_orphaned_parts(user) return [] if notes.blank? # Change spaces to underscores in order to subtract template parts from # stringified keys because keys have underscores instead of spaces template_parts_underscored = user.notes_template_parts.each do |part| part.tr!(" ", "_") end notes.keys.map(&:to_s) - template_parts_underscored - [other_notes_part] end # notes as a String, captions (keys) without added formstting, # omitting "Other" if it's the only caption. # notes: {} ::=> "" # notes: { Other: "abc" } ::=> "abc" # notes: { cap: "red" } ::=> "cap: red" # notes: { cap: "red", stem: , Other: "x" } ::=> "cap: red # stem: # Other: x" def self.export_formatted(notes, markup = nil) return "" if notes.blank? 
return notes[other_notes_key] if notes.keys == [other_notes_key] result = notes.each_with_object(+"") do |(key, value), str| str << "#{markup}#{key}#{markup}: #{value}\n" end result.chomp end # wraps Class method with slightly different name def notes_export_formatted Observation.export_formatted(notes) end # Notes (or other hash) as a String, captions (keys) with added formstting, # omitting "Other" if it's the only caption. # # Used in views which display notes # notes: {} => "" # notes: { Other: "abc" } => "abc" # notes: { cap: "red" } => "+cap+: red" # notes: { cap: "red", stem: , other: "x" } => "+cap+: red # +stem+: # +Other+: x" def self.show_formatted(notes) export_formatted(notes, "+") end # wraps Class method with slightly different name def notes_show_formatted Observation.show_formatted(notes) end ############################################################################## # # :section: Namings and Votes # ############################################################################## # Name in plain text with id to make it unique, never nil. def unique_text_name string_with_id(name.real_search_name) end # Textile-marked-up name, never nil. def format_name name.observation_name end # Textile-marked-up name with id to make it unique, never nil. def unique_format_name string_with_id(name.observation_name) rescue StandardError "" end # Look up the corresponding instance in our namings association. If we are # careful to keep all the operations within the tree of assocations of the # observations, we should never need to reload anything. def lookup_naming(naming) # Disable cop; test suite chokes when the following "raise" # is re-written in "exploded" style (the Rubocop default) # rubocop:disable Style/RaiseArgs namings.find { |n| n == naming } || raise(ActiveRecord::RecordNotFound, "Observation doesn't have naming with ID=#{naming.id}") # rubocop:enable Style/RaiseArgs end # Dump out the sitatuation as the observation sees it. 
Useful for debugging # problems with reloading requirements. def dump_votes namings.map do |n| str = "#{n.id} #{n.name.real_search_name}: " if n.votes.empty? str += "no votes" else votes = n.votes.map do |v| "#{v.user.login}=#{v.value}" + (v.favorite ? "(*)" : "") end str += votes.join(", ") end str end.join("\n") end # Has anyone proposed a given Name yet for this observation? def name_been_proposed?(name) namings.count { |n| n.name == name }.positive? end # Has the owner voted on a given Naming? def owner_voted?(naming) !lookup_naming(naming).users_vote(user).nil? end # Has a given User owner voted on a given Naming? def user_voted?(naming, user) !lookup_naming(naming).users_vote(user).nil? end # Get the owner's Vote on a given Naming. def owners_vote(naming) lookup_naming(naming).users_vote(user) end # Get a given User's Vote on a given Naming. def users_vote(naming, user) lookup_naming(naming).users_vote(user) end # Disable method name cops to avoid breaking 3rd parties' use of API # Returns true if a given Naming has received one of the highest positive # votes from the owner of this observation. # Note: multiple namings can return true for a given observation. # This is used to display eyes next to Proposed Name on Observation page def owners_favorite?(naming) lookup_naming(naming).users_favorite?(user) end # Returns true if a given Naming has received one of the highest positive # votes from the given user (among namings for this observation). # Note: multiple namings can return true for a given user and observation. def users_favorite?(naming, user) lookup_naming(naming).users_favorite?(user) end # All of observation.user's votes on all Namings for this Observation # Used in Observation and in tests def owners_votes user_votes(user) end # All of a given User's votes on all Namings for this Observation def user_votes(user) namings.each_with_object([]) do |n, votes| v = n.users_vote(user) votes << v if v end end # Change User's Vote for this naming. 
Automatically recalculates the # consensus for the Observation in question if anything is changed. Returns # true if something was changed. def change_vote(naming, value, user = User.current) result = false naming = lookup_naming(naming) vote = naming.users_vote(user) value = value.to_f if value == Vote.delete_vote result = delete_vote(naming, vote, user) # If no existing vote, or if changing value. elsif !vote || (vote.value != value) result = true process_real_vote(naming, vote, value, user) end # Update consensus if anything changed. calc_consensus if result result end def logged_change_vote(naming, vote) reload change_vote(naming, vote.value, naming.user) log(:log_naming_created, name: naming.format_name) end # Try to guess which Naming is responsible for the consensus. This will # always return a Naming, no matter how ambiguous, unless there are no # namings. def consensus_naming matches = find_matches return nil if matches.empty? return matches.first if matches.length == 1 best_naming = matches.first best_value = matches.first.vote_cache matches.each do |naming| next unless naming.vote_cache > best_value best_naming = naming best_value = naming.vote_cache end best_naming end def calc_consensus reload calculator = Observation::ConsensusCalculator.new(namings) best, best_val = calculator.calc old = name if name != best || vote_cache != best_val self.name = best self.vote_cache = best_val save end announce_consensus_change(old, best) if best != old end # Admin tool that refreshes the vote cache for all observations with a vote. 
def self.refresh_vote_cache
  Observation.all.find_each(&:calc_consensus)
end

##############################################################################
#
#  :section: Preferred ID
#
##############################################################################

# Observation.user's unique preferred positive Name for this observation
# Returns falsy if there's no unique preferred positive id
# Used on show_observation page
def owner_preference
  owner_uniq_favorite_name if owner_preference?
end

private

# Namings whose name matches the current consensus name exactly; if there
# are none and the consensus name has synonyms, fall back to namings for
# any of those synonyms.
def find_matches
  matches = namings.select { |n| n.name_id == name_id }
  return matches unless matches == [] && name && name.synonym_id

  namings.select { |n| name.synonyms.include?(n.name) }
end

# Format one lat/long coordinate rounded to 4 places, e.g. "12.3456°N"
# (positive values get +positive_point+, negative get +negative_point+).
def format_coordinate(value, positive_point, negative_point)
  return "#{-value.round(4)}°#{negative_point}" if value.negative?

  "#{value.round(4)}°#{positive_point}"
end

# Remove the given vote from a naming.  Returns true if a vote was actually
# deleted.  If the deleted vote was a favorite, promotes another of the
# user's votes to favorite via find_new_favorite.
def delete_vote(naming, vote, user)
  return false unless vote

  naming.votes.delete(vote)
  find_new_favorite(user) if vote.favorite
  true
end

# Mark all of the user's highest-valued positive votes on this observation
# as favorites (used after a favorite vote has been deleted).
def find_new_favorite(user)
  max = max_positive_vote(user)
  return unless max.positive?

  user_votes(user).each do |v|
    next if v.value != max || v.favorite

    v.favorite = true
    v.save
  end
end

# Highest positive vote value the user has cast on this observation's
# namings; 0 if they have cast no positive votes.
def max_positive_vote(user)
  max = 0
  user_votes(user).each do |v|
    max = v.value if v.value > max
  end
  max
end

# Create or update the user's vote on a naming with the given value,
# first adjusting the favorite flags on the user's other votes.
def process_real_vote(naming, vote, value, user)
  downgrade_totally_confident_votes(value, user)
  favorite = adjust_other_favorites(value, other_votes(vote, user))
  if !vote
    naming.votes.create!(
      user: user,
      observation: self,
      value: value,
      favorite: favorite
    )
  else
    vote.value = value
    vote.favorite = favorite
    vote.save
  end
end

def downgrade_totally_confident_votes(value, user)
  # First downgrade any existing 100% votes (if casting a 100% vote).
  v80 = Vote.next_best_vote
  return if value <= v80

  user_votes(user).each do |v|
    next unless v.value > v80

    v.value = v80
    v.save
  end
end

# Decide whether the new vote (with +value+) becomes the user's favorite,
# demoting other votes' favorite flags that lose to it and re-promoting
# whichever votes end up tied for the highest positive value.
def adjust_other_favorites(value, other_votes)
  favorite = false
  if value.positive?
    favorite = true
    other_votes.each do |v|
      if v.value > value
        favorite = false
        break
      end
      if (v.value < value) && v.favorite
        v.favorite = false
        v.save
      end
    end
  end
  # Will any other vote become a favorite?
  max_positive_value = (other_votes.map(&:value) + [value, 0]).max
  other_votes.each do |v|
    if (v.value >= max_positive_value) && !v.favorite
      v.favorite = true
      v.save
    end
  end
  favorite
end

# All of the user's votes on this observation except the given one.
def other_votes(vote, user)
  user_votes(user) - [vote]
end

# Does observation.user have a single preferred id for this observation?
def owner_preference?
  owner_uniq_favorite_vote&.value&.>= Vote.owner_id_min_confidence
end

# The owner's uniquely favorite Name, or nil if they have zero or several
# favorite votes.
def owner_uniq_favorite_name
  favs = owner_favorite_votes
  favs[0].naming.name if favs.count == 1
end

# The owner's favorite Vote, but only if it is unique.
def owner_uniq_favorite_vote
  votes = owner_favorite_votes
  return votes.first if votes.count == 1
end

# All votes on this observation by its owner which are flagged as favorites.
def owner_favorite_votes
  votes.where(user_id: user_id, favorite: true)
end

public

##############################################################################
#
#  :section: Images
#
##############################################################################

# Add Image to this Observation, making it the thumbnail if none set already.
# Saves changes. Returns Image.
def add_image(img)
  unless images.include?(img)
    images << img
    self.thumb_image = img unless thumb_image
    self.updated_at = Time.zone.now
    save
    notify_users(:added_image)
    reload
  end
  img
end

# Removes an Image from this Observation. If it's the thumbnail, changes
# thumbnail to next available Image. Saves change to thumbnail, might save
# change to Image. Returns Image.
def remove_image(img)
  if images.include?(img)
    images.delete(img)
    if thumb_image_id == img.id
      self.thumb_image = images.empty? ? nil : images.first
      save
    end
    notify_users(:removed_image)
  end
  img
end

# True if the observation has a thumbnail image, belongs to any species
# list, has any herbarium records, has a specimen, or has 100+ characters
# of notes.
def has_backup_data?
  !thumb_image_id.nil? ||
    species_lists.count.positive? ||
    herbarium_records.count.positive? ||
    specimen ||
    notes.length >= 100
end

##############################################################################
#
#  :section: Specimens
#
##############################################################################

# Clear the +specimen+ flag once the observation no longer has any
# collection numbers, herbarium records, or sequences.
def turn_off_specimen_if_no_more_records
  return unless specimen
  return unless collection_numbers.empty?
  return unless herbarium_records.empty?
  return unless sequences.empty?

  update(specimen: false)
end

# Return primary collector and their number if available, else just return
# the observer's name.
def collector_and_number
  return user.legal_name if collection_numbers.empty?

  collection_numbers.first.format_name
end

##############################################################################
#
#  :section: Callbacks
#
##############################################################################

# Callback that updates a User's contribution after adding an Observation to
# a SpeciesList.
def add_spl_callback(_obs)
  SiteData.update_contribution(:add, :species_list_entries, user_id)
end

# Callback that updates a User's contribution after removing an Observation
# from a SpeciesList.
def remove_spl_callback(_obs)
  SiteData.update_contribution(:del, :species_list_entries, user_id)
end

# Callback that logs an Observation's destruction on all of its
# SpeciesList's. (Also saves list of Namings so they can be destroyed
# by hand afterward without causing superfluous calc_consensuses.)
def notify_species_lists
  # Tell all the species lists it belonged to.
  species_lists.each do |spl|
    spl.log(:log_observation_destroyed2, name: unique_format_name,
                                         touch: false)
  end

  # Save namings so we can delete them after it's dead.
  @old_namings = namings
end

# Callback that destroys an Observation's Naming's (carefully) after the
# Observation is destroyed.
def destroy_dependents
  @old_namings.each do |naming|
    naming.observation = nil # (tells it not to recalc consensus)
    naming.destroy
  end
end

# Callback that sends email notifications after save.
def notify_users_after_change if !id || saved_change_to_when? || saved_change_to_where? || saved_change_to_location_id? || saved_change_to_notes? || saved_change_to_specimen? || saved_change_to_is_collection_location? || saved_change_to_thumb_image_id? notify_users(:change) end end # Callback that sends email notifications after destroy. def notify_users_before_destroy notify_users(:destroy) end # Send email notifications upon change to Observation. Several actions are # possible: # # added_image:: Image was added. # removed_image:: Image was removed. # change:: Other changes (e.g. to notes). # destroy:: Observation destroyed. # # obs.images << Image.create # obs.notify_users(:added_image) # def notify_users(action) sender = user recipients = [] # Send to people who have registered interest. interests.each do |interest| recipients.push(interest.user) if interest.state end # Tell masochists who want to know about all observation changes. User.where(email_observations_all: true).find_each do |user| recipients.push(user) end # Send notification to all except the person who triggered the change. recipients.uniq.each do |recipient| next if !recipient || recipient == sender case action when :destroy QueuedEmail::ObservationChange.destroy_observation(sender, recipient, self) when :change QueuedEmail::ObservationChange.change_observation(sender, recipient, self) else QueuedEmail::ObservationChange.change_images(sender, recipient, self, action) end end end # Send email notifications upon change to consensus. # # old_name = obs.name # obs.name = new_name # obs.announce_consensus_change(old_name, new_name) # def announce_consensus_change(old_name, new_name) log_consensus_change(old_name, new_name) # Change can trigger emails. owner = user sender = User.current recipients = [] # Tell owner of observation if they want. recipients.push(owner) if owner&.email_observations_consensus # Send to people who have registered interest. 
# Also remove everyone who has explicitly said they are NOT interested. interests.each do |interest| if interest.state recipients.push(interest.user) else recipients.delete(interest.user) end end # Send notification to all except the person who triggered the change. (recipients.uniq - [sender]).each do |recipient| QueuedEmail::ConsensusChange.create_email(sender, recipient, self, old_name, new_name) end end def log_consensus_change(old_name, new_name) if old_name log(:log_consensus_changed, old: old_name.display_name, new: new_name.display_name) else log(:log_consensus_created, name: new_name.display_name) end end # After defining a location, update any lists using old "where" name. def self.define_a_location(location, old_name) old_name = connection.quote(old_name) new_name = connection.quote(location.name) connection.update(%( UPDATE observations SET `where` = #{new_name}, location_id = #{location.id} WHERE `where` = #{old_name} )) end ############################################################################## protected include Validations validate :check_requirements, :check_when def check_requirements check_where check_user check_coordinates return unless @when_str begin Date.parse(@when_str) rescue ArgumentError if /^\d{4}-\d{1,2}-\d{1,2}$/.match?(@when_str) errors.add(:when_str, :runtime_date_invalid.t) else errors.add(:when_str, :runtime_date_should_be_yyyymmdd.t) end end end def check_where # Clean off leading/trailing whitespace from +where+. self.where = where.strip_squeeze if where self.where = nil if where == "" if where.to_s.blank? && !location_id self.location = Location.unknown # errors.add(:where, :validate_observation_where_missing.t) elsif where.to_s.size > 1024 errors.add(:where, :validate_observation_where_too_long.t) end end def check_user return if user || User.current errors.add(:user, :validate_observation_user_missing.t) end def check_coordinates check_latitude check_longitude check_altitude end def check_latitude if lat.blank? 
&& long.present? || lat.present? && !Location.parse_latitude(lat) errors.add(:lat, :runtime_lat_long_error.t) end end def check_longitude if lat.present? && long.blank? || long.present? && !Location.parse_longitude(long) errors.add(:long, :runtime_lat_long_error.t) end end def check_altitude return unless alt.present? && !Location.parse_altitude(alt) # As of July 5, 2020 this statement appears to be unreachable # because .to_i returns 0 for unparsable strings. errors.add(:alt, :runtime_altitude_error.t) end def check_when self.when ||= Time.zone.now validate_when(self.when, errors) end end Also accept id for Observation.of_name # frozen_string_literal: true # = Observation Model # # An Observation is a mushroom seen at a certain Location and time, as # recorded by a User. This is at the core of the site. It can have any # number of Image's, Naming's, Comment's, Interest's. # # == Voting # # Voting is still in a state of flux. At the moment User's create Naming's # and other User's Vote on them. We combine the Vote's for each Naming, cache # the Vote for each Naming in the Naming. However no Naming necessarily wins # -- instead Vote's are tallied for each Synonym (see calc_consensus for full # details). Thus the accepted Name of the winning Synonym is cached in the # Observation along with its winning Vote score. # # == Location # # An Observation can belong to either a defined Location (+location+, a # Location instance) or an undefined one (+where+, just a String), and even # occasionally both (see below). To make this a little easier, you can refer # to +place_name+ instead, which returns the name of whichever is present. # # *NOTE*: We were clearly having trouble making up our mind whether or not to # set +where+ when +location+ was present. The only safe heuristic is to use # +location+ if it's present, then fall back on +where+ -- +where+ may or may # not be set (or even accurate?) if +location+ is present. 
# # *NOTE*: If a mushroom is seen at a mushroom fair or an herbarium, we don't # necessarily know where the mushroom actually grew. In this case, we enter # the mushroom fair / herbarium as the +place_name+ and set the special flag # +is_collection_location+ to false. # # == Attributes # # id:: Locally unique numerical id, starting at 1. # created_at:: Date/time it was first created. # updated_at:: Date/time it was last updated. # user_id:: User that created it. # when:: Date it was seen. # where:: Where it was seen (just a String). # location:: Where it was seen (Location). # lat:: Exact latitude of location. # long:: Exact longitude of location. # alt:: Exact altitude of location. (meters) # is_collection_location:: Is this where it was growing? # gps_hidden:: Hide exact lat/long? # name:: Consensus Name (never deprecated, never nil). # vote_cache:: Cache Vote score for the winning Name. # thumb_image:: Image to use as thumbnail (if any). # specimen:: Does User have a specimen available? # notes:: Arbitrary text supplied by User and serialized. # num_views:: Number of times it has been viewed. # last_view:: Last time it was viewed. # # ==== "Fake" attributes # place_name:: Wrapper on top of +where+ and +location+. # Handles location_format. # # == Class methods # # refresh_vote_cache:: Refresh cache for all Observation's. # define_a_location:: Update any observations using the old "where" name. 
# --- # no_notes:: value of observation.notes if there are no notes # no_notes_persisted:: no_notes persisted in the db # other_notes_key:: key used for general Observation notes # other_notes_part:: other_notes_key as a String # notes_part_id:: id of textarea for a Notes heading # notes_area_id_prefix prefix for id of textarea for a Notes heading # notes_part_name:: name of textarea for a Notes heading # export_formatted:: notes (or any hash) to string with marked up # captions (keys) # show_formatted:: notes (or any hash) to string with plain # captions (keys) # # == Instance methods # # comments:: List of Comment's attached to this Observation. # interests:: List of Interest's attached to this Observation. # sequences:: List of Sequences which belong to this Observation. # species_lists:: List of SpeciesList's that contain this Observation. # other_notes_key:: key used for general Observation notes # other_notes_part:: other_notes_key as a String # notes_part_id:: id of textarea for a Notes heading # notes_part_name:: name of textarea for a Notes heading # notes_part_value:: value for textarea for a Notes heading # form_notes_parts:: note parts to display in create & edit form # notes_export_formatted:: notes to string with marked up captions (keys) # notes_show_formatted:: notes to string with plain captions (keys) # # ==== Name Formats # text_name:: Plain text. # format_name:: Textilized. (uses name.observation_name) # unique_text_name:: Plain text, with id added to make unique. # unique_format_name:: Textilized, with id added to make unique. # # ==== Namings and Votes # name:: Conensus Name instance. (never nil) # namings:: List of Naming's proposed for this Observation. # name_been_proposed?:: Has someone proposed this Name already? # owner_voted?:: Has the owner voted on a given Naming? # user_voted?:: Has a given User voted on a given Naming? # owners_vote:: Owner's Vote on a given Naming. 
# users_vote:: A given User's Vote on a given Naming # owners_votes:: Get all of the onwer's Vote's for this Observation. # owners_favorite?:: Is a given Naming one of the owner's favorite(s) # for this Observation? # users_favorite?:: Is a given Naming one of the given user's # favorites for this Observation? # owner_preference owners's unique prefered Name (if any) for this Obs # change_vote:: Change a given User's Vote for a given Naming. # consensus_naming:: Guess which Naming is responsible for consensus. # calc_consensus:: Calculate and cache the consensus naming/name. # review_status:: Decide what the review status is for this Obs. # lookup_naming:: Return corresponding Naming instance from this # Observation's namings association. # dump_votes:: Dump all the Naming and Vote info as known by this # Observation and its associations. # # ==== Images # images:: List of Image's attached to this Observation. # add_image:: Attach an Image. # remove_image:: Remove an Image. # # ==== Projects # can_edit?:: Check if user has permission to edit this obs. # # ==== Callbacks # add_spl_callback:: After add: update contribution. # remove_spl_callback:: After remove: update contribution. # notify_species_lists:: Before destroy: log destruction on spls. # destroy_dependents:: After destroy: destroy Naming's. # notify_users_after_change:: After save: call notify_users (if important). # notify_users_before_destroy:: Before destroy: call notify_users. # notify_users:: After save/destroy/image: send email. # announce_consensus_change:: After consensus changes: send email. # class Observation < AbstractModel belongs_to :thumb_image, class_name: "Image", foreign_key: "thumb_image_id" belongs_to :name # (used to cache consensus name) belongs_to :location belongs_to :rss_log belongs_to :user # Has to go before "has many interests" or interests will be destroyed # before it has a chance to notify the interested users of the destruction. 
before_destroy :notify_users_before_destroy has_many :votes has_many :comments, as: :target, dependent: :destroy has_many :interests, as: :target, dependent: :destroy has_many :sequences, dependent: :destroy has_many :external_links, dependent: :destroy # DO NOT use :dependent => :destroy -- this causes it to recalc the # consensus several times and send bogus emails!! has_many :namings has_many :observation_images, dependent: :destroy has_many :images, through: :observation_images has_many :project_observations, dependent: :destroy has_many :projects, through: :project_observations has_many :species_list_observations, dependent: :destroy has_many :species_lists, through: :species_list_observations, after_add: :add_spl_callback, before_remove: :remove_spl_callback has_many :observation_collection_numbers, dependent: :destroy has_many :collection_numbers, through: :observation_collection_numbers has_many :observation_herbarium_records, dependent: :destroy has_many :herbarium_records, through: :observation_herbarium_records has_many :observation_views, dependent: :destroy has_many :viewers, class_name: "User", through: :observation_views, source: :user # rubocop:disable Rails/ActiveRecordCallbacksOrder # else Rubocop says: "before_save is supposed to appear before before_destroy" # because a before_destroy must precede the has_many's before_save :cache_content_filter_data # rubocop:enable Rails/ActiveRecordCallbacksOrder after_update :notify_users_after_change before_destroy :destroy_orphaned_collection_numbers before_destroy :notify_species_lists after_destroy :destroy_dependents # Automatically (but silently) log destruction. self.autolog_events = [:destroyed] include ScopesForTimestamps # Current goal is to accept either a string or a Name instance as the first # argument. Other args: # # include_synonyms: boolean # include_subtaxa: boolean # include_all_name_proposals: boolean # of_look_alikes: boolean # # NOTE: Experimental. 
# Tests written & commented out in PatternSearchTest.
where.not(name: name_ids) else where(name_id: name_ids) end } scope :of_name_like, ->(name) { where(name: Name.text_name_includes(name)) } scope :with_name, -> { where.not(name: Name.unknown) } scope :without_name, -> { where(name: Name.unknown) } scope :by_user, ->(user) { where(user: user) } scope :at_location, ->(location) { where(location: location) } scope :in_region, ->(where) { where(Observation[:where].matches("%#{where}")) } scope :is_collection_location, -> { where(is_collection_location: true) } scope :not_collection_location, -> { where(is_collection_location: false) } scope :with_image, -> { where.not(thumb_image: nil) } scope :without_image, -> { where(thumb_image: nil) } scope :with_location, -> { where.not(location: nil) } scope :without_location, -> { where(location: nil) } scope :notes_include, ->(notes) { where(Observation[:notes].matches("%#{notes}%")) } scope :with_notes, -> { where.not(notes: Observation.no_notes) } scope :without_notes, -> { where(notes: Observation.no_notes) } scope :with_specimen, -> { where(specimen: true) } scope :without_specimen, -> { where(specimen: false) } scope :with_sequence, -> { joins(:sequences).distinct } scope :without_sequence, -> { missing(:sequences) } scope :comments_include, lambda { |summary| joins(:comments).where(Comment[:summary].matches("%#{summary}%")).distinct } scope :with_comments, -> { joins(:comments).distinct } scope :without_comments, -> { missing(:comments) } scope :for_project, lambda { |project| joins(:project_observations). where(ProjectObservation[:project_id] == project.id).distinct } scope :in_herbarium, lambda { |herbarium| joins(:herbarium_records). where(HerbariumRecord[:herbarium_id] == herbarium.id).distinct } scope :on_species_list, lambda { |species_list| joins(:species_list_observations). where(SpeciesListObservation[:species_list_id] == species_list.id). distinct } scope :on_species_list_of_project, lambda { |project| joins(species_lists: :project_species_lists). 
where(ProjectSpeciesList[:project_id] == project.id).distinct } # Override the default show_controller def self.show_controller "/observations" end # Override the default show_action def self.show_action "show" end def is_location? false end def is_observation? true end def can_edit?(user = User.current) Project.can_edit?(self, user) end # There is no value to keeping a collection number record after all its # observations are destroyed or removed from it. def destroy_orphaned_collection_numbers collection_numbers.each do |col_num| col_num.destroy_without_callbacks if col_num.observations == [self] end end # Cache location and name data used by content filters. def cache_content_filter_data if name && name_id_changed? self.lifeform = name.lifeform self.text_name = name.text_name self.classification = name.classification end self.where = location.name if location && location_id_changed? end # This is meant to be run nightly to ensure that the cached name # and location data used by content filters is kept in sync. def self.refresh_content_filter_caches refresh_cached_column("name", "lifeform") + refresh_cached_column("name", "text_name") + refresh_cached_column("name", "classification") + refresh_cached_column("location", "name", "where") end # Refresh a column which is a mirror of a foreign column. Fixes all the # errors, and reports which ids were broken. def self.refresh_cached_column(type, foreign, local = foreign) tbl = type.camelize.constantize.arel_table broken_caches = get_broken_caches(type, tbl, foreign, local) broken_caches.map do |id| "Fixing #{type} #{foreign} for obs ##{id}." end # Refresh the mirror of a foreign table's column in the observations table. broken_caches.update_all( Observation[local.to_sym].eq(tbl[foreign.to_sym]).to_sql ) end private_class_method def self.get_broken_caches(type, tbl, foreign, local) Observation.joins(type.to_sym). 
where(Observation[local.to_sym].not_eq(tbl[foreign.to_sym])) end # Used by Name and Location to update the observation cache when a cached # field value is changed. def self.update_cache(type, field, id, val) Observation.where("#{type}_id": id).update_all("#{field}": val) end # Check for any observations whose consensus is a misspelled name. This can # mess up the mirrors because misspelled names are "invisible", so their # classification and lifeform and such will not necessarily be kept up to # date. Fixes and returns a messages for each one that was wrong. def self.make_sure_no_observations_are_misspelled misspellings = Observation.joins(:name). where(Name[:correct_spelling_id].not_eq(nil)) misspellings. pluck(Observation[:id], Name[:text_name]).map do |id, search_name| "Observation ##{id} was misspelled: #{search_name.inspect}" end misspellings.update_all( Observation[:name_id].eq(Name[:correct_spelling_id]).to_sql ) end def update_view_stats super return if User.current.blank? @old_last_viewed_by ||= {} @old_last_viewed_by[User.current_id] = last_viewed_by(User.current) ObservationView.update_view_stats(self, User.current) end def last_viewed_by(user) observation_views.find_by(user: user)&.last_view end def old_last_viewed_by(user) @old_last_viewed_by && @old_last_viewed_by[user&.id] end ############################################################################## # # :section: Location Stuff # ############################################################################## # Abstraction over +where+ and +location.display_name+. Returns Location # name as a string, preferring +location+ over +where+ wherever both exist. # Also applies the location_format of the current user (defaults to "postal"). def place_name if location location.display_name elsif User.current_location_format == "scientific" Location.reverse_name(where) else where end end # Set +where+ or +location+, depending on whether a Location is defined with # the given +display_name+. 
(Fills the other in with +nil+.) # Adjusts for the current user's location_format as well. def place_name=(place_name) place_name = place_name.strip_squeeze where = if User.current_location_format == "scientific" Location.reverse_name(place_name) else place_name end loc = Location.find_by_name(where) if loc self.where = loc.name self.location = loc else self.where = where self.location = nil end end # Useful for forms in which date is entered in YYYYMMDD format: When form tag # helper creates input field, it reads obs.when_str and gets date in # YYYYMMDD. When form submits, assigning string to obs.when_str saves string # verbatim in @when_str, and if it is valid, sets the actual when field. # When you go to save the observation, it detects invalid format and prevents # save. When it renders form again, it notes the error, populates the input # field with the old invalid string for editing, and colors it red. def when_str @when_str || self.when.strftime("%Y-%m-%d") end def when_str=(val) @when_str = val begin self.when = val if Date.parse(val) rescue ArgumentError end val end def lat=(val) lat = Location.parse_latitude(val) lat = val if lat.nil? && val.present? self[:lat] = lat end def long=(val) long = Location.parse_longitude(val) long = val if long.nil? && val.present? self[:long] = long end def alt=(val) alt = Location.parse_altitude(val) alt = val if alt.nil? && val.present? self[:alt] = alt end # Is lat/long more than 10% outside of location extents? def lat_long_dubious? lat && location && !location.lat_long_close?(lat, long) end def place_name_and_coordinates if lat.present? && long.present? lat_string = format_coordinate(lat, "N", "S") long_string = format_coordinate(long, "E", "W") "#{place_name} (#{lat_string} #{long_string})" else place_name end end # Returns latitude if public or if the current user owns the observation. 
# The user should also be able to see hidden latitudes if they are an admin # or they are members of a project that the observation belongs to, but # those are harder to determine. This catches the majority of cases. def public_lat gps_hidden && user_id != User.current_id ? nil : lat end def public_long gps_hidden && user_id != User.current_id ? nil : long end def display_lat_long return "" unless lat "#{lat.abs}°#{lat.negative? ? "S" : "N"} " \ "#{long.abs}°#{long.negative? ? "W" : "E"}" end def display_alt return "" unless alt "#{alt.round}m" end def saved_change_to_place? saved_change_to_location_id? || saved_change_to_where? end ############################################################################## # # :section: Notes # ############################################################################## # # Notes are arbitrary text supplied by the User. # They are read and written as a serialized Hash. # # The Hash keys are: # - key(s) from the User's notes template, and # - a general Other key supplied by the system. # # Keys with empty values are not saved. # # The notes template is a comma-separated list of arbitrary keys (except for # the following which are reserved for the system: "Other", "other", etc., and # translations thereof. # Sample observation.notes # { } no notes # { Other: "rare" } generalized notes # { Cap: "red", stem: "white" } With only user-supplied keys # { Cap: "red", stem: "white", Other: rare } both user-supplied and general # # The create Observation form displays separate fields for the keys in the # following order: # - each key in the notes template, in the order listed in the template; and # - Other. # # The edit Observation form displays separate fields in the following order: # - each key in the notes template, in the order listed in the template; # - each "orphaned" key -- one which is neither in the template nor Other; # - Other. 
# # The show Observation view displays notes as follows, with Other underscored: # no notes - nothing shown # only generalized notes: # Notes: value # only user-supplied keys: # Notes: # First user key: value # Second user key: value # ... # both user-supplied and general Other keys: # Notes: # First user key: value # Second user key: value # ... # Other: value # Because keys with empty values are not saved in observation.notes, they are # not displayed with show Observaation. # # Notes are exported as shown, except that the intial "Notes:" caption is # omitted, and any markup is stripped from the keys. serialize :notes # value of observation.notes if there are no notes def self.no_notes {} end # no_notes persisted in the db def self.no_notes_persisted no_notes.to_yaml end # Key used for general Observation.notes # (notes which were not entered in a notes_template field) def self.other_notes_key :Other end # convenience wrapper around class method of same name def other_notes_key Observation.other_notes_key end # other_notes_key as a String # Makes it easy to combine with notes_template def self.other_notes_part other_notes_key.to_s end def other_notes_part Observation.other_notes_part end def other_notes notes ? notes[other_notes_key] : nil end def other_notes=(val) self.notes ||= {} notes[other_notes_key] = val end # id of view textarea for a Notes heading def self.notes_part_id(part) "#{notes_area_id_prefix}#{part.tr(" ", "_")}" end def notes_part_id(part) Observation.notes_part_id(part) end # prefix for id of textarea def self.notes_area_id_prefix "observation_notes_" end # name of view textarea for a Notes heading def self.notes_part_name(part) "observation[notes][#{part.tr(" ", "_")}]" end def notes_part_name(part) Observation.notes_part_name(part) end # value of notes part # notes: { Other: abc } # observation.notes_part_value("Other") #=> "abc" # observation.notes_part_value(:Other) #=> "abc" def notes_part_value(part) notes.blank? ? 
"" : notes[notes_normalized_key(part)] end # Change spaces to underscores in keys # notes_normalized_key("Nearby trees") #=> :Nearby_trees # notes_normalized_key(:Other) #=> :Other def notes_normalized_key(part) part.to_s.tr(" ", "_").to_sym end # Array of note parts (Strings) to display in create & edit form, # in following (display) order. Used by views. # notes_template fields # orphaned fields (field in obs, but not in notes_template, not "Other") # "Other" # Example outputs: # ["Other"] # ["orphaned_part", "Other"] # ["template_1st_part", "template_2nd_part", "Other"] # ["template_1st_part", "template_2nd_part", "orphaned_part", "Other"] def form_notes_parts(user) return user.notes_template_parts + [other_notes_part] if notes.blank? user.notes_template_parts + notes_orphaned_parts(user) + [other_notes_part] end # Array of notes parts (Strings) which are # neither in the notes_template nor the caption for other notes def notes_orphaned_parts(user) return [] if notes.blank? # Change spaces to underscores in order to subtract template parts from # stringified keys because keys have underscores instead of spaces template_parts_underscored = user.notes_template_parts.each do |part| part.tr!(" ", "_") end notes.keys.map(&:to_s) - template_parts_underscored - [other_notes_part] end # notes as a String, captions (keys) without added formstting, # omitting "Other" if it's the only caption. # notes: {} ::=> "" # notes: { Other: "abc" } ::=> "abc" # notes: { cap: "red" } ::=> "cap: red" # notes: { cap: "red", stem: , Other: "x" } ::=> "cap: red # stem: # Other: x" def self.export_formatted(notes, markup = nil) return "" if notes.blank? 
return notes[other_notes_key] if notes.keys == [other_notes_key] result = notes.each_with_object(+"") do |(key, value), str| str << "#{markup}#{key}#{markup}: #{value}\n" end result.chomp end # wraps Class method with slightly different name def notes_export_formatted Observation.export_formatted(notes) end # Notes (or other hash) as a String, captions (keys) with added formstting, # omitting "Other" if it's the only caption. # # Used in views which display notes # notes: {} => "" # notes: { Other: "abc" } => "abc" # notes: { cap: "red" } => "+cap+: red" # notes: { cap: "red", stem: , other: "x" } => "+cap+: red # +stem+: # +Other+: x" def self.show_formatted(notes) export_formatted(notes, "+") end # wraps Class method with slightly different name def notes_show_formatted Observation.show_formatted(notes) end ############################################################################## # # :section: Namings and Votes # ############################################################################## # Name in plain text with id to make it unique, never nil. def unique_text_name string_with_id(name.real_search_name) end # Textile-marked-up name, never nil. def format_name name.observation_name end # Textile-marked-up name with id to make it unique, never nil. def unique_format_name string_with_id(name.observation_name) rescue StandardError "" end # Look up the corresponding instance in our namings association. If we are # careful to keep all the operations within the tree of assocations of the # observations, we should never need to reload anything. def lookup_naming(naming) # Disable cop; test suite chokes when the following "raise" # is re-written in "exploded" style (the Rubocop default) # rubocop:disable Style/RaiseArgs namings.find { |n| n == naming } || raise(ActiveRecord::RecordNotFound, "Observation doesn't have naming with ID=#{naming.id}") # rubocop:enable Style/RaiseArgs end # Dump out the sitatuation as the observation sees it. 
Useful for debugging # problems with reloading requirements. def dump_votes namings.map do |n| str = "#{n.id} #{n.name.real_search_name}: " if n.votes.empty? str += "no votes" else votes = n.votes.map do |v| "#{v.user.login}=#{v.value}" + (v.favorite ? "(*)" : "") end str += votes.join(", ") end str end.join("\n") end # Has anyone proposed a given Name yet for this observation? def name_been_proposed?(name) namings.count { |n| n.name == name }.positive? end # Has the owner voted on a given Naming? def owner_voted?(naming) !lookup_naming(naming).users_vote(user).nil? end # Has a given User owner voted on a given Naming? def user_voted?(naming, user) !lookup_naming(naming).users_vote(user).nil? end # Get the owner's Vote on a given Naming. def owners_vote(naming) lookup_naming(naming).users_vote(user) end # Get a given User's Vote on a given Naming. def users_vote(naming, user) lookup_naming(naming).users_vote(user) end # Disable method name cops to avoid breaking 3rd parties' use of API # Returns true if a given Naming has received one of the highest positive # votes from the owner of this observation. # Note: multiple namings can return true for a given observation. # This is used to display eyes next to Proposed Name on Observation page def owners_favorite?(naming) lookup_naming(naming).users_favorite?(user) end # Returns true if a given Naming has received one of the highest positive # votes from the given user (among namings for this observation). # Note: multiple namings can return true for a given user and observation. def users_favorite?(naming, user) lookup_naming(naming).users_favorite?(user) end # All of observation.user's votes on all Namings for this Observation # Used in Observation and in tests def owners_votes user_votes(user) end # All of a given User's votes on all Namings for this Observation def user_votes(user) namings.each_with_object([]) do |n, votes| v = n.users_vote(user) votes << v if v end end # Change User's Vote for this naming. 
Automatically recalculates the # consensus for the Observation in question if anything is changed. Returns # true if something was changed. def change_vote(naming, value, user = User.current) result = false naming = lookup_naming(naming) vote = naming.users_vote(user) value = value.to_f if value == Vote.delete_vote result = delete_vote(naming, vote, user) # If no existing vote, or if changing value. elsif !vote || (vote.value != value) result = true process_real_vote(naming, vote, value, user) end # Update consensus if anything changed. calc_consensus if result result end def logged_change_vote(naming, vote) reload change_vote(naming, vote.value, naming.user) log(:log_naming_created, name: naming.format_name) end # Try to guess which Naming is responsible for the consensus. This will # always return a Naming, no matter how ambiguous, unless there are no # namings. def consensus_naming matches = find_matches return nil if matches.empty? return matches.first if matches.length == 1 best_naming = matches.first best_value = matches.first.vote_cache matches.each do |naming| next unless naming.vote_cache > best_value best_naming = naming best_value = naming.vote_cache end best_naming end def calc_consensus reload calculator = Observation::ConsensusCalculator.new(namings) best, best_val = calculator.calc old = name if name != best || vote_cache != best_val self.name = best self.vote_cache = best_val save end announce_consensus_change(old, best) if best != old end # Admin tool that refreshes the vote cache for all observations with a vote. 
# Recalculate consensus (and thereby vote caches) for every observation.
# NOTE(review): iterates the whole table; intended as an offline admin task.
def self.refresh_vote_cache
  Observation.all.find_each(&:calc_consensus)
end

##############################################################################
#
#  :section: Preferred ID
#
##############################################################################

# Observation.user's unique preferred positive Name for this observation
# Returns falsy if there's no unique preferred positive id
# Used on show_observation page
def owner_preference
  owner_uniq_favorite_name if owner_preference?
end

private

# Namings whose name matches the consensus name; falls back to synonyms of
# the consensus name when there is no direct match but the name has synonyms.
def find_matches
  matches = namings.select { |n| n.name_id == name_id }
  return matches unless matches == [] && name && name.synonym_id

  namings.select { |n| name.synonyms.include?(n.name) }
end

# Format a lat/long/alt component with 4-decimal rounding and the
# appropriate compass point, e.g. 12.3456°N / 12.3456°S.
def format_coordinate(value, positive_point, negative_point)
  return "#{-value.round(4)}°#{negative_point}" if value.negative?

  "#{value.round(4)}°#{positive_point}"
end

# Remove an existing vote from a naming; if it was a favorite, promote the
# user's next-best vote to favorite. Returns true if a vote was deleted.
def delete_vote(naming, vote, user)
  return false unless vote

  naming.votes.delete(vote)
  find_new_favorite(user) if vote.favorite
  true
end

# Mark all of the user's votes at their current maximum positive value as
# favorites (used after the previous favorite vote was deleted).
def find_new_favorite(user)
  max = max_positive_vote(user)
  return unless max.positive?

  user_votes(user).each do |v|
    next if v.value != max || v.favorite

    v.favorite = true
    v.save
  end
end

# Highest positive vote value this user has cast on this observation
# (0 if none are positive).
def max_positive_vote(user)
  max = 0
  user_votes(user).each do |v|
    max = v.value if v.value > max
  end
  max
end

# Create or update a real (non-delete) vote, first downgrading any other
# 100% votes and rebalancing which of the user's votes are favorites.
def process_real_vote(naming, vote, value, user)
  downgrade_totally_confident_votes(value, user)
  favorite = adjust_other_favorites(value, other_votes(vote, user))
  if !vote
    naming.votes.create!(
      user: user,
      observation: self,
      value: value,
      favorite: favorite
    )
  else
    vote.value = value
    vote.favorite = favorite
    vote.save
  end
end

# Only one 100% vote is allowed per user per observation: casting a new one
# knocks any existing votes above the next-best level down to that level.
def downgrade_totally_confident_votes(value, user)
  # First downgrade any existing 100% votes (if casting a 100% vote).
  v80 = Vote.next_best_vote
  return if value <= v80

  user_votes(user).each do |v|
    next unless v.value > v80

    v.value = v80
    v.save
  end
end

# Decide whether the new vote (value) becomes a favorite and fix up the
# favorite flags on the user's other votes. Returns the favorite flag for
# the new vote. NOTE(review): order-sensitive — the first pass demotes
# strictly-lower favorites and bails if any other vote is strictly higher;
# the second pass re-promotes any non-favorite vote at the max positive
# value.
def adjust_other_favorites(value, other_votes)
  favorite = false
  if value.positive?
    favorite = true
    other_votes.each do |v|
      if v.value > value
        favorite = false
        break
      end
      if (v.value < value) && v.favorite
        v.favorite = false
        v.save
      end
    end
  end

  # Will any other vote become a favorite?
  max_positive_value = (other_votes.map(&:value) + [value, 0]).max
  other_votes.each do |v|
    if (v.value >= max_positive_value) && !v.favorite
      v.favorite = true
      v.save
    end
  end
  favorite
end

# All of the user's votes on this observation except the given one.
def other_votes(vote, user)
  user_votes(user) - [vote]
end

# Does observation.user have a single preferred id for this observation?
def owner_preference?
  owner_uniq_favorite_vote&.value&.>= Vote.owner_id_min_confidence
end

# The owner's uniquely-favorited Name, or nil if there isn't exactly one.
def owner_uniq_favorite_name
  favs = owner_favorite_votes
  favs[0].naming.name if favs.count == 1
end

# The owner's uniquely-favorited Vote, or nil if there isn't exactly one.
def owner_uniq_favorite_vote
  votes = owner_favorite_votes
  return votes.first if votes.count == 1
end

# All of the owner's favorite votes on this observation.
def owner_favorite_votes
  votes.where(user_id: user_id, favorite: true)
end

public

##############################################################################
#
#  :section: Images
#
##############################################################################

# Add Image to this Observation, making it the thumbnail if none set already.
# Saves changes. Returns Image.
def add_image(img)
  unless images.include?(img)
    images << img
    self.thumb_image = img unless thumb_image
    self.updated_at = Time.zone.now
    save
    notify_users(:added_image)
    reload
  end
  img
end

# Removes an Image from this Observation. If it's the thumbnail, changes
# thumbnail to next available Image. Saves change to thumbnail, might save
# change to Image. Returns Image.
def remove_image(img)
  if images.include?(img)
    images.delete(img)
    if thumb_image_id == img.id
      self.thumb_image = images.empty? ? nil : images.first
      save
    end
    notify_users(:removed_image)
  end
  img
end

# True when the observation carries supporting data (thumbnail, lists,
# herbarium records, a specimen, or substantial notes).
def has_backup_data?
  !thumb_image_id.nil? ||
    species_lists.count.positive? ||
    herbarium_records.count.positive? ||
    specimen ||
    notes.length >= 100
end

##############################################################################
#
#  :section: Specimens
#
##############################################################################

# Clear the specimen flag once no records (collection numbers, herbarium
# records, sequences) remain to back it up.
def turn_off_specimen_if_no_more_records
  return unless specimen
  return unless collection_numbers.empty?
  return unless herbarium_records.empty?
  return unless sequences.empty?

  update(specimen: false)
end

# Return primary collector and their number if available, else just return
# the observer's name.
def collector_and_number
  return user.legal_name if collection_numbers.empty?

  collection_numbers.first.format_name
end

##############################################################################
#
#  :section: Callbacks
#
##############################################################################

# Callback that updates a User's contribution after adding an Observation to
# a SpeciesList.
def add_spl_callback(_obs)
  SiteData.update_contribution(:add, :species_list_entries, user_id)
end

# Callback that updates a User's contribution after removing an Observation
# from a SpeciesList.
def remove_spl_callback(_obs)
  SiteData.update_contribution(:del, :species_list_entries, user_id)
end

# Callback that logs an Observation's destruction on all of its
# SpeciesList's. (Also saves list of Namings so they can be destroyed
# by hand afterward without causing superfluous calc_consensuses.)
def notify_species_lists
  # Tell all the species lists it belonged to.
  species_lists.each do |spl|
    spl.log(:log_observation_destroyed2, name: unique_format_name,
            touch: false)
  end

  # Save namings so we can delete them after it's dead.
  @old_namings = namings
end

# Callback that destroys an Observation's Naming's (carefully) after the
# Observation is destroyed.
def destroy_dependents
  @old_namings.each do |naming|
    naming.observation = nil # (tells it not to recalc consensus)
    naming.destroy
  end
end

# Callback that sends email notifications after save.
# Notify interested users after a save that created the record or changed
# any user-visible field (date, place, notes, specimen, thumbnail, etc.).
def notify_users_after_change
  if !id ||
     saved_change_to_when? ||
     saved_change_to_where? ||
     saved_change_to_location_id? ||
     saved_change_to_notes? ||
     saved_change_to_specimen? ||
     saved_change_to_is_collection_location? ||
     saved_change_to_thumb_image_id?
    notify_users(:change)
  end
end

# Callback that sends email notifications after destroy.
def notify_users_before_destroy
  notify_users(:destroy)
end

# Send email notifications upon change to Observation. Several actions are
# possible:
#
# added_image::   Image was added.
# removed_image:: Image was removed.
# change::        Other changes (e.g. to notes).
# destroy::       Observation destroyed.
#
#   obs.images << Image.create
#   obs.notify_users(:added_image)
#
def notify_users(action)
  sender = user
  recipients = []

  # Send to people who have registered interest.
  interests.each do |interest|
    recipients.push(interest.user) if interest.state
  end

  # Tell masochists who want to know about all observation changes.
  User.where(email_observations_all: true).find_each do |user|
    recipients.push(user)
  end

  # Send notification to all except the person who triggered the change.
  recipients.uniq.each do |recipient|
    next if !recipient || recipient == sender

    case action
    when :destroy
      QueuedEmail::ObservationChange.destroy_observation(sender, recipient, self)
    when :change
      QueuedEmail::ObservationChange.change_observation(sender, recipient, self)
    else
      QueuedEmail::ObservationChange.change_images(sender, recipient, self, action)
    end
  end
end

# Send email notifications upon change to consensus.
#
#   old_name = obs.name
#   obs.name = new_name
#   obs.announce_consensus_change(old_name, new_name)
#
def announce_consensus_change(old_name, new_name)
  log_consensus_change(old_name, new_name)

  # Change can trigger emails.
  owner = user
  sender = User.current
  recipients = []

  # Tell owner of observation if they want.
  recipients.push(owner) if owner&.email_observations_consensus

  # Send to people who have registered interest.
  # Also remove everyone who has explicitly said they are NOT interested.
  interests.each do |interest|
    if interest.state
      recipients.push(interest.user)
    else
      recipients.delete(interest.user)
    end
  end

  # Send notification to all except the person who triggered the change.
  (recipients.uniq - [sender]).each do |recipient|
    QueuedEmail::ConsensusChange.create_email(sender, recipient, self,
                                              old_name, new_name)
  end
end

# Add a consensus-changed (or consensus-created, if there was no previous
# name) entry to the observation's log.
def log_consensus_change(old_name, new_name)
  if old_name
    log(:log_consensus_changed, old: old_name.display_name,
        new: new_name.display_name)
  else
    log(:log_consensus_created, name: new_name.display_name)
  end
end

# After defining a location, update any lists using old "where" name.
# NOTE(review): raw SQL with quoted values via connection.quote; bypasses
# ActiveRecord callbacks/validations by design.
def self.define_a_location(location, old_name)
  old_name = connection.quote(old_name)
  new_name = connection.quote(location.name)
  connection.update(%(
    UPDATE observations
    SET `where` = #{new_name}, location_id = #{location.id}
    WHERE `where` = #{old_name}
  ))
end

##############################################################################

protected

include Validations

validate :check_requirements, :check_when

# Master validation: checks where/user/coordinates, then verifies any
# user-supplied date string parses and is in YYYY-MM-DD form.
def check_requirements
  check_where
  check_user
  check_coordinates
  return unless @when_str

  begin
    Date.parse(@when_str)
  rescue ArgumentError
    # Right shape but unparsable (e.g. month 13) vs. wrong shape entirely.
    if /^\d{4}-\d{1,2}-\d{1,2}$/.match?(@when_str)
      errors.add(:when_str, :runtime_date_invalid.t)
    else
      errors.add(:when_str, :runtime_date_should_be_yyyymmdd.t)
    end
  end
end

# Normalize +where+, default to the unknown Location when blank, and reject
# over-long strings.
def check_where
  # Clean off leading/trailing whitespace from +where+.
  self.where = where.strip_squeeze if where
  self.where = nil if where == ""

  if where.to_s.blank? && !location_id
    self.location = Location.unknown
    # errors.add(:where, :validate_observation_where_missing.t)
  elsif where.to_s.size > 1024
    errors.add(:where, :validate_observation_where_too_long.t)
  end
end

# Require a user unless one can be inferred from User.current.
def check_user
  return if user || User.current

  errors.add(:user, :validate_observation_user_missing.t)
end

# Validate lat/long/alt as a group.
def check_coordinates
  check_latitude
  check_longitude
  check_altitude
end

# Latitude must be present when longitude is, and must parse when present.
def check_latitude
  if lat.blank? && long.present? ||
     lat.present? && !Location.parse_latitude(lat)
    errors.add(:lat, :runtime_lat_long_error.t)
  end
end

# Longitude must be present when latitude is, and must parse when present.
def check_longitude
  if lat.present? && long.blank? ||
     long.present? && !Location.parse_longitude(long)
    errors.add(:long, :runtime_lat_long_error.t)
  end
end

# Altitude must parse when present.
def check_altitude
  return unless alt.present? && !Location.parse_altitude(alt)

  # As of July 5, 2020 this statement appears to be unreachable
  # because .to_i returns 0 for unparsable strings.
  errors.add(:alt, :runtime_altitude_error.t)
end

# Default +when+ to now, then run the shared date validations.
def check_when
  self.when ||= Time.zone.now
  validate_when(self.when, errors)
end
end
#encoding: utf-8 class Observation < ActiveRecord::Base include ActsAsElasticModel include ObservationSearch include ActsAsUUIDable has_subscribers :to => { :comments => {:notification => "activity", :include_owner => true}, :identifications => {:notification => "activity", :include_owner => true} } notifies_subscribers_of :user, :notification => "created_observations", :queue_if => lambda { |observation| !observation.bulk_import } notifies_subscribers_of :public_places, :notification => "new_observations", :on => :create, :queue_if => lambda {|observation| observation.georeferenced? && !observation.bulk_import }, :if => lambda {|observation, place, subscription| return false unless observation.georeferenced? return true if subscription.taxon_id.blank? return false if observation.taxon.blank? observation.taxon.ancestor_ids.include?(subscription.taxon_id) } notifies_subscribers_of :taxon_and_ancestors, :notification => "new_observations", :queue_if => lambda {|observation| !observation.taxon_id.blank? && !observation.bulk_import}, :if => lambda {|observation, taxon, subscription| return true if observation.taxon_id == taxon.id return false if observation.taxon.blank? observation.taxon.ancestor_ids.include?(subscription.resource_id) } notifies_users :mentioned_users, on: :save, notification: "mention" acts_as_taggable acts_as_votable acts_as_spammable fields: [ :description ], comment_type: "item-description", automated: false include Ambidextrous # Set to true if you want to skip the expensive updating of all the user's # lists after saving. 
Useful if you're saving many observations at once and # you want to update lists in a batch attr_accessor :skip_refresh_lists, :skip_refresh_check_lists, :skip_identifications, :bulk_import, :skip_indexing # Set if you need to set the taxon from a name separate from the species # guess attr_accessor :taxon_name # licensing extras attr_accessor :make_license_default attr_accessor :make_licenses_same # coordinate system attr_accessor :coordinate_system attr_accessor :geo_x attr_accessor :geo_y attr_accessor :owners_identification_from_vision_requested def captive_flag @captive_flag ||= !quality_metrics.detect{|qm| qm.user_id == user_id && qm.metric == QualityMetric::WILD && !qm.agree? }.nil? end def captive_flag=(v) @captive_flag = v end attr_accessor :force_quality_metrics # custom project field errors attr_accessor :custom_field_errors MASS_ASSIGNABLE_ATTRIBUTES = [:make_license_default, :make_licenses_same] M_TO_OBSCURE_THREATENED_TAXA = 10000 OUT_OF_RANGE_BUFFER = 5000 # meters PLANETARY_RADIUS = 6370997.0 DEGREES_PER_RADIAN = 57.2958 FLOAT_REGEX = /[-+]?[0-9]*\.?[0-9]+/ COORDINATE_REGEX = /[^\d\,]*?(#{FLOAT_REGEX})[^\d\,]*?/ LAT_LON_SEPARATOR_REGEX = /[\,\s]\s*/ LAT_LON_REGEX = /#{COORDINATE_REGEX}#{LAT_LON_SEPARATOR_REGEX}#{COORDINATE_REGEX}/ COORDINATE_UNCERTAINTY_CELL_SIZE = 0.2 OPEN = "open" PRIVATE = "private" OBSCURED = "obscured" GEOPRIVACIES = [OBSCURED, PRIVATE] GEOPRIVACY_DESCRIPTIONS = { OPEN => :open_description, OBSCURED => :obscured_description, PRIVATE => :private_description } RESEARCH_GRADE = "research" CASUAL = "casual" NEEDS_ID = "needs_id" QUALITY_GRADES = [CASUAL, NEEDS_ID, RESEARCH_GRADE] COMMUNITY_TAXON_SCORE_CUTOFF = (2.0 / 3) LICENSES = [ ["CC0", :cc_0_name, :cc_0_description], ["CC-BY", :cc_by_name, :cc_by_description], ["CC-BY-NC", :cc_by_nc_name, :cc_by_nc_description], ["CC-BY-SA", :cc_by_sa_name, :cc_by_sa_description], ["CC-BY-ND", :cc_by_nd_name, :cc_by_nd_description], ["CC-BY-NC-SA",:cc_by_nc_sa_name, :cc_by_nc_sa_description], 
["CC-BY-NC-ND", :cc_by_nc_nd_name, :cc_by_nc_nd_description] ] LICENSE_CODES = LICENSES.map{|row| row.first} LICENSES.each do |code, name, description| const_set code.gsub(/\-/, '_'), code end PREFERRED_LICENSES = [CC_BY, CC_BY_NC, CC0] CSV_COLUMNS = [ "id", "species_guess", "scientific_name", "common_name", "iconic_taxon_name", "taxon_id", "id_please", "num_identification_agreements", "num_identification_disagreements", "observed_on_string", "observed_on", "time_observed_at", "time_zone", "place_guess", "latitude", "longitude", "positional_accuracy", "private_place_guess", "private_latitude", "private_longitude", "private_positional_accuracy", "geoprivacy", "coordinates_obscured", "positioning_method", "positioning_device", "out_of_range", "user_id", "user_login", "created_at", "updated_at", "quality_grade", "license", "url", "image_url", "tag_list", "description", "oauth_application_id", "captive_cultivated" ] BASIC_COLUMNS = [ "id", "observed_on_string", "observed_on", "time_observed_at", "time_zone", "out_of_range", "user_id", "user_login", "created_at", "updated_at", "quality_grade", "license", "url", "image_url", "tag_list", "description", "id_please", "num_identification_agreements", "num_identification_disagreements", "captive_cultivated", "oauth_application_id" ] GEO_COLUMNS = [ "place_guess", "latitude", "longitude", "positional_accuracy", "private_place_guess", "private_latitude", "private_longitude", "private_positional_accuracy", "geoprivacy", "coordinates_obscured", "positioning_method", "positioning_device", "place_town_name", "place_county_name", "place_state_name", "place_country_name" ] TAXON_COLUMNS = [ "species_guess", "scientific_name", "common_name", "iconic_taxon_name", "taxon_id" ] EXTRA_TAXON_COLUMNS = %w( kingdom phylum subphylum superclass class subclass superorder order suborder superfamily family subfamily supertribe tribe subtribe genus genushybrid species hybrid subspecies variety form ).map{|r| "taxon_#{r}_name"}.compact 
ALL_EXPORT_COLUMNS = (CSV_COLUMNS + BASIC_COLUMNS + GEO_COLUMNS + TAXON_COLUMNS + EXTRA_TAXON_COLUMNS).uniq WGS84_PROJ4 = "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs" ALLOWED_DESCRIPTION_TAGS = %w(a abbr acronym b blockquote br cite em i img pre s small strike strong sub sup) preference :community_taxon, :boolean, :default => nil belongs_to :user, :counter_cache => true belongs_to :taxon belongs_to :community_taxon, :class_name => 'Taxon' belongs_to :iconic_taxon, :class_name => 'Taxon', :foreign_key => 'iconic_taxon_id' belongs_to :oauth_application belongs_to :site, :inverse_of => :observations has_many :observation_photos, -> { order("id asc") }, :dependent => :destroy, :inverse_of => :observation has_many :photos, :through => :observation_photos # note last_observation and first_observation on listed taxa will get reset # by CheckList.refresh_with_observation has_many :listed_taxa, :foreign_key => 'last_observation_id' has_many :first_listed_taxa, :class_name => "ListedTaxon", :foreign_key => 'first_observation_id' has_many :first_check_listed_taxa, -> { where("listed_taxa.place_id IS NOT NULL") }, :class_name => "ListedTaxon", :foreign_key => 'first_observation_id' has_many :comments, :as => :parent, :dependent => :destroy has_many :annotations, as: :resource, dependent: :destroy has_many :identifications, :dependent => :destroy has_many :project_observations, :dependent => :destroy has_many :project_observations_with_changes, -> { joins(:model_attribute_changes) }, class_name: "ProjectObservation" has_many :project_invitations, :dependent => :destroy has_many :projects, :through => :project_observations has_many :quality_metrics, :dependent => :destroy has_many :observation_field_values, -> { order("id asc") }, :dependent => :destroy, :inverse_of => :observation has_many :observation_fields, :through => :observation_field_values has_many :observation_links has_and_belongs_to_many :posts has_many :observation_sounds, :dependent => :destroy, :inverse_of 
=> :observation has_many :sounds, :through => :observation_sounds has_many :observations_places, :dependent => :destroy has_many :observation_reviews, :dependent => :destroy has_many :confirmed_reviews, -> { where("observation_reviews.reviewed = true") }, class_name: "ObservationReview" FIELDS_TO_SEARCH_ON = %w(names tags description place) NON_ELASTIC_ATTRIBUTES = %w(establishment_means em) accepts_nested_attributes_for :observation_field_values, :allow_destroy => true, :reject_if => lambda { |attrs| attrs[:value].blank? } ## # Validations # validates_presence_of :user_id validate :must_be_in_the_past, :must_not_be_a_range validates_numericality_of :latitude, :allow_blank => true, :less_than_or_equal_to => 90, :greater_than_or_equal_to => -90 validates_numericality_of :longitude, :allow_blank => true, :less_than_or_equal_to => 180, :greater_than_or_equal_to => -180 validates_length_of :observed_on_string, :maximum => 256, :allow_blank => true validates_length_of :species_guess, :maximum => 256, :allow_blank => true validates_length_of :place_guess, :maximum => 256, :allow_blank => true validate do # This should be a validation on cached_tag_list, but acts_as_taggable seems # to set that after the validations run if tag_list.join(", ").length > 750 errors.add( :tag_list, "must be under 750 characters total, no more than 256 characters per tag" ) end end validate do unless coordinate_system.blank? begin RGeo::CoordSys::Proj4.new( coordinate_system ) rescue RGeo::Error::UnsupportedOperation errors.add( :coordinate_system, "is not a valid Proj4 string" ) end end end # See /config/locale/en.yml for field labels for `geo_x` and `geo_y` validates_numericality_of :geo_x, :allow_blank => true, :message => "should be a number" validates_numericality_of :geo_y, :allow_blank => true, :message => "should be a number" validates_presence_of :geo_x, :if => proc {|o| o.geo_y.present? } validates_presence_of :geo_y, :if => proc {|o| o.geo_x.present? 
} before_validation :munge_observed_on_with_chronic, :set_time_zone, :set_time_in_time_zone, :set_coordinates before_save :strip_species_guess, :set_taxon_from_species_guess, :set_taxon_from_taxon_name, :keep_old_taxon_id, :set_latlon_from_place_guess, :reset_private_coordinates_if_coordinates_changed, :normalize_geoprivacy, :set_license, :trim_user_agent, :update_identifications, :set_community_taxon_before_save, :set_taxon_from_community_taxon, :obscure_coordinates_for_geoprivacy, :obscure_coordinates_for_threatened_taxa, :set_geom_from_latlon, :set_place_guess_from_latlon, :obscure_place_guess, :set_iconic_taxon before_update :set_quality_grade after_save :refresh_lists, :refresh_check_lists, :update_out_of_range_later, :update_default_license, :update_all_licenses, :update_taxon_counter_caches, :update_quality_metrics, :update_public_positional_accuracy, :update_mappable, :set_captive, :update_observations_places, :set_taxon_photo, :create_observation_review after_create :set_uri before_destroy :keep_old_taxon_id after_destroy :refresh_lists_after_destroy, :refresh_check_lists, :update_taxon_counter_caches, :create_deleted_observation ## # Named scopes # # Area scopes # scope :in_bounding_box, lambda { |swlat, swlng, nelat, nelng| scope :in_bounding_box, lambda {|*args| swlat, swlng, nelat, nelng, options = args options ||= {} if options[:private] geom_col = "observations.private_geom" lat_col = "observations.private_latitude" lon_col = "observations.private_longitude" else geom_col = "observations.geom" lat_col = "observations.latitude" lon_col = "observations.longitude" end # resort to lat/lon cols for date-line spanning boxes if swlng.to_f > 0 && nelng.to_f < 0 where("#{lat_col} > ? AND #{lat_col} < ? AND (#{lon_col} > ? 
OR #{lon_col} < ?)", swlat.to_f, nelat.to_f, swlng.to_f, nelng.to_f) else where("ST_Intersects( ST_MakeBox2D(ST_Point(#{swlng.to_f}, #{swlat.to_f}), ST_Point(#{nelng.to_f}, #{nelat.to_f})), #{geom_col} )") end } do def distinct_taxon group("taxon_id").where("taxon_id IS NOT NULL").includes(:taxon) end end scope :in_place, lambda {|place| place_id = if place.is_a?(Place) place.id elsif place.to_i == 0 begin Place.find(place).try(&:id) rescue ActiveRecord::RecordNotFound -1 end else place.to_i end joins("JOIN place_geometries ON place_geometries.place_id = #{place_id}"). where("ST_Intersects(place_geometries.geom, observations.private_geom)") } # should use .select("DISTINCT observations.*") scope :in_places, lambda {|place_ids| joins("JOIN place_geometries ON place_geometries.place_id IN (#{place_ids.join(",")})"). where("ST_Intersects(place_geometries.geom, observations.private_geom)") } scope :in_taxons_range, lambda {|taxon| taxon_id = taxon.is_a?(Taxon) ? taxon.id : taxon.to_i joins("JOIN taxon_ranges ON taxon_ranges.taxon_id = #{taxon_id}"). where("ST_Intersects(taxon_ranges.geom, observations.private_geom)") } # possibly radius in kilometers scope :near_point, Proc.new { |lat, lng, radius| lat = lat.to_f lng = lng.to_f radius = radius.to_f radius = 10.0 if radius == 0 planetary_radius = PLANETARY_RADIUS / 1000 # km radius_degrees = radius / (2*Math::PI*planetary_radius) * 360.0 where("ST_DWithin(ST_Point(?,?), geom, ?)", lng.to_f, lat.to_f, radius_degrees) } # Has_property scopes scope :has_taxon, lambda { |*args| taxon_id = args.first if taxon_id.nil? 
where("taxon_id IS NOT NULL") else where("taxon_id IN (?)", taxon_id) end } scope :has_iconic_taxa, lambda { |iconic_taxon_ids| iconic_taxon_ids = [iconic_taxon_ids].flatten.map do |itid| if itid.is_a?(Taxon) itid.id elsif itid.to_i == 0 Taxon::ICONIC_TAXA_BY_NAME[itid].try(:id) else itid end end.uniq if iconic_taxon_ids.include?(nil) where( "observations.iconic_taxon_id IS NULL OR observations.iconic_taxon_id IN (?)", iconic_taxon_ids ) elsif !iconic_taxon_ids.empty? where("observations.iconic_taxon_id IN (?)", iconic_taxon_ids) end } scope :has_geo, -> { where("latitude IS NOT NULL AND longitude IS NOT NULL") } scope :has_id_please, -> { where( "quality_grade = ?", NEEDS_ID ) } scope :has_photos, -> { where("observation_photos_count > 0") } scope :has_sounds, -> { where("observation_sounds_count > 0") } scope :has_quality_grade, lambda {|quality_grade| quality_grades = quality_grade.to_s.split(',') & Observation::QUALITY_GRADES quality_grade = '' if quality_grades.size == 0 where("quality_grade IN (?)", quality_grades) } # Find observations by a taxon object. Querying on taxa columns forces # massive joins, it's a bit sluggish scope :of, lambda { |taxon| taxon = Taxon.find_by_id(taxon.to_i) unless taxon.is_a? Taxon return where("1 = 2") unless taxon c = taxon.descendant_conditions.to_sql c[0] = "taxa.id = #{taxon.id} OR #{c[0]}" joins(:taxon).where(c) } scope :with_identifications_of, lambda { |taxon| taxon = Taxon.find_by_id( taxon.to_i ) unless taxon.is_a? Taxon return where( "1 = 2" ) unless taxon c = taxon.descendant_conditions.to_sql c = c.gsub( '"taxa"."ancestry"', 'it."ancestry"' ) # I'm not using TaxonAncestor here b/c updating observations for changes # in conservation status uses this scope, and when a cons status changes, # we don't want to skip any taxa that have moved around the tree since the # last time the denormalizer ran select( "DISTINCT observations.*"). joins( :identifications ). joins( "JOIN taxa it ON it.id = identifications.taxon_id" ). 
where( "identifications.current AND (it.id = ? or #{c})", taxon.id ) } scope :at_or_below_rank, lambda {|rank| rank_level = Taxon::RANK_LEVELS[rank] joins(:taxon).where("taxa.rank_level <= ?", rank_level) } # Find observations by user scope :by, lambda {|user| if user.is_a?(User) || user.to_i > 0 where("observations.user_id = ?", user) else joins(:user).where("users.login = ?", user) end } # Order observations by date and time observed scope :latest, -> { order("observed_on DESC NULLS LAST, time_observed_at DESC NULLS LAST") } scope :recently_added, -> { order("observations.id DESC") } # TODO: Make this work for any SQL order statement, including multiple cols scope :order_by, lambda { |order_sql| pieces = order_sql.split order_by = pieces[0] order = pieces[1] || 'ASC' extra = [pieces[2..-1]].flatten.join(' ') extra = "NULLS LAST" if extra.blank? options = {} case order_by when 'observed_on' order "observed_on #{order} #{extra}, time_observed_at #{order} #{extra}" when 'created_at' order "observations.id #{order} #{extra}" when 'project' order("project_observations.id #{order} #{extra}").joins(:project_observations) when 'votes' order("cached_votes_total #{order} #{extra}") else order "#{order_by} #{order} #{extra}" end } def self.identifications(agreement) scope = Observation scope = scope.includes(:identifications) case agreement when 'most_agree' scope.where("num_identification_agreements > num_identification_disagreements") when 'some_agree' scope.where("num_identification_agreements > 0") when 'most_disagree' scope.where("num_identification_agreements < num_identification_disagreements") else scope end end # Time based named scopes scope :created_after, lambda { |time| where('created_at >= ?', time)} scope :created_before, lambda { |time| where('created_at <= ?', time)} scope :updated_after, lambda { |time| where('updated_at >= ?', time)} scope :updated_before, lambda { |time| where('updated_at <= ?', time)} scope :observed_after, lambda { |time| 
where('time_observed_at >= ?', time)} scope :observed_before, lambda { |time| where('time_observed_at <= ?', time)} scope :in_month, lambda {|month| where("EXTRACT(MONTH FROM observed_on) = ?", month)} scope :week, lambda {|week| where("EXTRACT(WEEK FROM observed_on) = ?", week)} scope :in_projects, lambda { |projects| # NOTE using :include seems to trigger an erroneous eager load of # observations that screws up sorting kueda 2011-07-22 joins(:project_observations).where("project_observations.project_id IN (?)", Project.slugs_to_ids(projects)) } scope :on, lambda {|date| where(Observation.conditions_for_date(:observed_on, date)) } scope :created_on, lambda {|date| where(Observation.conditions_for_date("observations.created_at", date))} scope :out_of_range, -> { where(:out_of_range => true) } scope :in_range, -> { where(:out_of_range => false) } scope :license, lambda {|license| if license == 'none' where("observations.license IS NULL") elsif LICENSE_CODES.include?(license) where(:license => license) else where("observations.license IS NOT NULL") end } scope :photo_license, lambda {|license| license = license.to_s scope = joins(:photos) license_number = Photo.license_number_for_code(license) if license == 'none' scope.where("photos.license = 0") elsif LICENSE_CODES.include?(license) scope.where("photos.license = ?", license_number) else scope.where("photos.license > 0") end } scope :has_observation_field, lambda{|*args| field, value = args join_name = "ofv_#{field.is_a?(ObservationField) ? field.id : field}" scope = joins("LEFT OUTER JOIN observation_field_values #{join_name} ON #{join_name}.observation_id = observations.id"). where("#{join_name}.observation_field_id = ?", field) scope = scope.where("#{join_name}.value = ?", value) unless value.blank? scope } scope :between_hours, lambda{|h1, h2| h1 = h1.to_i % 24 h2 = h2.to_i % 24 where("EXTRACT(hour FROM ((time_observed_at AT TIME ZONE 'GMT') AT TIME ZONE zic_time_zone)) BETWEEN ? 
AND ?", h1, h2) } scope :between_months, lambda{|m1, m2| m1 = m1.to_i % 12 m2 = m2.to_i % 12 if m1 > m2 where("EXTRACT(month FROM observed_on) >= ? OR EXTRACT(month FROM observed_on) <= ?", m1, m2) else where("EXTRACT(month FROM observed_on) BETWEEN ? AND ?", m1, m2) end } scope :between_dates, lambda{|d1, d2| t1 = (Time.parse(URI.unescape(d1.to_s)) rescue Time.now) t2 = (Time.parse(URI.unescape(d2.to_s)) rescue Time.now) if d1.to_s.index(':') where("time_observed_at BETWEEN ? AND ? OR (time_observed_at IS NULL AND observed_on BETWEEN ? AND ?)", t1, t2, t1.to_date, t2.to_date) else where("observed_on BETWEEN ? AND ?", t1, t2) end } scope :dbsearch, lambda {|*args| q, on = args q = sanitize_query(q) unless q.blank? case on when 'species_guess' where("observations.species_guess ILIKE", "%#{q}%") when 'description' where("observations.description ILIKE", "%#{q}%") when 'place_guess' where("observations.place_guess ILIKE", "%#{q}%") when 'tags' where("observations.cached_tag_list ILIKE", "%#{q}%") else where("observations.species_guess ILIKE ? OR observations.description ILIKE ? OR observations.cached_tag_list ILIKE ? OR observations.place_guess ILIKE ?", "%#{q}%", "%#{q}%", "%#{q}%", "%#{q}%") end } scope :reviewed_by, lambda { |users| joins(:observation_reviews).where("observation_reviews.user_id IN (?)", users) } scope :not_reviewed_by, lambda { |users| users = [ users ] unless users.is_a?(Array) user_ids = users.map{ |u| ElasticModel.id_or_object(u) } joins("LEFT JOIN observation_reviews ON (observations.id=observation_reviews.observation_id) AND observation_reviews.user_id IN (#{ user_ids.join(',') })"). 
where("observation_reviews.id IS NULL") } def self.near_place(place) place = (Place.find(place) rescue nil) unless place.is_a?(Place) if place.swlat Observation.in_bounding_box(place.swlat, place.swlng, place.nelat, place.nelng) else Observation.near_point(place.latitude, place.longitude) end end def self.preload_for_component(observations, logged_in) preloads = [ { user: :stored_preferences }, { taxon: { taxon_names: :place_taxon_names } }, :iconic_taxon, { photos: [ :flags, :user ] }, :stored_preferences, :flags, :quality_metrics ] # why do we need taxon_descriptions when logged in? if logged_in preloads.delete(:iconic_taxon) preloads << { iconic_taxon: :taxon_descriptions } preloads << :project_observations end Observation.preload_associations(observations, preloads) end # help_txt_for :species_guess, <<-DESC # Type a name for what you saw. It can be common or scientific, accurate # or just a placeholder. When you enter it, we'll try to look it up and find # the matching species of higher level taxon. # DESC # # instruction_for :place_guess, "Type the name of a place" # help_txt_for :place_guess, <<-DESC # Enter the name of a place and we'll try to find where it is. If we find # it, you can drag the map marker around to get more specific. # DESC def to_s "<Observation #{self.id}: #{to_plain_s}>" end def to_plain_s(options = {}) s = self.species_guess.blank? ? I18n.t(:something) : self.species_guess if options[:verb] s += options[:verb] == true ? I18n.t(:observed).downcase : " #{options[:verb]}" end unless self.place_guess.blank? || options[:no_place_guess] || coordinates_obscured? s += " #{I18n.t(:from, :default => 'from').downcase} #{self.place_guess}" end s += " #{I18n.t(:on_day)} #{I18n.l(self.observed_on, :format => :long)}" unless self.observed_on.blank? unless self.time_observed_at.blank? 
|| options[:no_time] s += " #{I18n.t(:at)} #{self.time_observed_at_in_zone.to_s(:plain_time)}" end s += " #{I18n.t(:by).downcase} #{user.try_methods(:name, :login)}" unless options[:no_user] s.gsub(/\s+/, ' ') end def time_observed_at_utc time_observed_at.try(:utc) end def serializable_hash(opts = nil) # for some reason, in some cases options was still nil options = opts ? opts.clone : { } # making a deep copy of the options so they don't get modified # This was more effective than options.deep_dup if options[:include] && (options[:include].is_a?(Hash) || options[:include].is_a?(Array)) options[:include] = options[:include].marshal_copy end # don't use delete here, it will just remove the option for all # subsequent records in an array options[:include] = if options[:include].is_a?(Hash) options[:include].map{|k,v| {k => v}} else [options[:include]].flatten.compact end options[:methods] ||= [] options[:methods] += [:created_at_utc, :updated_at_utc, :time_observed_at_utc, :faves_count, :owners_identification_from_vision] viewer = options[:viewer] viewer_id = viewer.is_a?(User) ? viewer.id : viewer.to_i options[:except] ||= [] options[:except] += [:user_agent] if viewer_id != user_id && !options[:force_coordinate_visibility] options[:except] += [:private_latitude, :private_longitude, :private_positional_accuracy, :geom, :private_geom, :private_place_guess] options[:methods] << :coordinates_obscured end options[:except] += [:cached_tag_list, :geom, :private_geom] options[:except].uniq! options[:methods].uniq! h = super(options) h.each do |k,v| h[k] = v.gsub(/<script.*script>/i, "") if v.is_a?(String) end h.force_utf8 end # # Return a time from observed_on and time_observed_at # def datetime @datetime ||= if observed_on && errors[:observed_on].blank? 
time_observed_at_in_zone || Time.new(observed_on.year, observed_on.month, observed_on.day, 0, 0, 0, timezone_offset) end end def timezone_object # returns nil if the time_zone has an invalid value (time_zone && ActiveSupport::TimeZone.new(time_zone)) || (zic_time_zone && ActiveSupport::TimeZone.new(zic_time_zone)) end def timezone_offset # returns nil if the time_zone has an invalid value (timezone_object || ActiveSupport::TimeZone.new("UTC")).formatted_offset end # Return time_observed_at in the observation's time zone def time_observed_at_in_zone if self.time_observed_at self.time_observed_at.in_time_zone(self.time_zone) end end # # Set all the time fields based on the contents of observed_on_string # def munge_observed_on_with_chronic if observed_on_string.blank? self.observed_on = nil self.time_observed_at = nil return true end date_string = observed_on_string.strip tz_abbrev_pattern = /\s\(?([A-Z]{3,})\)?$/ # ends with (PDT) tz_offset_pattern = /([+-]\d{4})$/ # contains -0800 tz_js_offset_pattern = /(GMT)?([+-]\d{4})/ # contains GMT-0800 tz_colon_offset_pattern = /(GMT|HSP)([+-]\d+:\d+)/ # contains (GMT-08:00) tz_failed_abbrev_pattern = /\(#{tz_colon_offset_pattern}\)/ if date_string =~ /#{tz_js_offset_pattern} #{tz_failed_abbrev_pattern}/ date_string = date_string.sub(tz_failed_abbrev_pattern, '').strip end # Rails timezone support doesn't seem to recognize this abbreviation, and # frankly I have no idea where ActiveSupport::TimeZone::CODES comes from. # In case that ever stops working or a less hackish solution is required, # check out https://gist.github.com/kueda/3e6f77f64f792b4f119f tz_abbrev = date_string[tz_abbrev_pattern, 1] tz_abbrev = 'CET' if tz_abbrev == 'CEST' if parsed_time_zone = ActiveSupport::TimeZone::CODES[tz_abbrev] date_string = observed_on_string.sub(tz_abbrev_pattern, '') date_string = date_string.sub(tz_js_offset_pattern, '').strip self.time_zone = parsed_time_zone.name if observed_on_string_changed? 
elsif (offset = date_string[tz_offset_pattern, 1]) && (n = offset.to_f / 100) && (key = n == 0 ? 0 : n.floor + (n%n.floor)/0.6) && (parsed_time_zone = ActiveSupport::TimeZone[key]) date_string = date_string.sub(tz_offset_pattern, '') self.time_zone = parsed_time_zone.name if observed_on_string_changed? elsif (offset = date_string[tz_js_offset_pattern, 2]) && (n = offset.to_f / 100) && (key = n == 0 ? 0 : n.floor + (n%n.floor)/0.6) && (parsed_time_zone = ActiveSupport::TimeZone[key]) date_string = date_string.sub(tz_js_offset_pattern, '') date_string = date_string.sub(/^(Sun|Mon|Tue|Wed|Thu|Fri|Sat)\s+/i, '') self.time_zone = parsed_time_zone.name if observed_on_string_changed? elsif (offset = date_string[tz_colon_offset_pattern, 2]) && (t = Time.parse(offset)) && (parsed_time_zone = ActiveSupport::TimeZone[t.hour+t.min/60.0]) date_string = date_string.sub(/#{tz_colon_offset_pattern}|#{tz_failed_abbrev_pattern}/, '') self.time_zone = parsed_time_zone.name if observed_on_string_changed? end date_string.sub!('T', ' ') if date_string =~ /\d{4}-\d{2}-\d{2}T/ date_string.sub!(/(\d{2}:\d{2}:\d{2})\.\d+/, '\\1') # strip leading month if present date_string.sub!(/^[A-z]{3} ([A-z]{3})/, '\\1') # strip paranthesized stuff date_string.gsub!(/\(.*\)/, '') # strip noon hour madness # this is due to a weird, weird bug in Chronic if date_string =~ /p\.?m\.?/i date_string.gsub!( /( 12:(\d\d)(:\d\d)?)\s+?p\.?m\.?/i, '\\1') elsif date_string =~ /a\.?m\.?/i date_string.gsub!( /( 12:(\d\d)(:\d\d)?)\s+?a\.?m\.?/i, '\\1') date_string.gsub!( / 12:/, " 00:" ) end # Set the time zone appropriately old_time_zone = Time.zone begin Time.zone = time_zone || user.try(:time_zone) rescue ArgumentError # Usually this would happen b/c of an invalid time zone being specified self.time_zone = time_zone_was || old_time_zone.name end Chronic.time_class = Time.zone begin # Start parsing... 
t = begin Chronic.parse(date_string) rescue ArgumentError nil end t = Chronic.parse(date_string.split[0..-2].join(' ')) unless t if !t && (locale = user.locale || I18n.locale) date_string = englishize_month_abbrevs_for_locale(date_string, locale) t = Chronic.parse(date_string) end if !t I18N_SUPPORTED_LOCALES.each do |locale| date_string = englishize_month_abbrevs_for_locale(date_string, locale) break if t = Chronic.parse(date_string) end end return true unless t # Re-interpret future dates as being in the past t = Chronic.parse(date_string, :context => :past) if t > Time.now self.observed_on = t.to_date if t # try to determine if the user specified a time by ask Chronic to return # a time range. Time ranges less than a day probably specified a time. if tspan = Chronic.parse(date_string, :context => :past, :guess => false) # If tspan is less than a day and the string wasn't 'today', set time if tspan.width < 86400 && date_string.strip.downcase != 'today' self.time_observed_at = t else self.time_observed_at = nil end end rescue RuntimeError, ArgumentError # ignore these, just don't set the date return true end # don't store relative observed_on_strings, or they will change # every time you save an observation! if date_string =~ /today|yesterday|ago|last|this|now|monday|tuesday|wednesday|thursday|friday|saturday|sunday/i self.observed_on_string = self.observed_on.to_s if self.time_observed_at self.observed_on_string = self.time_observed_at.strftime("%Y-%m-%d %H:%M:%S") end end # Set the time zone back the way it was Time.zone = old_time_zone true end def englishize_month_abbrevs_for_locale(date_string, locale) # HACK attempt to translate month abbreviations into English. # A much better approach would be add Spanish and any other supported # locales to https://github.com/olojac/chronic-l10n and switch to the # 'localized' branch of Chronic, which seems to clear our test suite. 
return date_string if locale.to_s =~ /^en/ return date_string unless I18N_SUPPORTED_LOCALES.include?(locale) I18n.t('date.abbr_month_names', :locale => :en).each_with_index do |en_month_name,i| next if i == 0 localized_month_name = I18n.t('date.abbr_month_names', :locale => locale)[i] next if localized_month_name == en_month_name date_string.gsub!(/#{localized_month_name}([\s\,])/, "#{en_month_name}\\1") end date_string end # # Adds, updates, or destroys the identification corresponding to the taxon # the user selected. # def update_identifications return true if @skip_identifications return true unless taxon_id_changed? owners_ident = identifications.where(:user_id => user_id).order("id asc").last # If there's a taxon we need to make sure the owner's ident agrees if taxon && (owners_ident.blank? || owners_ident.taxon_id != taxon.id) # If the owner doesn't have an identification for this obs, make one attrs = { user: user, taxon: taxon, observation: self, skip_observation: true, vision: owners_identification_from_vision_requested } owners_ident = if new_record? self.identifications.build(attrs) else self.identifications.create(attrs) end elsif taxon.blank? && owners_ident && owners_ident.current? if identifications.where(:user_id => user_id).count > 1 owners_ident.update_attributes(:current => false, :skip_observation => true) else owners_ident.skip_observation = true owners_ident.destroy end end update_stats(:skip_save => true) true end # Override nested obs field values attributes setter to ensure that field # values get added even if existing field values have been destroyed (e.g. # two windows). Also updating existing OFV of same OF name if id not # specified def observation_field_values_attributes=(attributes) attr_array = attributes.is_a?(Hash) ? attributes.values : attributes attr_array.each_with_index do |v,i| if v["id"].blank? 
existing = observation_field_values.where(:observation_field_id => v["observation_field_id"]).first unless v["observation_field_id"].blank? existing ||= observation_field_values.joins(:observation_fields).where("lower(observation_fields.name) = ?", v["name"]).first unless v["name"].blank? attr_array[i]["id"] = existing.id if existing elsif !ObservationFieldValue.where("id = ?", v["id"]).exists? attr_array[i].delete("id") end end assign_nested_attributes_for_collection_association(:observation_field_values, attr_array) end # # Update the user's lists with changes to this observation's taxon # # If the observation is the last_observation in any of the user's lists, # then the last_observation should be reset to another observation. # def refresh_lists return true if skip_refresh_lists return true unless taxon_id_changed? || quality_grade_changed? # Update the observation's current taxon and/or a previous one that was # just removed/changed target_taxa = [ taxon, Taxon.find_by_id(@old_observation_taxon_id) ].compact.uniq # Don't refresh all the lists if nothing changed return true if target_taxa.empty? # Refreh the ProjectLists ProjectList.delay(priority: USER_INTEGRITY_PRIORITY, queue: "slow", unique_hash: { "ProjectList::refresh_with_observation": id }). refresh_with_observation(id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at) # Don't refresh LifeLists and Lists if only quality grade has changed return true unless taxon_id_changed? List.delay(priority: USER_INTEGRITY_PRIORITY, queue: "slow", unique_hash: { "List::refresh_with_observation": id }). refresh_with_observation(id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at, :skip_subclasses => true) LifeList.delay(priority: USER_INTEGRITY_PRIORITY, queue: "slow", unique_hash: { "LifeList::refresh_with_observation": id }). 
refresh_with_observation(id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at) # Reset the instance var so it doesn't linger around @old_observation_taxon_id = nil true end def refresh_check_lists return true if skip_refresh_check_lists refresh_needed = (georeferenced? || was_georeferenced?) && (taxon_id || taxon_id_was) && (quality_grade_changed? || taxon_id_changed? || latitude_changed? || longitude_changed? || observed_on_changed?) return true unless refresh_needed CheckList.delay(priority: INTEGRITY_PRIORITY, queue: "slow", unique_hash: { "CheckList::refresh_with_observation": id }). refresh_with_observation(id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_changed? ? taxon_id_was : nil, :latitude_was => (latitude_changed? || longitude_changed?) ? latitude_was : nil, :longitude_was => (latitude_changed? || longitude_changed?) ? longitude_was : nil, :new => id_was.blank?) true end # Because it has to be slightly different, in that the taxon of a destroyed # obs shouldn't be removed by default from life lists (maybe you've seen it # in the past, but you don't have any other obs), but those listed_taxa of # this taxon should have their last_observation reset. # def refresh_lists_after_destroy return true if skip_refresh_lists return true unless taxon List.delay(:priority => USER_INTEGRITY_PRIORITY).refresh_with_observation(id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at, :skip_subclasses => true) LifeList.delay(:priority => USER_INTEGRITY_PRIORITY).refresh_with_observation(id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at) true end # # Preserve the old taxon id if the taxon has changed so we know to update # that taxon in the user's lists after_save # def keep_old_taxon_id @old_observation_taxon_id = taxon_id_was if taxon_id_changed? 
    true
  end

  #
  # Set the iconic taxon if it hasn't been set
  #
  def set_iconic_taxon
    # Mirror the taxon's iconic taxon; clear it when the obs has no taxon.
    # ||= preserves an already-assigned iconic taxon
    if taxon
      self.iconic_taxon_id ||= taxon.iconic_taxon_id
    else
      self.iconic_taxon_id = nil
    end
    true
  end

  #
  # Trim whitespace around species guess
  #
  def strip_species_guess
    # strip! mutates the attribute string in place (to_s on a String returns
    # the same object), so no reassignment is needed
    self.species_guess.to_s.strip! unless species_guess.blank?
    true
  end

  #
  # Set the time_zone of this observation if not already set
  #
  def set_time_zone
    # Fallback order: user's zone, the current request zone (only when a
    # time was actually recorded), then UTC. Also denormalizes the IANA
    # (zic) zone name for use in SQL (see :between_hours)
    self.time_zone = nil if time_zone.blank?
    self.time_zone ||= user.time_zone if user && !user.time_zone.blank?
    self.time_zone ||= Time.zone.try(:name) unless time_observed_at.blank?
    self.time_zone ||= 'UTC'
    self.zic_time_zone = ActiveSupport::TimeZone::MAPPING[time_zone] unless time_zone.blank?
    true
  end

  #
  # Force time_observed_at into the time zone
  #
  def set_time_in_time_zone
    return true if time_observed_at.blank? || time_zone.blank?
    # Only rewrite when the time or the zone actually changed
    return true unless time_observed_at_changed? || time_zone_changed?

    # Render the time as a string
    time_s = time_observed_at_before_type_cast
    unless time_s.is_a? String
      time_s = time_observed_at_before_type_cast.strftime("%Y-%m-%d %H:%M:%S")
    end

    # Get the time zone offset as a string and append it
    offset_s = Time.parse(time_s).in_time_zone(time_zone).formatted_offset(false)
    time_s += " #{offset_s}"

    self.time_observed_at = Time.parse(time_s)
    true
  end

  def set_captive
    # Persist the denormalized captive flag without triggering callbacks or
    # validations
    update_column(:captive, captive_cultivated)
  end

  # Life Science Identifier for this observation, namespaced by the site host
  def lsid
    "lsid:#{URI.parse(CONFIG.site_url).host}:observations:#{id}"
  end

  def component_cache_key(options = {})
    Observation.component_cache_key(id, options)
  end

  # Fragment-cache key for an observation component. Options are sorted and
  # folded into the key so distinct renderings cache separately
  def self.component_cache_key(id, options = {})
    key = "obs_comp_#{id}"
    key += "_"+options.sort.map{|k,v| "#{k}-#{v}"}.join('_') unless options.blank?
    key
  end

  def num_identifications_by_others
    num_identification_agreements + num_identification_disagreements
  end

  # False when the observation itself is flagged or any of its photos are
  def appropriate?
    return false if flagged?
    return false if observation_photos_count > 0 && photos.detect{ |p| p.flagged? }
    true
  end

  # True when either the public or the private coordinate pair is present
  def georeferenced?
    (!latitude.nil? && !longitude.nil?) || (!private_latitude.nil?
      && !private_longitude.nil?)
  end

  def was_georeferenced?
    # Uses *_was so this is meaningful inside save callbacks
    (latitude_was && longitude_was) || (private_latitude_was && private_longitude_was)
  end

  # Fraction of votes on the given quality metric that agree, or nil when the
  # metric has no votes at all
  def quality_metric_score(metric)
    # Force-load once so repeated calls reuse the in-memory collection
    quality_metrics.all unless quality_metrics.loaded?
    metrics = quality_metrics.select{|qm| qm.metric == metric}
    return nil if metrics.blank?
    metrics.select{|qm| qm.agree?}.size.to_f / metrics.size
  end

  def community_supported_id?
    if community_taxon_rejected?
      # Community taxon opted out: fall back to raw agreement counts
      num_identification_agreements.to_i > 0 && num_identification_agreements > num_identification_disagreements
    else
      !community_taxon_id.blank? && taxon_id == community_taxon_id
    end
  end

  def quality_metrics_pass?
    QualityMetric::METRICS.each do |metric|
      return false unless passes_quality_metric?(metric)
    end
    true
  end

  # A metric passes when it has no votes (score is nil) or at least half agree
  def passes_quality_metric?(metric)
    score = quality_metric_score(metric)
    score.blank? || score >= 0.5
  end

  # Eligible for research grade: not a human, georeferenced, passes all
  # quality metrics, has a date, has media, and isn't flagged
  def research_grade_candidate?
    return false if human?
    return false unless georeferenced?
    return false unless quality_metrics_pass?
    return false unless observed_on?
    return false unless (photos? || sounds?)
    return false unless appropriate?
    true
  end

  def human?
    t = community_taxon || taxon
    t && t.name =~ /^Homo /
  end

  def research_grade?
    quality_grade == RESEARCH_GRADE
  end

  def verifiable?
    [ NEEDS_ID, RESEARCH_GRADE ].include?(quality_grade)
  end

  def photos?
    return true if photos && photos.any?
    # Avoid loading the association just to check existence
    observation_photos.loaded? ? ! observation_photos.empty? : observation_photos.exists?
  end

  def sounds?
    sounds.loaded? ? ! sounds.empty? : sounds.exists?
  end

  def set_quality_grade(options = {})
    self.quality_grade = get_quality_grade
    true
  end

  # Recompute and persist the quality grade for an observation by id,
  # kicking off a check-list refresh when the grade actually changed
  def self.set_quality_grade(id)
    return unless observation = Observation.find_by_id(id)
    observation.set_quality_grade(:force => true)
    observation.save
    if observation.quality_grade_changed?
      CheckList.delay(priority: INTEGRITY_PRIORITY, queue: "slow",
        unique_hash: { "CheckList::refresh_with_observation": id }).
refresh_with_observation(observation.id, :taxon_id => observation.taxon_id) end observation.quality_grade end def get_quality_grade if !research_grade_candidate? CASUAL elsif voted_in_to_needs_id? NEEDS_ID elsif community_taxon_id && owners_identification && owners_identification.maverick? && community_taxon_rejected? CASUAL elsif community_taxon_at_species_or_lower? RESEARCH_GRADE elsif voted_out_of_needs_id? if community_taxon_below_family? RESEARCH_GRADE else CASUAL end else NEEDS_ID end end def coordinates_obscured? !private_latitude.blank? || !private_longitude.blank? end alias :coordinates_obscured :coordinates_obscured? def coordinates_private? latitude.blank? && longitude.blank? && private_latitude? && private_longitude? end def coordinates_changed? latitude_changed? || longitude_changed? || private_latitude_changed? || private_longitude_changed? end def geoprivacy_private? geoprivacy == PRIVATE end def geoprivacy_obscured? geoprivacy == OBSCURED end def coordinates_viewable_by?(viewer) return true unless coordinates_obscured? return false if viewer.blank? viewer = User.find_by_id(viewer) unless viewer.is_a?(User) return false unless viewer return true if user_id == viewer.id project_ids = if projects.loaded? projects.map(&:id) else project_observations.map(&:project_id) end viewer.project_users.select{|pu| project_ids.include?(pu.project_id) && ProjectUser::ROLES.include?(pu.role)}.each do |pu| if project_observations.detect{|po| po.project_id == pu.project_id && po.prefers_curator_coordinate_access?} return true end end false end def reset_private_coordinates_if_coordinates_changed if (latitude_changed? || longitude_changed?) self.private_latitude = nil self.private_longitude = nil end true end def normalize_geoprivacy self.geoprivacy = nil unless GEOPRIVACIES.include?(geoprivacy) true end def obscure_coordinates_for_geoprivacy self.geoprivacy = nil if geoprivacy.blank? return true if geoprivacy.blank? && !geoprivacy_changed? 
case geoprivacy when PRIVATE obscure_coordinates unless coordinates_obscured? self.latitude, self.longitude = [nil, nil] when OBSCURED obscure_coordinates unless coordinates_obscured? else unobscure_coordinates end true end def obscure_coordinates_for_threatened_taxa lat = private_latitude.blank? ? latitude : private_latitude lon = private_longitude.blank? ? longitude : private_longitude t = taxon || community_taxon target_taxon_ids = [[t.try(:id)] + identifications.current.pluck(:taxon_id)].flatten.compact.uniq taxon_geoprivacy = Taxon.max_geoprivacy( target_taxon_ids, latitude: lat, longitude: lon ) case taxon_geoprivacy when OBSCURED obscure_coordinates unless coordinates_obscured? when PRIVATE unless coordinates_private? obscure_coordinates self.latitude, self.longitude = [nil, nil] end else unobscure_coordinates end true end def obscure_coordinates return if latitude.blank? || longitude.blank? if latitude_changed? || longitude_changed? self.private_latitude = latitude self.private_longitude = longitude else self.private_latitude ||= latitude self.private_longitude ||= longitude end self.latitude, self.longitude = Observation.random_neighbor_lat_lon( private_latitude, private_longitude ) set_geom_from_latlon true end def obscure_place_guess public_place_guess = Observation.place_guess_from_latlon( private_latitude, private_longitude, acc: calculate_public_positional_accuracy, user: user ) if coordinates_private? if place_guess_changed? && place_guess == private_place_guess self.place_guess = nil elsif !place_guess.blank? && place_guess != public_place_guess self.private_place_guess = place_guess self.place_guess = nil end elsif coordinates_obscured? if place_guess_changed? if place_guess == private_place_guess self.place_guess = public_place_guess else self.private_place_guess = place_guess self.place_guess = public_place_guess end elsif private_latitude_changed? && private_place_guess.blank? 
self.private_place_guess = place_guess self.place_guess = public_place_guess end else unless place_guess_changed? || private_place_guess.blank? self.place_guess = private_place_guess end self.private_place_guess = nil end true end def lat_lon_in_place_guess? !place_guess.blank? && place_guess !~ /[a-cf-mo-rt-vx-z]/i && !place_guess.scan(COORDINATE_REGEX).blank? end def unobscure_coordinates return unless coordinates_obscured? || coordinates_private? return unless geoprivacy.blank? self.latitude = private_latitude self.longitude = private_longitude self.private_latitude = nil self.private_longitude = nil set_geom_from_latlon end def iconic_taxon_name return nil if iconic_taxon_id.blank? if Taxon::ICONIC_TAXA_BY_ID.blank? association(:iconic_taxon).loaded? ? iconic_taxon.try(:name) : Taxon.select("id, name").where(:id => iconic_taxon_id).first.try(:name) else Taxon::ICONIC_TAXA_BY_ID[iconic_taxon_id].try(:name) end end def captive_cultivated? !passes_quality_metric?(QualityMetric::WILD) end alias :captive_cultivated :captive_cultivated? def reviewed_by?(viewer) viewer = User.find_by_id(viewer) unless viewer.is_a?(User) return false unless viewer ObservationReview.where(observation_id: id, user_id: viewer.id, reviewed: true).exists? end ##### Community Taxon ######################################################### def get_community_taxon(options = {}) return if (identifications.loaded? ? identifications.select(&:current?).select(&:persisted?).uniq : identifications.current).count <= 1 node = community_taxon_nodes(options).select{|n| n[:cumulative_count] > 1}.sort_by do |n| [ n[:score].to_f > COMMUNITY_TAXON_SCORE_CUTOFF ? 1 : 0, # only consider taxa with a score above the cutoff 0 - (n[:taxon].rank_level || 500) # within that set, sort by rank level, i.e. 
choose lowest rank ] end.last # # Visualizing this stuff is pretty useful for testing, so please leave this in # puts # width = 15 # %w(taxon_id taxon_name cc dc cdc score).each do |c| # print c.ljust(width) # end # puts # community_taxon_nodes.sort_by{|n| n[:taxon].ancestry || ""}.each do |n| # print n[:taxon].id.to_s.ljust(width) # print n[:taxon].name.to_s.ljust(width) # print n[:cumulative_count].to_s.ljust(width) # print n[:disagreement_count].to_s.ljust(width) # print n[:conservative_disagreement_count].to_s.ljust(width) # print n[:score].to_s.ljust(width) # puts # end return unless node node[:taxon] end def community_taxon_nodes(options = {}) return @community_taxon_nodes if @community_taxon_nodes && !options[:force] # work on current identifications ids = identifications.loaded? ? identifications.select(&:current?).select(&:persisted?).uniq : identifications.current.includes(:taxon) working_idents = ids.sort_by(&:id) # load all ancestor taxa implied by identifications ancestor_ids = working_idents.map{|i| i.taxon.ancestor_ids}.flatten.uniq.compact taxon_ids = working_idents.map{|i| [i.taxon_id] + i.taxon.ancestor_ids}.flatten.uniq.compact taxa = Taxon.where("id IN (?)", taxon_ids) taxon_ids_count = taxon_ids.size @community_taxon_nodes = taxa.map do |id_taxon| # count all identifications of this taxon and its descendants cumulative_count = working_idents.select{|i| i.taxon.self_and_ancestor_ids.include?(id_taxon.id)}.size # count identifications of taxa that are outside of this taxon's subtree # (i.e. absolute disagreements) disagreement_count = working_idents.reject{|i| id_taxon.self_and_ancestor_ids.include?(i.taxon_id) || i.taxon.self_and_ancestor_ids.include?(id_taxon.id) }.size # count identifications of taxa that are ancestors of this taxon but # were made after the first identification of this taxon (i.e. # conservative disagreements). 
Note that for genus1 > species1, an
# identification of species1 implies an identification of genus1
# Oldest ident whose taxon is this node's taxon or one of its descendants.
first_ident = working_idents.detect{|i| i.taxon.self_and_ancestor_ids.include?(id_taxon.id)}
# Idents of an *ancestor* taxon added after that first ident count as
# "conservative" disagreements with this node.
conservative_disagreement_count = if first_ident
  working_idents.select{|i| i.id > first_ident.id && id_taxon.ancestor_ids.include?(i.taxon_id)}.size
else
  0
end
# Node summary hash; :score is the fraction of supporting idents among
# supporting + disagreeing + conservatively disagreeing idents.
{
  :taxon => id_taxon,
  :ident_count => working_idents.select{|i| i.taxon_id == id_taxon.id}.size,
  :cumulative_count => cumulative_count,
  :disagreement_count => disagreement_count,
  :conservative_disagreement_count => conservative_disagreement_count,
  :score => cumulative_count.to_f / (cumulative_count + disagreement_count + conservative_disagreement_count)
}
end
end

# Recomputes and assigns the community taxon for this observation. Also
# refreshes species_guess when the record changed, the community taxon is
# present, and the owner has not rejected community IDs. Does NOT save.
def set_community_taxon(options = {})
  community_taxon = get_community_taxon(options)
  self.community_taxon = community_taxon
  if self.changed? && !community_taxon.nil? && !community_taxon_rejected?
    self.species_guess = (community_taxon.common_name.try(:name) || community_taxon.name)
  end
  true
end

# before_save hook: re-derive the community taxon when the opt-in/out
# preference or the taxon itself changed.
def set_community_taxon_before_save
  set_community_taxon(:force => true) if prefers_community_taxon_changed? || taxon_id_changed?
  true
end

# Batch maintenance task: recompute community taxa for a filtered set of
# observations (by user, taxon, place, project, or a raw :where clause),
# saving each and logging failures. Only touches observations with more
# than one identification.
def self.set_community_taxa(options = {})
  scope = Observation.includes({:identifications => [:taxon]}, :user)
  scope = scope.where(options[:where]) if options[:where]
  scope = scope.by(options[:user]) unless options[:user].blank?
  scope = scope.of(options[:taxon]) unless options[:taxon].blank?
  scope = scope.in_place(options[:place]) unless options[:place].blank?
  scope = scope.in_projects([options[:project]]) unless options[:project].blank?
  start_time = Time.now
  logger = options[:logger] || Rails.logger
  logger.info "[INFO #{Time.now}] Starting Observation.set_community_taxon, options: #{options.inspect}"
  scope.find_each do |o|
    next unless o.identifications.size > 1
    o.set_community_taxon
    unless o.save
      logger.error "[ERROR #{Time.now}] Failed to set community taxon for #{o}: #{o.errors.full_messages.to_sentence}"
    end
  end
  logger.info "[INFO #{Time.now}] Finished Observation.set_community_taxon in #{Time.now - start_time}s, options: #{options.inspect}"
end

# True when the observer opted out of community IDs, either on this
# observation or globally on their account. An explicit per-observation
# opt-in overrides the account-level preference.
def community_taxon_rejected?
  return false if prefers_community_taxon == true
  (prefers_community_taxon == false || user.prefers_community_taxa == false)
end

# Chooses the displayed taxon_id based on the community taxon and the
# owner's opt-in/out preferences, then refreshes stats and species_guess
# if the effective taxon actually changed.
def set_taxon_from_community_taxon
  return if identifications.count == 0 && taxon_id
  # explicitly opted in
  self.taxon_id = if prefers_community_taxon
    community_taxon_id || owners_identification.try(:taxon_id) || others_identifications.last.try(:taxon_id)
  # obs opted out or user opted out
  elsif prefers_community_taxon == false || !user.prefers_community_taxa?
    owners_identification.try(:taxon_id)
  # implicitly opted in
  else
    community_taxon_id || owners_identification.try(:taxon_id) || others_identifications.last.try(:taxon_id)
  end
  if taxon_id_changed? && (community_taxon_id_changed? || prefers_community_taxon_changed?)
    # :skip_save because we're inside a save lifecycle already
    update_stats(:skip_save => true)
    self.species_guess = if taxon
      taxon.common_name.try(:name) || taxon.name
    else
      nil
    end
  end
  true
end

# Re-applies coordinate obscuration rules to every observation identified
# as the given taxon, in batches. With :place, batches are filtered via
# Elasticsearch first.
def self.reassess_coordinates_for_observations_of( taxon, options = {} )
  batch_size = 500
  scope = Observation.with_identifications_of( taxon )
  scope.find_in_batches(batch_size: batch_size) do |batch|
    if options[:place]
      # using Elasticsearch for place filtering so we don't
      # get bogged down by huge geometries in Postgresql
      es_params = { id: batch, place_id: options[:place], per_page: batch_size }
      reassess_coordinates_of( Observation.page_of_results( es_params ) )
    else
      reassess_coordinates_of( batch )
    end
  end
end

# Re-runs obscuration on each observation and persists only the coordinate
# and place-guess columns via update_all (skipping callbacks/validations),
# then reindexes the whole set in Elasticsearch.
def self.reassess_coordinates_of( observations )
  observations.each do |o|
    o.obscure_coordinates_for_threatened_taxa
    o.obscure_place_guess
    next unless o.coordinates_changed? || o.place_guess_changed?
    Observation.where( id: o.id ).update_all(
      latitude: o.latitude,
      longitude: o.longitude,
      private_latitude: o.private_latitude,
      private_longitude: o.private_longitude,
      geom: o.geom,
      private_geom: o.private_geom,
      place_guess: o.place_guess,
      private_place_guess: o.private_place_guess
    )
  end
  Observation.elastic_index!( ids: observations.map(&:id) )
end

# Yields each observation of the taxon or any of its descendants
# (descendants matched via an ancestry LIKE pattern).
def self.find_observations_of(taxon)
  Observation.joins(:taxon).
    where("observations.taxon_id = ? OR taxa.ancestry LIKE '#{taxon.ancestry}/#{taxon.id}%'", taxon).find_each do |o|
    yield(o)
  end
end

##### Validations #########################################################

#
# Make sure the observation is not in the future.
#
def must_be_in_the_past
  return true if observed_on.blank?
  if observed_on > Time.now.in_time_zone(time_zone || user.time_zone).to_date
    errors.add(:observed_on, "can't be in the future")
  end
  true
end

#
# Make sure the observation resolves to a single day. Right now we don't
# store ambiguity...
#
def must_not_be_a_range
  return if observed_on_string.blank?
is_a_range = false begin if tspan = Chronic.parse(observed_on_string, :context => :past, :guess => false) is_a_range = true if tspan.width.seconds > 1.day.seconds end rescue RuntimeError, ArgumentError # ignore parse errors, assume they're not spans return end # Special case: dates like '2004', which ordinarily resolve to today at # 8:04pm observed_on_int = observed_on_string.gsub(/[^\d]/, '').to_i if observed_on_int > 1900 && observed_on_int <= Date.today.year is_a_range = true end if is_a_range errors.add(:observed_on, "must be a single day, not a range") end end def set_taxon_from_taxon_name return true if self.taxon_name.blank? return true if taxon_id self.taxon_id = single_taxon_id_for_name(self.taxon_name) true end def set_taxon_from_species_guess return true if species_guess =~ /\?$/ return true unless species_guess_changed? && taxon_id.blank? return true if species_guess.blank? self.taxon_id = single_taxon_id_for_name(species_guess) true end def single_taxon_for_name(name) Taxon.single_taxon_for_name(name) end def single_taxon_id_for_name(name) Taxon.single_taxon_for_name(name).try(:id) end def set_latlon_from_place_guess return true unless latitude.blank? && longitude.blank? return true if place_guess.blank? return true if place_guess =~ /[a-cf-mo-rt-vx-z]/i # ignore anything with word chars other than NSEW return true unless place_guess.strip =~ /[.+,\s.+]/ # ignore anything without a legit separator matches = place_guess.strip.scan(COORDINATE_REGEX).flatten return true if matches.blank? 
case matches.size when 2 # decimal degrees self.latitude, self.longitude = matches when 4 # decimal minutes self.latitude = matches[0].to_i + matches[1].to_f/60.0 self.longitude = matches[3].to_i + matches[4].to_f/60.0 when 6 # degrees / minutes / seconds self.latitude = matches[0].to_i + matches[1].to_i/60.0 + matches[2].to_f/60/60 self.longitude = matches[3].to_i + matches[4].to_i/60.0 + matches[5].to_f/60/60 end self.latitude *= -1 if latitude.to_f > 0 && place_guess =~ /s/i self.longitude *= -1 if longitude.to_f > 0 && place_guess =~ /w/i true end def set_geom_from_latlon(options = {}) if longitude.blank? || latitude.blank? self.geom = nil elsif options[:force] || longitude_changed? || latitude_changed? self.geom = "POINT(#{longitude} #{latitude})" end if private_latitude && private_longitude self.private_geom = "POINT(#{private_longitude} #{private_latitude})" elsif self.geom self.private_geom = self.geom else self.private_geom = nil end true end def self.place_guess_from_latlon( lat, lon, options = {} ) sys_places = Observation.system_places_for_latlon( lat, lon, options ) return if sys_places.blank? sys_places_codes = sys_places.map(&:code) user = options[:user] locale = options[:locale] locale ||= user.locale if user locale ||= I18n.locale first_name = if sys_places[0].admin_level == Place::COUNTY_LEVEL && sys_places_codes.include?( "US" ) "#{sys_places[0].name} County" else I18n.t( sys_places[0].name, locale: locale, default: sys_places[0].name ) end remaining_names = sys_places[1..-1].map do |p| if p.admin_level == Place::COUNTY_LEVEL && sys_places_codes.include?( "US" ) "#{p.name} County" else p.code.blank? ? I18n.t( p.name, locale: locale, default: p.name ) : p.code end end [first_name, remaining_names].flatten.join( ", " ) end def set_place_guess_from_latlon return true unless place_guess.blank? return true if coordinates_private? 
if guess = Observation.place_guess_from_latlon(
    latitude, longitude,
    { acc: calculate_public_positional_accuracy, user: user } )
  self.place_guess = guess
end
true
end

# Defaults the license to the user's preferred observation license and
# clears anything that is not a recognized license code. An explicit
# change to a blank license is respected.
def set_license
  return true if license_changed? && license.blank?
  self.license ||= user.preferred_observation_license
  self.license = nil unless LICENSE_CODES.include?(license)
  true
end

# Truncates user_agent to fit a 255-char column.
def trim_user_agent
  return true if user_agent.blank?
  self.user_agent = user_agent[0..254]
  true
end

# Recomputes out_of_range inline when the taxon was removed, otherwise
# defers it to a background job when coordinates or taxon changed.
def update_out_of_range_later
  if taxon_id_changed? && taxon.blank?
    update_out_of_range
  elsif latitude_changed? || private_latitude_changed? || taxon_id_changed?
    delay(:priority => USER_INTEGRITY_PRIORITY).update_out_of_range
  end
  true
end

# Persists the freshly computed out_of_range flag without callbacks.
def update_out_of_range
  set_out_of_range
  Observation.where(id: id).update_all(out_of_range: out_of_range)
end

# Sets out_of_range by measuring the distance from the observation to the
# taxon's range polygon in PostGIS; nil when there is no taxon, no
# coordinates, or no known range. Obscured coordinates are checked against
# the private point directly rather than the stored geom.
def set_out_of_range
  if taxon_id.blank? || !georeferenced? || !TaxonRange.exists?(["taxon_id = ?", taxon_id])
    self.out_of_range = nil
    return
  end
  # buffer the point to accomodate simplified or slightly inaccurate ranges
  buffer_degrees = OUT_OF_RANGE_BUFFER / (2*Math::PI*Observation::PLANETARY_RADIUS) * 360.0
  self.out_of_range = if coordinates_obscured?
    TaxonRange.where(
      "taxon_ranges.taxon_id = ? AND ST_Distance(taxon_ranges.geom, ST_Point(?,?)) > ?",
      taxon_id, private_longitude, private_latitude, buffer_degrees
    ).exists?
  else
    TaxonRange.
      from("taxon_ranges, observations").
      where(
        "taxon_ranges.taxon_id = ? AND observations.id = ? AND ST_Distance(taxon_ranges.geom, observations.geom) > ?",
        taxon_id, id, buffer_degrees
      ).count > 0
  end
end

# Backfills the canonical URI for this observation (update_all to avoid
# re-triggering callbacks).
def set_uri
  if uri.blank?
    Observation.where(id: id).update_all(uri: FakeView.observation_url(id))
  end
  true
end

# Applies this observation's license as the user's default when the
# transient make_license_default flag was set.
def update_default_license
  return true unless make_license_default.yesish?
  user.update_attribute(:preferred_observation_license, license)
  true
end

# Applies this observation's license to ALL of the user's observations
# when the transient make_licenses_same flag was set, then reindexes.
def update_all_licenses
  return true unless make_licenses_same.yesish?
  Observation.where(user_id: user_id).update_all(license: license)
  user.index_observations_later
  true
end

# Schedules observation-count updates for the old and new taxa when the
# taxon changed or the record was destroyed.
def update_taxon_counter_caches
  return true unless destroyed? || taxon_id_changed?
  taxon_ids = [taxon_id_was, taxon_id].compact.uniq
  unless taxon_ids.blank?
    Taxon.delay(:priority => INTEGRITY_PRIORITY).update_observation_counts(:taxon_ids => taxon_ids)
  end
  true
end

# Translates the transient captive_flag (and force_quality_metrics) into
# WILD quality-metric votes for the observer, and maintains a system-level
# (userless) WILD vote driven by probably_captive?.
def update_quality_metrics
  if captive_flag.yesish?
    # captive => vote "not wild"
    QualityMetric.vote( user, self, QualityMetric::WILD, false )
  elsif captive_flag.noish? && force_quality_metrics
    QualityMetric.vote( user, self, QualityMetric::WILD, true )
  elsif captive_flag.noish? && ( qm = quality_metrics.detect{|m| m.user_id == user_id && m.metric == QualityMetric::WILD} )
    qm.update_attributes( agree: true)
  elsif force_quality_metrics && ( qm = quality_metrics.detect{|m| m.user_id == user_id && m.metric == QualityMetric::WILD} )
    qm.destroy
  end
  system_captive_vote = quality_metrics.detect{ |m| m.user_id.blank? && m.metric == QualityMetric::WILD }
  if probably_captive?
    QualityMetric.vote( nil, self, QualityMetric::WILD, false ) unless system_captive_vote
  elsif system_captive_vote
    system_captive_vote.destroy
  end
  true
end

def update_attributes(attributes)
  # hack around a weird android bug
  attributes.delete(:iconic_taxon_name)
  # MASS_ASSIGNABLE_ATTRIBUTES.each do |a|
  #   self.send("#{a}=", attributes.delete(a.to_s)) if attributes.has_key?(a.to_s)
  #   self.send("#{a}=", attributes.delete(a)) if attributes.has_key?(a)
  # end
  super(attributes)
end

# Human-readable license name, e.g. "Creative Commons Attribution".
def license_name
  return nil if license.blank?
  s = "Creative Commons "
  s += LICENSES.detect{|row| row.first == license}.try(:[], 1).to_s
  s
end

# I'm not psyched about having this stuff here, but it makes generating
# more compact JSON a lot easier.
include ObservationsHelper
include ActionView::Helpers::SanitizeHelper
include ActionView::Helpers::TextHelper
# include ActionController::UrlWriter
include Rails.application.routes.url_helpers

# Medium-size photo URL, or nil when the helper returns a non-http value.
def image_url(options = {})
  url = observation_image_url(self, options.merge(size: "medium"))
  url =~ /^http/ ? url : nil
end

def obs_image_url
  image_url
end

def short_description
  short_observation_description(self)
end

def scientific_name
  taxon.scientific_name.name if taxon && taxon.scientific_name
end

def common_name
  taxon.common_name.name if taxon && taxon.common_name
end

def url
  uri
end

def user_login
  user.login
end

# Recomputes identification agreement/disagreement counts and the quality
# grade. With :include, extra (possibly unsaved) identifications are
# considered. Persists via update_all unless :skip_save.
def update_stats(options = {})
  idents = [self.identifications.to_a, options[:include]].flatten.compact.uniq
  current_idents = idents.select(&:current?)
  if taxon_id.blank?
    num_agreements = 0
    num_disagreements = 0
  else
    # Prefer the community-taxon node stats when the displayed taxon has a
    # node; otherwise fall back to per-ident agreement checks.
    if node = community_taxon_nodes.detect{|n| n[:taxon].try(:id) == taxon_id}
      num_agreements = node[:cumulative_count]
      num_disagreements = node[:disagreement_count] + node[:conservative_disagreement_count]
      # don't count the owner's own supporting ident as an agreement
      num_agreements -= 1 if current_idents.detect{|i| i.taxon_id == taxon_id && i.user_id == user_id}
      num_agreements = 0 if current_idents.count <= 1
      num_disagreements = 0 if current_idents.count <= 1
    else
      num_agreements = current_idents.select{|ident| ident.is_agreement?(:observation => self)}.size
      num_disagreements = current_idents.select{|ident| ident.is_disagreement?(:observation => self)}.size
    end
  end
  # Kinda lame, but Observation#get_quality_grade relies on these numbers
  self.num_identification_agreements = num_agreements
  self.num_identification_disagreements = num_disagreements
  self.identifications_count = idents.size
  new_quality_grade = get_quality_grade
  self.quality_grade = new_quality_grade
  if !options[:skip_save] && (
      num_identification_agreements_changed? ||
      num_identification_disagreements_changed? ||
      quality_grade_changed? ||
      identifications_count_changed?)
    Observation.where(id: id).update_all(
      num_identification_agreements: num_agreements,
      num_identification_disagreements: num_disagreements,
      quality_grade: new_quality_grade,
      identifications_count: identifications_count)
    refresh_check_lists
    refresh_lists
  end
end

# Batch job: recompute community taxa, stats, and ident categories for all
# observations identified as the given taxon (or a descendant), finding
# affected observation ids via Elasticsearch, then reindexing each batch.
def self.update_stats_for_observations_of(taxon)
  taxon = Taxon.find_by_id(taxon) unless taxon.is_a?(Taxon)
  return unless taxon
  descendant_conditions = taxon.descendant_conditions.to_a
  result = Identification.elastic_search(
    filters: [ { bool: { should: [
      { term: { "taxon.ancestor_ids": taxon.id } },
      { term: { "observation.taxon.ancestor_ids": taxon.id } },
    ]}}],
    size: 0,
    aggregate: {
      obs: { terms: { field: "observation.id", size: 3000000 } }
    }
  )
  obs_ids = result.response.aggregations.obs.buckets.map{ |b| b[:key] }
  obs_ids.in_groups_of(1000) do |batch_ids|
    Observation.includes(
      :taxon, { identifications: :taxon }, :flags, { photos: :flags },
      :quality_metrics, :sounds, :votes_for
    ).where(id: batch_ids).find_each do |o|
      o.set_community_taxon
      o.update_stats(skip_save: true)
      if o.changed?
        # save quietly; the whole batch is reindexed below
        o.skip_indexing = true
        o.save
        Identification.update_categories_for_observation( o )
      end
    end
    Observation.elastic_index!(ids: batch_ids)
  end
  Rails.logger.info "[INFO #{Time.now}] Finished Observation.update_stats_for_observations_of(#{taxon})"
end

# A random point inside the uncertainty cell around the given coordinates,
# used when displaying obscured locations.
def self.random_neighbor_lat_lon(lat, lon)
  precision = 10**5.0
  range = ((-1 * precision)..precision)
  half_cell = COORDINATE_UNCERTAINTY_CELL_SIZE / 2
  base_lat, base_lon = uncertainty_cell_southwest_latlon( lat, lon )
  [
    base_lat + ((rand(range) / precision) * half_cell),
    base_lon + ((rand(range) / precision) * half_cell)]
end

#
# Coordinates of the southwest corner of the uncertainty cell for any given coordinates
#
def self.uncertainty_cell_southwest_latlon( lat, lon )
  half_cell = COORDINATE_UNCERTAINTY_CELL_SIZE / 2
  # how many significant digits in the obscured coordinates (e.g. 5)
  # doing a floor with intervals of 0.2, then adding 0.1
  # so our origin is the center of a 0.2 square
  # NOTE(review): the half_cell offset makes this the cell *center*, not
  # the southwest corner the method name suggests — confirm intent.
  base_lat = lat - (lat % COORDINATE_UNCERTAINTY_CELL_SIZE) + half_cell
  base_lon = lon - (lon % COORDINATE_UNCERTAINTY_CELL_SIZE) + half_cell
  [base_lat, base_lon]
end

#
# Distance of a diagonal from corner to corner across the uncertainty cell
# for the given coordinates.
#
def self.uncertainty_cell_diagonal_meters( lat, lon )
  base_lat, base_lon = uncertainty_cell_southwest_latlon( lat, lon )
  lat_lon_distance_in_meters( base_lat, base_lon,
    base_lat + COORDINATE_UNCERTAINTY_CELL_SIZE,
    base_lon + COORDINATE_UNCERTAINTY_CELL_SIZE ).ceil
end

# Instance version: diagonal across this observation's uncertainty cell,
# preferring private coordinates. nil when not georeferenced.
def uncertainty_cell_diagonal_meters
  return nil unless georeferenced?
  lat = private_latitude || latitude
  lon = private_longitude || longitude
  Observation.uncertainty_cell_diagonal_meters( lat, lon )
end

# Places whose geometry contains the point, smallest first, with an
# app-layer bounding-box check for dateline-related PostGIS quirks.
def self.places_for_latlon( lat, lon, acc )
  candidates = Place.containing_lat_lng(lat, lon).sort_by{|p| p.bbox_area || 0}
  # At present we use PostGIS GEOMETRY types, which are a bit stupid about
  # things crossing the dateline, so we need to do an app-layer check.
  # Converting to the GEOGRAPHY type would solve this, in theory.
  # Unfortunately this does NOT solve the problem of failing to select
  # legit geoms that cross the dateline. GEOGRAPHY would solve that too.
  candidates.select do |p|
    # HACK: bbox_contains_lat_lng_acc uses rgeo, which despite having a
    # spherical geometry factory, doesn't seem to allow spherical polygons
    # to use a contains? method, which means it doesn't really work for
    # polygons that cross the dateline, so... skip it until we switch to
    # geography, I guess.
    if p.straddles_date_line?
      true
    else
      p.bbox_contains_lat_lng_acc?(lat, lon, acc)
    end
  end
end

# Places containing this observation's (preferably private) coordinates.
def places
  return [] unless georeferenced?
lat = private_latitude || latitude lon = private_longitude || longitude acc = private_positional_accuracy || positional_accuracy Observation.places_for_latlon( lat, lon, acc ) end def public_places return [] unless georeferenced? return [] if geoprivacy == PRIVATE lat = private_latitude || latitude lon = private_longitude || longitude acc = public_positional_accuracy || positional_accuracy Observation.places_for_latlon( lat, lon, acc ) end def self.system_places_for_latlon( lat, lon, options = {} ) all_places = options[:places] || places_for_latlon( lat, lon, options[:acc] ) all_places.select do |p| p.user_id.blank? && ( [Place::COUNTRY_LEVEL, Place::STATE_LEVEL, Place::COUNTY_LEVEL].include?(p.admin_level) || p.place_type == Place::PLACE_TYPE_CODES['Open Space'] ) end end # The places that are theoretically controlled by site admins def system_places(options = {}) Observation.system_places_for_latlon( latitude, longitude, options.merge( acc: positional_accuracy ) ) end def public_system_places( options = {} ) Observation.system_places_for_latlon( latitude, longitude, options.merge( acc: positional_accuracy ) ) public_places.select{|p| !p.admin_level.blank? } end def intersecting_places return [] unless georeferenced? 
lat = private_latitude || latitude lon = private_longitude || longitude @intersecting_places ||= Place.containing_lat_lng(lat, lon).sort_by{|p| p.bbox_area || 0} end { 0 => "Undefined", 2 => "Street Segment", 4 => "Street", 5 => "Intersection", 6 => "Street", 7 => "Town", 8 => "State", 9 => "County", 10 => "Local Administrative Area", 12 => "Country", 13 => "Island", 14 => "Airport", 15 => "Drainage", 16 => "Land Feature", 17 => "Miscellaneous", 18 => "Nationality", 19 => "Supername", 20 => "Point of Interest", 21 => "Region", 24 => "Colloquial", 25 => "Zone", 26 => "Historical State", 27 => "Historical County", 29 => "Continent", 33 => "Estate", 35 => "Historical Town", 36 => "Aggregate", 100 => "Open Space", 101 => "Territory" }.each do |code, type| define_method "place_#{type.underscore}" do intersecting_places.detect{|p| p.place_type == code} end define_method "place_#{type.underscore}_name" do send("place_#{type.underscore}").try(:name) end end def taxon_and_ancestors taxon ? taxon.self_and_ancestors.to_a : [] end def mobile? return false unless user_agent MOBILE_APP_USER_AGENT_PATTERNS.each do |pattern| return true if user_agent =~ pattern end false end def device_name return "unknown" unless user_agent if user_agent =~ ANDROID_APP_USER_AGENT_PATTERN "iNaturalist Android App" elsif user_agent =~ FISHTAGGER_APP_USER_AGENT_PATTERN "Fishtagger iPhone App" elsif user_agent =~ IPHONE_APP_USER_AGENT_PATTERN "iNaturalist iPhone App" else "web browser" end end def device_url return unless user_agent if user_agent =~ FISHTAGGER_APP_USER_AGENT_PATTERN "http://itunes.apple.com/us/app/fishtagger/id582724178?mt=8" elsif user_agent =~ IPHONE_APP_USER_AGENT_PATTERN "http://itunes.apple.com/us/app/inaturalist/id421397028?mt=8" elsif user_agent =~ ANDROID_APP_USER_AGENT_PATTERN "https://market.android.com/details?id=org.inaturalist.android" end end def owners_identification if identifications.loaded? 
    # if idents are loaded, the most recent current identification might be a new record
    identifications.sort_by{|i| i.created_at || 1.minute.from_now}.select {|ident|
      ident.user_id == user_id && ident.current?
    }.last
  else
    identifications.current.by(user_id).last
  end
end

# Current identifications made by anyone other than the observer.
def others_identifications
  if identifications.loaded?
    identifications.select do |i|
      i.current? && i.user_id != user_id
    end
  else
    identifications.current.not_by(user_id)
  end
end

# Supports virtual readers: "field:<normalized name>" returns the matching
# observation field value (taxon name for taxon-type fields), and
# "taxon_<method>" delegates to the taxon.
def method_missing(method, *args, &block)
  return super unless method.to_s =~ /^field:/ || method.to_s =~ /^taxon_[^=]+/
  if method.to_s =~ /^field:/
    of_name = method.to_s.split(':').last
    ofv = observation_field_values.detect{|ofv| ofv.observation_field.normalized_name == of_name}
    if ofv
      return ofv.taxon ? ofv.taxon.name : ofv.value
    end
  elsif method.to_s =~ /^taxon_/ && !self.class.instance_methods.include?(method) && taxon
    return taxon.send(method.to_s.gsub(/^taxon_/, ''))
  end
  super
end

# Mirrors method_missing for the virtual "field:" / "taxon_" readers.
# NOTE(review): the @@-class-variable method/column caches are shared
# across subclasses and never invalidated.
def respond_to?(method, include_private = false)
  @@class_methods_hash ||= Hash[ self.class.instance_methods.map{ |h| [ h.to_sym, true ] } ]
  @@class_columns_hash ||= Hash[ self.class.column_names.map{ |h| [ h.to_sym, true ] } ]
  if @@class_methods_hash[method.to_sym] || @@class_columns_hash[method.to_sym]
    return super
  end
  return super unless method.to_s =~ /^field:/ || method.to_s =~ /^taxon_[^=]+/
  if method.to_s =~ /^field:/
    of_name = method.to_s.split(':').last
    ofv = observation_field_values.detect{|ofv| ofv.observation_field.normalized_name == of_name}
    return !ofv.blank?
  elsif method.to_s =~ /^taxon_/ && taxon
    return taxon.respond_to?(method.to_s.gsub(/^taxon_/, ''), include_private)
  end
  super
end

# Merges another observation into this one: copies the reject's values
# into any blank columns, moves its associations over, destroys it, then
# keeps only the newest identification per (user, taxon) pair current.
def merge(reject)
  mutable_columns = self.class.column_names - %w(id created_at updated_at)
  mutable_columns.each do |column|
    self.send("#{column}=", reject.send(column)) if send(column).blank?
  end
  reject.identifications.update_all("current = false")
  merge_has_many_associations(reject)
  reject.destroy
  identifications.group_by{|ident| [ident.user_id, ident.taxon_id]}.each do |pair, idents|
    c = idents.sort_by(&:id).last
    c.update_attributes(:current => true)
  end
  save!
end

# Marks the observation as reviewed by its owner when a taxon is first set.
def create_observation_review
  return true unless taxon
  return true unless taxon_id_was.blank?
  ObservationReview.where( observation_id: id, user_id: user_id ).first_or_create.touch
  true
end

# Leaves a tombstone record when the observation is destroyed.
def create_deleted_observation
  DeletedObservation.create(
    :observation_id => id,
    :user_id => user_id
  )
  true
end

# Converts "namespace:predicate=value" (or "predicate=value") tags into
# observation field values, resolving taxon-type values to taxon ids.
# Skips unknown fields, duplicates, and invalid values.
def build_observation_fields_from_tags(tags)
  tags.each do |tag|
    np, value = tag.split('=')
    next unless np && value
    namespace, predicate = np.split(':')
    predicate = namespace if predicate.blank?
    next if predicate.blank?
    of = ObservationField.where("lower(name) = ?", predicate.downcase).first
    next unless of
    next if self.observation_field_values.detect{|ofv| ofv.observation_field_id == of.id}
    if of.datatype == ObservationField::TAXON
      t = Taxon.single_taxon_for_name(value)
      next unless t
      value = t.id
    end
    ofv = ObservationFieldValue.new(observation: self, observation_field: of, value: value)
    self.observation_field_values.build(ofv.attributes) if ofv.valid?
  end
end

# Whether user u may add observation fields, per the owner's preference
# (anyone / curators / owner only).
def fields_addable_by?(u)
  return false unless u.is_a?(User)
  return true if user.preferred_observation_fields_by == User::PREFERRED_OBSERVATION_FIELDS_BY_ANYONE
  return true if user.preferred_observation_fields_by == User::PREFERRED_OBSERVATION_FIELDS_BY_CURATORS && u.is_curator?
  u.id == user_id
end

# Converts geo_x/geo_y in an arbitrary coordinate system to WGS84 lat/lon.
def set_coordinates
  if self.geo_x.present? && self.geo_y.present? && self.coordinate_system.present?
    # Perform the transformation
    # transform from `self.coordinate_system` ...
    from = RGeo::CoordSys::Proj4.new(self.coordinate_system)
    # ... to WGS84
    to = RGeo::CoordSys::Proj4.new(WGS84_PROJ4)
    # Returns an array of lat, lon
    transform = RGeo::CoordSys::Proj4.transform_coords(from, to, self.geo_x.to_d, self.geo_y.to_d)
    # Set the transformed coordinates
    self.longitude, self.latitude = transform
  end
  true
end

# Required for use of the sanitize method in
# ObservationsHelper#short_observation_description
def self.white_list_sanitizer
  @white_list_sanitizer ||= HTML::WhiteListSanitizer.new
end

# For a taxon change (split/merge/swap), adds a new identification of the
# output taxon for each affected observation whose owner identified one of
# the input taxa. Yields every matched observation to the optional block.
def self.update_for_taxon_change(taxon_change, options = {}, &block)
  input_taxon_ids = taxon_change.input_taxa.map(&:id)
  scope = Observation.where("observations.taxon_id IN (?)", input_taxon_ids)
  scope = scope.by(options[:user]) if options[:user]
  scope = scope.where("observations.id IN (?)", options[:records].to_a) unless options[:records].blank?
  scope = scope.includes( :user, :identifications, :observations_places )
  scope.find_each do |observation|
    if observation.owners_identification && input_taxon_ids.include?( observation.owners_identification.taxon_id )
      if output_taxon = taxon_change.output_taxon_for_record( observation )
        Identification.create(
          user: observation.user,
          observation: observation,
          taxon: output_taxon,
          taxon_change: taxon_change
        )
      end
    end
    yield(observation) if block_given?
  end
end

# 2014-01 I tried improving performance by loading ancestor taxa for each
# batch, but it didn't really speed things up much
# Streams the scope to a CSV file in batches; per-cell errors are
# swallowed so one bad attribute doesn't abort the export.
def self.generate_csv(scope, options = {})
  fname = options[:fname] || "observations.csv"
  fpath = options[:path] || File.join(options[:dir] || Dir::tmpdir, fname)
  FileUtils.mkdir_p File.dirname(fpath), :mode => 0755
  columns = options[:columns] || CSV_COLUMNS
  CSV.open(fpath, 'w') do |csv|
    csv << columns
    scope.find_each(batch_size: 500) do |observation|
      csv << columns.map do |c|
        c = "cached_tag_list" if c == "tag_list"
        observation.send(c) rescue nil
      end
    end
  end
  fpath
end

# Exports observations belonging to a record (Project, User, ...) to CSV,
# stripping private_* columns unless the viewer owns the data or curates
# the project. Builds into a tmp file, then moves it into place.
def self.generate_csv_for(record, options = {})
  fname = options[:fname] || "#{record.to_param}-observations.csv"
  fpath = options[:path] || File.join(options[:dir] || Dir::tmpdir, fname)
  tmp_path = File.join(Dir::tmpdir, fname)
  FileUtils.mkdir_p File.dirname(tmp_path), :mode => 0755
  columns = CSV_COLUMNS
  # ensure private coordinates are hidden unless they shouldn't be
  viewer_curates_project = record.is_a?(Project) && record.curated_by?(options[:user])
  viewer_is_owner = record.is_a?(User) && record == options[:user]
  unless viewer_curates_project || viewer_is_owner
    columns = columns.select{|c| c !~ /^private_/}
  end
  # generate the csv
  if record.respond_to?(:generate_csv)
    record.generate_csv(tmp_path, columns, viewer: options[:user])
  else
    scope = record.observations.
      includes(:taxon).
      includes(observation_field_values: :observation_field)
    unless record.is_a?(User) && options[:user] === record
      scope = scope.includes(project_observations: :stored_preferences).
        includes(user: {project_users: :stored_preferences})
    end
    generate_csv(scope, :path => tmp_path, :fname => fname, :columns => columns, :viewer => options[:user])
  end
  FileUtils.mkdir_p File.dirname(fpath), :mode => 0755
  if tmp_path != fpath
    FileUtils.mv tmp_path, fpath
  end
  fpath
end

def self.generate_csv_for_cache_key(record, options = {})
  "#{record.class.name.underscore}_#{record.id}"
end

# Public accuracy, lazily backfilled for obscured observations.
def public_positional_accuracy
  if coordinates_obscured? && !read_attribute(:public_positional_accuracy)
    update_public_positional_accuracy
  end
  read_attribute(:public_positional_accuracy)
end

def update_public_positional_accuracy
  update_column(:public_positional_accuracy, calculate_public_positional_accuracy)
end

# For obscured coordinates the public accuracy can never be smaller than
# the uncertainty cell diagonal.
def calculate_public_positional_accuracy
  if coordinates_obscured?
    [ positional_accuracy.to_i, uncertainty_cell_diagonal_meters, 0 ].max
  elsif !positional_accuracy.blank?
    positional_accuracy
  end
end

# True when voters judged the location inaccurate (score <= 0.5).
def inaccurate_location?
  if metric = quality_metric_score(QualityMetric::LOCATION)
    return metric <= 0.5
  end
  false
end

def update_mappable
  update_column(:mappable, calculate_mappable)
end

# Mappable = has coordinates, accuracy within one uncertainty cell,
# location not voted inaccurate, passes the evidence metric, and isn't
# flagged as inappropriate.
def calculate_mappable
  return false if latitude.blank? && longitude.blank?
  return false if public_positional_accuracy && public_positional_accuracy > uncertainty_cell_diagonal_meters
  return false if inaccurate_location?
  return false unless passes_quality_metric?(QualityMetric::EVIDENCE)
  return false unless appropriate?
  true
end

def update_observations_places
  Observation.update_observations_places(ids: [ id ])
  # reload the association since we added the records using SQL
  observations_places(true)
end

# Schedules the community taxon to pick a default photo once this
# observation first reaches research grade and the taxon has no photos.
def set_taxon_photo
  return true unless research_grade? && quality_grade_changed?
  unless taxon.photos.any?
    community_taxon.delay(
      priority: INTEGRITY_PRIORITY,
      run_at: 1.day.from_now
    ).set_photo_from_observations
  end
  true
end

# Rebuilds the observations_places join rows with raw SQL (delete +
# insert-select against place_geometries), batching :ids in slices of
# 1000 via recursive calls.
def self.update_observations_places(options = { })
  filter_scope = options.delete(:scope)
  scope = (filter_scope && filter_scope.is_a?(ActiveRecord::Relation)) ?
    filter_scope : self.all
  if filter_ids = options.delete(:ids)
    if filter_ids.length > 1000
      # call again for each batch, then return
      filter_ids.each_slice(1000) do |slice|
        update_observations_places(options.merge(ids: slice))
      end
      return
    end
    scope = scope.where(id: filter_ids)
  end
  scope.select(:id).find_in_batches(options) do |batch|
    ids = batch.map(&:id)
    Observation.transaction do
      connection.execute("DELETE FROM observations_places WHERE observation_id IN (#{ ids.join(',') })")
      connection.execute("INSERT INTO observations_places (observation_id, place_id) SELECT o.id, pg.place_id FROM observations o JOIN place_geometries pg ON ST_Intersects(pg.geom, o.private_geom) WHERE o.id IN (#{ ids.join(',') }) AND pg.place_id IS NOT NULL AND NOT EXISTS ( SELECT id FROM observations_places WHERE place_id = pg.place_id AND observation_id = o.id )")
    end
  end
end

# Photos that are done (or never needed) background processing.
def observation_photos_finished_processing
  observation_photos.select do |op|
    ! (op.photo.is_a?(LocalPhoto) && op.photo.processing?)
  end
end

# Estimates coordinates by time-weighted interpolation between the user's
# chronologically neighboring georeferenced observations. Mutates
# latitude/longitude (and positional_accuracy when derivable); no save.
def interpolate_coordinates
  return unless time_observed_at
  scope = user.observations.where("latitude IS NOT NULL or private_latitude IS NOT NULL")
  prev_obs = scope.where("time_observed_at < ?", time_observed_at).order("time_observed_at DESC").first
  next_obs = scope.where("time_observed_at > ?", time_observed_at).order("time_observed_at ASC").first
  return unless prev_obs && next_obs
  prev_lat = prev_obs.private_latitude || prev_obs.latitude
  prev_lon = prev_obs.private_longitude || prev_obs.longitude
  next_lat = next_obs.private_latitude || next_obs.latitude
  next_lon = next_obs.private_longitude || next_obs.longitude
  # time-weighted interpolation between prev and next observations
  weight = (next_obs.time_observed_at - time_observed_at) / (next_obs.time_observed_at-prev_obs.time_observed_at)
  new_lat = (1-weight)*next_lat + weight*prev_lat
  new_lon = (1-weight)*next_lon + weight*prev_lon
  self.latitude = new_lat
  self.longitude = new_lon
  # we can only set a new uncertainty if the uncertainty of
  # the two points are known
  if prev_obs.positional_accuracy && next_obs.positional_accuracy
    f = RGeo::Geographic.simple_mercator_factory
    prev_point = f.point(prev_lon, prev_lat)
    next_point = f.point(next_lon, next_lat)
    interpolation_uncertainty = prev_point.distance(next_point)/2.0
    new_acc = Math.sqrt(interpolation_uncertainty**2 + prev_obs.positional_accuracy**2 + next_obs.positional_accuracy**2)
    self.positional_accuracy = new_acc
  end
end

# Renders a scope to a CSV string, one column per method name.
def self.as_csv(scope, methods, options = {})
  CSV.generate do |csv|
    csv << methods
    scope.each do |item|
      # image_url gets options, which will include an SSL boolean
      csv << methods.map{ |m| m == :image_url ? item.send(m, options) : item.send(m) }
    end
  end
end

def community_taxon_at_species_or_lower?
  community_taxon && community_taxon_id == taxon_id && community_taxon.rank_level &&
    community_taxon.rank_level <= Taxon::SPECIES_LEVEL
end

def community_taxon_at_family_or_lower?
  community_taxon && community_taxon_id == taxon_id && community_taxon.rank_level &&
    community_taxon.rank_level <= Taxon::FAMILY_LEVEL
end

def community_taxon_below_family?
  community_taxon && community_taxon_id == taxon_id && community_taxon.rank_level &&
    community_taxon.rank_level < Taxon::FAMILY_LEVEL
end

# needs_id up/down vote counts, avoiding extra queries when votes_for is
# already loaded.
def needs_id_upvotes_count
  votes_for.loaded? ?
    votes_for.select{ |v| v.vote_flag? && v.vote_scope == "needs_id" }.size :
    get_upvotes(vote_scope: "needs_id").size
end

def needs_id_downvotes_count
  votes_for.loaded? ?
    votes_for.select{ |v| !v.vote_flag? && v.vote_scope == "needs_id" }.size :
    get_downvotes(vote_scope: "needs_id").size
end

# Fraction of needs_id votes that are upvotes; nil when there are no votes.
def needs_id_vote_score
  uvotes = needs_id_upvotes_count
  dvotes = needs_id_downvotes_count
  if uvotes == 0 && dvotes == 0
    nil
  elsif uvotes == 0
    0
  elsif dvotes == 0
    1
  else
    uvotes.to_f / (uvotes + dvotes)
  end
end

def voted_out_of_needs_id?
  needs_id_downvotes_count > needs_id_upvotes_count
end

def voted_in_to_needs_id?
  needs_id_upvotes_count > needs_id_downvotes_count
end

def needs_id?
  quality_grade == NEEDS_ID
end

def casual?
  quality_grade == CASUAL
end

# Flag callback: forces a quality grade recalculation and spam evaluation.
def flagged_with(flag, options)
  quality_grade_will_change!
  save
  evaluate_new_flag_for_spam(flag)
end

def mentioned_users
  return [ ] unless description
  description.mentioned_users
end

# Show count of all faves on this observation. cached_votes_total stores the
# count of all votes without a vote_scope, which for an Observation means
# the faves, but since that might vary from model to model based on how we
# use acts_as_votable, faves_count seems clearer.
def faves_count
  cached_votes_total
end

# Heuristic: for genus-or-finer taxa, look up the captive ratio of
# observations of this taxon in the containing admin place via
# Elasticsearch; "probably captive" when >10 observations and >=80%
# are marked captive.
def probably_captive?
  target_taxon = community_taxon || taxon
  return false unless target_taxon
  if target_taxon.rank_level.blank? || target_taxon.rank_level.to_i > Taxon::GENUS_LEVEL
    return false
  end
  place = system_places.detect do |p|
    [ Place::COUNTRY_LEVEL, Place::STATE_LEVEL, Place::COUNTY_LEVEL ].include?( p.admin_level )
  end
  return false unless place
  buckets = Observation.elastic_search(
    filters: [
      { term: { "taxon.ancestor_ids": target_taxon.id } },
      { term: { place_ids: place.id } },
    ],
    # earliest_sort_field: "id",
    size: 0,
    aggregate: {
      captive: {
        terms: { field: "captive", size: 15 }
      }
    }
  ).results.response.response.aggregations.captive.buckets
  captive_stats = Hash[ buckets.map{ |b| [ b["key"], b["doc_count" ] ] } ]
  total = captive_stats.values.sum
  ratio = captive_stats[1].to_f / total
  # puts "total: #{total}, ratio: #{ratio}, place: #{place}"
  total > 10 && ratio >= 0.8
end

# OAuth application id to index with this observation, falling back to the
# well-known mobile apps based on user agent.
def application_id_to_index
  return oauth_application_id if oauth_application_id
  if user_agent =~ IPHONE_APP_USER_AGENT_PATTERN
    return OauthApplication.inaturalist_iphone_app.id
  end
  if user_agent =~ ANDROID_APP_USER_AGENT_PATTERN
    return OauthApplication.inaturalist_android_app.id
  end
end

def owners_identification_from_vision
  owners_identification.try(:vision)
end

def owners_identification_from_vision=( val )
  self.owners_identification_from_vision_requested = val
end

# Removes duplicate observations for a user (same taxon, date string, and
# private geom), keeping the oldest of each group. Accepts a User, id, or
# login; supports :test (dry run) and :debug options.
def self.dedupe_for_user(user, options = {})
  unless user.is_a?(User)
    u = User.find_by_id(user)
    u ||=
User.find_by_login(user) user = u end return unless user sql = <<-SQL SELECT array_agg(id) AS observation_ids FROM observations WHERE user_id = #{user.id} AND taxon_id IS NOT NULL AND observed_on_string IS NOT NULL AND observed_on_string != '' AND private_geom IS NOT NULL GROUP BY user_id, taxon_id, observed_on_string, private_geom HAVING count(*) > 1; SQL deleted = 0 start = Time.now Observation.connection.execute(sql).each do |row| ids = row['observation_ids'].gsub(/[\{\}]/, '').split(',').map(&:to_i).sort puts "Found duplicates: #{ids.join(',')}" if options[:debug] keeper_id = ids.shift puts "\tKeeping #{keeper_id}" if options[:debug] unless options[:test] Observation.find(ids).each do |o| puts "\tDeleting #{o.id}" if options[:debug] o.destroy end end deleted += ids.size end puts "Deleted #{deleted} observations in #{Time.now - start}s" if options[:debug] end def self.index_observations_for_user(user_id) Observation.elastic_index!( scope: Observation.by( user_id ) ) end def self.refresh_es_index Observation.__elasticsearch__.refresh_index! unless Rails.env.test? end end fixing issue with some specs #encoding: utf-8 class Observation < ActiveRecord::Base include ActsAsElasticModel include ObservationSearch include ActsAsUUIDable has_subscribers :to => { :comments => {:notification => "activity", :include_owner => true}, :identifications => {:notification => "activity", :include_owner => true} } notifies_subscribers_of :user, :notification => "created_observations", :queue_if => lambda { |observation| !observation.bulk_import } notifies_subscribers_of :public_places, :notification => "new_observations", :on => :create, :queue_if => lambda {|observation| observation.georeferenced? && !observation.bulk_import }, :if => lambda {|observation, place, subscription| return false unless observation.georeferenced? return true if subscription.taxon_id.blank? return false if observation.taxon.blank? 
observation.taxon.ancestor_ids.include?(subscription.taxon_id) } notifies_subscribers_of :taxon_and_ancestors, :notification => "new_observations", :queue_if => lambda {|observation| !observation.taxon_id.blank? && !observation.bulk_import}, :if => lambda {|observation, taxon, subscription| return true if observation.taxon_id == taxon.id return false if observation.taxon.blank? observation.taxon.ancestor_ids.include?(subscription.resource_id) } notifies_users :mentioned_users, on: :save, notification: "mention" acts_as_taggable acts_as_votable acts_as_spammable fields: [ :description ], comment_type: "item-description", automated: false include Ambidextrous # Set to true if you want to skip the expensive updating of all the user's # lists after saving. Useful if you're saving many observations at once and # you want to update lists in a batch attr_accessor :skip_refresh_lists, :skip_refresh_check_lists, :skip_identifications, :bulk_import, :skip_indexing # Set if you need to set the taxon from a name separate from the species # guess attr_accessor :taxon_name # licensing extras attr_accessor :make_license_default attr_accessor :make_licenses_same # coordinate system attr_accessor :coordinate_system attr_accessor :geo_x attr_accessor :geo_y attr_accessor :owners_identification_from_vision_requested def captive_flag @captive_flag ||= !quality_metrics.detect{|qm| qm.user_id == user_id && qm.metric == QualityMetric::WILD && !qm.agree? }.nil? 
end def captive_flag=(v) @captive_flag = v end attr_accessor :force_quality_metrics # custom project field errors attr_accessor :custom_field_errors MASS_ASSIGNABLE_ATTRIBUTES = [:make_license_default, :make_licenses_same] M_TO_OBSCURE_THREATENED_TAXA = 10000 OUT_OF_RANGE_BUFFER = 5000 # meters PLANETARY_RADIUS = 6370997.0 DEGREES_PER_RADIAN = 57.2958 FLOAT_REGEX = /[-+]?[0-9]*\.?[0-9]+/ COORDINATE_REGEX = /[^\d\,]*?(#{FLOAT_REGEX})[^\d\,]*?/ LAT_LON_SEPARATOR_REGEX = /[\,\s]\s*/ LAT_LON_REGEX = /#{COORDINATE_REGEX}#{LAT_LON_SEPARATOR_REGEX}#{COORDINATE_REGEX}/ COORDINATE_UNCERTAINTY_CELL_SIZE = 0.2 OPEN = "open" PRIVATE = "private" OBSCURED = "obscured" GEOPRIVACIES = [OBSCURED, PRIVATE] GEOPRIVACY_DESCRIPTIONS = { OPEN => :open_description, OBSCURED => :obscured_description, PRIVATE => :private_description } RESEARCH_GRADE = "research" CASUAL = "casual" NEEDS_ID = "needs_id" QUALITY_GRADES = [CASUAL, NEEDS_ID, RESEARCH_GRADE] COMMUNITY_TAXON_SCORE_CUTOFF = (2.0 / 3) LICENSES = [ ["CC0", :cc_0_name, :cc_0_description], ["CC-BY", :cc_by_name, :cc_by_description], ["CC-BY-NC", :cc_by_nc_name, :cc_by_nc_description], ["CC-BY-SA", :cc_by_sa_name, :cc_by_sa_description], ["CC-BY-ND", :cc_by_nd_name, :cc_by_nd_description], ["CC-BY-NC-SA",:cc_by_nc_sa_name, :cc_by_nc_sa_description], ["CC-BY-NC-ND", :cc_by_nc_nd_name, :cc_by_nc_nd_description] ] LICENSE_CODES = LICENSES.map{|row| row.first} LICENSES.each do |code, name, description| const_set code.gsub(/\-/, '_'), code end PREFERRED_LICENSES = [CC_BY, CC_BY_NC, CC0] CSV_COLUMNS = [ "id", "species_guess", "scientific_name", "common_name", "iconic_taxon_name", "taxon_id", "id_please", "num_identification_agreements", "num_identification_disagreements", "observed_on_string", "observed_on", "time_observed_at", "time_zone", "place_guess", "latitude", "longitude", "positional_accuracy", "private_place_guess", "private_latitude", "private_longitude", "private_positional_accuracy", "geoprivacy", "coordinates_obscured", 
"positioning_method", "positioning_device", "out_of_range", "user_id", "user_login", "created_at", "updated_at", "quality_grade", "license", "url", "image_url", "tag_list", "description", "oauth_application_id", "captive_cultivated" ] BASIC_COLUMNS = [ "id", "observed_on_string", "observed_on", "time_observed_at", "time_zone", "out_of_range", "user_id", "user_login", "created_at", "updated_at", "quality_grade", "license", "url", "image_url", "tag_list", "description", "id_please", "num_identification_agreements", "num_identification_disagreements", "captive_cultivated", "oauth_application_id" ] GEO_COLUMNS = [ "place_guess", "latitude", "longitude", "positional_accuracy", "private_place_guess", "private_latitude", "private_longitude", "private_positional_accuracy", "geoprivacy", "coordinates_obscured", "positioning_method", "positioning_device", "place_town_name", "place_county_name", "place_state_name", "place_country_name" ] TAXON_COLUMNS = [ "species_guess", "scientific_name", "common_name", "iconic_taxon_name", "taxon_id" ] EXTRA_TAXON_COLUMNS = %w( kingdom phylum subphylum superclass class subclass superorder order suborder superfamily family subfamily supertribe tribe subtribe genus genushybrid species hybrid subspecies variety form ).map{|r| "taxon_#{r}_name"}.compact ALL_EXPORT_COLUMNS = (CSV_COLUMNS + BASIC_COLUMNS + GEO_COLUMNS + TAXON_COLUMNS + EXTRA_TAXON_COLUMNS).uniq WGS84_PROJ4 = "+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs" ALLOWED_DESCRIPTION_TAGS = %w(a abbr acronym b blockquote br cite em i img pre s small strike strong sub sup) preference :community_taxon, :boolean, :default => nil belongs_to :user, :counter_cache => true belongs_to :taxon belongs_to :community_taxon, :class_name => 'Taxon' belongs_to :iconic_taxon, :class_name => 'Taxon', :foreign_key => 'iconic_taxon_id' belongs_to :oauth_application belongs_to :site, :inverse_of => :observations has_many :observation_photos, -> { order("id asc") }, :dependent => :destroy, :inverse_of => 
:observation has_many :photos, :through => :observation_photos # note last_observation and first_observation on listed taxa will get reset # by CheckList.refresh_with_observation has_many :listed_taxa, :foreign_key => 'last_observation_id' has_many :first_listed_taxa, :class_name => "ListedTaxon", :foreign_key => 'first_observation_id' has_many :first_check_listed_taxa, -> { where("listed_taxa.place_id IS NOT NULL") }, :class_name => "ListedTaxon", :foreign_key => 'first_observation_id' has_many :comments, :as => :parent, :dependent => :destroy has_many :annotations, as: :resource, dependent: :destroy has_many :identifications, :dependent => :destroy has_many :project_observations, :dependent => :destroy has_many :project_observations_with_changes, -> { joins(:model_attribute_changes) }, class_name: "ProjectObservation" has_many :project_invitations, :dependent => :destroy has_many :projects, :through => :project_observations has_many :quality_metrics, :dependent => :destroy has_many :observation_field_values, -> { order("id asc") }, :dependent => :destroy, :inverse_of => :observation has_many :observation_fields, :through => :observation_field_values has_many :observation_links has_and_belongs_to_many :posts has_many :observation_sounds, :dependent => :destroy, :inverse_of => :observation has_many :sounds, :through => :observation_sounds has_many :observations_places, :dependent => :destroy has_many :observation_reviews, :dependent => :destroy has_many :confirmed_reviews, -> { where("observation_reviews.reviewed = true") }, class_name: "ObservationReview" FIELDS_TO_SEARCH_ON = %w(names tags description place) NON_ELASTIC_ATTRIBUTES = %w(establishment_means em) accepts_nested_attributes_for :observation_field_values, :allow_destroy => true, :reject_if => lambda { |attrs| attrs[:value].blank? 
} ## # Validations # validates_presence_of :user_id validate :must_be_in_the_past, :must_not_be_a_range validates_numericality_of :latitude, :allow_blank => true, :less_than_or_equal_to => 90, :greater_than_or_equal_to => -90 validates_numericality_of :longitude, :allow_blank => true, :less_than_or_equal_to => 180, :greater_than_or_equal_to => -180 validates_length_of :observed_on_string, :maximum => 256, :allow_blank => true validates_length_of :species_guess, :maximum => 256, :allow_blank => true validates_length_of :place_guess, :maximum => 256, :allow_blank => true validate do # This should be a validation on cached_tag_list, but acts_as_taggable seems # to set that after the validations run if tag_list.join(", ").length > 750 errors.add( :tag_list, "must be under 750 characters total, no more than 256 characters per tag" ) end end validate do unless coordinate_system.blank? begin RGeo::CoordSys::Proj4.new( coordinate_system ) rescue RGeo::Error::UnsupportedOperation errors.add( :coordinate_system, "is not a valid Proj4 string" ) end end end # See /config/locale/en.yml for field labels for `geo_x` and `geo_y` validates_numericality_of :geo_x, :allow_blank => true, :message => "should be a number" validates_numericality_of :geo_y, :allow_blank => true, :message => "should be a number" validates_presence_of :geo_x, :if => proc {|o| o.geo_y.present? } validates_presence_of :geo_y, :if => proc {|o| o.geo_x.present? 
} before_validation :munge_observed_on_with_chronic, :set_time_zone, :set_time_in_time_zone, :set_coordinates before_save :strip_species_guess, :set_taxon_from_species_guess, :set_taxon_from_taxon_name, :keep_old_taxon_id, :set_latlon_from_place_guess, :reset_private_coordinates_if_coordinates_changed, :normalize_geoprivacy, :set_license, :trim_user_agent, :update_identifications, :set_community_taxon_before_save, :set_taxon_from_community_taxon, :obscure_coordinates_for_geoprivacy, :obscure_coordinates_for_threatened_taxa, :set_geom_from_latlon, :set_place_guess_from_latlon, :obscure_place_guess, :set_iconic_taxon before_update :set_quality_grade after_save :refresh_lists, :refresh_check_lists, :update_out_of_range_later, :update_default_license, :update_all_licenses, :update_taxon_counter_caches, :update_quality_metrics, :update_public_positional_accuracy, :update_mappable, :set_captive, :update_observations_places, :set_taxon_photo, :create_observation_review after_create :set_uri before_destroy :keep_old_taxon_id after_destroy :refresh_lists_after_destroy, :refresh_check_lists, :update_taxon_counter_caches, :create_deleted_observation ## # Named scopes # # Area scopes # scope :in_bounding_box, lambda { |swlat, swlng, nelat, nelng| scope :in_bounding_box, lambda {|*args| swlat, swlng, nelat, nelng, options = args options ||= {} if options[:private] geom_col = "observations.private_geom" lat_col = "observations.private_latitude" lon_col = "observations.private_longitude" else geom_col = "observations.geom" lat_col = "observations.latitude" lon_col = "observations.longitude" end # resort to lat/lon cols for date-line spanning boxes if swlng.to_f > 0 && nelng.to_f < 0 where("#{lat_col} > ? AND #{lat_col} < ? AND (#{lon_col} > ? 
OR #{lon_col} < ?)", swlat.to_f, nelat.to_f, swlng.to_f, nelng.to_f) else where("ST_Intersects( ST_MakeBox2D(ST_Point(#{swlng.to_f}, #{swlat.to_f}), ST_Point(#{nelng.to_f}, #{nelat.to_f})), #{geom_col} )") end } do def distinct_taxon group("taxon_id").where("taxon_id IS NOT NULL").includes(:taxon) end end scope :in_place, lambda {|place| place_id = if place.is_a?(Place) place.id elsif place.to_i == 0 begin Place.find(place).try(&:id) rescue ActiveRecord::RecordNotFound -1 end else place.to_i end joins("JOIN place_geometries ON place_geometries.place_id = #{place_id}"). where("ST_Intersects(place_geometries.geom, observations.private_geom)") } # should use .select("DISTINCT observations.*") scope :in_places, lambda {|place_ids| joins("JOIN place_geometries ON place_geometries.place_id IN (#{place_ids.join(",")})"). where("ST_Intersects(place_geometries.geom, observations.private_geom)") } scope :in_taxons_range, lambda {|taxon| taxon_id = taxon.is_a?(Taxon) ? taxon.id : taxon.to_i joins("JOIN taxon_ranges ON taxon_ranges.taxon_id = #{taxon_id}"). where("ST_Intersects(taxon_ranges.geom, observations.private_geom)") } # possibly radius in kilometers scope :near_point, Proc.new { |lat, lng, radius| lat = lat.to_f lng = lng.to_f radius = radius.to_f radius = 10.0 if radius == 0 planetary_radius = PLANETARY_RADIUS / 1000 # km radius_degrees = radius / (2*Math::PI*planetary_radius) * 360.0 where("ST_DWithin(ST_Point(?,?), geom, ?)", lng.to_f, lat.to_f, radius_degrees) } # Has_property scopes scope :has_taxon, lambda { |*args| taxon_id = args.first if taxon_id.nil? 
where("taxon_id IS NOT NULL") else where("taxon_id IN (?)", taxon_id) end } scope :has_iconic_taxa, lambda { |iconic_taxon_ids| iconic_taxon_ids = [iconic_taxon_ids].flatten.map do |itid| if itid.is_a?(Taxon) itid.id elsif itid.to_i == 0 Taxon::ICONIC_TAXA_BY_NAME[itid].try(:id) else itid end end.uniq if iconic_taxon_ids.include?(nil) where( "observations.iconic_taxon_id IS NULL OR observations.iconic_taxon_id IN (?)", iconic_taxon_ids ) elsif !iconic_taxon_ids.empty? where("observations.iconic_taxon_id IN (?)", iconic_taxon_ids) end } scope :has_geo, -> { where("latitude IS NOT NULL AND longitude IS NOT NULL") } scope :has_id_please, -> { where( "quality_grade = ?", NEEDS_ID ) } scope :has_photos, -> { where("observation_photos_count > 0") } scope :has_sounds, -> { where("observation_sounds_count > 0") } scope :has_quality_grade, lambda {|quality_grade| quality_grades = quality_grade.to_s.split(',') & Observation::QUALITY_GRADES quality_grade = '' if quality_grades.size == 0 where("quality_grade IN (?)", quality_grades) } # Find observations by a taxon object. Querying on taxa columns forces # massive joins, it's a bit sluggish scope :of, lambda { |taxon| taxon = Taxon.find_by_id(taxon.to_i) unless taxon.is_a? Taxon return where("1 = 2") unless taxon c = taxon.descendant_conditions.to_sql c[0] = "taxa.id = #{taxon.id} OR #{c[0]}" joins(:taxon).where(c) } scope :with_identifications_of, lambda { |taxon| taxon = Taxon.find_by_id( taxon.to_i ) unless taxon.is_a? Taxon return where( "1 = 2" ) unless taxon c = taxon.descendant_conditions.to_sql c = c.gsub( '"taxa"."ancestry"', 'it."ancestry"' ) # I'm not using TaxonAncestor here b/c updating observations for changes # in conservation status uses this scope, and when a cons status changes, # we don't want to skip any taxa that have moved around the tree since the # last time the denormalizer ran select( "DISTINCT observations.*"). joins( :identifications ). joins( "JOIN taxa it ON it.id = identifications.taxon_id" ). 
where( "identifications.current AND (it.id = ? or #{c})", taxon.id ) } scope :at_or_below_rank, lambda {|rank| rank_level = Taxon::RANK_LEVELS[rank] joins(:taxon).where("taxa.rank_level <= ?", rank_level) } # Find observations by user scope :by, lambda {|user| if user.is_a?(User) || user.to_i > 0 where("observations.user_id = ?", user) else joins(:user).where("users.login = ?", user) end } # Order observations by date and time observed scope :latest, -> { order("observed_on DESC NULLS LAST, time_observed_at DESC NULLS LAST") } scope :recently_added, -> { order("observations.id DESC") } # TODO: Make this work for any SQL order statement, including multiple cols scope :order_by, lambda { |order_sql| pieces = order_sql.split order_by = pieces[0] order = pieces[1] || 'ASC' extra = [pieces[2..-1]].flatten.join(' ') extra = "NULLS LAST" if extra.blank? options = {} case order_by when 'observed_on' order "observed_on #{order} #{extra}, time_observed_at #{order} #{extra}" when 'created_at' order "observations.id #{order} #{extra}" when 'project' order("project_observations.id #{order} #{extra}").joins(:project_observations) when 'votes' order("cached_votes_total #{order} #{extra}") else order "#{order_by} #{order} #{extra}" end } def self.identifications(agreement) scope = Observation scope = scope.includes(:identifications) case agreement when 'most_agree' scope.where("num_identification_agreements > num_identification_disagreements") when 'some_agree' scope.where("num_identification_agreements > 0") when 'most_disagree' scope.where("num_identification_agreements < num_identification_disagreements") else scope end end # Time based named scopes scope :created_after, lambda { |time| where('created_at >= ?', time)} scope :created_before, lambda { |time| where('created_at <= ?', time)} scope :updated_after, lambda { |time| where('updated_at >= ?', time)} scope :updated_before, lambda { |time| where('updated_at <= ?', time)} scope :observed_after, lambda { |time| 
where('time_observed_at >= ?', time)} scope :observed_before, lambda { |time| where('time_observed_at <= ?', time)} scope :in_month, lambda {|month| where("EXTRACT(MONTH FROM observed_on) = ?", month)} scope :week, lambda {|week| where("EXTRACT(WEEK FROM observed_on) = ?", week)} scope :in_projects, lambda { |projects| # NOTE using :include seems to trigger an erroneous eager load of # observations that screws up sorting kueda 2011-07-22 joins(:project_observations).where("project_observations.project_id IN (?)", Project.slugs_to_ids(projects)) } scope :on, lambda {|date| where(Observation.conditions_for_date(:observed_on, date)) } scope :created_on, lambda {|date| where(Observation.conditions_for_date("observations.created_at", date))} scope :out_of_range, -> { where(:out_of_range => true) } scope :in_range, -> { where(:out_of_range => false) } scope :license, lambda {|license| if license == 'none' where("observations.license IS NULL") elsif LICENSE_CODES.include?(license) where(:license => license) else where("observations.license IS NOT NULL") end } scope :photo_license, lambda {|license| license = license.to_s scope = joins(:photos) license_number = Photo.license_number_for_code(license) if license == 'none' scope.where("photos.license = 0") elsif LICENSE_CODES.include?(license) scope.where("photos.license = ?", license_number) else scope.where("photos.license > 0") end } scope :has_observation_field, lambda{|*args| field, value = args join_name = "ofv_#{field.is_a?(ObservationField) ? field.id : field}" scope = joins("LEFT OUTER JOIN observation_field_values #{join_name} ON #{join_name}.observation_id = observations.id"). where("#{join_name}.observation_field_id = ?", field) scope = scope.where("#{join_name}.value = ?", value) unless value.blank? scope } scope :between_hours, lambda{|h1, h2| h1 = h1.to_i % 24 h2 = h2.to_i % 24 where("EXTRACT(hour FROM ((time_observed_at AT TIME ZONE 'GMT') AT TIME ZONE zic_time_zone)) BETWEEN ? 
AND ?", h1, h2) } scope :between_months, lambda{|m1, m2| m1 = m1.to_i % 12 m2 = m2.to_i % 12 if m1 > m2 where("EXTRACT(month FROM observed_on) >= ? OR EXTRACT(month FROM observed_on) <= ?", m1, m2) else where("EXTRACT(month FROM observed_on) BETWEEN ? AND ?", m1, m2) end } scope :between_dates, lambda{|d1, d2| t1 = (Time.parse(URI.unescape(d1.to_s)) rescue Time.now) t2 = (Time.parse(URI.unescape(d2.to_s)) rescue Time.now) if d1.to_s.index(':') where("time_observed_at BETWEEN ? AND ? OR (time_observed_at IS NULL AND observed_on BETWEEN ? AND ?)", t1, t2, t1.to_date, t2.to_date) else where("observed_on BETWEEN ? AND ?", t1, t2) end } scope :dbsearch, lambda {|*args| q, on = args q = sanitize_query(q) unless q.blank? case on when 'species_guess' where("observations.species_guess ILIKE", "%#{q}%") when 'description' where("observations.description ILIKE", "%#{q}%") when 'place_guess' where("observations.place_guess ILIKE", "%#{q}%") when 'tags' where("observations.cached_tag_list ILIKE", "%#{q}%") else where("observations.species_guess ILIKE ? OR observations.description ILIKE ? OR observations.cached_tag_list ILIKE ? OR observations.place_guess ILIKE ?", "%#{q}%", "%#{q}%", "%#{q}%", "%#{q}%") end } scope :reviewed_by, lambda { |users| joins(:observation_reviews).where("observation_reviews.user_id IN (?)", users) } scope :not_reviewed_by, lambda { |users| users = [ users ] unless users.is_a?(Array) user_ids = users.map{ |u| ElasticModel.id_or_object(u) } joins("LEFT JOIN observation_reviews ON (observations.id=observation_reviews.observation_id) AND observation_reviews.user_id IN (#{ user_ids.join(',') })"). 
where("observation_reviews.id IS NULL") } def self.near_place(place) place = (Place.find(place) rescue nil) unless place.is_a?(Place) if place.swlat Observation.in_bounding_box(place.swlat, place.swlng, place.nelat, place.nelng) else Observation.near_point(place.latitude, place.longitude) end end def self.preload_for_component(observations, logged_in) preloads = [ { user: :stored_preferences }, { taxon: { taxon_names: :place_taxon_names } }, :iconic_taxon, { photos: [ :flags, :user ] }, :stored_preferences, :flags, :quality_metrics ] # why do we need taxon_descriptions when logged in? if logged_in preloads.delete(:iconic_taxon) preloads << { iconic_taxon: :taxon_descriptions } preloads << :project_observations end Observation.preload_associations(observations, preloads) end # help_txt_for :species_guess, <<-DESC # Type a name for what you saw. It can be common or scientific, accurate # or just a placeholder. When you enter it, we'll try to look it up and find # the matching species of higher level taxon. # DESC # # instruction_for :place_guess, "Type the name of a place" # help_txt_for :place_guess, <<-DESC # Enter the name of a place and we'll try to find where it is. If we find # it, you can drag the map marker around to get more specific. # DESC def to_s "<Observation #{self.id}: #{to_plain_s}>" end def to_plain_s(options = {}) s = self.species_guess.blank? ? I18n.t(:something) : self.species_guess if options[:verb] s += options[:verb] == true ? I18n.t(:observed).downcase : " #{options[:verb]}" end unless self.place_guess.blank? || options[:no_place_guess] || coordinates_obscured? s += " #{I18n.t(:from, :default => 'from').downcase} #{self.place_guess}" end s += " #{I18n.t(:on_day)} #{I18n.l(self.observed_on, :format => :long)}" unless self.observed_on.blank? unless self.time_observed_at.blank? 
|| options[:no_time] s += " #{I18n.t(:at)} #{self.time_observed_at_in_zone.to_s(:plain_time)}" end s += " #{I18n.t(:by).downcase} #{user.try_methods(:name, :login)}" unless options[:no_user] s.gsub(/\s+/, ' ') end def time_observed_at_utc time_observed_at.try(:utc) end def serializable_hash(opts = nil) # for some reason, in some cases options was still nil options = opts ? opts.clone : { } # making a deep copy of the options so they don't get modified # This was more effective than options.deep_dup if options[:include] && (options[:include].is_a?(Hash) || options[:include].is_a?(Array)) options[:include] = options[:include].marshal_copy end # don't use delete here, it will just remove the option for all # subsequent records in an array options[:include] = if options[:include].is_a?(Hash) options[:include].map{|k,v| {k => v}} else [options[:include]].flatten.compact end options[:methods] ||= [] options[:methods] += [:created_at_utc, :updated_at_utc, :time_observed_at_utc, :faves_count, :owners_identification_from_vision] viewer = options[:viewer] viewer_id = viewer.is_a?(User) ? viewer.id : viewer.to_i options[:except] ||= [] options[:except] += [:user_agent] if viewer_id != user_id && !options[:force_coordinate_visibility] options[:except] += [:private_latitude, :private_longitude, :private_positional_accuracy, :geom, :private_geom, :private_place_guess] options[:methods] << :coordinates_obscured end options[:except] += [:cached_tag_list, :geom, :private_geom] options[:except].uniq! options[:methods].uniq! h = super(options) h.each do |k,v| h[k] = v.gsub(/<script.*script>/i, "") if v.is_a?(String) end h.force_utf8 end # # Return a time from observed_on and time_observed_at # def datetime @datetime ||= if observed_on && errors[:observed_on].blank? 
time_observed_at_in_zone || Time.new(observed_on.year, observed_on.month, observed_on.day, 0, 0, 0, timezone_offset) end end def timezone_object # returns nil if the time_zone has an invalid value (time_zone && ActiveSupport::TimeZone.new(time_zone)) || (zic_time_zone && ActiveSupport::TimeZone.new(zic_time_zone)) end def timezone_offset # returns nil if the time_zone has an invalid value (timezone_object || ActiveSupport::TimeZone.new("UTC")).formatted_offset end # Return time_observed_at in the observation's time zone def time_observed_at_in_zone if self.time_observed_at self.time_observed_at.in_time_zone(self.time_zone) end end # # Set all the time fields based on the contents of observed_on_string # def munge_observed_on_with_chronic if observed_on_string.blank? self.observed_on = nil self.time_observed_at = nil return true end date_string = observed_on_string.strip tz_abbrev_pattern = /\s\(?([A-Z]{3,})\)?$/ # ends with (PDT) tz_offset_pattern = /([+-]\d{4})$/ # contains -0800 tz_js_offset_pattern = /(GMT)?([+-]\d{4})/ # contains GMT-0800 tz_colon_offset_pattern = /(GMT|HSP)([+-]\d+:\d+)/ # contains (GMT-08:00) tz_failed_abbrev_pattern = /\(#{tz_colon_offset_pattern}\)/ if date_string =~ /#{tz_js_offset_pattern} #{tz_failed_abbrev_pattern}/ date_string = date_string.sub(tz_failed_abbrev_pattern, '').strip end # Rails timezone support doesn't seem to recognize this abbreviation, and # frankly I have no idea where ActiveSupport::TimeZone::CODES comes from. # In case that ever stops working or a less hackish solution is required, # check out https://gist.github.com/kueda/3e6f77f64f792b4f119f tz_abbrev = date_string[tz_abbrev_pattern, 1] tz_abbrev = 'CET' if tz_abbrev == 'CEST' if parsed_time_zone = ActiveSupport::TimeZone::CODES[tz_abbrev] date_string = observed_on_string.sub(tz_abbrev_pattern, '') date_string = date_string.sub(tz_js_offset_pattern, '').strip self.time_zone = parsed_time_zone.name if observed_on_string_changed? 
elsif (offset = date_string[tz_offset_pattern, 1]) && (n = offset.to_f / 100) && (key = n == 0 ? 0 : n.floor + (n%n.floor)/0.6) && (parsed_time_zone = ActiveSupport::TimeZone[key]) date_string = date_string.sub(tz_offset_pattern, '') self.time_zone = parsed_time_zone.name if observed_on_string_changed? elsif (offset = date_string[tz_js_offset_pattern, 2]) && (n = offset.to_f / 100) && (key = n == 0 ? 0 : n.floor + (n%n.floor)/0.6) && (parsed_time_zone = ActiveSupport::TimeZone[key]) date_string = date_string.sub(tz_js_offset_pattern, '') date_string = date_string.sub(/^(Sun|Mon|Tue|Wed|Thu|Fri|Sat)\s+/i, '') self.time_zone = parsed_time_zone.name if observed_on_string_changed? elsif (offset = date_string[tz_colon_offset_pattern, 2]) && (t = Time.parse(offset)) && (parsed_time_zone = ActiveSupport::TimeZone[t.hour+t.min/60.0]) date_string = date_string.sub(/#{tz_colon_offset_pattern}|#{tz_failed_abbrev_pattern}/, '') self.time_zone = parsed_time_zone.name if observed_on_string_changed? end date_string.sub!('T', ' ') if date_string =~ /\d{4}-\d{2}-\d{2}T/ date_string.sub!(/(\d{2}:\d{2}:\d{2})\.\d+/, '\\1') # strip leading month if present date_string.sub!(/^[A-z]{3} ([A-z]{3})/, '\\1') # strip paranthesized stuff date_string.gsub!(/\(.*\)/, '') # strip noon hour madness # this is due to a weird, weird bug in Chronic if date_string =~ /p\.?m\.?/i date_string.gsub!( /( 12:(\d\d)(:\d\d)?)\s+?p\.?m\.?/i, '\\1') elsif date_string =~ /a\.?m\.?/i date_string.gsub!( /( 12:(\d\d)(:\d\d)?)\s+?a\.?m\.?/i, '\\1') date_string.gsub!( / 12:/, " 00:" ) end # Set the time zone appropriately old_time_zone = Time.zone begin Time.zone = time_zone || user.try(:time_zone) rescue ArgumentError # Usually this would happen b/c of an invalid time zone being specified self.time_zone = time_zone_was || old_time_zone.name end Chronic.time_class = Time.zone begin # Start parsing... 
t = begin Chronic.parse(date_string) rescue ArgumentError nil end t = Chronic.parse(date_string.split[0..-2].join(' ')) unless t if !t && (locale = user.locale || I18n.locale) date_string = englishize_month_abbrevs_for_locale(date_string, locale) t = Chronic.parse(date_string) end if !t I18N_SUPPORTED_LOCALES.each do |locale| date_string = englishize_month_abbrevs_for_locale(date_string, locale) break if t = Chronic.parse(date_string) end end return true unless t # Re-interpret future dates as being in the past t = Chronic.parse(date_string, :context => :past) if t > Time.now self.observed_on = t.to_date if t # try to determine if the user specified a time by ask Chronic to return # a time range. Time ranges less than a day probably specified a time. if tspan = Chronic.parse(date_string, :context => :past, :guess => false) # If tspan is less than a day and the string wasn't 'today', set time if tspan.width < 86400 && date_string.strip.downcase != 'today' self.time_observed_at = t else self.time_observed_at = nil end end rescue RuntimeError, ArgumentError # ignore these, just don't set the date return true end # don't store relative observed_on_strings, or they will change # every time you save an observation! if date_string =~ /today|yesterday|ago|last|this|now|monday|tuesday|wednesday|thursday|friday|saturday|sunday/i self.observed_on_string = self.observed_on.to_s if self.time_observed_at self.observed_on_string = self.time_observed_at.strftime("%Y-%m-%d %H:%M:%S") end end # Set the time zone back the way it was Time.zone = old_time_zone true end def englishize_month_abbrevs_for_locale(date_string, locale) # HACK attempt to translate month abbreviations into English. # A much better approach would be add Spanish and any other supported # locales to https://github.com/olojac/chronic-l10n and switch to the # 'localized' branch of Chronic, which seems to clear our test suite. 
return date_string if locale.to_s =~ /^en/ return date_string unless I18N_SUPPORTED_LOCALES.include?(locale) I18n.t('date.abbr_month_names', :locale => :en).each_with_index do |en_month_name,i| next if i == 0 localized_month_name = I18n.t('date.abbr_month_names', :locale => locale)[i] next if localized_month_name == en_month_name date_string.gsub!(/#{localized_month_name}([\s\,])/, "#{en_month_name}\\1") end date_string end # # Adds, updates, or destroys the identification corresponding to the taxon # the user selected. # def update_identifications return true if @skip_identifications return true unless taxon_id_changed? owners_ident = identifications.where(:user_id => user_id).order("id asc").last # If there's a taxon we need to make sure the owner's ident agrees if taxon && (owners_ident.blank? || owners_ident.taxon_id != taxon.id) # If the owner doesn't have an identification for this obs, make one attrs = { user: user, taxon: taxon, observation: self, skip_observation: true, vision: owners_identification_from_vision_requested } owners_ident = if new_record? self.identifications.build(attrs) else self.identifications.create(attrs) end elsif taxon.blank? && owners_ident && owners_ident.current? if identifications.where(:user_id => user_id).count > 1 owners_ident.update_attributes(:current => false, :skip_observation => true) else owners_ident.skip_observation = true owners_ident.destroy end end update_stats(:skip_save => true) true end # Override nested obs field values attributes setter to ensure that field # values get added even if existing field values have been destroyed (e.g. # two windows). Also updating existing OFV of same OF name if id not # specified def observation_field_values_attributes=(attributes) attr_array = attributes.is_a?(Hash) ? attributes.values : attributes attr_array.each_with_index do |v,i| if v["id"].blank? 
existing = observation_field_values.where(:observation_field_id => v["observation_field_id"]).first unless v["observation_field_id"].blank? existing ||= observation_field_values.joins(:observation_fields).where("lower(observation_fields.name) = ?", v["name"]).first unless v["name"].blank? attr_array[i]["id"] = existing.id if existing elsif !ObservationFieldValue.where("id = ?", v["id"]).exists? attr_array[i].delete("id") end end assign_nested_attributes_for_collection_association(:observation_field_values, attr_array) end # # Update the user's lists with changes to this observation's taxon # # If the observation is the last_observation in any of the user's lists, # then the last_observation should be reset to another observation. # def refresh_lists return true if skip_refresh_lists return true unless taxon_id_changed? || quality_grade_changed? # Update the observation's current taxon and/or a previous one that was # just removed/changed target_taxa = [ taxon, Taxon.find_by_id(@old_observation_taxon_id) ].compact.uniq # Don't refresh all the lists if nothing changed return true if target_taxa.empty? # Refreh the ProjectLists ProjectList.delay(priority: USER_INTEGRITY_PRIORITY, queue: "slow", unique_hash: { "ProjectList::refresh_with_observation": id }). refresh_with_observation(id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at) # Don't refresh LifeLists and Lists if only quality grade has changed return true unless taxon_id_changed? List.delay(priority: USER_INTEGRITY_PRIORITY, queue: "slow", unique_hash: { "List::refresh_with_observation": id }). refresh_with_observation(id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at, :skip_subclasses => true) LifeList.delay(priority: USER_INTEGRITY_PRIORITY, queue: "slow", unique_hash: { "LifeList::refresh_with_observation": id }). 
refresh_with_observation(id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at) # Reset the instance var so it doesn't linger around @old_observation_taxon_id = nil true end def refresh_check_lists return true if skip_refresh_check_lists refresh_needed = (georeferenced? || was_georeferenced?) && (taxon_id || taxon_id_was) && (quality_grade_changed? || taxon_id_changed? || latitude_changed? || longitude_changed? || observed_on_changed?) return true unless refresh_needed CheckList.delay(priority: INTEGRITY_PRIORITY, queue: "slow", unique_hash: { "CheckList::refresh_with_observation": id }). refresh_with_observation(id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_changed? ? taxon_id_was : nil, :latitude_was => (latitude_changed? || longitude_changed?) ? latitude_was : nil, :longitude_was => (latitude_changed? || longitude_changed?) ? longitude_was : nil, :new => id_was.blank?) true end # Because it has to be slightly different, in that the taxon of a destroyed # obs shouldn't be removed by default from life lists (maybe you've seen it # in the past, but you don't have any other obs), but those listed_taxa of # this taxon should have their last_observation reset. # def refresh_lists_after_destroy return true if skip_refresh_lists return true unless taxon List.delay(:priority => USER_INTEGRITY_PRIORITY).refresh_with_observation(id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at, :skip_subclasses => true) LifeList.delay(:priority => USER_INTEGRITY_PRIORITY).refresh_with_observation(id, :taxon_id => taxon_id, :taxon_id_was => taxon_id_was, :user_id => user_id, :created_at => created_at) true end # # Preserve the old taxon id if the taxon has changed so we know to update # that taxon in the user's lists after_save # def keep_old_taxon_id @old_observation_taxon_id = taxon_id_was if taxon_id_changed? 
true end # # Set the iconic taxon if it hasn't been set # def set_iconic_taxon if taxon self.iconic_taxon_id ||= taxon.iconic_taxon_id else self.iconic_taxon_id = nil end true end # # Trim whitespace around species guess # def strip_species_guess self.species_guess.to_s.strip! unless species_guess.blank? true end # # Set the time_zone of this observation if not already set # def set_time_zone self.time_zone = nil if time_zone.blank? self.time_zone ||= user.time_zone if user && !user.time_zone.blank? self.time_zone ||= Time.zone.try(:name) unless time_observed_at.blank? self.time_zone ||= 'UTC' self.zic_time_zone = ActiveSupport::TimeZone::MAPPING[time_zone] unless time_zone.blank? true end # # Force time_observed_at into the time zone # def set_time_in_time_zone return true if time_observed_at.blank? || time_zone.blank? return true unless time_observed_at_changed? || time_zone_changed? # Render the time as a string time_s = time_observed_at_before_type_cast unless time_s.is_a? String time_s = time_observed_at_before_type_cast.strftime("%Y-%m-%d %H:%M:%S") end # Get the time zone offset as a string and append it offset_s = Time.parse(time_s).in_time_zone(time_zone).formatted_offset(false) time_s += " #{offset_s}" self.time_observed_at = Time.parse(time_s) true end def set_captive update_column(:captive, captive_cultivated) end def lsid "lsid:#{URI.parse(CONFIG.site_url).host}:observations:#{id}" end def component_cache_key(options = {}) Observation.component_cache_key(id, options) end def self.component_cache_key(id, options = {}) key = "obs_comp_#{id}" key += "_"+options.sort.map{|k,v| "#{k}-#{v}"}.join('_') unless options.blank? key end def num_identifications_by_others num_identification_agreements + num_identification_disagreements end def appropriate? return false if flagged? return false if observation_photos_count > 0 && photos.detect{ |p| p.flagged? } true end def georeferenced? (!latitude.nil? && !longitude.nil?) || (!private_latitude.nil? 
&& !private_longitude.nil?) end def was_georeferenced? (latitude_was && longitude_was) || (private_latitude_was && private_longitude_was) end def quality_metric_score(metric) quality_metrics.all unless quality_metrics.loaded? metrics = quality_metrics.select{|qm| qm.metric == metric} return nil if metrics.blank? metrics.select{|qm| qm.agree?}.size.to_f / metrics.size end def community_supported_id? if community_taxon_rejected? num_identification_agreements.to_i > 0 && num_identification_agreements > num_identification_disagreements else !community_taxon_id.blank? && taxon_id == community_taxon_id end end def quality_metrics_pass? QualityMetric::METRICS.each do |metric| return false unless passes_quality_metric?(metric) end true end def passes_quality_metric?(metric) score = quality_metric_score(metric) score.blank? || score >= 0.5 end def research_grade_candidate? return false if human? return false unless georeferenced? return false unless quality_metrics_pass? return false unless observed_on? return false unless (photos? || sounds?) return false unless appropriate? true end def human? t = community_taxon || taxon t && t.name =~ /^Homo / end def research_grade? quality_grade == RESEARCH_GRADE end def verifiable? [ NEEDS_ID, RESEARCH_GRADE ].include?(quality_grade) end def photos? return true if photos && photos.any? observation_photos.loaded? ? ! observation_photos.empty? : observation_photos.exists? end def sounds? sounds.loaded? ? ! sounds.empty? : sounds.exists? end def set_quality_grade(options = {}) self.quality_grade = get_quality_grade true end def self.set_quality_grade(id) return unless observation = Observation.find_by_id(id) observation.set_quality_grade(:force => true) observation.save if observation.quality_grade_changed? CheckList.delay(priority: INTEGRITY_PRIORITY, queue: "slow", unique_hash: { "CheckList::refresh_with_observation": id }). 
refresh_with_observation(observation.id, :taxon_id => observation.taxon_id) end observation.quality_grade end def get_quality_grade if !research_grade_candidate? CASUAL elsif voted_in_to_needs_id? NEEDS_ID elsif community_taxon_id && owners_identification && owners_identification.maverick? && community_taxon_rejected? CASUAL elsif community_taxon_at_species_or_lower? RESEARCH_GRADE elsif voted_out_of_needs_id? if community_taxon_below_family? RESEARCH_GRADE else CASUAL end else NEEDS_ID end end def coordinates_obscured? !private_latitude.blank? || !private_longitude.blank? end alias :coordinates_obscured :coordinates_obscured? def coordinates_private? latitude.blank? && longitude.blank? && private_latitude? && private_longitude? end def coordinates_changed? latitude_changed? || longitude_changed? || private_latitude_changed? || private_longitude_changed? end def geoprivacy_private? geoprivacy == PRIVATE end def geoprivacy_obscured? geoprivacy == OBSCURED end def coordinates_viewable_by?(viewer) return true unless coordinates_obscured? return false if viewer.blank? viewer = User.find_by_id(viewer) unless viewer.is_a?(User) return false unless viewer return true if user_id == viewer.id project_ids = if projects.loaded? projects.map(&:id) else project_observations.map(&:project_id) end viewer.project_users.select{|pu| project_ids.include?(pu.project_id) && ProjectUser::ROLES.include?(pu.role)}.each do |pu| if project_observations.detect{|po| po.project_id == pu.project_id && po.prefers_curator_coordinate_access?} return true end end false end def reset_private_coordinates_if_coordinates_changed if (latitude_changed? || longitude_changed?) self.private_latitude = nil self.private_longitude = nil end true end def normalize_geoprivacy self.geoprivacy = nil unless GEOPRIVACIES.include?(geoprivacy) true end def obscure_coordinates_for_geoprivacy self.geoprivacy = nil if geoprivacy.blank? return true if geoprivacy.blank? && !geoprivacy_changed? 
case geoprivacy when PRIVATE obscure_coordinates unless coordinates_obscured? self.latitude, self.longitude = [nil, nil] when OBSCURED obscure_coordinates unless coordinates_obscured? else unobscure_coordinates end true end def obscure_coordinates_for_threatened_taxa lat = private_latitude.blank? ? latitude : private_latitude lon = private_longitude.blank? ? longitude : private_longitude t = taxon || community_taxon target_taxon_ids = [[t.try(:id)] + identifications.current.pluck(:taxon_id)].flatten.compact.uniq taxon_geoprivacy = Taxon.max_geoprivacy( target_taxon_ids, latitude: lat, longitude: lon ) case taxon_geoprivacy when OBSCURED obscure_coordinates unless coordinates_obscured? when PRIVATE unless coordinates_private? obscure_coordinates self.latitude, self.longitude = [nil, nil] end else unobscure_coordinates end true end def obscure_coordinates return if latitude.blank? || longitude.blank? if latitude_changed? || longitude_changed? self.private_latitude = latitude self.private_longitude = longitude else self.private_latitude ||= latitude self.private_longitude ||= longitude end self.latitude, self.longitude = Observation.random_neighbor_lat_lon( private_latitude, private_longitude ) set_geom_from_latlon true end def obscure_place_guess public_place_guess = Observation.place_guess_from_latlon( private_latitude, private_longitude, acc: calculate_public_positional_accuracy, user: user ) if coordinates_private? if place_guess_changed? && place_guess == private_place_guess self.place_guess = nil elsif !place_guess.blank? && place_guess != public_place_guess self.private_place_guess = place_guess self.place_guess = nil end elsif coordinates_obscured? if place_guess_changed? if place_guess == private_place_guess self.place_guess = public_place_guess else self.private_place_guess = place_guess self.place_guess = public_place_guess end elsif private_latitude_changed? && private_place_guess.blank? 
self.private_place_guess = place_guess self.place_guess = public_place_guess end else unless place_guess_changed? || private_place_guess.blank? self.place_guess = private_place_guess end self.private_place_guess = nil end true end def lat_lon_in_place_guess? !place_guess.blank? && place_guess !~ /[a-cf-mo-rt-vx-z]/i && !place_guess.scan(COORDINATE_REGEX).blank? end def unobscure_coordinates return unless coordinates_obscured? || coordinates_private? return unless geoprivacy.blank? self.latitude = private_latitude self.longitude = private_longitude self.private_latitude = nil self.private_longitude = nil set_geom_from_latlon end def iconic_taxon_name return nil if iconic_taxon_id.blank? if Taxon::ICONIC_TAXA_BY_ID.blank? association(:iconic_taxon).loaded? ? iconic_taxon.try(:name) : Taxon.select("id, name").where(:id => iconic_taxon_id).first.try(:name) else Taxon::ICONIC_TAXA_BY_ID[iconic_taxon_id].try(:name) end end def captive_cultivated? !passes_quality_metric?(QualityMetric::WILD) end alias :captive_cultivated :captive_cultivated? def reviewed_by?(viewer) viewer = User.find_by_id(viewer) unless viewer.is_a?(User) return false unless viewer ObservationReview.where(observation_id: id, user_id: viewer.id, reviewed: true).exists? end ##### Community Taxon ######################################################### def get_community_taxon(options = {}) return if (identifications.loaded? ? identifications.select(&:current?).select(&:persisted?).uniq : identifications.current).count <= 1 node = community_taxon_nodes(options).select{|n| n[:cumulative_count] > 1}.sort_by do |n| [ n[:score].to_f > COMMUNITY_TAXON_SCORE_CUTOFF ? 1 : 0, # only consider taxa with a score above the cutoff 0 - (n[:taxon].rank_level || 500) # within that set, sort by rank level, i.e. 
choose lowest rank ] end.last # # Visualizing this stuff is pretty useful for testing, so please leave this in # puts # width = 15 # %w(taxon_id taxon_name cc dc cdc score).each do |c| # print c.ljust(width) # end # puts # community_taxon_nodes.sort_by{|n| n[:taxon].ancestry || ""}.each do |n| # print n[:taxon].id.to_s.ljust(width) # print n[:taxon].name.to_s.ljust(width) # print n[:cumulative_count].to_s.ljust(width) # print n[:disagreement_count].to_s.ljust(width) # print n[:conservative_disagreement_count].to_s.ljust(width) # print n[:score].to_s.ljust(width) # puts # end return unless node node[:taxon] end def community_taxon_nodes(options = {}) return @community_taxon_nodes if @community_taxon_nodes && !options[:force] # work on current identifications ids = identifications.loaded? ? identifications.select(&:current?).select(&:persisted?).uniq : identifications.current.includes(:taxon) working_idents = ids.sort_by(&:id) # load all ancestor taxa implied by identifications ancestor_ids = working_idents.map{|i| i.taxon.ancestor_ids}.flatten.uniq.compact taxon_ids = working_idents.map{|i| [i.taxon_id] + i.taxon.ancestor_ids}.flatten.uniq.compact taxa = Taxon.where("id IN (?)", taxon_ids) taxon_ids_count = taxon_ids.size @community_taxon_nodes = taxa.map do |id_taxon| # count all identifications of this taxon and its descendants cumulative_count = working_idents.select{|i| i.taxon.self_and_ancestor_ids.include?(id_taxon.id)}.size # count identifications of taxa that are outside of this taxon's subtree # (i.e. absolute disagreements) disagreement_count = working_idents.reject{|i| id_taxon.self_and_ancestor_ids.include?(i.taxon_id) || i.taxon.self_and_ancestor_ids.include?(id_taxon.id) }.size # count identifications of taxa that are ancestors of this taxon but # were made after the first identification of this taxon (i.e. # conservative disagreements). 
Note that for genus1 > species1, an # identification of species1 implies an identification of genus1 first_ident = working_idents.detect{|i| i.taxon.self_and_ancestor_ids.include?(id_taxon.id)} conservative_disagreement_count = if first_ident working_idents.select{|i| i.id > first_ident.id && id_taxon.ancestor_ids.include?(i.taxon_id)}.size else 0 end { :taxon => id_taxon, :ident_count => working_idents.select{|i| i.taxon_id == id_taxon.id}.size, :cumulative_count => cumulative_count, :disagreement_count => disagreement_count, :conservative_disagreement_count => conservative_disagreement_count, :score => cumulative_count.to_f / (cumulative_count + disagreement_count + conservative_disagreement_count) } end end def set_community_taxon(options = {}) community_taxon = get_community_taxon(options) self.community_taxon = community_taxon if self.changed? && !community_taxon.nil? && !community_taxon_rejected? self.species_guess = (community_taxon.common_name.try(:name) || community_taxon.name) end true end def set_community_taxon_before_save set_community_taxon(:force => true) if prefers_community_taxon_changed? || taxon_id_changed? true end def self.set_community_taxa(options = {}) scope = Observation.includes({:identifications => [:taxon]}, :user) scope = scope.where(options[:where]) if options[:where] scope = scope.by(options[:user]) unless options[:user].blank? scope = scope.of(options[:taxon]) unless options[:taxon].blank? scope = scope.in_place(options[:place]) unless options[:place].blank? scope = scope.in_projects([options[:project]]) unless options[:project].blank? 
start_time = Time.now logger = options[:logger] || Rails.logger logger.info "[INFO #{Time.now}] Starting Observation.set_community_taxon, options: #{options.inspect}" scope.find_each do |o| next unless o.identifications.size > 1 o.set_community_taxon unless o.save logger.error "[ERROR #{Time.now}] Failed to set community taxon for #{o}: #{o.errors.full_messages.to_sentence}" end end logger.info "[INFO #{Time.now}] Finished Observation.set_community_taxon in #{Time.now - start_time}s, options: #{options.inspect}" end def community_taxon_rejected? return false if prefers_community_taxon == true (prefers_community_taxon == false || user.prefers_community_taxa == false) end def set_taxon_from_community_taxon return if identifications.count == 0 && taxon_id # explicitly opted in self.taxon_id = if prefers_community_taxon community_taxon_id || owners_identification.try(:taxon_id) || others_identifications.last.try(:taxon_id) # obs opted out or user opted out elsif prefers_community_taxon == false || !user.prefers_community_taxa? owners_identification.try(:taxon_id) # implicitly opted in else community_taxon_id || owners_identification.try(:taxon_id) || others_identifications.last.try(:taxon_id) end if taxon_id_changed? && (community_taxon_id_changed? || prefers_community_taxon_changed?) 
update_stats(:skip_save => true) self.species_guess = if taxon taxon.common_name.try(:name) || taxon.name else nil end end true end def self.reassess_coordinates_for_observations_of( taxon, options = {} ) batch_size = 500 scope = Observation.with_identifications_of( taxon ) scope.find_in_batches(batch_size: batch_size) do |batch| if options[:place] # using Elasticsearch for place filtering so we don't # get bogged down by huge geometries in Postgresql es_params = { id: batch, place_id: options[:place], per_page: batch_size } reassess_coordinates_of( Observation.page_of_results( es_params ) ) else reassess_coordinates_of( batch ) end end end def self.reassess_coordinates_of( observations ) observations.each do |o| o.obscure_coordinates_for_threatened_taxa o.obscure_place_guess next unless o.coordinates_changed? || o.place_guess_changed? Observation.where( id: o.id ).update_all( latitude: o.latitude, longitude: o.longitude, private_latitude: o.private_latitude, private_longitude: o.private_longitude, geom: o.geom, private_geom: o.private_geom, place_guess: o.place_guess, private_place_guess: o.private_place_guess ) end Observation.elastic_index!( ids: observations.map(&:id) ) end def self.find_observations_of(taxon) Observation.joins(:taxon). where("observations.taxon_id = ? OR taxa.ancestry LIKE '#{taxon.ancestry}/#{taxon.id}%'", taxon).find_each do |o| yield(o) end end ##### Validations ######################################################### # # Make sure the observation is not in the future. # def must_be_in_the_past return true if observed_on.blank? if observed_on > Time.now.in_time_zone(time_zone || user.time_zone).to_date errors.add(:observed_on, "can't be in the future") end true end # # Make sure the observation resolves to a single day. Right now we don't # store ambiguity... # def must_not_be_a_range return if observed_on_string.blank? 
is_a_range = false begin if tspan = Chronic.parse(observed_on_string, :context => :past, :guess => false) is_a_range = true if tspan.width.seconds > 1.day.seconds end rescue RuntimeError, ArgumentError # ignore parse errors, assume they're not spans return end # Special case: dates like '2004', which ordinarily resolve to today at # 8:04pm observed_on_int = observed_on_string.gsub(/[^\d]/, '').to_i if observed_on_int > 1900 && observed_on_int <= Date.today.year is_a_range = true end if is_a_range errors.add(:observed_on, "must be a single day, not a range") end end def set_taxon_from_taxon_name return true if self.taxon_name.blank? return true if taxon_id self.taxon_id = single_taxon_id_for_name(self.taxon_name) true end def set_taxon_from_species_guess return true if species_guess =~ /\?$/ return true unless species_guess_changed? && taxon_id.blank? return true if species_guess.blank? self.taxon_id = single_taxon_id_for_name(species_guess) true end def single_taxon_for_name(name) Taxon.single_taxon_for_name(name) end def single_taxon_id_for_name(name) Taxon.single_taxon_for_name(name).try(:id) end def set_latlon_from_place_guess return true unless latitude.blank? && longitude.blank? return true if place_guess.blank? return true if place_guess =~ /[a-cf-mo-rt-vx-z]/i # ignore anything with word chars other than NSEW return true unless place_guess.strip =~ /[.+,\s.+]/ # ignore anything without a legit separator matches = place_guess.strip.scan(COORDINATE_REGEX).flatten return true if matches.blank? 
case matches.size when 2 # decimal degrees self.latitude, self.longitude = matches when 4 # decimal minutes self.latitude = matches[0].to_i + matches[1].to_f/60.0 self.longitude = matches[3].to_i + matches[4].to_f/60.0 when 6 # degrees / minutes / seconds self.latitude = matches[0].to_i + matches[1].to_i/60.0 + matches[2].to_f/60/60 self.longitude = matches[3].to_i + matches[4].to_i/60.0 + matches[5].to_f/60/60 end self.latitude *= -1 if latitude.to_f > 0 && place_guess =~ /s/i self.longitude *= -1 if longitude.to_f > 0 && place_guess =~ /w/i true end def set_geom_from_latlon(options = {}) if longitude.blank? || latitude.blank? self.geom = nil elsif options[:force] || longitude_changed? || latitude_changed? self.geom = "POINT(#{longitude} #{latitude})" end if private_latitude && private_longitude self.private_geom = "POINT(#{private_longitude} #{private_latitude})" elsif self.geom self.private_geom = self.geom else self.private_geom = nil end true end def self.place_guess_from_latlon( lat, lon, options = {} ) sys_places = Observation.system_places_for_latlon( lat, lon, options ) return if sys_places.blank? sys_places_codes = sys_places.map(&:code) user = options[:user] locale = options[:locale] locale ||= user.locale if user locale ||= I18n.locale first_name = if sys_places[0].admin_level == Place::COUNTY_LEVEL && sys_places_codes.include?( "US" ) "#{sys_places[0].name} County" else I18n.t( sys_places[0].name, locale: locale, default: sys_places[0].name ) end remaining_names = sys_places[1..-1].map do |p| if p.admin_level == Place::COUNTY_LEVEL && sys_places_codes.include?( "US" ) "#{p.name} County" else p.code.blank? ? I18n.t( p.name, locale: locale, default: p.name ) : p.code end end [first_name, remaining_names].flatten.join( ", " ) end def set_place_guess_from_latlon return true unless place_guess.blank? return true if coordinates_private? 
if guess = Observation.place_guess_from_latlon( latitude, longitude, { acc: calculate_public_positional_accuracy, user: user } ) self.place_guess = guess end true end def set_license return true if license_changed? && license.blank? self.license ||= user.preferred_observation_license self.license = nil unless LICENSE_CODES.include?(license) true end def trim_user_agent return true if user_agent.blank? self.user_agent = user_agent[0..254] true end def update_out_of_range_later if taxon_id_changed? && taxon.blank? update_out_of_range elsif latitude_changed? || private_latitude_changed? || taxon_id_changed? delay(:priority => USER_INTEGRITY_PRIORITY).update_out_of_range end true end def update_out_of_range set_out_of_range Observation.where(id: id).update_all(out_of_range: out_of_range) end def set_out_of_range if taxon_id.blank? || !georeferenced? || !TaxonRange.exists?(["taxon_id = ?", taxon_id]) self.out_of_range = nil return end # buffer the point to accomodate simplified or slightly inaccurate ranges buffer_degrees = OUT_OF_RANGE_BUFFER / (2*Math::PI*Observation::PLANETARY_RADIUS) * 360.0 self.out_of_range = if coordinates_obscured? TaxonRange.where( "taxon_ranges.taxon_id = ? AND ST_Distance(taxon_ranges.geom, ST_Point(?,?)) > ?", taxon_id, private_longitude, private_latitude, buffer_degrees ).exists? else TaxonRange. from("taxon_ranges, observations"). where( "taxon_ranges.taxon_id = ? AND observations.id = ? AND ST_Distance(taxon_ranges.geom, observations.geom) > ?", taxon_id, id, buffer_degrees ).count > 0 end end def set_uri if uri.blank? Observation.where(id: id).update_all(uri: FakeView.observation_url(id)) end true end def update_default_license return true unless make_license_default.yesish? user.update_attribute(:preferred_observation_license, license) true end def update_all_licenses return true unless make_licenses_same.yesish? 
Observation.where(user_id: user_id).update_all(license: license) user.index_observations_later true end def update_taxon_counter_caches return true unless destroyed? || taxon_id_changed? taxon_ids = [taxon_id_was, taxon_id].compact.uniq unless taxon_ids.blank? Taxon.delay(:priority => INTEGRITY_PRIORITY).update_observation_counts(:taxon_ids => taxon_ids) end true end def update_quality_metrics if captive_flag.yesish? QualityMetric.vote( user, self, QualityMetric::WILD, false ) elsif captive_flag.noish? && force_quality_metrics QualityMetric.vote( user, self, QualityMetric::WILD, true ) elsif captive_flag.noish? && ( qm = quality_metrics.detect{|m| m.user_id == user_id && m.metric == QualityMetric::WILD} ) qm.update_attributes( agree: true) elsif force_quality_metrics && ( qm = quality_metrics.detect{|m| m.user_id == user_id && m.metric == QualityMetric::WILD} ) qm.destroy end system_captive_vote = quality_metrics.detect{ |m| m.user_id.blank? && m.metric == QualityMetric::WILD } if probably_captive? QualityMetric.vote( nil, self, QualityMetric::WILD, false ) unless system_captive_vote elsif system_captive_vote system_captive_vote.destroy end true end def update_attributes(attributes) # hack around a weird android bug attributes.delete(:iconic_taxon_name) # MASS_ASSIGNABLE_ATTRIBUTES.each do |a| # self.send("#{a}=", attributes.delete(a.to_s)) if attributes.has_key?(a.to_s) # self.send("#{a}=", attributes.delete(a)) if attributes.has_key?(a) # end super(attributes) end def license_name return nil if license.blank? s = "Creative Commons " s += LICENSES.detect{|row| row.first == license}.try(:[], 1).to_s s end # I'm not psyched about having this stuff here, but it makes generating # more compact JSON a lot easier. 
include ObservationsHelper include ActionView::Helpers::SanitizeHelper include ActionView::Helpers::TextHelper # include ActionController::UrlWriter include Rails.application.routes.url_helpers def image_url(options = {}) url = observation_image_url(self, options.merge(size: "medium")) url =~ /^http/ ? url : nil end def obs_image_url image_url end def short_description short_observation_description(self) end def scientific_name taxon.scientific_name.name if taxon && taxon.scientific_name end def common_name taxon.common_name.name if taxon && taxon.common_name end def url uri end def user_login user.login end def update_stats(options = {}) idents = [self.identifications.to_a, options[:include]].flatten.compact.uniq current_idents = idents.select(&:current?) if taxon_id.blank? num_agreements = 0 num_disagreements = 0 else if node = community_taxon_nodes.detect{|n| n[:taxon].try(:id) == taxon_id} num_agreements = node[:cumulative_count] num_disagreements = node[:disagreement_count] + node[:conservative_disagreement_count] num_agreements -= 1 if current_idents.detect{|i| i.taxon_id == taxon_id && i.user_id == user_id} num_agreements = 0 if current_idents.count <= 1 num_disagreements = 0 if current_idents.count <= 1 else num_agreements = current_idents.select{|ident| ident.is_agreement?(:observation => self)}.size num_disagreements = current_idents.select{|ident| ident.is_disagreement?(:observation => self)}.size end end # Kinda lame, but Observation#get_quality_grade relies on these numbers self.num_identification_agreements = num_agreements self.num_identification_disagreements = num_disagreements self.identifications_count = idents.size new_quality_grade = get_quality_grade self.quality_grade = new_quality_grade if !options[:skip_save] && ( num_identification_agreements_changed? || num_identification_disagreements_changed? || quality_grade_changed? || identifications_count_changed?) 
Observation.where(id: id).update_all( num_identification_agreements: num_agreements, num_identification_disagreements: num_disagreements, quality_grade: new_quality_grade, identifications_count: identifications_count) refresh_check_lists refresh_lists end end def self.update_stats_for_observations_of(taxon) taxon = Taxon.find_by_id(taxon) unless taxon.is_a?(Taxon) return unless taxon descendant_conditions = taxon.descendant_conditions.to_a result = Identification.elastic_search( filters: [ { bool: { should: [ { term: { "taxon.ancestor_ids": taxon.id } }, { term: { "observation.taxon.ancestor_ids": taxon.id } }, ]}}], size: 0, aggregate: { obs: { terms: { field: "observation.id", size: 3000000 } } } ) obs_ids = result.response.aggregations.obs.buckets.map{ |b| b[:key] } obs_ids.in_groups_of(1000) do |batch_ids| Observation.includes(:taxon, { identifications: :taxon }, :flags, { photos: :flags }, :quality_metrics, :sounds, :votes_for).where(id: batch_ids).find_each do |o| o.set_community_taxon o.update_stats(skip_save: true) if o.changed? o.skip_indexing = true o.save Identification.update_categories_for_observation( o ) end end Observation.elastic_index!(ids: batch_ids) end Rails.logger.info "[INFO #{Time.now}] Finished Observation.update_stats_for_observations_of(#{taxon})" end def self.random_neighbor_lat_lon(lat, lon) precision = 10**5.0 range = ((-1 * precision)..precision) half_cell = COORDINATE_UNCERTAINTY_CELL_SIZE / 2 base_lat, base_lon = uncertainty_cell_southwest_latlon( lat, lon ) [ base_lat + ((rand(range) / precision) * half_cell), base_lon + ((rand(range) / precision) * half_cell)] end # # Coordinates of the southwest corner of the uncertainty cell for any given coordinates # def self.uncertainty_cell_southwest_latlon( lat, lon ) half_cell = COORDINATE_UNCERTAINTY_CELL_SIZE / 2 # how many significant digits in the obscured coordinates (e.g. 
5)
# doing a floor with intervals of 0.2, then adding 0.1
# so our origin is the center of a 0.2 square
base_lat = lat - (lat % COORDINATE_UNCERTAINTY_CELL_SIZE) + half_cell
base_lon = lon - (lon % COORDINATE_UNCERTAINTY_CELL_SIZE) + half_cell
[base_lat, base_lon]
end

#
# Distance of a diagonal from corner to corner across the uncertainty cell
# for the given coordinates.
#
def self.uncertainty_cell_diagonal_meters( lat, lon )
  base_lat, base_lon = uncertainty_cell_southwest_latlon( lat, lon )
  # lat_lon_distance_in_meters is defined elsewhere in this class;
  # ceil so the reported uncertainty never understates the cell size
  lat_lon_distance_in_meters( base_lat, base_lon,
    base_lat + COORDINATE_UNCERTAINTY_CELL_SIZE,
    base_lon + COORDINATE_UNCERTAINTY_CELL_SIZE ).ceil
end

#
# Distance of a diagonal from corner to corner across the uncertainty cell
# for this observation's coordinates. Uses the true (private) coordinates
# when present; nil when not georeferenced.
#
def uncertainty_cell_diagonal_meters
  return nil unless georeferenced?
  lat = private_latitude || latitude
  lon = private_longitude || longitude
  Observation.uncertainty_cell_diagonal_meters( lat, lon )
end

# Places whose bounding boxes contain the given coordinates (with accuracy
# buffer acc), smallest bbox first.
def self.places_for_latlon( lat, lon, acc )
  candidates = Place.containing_lat_lng(lat, lon).sort_by{|p| p.bbox_area || 0}

  # At present we use PostGIS GEOMETRY types, which are a bit stupid about
  # things crossing the dateline, so we need to do an app-layer check.
  # Converting to the GEOGRAPHY type would solve this, in theory.
  # Unfortunately this does NOT solve the problem of failing to select
  # legit geoms that cross the dateline. GEOGRAPHY would solve that too.
  candidates.select do |p|
    # HACK: bbox_contains_lat_lng_acc uses rgeo, which despite having a
    # spherical geometry factory, doesn't seem to allow spherical polygons
    # to use a contains? method, which means it doesn't really work for
    # polygons that cross the dateline, so... skip it until we switch to
    # geography, I guess.
    if p.straddles_date_line?
      true
    else
      p.bbox_contains_lat_lng_acc?(lat, lon, acc)
    end
  end
end

# All places containing this observation's best (private if set) coordinates
def places
  return [] unless georeferenced?
lat = private_latitude || latitude lon = private_longitude || longitude acc = private_positional_accuracy || positional_accuracy Observation.places_for_latlon( lat, lon, acc ) end def public_places return [] unless georeferenced? return [] if geoprivacy == PRIVATE lat = private_latitude || latitude lon = private_longitude || longitude acc = public_positional_accuracy || positional_accuracy Observation.places_for_latlon( lat, lon, acc ) end def self.system_places_for_latlon( lat, lon, options = {} ) all_places = options[:places] || places_for_latlon( lat, lon, options[:acc] ) all_places.select do |p| p.user_id.blank? && ( [Place::COUNTRY_LEVEL, Place::STATE_LEVEL, Place::COUNTY_LEVEL].include?(p.admin_level) || p.place_type == Place::PLACE_TYPE_CODES['Open Space'] ) end end # The places that are theoretically controlled by site admins def system_places(options = {}) Observation.system_places_for_latlon( latitude, longitude, options.merge( acc: positional_accuracy ) ) end def public_system_places( options = {} ) Observation.system_places_for_latlon( latitude, longitude, options.merge( acc: positional_accuracy ) ) public_places.select{|p| !p.admin_level.blank? } end def intersecting_places return [] unless georeferenced? 
lat = private_latitude || latitude lon = private_longitude || longitude @intersecting_places ||= Place.containing_lat_lng(lat, lon).sort_by{|p| p.bbox_area || 0} end { 0 => "Undefined", 2 => "Street Segment", 4 => "Street", 5 => "Intersection", 6 => "Street", 7 => "Town", 8 => "State", 9 => "County", 10 => "Local Administrative Area", 12 => "Country", 13 => "Island", 14 => "Airport", 15 => "Drainage", 16 => "Land Feature", 17 => "Miscellaneous", 18 => "Nationality", 19 => "Supername", 20 => "Point of Interest", 21 => "Region", 24 => "Colloquial", 25 => "Zone", 26 => "Historical State", 27 => "Historical County", 29 => "Continent", 33 => "Estate", 35 => "Historical Town", 36 => "Aggregate", 100 => "Open Space", 101 => "Territory" }.each do |code, type| define_method "place_#{type.underscore}" do intersecting_places.detect{|p| p.place_type == code} end define_method "place_#{type.underscore}_name" do send("place_#{type.underscore}").try(:name) end end def taxon_and_ancestors taxon ? taxon.self_and_ancestors.to_a : [] end def mobile? return false unless user_agent MOBILE_APP_USER_AGENT_PATTERNS.each do |pattern| return true if user_agent =~ pattern end false end def device_name return "unknown" unless user_agent if user_agent =~ ANDROID_APP_USER_AGENT_PATTERN "iNaturalist Android App" elsif user_agent =~ FISHTAGGER_APP_USER_AGENT_PATTERN "Fishtagger iPhone App" elsif user_agent =~ IPHONE_APP_USER_AGENT_PATTERN "iNaturalist iPhone App" else "web browser" end end def device_url return unless user_agent if user_agent =~ FISHTAGGER_APP_USER_AGENT_PATTERN "http://itunes.apple.com/us/app/fishtagger/id582724178?mt=8" elsif user_agent =~ IPHONE_APP_USER_AGENT_PATTERN "http://itunes.apple.com/us/app/inaturalist/id421397028?mt=8" elsif user_agent =~ ANDROID_APP_USER_AGENT_PATTERN "https://market.android.com/details?id=org.inaturalist.android" end end def owners_identification if identifications.loaded? 
# if idents are loaded, the most recent current identification might be a new record identifications.sort_by{|i| i.created_at || 1.minute.from_now}.select {|ident| ident.user_id == user_id && ident.current? }.last else identifications.current.by(user_id).last end end def others_identifications if identifications.loaded? identifications.select do |i| i.current? && i.user_id != user_id end else identifications.current.not_by(user_id) end end def method_missing(method, *args, &block) return super unless method.to_s =~ /^field:/ || method.to_s =~ /^taxon_[^=]+/ if method.to_s =~ /^field:/ of_name = method.to_s.split(':').last ofv = observation_field_values.detect{|ofv| ofv.observation_field.normalized_name == of_name} if ofv return ofv.taxon ? ofv.taxon.name : ofv.value end elsif method.to_s =~ /^taxon_/ && !self.class.instance_methods.include?(method) && taxon return taxon.send(method.to_s.gsub(/^taxon_/, '')) end super end def respond_to?(method, include_private = false) @@class_methods_hash ||= Hash[ self.class.instance_methods.map{ |h| [ h.to_sym, true ] } ] @@class_columns_hash ||= Hash[ self.class.column_names.map{ |h| [ h.to_sym, true ] } ] if @@class_methods_hash[method.to_sym] || @@class_columns_hash[method.to_sym] return super end return super unless method.to_s =~ /^field:/ || method.to_s =~ /^taxon_[^=]+/ if method.to_s =~ /^field:/ of_name = method.to_s.split(':').last ofv = observation_field_values.detect{|ofv| ofv.observation_field.normalized_name == of_name} return !ofv.blank? elsif method.to_s =~ /^taxon_/ && taxon return taxon.respond_to?(method.to_s.gsub(/^taxon_/, ''), include_private) end super end def merge(reject) mutable_columns = self.class.column_names - %w(id created_at updated_at) mutable_columns.each do |column| self.send("#{column}=", reject.send(column)) if send(column).blank? 
end reject.identifications.update_all("current = false") merge_has_many_associations(reject) reject.destroy identifications.group_by{|ident| [ident.user_id, ident.taxon_id]}.each do |pair, idents| c = idents.sort_by(&:id).last c.update_attributes(:current => true) end save! end def create_observation_review return true unless taxon return true unless taxon_id_was.blank? ObservationReview.where( observation_id: id, user_id: user_id ).first_or_create.touch true end def create_deleted_observation DeletedObservation.create( :observation_id => id, :user_id => user_id ) true end def build_observation_fields_from_tags(tags) tags.each do |tag| np, value = tag.split('=') next unless np && value namespace, predicate = np.split(':') predicate = namespace if predicate.blank? next if predicate.blank? of = ObservationField.where("lower(name) = ?", predicate.downcase).first next unless of next if self.observation_field_values.detect{|ofv| ofv.observation_field_id == of.id} if of.datatype == ObservationField::TAXON t = Taxon.single_taxon_for_name(value) next unless t value = t.id end ofv = ObservationFieldValue.new(observation: self, observation_field: of, value: value) self.observation_field_values.build(ofv.attributes) if ofv.valid? end end def fields_addable_by?(u) return false unless u.is_a?(User) return true if user.preferred_observation_fields_by == User::PREFERRED_OBSERVATION_FIELDS_BY_ANYONE return true if user.preferred_observation_fields_by == User::PREFERRED_OBSERVATION_FIELDS_BY_CURATORS && u.is_curator? u.id == user_id end def set_coordinates if self.geo_x.present? && self.geo_y.present? && self.coordinate_system.present? # Perform the transformation # transfrom from `self.coordinate_system` from = RGeo::CoordSys::Proj4.new(self.coordinate_system) # ... 
to WGS84 to = RGeo::CoordSys::Proj4.new(WGS84_PROJ4) # Returns an array of lat, lon transform = RGeo::CoordSys::Proj4.transform_coords(from, to, self.geo_x.to_d, self.geo_y.to_d) # Set the transfor self.longitude, self.latitude = transform end true end # Required for use of the sanitize method in # ObservationsHelper#short_observation_description def self.white_list_sanitizer @white_list_sanitizer ||= HTML::WhiteListSanitizer.new end def self.update_for_taxon_change(taxon_change, options = {}, &block) input_taxon_ids = taxon_change.input_taxa.map(&:id) scope = Observation.where("observations.taxon_id IN (?)", input_taxon_ids) scope = scope.by(options[:user]) if options[:user] scope = scope.where("observations.id IN (?)", options[:records].to_a) unless options[:records].blank? scope = scope.includes( :user, :identifications, :observations_places ) scope.find_each do |observation| if observation.owners_identification && input_taxon_ids.include?( observation.owners_identification.taxon_id ) if output_taxon = taxon_change.output_taxon_for_record( observation ) Identification.create( user: observation.user, observation: observation, taxon: output_taxon, taxon_change: taxon_change ) end end yield(observation) if block_given? 
end end # 2014-01 I tried improving performance by loading ancestor taxa for each # batch, but it didn't really speed things up much def self.generate_csv(scope, options = {}) fname = options[:fname] || "observations.csv" fpath = options[:path] || File.join(options[:dir] || Dir::tmpdir, fname) FileUtils.mkdir_p File.dirname(fpath), :mode => 0755 columns = options[:columns] || CSV_COLUMNS CSV.open(fpath, 'w') do |csv| csv << columns scope.find_each(batch_size: 500) do |observation| csv << columns.map do |c| c = "cached_tag_list" if c == "tag_list" observation.send(c) rescue nil end end end fpath end def self.generate_csv_for(record, options = {}) fname = options[:fname] || "#{record.to_param}-observations.csv" fpath = options[:path] || File.join(options[:dir] || Dir::tmpdir, fname) tmp_path = File.join(Dir::tmpdir, fname) FileUtils.mkdir_p File.dirname(tmp_path), :mode => 0755 columns = CSV_COLUMNS # ensure private coordinates are hidden unless they shouldn't be viewer_curates_project = record.is_a?(Project) && record.curated_by?(options[:user]) viewer_is_owner = record.is_a?(User) && record == options[:user] unless viewer_curates_project || viewer_is_owner columns = columns.select{|c| c !~ /^private_/} end # generate the csv if record.respond_to?(:generate_csv) record.generate_csv(tmp_path, columns, viewer: options[:user]) else scope = record.observations. includes(:taxon). includes(observation_field_values: :observation_field) unless record.is_a?(User) && options[:user] === record scope = scope.includes(project_observations: :stored_preferences). 
includes(user: {project_users: :stored_preferences}) end generate_csv(scope, :path => tmp_path, :fname => fname, :columns => columns, :viewer => options[:user]) end FileUtils.mkdir_p File.dirname(fpath), :mode => 0755 if tmp_path != fpath FileUtils.mv tmp_path, fpath end fpath end def self.generate_csv_for_cache_key(record, options = {}) "#{record.class.name.underscore}_#{record.id}" end def public_positional_accuracy if coordinates_obscured? && !read_attribute(:public_positional_accuracy) update_public_positional_accuracy end read_attribute(:public_positional_accuracy) end def update_public_positional_accuracy update_column(:public_positional_accuracy, calculate_public_positional_accuracy) end def calculate_public_positional_accuracy if coordinates_obscured? [ positional_accuracy.to_i, uncertainty_cell_diagonal_meters, 0 ].max elsif !positional_accuracy.blank? positional_accuracy end end def inaccurate_location? if metric = quality_metric_score(QualityMetric::LOCATION) return metric <= 0.5 end false end def update_mappable update_column(:mappable, calculate_mappable) end def calculate_mappable return false if latitude.blank? && longitude.blank? return false if public_positional_accuracy && public_positional_accuracy > uncertainty_cell_diagonal_meters return false if inaccurate_location? return false unless passes_quality_metric?(QualityMetric::EVIDENCE) return false unless appropriate? true end def update_observations_places Observation.update_observations_places(ids: [ id ]) # reload the association since we added the records using SQL observations_places(true) end def set_taxon_photo return true unless research_grade? && quality_grade_changed? unless taxon.photos.any? community_taxon.delay( priority: INTEGRITY_PRIORITY, run_at: 1.day.from_now ).set_photo_from_observations end true end def self.update_observations_places(options = { }) filter_scope = options.delete(:scope) scope = (filter_scope && filter_scope.is_a?(ActiveRecord::Relation)) ? 
filter_scope : self.all if filter_ids = options.delete(:ids) if filter_ids.length > 1000 # call again for each batch, then return filter_ids.each_slice(1000) do |slice| update_observations_places(options.merge(ids: slice)) end return end scope = scope.where(id: filter_ids) end scope.select(:id).find_in_batches(options) do |batch| ids = batch.map(&:id) Observation.transaction do connection.execute("DELETE FROM observations_places WHERE observation_id IN (#{ ids.join(',') })") connection.execute("INSERT INTO observations_places (observation_id, place_id) SELECT o.id, pg.place_id FROM observations o JOIN place_geometries pg ON ST_Intersects(pg.geom, o.private_geom) WHERE o.id IN (#{ ids.join(',') }) AND pg.place_id IS NOT NULL AND NOT EXISTS ( SELECT id FROM observations_places WHERE place_id = pg.place_id AND observation_id = o.id )") end end end def observation_photos_finished_processing observation_photos.select do |op| ! (op.photo.is_a?(LocalPhoto) && op.photo.processing?) end end def interpolate_coordinates return unless time_observed_at scope = user.observations.where("latitude IS NOT NULL or private_latitude IS NOT NULL") prev_obs = scope.where("time_observed_at < ?", time_observed_at).order("time_observed_at DESC").first next_obs = scope.where("time_observed_at > ?", time_observed_at).order("time_observed_at ASC").first return unless prev_obs && next_obs prev_lat = prev_obs.private_latitude || prev_obs.latitude prev_lon = prev_obs.private_longitude || prev_obs.longitude next_lat = next_obs.private_latitude || next_obs.latitude next_lon = next_obs.private_longitude || next_obs.longitude # time-weighted interpolation between prev and next observations weight = (next_obs.time_observed_at - time_observed_at) / (next_obs.time_observed_at-prev_obs.time_observed_at) new_lat = (1-weight)*next_lat + weight*prev_lat new_lon = (1-weight)*next_lon + weight*prev_lon self.latitude = new_lat self.longitude = new_lon # we can only set a new uncertainty if the uncertainty of 
the two points are known if prev_obs.positional_accuracy && next_obs.positional_accuracy f = RGeo::Geographic.simple_mercator_factory prev_point = f.point(prev_lon, prev_lat) next_point = f.point(next_lon, next_lat) interpolation_uncertainty = prev_point.distance(next_point)/2.0 new_acc = Math.sqrt(interpolation_uncertainty**2 + prev_obs.positional_accuracy**2 + next_obs.positional_accuracy**2) self.positional_accuracy = new_acc end end def self.as_csv(scope, methods, options = {}) CSV.generate do |csv| csv << methods scope.each do |item| # image_url gets options, which will include an SSL boolean csv << methods.map{ |m| m == :image_url ? item.send(m, options) : item.send(m) } end end end def community_taxon_at_species_or_lower? community_taxon && community_taxon_id == taxon_id && community_taxon.rank_level && community_taxon.rank_level <= Taxon::SPECIES_LEVEL end def community_taxon_at_family_or_lower? community_taxon && community_taxon_id == taxon_id && community_taxon.rank_level && community_taxon.rank_level <= Taxon::FAMILY_LEVEL end def community_taxon_below_family? community_taxon && community_taxon_id == taxon_id && community_taxon.rank_level && community_taxon.rank_level < Taxon::FAMILY_LEVEL end def needs_id_upvotes_count votes_for.loaded? ? votes_for.select{ |v| v.vote_flag? && v.vote_scope == "needs_id" }.size : get_upvotes(vote_scope: "needs_id").size end def needs_id_downvotes_count votes_for.loaded? ? votes_for.select{ |v| !v.vote_flag? && v.vote_scope == "needs_id" }.size : get_downvotes(vote_scope: "needs_id").size end def needs_id_vote_score uvotes = needs_id_upvotes_count dvotes = needs_id_downvotes_count if uvotes == 0 && dvotes == 0 nil elsif uvotes == 0 0 elsif dvotes == 0 1 else uvotes.to_f / (uvotes + dvotes) end end def voted_out_of_needs_id? needs_id_downvotes_count > needs_id_upvotes_count end def voted_in_to_needs_id? needs_id_upvotes_count > needs_id_downvotes_count end def needs_id? quality_grade == NEEDS_ID end def casual? 
quality_grade == CASUAL end def flagged_with(flag, options) quality_grade_will_change! save evaluate_new_flag_for_spam(flag) end def mentioned_users return [ ] unless description description.mentioned_users end # Show count of all faves on this observation. cached_votes_total stores the # count of all votes without a vote_scope, which for an Observation means # the faves, but since that might vary from model to model based on how we # use acts_as_votable, faves_count seems clearer. def faves_count cached_votes_total end def probably_captive? target_taxon = community_taxon || taxon return false unless target_taxon if target_taxon.rank_level.blank? || target_taxon.rank_level.to_i > Taxon::GENUS_LEVEL return false end place = system_places.detect do |p| [ Place::COUNTRY_LEVEL, Place::STATE_LEVEL, Place::COUNTY_LEVEL ].include?( p.admin_level ) end return false unless place buckets = Observation.elastic_search( filters: [ { term: { "taxon.ancestor_ids": target_taxon.id } }, { term: { place_ids: place.id } }, ], # earliest_sort_field: "id", size: 0, aggregate: { captive: { terms: { field: "captive", size: 15 } } } ).results.response.response.aggregations.captive.buckets captive_stats = Hash[ buckets.map{ |b| [ b["key"], b["doc_count" ] ] } ] total = captive_stats.values.sum ratio = captive_stats[1].to_f / total # puts "total: #{total}, ratio: #{ratio}, place: #{place}" total > 10 && ratio >= 0.8 end def application_id_to_index return oauth_application_id if oauth_application_id if user_agent =~ IPHONE_APP_USER_AGENT_PATTERN return OauthApplication.inaturalist_iphone_app.try(:id) end if user_agent =~ ANDROID_APP_USER_AGENT_PATTERN return OauthApplication.inaturalist_android_app.try(:id) end end def owners_identification_from_vision owners_identification.try(:vision) end def owners_identification_from_vision=( val ) self.owners_identification_from_vision_requested = val end def self.dedupe_for_user(user, options = {}) unless user.is_a?(User) u = User.find_by_id(user) u 
||= User.find_by_login(user) user = u end return unless user sql = <<-SQL SELECT array_agg(id) AS observation_ids FROM observations WHERE user_id = #{user.id} AND taxon_id IS NOT NULL AND observed_on_string IS NOT NULL AND observed_on_string != '' AND private_geom IS NOT NULL GROUP BY user_id, taxon_id, observed_on_string, private_geom HAVING count(*) > 1; SQL deleted = 0 start = Time.now Observation.connection.execute(sql).each do |row| ids = row['observation_ids'].gsub(/[\{\}]/, '').split(',').map(&:to_i).sort puts "Found duplicates: #{ids.join(',')}" if options[:debug] keeper_id = ids.shift puts "\tKeeping #{keeper_id}" if options[:debug] unless options[:test] Observation.find(ids).each do |o| puts "\tDeleting #{o.id}" if options[:debug] o.destroy end end deleted += ids.size end puts "Deleted #{deleted} observations in #{Time.now - start}s" if options[:debug] end def self.index_observations_for_user(user_id) Observation.elastic_index!( scope: Observation.by( user_id ) ) end def self.refresh_es_index Observation.__elasticsearch__.refresh_index! unless Rails.env.test? end end
require 'app/models/push_job'
require 'app/concerns/git_commit_sha_validator'

module Pod
  module TrunkApp
    # A single published (or pending) version of a pod. Backed by the
    # `pod_versions` table via Sequel; each version belongs to a Pod and has
    # many Commits (newest first).
    class PodVersion < Sequel::Model
      include Concerns::GitCommitSHAValidator

      # Raw-content URL template; interpolated with [commit_sha, destination_path].
      DATA_URL = "https://raw.github.com/#{ENV['GH_REPO']}/%s/%s"

      self.dataset = :pod_versions
      plugin :timestamps
      plugin :validation_helpers
      plugin :after_initialize

      many_to_one :pod
      one_to_many :commits, :order => Sequel.desc(:updated_at)

      # NOTE(review): this alias to the DB `published` column is immediately
      # shadowed by the `published?` method defined below (the later version of
      # this file, after the commit message further down, drops the alias).
      alias_method :published?, :published

      # A version counts as published once any of its commits has been pushed.
      def published?
        commits.any?(&:pushed?)
      end

      # NOTE(review): `find?` is not a standard Array/Enumerable method —
      # presumably `find(&:pushed?)` was intended. Also, despite the name this
      # returns a Commit object, not a SHA string; `data_url` interpolates the
      # result into a URL path, so verify against callers.
      def commit_sha
        commits.find?(&:pushed?)
      end

      def after_initialize
        super
        # if new?
        #   self.published = false if published.nil?
        # end
      end

      # Attributes safe to expose in API responses.
      def public_attributes
        { 'created_at' => created_at, 'name' => name }
      end

      # Path of the podspec inside the Specs repo for this version.
      def destination_path
        File.join('Specs', pod.name, name, "#{pod.name}.podspec.json")
      end

      # Commit message used when pushing this version to the Specs repo.
      def message
        "[Add] #{pod.name} #{name}"
      end

      # Raw-content URL for the pushed podspec; nil until a commit is pushed.
      def data_url
        DATA_URL % [commit_sha, destination_path] if commit_sha
      end

      def resource_path
        URI.escape("/pods/#{pod.name}/versions/#{name}")
      end

      protected

      # Column tuple that must be unique per version.
      UNIQUE_VERSION = [:pod_id, :name]

      def validate
        super
        validates_presence :pod_id
        validates_presence :name
        # validates_presence :published
        # validates_git_commit_sha :commit_sha
        validates_unique UNIQUE_VERSION
        # Sequel adds the error with the column tuple as the key, but for the
        # user just using `name' as the key is more semantic.
        if error = errors.delete(UNIQUE_VERSION)
          errors.add(:name, error.first)
        end
      end
    end
  end
end
[PodVersion] Replace DB published method with dynamic one that goes via commit.
require 'app/models/commit'
require 'app/concerns/git_commit_sha_validator'

module Pod
  module TrunkApp
    # Post-commit version of the model above: the DB-backed `published` alias
    # is gone; published state is derived dynamically from pushed commits.
    class PodVersion < Sequel::Model
      include Concerns::GitCommitSHAValidator

      # Raw-content URL template; interpolated with [commit_sha, destination_path].
      DATA_URL = "https://raw.github.com/#{ENV['GH_REPO']}/%s/%s"

      self.dataset = :pod_versions
      plugin :timestamps
      plugin :validation_helpers
      plugin :after_initialize

      many_to_one :pod
      one_to_many :commits, :order => Sequel.desc(:updated_at)

      # A version counts as published once any of its commits has been pushed.
      def published?
        commits.any?(&:pushed?)
      end

      # NOTE(review): `find?` is not a standard Array/Enumerable method —
      # presumably `find(&:pushed?)` was intended; returns a Commit, not a SHA.
      def commit_sha
        commits.find?(&:pushed?)
      end

      def after_initialize
        super
        # if new?
        #   self.published = false if published.nil?
        # end
      end

      # Attributes safe to expose in API responses.
      def public_attributes
        { 'created_at' => created_at, 'name' => name }
      end

      # Path of the podspec inside the Specs repo for this version.
      def destination_path
        File.join('Specs', pod.name, name, "#{pod.name}.podspec.json")
      end

      # Commit message used when pushing this version to the Specs repo.
      def message
        "[Add] #{pod.name} #{name}"
      end

      # Raw-content URL for the pushed podspec; nil until a commit is pushed.
      def data_url
        DATA_URL % [commit_sha, destination_path] if commit_sha
      end

      def resource_path
        URI.escape("/pods/#{pod.name}/versions/#{name}")
      end

      protected

      # Column tuple that must be unique per version.
      UNIQUE_VERSION = [:pod_id, :name]

      def validate
        super
        validates_presence :pod_id
        validates_presence :name
        # validates_presence :published
        # validates_git_commit_sha :commit_sha
        validates_unique UNIQUE_VERSION
        # Sequel adds the error with the column tuple as the key, but for the
        # user just using `name' as the key is more semantic.
        if error = errors.delete(UNIQUE_VERSION)
          errors.add(:name, error.first)
        end
      end
    end
  end
end
# A candidacy's proposition, tagged with one or more Tags (Mongoid document).
class Proposition
  include Mongoid::Document
  include Mongoid::Timestamps

  field :text, type: String

  belongs_to :candidacy
  has_and_belongs_to_many :tags

  validates_presence_of :candidacy, :tags, :text

  embeds_many :embeds, as: :embedable
  accepts_nested_attributes_for :embeds, :allow_destroy => true, :reject_if => proc { |obj| obj.blank? }

  # NOTE(review): in a before_save callback, add_parent_tags returning false
  # (when candidacy/election is missing) halts the save under classic
  # ActiveModel callback semantics — confirm that is intended.
  before_save :add_parent_tags

  attr_reader :tag

  # Virtual single-tag writer; delegates to add_tag.
  def tag= name
    self.add_tag name
  end

  def tag_names
    tags.collect(&:name)
  end

  # Replaces all tags with the named ones, creating Tags as needed.
  def tag_names= names
    self.tag_ids = [] # reset tags (self.tags = [] not working for this)
    names.each do |name|
      self.add_tag name
    end
    self.tags
  end

  # Finds or creates a Tag by name and appends it.
  # NOTE(review): `Tag.first(conditions: ...)` is legacy Mongoid 2.x syntax;
  # also no duplicate guard — the same tag can be appended twice. Verify.
  def add_tag name
    unless tag = Tag.first(conditions: {name: name})
      tag = Tag.create name: name
    end
    self.tags << tag
  end

  private

  # For each tag, also attach the parent tag configured on the election
  # (via election_tags), so propositions roll up to broader categories.
  # NOTE(review): appends to `tags` while iterating over it — behavior for
  # newly appended parents within the same loop is implementation-dependent.
  def add_parent_tags
    return false unless candidacy and candidacy.election
    for tag in self.tags do
      election_tag = candidacy.election.election_tags.where(tag_id: tag.id).first
      parent_tag = election_tag.try(:parent_tag)
      if parent_tag and not tags.include?(parent_tag)
        self.tags << parent_tag
      end
    end
    self.tags
  end
end
On propositions, tags is optional
# Post-commit version of the model above: `:tags` removed from the presence
# validation, making tags optional.
class Proposition
  include Mongoid::Document
  include Mongoid::Timestamps

  field :text, type: String

  belongs_to :candidacy
  has_and_belongs_to_many :tags

  validates_presence_of :candidacy, :text

  embeds_many :embeds, as: :embedable
  accepts_nested_attributes_for :embeds, :allow_destroy => true, :reject_if => proc { |obj| obj.blank? }

  before_save :add_parent_tags

  attr_reader :tag

  # Virtual single-tag writer; delegates to add_tag.
  def tag= name
    self.add_tag name
  end

  def tag_names
    tags.collect(&:name)
  end

  # Replaces all tags with the named ones, creating Tags as needed.
  def tag_names= names
    self.tag_ids = [] # reset tags (self.tags = [] not working for this)
    names.each do |name|
      self.add_tag name
    end
    self.tags
  end

  # Finds or creates a Tag by name and appends it (no duplicate guard — see
  # review note on the earlier version).
  def add_tag name
    unless tag = Tag.first(conditions: {name: name})
      tag = Tag.create name: name
    end
    self.tags << tag
  end

  private

  # For each tag, also attach the parent tag configured on the election
  # (via election_tags), so propositions roll up to broader categories.
  def add_parent_tags
    return false unless candidacy and candidacy.election
    for tag in self.tags do
      election_tag = candidacy.election.election_tags.where(tag_id: tag.id).first
      parent_tag = election_tag.try(:parent_tag)
      if parent_tag and not tags.include?(parent_tag)
        self.tags << parent_tag
      end
    end
    self.tags
  end
end
# -*- encoding : utf-8 -*- # == Schema Information # Schema version: 20131024114346 # # Table name: public_bodies # # id :integer not null, primary key # name :text not null # short_name :text default(""), not null # request_email :text not null # version :integer not null # last_edit_editor :string(255) not null # last_edit_comment :text not null # created_at :datetime not null # updated_at :datetime not null # url_name :text not null # home_page :text default(""), not null # notes :text default(""), not null # first_letter :string(255) not null # publication_scheme :text default(""), not null # api_key :string(255) not null # info_requests_count :integer default(0), not null # disclosure_log :text default(""), not null # info_requests_successful_count :integer # info_requests_not_held_count :integer # info_requests_overdue_count :integer # info_requests_visible_classified_count :integer # require 'csv' require 'securerandom' require 'set' class PublicBody < ActiveRecord::Base include AdminColumn class ImportCSVDryRun < StandardError ; end @non_admin_columns = %w(name last_edit_comment) attr_accessor :no_xapian_reindex # Default fields available for importing from CSV, in the format # [field_name, 'short description of field (basic html allowed)'] cattr_accessor :csv_import_fields do [ ['name', '(i18n)<strong>Existing records cannot be renamed</strong>'], ['short_name', '(i18n)'], ['request_email', '(i18n)'], ['notes', '(i18n)'], ['publication_scheme', '(i18n)'], ['disclosure_log', '(i18n)'], ['home_page', ''], ['tag_string', '(tags separated by spaces)'], ] end has_many :info_requests, :order => 'created_at desc' has_many :track_things, :order => 'created_at desc' has_many :censor_rules, :order => 'created_at desc' validates_presence_of :name, :message => N_("Name can't be blank") validates_presence_of :url_name, :message => N_("URL name can't be blank") validates_uniqueness_of :short_name, :message => N_("Short name is already taken"), :allow_blank => true 
validates_uniqueness_of :url_name, :message => N_("URL name is already taken") validates_uniqueness_of :name, :message => N_("Name is already taken") validate :request_email_if_requestable before_save :set_api_key!, :unless => :api_key before_save :set_default_publication_scheme after_save :purge_in_cache after_update :reindex_requested_from # Every public body except for the internal admin one is visible scope :visible, lambda { { :conditions => "public_bodies.id <> #{PublicBody.internal_admin_body.id}" } } acts_as_versioned acts_as_xapian :texts => [:name, :short_name, :notes], :values => [ # for sorting [:created_at_numeric, 1, "created_at", :number] ], :terms => [ [:variety, 'V', "variety"], [:tag_array_for_search, 'U', "tag"] ] has_tag_string strip_attributes! translates :name, :short_name, :request_email, :url_name, :notes, :first_letter, :publication_scheme # Cannot be grouped at top as it depends on the `translates` macro include Translatable # Cannot be grouped at top as it depends on the `translates` macro include PublicBodyDerivedFields # Cannot be grouped at top as it depends on the `translates` macro class Translation include PublicBodyDerivedFields end self.non_versioned_columns << 'created_at' << 'updated_at' << 'first_letter' << 'api_key' self.non_versioned_columns << 'info_requests_count' << 'info_requests_successful_count' self.non_versioned_columns << 'info_requests_count' << 'info_requests_visible_classified_count' self.non_versioned_columns << 'info_requests_not_held_count' << 'info_requests_overdue' self.non_versioned_columns << 'info_requests_overdue_count' # Cannot be defined directly under `include` statements as this is opening # the PublicBody::Version class dynamically defined by the # `acts_as_versioned` macro. 
# # TODO: acts_as_versioned accepts an extend parameter [1] so these methods # could be extracted to a module: # # acts_as_versioned :extend => PublicBodyVersionExtensions # # This includes the module in both the parent class (PublicBody) and the # Version class (PublicBody::Version), so the behaviour is slightly # different to opening up PublicBody::Version. # # We could add an `extend_version_class` option pretty trivially by # following the pattern for the existing `extend` option. # # [1] http://git.io/vIetK class Version def last_edit_comment_for_html_display text = self.last_edit_comment.strip text = CGI.escapeHTML(text) text = MySociety::Format.make_clickable(text) text = text.gsub(/\n/, '<br>') return text end def compare(previous = nil) if previous.nil? yield([]) else v = self changes = self.class.content_columns.inject([]) {|memo, c| unless %w(version last_edit_editor last_edit_comment updated_at).include?(c.name) from = previous.send(c.name) to = self.send(c.name) memo << { :name => c.human_name, :from => from, :to => to } if from != to end memo } changes.each do |change| yield(change) end end end end # Public: Search for Public Bodies whose name, short_name, request_email or # tags contain the given query # # query - String to query the searchable fields # locale - String to specify the language of the seach query # (default: I18n.locale) # # Returns an ActiveRecord::Relation def self.search(query, locale = I18n.locale) locale = locale.to_s.gsub('-', '_') # Clean the locale string sql = <<-SQL ( lower(public_body_translations.name) like lower('%'||?||'%') OR lower(public_body_translations.short_name) like lower('%'||?||'%') OR lower(public_body_translations.request_email) like lower('%'||?||'%' ) OR lower(has_tag_string_tags.name) like lower('%'||?||'%' ) ) AND has_tag_string_tags.model_id = public_bodies.id AND has_tag_string_tags.model = 'PublicBody' AND (public_body_translations.locale = ?) SQL PublicBody.joins(:translations, :tags). 
where([sql, query, query, query, query, locale]). uniq end def set_api_key set_api_key! if api_key.nil? end def set_api_key! self.api_key = SecureRandom.base64(33) end # like find_by_url_name but also search historic url_name if none found def self.find_by_url_name_with_historic(name) # If many bodies are found (usually because the url_name is the same # across locales) return any of them. found = joins(:translations). where("public_body_translations.url_name = ?", name). readonly(false). first return found if found # If none found, then search the history of short names and find unique # public bodies in it old = PublicBody::Version. where(:url_name => name). pluck('DISTINCT public_body_id') # Maybe return the first one, so we show something relevant, # rather than throwing an error? raise "Two bodies with the same historical URL name: #{name}" if old.size > 1 return unless old.size == 1 # does acts_as_versioned provide a method that returns the current version? PublicBody.find(old.first) end # If tagged "not_apply", then FOI/EIR no longer applies to authority at all def not_apply? has_tag?('not_apply') end # If tagged "defunct", then the authority no longer exists at all def defunct? has_tag?('defunct') end # Can an FOI (etc.) request be made to this body? def is_requestable? has_request_email? && !defunct? && !not_apply? end # Strict superset of is_requestable? def is_followupable? has_request_email? end def has_request_email? !request_email.blank? && request_email != 'blank' end # Also used as not_followable_reason def not_requestable_reason if defunct? 'defunct' elsif not_apply? 'not_apply' elsif !has_request_email? 'bad_contact' else raise "not_requestable_reason called with type that has no reason" end end def special_not_requestable_reason? defunct? || not_apply? 
end def created_at_numeric # format it here as no datetime support in Xapian's value ranges created_at.strftime("%Y%m%d%H%M%S") end def variety "authority" end # Guess home page from the request email, or use explicit override, or nil # if not known. def calculated_home_page if home_page && !home_page.empty? home_page[URI::regexp(%w(http https))] ? home_page : "http://#{home_page}" elsif request_email_domain "http://www.#{request_email_domain}" end end # Are all requests to this body under the Environmental Information Regulations? def eir_only? has_tag?('eir_only') end def law_only_short eir_only? ? 'EIR' : 'FOI' end # Schools are allowed more time in holidays, so we change some wordings def is_school? has_tag?('school') end def site_administration? has_tag?('site_administration') end # The "internal admin" is a special body for internal use. def self.internal_admin_body # Use find_by_sql to avoid the search being specific to a # locale, since url_name is a translated field: sql = "SELECT * FROM public_bodies WHERE url_name = 'internal_admin_authority'" matching_pbs = PublicBody.find_by_sql sql case when matching_pbs.empty? then I18n.with_locale(I18n.default_locale) do PublicBody.create!(:name => 'Internal admin authority', :short_name => "", :request_email => AlaveteliConfiguration::contact_email, :home_page => "", :notes => "", :publication_scheme => "", :last_edit_editor => "internal_admin", :last_edit_comment => "Made by PublicBody.internal_admin_body") end when matching_pbs.length == 1 then matching_pbs[0] else raise "Multiple public bodies (#{matching_pbs.length}) found with url_name 'internal_admin_authority'" end end # Import from a string in CSV format. # Just tests things and returns messages if dry_run is true. # Returns an array of [array of errors, array of notes]. If there # are errors, always rolls back (as with dry_run). 
def self.import_csv(csv, tag, tag_behaviour, dry_run, editor, available_locales = []) tmp_csv = nil Tempfile.open('alaveteli') do |f| f.write csv tmp_csv = f end PublicBody.import_csv_from_file(tmp_csv.path, tag, tag_behaviour, dry_run, editor, available_locales) end # Import from a CSV file. # Just tests things and returns messages if dry_run is true. # Returns an array of [array of errors, array of notes]. If there # are errors, always rolls back (as with dry_run). def self.import_csv_from_file(csv_filename, tag, tag_behaviour, dry_run, editor, available_locales = []) errors = [] notes = [] begin ActiveRecord::Base.transaction do # Use the default locale when retrieving existing bodies; otherwise # matching names won't work afterwards, and we'll create new bodies instead # of updating them bodies_by_name = {} set_of_existing = Set.new internal_admin_body_id = PublicBody.internal_admin_body.id I18n.with_locale(I18n.default_locale) do bodies = (tag.nil? || tag.empty?) ? PublicBody.find(:all, :include => :translations) : PublicBody.find_by_tag(tag) for existing_body in bodies # Hide InternalAdminBody from import notes next if existing_body.id == internal_admin_body_id bodies_by_name[existing_body.name] = existing_body set_of_existing.add(existing_body.name) end end set_of_importing = Set.new # Default values in case no field list is given field_names = { 'name' => 1, 'request_email' => 2 } line = 0 import_options = {:field_names => field_names, :available_locales => available_locales, :tag => tag, :tag_behaviour => tag_behaviour, :editor => editor, :notes => notes, :errors => errors } CSV.foreach(csv_filename) do |row| line = line + 1 # Parse the first line as a field list if it starts with '#' if line==1 and row.first.to_s =~ /^#(.*)$/ row[0] = row[0][1..-1] # Remove the # sign on first field row.each_with_index {|field, i| field_names[field] = i} next end fields = {} field_names.each{ |name, i| fields[name] = row[i] } yield line, fields if block_given? 
name = row[field_names['name']] email = row[field_names['request_email']] next if name.nil? name.strip! email.strip! unless email.nil? if !email.nil? && !email.empty? && !MySociety::Validate.is_valid_email(email) errors.push "error: line #{line.to_s}: invalid email '#{email}' for authority '#{name}'" next end public_body = bodies_by_name[name] || PublicBody.new(:name => "", :short_name => "", :request_email => "") public_body.import_values_from_csv_row(row, line, name, import_options) set_of_importing.add(name) end # Give an error listing ones that are to be deleted deleted_ones = set_of_existing - set_of_importing if deleted_ones.size > 0 notes.push "Notes: Some " + tag + " bodies are in database, but not in CSV file:\n " + Array(deleted_ones).sort.join("\n ") + "\nYou may want to delete them manually.\n" end # Rollback if a dry run, or we had errors if dry_run or errors.size > 0 raise ImportCSVDryRun end end rescue ImportCSVDryRun # Ignore end return [errors, notes] end def self.localized_csv_field_name(locale, field_name) (locale.to_s == I18n.default_locale.to_s) ? field_name : "#{field_name}.#{locale}" end # import values from a csv row (that may include localized columns) def import_values_from_csv_row(row, line, name, options) is_new = new_record? edit_info = if is_new { :action => "creating new authority", :comment => 'Created from spreadsheet' } else { :action => "updating authority", :comment => 'Updated from spreadsheet' } end locales = options[:available_locales] locales = [I18n.default_locale] if locales.empty? locales.each do |locale| I18n.with_locale(locale) do changed = set_locale_fields_from_csv_row(is_new, locale, row, options) unless changed.empty? options[:notes].push "line #{ line }: #{ edit_info[:action] } '#{ name }' (locale: #{ locale }):\n\t#{ changed.to_json }" self.last_edit_comment = edit_info[:comment] self.publication_scheme = publication_scheme || "" self.last_edit_editor = options[:editor] begin save! 
rescue ActiveRecord::RecordInvalid errors.full_messages.each do |msg| options[:errors].push "error: line #{ line }: #{ msg } for authority '#{ name }'" end next end end end end end # Sets attribute values for a locale from a csv row def set_locale_fields_from_csv_row(is_new, locale, row, options) changed = ActiveSupport::OrderedHash.new csv_field_names = options[:field_names] csv_import_fields.each do |field_name, field_notes| localized_field_name = self.class.localized_csv_field_name(locale, field_name) column = csv_field_names[localized_field_name] value = column && row[column] # Tags are a special case, as we support adding to the field, not just setting a new value if field_name == 'tag_string' new_tags = [value, options[:tag]].select{ |new_tag| !new_tag.blank? } if new_tags.empty? value = nil else value = new_tags.join(" ") value = "#{value} #{tag_string}"if options[:tag_behaviour] == 'add' end end if value and read_attribute_value(field_name, locale) != value if is_new changed[field_name] = value else changed[field_name] = "#{read_attribute_value(field_name, locale)}: #{value}" end assign_attributes({ field_name => value }) end end changed end # Does this user have the power of FOI officer for this body? def is_foi_officer?(user) user_domain = user.email_domain our_domain = request_email_domain return false if user_domain.nil? or our_domain.nil? our_domain == user_domain end def foi_officer_domain_required request_email_domain end def request_email if AlaveteliConfiguration::override_all_public_body_request_emails.blank? || read_attribute(:request_email).blank? read_attribute(:request_email) else AlaveteliConfiguration::override_all_public_body_request_emails end end # Domain name of the request email def request_email_domain PublicBody.extract_domain_from_email(request_email) end # Return the domain part of an email address, canonicalised and with common # extra UK Government server name parts removed. 
def self.extract_domain_from_email(email) email =~ /@(.*)/ if $1.nil? return nil end # take lower case ret = $1.downcase # remove special email domains for UK Government addresses ret.sub!(".gsi.", ".") ret.sub!(".x.", ".") ret.sub!(".pnn.", ".") return ret end def reverse_sorted_versions versions.sort { |a,b| b.version <=> a.version } end def sorted_versions versions.sort { |a,b| a.version <=> b.version } end def has_notes? !notes.nil? && notes != "" end def notes_as_html notes end def notes_without_html # assume notes are reasonably behaved HTML, so just use simple regexp on this @notes_without_html ||= (self.notes.nil? ? '' : self.notes.gsub(/<\/?[^>]*>/, "")) end def json_for_api { :id => id, :url_name => url_name, :name => name, :short_name => short_name, # :request_email # we hide this behind a captcha, to stop people # doing bulk requests easily :created_at => created_at, :updated_at => updated_at, # don't add the history as some edit comments contain sensitive # information # :version, :last_edit_editor, :last_edit_comment :home_page => calculated_home_page, :notes => notes, :publication_scheme => publication_scheme, :tags => tag_array, } end def purge_in_cache info_requests.each { |x| x.purge_in_cache } end def self.where_clause_for_stats(minimum_requests, total_column) # When producing statistics for public bodies, we want to # exclude any that are tagged with 'test' - we use a # sub-select to find the IDs of those public bodies. test_tagged_query = "SELECT model_id FROM has_tag_string_tags" \ " WHERE model = 'PublicBody' AND name = 'test'" "#{total_column} >= #{minimum_requests} AND id NOT IN (#{test_tagged_query})" end # Return data for the 'n' public bodies with the highest (or # lowest) number of requests, but only returning data for those # with at least 'minimum_requests' requests. 
  # Returns a Hash of chart data: the selected bodies, their request counts
  # (used as both 'y_values' and 'totals'), and the maximum count ('y_max').
  def self.get_request_totals(n, highest, minimum_requests)
    ordering = "info_requests_count"
    ordering += " DESC" if highest
    where_clause = where_clause_for_stats minimum_requests, 'info_requests_count'
    public_bodies = PublicBody.order(ordering).where(where_clause).limit(n)
    # when ordered DESC, reverse back to ascending order for display
    public_bodies.reverse! if highest
    y_values = public_bodies.map { |pb| pb.info_requests_count }
    return {
      'public_bodies' => public_bodies,
      'y_values' => y_values,
      'y_max' => y_values.max,
      'totals' => y_values}
  end

  # Return data for the 'n' public bodies with the highest (or
  # lowest) score according to the metric of the value in 'column'
  # divided by the total number of requests, expressed as a
  # percentage. This only returns data for those public bodies with
  # at least 'minimum_requests' requests.
  def self.get_request_percentages(column, n, highest, minimum_requests)
    total_column = "info_requests_visible_classified_count"
    ordering = "y_value"
    ordering += " DESC" if highest
    # the ratio is computed in SQL so the database can sort on it
    y_value_column = "(cast(#{column} as float) / #{total_column})"
    where_clause = where_clause_for_stats minimum_requests, total_column
    where_clause += " AND #{column} IS NOT NULL"
    public_bodies = PublicBody.select("*, #{y_value_column} AS y_value").order(ordering).where(where_clause).limit(n)
    # when ordered DESC, reverse back to ascending order for display
    public_bodies.reverse! if highest
    y_values = public_bodies.map { |pb| pb.y_value.to_f }

    original_values = public_bodies.map { |pb| pb.send(column) }
    # If these are all nil, then probably the values have never
    # been set; some have to be set by a rake task. In that case,
    # just return nil:
    return nil unless original_values.any? { |ov| !ov.nil? }

    original_totals = public_bodies.map { |pb| pb.send(total_column) }
    # Calculate confidence intervals, as offsets from the proportion:
    cis_below = []
    cis_above = []
    original_totals.each_with_index.map { |total, i|
      # NOTE(review): ci_bounds is defined elsewhere; presumably a binomial
      # confidence interval at the 0.05 significance level — confirm.
      lower_ci, higher_ci = ci_bounds original_values[i], total, 0.05
      cis_below.push(y_values[i] - lower_ci)
      cis_above.push(higher_ci - y_values[i])
    }
    # Turn the y values and confidence interval offsets into
    # percentages:
    [y_values, cis_below, cis_above].each { |l|
      l.map! { |v| 100 * v }
    }
    return {
      'public_bodies' => public_bodies,
      'y_values' => y_values,
      'cis_below' => cis_below,
      'cis_above' => cis_above,
      'y_max' => 100,
      'totals' => original_totals}
  end

  # Example public bodies for the front page: the short names listed in
  # configuration when set, otherwise the most-requested visible bodies
  # (a slow query).
  def self.popular_bodies(locale)
    # get some example searches and public bodies to display
    # either from config, or based on a (slow!) query if not set
    body_short_names = AlaveteliConfiguration::frontpage_publicbody_examples.split(/\s*;\s*/)

    locale_condition = 'public_body_translations.locale = ?'
    underscore_locale = locale.gsub '-', '_'
    conditions = [locale_condition, underscore_locale]
    bodies = []
    I18n.with_locale(locale) do
      if body_short_names.empty?
        # This is too slow
        bodies = visible.find(:all,
                              :order => "info_requests_count desc",
                              :limit => 32,
                              :conditions => conditions,
                              :joins => :translations
                              )
      else
        conditions[0] += " and public_bodies.url_name in (?)"
        conditions << body_short_names
        bodies = find(:all, :conditions => conditions, :joins => :translations)
      end
    end
    return bodies
  end

  # Methods to privatise
  # --------------------------------------------------------------------------

  # TODO: This could be removed by updating the default value (to '') of the
  # `publication_scheme` column in the `public_body_translations` table.
# # TODO: Can't actually deprecate this because spec/script/mailin_spec.rb:28 # fails due to the deprecation notice output def set_default_publication_scheme # warn %q([DEPRECATION] PublicBody#set_default_publication_scheme will # become a private method in 0.23).squish # Make sure publication_scheme gets the correct default value. # (This would work automatically, were publication_scheme not a # translated attribute) self.publication_scheme = "" if publication_scheme.nil? end # if the URL name has changed, then all requested_from: queries # will break unless we update index for every event for every # request linked to it # # TODO: Can't actually deprecate this because spec/script/mailin_spec.rb:28 # fails due to the deprecation notice output def reindex_requested_from # warn %q([DEPRECATION] PublicBody#reindex_requested_from will become a # private method in 0.23).squish if changes.include?('url_name') info_requests.each do |info_request| info_request.info_request_events.each do |info_request_event| info_request_event.xapian_mark_needs_index end end end end # Methods to remove # -------------------------------------------------------------------------- # Set the first letter on a public body or translation def self.set_first_letter(instance) warn %q([DEPRECATION] PublicBody.set_first_letter will be removed in 0.23).squish unless instance.name.nil? or instance.name.empty? # we use a regex to ensure it works with utf-8/multi-byte first_letter = Unicode.upcase instance.name.scan(/^./mu)[0] if first_letter != instance.first_letter instance.first_letter = first_letter end end end def calculate_cached_fields(t) warn %q([DEPRECATION] PublicBody#calculate_cached_fields will be removed in 0.23).squish PublicBody.set_first_letter(t) short_long_name = t.name short_long_name = t.short_name if t.short_name and !t.short_name.empty? 
t.url_name = MySociety::Format.simplify_url_part(short_long_name, 'body') end private # Read an attribute value (without using locale fallbacks if the attribute is translated) def read_attribute_value(name, locale) if self.class.translates.include?(name.to_sym) if globalize.stash.contains?(locale, name) globalize.stash.read(locale, name) else translation_for(locale).send(name) end else send(name) end end def request_email_if_requestable # Request_email can be blank, meaning we don't have details if self.is_requestable? unless MySociety::Validate.is_valid_email(self.request_email) errors.add(:request_email, "Request email doesn't look like a valid email address") end end end end Group similar methods # -*- encoding : utf-8 -*- # == Schema Information # Schema version: 20131024114346 # # Table name: public_bodies # # id :integer not null, primary key # name :text not null # short_name :text default(""), not null # request_email :text not null # version :integer not null # last_edit_editor :string(255) not null # last_edit_comment :text not null # created_at :datetime not null # updated_at :datetime not null # url_name :text not null # home_page :text default(""), not null # notes :text default(""), not null # first_letter :string(255) not null # publication_scheme :text default(""), not null # api_key :string(255) not null # info_requests_count :integer default(0), not null # disclosure_log :text default(""), not null # info_requests_successful_count :integer # info_requests_not_held_count :integer # info_requests_overdue_count :integer # info_requests_visible_classified_count :integer # require 'csv' require 'securerandom' require 'set' class PublicBody < ActiveRecord::Base include AdminColumn class ImportCSVDryRun < StandardError ; end @non_admin_columns = %w(name last_edit_comment) attr_accessor :no_xapian_reindex # Default fields available for importing from CSV, in the format # [field_name, 'short description of field (basic html allowed)'] cattr_accessor 
:csv_import_fields do [ ['name', '(i18n)<strong>Existing records cannot be renamed</strong>'], ['short_name', '(i18n)'], ['request_email', '(i18n)'], ['notes', '(i18n)'], ['publication_scheme', '(i18n)'], ['disclosure_log', '(i18n)'], ['home_page', ''], ['tag_string', '(tags separated by spaces)'], ] end has_many :info_requests, :order => 'created_at desc' has_many :track_things, :order => 'created_at desc' has_many :censor_rules, :order => 'created_at desc' validates_presence_of :name, :message => N_("Name can't be blank") validates_presence_of :url_name, :message => N_("URL name can't be blank") validates_uniqueness_of :short_name, :message => N_("Short name is already taken"), :allow_blank => true validates_uniqueness_of :url_name, :message => N_("URL name is already taken") validates_uniqueness_of :name, :message => N_("Name is already taken") validate :request_email_if_requestable before_save :set_api_key!, :unless => :api_key before_save :set_default_publication_scheme after_save :purge_in_cache after_update :reindex_requested_from # Every public body except for the internal admin one is visible scope :visible, lambda { { :conditions => "public_bodies.id <> #{PublicBody.internal_admin_body.id}" } } acts_as_versioned acts_as_xapian :texts => [:name, :short_name, :notes], :values => [ # for sorting [:created_at_numeric, 1, "created_at", :number] ], :terms => [ [:variety, 'V', "variety"], [:tag_array_for_search, 'U', "tag"] ] has_tag_string strip_attributes! 
translates :name, :short_name, :request_email, :url_name, :notes, :first_letter, :publication_scheme # Cannot be grouped at top as it depends on the `translates` macro include Translatable # Cannot be grouped at top as it depends on the `translates` macro include PublicBodyDerivedFields # Cannot be grouped at top as it depends on the `translates` macro class Translation include PublicBodyDerivedFields end self.non_versioned_columns << 'created_at' << 'updated_at' << 'first_letter' << 'api_key' self.non_versioned_columns << 'info_requests_count' << 'info_requests_successful_count' self.non_versioned_columns << 'info_requests_count' << 'info_requests_visible_classified_count' self.non_versioned_columns << 'info_requests_not_held_count' << 'info_requests_overdue' self.non_versioned_columns << 'info_requests_overdue_count' # Cannot be defined directly under `include` statements as this is opening # the PublicBody::Version class dynamically defined by the # `acts_as_versioned` macro. # # TODO: acts_as_versioned accepts an extend parameter [1] so these methods # could be extracted to a module: # # acts_as_versioned :extend => PublicBodyVersionExtensions # # This includes the module in both the parent class (PublicBody) and the # Version class (PublicBody::Version), so the behaviour is slightly # different to opening up PublicBody::Version. # # We could add an `extend_version_class` option pretty trivially by # following the pattern for the existing `extend` option. # # [1] http://git.io/vIetK class Version def last_edit_comment_for_html_display text = self.last_edit_comment.strip text = CGI.escapeHTML(text) text = MySociety::Format.make_clickable(text) text = text.gsub(/\n/, '<br>') return text end def compare(previous = nil) if previous.nil? 
yield([]) else v = self changes = self.class.content_columns.inject([]) {|memo, c| unless %w(version last_edit_editor last_edit_comment updated_at).include?(c.name) from = previous.send(c.name) to = self.send(c.name) memo << { :name => c.human_name, :from => from, :to => to } if from != to end memo } changes.each do |change| yield(change) end end end end # Public: Search for Public Bodies whose name, short_name, request_email or # tags contain the given query # # query - String to query the searchable fields # locale - String to specify the language of the seach query # (default: I18n.locale) # # Returns an ActiveRecord::Relation def self.search(query, locale = I18n.locale) locale = locale.to_s.gsub('-', '_') # Clean the locale string sql = <<-SQL ( lower(public_body_translations.name) like lower('%'||?||'%') OR lower(public_body_translations.short_name) like lower('%'||?||'%') OR lower(public_body_translations.request_email) like lower('%'||?||'%' ) OR lower(has_tag_string_tags.name) like lower('%'||?||'%' ) ) AND has_tag_string_tags.model_id = public_bodies.id AND has_tag_string_tags.model = 'PublicBody' AND (public_body_translations.locale = ?) SQL PublicBody.joins(:translations, :tags). where([sql, query, query, query, query, locale]). uniq end def set_api_key set_api_key! if api_key.nil? end def set_api_key! self.api_key = SecureRandom.base64(33) end # like find_by_url_name but also search historic url_name if none found def self.find_by_url_name_with_historic(name) # If many bodies are found (usually because the url_name is the same # across locales) return any of them. found = joins(:translations). where("public_body_translations.url_name = ?", name). readonly(false). first return found if found # If none found, then search the history of short names and find unique # public bodies in it old = PublicBody::Version. where(:url_name => name). 
pluck('DISTINCT public_body_id') # Maybe return the first one, so we show something relevant, # rather than throwing an error? raise "Two bodies with the same historical URL name: #{name}" if old.size > 1 return unless old.size == 1 # does acts_as_versioned provide a method that returns the current version? PublicBody.find(old.first) end # If tagged "not_apply", then FOI/EIR no longer applies to authority at all def not_apply? has_tag?('not_apply') end # If tagged "defunct", then the authority no longer exists at all def defunct? has_tag?('defunct') end # Are all requests to this body under the Environmental Information # Regulations? def eir_only? has_tag?('eir_only') end # Schools are allowed more time in holidays, so we change some wordings def is_school? has_tag?('school') end def site_administration? has_tag?('site_administration') end # Can an FOI (etc.) request be made to this body? def is_requestable? has_request_email? && !defunct? && !not_apply? end # Strict superset of is_requestable? def is_followupable? has_request_email? end def has_request_email? !request_email.blank? && request_email != 'blank' end # Also used as not_followable_reason def not_requestable_reason if defunct? 'defunct' elsif not_apply? 'not_apply' elsif !has_request_email? 'bad_contact' else raise "not_requestable_reason called with type that has no reason" end end def special_not_requestable_reason? defunct? || not_apply? end def created_at_numeric # format it here as no datetime support in Xapian's value ranges created_at.strftime("%Y%m%d%H%M%S") end def variety "authority" end def law_only_short eir_only? ? 'EIR' : 'FOI' end # Guess home page from the request email, or use explicit override, or nil # if not known. def calculated_home_page if home_page && !home_page.empty? home_page[URI::regexp(%w(http https))] ? home_page : "http://#{home_page}" elsif request_email_domain "http://www.#{request_email_domain}" end end # The "internal admin" is a special body for internal use. 
def self.internal_admin_body # Use find_by_sql to avoid the search being specific to a # locale, since url_name is a translated field: sql = "SELECT * FROM public_bodies WHERE url_name = 'internal_admin_authority'" matching_pbs = PublicBody.find_by_sql sql case when matching_pbs.empty? then I18n.with_locale(I18n.default_locale) do PublicBody.create!(:name => 'Internal admin authority', :short_name => "", :request_email => AlaveteliConfiguration::contact_email, :home_page => "", :notes => "", :publication_scheme => "", :last_edit_editor => "internal_admin", :last_edit_comment => "Made by PublicBody.internal_admin_body") end when matching_pbs.length == 1 then matching_pbs[0] else raise "Multiple public bodies (#{matching_pbs.length}) found with url_name 'internal_admin_authority'" end end # Import from a string in CSV format. # Just tests things and returns messages if dry_run is true. # Returns an array of [array of errors, array of notes]. If there # are errors, always rolls back (as with dry_run). def self.import_csv(csv, tag, tag_behaviour, dry_run, editor, available_locales = []) tmp_csv = nil Tempfile.open('alaveteli') do |f| f.write csv tmp_csv = f end PublicBody.import_csv_from_file(tmp_csv.path, tag, tag_behaviour, dry_run, editor, available_locales) end # Import from a CSV file. # Just tests things and returns messages if dry_run is true. # Returns an array of [array of errors, array of notes]. If there # are errors, always rolls back (as with dry_run). def self.import_csv_from_file(csv_filename, tag, tag_behaviour, dry_run, editor, available_locales = []) errors = [] notes = [] begin ActiveRecord::Base.transaction do # Use the default locale when retrieving existing bodies; otherwise # matching names won't work afterwards, and we'll create new bodies instead # of updating them bodies_by_name = {} set_of_existing = Set.new internal_admin_body_id = PublicBody.internal_admin_body.id I18n.with_locale(I18n.default_locale) do bodies = (tag.nil? || tag.empty?) ? 
PublicBody.find(:all, :include => :translations) : PublicBody.find_by_tag(tag) for existing_body in bodies # Hide InternalAdminBody from import notes next if existing_body.id == internal_admin_body_id bodies_by_name[existing_body.name] = existing_body set_of_existing.add(existing_body.name) end end set_of_importing = Set.new # Default values in case no field list is given field_names = { 'name' => 1, 'request_email' => 2 } line = 0 import_options = {:field_names => field_names, :available_locales => available_locales, :tag => tag, :tag_behaviour => tag_behaviour, :editor => editor, :notes => notes, :errors => errors } CSV.foreach(csv_filename) do |row| line = line + 1 # Parse the first line as a field list if it starts with '#' if line==1 and row.first.to_s =~ /^#(.*)$/ row[0] = row[0][1..-1] # Remove the # sign on first field row.each_with_index {|field, i| field_names[field] = i} next end fields = {} field_names.each{ |name, i| fields[name] = row[i] } yield line, fields if block_given? name = row[field_names['name']] email = row[field_names['request_email']] next if name.nil? name.strip! email.strip! unless email.nil? if !email.nil? && !email.empty? 
&& !MySociety::Validate.is_valid_email(email) errors.push "error: line #{line.to_s}: invalid email '#{email}' for authority '#{name}'" next end public_body = bodies_by_name[name] || PublicBody.new(:name => "", :short_name => "", :request_email => "") public_body.import_values_from_csv_row(row, line, name, import_options) set_of_importing.add(name) end # Give an error listing ones that are to be deleted deleted_ones = set_of_existing - set_of_importing if deleted_ones.size > 0 notes.push "Notes: Some " + tag + " bodies are in database, but not in CSV file:\n " + Array(deleted_ones).sort.join("\n ") + "\nYou may want to delete them manually.\n" end # Rollback if a dry run, or we had errors if dry_run or errors.size > 0 raise ImportCSVDryRun end end rescue ImportCSVDryRun # Ignore end return [errors, notes] end def self.localized_csv_field_name(locale, field_name) (locale.to_s == I18n.default_locale.to_s) ? field_name : "#{field_name}.#{locale}" end # import values from a csv row (that may include localized columns) def import_values_from_csv_row(row, line, name, options) is_new = new_record? edit_info = if is_new { :action => "creating new authority", :comment => 'Created from spreadsheet' } else { :action => "updating authority", :comment => 'Updated from spreadsheet' } end locales = options[:available_locales] locales = [I18n.default_locale] if locales.empty? locales.each do |locale| I18n.with_locale(locale) do changed = set_locale_fields_from_csv_row(is_new, locale, row, options) unless changed.empty? options[:notes].push "line #{ line }: #{ edit_info[:action] } '#{ name }' (locale: #{ locale }):\n\t#{ changed.to_json }" self.last_edit_comment = edit_info[:comment] self.publication_scheme = publication_scheme || "" self.last_edit_editor = options[:editor] begin save! 
rescue ActiveRecord::RecordInvalid errors.full_messages.each do |msg| options[:errors].push "error: line #{ line }: #{ msg } for authority '#{ name }'" end next end end end end end # Sets attribute values for a locale from a csv row def set_locale_fields_from_csv_row(is_new, locale, row, options) changed = ActiveSupport::OrderedHash.new csv_field_names = options[:field_names] csv_import_fields.each do |field_name, field_notes| localized_field_name = self.class.localized_csv_field_name(locale, field_name) column = csv_field_names[localized_field_name] value = column && row[column] # Tags are a special case, as we support adding to the field, not just setting a new value if field_name == 'tag_string' new_tags = [value, options[:tag]].select{ |new_tag| !new_tag.blank? } if new_tags.empty? value = nil else value = new_tags.join(" ") value = "#{value} #{tag_string}"if options[:tag_behaviour] == 'add' end end if value and read_attribute_value(field_name, locale) != value if is_new changed[field_name] = value else changed[field_name] = "#{read_attribute_value(field_name, locale)}: #{value}" end assign_attributes({ field_name => value }) end end changed end # Does this user have the power of FOI officer for this body? def is_foi_officer?(user) user_domain = user.email_domain our_domain = request_email_domain return false if user_domain.nil? or our_domain.nil? our_domain == user_domain end def foi_officer_domain_required request_email_domain end def request_email if AlaveteliConfiguration::override_all_public_body_request_emails.blank? || read_attribute(:request_email).blank? read_attribute(:request_email) else AlaveteliConfiguration::override_all_public_body_request_emails end end # Domain name of the request email def request_email_domain PublicBody.extract_domain_from_email(request_email) end # Return the domain part of an email address, canonicalised and with common # extra UK Government server name parts removed. 
def self.extract_domain_from_email(email) email =~ /@(.*)/ if $1.nil? return nil end # take lower case ret = $1.downcase # remove special email domains for UK Government addresses ret.sub!(".gsi.", ".") ret.sub!(".x.", ".") ret.sub!(".pnn.", ".") return ret end def reverse_sorted_versions versions.sort { |a,b| b.version <=> a.version } end def sorted_versions versions.sort { |a,b| a.version <=> b.version } end def has_notes? !notes.nil? && notes != "" end def notes_as_html notes end def notes_without_html # assume notes are reasonably behaved HTML, so just use simple regexp on this @notes_without_html ||= (self.notes.nil? ? '' : self.notes.gsub(/<\/?[^>]*>/, "")) end def json_for_api { :id => id, :url_name => url_name, :name => name, :short_name => short_name, # :request_email # we hide this behind a captcha, to stop people # doing bulk requests easily :created_at => created_at, :updated_at => updated_at, # don't add the history as some edit comments contain sensitive # information # :version, :last_edit_editor, :last_edit_comment :home_page => calculated_home_page, :notes => notes, :publication_scheme => publication_scheme, :tags => tag_array, } end def purge_in_cache info_requests.each { |x| x.purge_in_cache } end def self.where_clause_for_stats(minimum_requests, total_column) # When producing statistics for public bodies, we want to # exclude any that are tagged with 'test' - we use a # sub-select to find the IDs of those public bodies. test_tagged_query = "SELECT model_id FROM has_tag_string_tags" \ " WHERE model = 'PublicBody' AND name = 'test'" "#{total_column} >= #{minimum_requests} AND id NOT IN (#{test_tagged_query})" end # Return data for the 'n' public bodies with the highest (or # lowest) number of requests, but only returning data for those # with at least 'minimum_requests' requests. 
def self.get_request_totals(n, highest, minimum_requests)
  ordering = highest ? "info_requests_count DESC" : "info_requests_count"
  where_clause = where_clause_for_stats(minimum_requests, 'info_requests_count')

  public_bodies = PublicBody.order(ordering).where(where_clause).limit(n)
  # presumably the consumer wants ascending order even for "highest" —
  # the DESC query result is flipped here; confirm against the chart code
  public_bodies.reverse! if highest

  totals = public_bodies.map { |pb| pb.info_requests_count }

  { 'public_bodies' => public_bodies,
    'y_values' => totals,
    'y_max' => totals.max,
    'totals' => totals }
end

# Return data for the 'n' public bodies with the highest (or
# lowest) score according to the metric of the value in 'column'
# divided by the total number of requests, expressed as a
# percentage. This only returns data for those public bodies with
# at least 'minimum_requests' requests.
def self.get_request_percentages(column, n, highest, minimum_requests)
  total_column = "info_requests_visible_classified_count"
  ordering = highest ? "y_value DESC" : "y_value"
  y_value_column = "(cast(#{column} as float) / #{total_column})"

  where_clause = where_clause_for_stats(minimum_requests, total_column)
  where_clause += " AND #{column} IS NOT NULL"

  public_bodies = PublicBody.select("*, #{y_value_column} AS y_value").order(ordering).where(where_clause).limit(n)
  public_bodies.reverse! if highest

  y_values = public_bodies.map { |pb| pb.y_value.to_f }
  original_values = public_bodies.map { |pb| pb.send(column) }

  # If these are all nil, then probably the values have never been set;
  # some have to be set by a rake task. In that case just return nil:
  return nil if original_values.all?(&:nil?)

  original_totals = public_bodies.map { |pb| pb.send(total_column) }

  # Confidence intervals, expressed as offsets below/above the proportion
  # (ci_bounds is defined elsewhere in this class — assumed to return
  # [lower, upper] bounds for the given count, total and alpha).
  cis_below = []
  cis_above = []
  original_totals.each_with_index do |total, i|
    lower_ci, higher_ci = ci_bounds(original_values[i], total, 0.05)
    cis_below << (y_values[i] - lower_ci)
    cis_above << (higher_ci - y_values[i])
  end

  # Turn the y values and confidence interval offsets into percentages:
  [y_values, cis_below, cis_above].each { |list| list.map! { |v| 100 * v } }

  { 'public_bodies' => public_bodies,
    'y_values' => y_values,
    'cis_below' => cis_below,
    'cis_above' => cis_above,
    'y_max' => 100,
    'totals' => original_totals }
end

# Example public bodies for the front page: either the short names
# configured in frontpage_publicbody_examples, or (slowly) the 32 most
# requested-from visible bodies.
def self.popular_bodies(locale)
  body_short_names = AlaveteliConfiguration::frontpage_publicbody_examples.split(/\s*;\s*/)
  underscore_locale = locale.gsub '-', '_'
  conditions = ['public_body_translations.locale = ?', underscore_locale]

  bodies = []
  I18n.with_locale(locale) do
    if body_short_names.empty?
      # This is too slow
      bodies = visible.find(:all,
                            :order => "info_requests_count desc",
                            :limit => 32,
                            :conditions => conditions,
                            :joins => :translations)
    else
      conditions[0] += " and public_bodies.url_name in (?)"
      conditions << body_short_names
      bodies = find(:all, :conditions => conditions, :joins => :translations)
    end
  end
  bodies
end

# Methods to privatise
# --------------------------------------------------------------------------

# TODO: This could be removed by updating the default value (to '') of the
# `publication_scheme` column in the `public_body_translations` table.
# # TODO: Can't actually deprecate this because spec/script/mailin_spec.rb:28 # fails due to the deprecation notice output def set_default_publication_scheme # warn %q([DEPRECATION] PublicBody#set_default_publication_scheme will # become a private method in 0.23).squish # Make sure publication_scheme gets the correct default value. # (This would work automatically, were publication_scheme not a # translated attribute) self.publication_scheme = "" if publication_scheme.nil? end # if the URL name has changed, then all requested_from: queries # will break unless we update index for every event for every # request linked to it # # TODO: Can't actually deprecate this because spec/script/mailin_spec.rb:28 # fails due to the deprecation notice output def reindex_requested_from # warn %q([DEPRECATION] PublicBody#reindex_requested_from will become a # private method in 0.23).squish if changes.include?('url_name') info_requests.each do |info_request| info_request.info_request_events.each do |info_request_event| info_request_event.xapian_mark_needs_index end end end end # Methods to remove # -------------------------------------------------------------------------- # Set the first letter on a public body or translation def self.set_first_letter(instance) warn %q([DEPRECATION] PublicBody.set_first_letter will be removed in 0.23).squish unless instance.name.nil? or instance.name.empty? # we use a regex to ensure it works with utf-8/multi-byte first_letter = Unicode.upcase instance.name.scan(/^./mu)[0] if first_letter != instance.first_letter instance.first_letter = first_letter end end end def calculate_cached_fields(t) warn %q([DEPRECATION] PublicBody#calculate_cached_fields will be removed in 0.23).squish PublicBody.set_first_letter(t) short_long_name = t.name short_long_name = t.short_name if t.short_name and !t.short_name.empty? 
t.url_name = MySociety::Format.simplify_url_part(short_long_name, 'body') end private # Read an attribute value (without using locale fallbacks if the attribute is translated) def read_attribute_value(name, locale) if self.class.translates.include?(name.to_sym) if globalize.stash.contains?(locale, name) globalize.stash.read(locale, name) else translation_for(locale).send(name) end else send(name) end end def request_email_if_requestable # Request_email can be blank, meaning we don't have details if self.is_requestable? unless MySociety::Validate.is_valid_email(self.request_email) errors.add(:request_email, "Request email doesn't look like a valid email address") end end end end
require "date"

# An instrument reservation: the reserved time window
# (reserve_start_at/reserve_end_at), the actual usage window
# (actual_start_at/actual_end_at) and the link back to the purchase via
# order_detail.
#
# NOTE(review): the source contained two consecutive, near-identical copies of
# this class separated by stray non-Ruby text ("comment issue #79127", a load
# error). The copies differed only by one clarifying comment in
# #in_grace_period?; collapsed to a single definition keeping that comment.
class Reservation < ActiveRecord::Base
  include DateHelper

  include Reservations::DateSupport
  include Reservations::Validations
  include Reservations::Rendering
  include Reservations::RelaySupport
  include Reservations::MovingUp

  # Associations
  #####
  belongs_to :product
  belongs_to :order_detail, :inverse_of => :reservation
  belongs_to :canceled_by_user, :foreign_key => :canceled_by, :class_name => 'User'

  ## Virtual attributes
  #####

  # Represents a reservation time that is unavailable, but is not an admin
  # reservation. Used by timeline view.
  attr_accessor :blackout
  attr_writer :note

  # used for overriding certain restrictions
  attr_accessor :reserved_by_admin

  # Delegations
  #####
  # NOTE(review): :account is delegated both here (to order_detail) and just
  # below (to order); the later delegation wins — confirm this is intentional.
  delegate :note, :ordered_on_behalf_of?, :complete?, :account, :order,
           :to => :order_detail, :allow_nil => true

  delegate :user, :account, :to => :order, :allow_nil => true
  delegate :facility, :to => :product, :allow_nil => true
  delegate :owner, :to => :account, :allow_nil => true

  ## AR Hooks
  after_save :save_note
  after_update :auto_save_order_detail, :if => :order_detail

  # Scopes
  #####

  # Uncancelled reservations attached to a purchased (or admin/no) order
  def self.active
    not_cancelled.
      where("(orders.state = 'purchased' OR orders.state IS NULL)").
      joins_order
  end

  def self.joins_order
    joins('LEFT JOIN order_details ON order_details.id = reservations.order_detail_id').
      joins('LEFT JOIN orders ON orders.id = order_details.order_id')
  end

  # Admin reservations have no order detail
  def self.admin
    where(:order_detail_id => nil)
  end

  def self.not_cancelled
    where(:canceled_at => nil)
  end

  def self.not_started
    where(:actual_start_at => nil)
  end

  def self.not_ended
    where(:actual_end_at => nil)
  end

  # Everything except +reservation+ (everything, if it is unsaved)
  def self.not_this_reservation(reservation)
    if reservation.id
      where('reservations.id <> ?', reservation.id)
    else
      scoped
    end
  end

  def self.today
    for_date(Time.zone.now)
  end

  def self.for_date(date)
    in_range(date.beginning_of_day, date.end_of_day)
  end

  # Reservations overlapping the [start_time, end_time) window
  def self.in_range(start_time, end_time)
    where('reserve_end_at >= ?', start_time).
      where('reserve_start_at < ?', end_time)
  end

  # Purchased (or order-less), uncancelled reservations ending after +t+.
  def self.upcoming(t = Time.zone.now)
    # If this is a named scope differences emerge between Oracle & MySQL on
    # #reserve_end_at querying. Eliminate by letting Rails filter by
    # #reserve_end_at
    reservations = find(:all,
                        :conditions => "reservations.canceled_at IS NULL AND (orders.state = 'purchased' OR orders.state IS NULL)",
                        :order => 'reserve_end_at asc',
                        :joins => ['LEFT JOIN order_details ON order_details.id = reservations.order_detail_id',
                                   'LEFT JOIN orders ON orders.id = order_details.order_id'])
    reservations.delete_if { |r| r.reserve_end_at < t }
    reservations
  end

  # Reservations whose window touches [start_at, end_at] in any way
  # (contains, contained-by, straddles either edge, or exactly equal).
  def self.overlapping(start_at, end_at)
    # remove millisecond precision from time
    tstart_at = Time.zone.parse(start_at.to_s)
    tend_at = Time.zone.parse(end_at.to_s)

    where("((reserve_start_at <= :start AND reserve_end_at >= :end) OR (reserve_start_at >= :start AND reserve_end_at <= :end) OR (reserve_start_at <= :start AND reserve_end_at > :start) OR (reserve_start_at < :end AND reserve_end_at >= :end) OR (reserve_start_at = :start AND reserve_end_at = :end))",
          :start => tstart_at, :end => tend_at)
  end

  # Instance Methods
  #####

  # Stamp the actual start time; when starting early (inside the grace
  # period) also pull the reserved window forward so nothing can overlap.
  def start_reservation!
    self.actual_start_at = Time.zone.now
    start_in_grace_period if in_grace_period?
    save!
  end

  def end_reservation!
    self.actual_end_at = Time.zone.now
    save!
    # reservation is done, now give the best price
    order_detail.assign_price_policy
    order_detail.save!
  end

  def round_reservation_times
    self.reserve_start_at = time_ceil(reserve_start_at) if reserve_start_at
    self.reserve_end_at = time_ceil(reserve_end_at) if reserve_end_at
  end

  def assign_actuals_off_reserve
    self.actual_start_at ||= reserve_start_at
    self.actual_end_at ||= reserve_end_at
  end

  # Save with relaxed validations when +user+ operates the facility,
  # otherwise with the extended (customer-facing) validations.
  def save_as_user(user)
    if user.operator_of?(product.facility)
      @reserved_by_admin = true
      save
    else
      @reserved_by_admin = false
      save_extended_validations
    end
  end

  def save_as_user!(user)
    raise ActiveRecord::RecordInvalid.new(self) unless save_as_user(user)
  end

  def admin?
    order.nil? && !blackout?
  end

  def blackout?
    blackout.present?
  end

  # May the customer start now, before reserve_start_at? Only inside the
  # grace period, and only when no other reservation on this instrument is
  # in progress or scheduled between now and our start time.
  def can_start_early?
    return false unless in_grace_period?
    # no other reservation ongoing; no res between now and reserve_start;
    where = <<-SQL
      reserve_start_at > ?
      AND reserve_start_at < ?
      AND actual_start_at IS NULL
      AND reservations.product_id = ?
      AND (order_detail_id IS NULL OR order_details.state = 'new' OR order_details.state = 'inprocess')
    SQL
    Reservation.joins(:order_detail).where(where, Time.zone.now, reserve_start_at, product_id).first.nil?
  end

  def cancelled?
    !canceled_at.nil?
  end

  # can the CUSTOMER cancel the order
  def can_cancel?
    canceled_at.nil? && reserve_start_at > Time.zone.now && actual_start_at.nil? && actual_end_at.nil?
  end

  def can_customer_edit?
    !cancelled? && !complete? && reserve_start_at > Time.zone.now
  end

  # can the ADMIN edit the reservation?
  def can_edit?
    return true if id.nil? # object is new and hasn't been saved to the DB successfully

    # an admin can edit the reservation times as long as the reservation has
    # not been cancelled, even if it is in the past.
    !cancelled?
  end

  # TODO: does this need to be more robust?
  def can_edit_actuals?
    return false if order_detail.nil?
    complete?
  end

  def reservation_changed?
    changes.any? { |k, _v| k == 'reserve_start_at' || k == 'reserve_end_at' }
  end

  def valid_before_purchase?
    satisfies_minimum_length? &&
      satisfies_maximum_length? &&
      instrument_is_available_to_reserve? &&
      does_not_conflict_with_other_reservation?
  end

  def has_actuals?
    !!(actual_start_at && actual_end_at)
  end

  # A relay-controlled reservation that ran without actual times recorded
  def requires_but_missing_actuals?
    !!(!cancelled? && product.control_mechanism != Relay::CONTROL_MECHANISMS[:manual] && !has_actuals?)
  end

  protected

  def has_order_detail?
    !order_detail.nil?
  end

  private

  # Keep the order detail in sync whenever any time column changed
  def auto_save_order_detail
    if (['actual_start_at', 'actual_end_at', 'reserve_start_at', 'reserve_end_at'] & changes.keys).any?
      order_detail.save
    end
  end

  # Notes live on the order detail; persist any note set via attr_writer
  def save_note
    if order_detail && @note
      order_detail.note = @note
      order_detail.save
    end
  end

  def in_grace_period?(at = Time.zone.now)
    at = at.to_i
    grace_period_end = reserve_start_at.to_i
    grace_period_begin = (reserve_start_at - grace_period_duration).to_i

    # Compare int values, not timestamps. If you do the
    # latter fractions of a second can cause false positives.
    at >= grace_period_begin && at <= grace_period_end
  end

  def grace_period_duration
    SettingsHelper.setting('reservations.grace_period') || 5.minutes
  end

  def start_in_grace_period
    # Move the reservation time forward so other reservations can't overlap
    # with this one, but only move it forward if there's not already a reservation
    # currently in progress.
    original_start_at = reserve_start_at
    self.reserve_start_at = actual_start_at
    unless does_not_conflict_with_other_reservation?
      self.reserve_start_at = original_start_at
    end
  end
end
require 'RMagick' class Spree::Board < ActiveRecord::Base belongs_to :designer, :class_name => "User", :foreign_key => "designer_id" belongs_to :room, :foreign_key => "room_id", :class_name => "Spree::Taxon" belongs_to :style, :foreign_key => "style_id", :class_name => "Spree::Taxon" has_many :board_products, :order => "z_index", dependent: :destroy has_many :products, :through => :board_products has_many :color_matches has_many :colors, :through => :color_matches has_many :conversations, as: :conversationable, class_name: "::Mailboxer::Conversation" has_and_belongs_to_many :promotion_rules, class_name: '::Spree::PromotionRule', join_table: 'spree_boards_promotion_rules', foreign_key: 'board_id' has_many :promotions, :through => :promotion_rules has_one :board_image, as: :viewable, order: :position, dependent: :destroy, class_name: "Spree::BoardImage" has_one :conversation, :class_name => "Mailboxer::Conversation" has_one :portfolio, dependent: :destroy has_many :questions, dependent: :destroy has_many :board_favorites, dependent: :destroy has_many :invoice_lines has_many :board_histories, dependent: :destroy extend FriendlyId friendly_id :slug_candidates, use: :slugged #friendly_id [:name, :room_style, :room_type], use: :slugged def slug_candidates [ [:name, :room_style, :room_type] ] end # state machine audit trail requires that there are fields on the model being audited. We're creating them virtually since they don't need to be persisted here. 
attr_accessor :state_message attr_accessor :transition_user_id #attr_accessible :board_image_attributes accepts_nested_attributes_for :board_image is_impressionable validates_presence_of :name after_save :update_product_publish_status before_save :cache_style_and_room_type default_scope { where("#{Spree::Board.quoted_table_name}.deleted_at IS NULL or #{Spree::Board.quoted_table_name}.deleted_at >= ?", Time.zone.now) } state_machine :state, :initial => :new do store_audit_trail :context_to_log => [:state_message, :transition_user_id] after_transition :on => [:publish, :request_designer_revision], :do => :remove_marked_products after_transition :on => :publish, :do => :update_state_published event :submit_for_publication do transition all => :submitted_for_publication, :in_revision => :submitted_for_publication end event :request_designer_revision do transition all => :in_revision end event :publish do transition all => :published end event :suspend do transition all => :suspended end event :delete do transition all => :deleted end state :new, :in_revision, :submitted_for_publication do def draft? true end end state :published do def published? true end end end def is_favorite?(user) self.board_favorites.find_by(user_id: user.id) ? 
true : false end def update_state_published self.update(status: 'published') end def set_state_transition_context(message, user) self.state_message = message self.transition_user_id = user.id end def remove_marked_products Rails.logger.info "---------------" Rails.logger.info "---------------" Rails.logger.info "---------------" Rails.logger.info "----- REMOVE MARKED PRODUCTS -----" Rails.logger.info "---------------" Rails.logger.info "---------------" Rails.logger.info "---------------" delete_removed_board_products delete_deleted_board_products self.queue_image_generation end #def handle_publication # self.update_attributes!({status: "published"}, without_protection: true ) # delete_removed_board_products # delete_marked_products #end def handle_deletion #self.update_attributes!({status: "deleted"}, without_protection: true ) #delete_removed_board_products #delete_marked_products #self.destroy end def delete_removed_board_products self.board_products.marked_for_removal.each(&:destroy) end def delete_deleted_board_products self.board_products.marked_for_deletion.collect(&:product).compact.each(&:destroy) self.board_products.marked_for_deletion.each(&:destroy) end def update_submitted_for_publication_status self.update_attributes!({status: "submitted_for_publication"}, without_protection: true) end def process_revision_request self.update_attributes!({current_state_label: "needs revision", status: "needs_revision"}, without_protection: true) delete_removed_board_products delete_marked_products end def remove_all_products self.board_products.each(&:destroy!) end def update_product_publish_status if self.status =="published" self.products.update_all(:is_published => 1) else self.products.each do |product| if product.available_sans_board == true product.update_attribute("is_published", 1) else product.update_attribute("is_published", 0) end end end end # use deleted? rather than checking the attribute directly. this # allows extensions to override deleted? 
if they want to provide # their own definition. def deleted? !!deleted_at end def self.active where(:status => 'published') end def self.featured #where(:featured => 1) where("featured_starts_at <= ? and featured_expires_at >= ?", Date.today, Date.today) end def self.promoted includes(promotion_rules: [:promotion]).where("spree_promotions.starts_at <= ? and spree_promotions.expires_at >= ?", Date.today, Date.today) end def currently_promoted? self.current_promotion end def current_promotion p = self.promotions.where("spree_promotions.starts_at <= ? and spree_promotions.expires_at >= ?", Date.today, Date.today) p.empty? ? nil : p.first end def room_and_style rs = [] rs << self.room.name if self.room.present? rs << self.style.name if self.style.present? rs.join(", ") end def self.draft where(:status => ["new"]) end def self.pending where(:status => ["submitted_for_publication", "needs_revision"]) end def self.published where(:status => ["published"]) end def display_short_status case self.state when "new" "Draft" when "submitted_for_publication" "Pending" when "published" "Published" when "suspended" "Suspended" when "deleted" "Deleted" when "unpublished" "Unpublished" when "retired" "Retired" when "in_revision" "In Revision" else "N/A" end end def display_status case self.state when "new" "Draft - Not Published" when "submitted_for_publication" "Pending - Submitted for Publication" when "published" "Published" when "suspended" "Suspended" when "deleted" "Deleted" when "unpublished" "Unpublished" when "retired" "Retired" when "in_revision" "Pending - Revisions Requested" else "status not available" end end def is_dirty? 
self.is_dirty end def self.available_room_taxons ["Living Room", "Dining Room", "Bedroom", "Outdoor Living", "Home Office", "Kids Room", "Kitchen", "Foyer","Bathroom"] end def self.by_style(style_id) where(:style_id => style_id) end def self.exclude_self(board_id) where("id <> #{board_id}") end def self.by_room(room_id) where(:room_id => room_id) end def self.by_color_family(color_family) related_colors = Spree::Color.by_color_family(color_family) includes(:colors).where('spree_colors.id' => related_colors.collect { |color| color.id }) end def self.status_options [["Draft - Not Published", "new"], ["Pending - Submitted for Publication", "submitted_for_publication"], ["Published", "published"], ["Suspended", "suspended"], ["Deleted", "deleted"], ["Unpublished", "unpublished"], ["Retired", "retired"], ["Pending - Revisions Requested", "needs_revision"]] end def self.color_categories ["Blue", "Cool Neutral", "Green", "Orange", "Red", "Violet", "Warm Neutral", "White", "Yellow"].sort end scope :by_color, (lambda do |color| joins(:color_matches).where('spree_color_matches.color_id = ?', color.id) unless color.nil? end) def self.by_designer(designer_id) where(:designer_id => designer_id) end def self.by_lower_bound_price(price) includes(:products).where('spree_products.id' => Spree::Product.master_price_gte(price).collect { |color| color.id }) #includes(:products).where('spree_products.master_price > ?', price) #joins(:products).merge(Spree::Product.master_price_gte(price)) end def self.by_upper_bound_price(price) includes(:products).where('spree_products.id' => Spree::Product.master_price_lte(price).collect { |color| color.id }) #includes(:products).where('spree_products.master_price < ?', price) #joins(:products).merge(Spree::Product.master_price_lte(price)) end def self.render_taxon_select(taxon, subsubcategory) taxon.children.each do |child_taxon| subsubcategory << [child_taxon.name, child_taxon.id] if child_taxon.children.present? 
render_taxon_select(child_taxon, []) end end return subsubcategory end def other_designer_boards self.designer.boards.where(status: "published").where.not(id: self.id) end def related_boards boards_scope = Spree::Board.active boards_scope = boards_scope.exclude_self(self.id) #unless self.color_family.blank? # #@boards_scope = @boards_scope.by_color_family(self.color_family) #end unless self.room_id.blank? boards_scope = boards_scope.by_room(self.room_id) end unless self.style_id.blank? boards_scope = boards_scope.by_style(self.style_id) end boards_scope end def queue_image_generation if !self.dirty_at or self.dirty_at < 10.seconds.ago self.update_attribute("dirty_at", Time.now) #self.delay(run_at: 3.seconds.from_now).generate_image self.generate_image end # the board is marked as dirty whenever it is added to the delayed job queue. That way we don't have to make countless updates but instead can just queue them all up # so skip this if it is already dirty...that means it has already been added to the queue # unless self.is_dirty? # self.update_attribute("is_dirty",1) # self.delay(run_at: 3.seconds.from_now).generate_image # #self.generate_image # end end def self.generate_brands(searcher = nil) suppliers_tab = [] if searcher.present? and searcher.solr_search.present? and searcher.solr_search.facet(:brands).present? and searcher.solr_search.facet(:brands).rows.present? searcher.solr_search.facet(:brands).rows.each do |supp| supplier = Spree::Supplier.where(name: supp.value).first if supplier.present? 
suppliers_tab << [supplier.name, supplier.id] end end end return suppliers_tab end def generate_image white_canvas = ::Magick::Image.new(630, 360) { self.background_color = "white" } self.board_products(:order => "z_index asc").includes(:product => {:master => [:images]}).reload.collect file = nil self.board_products.each do |bp| top_left_x, top_left_y = bp.top_left_x, bp.top_left_y if bp.height == 0 bp.height = 5 bp.width = 5 * bp.width end if bp.width == 0 bp.width == 5 bp.height == 5 * bp.height end if bp.present? and bp.product.present? product_image = bp.product.image_for_board(bp) elsif bp.custom_item.present? product_image = bp.custom_item.custom_image_for_board(bp) else product_image ="" end if product_image.present? # set the rotation product_image.rotate!(bp.rotation_offset) # if turned sideways, then swap the width and height when scaling if [90, 270].include?(bp.rotation_offset) product_image.scale!(bp.height, bp.width) top_left_x = bp.center_point_x - bp.height/2 top_left_y = bp.center_point_y - bp.width/2 # original width and height work if it is just rotated 180 else product_image.scale!(bp.width, bp.height) top_left_x = bp.center_point_x - bp.width/2 top_left_y = bp.center_point_y - bp.height/2 end white_canvas.composite!(product_image, ::Magick::NorthWestGravity, top_left_x, top_left_y, ::Magick::OverCompositeOp) end white_canvas.format = 'jpeg' file = Tempfile.new("room_#{self.id}.jpg") white_canvas.write(file.path) end #self.board_image.destroy if self.board_image self.build_board_image if self.board_image.blank? 
#self.board_image.reload self.board_image.attachment = file self.board_image.save # set it to be clean again #self.is_dirty = 0 self.dirty_at = nil self.save end def cache_style_and_room_type self.room_type = self.room.name.parameterize if self.room self.room_style = self.style.name.parameterize if self.style end def destroy self.board_products.destroy_all self.update_attribute('deleted_at', Time.zone.now) end def designer_name "#{self.designer.first_name} #{self.designer.last_name}" end def coded_designer_name "#{self.designer.first_name.downcase}_#{self.designer.last_name.downcase}" end def to_url "https://scoutandnimble.com/rooms/#{self.id}" end def create_or_update_board_product(params,board_id,email) Resque.enqueue_at(4.days.from_now,RoomSavedButNotPublishedEmail, board_id) if !email # if params[:products_board].present? # Rails.logger.info params[:products_board] # board_products = JSON.parse(params[:products_board]) # # board_products.each do |_, product_hash| # if product_hash['action_board'] == 'update' # board_product = self.board_products.where(id: product_hash['product_id']).first # if board_product.present? # Spree::BoardHistory.create(user_id: board_product.board.designer.id, board_id: board_product.board_id, action: "update_product|#{board_product.product.present? ? board_product.product.name : board_product.custom_item.name}") # if product_hash['image'].present? # crop_image(product_hash['image'], board_product) # end # attr = product_hash.except!('action_board', 'board_id', 'product_id', 'image') # board_product.update(attr) # end # elsif product_hash['action_board'] == 'create' # product = Spree::Product.where(id: product_hash['product_id']).first # if product.present? 
# image = product_hash['image'] # attr = product_hash.except!('action_board', 'product_id', 'image') # board_product = product.board_products.new(attr) # if board_product.save # Spree::BoardHistory.create(user_id: board_product.board.designer.id, board_id: board_product.board_id, action: "new_product|#{board_product.product.name}") # if image.present? # crop_image(image, board_product) # end # board_product.update(z_index: product_hash['z_index']) # end # else # puts 'nie istnieje' # puts 'nie istnieje' # puts 'nie istnieje' # custom = Spree::CustomItem.find(product_hash['custom_item_id']) # puts custom.inspect # if custom.present? # image = product_hash['image'] # attr = product_hash.except!('action_board', 'product_id', 'image') # board_product = Spree::BoardProduct.new(attr) # if board_product.save # Spree::BoardHistory.create(user_id: board_product.board.designer.id, board_id: board_product.board_id, action: "new_product|#{board_product.custom_item.name}") # if image.present? # crop_image(image, board_product) # end # board_product.update(z_index: product_hash['z_index']) # end # puts "END " # puts "END " # puts "END " # end # end # end # end # end end def crop_image(base64, board_product) data = Base64.decode64(base64['data:image/png;base64,'.length .. -1]) file_img = File.new("#{Rails.root}/public/somefilename#{DateTime.now.to_i + rand(1000)}.png", 'wb') file_img.write data if board_product.update({photo: file_img, image_id: ''}) File.delete(file_img) end end def send_revision_request_email(message_content="") html_content = "Hi #{self.designer.full_name}, <br /> Your room, \"#{self.name}\" has been reviewed and needs revision before publishing. Please visit the <a href=\"#{self.to_url}/design\">design page</a> to make any revisions. 
" m = Mandrill::API.new(MANDRILL_KEY) message = { :subject => "Your room status has changed: needs revision", :from_name => "Scout & Nimble", :text => "#{message_content} \n\n The Scout & Nimble Team", :to => [ { :email => self.designer.email, :name => self.designer.full_name } ], :from_email => "designer@scoutandnimble.com", :track_opens => true, :track_clicks => true, :url_strip_qs => false, :signing_domain => "scoutandnimble.com" } sending = m.messages.send_template('simple-template', [{:name => 'main', :content => html_content}, {:name => 'extra-message', :content => message_content}], message, true) logger.info sending end def send_email_according_to_board(html_content,subject,from_name,text,template) html_content = html_content m = Mandrill::API.new(MANDRILL_KEY) message = { :subject => subject, :from_name => from_name, :text => "#{text} \n\n The Scout & Nimble Team", :to => [ { :email => self.designer.email, :name => self.designer.full_name } ], :from_email => "designer@scoutandnimble.com", :track_opens => true, :track_clicks => true, :url_strip_qs => false, :signing_domain => "scoutandnimble.com" } sending = m.messages.send_template(template, [{:name => 'main', :content => html_content}, {:name => 'extra-message', :content => text}], message, true) logger.info sending end def calculate_tax designer=self.designer.designer_registrations.first if designer.present? 
dest_state = Spree::State.find(self.state_id) origin=::TaxCloud::Address.new(address1: designer.address1 , city: designer.city, zip5: designer.postal_code, state: designer.state) destination=::TaxCloud::Address.new(address1: self.customer_address, city: self.customer_city, zip5: self.customer_zipcode, state: dest_state.abbr) transaction = ::TaxCloud::Transaction.new(customer_id: 102, order_id: 12, cart_id: 12,origin: origin, destination: destination) self.board_products.each_with_index do |item,index| transaction.cart_items << get_item_data_for_tax(item,index) end response = transaction.lookup end end def get_item_data_for_tax(item,index) ::TaxCloud::CartItem.new( index: index, # item_id: item.get_item_data('name')[0...50], item_id: item.get_item_data('sku'), tic: Spree::Config.taxcloud_shipping_tic, price: item.get_item_data('cost'), quantity: 1 ) end def send_email_with_invoice(to_addr,to_name,pdf) html_content = '' m = Mandrill::API.new(MANDRILL_KEY) colors = [] products = [] self.colors.each do |c| colors << {:r => c.rgb_r, :g => c.rgb_g,:b => c.rgb_b, :name => c.name, :swatch_val => c.swatch_val} end products = [] self.board_products.each do |bp| if bp.product.present? 
products << {:img => bp.product.images.first.attachment.url, :name => bp.get_item_data('name'), :cost => bp.get_item_data('cost')} else products << {:img => bp.custom_item.image(:original), :name => bp.get_item_data('name'), :cost => bp.get_item_data('cost')} end end message = { :subject => self.name, :from_name => "INVOICE", :text => "INVOICE", :to => [ { :email => to_addr, :name => to_name } ], :from_email => to_addr, :track_opens => true, :track_clicks => true, :url_strip_qs => false, :signing_domain => "scoutandnimble.com", :merge_language => "handlebars", :attachments => [ { :type => "pdf", :name => "invoice.pdf", :content => Base64.encode64(pdf) } ], :merge_vars => [ { :rcpt => to_addr, :vars => [ { :name => "boardimage", :content => self.board_image.attachment(:original)#.split('?')[0] }, { :name => "colors", :content => colors }, { :name => "products", :content => products }, { :name => "notes", :content => self.description } ] } ] } sending = m.messages.send_template('invoice-email', [{:name => 'main', :content => html_content}], message, true) end end fixed bad merge again require 'RMagick' class Spree::Board < ActiveRecord::Base belongs_to :designer, :class_name => "User", :foreign_key => "designer_id" belongs_to :room, :foreign_key => "room_id", :class_name => "Spree::Taxon" belongs_to :style, :foreign_key => "style_id", :class_name => "Spree::Taxon" has_many :board_products, :order => "z_index", dependent: :destroy has_many :products, :through => :board_products has_many :color_matches has_many :colors, :through => :color_matches has_many :conversations, as: :conversationable, class_name: "::Mailboxer::Conversation" has_and_belongs_to_many :promotion_rules, class_name: '::Spree::PromotionRule', join_table: 'spree_boards_promotion_rules', foreign_key: 'board_id' has_many :promotions, :through => :promotion_rules has_one :board_image, as: :viewable, order: :position, dependent: :destroy, class_name: "Spree::BoardImage" has_one :conversation, :class_name 
=> "Mailboxer::Conversation" has_one :portfolio, dependent: :destroy has_many :questions, dependent: :destroy has_many :board_favorites, dependent: :destroy has_many :invoice_lines has_many :board_histories, dependent: :destroy extend FriendlyId friendly_id :slug_candidates, use: :slugged #friendly_id [:name, :room_style, :room_type], use: :slugged def slug_candidates [ [:name, :room_style, :room_type] ] end # state machine audit trail requires that there are fields on the model being audited. We're creating them virtually since they don't need to be persisted here. attr_accessor :state_message attr_accessor :transition_user_id #attr_accessible :board_image_attributes accepts_nested_attributes_for :board_image is_impressionable validates_presence_of :name after_save :update_product_publish_status before_save :cache_style_and_room_type default_scope { where("#{Spree::Board.quoted_table_name}.deleted_at IS NULL or #{Spree::Board.quoted_table_name}.deleted_at >= ?", Time.zone.now) } state_machine :state, :initial => :new do store_audit_trail :context_to_log => [:state_message, :transition_user_id] after_transition :on => [:publish, :request_designer_revision], :do => :remove_marked_products after_transition :on => :publish, :do => :update_state_published event :submit_for_publication do transition all => :submitted_for_publication, :in_revision => :submitted_for_publication end event :request_designer_revision do transition all => :in_revision end event :publish do transition all => :published end event :suspend do transition all => :suspended end event :delete do transition all => :deleted end state :new, :in_revision, :submitted_for_publication do def draft? true end end state :published do def published? true end end end def is_favorite?(user) self.board_favorites.find_by(user_id: user.id) ? 
true : false end def update_state_published self.update(status: 'published') end def set_state_transition_context(message, user) self.state_message = message self.transition_user_id = user.id end def remove_marked_products Rails.logger.info "---------------" Rails.logger.info "---------------" Rails.logger.info "---------------" Rails.logger.info "----- REMOVE MARKED PRODUCTS -----" Rails.logger.info "---------------" Rails.logger.info "---------------" Rails.logger.info "---------------" delete_removed_board_products delete_deleted_board_products self.queue_image_generation end #def handle_publication # self.update_attributes!({status: "published"}, without_protection: true ) # delete_removed_board_products # delete_marked_products #end def handle_deletion #self.update_attributes!({status: "deleted"}, without_protection: true ) #delete_removed_board_products #delete_marked_products #self.destroy end def delete_removed_board_products self.board_products.marked_for_removal.each(&:destroy) end def delete_deleted_board_products self.board_products.marked_for_deletion.collect(&:product).compact.each(&:destroy) self.board_products.marked_for_deletion.each(&:destroy) end def update_submitted_for_publication_status self.update_attributes!({status: "submitted_for_publication"}, without_protection: true) end def process_revision_request self.update_attributes!({current_state_label: "needs revision", status: "needs_revision"}, without_protection: true) delete_removed_board_products delete_marked_products end def remove_all_products self.board_products.each(&:destroy!) end def update_product_publish_status if self.status =="published" self.products.update_all(:is_published => 1) else self.products.each do |product| if product.available_sans_board == true product.update_attribute("is_published", 1) else product.update_attribute("is_published", 0) end end end end # use deleted? rather than checking the attribute directly. this # allows extensions to override deleted? 
if they want to provide # their own definition. def deleted? !!deleted_at end def self.active where(:status => 'published') end def self.featured #where(:featured => 1) where("featured_starts_at <= ? and featured_expires_at >= ?", Date.today, Date.today) end def self.promoted includes(promotion_rules: [:promotion]).where("spree_promotions.starts_at <= ? and spree_promotions.expires_at >= ?", Date.today, Date.today) end def currently_promoted? self.current_promotion end def current_promotion p = self.promotions.where("spree_promotions.starts_at <= ? and spree_promotions.expires_at >= ?", Date.today, Date.today) p.empty? ? nil : p.first end def room_and_style rs = [] rs << self.room.name if self.room.present? rs << self.style.name if self.style.present? rs.join(", ") end def self.draft where(:status => ["new"]) end def self.pending where(:status => ["submitted_for_publication", "needs_revision"]) end def self.published where(:status => ["published"]) end def display_short_status case self.state when "new" "Draft" when "submitted_for_publication" "Pending" when "published" "Published" when "suspended" "Suspended" when "deleted" "Deleted" when "unpublished" "Unpublished" when "retired" "Retired" when "in_revision" "In Revision" else "N/A" end end def display_status case self.state when "new" "Draft - Not Published" when "submitted_for_publication" "Pending - Submitted for Publication" when "published" "Published" when "suspended" "Suspended" when "deleted" "Deleted" when "unpublished" "Unpublished" when "retired" "Retired" when "in_revision" "Pending - Revisions Requested" else "status not available" end end def is_dirty? 
self.is_dirty end def self.available_room_taxons ["Living Room", "Dining Room", "Bedroom", "Outdoor Living", "Home Office", "Kids Room", "Kitchen", "Foyer","Bathroom"] end def self.by_style(style_id) where(:style_id => style_id) end def self.exclude_self(board_id) where("id <> #{board_id}") end def self.by_room(room_id) where(:room_id => room_id) end def self.by_color_family(color_family) related_colors = Spree::Color.by_color_family(color_family) includes(:colors).where('spree_colors.id' => related_colors.collect { |color| color.id }) end def self.status_options [["Draft - Not Published", "new"], ["Pending - Submitted for Publication", "submitted_for_publication"], ["Published", "published"], ["Suspended", "suspended"], ["Deleted", "deleted"], ["Unpublished", "unpublished"], ["Retired", "retired"], ["Pending - Revisions Requested", "needs_revision"]] end def self.color_categories ["Blue", "Cool Neutral", "Green", "Orange", "Red", "Violet", "Warm Neutral", "White", "Yellow"].sort end scope :by_color, (lambda do |color| joins(:color_matches).where('spree_color_matches.color_id = ?', color.id) unless color.nil? end) def self.by_designer(designer_id) where(:designer_id => designer_id) end def self.by_lower_bound_price(price) includes(:products).where('spree_products.id' => Spree::Product.master_price_gte(price).collect { |color| color.id }) #includes(:products).where('spree_products.master_price > ?', price) #joins(:products).merge(Spree::Product.master_price_gte(price)) end def self.by_upper_bound_price(price) includes(:products).where('spree_products.id' => Spree::Product.master_price_lte(price).collect { |color| color.id }) #includes(:products).where('spree_products.master_price < ?', price) #joins(:products).merge(Spree::Product.master_price_lte(price)) end def self.render_taxon_select(taxon, subsubcategory) taxon.children.each do |child_taxon| subsubcategory << [child_taxon.name, child_taxon.id] if child_taxon.children.present? 
render_taxon_select(child_taxon, []) end end return subsubcategory end def other_designer_boards self.designer.boards.where(status: "published").where.not(id: self.id) end def related_boards boards_scope = Spree::Board.active boards_scope = boards_scope.exclude_self(self.id) #unless self.color_family.blank? # #@boards_scope = @boards_scope.by_color_family(self.color_family) #end unless self.room_id.blank? boards_scope = boards_scope.by_room(self.room_id) end unless self.style_id.blank? boards_scope = boards_scope.by_style(self.style_id) end boards_scope end def queue_image_generation if !self.dirty_at or self.dirty_at < 10.seconds.ago self.update_attribute("dirty_at", Time.now) #self.delay(run_at: 3.seconds.from_now).generate_image self.generate_image end # the board is marked as dirty whenever it is added to the delayed job queue. That way we don't have to make countless updates but instead can just queue them all up # so skip this if it is already dirty...that means it has already been added to the queue # unless self.is_dirty? # self.update_attribute("is_dirty",1) # self.delay(run_at: 3.seconds.from_now).generate_image # #self.generate_image # end end def self.generate_brands(searcher = nil) suppliers_tab = [] if searcher.present? and searcher.solr_search.present? and searcher.solr_search.facet(:brands).present? and searcher.solr_search.facet(:brands).rows.present? searcher.solr_search.facet(:brands).rows.each do |supp| supplier = Spree::Supplier.where(name: supp.value).first if supplier.present? 
suppliers_tab << [supplier.name, supplier.id] end end end return suppliers_tab end def generate_image white_canvas = ::Magick::Image.new(630, 360) { self.background_color = "white" } self.board_products(:order => "z_index asc").includes(:product => {:master => [:images]}).reload.collect file = nil self.board_products.each do |bp| top_left_x, top_left_y = bp.top_left_x, bp.top_left_y if bp.height == 0 bp.height = 5 bp.width = 5 * bp.width end if bp.width == 0 bp.width == 5 bp.height == 5 * bp.height end if bp.present? and bp.product.present? product_image = bp.product.image_for_board(bp) elsif bp.custom_item.present? product_image = bp.custom_item.custom_image_for_board(bp) else product_image ="" end if product_image.present? # set the rotation product_image.rotate!(bp.rotation_offset) # if turned sideways, then swap the width and height when scaling if [90, 270].include?(bp.rotation_offset) product_image.scale!(bp.height, bp.width) top_left_x = bp.center_point_x - bp.height/2 top_left_y = bp.center_point_y - bp.width/2 # original width and height work if it is just rotated 180 else product_image.scale!(bp.width, bp.height) top_left_x = bp.center_point_x - bp.width/2 top_left_y = bp.center_point_y - bp.height/2 end white_canvas.composite!(product_image, ::Magick::NorthWestGravity, top_left_x, top_left_y, ::Magick::OverCompositeOp) end white_canvas.format = 'jpeg' file = Tempfile.new("room_#{self.id}.jpg") white_canvas.write(file.path) end #self.board_image.destroy if self.board_image self.build_board_image if self.board_image.blank? 
#self.board_image.reload self.board_image.attachment = file self.board_image.save # set it to be clean again #self.is_dirty = 0 self.dirty_at = nil self.save end def cache_style_and_room_type self.room_type = self.room.name.parameterize if self.room self.room_style = self.style.name.parameterize if self.style end def destroy self.board_products.destroy_all self.update_attribute('deleted_at', Time.zone.now) end def designer_name "#{self.designer.first_name} #{self.designer.last_name}" end def coded_designer_name "#{self.designer.first_name.downcase}_#{self.designer.last_name.downcase}" end def to_url "https://scoutandnimble.com/rooms/#{self.id}" end def create_or_update_board_product(params,board_id,email) Resque.enqueue_at(4.days.from_now,RoomSavedButNotPublishedEmail, board_id) if !email Resque.enqueue(RoomUpdate, params,board_id) # if params[:products_board].present? # Rails.logger.info params[:products_board] # board_products = JSON.parse(params[:products_board]) # # board_products.each do |_, product_hash| # if product_hash['action_board'] == 'update' # board_product = self.board_products.where(id: product_hash['product_id']).first # if board_product.present? # Spree::BoardHistory.create(user_id: board_product.board.designer.id, board_id: board_product.board_id, action: "update_product|#{board_product.product.present? ? board_product.product.name : board_product.custom_item.name}") # if product_hash['image'].present? # crop_image(product_hash['image'], board_product) # end # attr = product_hash.except!('action_board', 'board_id', 'product_id', 'image') # board_product.update(attr) # end # elsif product_hash['action_board'] == 'create' # product = Spree::Product.where(id: product_hash['product_id']).first # if product.present? 
# image = product_hash['image'] # attr = product_hash.except!('action_board', 'product_id', 'image') # board_product = product.board_products.new(attr) # if board_product.save # Spree::BoardHistory.create(user_id: board_product.board.designer.id, board_id: board_product.board_id, action: "new_product|#{board_product.product.name}") # if image.present? # crop_image(image, board_product) # end # board_product.update(z_index: product_hash['z_index']) # end # else # puts 'nie istnieje' # puts 'nie istnieje' # puts 'nie istnieje' # custom = Spree::CustomItem.find(product_hash['custom_item_id']) # puts custom.inspect # if custom.present? # image = product_hash['image'] # attr = product_hash.except!('action_board', 'product_id', 'image') # board_product = Spree::BoardProduct.new(attr) # if board_product.save # Spree::BoardHistory.create(user_id: board_product.board.designer.id, board_id: board_product.board_id, action: "new_product|#{board_product.custom_item.name}") # if image.present? # crop_image(image, board_product) # end # board_product.update(z_index: product_hash['z_index']) # end # puts "END " # puts "END " # puts "END " # end # end # end # end # end end def crop_image(base64, board_product) data = Base64.decode64(base64['data:image/png;base64,'.length .. -1]) file_img = File.new("#{Rails.root}/public/somefilename#{DateTime.now.to_i + rand(1000)}.png", 'wb') file_img.write data if board_product.update({photo: file_img, image_id: ''}) File.delete(file_img) end end def send_revision_request_email(message_content="") html_content = "Hi #{self.designer.full_name}, <br /> Your room, \"#{self.name}\" has been reviewed and needs revision before publishing. Please visit the <a href=\"#{self.to_url}/design\">design page</a> to make any revisions. 
" m = Mandrill::API.new(MANDRILL_KEY) message = { :subject => "Your room status has changed: needs revision", :from_name => "Scout & Nimble", :text => "#{message_content} \n\n The Scout & Nimble Team", :to => [ { :email => self.designer.email, :name => self.designer.full_name } ], :from_email => "designer@scoutandnimble.com", :track_opens => true, :track_clicks => true, :url_strip_qs => false, :signing_domain => "scoutandnimble.com" } sending = m.messages.send_template('simple-template', [{:name => 'main', :content => html_content}, {:name => 'extra-message', :content => message_content}], message, true) logger.info sending end def send_email_according_to_board(html_content,subject,from_name,text,template) html_content = html_content m = Mandrill::API.new(MANDRILL_KEY) message = { :subject => subject, :from_name => from_name, :text => "#{text} \n\n The Scout & Nimble Team", :to => [ { :email => self.designer.email, :name => self.designer.full_name } ], :from_email => "designer@scoutandnimble.com", :track_opens => true, :track_clicks => true, :url_strip_qs => false, :signing_domain => "scoutandnimble.com" } sending = m.messages.send_template(template, [{:name => 'main', :content => html_content}, {:name => 'extra-message', :content => text}], message, true) logger.info sending end def calculate_tax designer=self.designer.designer_registrations.first if designer.present? 
dest_state = Spree::State.find(self.state_id) origin=::TaxCloud::Address.new(address1: designer.address1 , city: designer.city, zip5: designer.postal_code, state: designer.state) destination=::TaxCloud::Address.new(address1: self.customer_address, city: self.customer_city, zip5: self.customer_zipcode, state: dest_state.abbr) transaction = ::TaxCloud::Transaction.new(customer_id: 102, order_id: 12, cart_id: 12,origin: origin, destination: destination) self.board_products.each_with_index do |item,index| transaction.cart_items << get_item_data_for_tax(item,index) end response = transaction.lookup end end def get_item_data_for_tax(item,index) ::TaxCloud::CartItem.new( index: index, # item_id: item.get_item_data('name')[0...50], item_id: item.get_item_data('sku'), tic: Spree::Config.taxcloud_shipping_tic, price: item.get_item_data('cost'), quantity: 1 ) end def send_email_with_invoice(to_addr,to_name,pdf) html_content = '' m = Mandrill::API.new(MANDRILL_KEY) colors = [] products = [] self.colors.each do |c| colors << {:r => c.rgb_r, :g => c.rgb_g,:b => c.rgb_b, :name => c.name, :swatch_val => c.swatch_val} end products = [] self.board_products.each do |bp| if bp.product.present? 
products << {:img => bp.product.images.first.attachment.url, :name => bp.get_item_data('name'), :cost => bp.get_item_data('cost')} else products << {:img => bp.custom_item.image(:original), :name => bp.get_item_data('name'), :cost => bp.get_item_data('cost')} end end message = { :subject => self.name, :from_name => "INVOICE", :text => "INVOICE", :to => [ { :email => to_addr, :name => to_name } ], :from_email => to_addr, :track_opens => true, :track_clicks => true, :url_strip_qs => false, :signing_domain => "scoutandnimble.com", :merge_language => "handlebars", :attachments => [ { :type => "pdf", :name => "invoice.pdf", :content => Base64.encode64(pdf) } ], :merge_vars => [ { :rcpt => to_addr, :vars => [ { :name => "boardimage", :content => self.board_image.attachment(:original)#.split('?')[0] }, { :name => "colors", :content => colors }, { :name => "products", :content => products }, { :name => "notes", :content => self.description } ] } ] } sending = m.messages.send_template('invoice-email', [{:name => 'main', :content => html_content}], message, true) end end
require 'tolk/config' module Tolk class Locale < ActiveRecord::Base self.table_name = "tolk_locales" def self._dump_path # Necessary to acces rails.root at runtime ! @dump_path ||= Tolk.config.dump_path.is_a?(Proc) ? instance_eval(&Tolk.config.dump_path) : Tolk.config.dump_path end has_many :phrases, :through => :translations, :class_name => 'Tolk::Phrase' has_many :translations, :class_name => 'Tolk::Translation', :dependent => :destroy accepts_nested_attributes_for :translations, :reject_if => proc { |attributes| attributes['text'].blank? } before_validation :remove_invalid_translations_from_target, :on => :update attr_accessible :name cattr_accessor :locales_config_path self.locales_config_path = self._dump_path cattr_accessor :primary_locale_name self.primary_locale_name = I18n.default_locale.to_s include Tolk::Sync include Tolk::Import validates_uniqueness_of :name validates_presence_of :name cattr_accessor :special_prefixes self.special_prefixes = ['activerecord.attributes'] cattr_accessor :special_keys self.special_keys = ['activerecord.models'] class << self def primary_locale(reload = false) @_primary_locale = nil if reload @_primary_locale ||= begin raise "Primary locale is not set. Please set Locale.primary_locale_name in your application's config file" unless self.primary_locale_name find_or_create_by_name(self.primary_locale_name) end end def primary_language_name primary_locale.language_name end def secondary_locales all - [primary_locale] end def dump_all(to = self.locales_config_path) secondary_locales.each do |locale| File.open("#{to}/#{locale.name}.yml", "w+") do |file| data = locale.to_hash data.respond_to?(:ya2yaml) ? 
file.write(data.ya2yaml(:syck_compatible => true)) : file.write(YAML.dump(hs).force_encoding file.external_encoding.name) end end end def special_key_or_prefix?(prefix, key) self.special_prefixes.include?(prefix) || self.special_keys.include?(key) end # http://cldr.unicode.org/index/cldr-spec/plural-rules - TODO: usage of 'none' isn't standard-conform PLURALIZATION_KEYS = ['none', 'zero', 'one', 'two', 'few', 'many', 'other'] def pluralization_data?(data) keys = data.keys.map(&:to_s) keys.all? {|k| PLURALIZATION_KEYS.include?(k) } end end def has_updated_translations? translations.count(:conditions => {:'tolk_translations.primary_updated' => true}) > 0 end def phrases_with_translation(page = nil) find_phrases_with_translations(page, :'tolk_translations.primary_updated' => false) end def phrases_with_updated_translation(page = nil) find_phrases_with_translations(page, :'tolk_translations.primary_updated' => true) end def count_phrases_without_translation existing_ids = self.translations.all(:select => 'tolk_translations.phrase_id').map(&:phrase_id).uniq Tolk::Phrase.count - existing_ids.count end def phrases_without_translation(page = nil, options = {}) phrases = Tolk::Phrase.scoped(:order => 'tolk_phrases.key ASC') existing_ids = self.translations.all(:select => 'tolk_translations.phrase_id').map(&:phrase_id).uniq phrases = phrases.scoped(:conditions => ['tolk_phrases.id NOT IN (?)', existing_ids]) if existing_ids.present? result = phrases.paginate({:page => page, :per_page => Phrase.per_page}.merge(options)) ActiveRecord::Associations::Preloader.new result, :translations result end def search_phrases(query, scope, key_query, page = nil, options = {}) return [] unless query.present? || key_query.present? 
translations = case scope when :origin Tolk::Locale.primary_locale.translations.containing_text(query) else # :target self.translations.containing_text(query) end phrases = Tolk::Phrase.scoped(:order => 'tolk_phrases.key ASC') phrases = phrases.containing_text(key_query) phrases = phrases.scoped(:conditions => ['tolk_phrases.id IN(?)', translations.map(&:phrase_id).uniq]) phrases.paginate({:page => page}.merge(options)) end def search_phrases_without_translation(query, page = nil, options = {}) return phrases_without_translation(page, options) unless query.present? phrases = Tolk::Phrase.scoped(:order => 'tolk_phrases.key ASC') found_translations_ids = Tolk::Locale.primary_locale.translations.all(:conditions => ["tolk_translations.text LIKE ?", "%#{query}%"], :select => 'tolk_translations.phrase_id').map(&:phrase_id).uniq existing_ids = self.translations.all(:select => 'tolk_translations.phrase_id').map(&:phrase_id).uniq phrases = phrases.scoped(:conditions => ['tolk_phrases.id NOT IN (?) AND tolk_phrases.id IN(?)', existing_ids, found_translations_ids]) if existing_ids.present? result = phrases.paginate({:page => page}.merge(options)) ActiveRecord::Associations::Preloader.new result, :translations result end def to_hash { name => translations.each_with_object({}) do |translation, locale| if translation.phrase.key.include?(".") locale.deep_merge!(unsquish(translation.phrase.key, translation.value)) else locale[translation.phrase.key] = translation.value end end } end def to_param name.parameterize end def primary? 
name == self.class.primary_locale_name end def language_name Tolk.config.mapping[self.name] || self.name end def get(key) if phrase = Tolk::Phrase.find_by_key(key) t = self.translations.where(:phrase_id => phrase.id).first t.text if t end end def translations_with_html translations = self.translations.all(:conditions => "tolk_translations.text LIKE '%>%' AND tolk_translations.text LIKE '%<%' AND tolk_phrases.key NOT LIKE '%_html'", :joins => :phrase) ActiveRecord::Associations::Preloader.new translations, :phrase translations end def self.rename(old_name, new_name) if old_name.blank? || new_name.blank? "You need to provide both names, aborting." else if locale = find_by_name(old_name) locale.name = new_name locale.save "Locale ' #{old_name}' was renamed '#{new_name}'" else "Locale with name '#{old_name}' not found." end end end private def remove_invalid_translations_from_target self.translations.target.dup.each do |t| unless t.valid? self.translations.target.delete(t) else t.updated_at_will_change! end end true end def find_phrases_with_translations(page, conditions = {}) result = Tolk::Phrase.paginate(:page => page, :conditions => { :'tolk_translations.locale_id' => self.id }.merge(conditions), :joins => :translations, :order => 'tolk_phrases.key ASC') result.each do |phrase| phrase.translation = phrase.translations.for(self) end ActiveRecord::Associations::Preloader.new result, :translations result end def unsquish(string, value) if string.is_a?(String) unsquish(string.split("."), value) elsif string.size == 1 { string.first => value } else key = string[0] rest = string[1..-1] { key => unsquish(rest, value) } end end end end Revert "fixes encoding exceptions for non-well encoded strings at dump" This reverts commit 6268ef521b82ee9340923d2faa8d6b0a5bbd77eb. require 'tolk/config' module Tolk class Locale < ActiveRecord::Base self.table_name = "tolk_locales" def self._dump_path # Necessary to acces rails.root at runtime ! 
@dump_path ||= Tolk.config.dump_path.is_a?(Proc) ? instance_eval(&Tolk.config.dump_path) : Tolk.config.dump_path end has_many :phrases, :through => :translations, :class_name => 'Tolk::Phrase' has_many :translations, :class_name => 'Tolk::Translation', :dependent => :destroy accepts_nested_attributes_for :translations, :reject_if => proc { |attributes| attributes['text'].blank? } before_validation :remove_invalid_translations_from_target, :on => :update attr_accessible :name cattr_accessor :locales_config_path self.locales_config_path = self._dump_path cattr_accessor :primary_locale_name self.primary_locale_name = I18n.default_locale.to_s include Tolk::Sync include Tolk::Import validates_uniqueness_of :name validates_presence_of :name cattr_accessor :special_prefixes self.special_prefixes = ['activerecord.attributes'] cattr_accessor :special_keys self.special_keys = ['activerecord.models'] class << self def primary_locale(reload = false) @_primary_locale = nil if reload @_primary_locale ||= begin raise "Primary locale is not set. Please set Locale.primary_locale_name in your application's config file" unless self.primary_locale_name find_or_create_by_name(self.primary_locale_name) end end def primary_language_name primary_locale.language_name end def secondary_locales all - [primary_locale] end def dump_all(to = self.locales_config_path) secondary_locales.each do |locale| File.open("#{to}/#{locale.name}.yml", "w+") do |file| data = locale.to_hash data.respond_to?(:ya2yaml) ? file.write(data.ya2yaml(:syck_compatible => true)) : YAML.dump(locale.to_hash, file) end end end def special_key_or_prefix?(prefix, key) self.special_prefixes.include?(prefix) || self.special_keys.include?(key) end # http://cldr.unicode.org/index/cldr-spec/plural-rules - TODO: usage of 'none' isn't standard-conform PLURALIZATION_KEYS = ['none', 'zero', 'one', 'two', 'few', 'many', 'other'] def pluralization_data?(data) keys = data.keys.map(&:to_s) keys.all? 
{|k| PLURALIZATION_KEYS.include?(k) } end end def has_updated_translations? translations.count(:conditions => {:'tolk_translations.primary_updated' => true}) > 0 end def phrases_with_translation(page = nil) find_phrases_with_translations(page, :'tolk_translations.primary_updated' => false) end def phrases_with_updated_translation(page = nil) find_phrases_with_translations(page, :'tolk_translations.primary_updated' => true) end def count_phrases_without_translation existing_ids = self.translations.all(:select => 'tolk_translations.phrase_id').map(&:phrase_id).uniq Tolk::Phrase.count - existing_ids.count end def phrases_without_translation(page = nil, options = {}) phrases = Tolk::Phrase.scoped(:order => 'tolk_phrases.key ASC') existing_ids = self.translations.all(:select => 'tolk_translations.phrase_id').map(&:phrase_id).uniq phrases = phrases.scoped(:conditions => ['tolk_phrases.id NOT IN (?)', existing_ids]) if existing_ids.present? result = phrases.paginate({:page => page, :per_page => Phrase.per_page}.merge(options)) ActiveRecord::Associations::Preloader.new result, :translations result end def search_phrases(query, scope, key_query, page = nil, options = {}) return [] unless query.present? || key_query.present? translations = case scope when :origin Tolk::Locale.primary_locale.translations.containing_text(query) else # :target self.translations.containing_text(query) end phrases = Tolk::Phrase.scoped(:order => 'tolk_phrases.key ASC') phrases = phrases.containing_text(key_query) phrases = phrases.scoped(:conditions => ['tolk_phrases.id IN(?)', translations.map(&:phrase_id).uniq]) phrases.paginate({:page => page}.merge(options)) end def search_phrases_without_translation(query, page = nil, options = {}) return phrases_without_translation(page, options) unless query.present? 
phrases = Tolk::Phrase.scoped(:order => 'tolk_phrases.key ASC') found_translations_ids = Tolk::Locale.primary_locale.translations.all(:conditions => ["tolk_translations.text LIKE ?", "%#{query}%"], :select => 'tolk_translations.phrase_id').map(&:phrase_id).uniq existing_ids = self.translations.all(:select => 'tolk_translations.phrase_id').map(&:phrase_id).uniq phrases = phrases.scoped(:conditions => ['tolk_phrases.id NOT IN (?) AND tolk_phrases.id IN(?)', existing_ids, found_translations_ids]) if existing_ids.present? result = phrases.paginate({:page => page}.merge(options)) ActiveRecord::Associations::Preloader.new result, :translations result end def to_hash { name => translations.each_with_object({}) do |translation, locale| if translation.phrase.key.include?(".") locale.deep_merge!(unsquish(translation.phrase.key, translation.value)) else locale[translation.phrase.key] = translation.value end end } end def to_param name.parameterize end def primary? name == self.class.primary_locale_name end def language_name Tolk.config.mapping[self.name] || self.name end def get(key) if phrase = Tolk::Phrase.find_by_key(key) t = self.translations.where(:phrase_id => phrase.id).first t.text if t end end def translations_with_html translations = self.translations.all(:conditions => "tolk_translations.text LIKE '%>%' AND tolk_translations.text LIKE '%<%' AND tolk_phrases.key NOT LIKE '%_html'", :joins => :phrase) ActiveRecord::Associations::Preloader.new translations, :phrase translations end def self.rename(old_name, new_name) if old_name.blank? || new_name.blank? "You need to provide both names, aborting." else if locale = find_by_name(old_name) locale.name = new_name locale.save "Locale ' #{old_name}' was renamed '#{new_name}'" else "Locale with name '#{old_name}' not found." end end end private def remove_invalid_translations_from_target self.translations.target.dup.each do |t| unless t.valid? self.translations.target.delete(t) else t.updated_at_will_change! 
end end true end def find_phrases_with_translations(page, conditions = {}) result = Tolk::Phrase.paginate(:page => page, :conditions => { :'tolk_translations.locale_id' => self.id }.merge(conditions), :joins => :translations, :order => 'tolk_phrases.key ASC') result.each do |phrase| phrase.translation = phrase.translations.for(self) end ActiveRecord::Associations::Preloader.new result, :translations result end def unsquish(string, value) if string.is_a?(String) unsquish(string.split("."), value) elsif string.size == 1 { string.first => value } else key = string[0] rest = string[1..-1] { key => unsquish(rest, value) } end end end end
# == Schema Information # # Table name: transactions # # id :integer not null, primary key # account_id :integer not null # direction :integer default(0), not null # amount_cents :integer default(0), not null # amount_currency :string default("USD"), not null # calculated_amount_cents :integer default(0), not null # calculated_amount_currency :string default("USD"), not null # rate :float default(1.0), not null # kind :integer default(0), not null # category_id :integer # memo :string default(""), not null # created_at :datetime not null # updated_at :datetime not null # class Transaction < ActiveRecord::Base monetize :amount_cents, with_model_currency: :amount_cents_currency monetize :calculated_amount_cents, with_model_currency: :calculated_amount_currency validates :amount_cents, numericality: { greater_than: 0 } validates :amount_currency, :calculated_amount_currency, inclusion: { in: Const::CURRENCY_CODES } validates :account_id, :direction, :kind, presence: true enum direction: Const::TRANSACTION_DIRECTIONS enum kind: Const::TRANSACTION_KINDS belongs_to :category belongs_to :account scope :with_account, -> { includes(:account) } scope :history, -> { with_account.order('updated_at desc, created_at asc') } scope :recent_history, -> { history.limit(Const::RECENT_HISTORY_LENGTH) } scope :outflows, -> { where(direction: Const::OUTFLOW) } scope :inflows, -> { where(direction: Const::INFLOW) } scope :expenses, -> { where(kind: Const::EXPENSE) } before_save :calculate_amount after_save :update_budget after_destroy :update_budget delegate :currency, to: :account, prefix: :account private def calculate_amount refresh_rate if new_record? || amount_currency_changed? Log.debug 'refresh_calculated_amount' self.calculated_amount_cents = Integer(amount_cents * rate) end def refresh_rate Log.debug 'refresh_rate' self.calculated_amount_currency = account_currency self.rate = (amount_currency == account_currency) ? 
1 : load_rate end def refresh_rate_if_currency_changed refresh_rate if amount_currency_changed? end def load_rate Log.info "loading rate #{amount_currency}-#{account_currency}" Money.default_bank.get_rate(amount_currency, calculated_amount_currency) end def update_budget return unless expense? Budget.refresh!(created_at.year, created_at.month, category_id) end end Update budget in case transaction changed from outflow to inflow # == Schema Information # # Table name: transactions # # id :integer not null, primary key # account_id :integer not null # direction :integer default(0), not null # amount_cents :integer default(0), not null # amount_currency :string default("USD"), not null # calculated_amount_cents :integer default(0), not null # calculated_amount_currency :string default("USD"), not null # rate :float default(1.0), not null # kind :integer default(0), not null # category_id :integer # memo :string default(""), not null # created_at :datetime not null # updated_at :datetime not null # class Transaction < ActiveRecord::Base monetize :amount_cents, with_model_currency: :amount_cents_currency monetize :calculated_amount_cents, with_model_currency: :calculated_amount_currency validates :amount_cents, numericality: { greater_than: 0 } validates :amount_currency, :calculated_amount_currency, inclusion: { in: Const::CURRENCY_CODES } validates :account_id, :direction, :kind, presence: true enum direction: Const::TRANSACTION_DIRECTIONS enum kind: Const::TRANSACTION_KINDS belongs_to :category belongs_to :account scope :with_account, -> { includes(:account) } scope :history, -> { with_account.order('updated_at desc, created_at asc') } scope :recent_history, -> { history.limit(Const::RECENT_HISTORY_LENGTH) } scope :outflows, -> { where(direction: Const::OUTFLOW) } scope :inflows, -> { where(direction: Const::INFLOW) } scope :expenses, -> { where(kind: Const::EXPENSE) } before_save :calculate_amount after_save :update_budget after_destroy :update_budget delegate
:currency, to: :account, prefix: :account private def calculate_amount refresh_rate if new_record? || amount_currency_changed? Log.debug 'refresh_calculated_amount' self.calculated_amount_cents = Integer(amount_cents * rate) end def refresh_rate Log.debug 'refresh_rate' self.calculated_amount_currency = account_currency self.rate = (amount_currency == account_currency) ? 1 : load_rate end def refresh_rate_if_currency_changed refresh_rate if amount_currency_changed? end def load_rate Log.info "loading rate #{amount_currency}-#{account_currency}" Money.default_bank.get_rate(amount_currency, calculated_amount_currency) end def update_budget return unless category_id.present? Budget.refresh!(created_at.year, created_at.month, category_id) end end
class UpdatesWip attr_accessor :wip, :params, :user, :priority_above_id, :priority_below_id def self.update(wip, params, user) new(wip, params, user).update end def initialize(wip, params, user) self.wip = wip self.priority_above_id = params[:priority_above_id] self.priority_above_id = params[:priority_below_id] self.params = params.except(:priority_above_id, :priority_below_id) self.user = user end def update ActiveRecord::Base.transaction do update_title update_tags update_priority wip.update(params) end wip end def update_title return if title.blank? || title == wip.title wip.update_title!(current_user, title) end def update_tags return if tag_list.blank? tag_list.each do |t| MakeMarks.new.mark_with_name(wip, t) end wip.update_tag_names!(current_user, tag_list) end def update_priority if priority_above update_with_priority_above elsif priority_below update_with_priority_below end end def update_with_priority_above old_priority = wip.priority if old_priority > priority_above new_priority = priority_above update_affected_tasks('+', Range.new(new_priority, old_priority - 1)) else new_priority = priority_above - 1 update_affected_tasks('-', Range.new(old_priority + 1, new_priority)) end wip.priority = new_priority end def update_with_priority_below old_priority = wip.priority if old_priority > priority_below new_priority = priority_below + 1 update_affected_tasks('+', Range.new(new_priority, old_priority - 1)) else new_priority = priority_below update_affected_tasks('-', Range.new(old_priority + 1, new_priority)) end wip.priority = new_priority end def update_affected_tasks(operation, priority_range) affected_tasks(priority_range). 
update_all("priority = priority #{operation} 1") end def affected_tasks(priority_range) product.tasks.where(priority: priority_range) end def title params[:title] end def tag_list params[:tag_list] end def priority_above return unless priority_above_id product.tasks.find_by(id: priority_above_id).priority end def priority_below return unless priority_below_id product.tasks.find_by(id: priority_below_id).priority end def product wip.product end end Don't clobber above id class UpdatesWip attr_accessor :wip, :params, :user, :priority_above_id, :priority_below_id def self.update(wip, params, user) new(wip, params, user).update end def initialize(wip, params, user) self.wip = wip self.priority_above_id = params[:priority_above_id] self.priority_below_id = params[:priority_below_id] self.params = params.except(:priority_above_id, :priority_below_id) self.user = user end def update ActiveRecord::Base.transaction do update_title update_tags update_priority wip.update(params) end wip end def update_title return if title.blank? || title == wip.title wip.update_title!(current_user, title) end def update_tags return if tag_list.blank? 
tag_list.each do |t| MakeMarks.new.mark_with_name(wip, t) end wip.update_tag_names!(current_user, tag_list) end def update_priority if priority_above update_with_priority_above elsif priority_below update_with_priority_below end end def update_with_priority_above old_priority = wip.priority if old_priority > priority_above new_priority = priority_above update_affected_tasks('+', Range.new(new_priority, old_priority - 1)) else new_priority = priority_above - 1 update_affected_tasks('-', Range.new(old_priority + 1, new_priority)) end wip.priority = new_priority end def update_with_priority_below old_priority = wip.priority if old_priority > priority_below new_priority = priority_below + 1 update_affected_tasks('+', Range.new(new_priority, old_priority - 1)) else new_priority = priority_below update_affected_tasks('-', Range.new(old_priority + 1, new_priority)) end wip.priority = new_priority end def update_affected_tasks(operation, priority_range) affected_tasks(priority_range). update_all("priority = priority #{operation} 1") end def affected_tasks(priority_range) product.tasks.where(priority: priority_range) end def title params[:title] end def tag_list params[:tag_list] end def priority_above return unless priority_above_id product.tasks.find_by(id: priority_above_id).priority end def priority_below return unless priority_below_id product.tasks.find_by(id: priority_below_id).priority end def product wip.product end end
class UserMailer < ActionMailer::Base def trigger_email(trigger, email, inout, location) recipients email from "IceCondor Trigger <triggers@icecondor.com>" subject "#{inout} #{trigger.fence.name}" content_type "multipart/alternative" part "text/html" do |p| p.body = render_message("trigger_email", :trigger => trigger, :inout => inout, :location => location) end part "text/plain" do |p| p.body = render_message("trigger_email", :trigger => trigger, :inout => inout, :location => location) end attachment :content_type => "image/jpeg", :body => HTTParty.get("http://maps.google.com/maps/api/staticmap?center=#{location.latitude},#{location.longitude}&zoom=14&size=256x256&sensor=false&markers=color:green|label:X|#{location.latitude},#{location.longitude}"), :content_location => "map.jpg" end end manually encode this. fix. class UserMailer < ActionMailer::Base def trigger_email(trigger, email, inout, location) recipients email from "IceCondor Trigger <triggers@icecondor.com>" subject "#{inout} #{trigger.fence.name}" content_type "multipart/alternative" part "text/html" do |p| p.body = render_message("trigger_email", :trigger => trigger, :inout => inout, :location => location) end part "text/plain" do |p| p.body = render_message("trigger_email", :trigger => trigger, :inout => inout, :location => location) end attachment :content_type => "image/jpeg", :body => HTTParty.get("http://maps.google.com/maps/api/staticmap?center=#{location.latitude},#{location.longitude}&zoom=14&size=256x256&sensor=false&markers=color:green%7Clabel:X%7C#{location.latitude},#{location.longitude}"), :content_location => "map.jpg" end end
class VideoAsset < FileAsset has_many :transcode_jobs, class_name: 'TranscodeRemote', dependent: :destroy include AASM aasm do state :draft, initial: true state :submitted state :failed state :completed event :submit do transitions from: [:draft, :submitted, :failed], to: :submitted, guard: Proc.new { tj = transcode_jobs.build rv = tj.queue_new_job tj.save rv # the return value of the guard determines if we drop down to the next transition } transitions from: [:draft, :submitted, :failed, :completed], to: :failed end event :fail do transitions from: [:draft, :submitted, :initial, :failed], to: :failed end event :complete do transitions from: [:draft, :submitted, :initial, :failed], to: :completed end end has_attached_file :thumbnail, storage: :s3, s3_permissions: 'private', styles: { thumb: '300x200', format: 'jpg' } has_attached_file :asset, storage: :s3, s3_permissions: 'private', s3_credentials: Proc.new { |a| a.instance.s3_credentials }, styles: { mp4: { format: 'mp4' }, ogg: { format: 'ogg' }, thumb: { geometry: "300x200", format: 'jpg' }, }# normally, you would add processors: [:transcoder] to transcode locally # instead, we are using a service to do the transcoding for us. validates_attachment_content_type :asset, :content_type => /\Avideo\/(ogg|webm|mp4|quicktime)\Z/ MAX_FILE_SIZE = 1000.megabytes validates_attachment_size :asset, :in => 0.megabytes .. MAX_FILE_SIZE scope :published, -> { completed } scope :unpublished, -> { va = VideoAsset.arel_table where( va[:aasm_state].eq('draft').or(va[:aasm_state].eq('submitted')).or(va[:aasm_state].eq('failed')) ) } has_one :secret, class_name: 'VideoAccessSecret', dependent: :destroy after_initialize :init_values def init_values self.create_secret if !secret end before_post_process :do_local_processing? def do_local_processing? false # never do local processing in this app. Use the remote transcoding service (See TranscodeRemote) end def check_status! tj = transcode_jobs.build rv = tj.query_job_status tj.save! 
rv end def friendly_state submitted? ? "processing" : aasm_state end def s3_credentials unless %w[S3_BUCKET S3_ACCESS_KEY S3_SECRET].all? { |key| ENV[key].present? } raise "Missing S3 Environment variables" end { bucket: ENV['S3_BUCKET'], access_key_id: ENV['S3_ACCESS_KEY'], secret_access_key: ENV['S3_SECRET'] } end end add one more valid file type class VideoAsset < FileAsset has_many :transcode_jobs, class_name: 'TranscodeRemote', dependent: :destroy include AASM aasm do state :draft, initial: true state :submitted state :failed state :completed event :submit do transitions from: [:draft, :submitted, :failed], to: :submitted, guard: Proc.new { tj = transcode_jobs.build rv = tj.queue_new_job tj.save rv # the return value of the guard determines if we drop down to the next transition } transitions from: [:draft, :submitted, :failed, :completed], to: :failed end event :fail do transitions from: [:draft, :submitted, :initial, :failed], to: :failed end event :complete do transitions from: [:draft, :submitted, :initial, :failed], to: :completed end end has_attached_file :thumbnail, storage: :s3, s3_permissions: 'private', styles: { thumb: '300x200', format: 'jpg' } has_attached_file :asset, storage: :s3, s3_permissions: 'private', s3_credentials: Proc.new { |a| a.instance.s3_credentials }, styles: { mp4: { format: 'mp4' }, ogg: { format: 'ogg' }, thumb: { geometry: "300x200", format: 'jpg' }, }# normally, you would add processors: [:transcoder] to transcode locally # instead, we are using a service to do the transcoding for us. validates_attachment_content_type :asset, :content_type => /\Avideo\/(ogg|webm|mp4|m4v|quicktime)\Z/ MAX_FILE_SIZE = 1000.megabytes validates_attachment_size :asset, :in => 0.megabytes .. 
MAX_FILE_SIZE scope :published, -> { completed } scope :unpublished, -> { va = VideoAsset.arel_table where( va[:aasm_state].eq('draft').or(va[:aasm_state].eq('submitted')).or(va[:aasm_state].eq('failed')) ) } has_one :secret, class_name: 'VideoAccessSecret', dependent: :destroy after_initialize :init_values def init_values self.create_secret if !secret end before_post_process :do_local_processing? def do_local_processing? false # never do local processing in this app. Use the remote transcoding service (See TranscodeRemote) end def check_status! tj = transcode_jobs.build rv = tj.query_job_status tj.save! rv end def friendly_state submitted? ? "processing" : aasm_state end def s3_credentials unless %w[S3_BUCKET S3_ACCESS_KEY S3_SECRET].all? { |key| ENV[key].present? } raise "Missing S3 Environment variables" end { bucket: ENV['S3_BUCKET'], access_key_id: ENV['S3_ACCESS_KEY'], secret_access_key: ENV['S3_SECRET'] } end end
require 'openssl' module VCenterDriver class VIClient attr_accessor :vim attr_accessor :rp def initialize(opts) opts = {:insecure => true}.merge(opts) @vim = RbVmomi::VIM.connect(opts) rp_ref = opts.delete(:rp) @rp = RbVmomi::VIM::ResourcePool(@vim, rp_ref) if rp_ref end def rp_confined? !!@rp end def close_connection @vim.close end # @return RbVmomi::VIM::<type> objects def self.get_entities(folder, type, entities=[]) if folder == [] return nil end folder.childEntity.each do |child| the_name, junk = child.to_s.split('(') case the_name when "Folder" get_entities(child, type, entities) when type entities.push(child) end end return entities end def self.new_from_host(host_id) client = OpenNebula::Client.new host = OpenNebula::Host.new_with_id(host_id, client) rc = host.info if OpenNebula.is_error?(rc) puts rc.message exit -1 end password = host["TEMPLATE/VCENTER_PASSWORD"] system = OpenNebula::System.new(client) config = system.get_configuration if OpenNebula.is_error?(config) puts "Error getting oned configuration : #{config.message}" exit -1 end token = config["ONE_KEY"] password = VIClient::decrypt(password, token) connection = { :host => host["TEMPLATE/VCENTER_HOST"], :user => host["TEMPLATE/VCENTER_USER"], :rp => host["TEMPLATE/VCENTER_RP_REF"], :password => password } self.new(connection) end def self.decrypt(msg, token) begin cipher = OpenSSL::Cipher.new("aes-256-cbc") cipher.decrypt # Truncate for Ruby 2.4 (in previous versions this was being # automatically truncated) cipher.key = token[0..31] msg = cipher.update(Base64::decode64(msg)) msg << cipher.final rescue puts "Error decrypting secret." 
exit -1 end end def self.in_silence begin orig_stderr = $stderr.clone orig_stdout = $stdout.clone $stderr.reopen File.new('/dev/null', 'w') $stdout.reopen File.new('/dev/null', 'w') retval = yield rescue Exception => e $stdout.reopen orig_stdout $stderr.reopen orig_stderr raise e ensure $stdout.reopen orig_stdout $stderr.reopen orig_stderr end retval end def self.in_stderr_silence begin orig_stderr = $stderr.clone $stderr.reopen File.new('/dev/null', 'w') retval = yield rescue Exception => e $stderr.reopen orig_stderr raise e ensure $stderr.reopen orig_stderr end retval end end end # module VCenterDriver F #4913: Add vi_client resource pool confinement using host's VCENTER_RESOURCE_POOL require 'openssl' module VCenterDriver class VIClient attr_accessor :vim attr_accessor :rp def initialize(opts) opts = {:insecure => true}.merge(opts) @vim = RbVmomi::VIM.connect(opts) # Get ccr ccr_ref = opts.delete(:ccr) ccr = RbVmomi::VIM::ClusterComputeResource.new(@vim, ccr_ref) #Get ref for rp if ccr rp = opts.delete(:rp) rp_list = get_resource_pool_list(ccr) rp_ref = rp_list.select { |r| r[:name] == rp }.first._ref rescue nil @rp = RbVmomi::VIM::ResourcePool(@vim, rp_ref) if rp_ref end end def rp_confined? !!@rp end def get_resource_pool_list(ccr, rp = nil, parent_prefix = "", rp_array = []) current_rp = "" if rp.nil? rp = ccr.resourcePool else if !parent_prefix.empty? current_rp << parent_prefix current_rp << "/" end current_rp << rp.name end if rp.resourcePool.size == 0 rp_info = {} rp_info[:name] = current_rp rp_info[:ref] = rp._ref rp_array << rp_info else rp.resourcePool.each do |child_rp| get_resource_pool_list(ccr, child_rp, current_rp, rp_array) end rp_info = {} rp_info[:name] = current_rp rp_info[:ref] = rp._ref rp_array << rp_info if !current_rp.empty? 
end rp_array end def close_connection @vim.close end # @return RbVmomi::VIM::<type> objects def self.get_entities(folder, type, entities=[]) if folder == [] return nil end folder.childEntity.each do |child| the_name, junk = child.to_s.split('(') case the_name when "Folder" get_entities(child, type, entities) when type entities.push(child) end end return entities end def self.new_from_host(host_id) client = OpenNebula::Client.new host = OpenNebula::Host.new_with_id(host_id, client) rc = host.info if OpenNebula.is_error?(rc) puts rc.message exit -1 end password = host["TEMPLATE/VCENTER_PASSWORD"] system = OpenNebula::System.new(client) config = system.get_configuration if OpenNebula.is_error?(config) puts "Error getting oned configuration : #{config.message}" exit -1 end token = config["ONE_KEY"] password = VIClient::decrypt(password, token) connection = { :host => host["TEMPLATE/VCENTER_HOST"], :user => host["TEMPLATE/VCENTER_USER"], :rp => host["TEMPLATE/VCENTER_RESOURCE_POOL"], :ccr => host["TEMPLATE/VCENTER_CCR_REF"], :password => password } self.new(connection) end def self.decrypt(msg, token) begin cipher = OpenSSL::Cipher.new("aes-256-cbc") cipher.decrypt # Truncate for Ruby 2.4 (in previous versions this was being # automatically truncated) cipher.key = token[0..31] msg = cipher.update(Base64::decode64(msg)) msg << cipher.final rescue puts "Error decrypting secret." 
exit -1 end end def self.in_silence begin orig_stderr = $stderr.clone orig_stdout = $stdout.clone $stderr.reopen File.new('/dev/null', 'w') $stdout.reopen File.new('/dev/null', 'w') retval = yield rescue Exception => e $stdout.reopen orig_stdout $stderr.reopen orig_stderr raise e ensure $stdout.reopen orig_stdout $stderr.reopen orig_stderr end retval end def self.in_stderr_silence begin orig_stderr = $stderr.clone $stderr.reopen File.new('/dev/null', 'w') retval = yield rescue Exception => e $stderr.reopen orig_stderr raise e ensure $stderr.reopen orig_stderr end retval end end end # module VCenterDriver
Add core DB. Define basic API.
require 'spec_helper' require 'rest-client' describe OpenAssets::Protocol::TransactionOutput do it "initialize" do target = OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.from_string("abcd"), "ALn3aK1fSuG27N96UGYB1kUYUpGKRhBuBC", 9223372036854775807, OpenAssets::Protocol::OutputType::MARKER_OUTPUT) expect(target.output_type).to eq(OpenAssets::Protocol::OutputType::MARKER_OUTPUT) expect(target.asset_quantity).to eq(9223372036854775807) expect(target.asset_id).to eq("ALn3aK1fSuG27N96UGYB1kUYUpGKRhBuBC") expect(target.script.to_string).to eq("abcd") expect(target.value).to eq(100) end it "invalid output type." do expect{OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.from_string(""), "ALn3aK1fSuG27N96UGYB1kUYUpGKRhBuBC", 100, 10)}.to raise_error(ArgumentError) end it "invalid asset quantity" do expect{OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.from_string(""), "ALn3aK1fSuG27N96UGYB1kUYUpGKRhBuBC", 9223372036854775808, 1)}.to raise_error(ArgumentError) end it 'metadata parse' do output = OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'ALn3aK1fSuG27N96UGYB1kUYUpGKRhBuBC', 200, OpenAssets::Protocol::OutputType::ISSUANCE) expect(output.asset_definition_url).to eq('') output = OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'ALn3aK1fSuG27N96UGYB1kUYUpGKRhBuBC', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'hoge') expect(output.asset_definition_url).to eq('Invalid metadata format.') output = OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'ALn3aK1fSuG27N96UGYB1kUYUpGKRhBuBC', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'u=http://goo.gl/fS4mEj') expect(output.asset_definition_url).to eq('The asset definition is invalid. 
http://goo.gl/fS4mEj') output = OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'AJk2Gx5V67S2wNuwTK5hef3TpHunfbjcmX', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'u=http://goo.gl/fS4mEj') expect(output.asset_definition_url).to eq('http://goo.gl/fS4mEj') output = OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'AJk2Gx5V67S2wNuwTK5hef3TpHunfbjcmX', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'u=http://goo.gl/fS4mEj') expect(output.asset_definition_url).to eq('http://goo.gl/fS4mEj') # for min asset definition spec output = OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'AGHhobo7pVQN5fZWqv3rhdc324ryT7qVTB', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'u=https://goo.gl/Q0NZfe') expect(output.divisibility).to eq(0) expect(output.asset_amount).to eq(200) end it 'asset definition cache' do expect(RestClient::Request).to receive(:execute).twice OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'AJk2Gx5V67S2wNuwTK5hef3TpHunfbjcmX', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'u=http://goo.gl/fS4mEjhoge') OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'AJk2Gx5V67S2wNuwTK5hef3TpHunfbjcmX', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'u=http://goo.gl/fS4mEjk') OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'AJk2Gx5V67S2wNuwTK5hef3TpHunfbjcmX', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'u=http://goo.gl/fS4mEjhoge') end it 'to hash', :network => :testnet do issue_spec = 
Bitcoin::Protocol::Tx.new('010000000154f5a67cb14d7e50056f53263b72165daaf438164e7e825b862b9062a4e40612000000006b48304502210098e16e338e9600876e30d9dc0894bcd1bbb612431e7a36732c5feab0686d0641022044e7dcd512073f31d0c67e0fbbf2269c4a31d5bf3bb1fcc8fbdd2e4d3c0d7e58012103e46fdcbf2062598a221c0e34d0505c270fb77c2c305c40ef0919f8efc0c7f959ffffffff0358020000000000001976a91438a6ebdf20cae2c9287ea014464042112ea3dbfd88ac0000000000000000216a1f4f410100018f4e17753d68747470733a2f2f676f6f2e676c2f755667737434b8770700000000001976a91438a6ebdf20cae2c9287ea014464042112ea3dbfd88ac00000000'.htb) transfer_spec = Bitcoin::Protocol::Tx.new('0100000002dd6cee22d848a609df2d316112ca26b569c97c189400ad6f01046d65aa7b5f52000000006a473044022021806c9f0d888862cb6e8eb3952c48499fe4c0bedc4fb3ef20743c418109a23b02206249fceeeb4c2f496a3a48b57087f97e540af465f8b9328919f6f536ba5346ed012103e46fdcbf2062598a221c0e34d0505c270fb77c2c305c40ef0919f8efc0c7f959ffffffffdd6cee22d848a609df2d316112ca26b569c97c189400ad6f01046d65aa7b5f52020000006b483045022100981c9757ddf1280a47e9274fae9ff331a1a5b750c7f0c2a18de0b18413a3121e0220395d8baeb7802f9f3947152098442144946987d6be4065a0febe20bc20ca55df012103e46fdcbf2062598a221c0e34d0505c270fb77c2c305c40ef0919f8efc0c7f959ffffffff0400000000000000000b6a094f4101000263ac4d0058020000000000001976a914e9ac589641f17a2286631c24d6d2d00b8c959eb588ac58020000000000001976a91438a6ebdf20cae2c9287ea014464042112ea3dbfd88ac504e0700000000001976a91438a6ebdf20cae2c9287ea014464042112ea3dbfd88ac00000000'.htb) metadata = OpenAssets::Protocol::MarkerOutput.deserialize_payload( OpenAssets::Protocol::MarkerOutput.parse_script(issue_spec.outputs[1].parsed_script.to_payload)).metadata output = OpenAssets::Protocol::TransactionOutput.new( 600, transfer_spec.outputs[2].parsed_script, 'oK31ByjFuNhfnFuRMmZgchsdiprYmRzuz5', 9900, OpenAssets::Protocol::OutputType::TRANSFER, metadata) output.account = 'hoge' expect(output.to_hash).to match( 'address' => 'mkgW6hNYBctmqDtTTsTJrsf2Gh2NPtoCU4', 'oa_address' => 
'bWvePLsBsf6nThU3pWVZVWjZbcJCYQxHCpE', 'script' => '76a91438a6ebdf20cae2c9287ea014464042112ea3dbfd88ac', 'amount' => '0.00000600', 'asset_id' => 'oK31ByjFuNhfnFuRMmZgchsdiprYmRzuz5', 'asset_quantity' => '9900', 'asset_amount' => '990.0', 'account' => 'hoge', 'asset_definition_url' => 'https://goo.gl/uVgst4', 'proof_of_authenticity' => false, 'output_type' => 'transfer') end describe 'to hash with multisig', :network => :testnet do subject { tx = Bitcoin::Protocol::Tx.new('0100000001aac9ccdb7eb7a47a35be4e1814c675658fd6de41d1747c8000e6ce09b9faa221000000006b483045022100a94d970535e58ca5e8df01e671806fb1d7bb2157492e8ec1b5dc7bc70c4cfb65022055a94e8679d0ad75b09c948efe8b8132d2cc5cc9a38a588cd1d8cd84e03c1cb50121030712171ff2a109f94ec422b9830c456a3c1f97eec253a0f09f024b50a895e3d8ffffffff02b0c0d617000000001976a9146409eb200880acae69f3458591c3a7f36c4c770288ac80f0fa020000000047522103cdd34ec0a05d91c026fe8cb74434923075d3acc20f3f673fb855c8f2c04ca5222103b99b5e2a06b41612a6235c0a536fabfd293d4fce5fe6a4ba3461ed6f07d5372052ae00000000'.htb) output = tx.outputs[1] OpenAssets::Protocol::TransactionOutput.new(output.value, output.parsed_script) } it do expect(subject.address.length).to eq(2) expect(subject.to_hash['address']).to eq(['mx8JNZiqmTEG7KcrL1PtWuAzU6adagE8V6', 'mzURevsZS7FZnBBuBVyCGrG2oRtWS9ArxV']) end end describe 'non standard output', :network => :testnet do subject { tx = 
Bitcoin::Protocol::Tx.new('01000000018177482b65ec42fc43c6b2ad13955d7fdec00edb5dc5ac483d9e31eb06a5a5d5010000006c493046022100955062369843b52db91eb9c1b8fb5ed20b346a62841edfb2ba2097d2a9bc31810221009ace1c91398620b4d1bfa559ca2abcaf6c1a524e606bb5fedf74c9a123ae4ec8012103046d258651af2fbb6acb63414a604314ce94d644a0efd8832ca5275f2bc207c6ffffffff05404b4c0000000000475221033423007d8f263819a2e42becaaf5b06f34cb09919e06304349d950668209eaed21021d69e2b68c3960903b702af7829fadcd80bd89b158150c85c4a75b2c8cb9c39452ae404b4c00000000002752010021021d69e2b68c3960903b702af7829fadcd80bd89b158150c85c4a75b2c8cb9c39452ae404b4c00000000004752210279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f8179821021d69e2b68c3960903b702af7829fadcd80bd89b158150c85c4a75b2c8cb9c39452aeb0f0c304000000001976a9146cce12229300b733cdf0c7ce3079c7503b080fca88ac404b4c000000000047522102c6047f9441ed7d6d3045406e95c07cd85c778e4b8cef3ca7abac09b95c709ee521021d69e2b68c3960903b702af7829fadcd80bd89b158150c85c4a75b2c8cb9c39452ae00000000'.htb) output = tx.outputs[1] OpenAssets::Protocol::TransactionOutput.new(output.value, output.parsed_script) } it do expect(subject.address).to be nil expect(subject.oa_address).to be nil end end end Refactoring spec constitution require 'spec_helper' require 'rest-client' describe OpenAssets::Protocol::TransactionOutput do describe 'initialize' do context 'standard output' do it do target = OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.from_string("abcd"), "ALn3aK1fSuG27N96UGYB1kUYUpGKRhBuBC", 9223372036854775807, OpenAssets::Protocol::OutputType::MARKER_OUTPUT) expect(target.output_type).to eq(OpenAssets::Protocol::OutputType::MARKER_OUTPUT) expect(target.asset_quantity).to eq(9223372036854775807) expect(target.asset_id).to eq("ALn3aK1fSuG27N96UGYB1kUYUpGKRhBuBC") expect(target.script.to_string).to eq("abcd") expect(target.value).to eq(100) end end context 'invalid output type.' 
do it do expect{OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.from_string(""), "ALn3aK1fSuG27N96UGYB1kUYUpGKRhBuBC", 100, 10)}.to raise_error(ArgumentError) end end context 'invalid asset quantity' do it do expect{OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.from_string(""), "ALn3aK1fSuG27N96UGYB1kUYUpGKRhBuBC", 9223372036854775808, 1)}.to raise_error(ArgumentError) end end context 'metadata parse' do it do output = OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'ALn3aK1fSuG27N96UGYB1kUYUpGKRhBuBC', 200, OpenAssets::Protocol::OutputType::ISSUANCE) expect(output.asset_definition_url).to eq('') output = OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'ALn3aK1fSuG27N96UGYB1kUYUpGKRhBuBC', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'hoge') expect(output.asset_definition_url).to eq('Invalid metadata format.') output = OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'ALn3aK1fSuG27N96UGYB1kUYUpGKRhBuBC', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'u=http://goo.gl/fS4mEj') expect(output.asset_definition_url).to eq('The asset definition is invalid. 
http://goo.gl/fS4mEj') output = OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'AJk2Gx5V67S2wNuwTK5hef3TpHunfbjcmX', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'u=http://goo.gl/fS4mEj') expect(output.asset_definition_url).to eq('http://goo.gl/fS4mEj') output = OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'AJk2Gx5V67S2wNuwTK5hef3TpHunfbjcmX', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'u=http://goo.gl/fS4mEj') expect(output.asset_definition_url).to eq('http://goo.gl/fS4mEj') # for min asset definition spec output = OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'AGHhobo7pVQN5fZWqv3rhdc324ryT7qVTB', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'u=https://goo.gl/Q0NZfe') expect(output.divisibility).to eq(0) expect(output.asset_amount).to eq(200) end end end describe 'asset definition cache' do it do expect(RestClient::Request).to receive(:execute).twice OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'AJk2Gx5V67S2wNuwTK5hef3TpHunfbjcmX', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'u=http://goo.gl/fS4mEjhoge') OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'AJk2Gx5V67S2wNuwTK5hef3TpHunfbjcmX', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'u=http://goo.gl/fS4mEjk') OpenAssets::Protocol::TransactionOutput.new( 100, Bitcoin::Script.new('hoge'), 'AJk2Gx5V67S2wNuwTK5hef3TpHunfbjcmX', 200, OpenAssets::Protocol::OutputType::ISSUANCE, 'u=http://goo.gl/fS4mEjhoge') end end describe 'to hash', :network => :testnet do it do issue_spec = 
Bitcoin::Protocol::Tx.new('010000000154f5a67cb14d7e50056f53263b72165daaf438164e7e825b862b9062a4e40612000000006b48304502210098e16e338e9600876e30d9dc0894bcd1bbb612431e7a36732c5feab0686d0641022044e7dcd512073f31d0c67e0fbbf2269c4a31d5bf3bb1fcc8fbdd2e4d3c0d7e58012103e46fdcbf2062598a221c0e34d0505c270fb77c2c305c40ef0919f8efc0c7f959ffffffff0358020000000000001976a91438a6ebdf20cae2c9287ea014464042112ea3dbfd88ac0000000000000000216a1f4f410100018f4e17753d68747470733a2f2f676f6f2e676c2f755667737434b8770700000000001976a91438a6ebdf20cae2c9287ea014464042112ea3dbfd88ac00000000'.htb) transfer_spec = Bitcoin::Protocol::Tx.new('0100000002dd6cee22d848a609df2d316112ca26b569c97c189400ad6f01046d65aa7b5f52000000006a473044022021806c9f0d888862cb6e8eb3952c48499fe4c0bedc4fb3ef20743c418109a23b02206249fceeeb4c2f496a3a48b57087f97e540af465f8b9328919f6f536ba5346ed012103e46fdcbf2062598a221c0e34d0505c270fb77c2c305c40ef0919f8efc0c7f959ffffffffdd6cee22d848a609df2d316112ca26b569c97c189400ad6f01046d65aa7b5f52020000006b483045022100981c9757ddf1280a47e9274fae9ff331a1a5b750c7f0c2a18de0b18413a3121e0220395d8baeb7802f9f3947152098442144946987d6be4065a0febe20bc20ca55df012103e46fdcbf2062598a221c0e34d0505c270fb77c2c305c40ef0919f8efc0c7f959ffffffff0400000000000000000b6a094f4101000263ac4d0058020000000000001976a914e9ac589641f17a2286631c24d6d2d00b8c959eb588ac58020000000000001976a91438a6ebdf20cae2c9287ea014464042112ea3dbfd88ac504e0700000000001976a91438a6ebdf20cae2c9287ea014464042112ea3dbfd88ac00000000'.htb) metadata = OpenAssets::Protocol::MarkerOutput.deserialize_payload( OpenAssets::Protocol::MarkerOutput.parse_script(issue_spec.outputs[1].parsed_script.to_payload)).metadata output = OpenAssets::Protocol::TransactionOutput.new( 600, transfer_spec.outputs[2].parsed_script, 'oK31ByjFuNhfnFuRMmZgchsdiprYmRzuz5', 9900, OpenAssets::Protocol::OutputType::TRANSFER, metadata) output.account = 'hoge' expect(output.to_hash).to match( 'address' => 'mkgW6hNYBctmqDtTTsTJrsf2Gh2NPtoCU4', 'oa_address' => 
'bWvePLsBsf6nThU3pWVZVWjZbcJCYQxHCpE', 'script' => '76a91438a6ebdf20cae2c9287ea014464042112ea3dbfd88ac', 'amount' => '0.00000600', 'asset_id' => 'oK31ByjFuNhfnFuRMmZgchsdiprYmRzuz5', 'asset_quantity' => '9900', 'asset_amount' => '990.0', 'account' => 'hoge', 'asset_definition_url' => 'https://goo.gl/uVgst4', 'proof_of_authenticity' => false, 'output_type' => 'transfer') end end describe 'to hash with multisig', :network => :testnet do subject { tx = Bitcoin::Protocol::Tx.new('0100000001aac9ccdb7eb7a47a35be4e1814c675658fd6de41d1747c8000e6ce09b9faa221000000006b483045022100a94d970535e58ca5e8df01e671806fb1d7bb2157492e8ec1b5dc7bc70c4cfb65022055a94e8679d0ad75b09c948efe8b8132d2cc5cc9a38a588cd1d8cd84e03c1cb50121030712171ff2a109f94ec422b9830c456a3c1f97eec253a0f09f024b50a895e3d8ffffffff02b0c0d617000000001976a9146409eb200880acae69f3458591c3a7f36c4c770288ac80f0fa020000000047522103cdd34ec0a05d91c026fe8cb74434923075d3acc20f3f673fb855c8f2c04ca5222103b99b5e2a06b41612a6235c0a536fabfd293d4fce5fe6a4ba3461ed6f07d5372052ae00000000'.htb) output = tx.outputs[1] OpenAssets::Protocol::TransactionOutput.new(output.value, output.parsed_script) } it do expect(subject.address.length).to eq(2) expect(subject.to_hash['address']).to eq(['mx8JNZiqmTEG7KcrL1PtWuAzU6adagE8V6', 'mzURevsZS7FZnBBuBVyCGrG2oRtWS9ArxV']) end end describe 'non standard output', :network => :testnet do subject { tx = 
Bitcoin::Protocol::Tx.new('01000000018177482b65ec42fc43c6b2ad13955d7fdec00edb5dc5ac483d9e31eb06a5a5d5010000006c493046022100955062369843b52db91eb9c1b8fb5ed20b346a62841edfb2ba2097d2a9bc31810221009ace1c91398620b4d1bfa559ca2abcaf6c1a524e606bb5fedf74c9a123ae4ec8012103046d258651af2fbb6acb63414a604314ce94d644a0efd8832ca5275f2bc207c6ffffffff05404b4c0000000000475221033423007d8f263819a2e42becaaf5b06f34cb09919e06304349d950668209eaed21021d69e2b68c3960903b702af7829fadcd80bd89b158150c85c4a75b2c8cb9c39452ae404b4c00000000002752010021021d69e2b68c3960903b702af7829fadcd80bd89b158150c85c4a75b2c8cb9c39452ae404b4c00000000004752210279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f8179821021d69e2b68c3960903b702af7829fadcd80bd89b158150c85c4a75b2c8cb9c39452aeb0f0c304000000001976a9146cce12229300b733cdf0c7ce3079c7503b080fca88ac404b4c000000000047522102c6047f9441ed7d6d3045406e95c07cd85c778e4b8cef3ca7abac09b95c709ee521021d69e2b68c3960903b702af7829fadcd80bd89b158150c85c4a75b2c8cb9c39452ae00000000'.htb) output = tx.outputs[1] OpenAssets::Protocol::TransactionOutput.new(output.value, output.parsed_script) } it do expect(subject.address).to be nil expect(subject.oa_address).to be nil end end end
require 'spec_helper' module Ransack module Adapters module ActiveRecord describe Context do subject { Context.new(Person) } describe '#relation_for' do it 'returns relation for given object' do subject.object.should be_an ::ActiveRecord::Relation end end describe '#evaluate' do it 'evaluates search obects' do search = Search.new(Person, :name_eq => 'Joe Blow') result = subject.evaluate(search) result.should be_an ::ActiveRecord::Relation result.to_sql.should match /"name" = 'Joe Blow'/ end it 'SELECTs DISTINCT when :distinct => true' do search = Search.new(Person, :name_eq => 'Joe Blow') result = subject.evaluate(search, :distinct => true) result.should be_an ::ActiveRecord::Relation result.to_sql.should match /SELECT DISTINCT/ end end it 'contextualizes strings to attributes' do attribute = subject.contextualize 'children_children_parent_name' attribute.should be_a Arel::Attributes::Attribute attribute.name.to_s.should eq 'name' attribute.relation.table_alias.should eq 'parents_people' end it 'builds new associations if not yet built' do attribute = subject.contextualize 'children_articles_title' attribute.should be_a Arel::Attributes::Attribute attribute.name.to_s.should eq 'title' attribute.relation.name.should eq 'articles' attribute.relation.table_alias.should be_nil end end end end end Fix typo require 'spec_helper' module Ransack module Adapters module ActiveRecord describe Context do subject { Context.new(Person) } describe '#relation_for' do it 'returns relation for given object' do subject.object.should be_an ::ActiveRecord::Relation end end describe '#evaluate' do it 'evaluates search objects' do search = Search.new(Person, :name_eq => 'Joe Blow') result = subject.evaluate(search) result.should be_an ::ActiveRecord::Relation result.to_sql.should match /"name" = 'Joe Blow'/ end it 'SELECTs DISTINCT when :distinct => true' do search = Search.new(Person, :name_eq => 'Joe Blow') result = subject.evaluate(search, :distinct => true) result.should be_an 
::ActiveRecord::Relation result.to_sql.should match /SELECT DISTINCT/ end end it 'contextualizes strings to attributes' do attribute = subject.contextualize 'children_children_parent_name' attribute.should be_a Arel::Attributes::Attribute attribute.name.to_s.should eq 'name' attribute.relation.table_alias.should eq 'parents_people' end it 'builds new associations if not yet built' do attribute = subject.contextualize 'children_articles_title' attribute.should be_a Arel::Attributes::Attribute attribute.name.to_s.should eq 'title' attribute.relation.name.should eq 'articles' attribute.relation.table_alias.should be_nil end end end end end
Inline the SSL verification
require "json/stream" module Json module Streamer class JsonStreamer attr_reader :aggregator attr_reader :parser def initialize(file_io = nil, chunk_size = 1000) @parser = JSON::Stream::Parser.new @file_io = file_io @chunk_size = chunk_size @current_level = -1 @current_key = nil @aggregator = {} @aggregator_keys = {} @parser.start_object {start_object} @parser.start_array {start_array} @parser.key {|k| key(k)} end # Callbacks containing `yield` have to be defined in the method called via block otherwise yield won't work def get(nesting_level:-1, key:nil, yield_values:true) @yield_level = nesting_level @yield_key = key @parser.value do |v| value(v, yield_values, nesting_level) do |desired_object| yield desired_object end end @parser.end_object do end_level(Hash.new) do |desired_object| yield desired_object end end @parser.end_array do end_level(Array.new) do |desired_object| yield desired_object end end @file_io.each(@chunk_size) { |chunk| @parser << chunk } if @file_io end def start_object new_level(Hash.new) end def start_array new_level(Array.new) end def key(k) @current_key = k end def value(value, yield_values, yield_level) yield value if yield_value?(yield_values, yield_level) add_value(value) end def add_value(value) if array_level?(@current_level) @aggregator[@current_level] << value else @aggregator[@current_level][@current_key] = value end end def end_level(type) if yield_object?(@yield_level) yield @aggregator[@current_level].clone reset_current_level(type) else merge_up end @current_level -= 1 end def yield_object?(yield_level) @current_level.eql? yield_level or (not @yield_key.nil? and @yield_key == previous_key) end def yield_value?(yield_values, yield_level) yield_values and ((next_level).eql?(yield_level) or (not @yield_key.nil? 
and @yield_key == @current_key)) end def new_level(type) set_aggregator_key @current_level += 1 reset_current_level(type) end def reset_current_level(type) @aggregator[@current_level] = type end def set_aggregator_key reset_current_key if array_level?(@current_level) @aggregator_keys[@current_level] = @current_key end def reset_current_key @current_key = nil end def array_level?(nesting_level) @aggregator[nesting_level].is_a?(Array) end def merge_up return if @current_level.zero? if array_level?(previous_level) @aggregator[previous_level] << @aggregator[@current_level] else @aggregator[previous_level][@aggregator_keys[previous_level]] = @aggregator[@current_level] end @aggregator.delete(@current_level) end def previous_level @current_level - 1 end def next_level @current_level + 1 end def previous_key @aggregator_keys[@current_level - 1] end end end end Uses previous_level and previous_key in one more place require "json/stream" module Json module Streamer class JsonStreamer attr_reader :aggregator attr_reader :parser def initialize(file_io = nil, chunk_size = 1000) @parser = JSON::Stream::Parser.new @file_io = file_io @chunk_size = chunk_size @current_level = -1 @current_key = nil @aggregator = {} @aggregator_keys = {} @parser.start_object {start_object} @parser.start_array {start_array} @parser.key {|k| key(k)} end # Callbacks containing `yield` have to be defined in the method called via block otherwise yield won't work def get(nesting_level:-1, key:nil, yield_values:true) @yield_level = nesting_level @yield_key = key @parser.value do |v| value(v, yield_values, nesting_level) do |desired_object| yield desired_object end end @parser.end_object do end_level(Hash.new) do |desired_object| yield desired_object end end @parser.end_array do end_level(Array.new) do |desired_object| yield desired_object end end @file_io.each(@chunk_size) { |chunk| @parser << chunk } if @file_io end def start_object new_level(Hash.new) end def start_array new_level(Array.new) end def 
key(k) @current_key = k end def value(value, yield_values, yield_level) yield value if yield_value?(yield_values, yield_level) add_value(value) end def add_value(value) if array_level?(@current_level) @aggregator[@current_level] << value else @aggregator[@current_level][@current_key] = value end end def end_level(type) if yield_object?(@yield_level) yield @aggregator[@current_level].clone reset_current_level(type) else merge_up end @current_level -= 1 end def yield_object?(yield_level) @current_level.eql? yield_level or (not @yield_key.nil? and @yield_key == previous_key) end def yield_value?(yield_values, yield_level) yield_values and ((next_level).eql?(yield_level) or (not @yield_key.nil? and @yield_key == @current_key)) end def new_level(type) set_aggregator_key @current_level += 1 reset_current_level(type) end def reset_current_level(type) @aggregator[@current_level] = type end def set_aggregator_key reset_current_key if array_level?(@current_level) @aggregator_keys[@current_level] = @current_key end def reset_current_key @current_key = nil end def array_level?(nesting_level) @aggregator[nesting_level].is_a?(Array) end def merge_up return if @current_level.zero? if array_level?(previous_level) @aggregator[previous_level] << @aggregator[@current_level] else @aggregator[previous_level][previous_key] = @aggregator[@current_level] end @aggregator.delete(@current_level) end def previous_level @current_level - 1 end def next_level @current_level + 1 end def previous_key @aggregator_keys[previous_level] end end end end
require "spec_helper" describe Projects::UpdatePagesService do set(:project) { create(:project, :repository) } set(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) } set(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') } let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png') } let(:extension) { 'zip' } let(:file) { fixture_file_upload(Rails.root + "spec/fixtures/pages.#{extension}") } let(:empty_file) { fixture_file_upload(Rails.root + "spec/fixtures/pages_empty.#{extension}") } let(:metadata) do filename = Rails.root + "spec/fixtures/pages.#{extension}.meta" fixture_file_upload(filename) if File.exist?(filename) end subject { described_class.new(project, build) } before do project.remove_pages end context 'legacy artifacts' do %w(tar.gz zip).each do |format| let(:extension) { format } context "for valid #{format}" do before do build.update_attributes(legacy_artifacts_file: file) build.update_attributes(legacy_artifacts_metadata: metadata) end describe 'pages artifacts' do context 'with expiry date' do before do build.artifacts_expire_in = "2 days" build.save! 
end it "doesn't delete artifacts" do expect(execute).to eq(:success) expect(build.reload.artifacts?).to eq(true) end end context 'without expiry date' do it "does delete artifacts" do expect(execute).to eq(:success) expect(build.reload.artifacts?).to eq(false) end end end it 'succeeds' do expect(project.pages_deployed?).to be_falsey expect(execute).to eq(:success) expect(project.pages_deployed?).to be_truthy # Check that all expected files are extracted %w[index.html zero .hidden/file].each do |filename| expect(File.exist?(File.join(project.public_pages_path, filename))).to be_truthy end end it 'limits pages size' do stub_application_setting(max_pages_size: 1) expect(execute).not_to eq(:success) end it 'removes pages after destroy' do expect(PagesWorker).to receive(:perform_in) expect(project.pages_deployed?).to be_falsey expect(execute).to eq(:success) expect(project.pages_deployed?).to be_truthy project.destroy expect(project.pages_deployed?).to be_falsey end it 'fails if sha on branch is not latest' do build.update_attributes(ref: 'feature') expect(execute).not_to eq(:success) end it 'fails for empty file fails' do build.update_attributes(legacy_artifacts_file: empty_file) expect(execute).not_to eq(:success) end end end end context 'for new artifacts' do context "for a valid job" do before do create(:ci_job_artifact, file: file, job: build) create(:ci_job_artifact, file_type: :metadata, file: metadata, job: build) build.reload end describe 'pages artifacts' do context 'with expiry date' do before do build.artifacts_expire_in = "2 days" build.save! 
end it "doesn't delete artifacts" do expect(execute).to eq(:success) expect(build.artifacts?).to eq(true) end end context 'without expiry date' do it "does delete artifacts" do expect(execute).to eq(:success) expect(build.reload.artifacts?).to eq(false) end end end it 'succeeds' do expect(project.pages_deployed?).to be_falsey expect(execute).to eq(:success) expect(project.pages_deployed?).to be_truthy # Check that all expected files are extracted %w[index.html zero .hidden/file].each do |filename| expect(File.exist?(File.join(project.public_pages_path, filename))).to be_truthy end end it 'limits pages size' do stub_application_setting(max_pages_size: 1) expect(execute).not_to eq(:success) end it 'removes pages after destroy' do expect(PagesWorker).to receive(:perform_in) expect(project.pages_deployed?).to be_falsey expect(execute).to eq(:success) expect(project.pages_deployed?).to be_truthy project.destroy expect(project.pages_deployed?).to be_falsey end it 'fails if sha on branch is not latest' do build.update_attributes(ref: 'feature') expect(execute).not_to eq(:success) end it 'fails for empty file fails' do build.job_artifacts_archive.update_attributes(file: empty_file) expect(execute).not_to eq(:success) end context 'when timeout happens by DNS error' do before do allow_any_instance_of(Projects::UpdatePagesService) .to receive(:extract_zip_archive!).and_raise(SocketError) end it 'raises an error' do expect { execute }.to raise_error(SocketError) end end end end it 'fails to remove project pages when no pages is deployed' do expect(PagesWorker).not_to receive(:perform_in) expect(project.pages_deployed?).to be_falsey project.destroy end it 'fails if no artifacts' do expect(execute).not_to eq(:success) end it 'fails for invalid archive' do build.update_attributes(legacy_artifacts_file: invalid_file) expect(execute).not_to eq(:success) end describe 'maximum pages artifacts size' do let(:metadata) { spy('metadata') } before do file = fixture_file_upload(Rails.root 
+ 'spec/fixtures/pages.zip') metafile = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta') build.update_attributes(legacy_artifacts_file: file) build.update_attributes(legacy_artifacts_metadata: metafile) allow(build).to receive(:artifacts_metadata_entry) .and_return(metadata) end shared_examples 'pages size limit exceeded' do it 'limits the maximum size of gitlab pages' do subject.execute expect(deploy_status.description) .to match(/artifacts for pages are too large/) expect(deploy_status).to be_script_failure end end context 'when maximum pages size is set to zero' do before do stub_application_setting(max_pages_size: 0) end context 'when page size does not exceed internal maximum' do before do allow(metadata).to receive(:total_size).and_return(200.megabytes) end it 'updates pages correctly' do subject.execute expect(deploy_status.description).not_to be_present end end context 'when pages size does exceed internal maximum' do before do allow(metadata).to receive(:total_size).and_return(2.terabytes) end it_behaves_like 'pages size limit exceeded' end end context 'when pages size is greater than max size setting' do before do stub_application_setting(max_pages_size: 200) allow(metadata).to receive(:total_size).and_return(201.megabytes) end it_behaves_like 'pages size limit exceeded' end context 'when max size setting is greater than internal max size' do before do stub_application_setting(max_pages_size: 3.terabytes / 1.megabyte) allow(metadata).to receive(:total_size).and_return(2.terabytes) end it_behaves_like 'pages size limit exceeded' end end def deploy_status GenericCommitStatus.find_by(name: 'pages:deploy') end def execute subject.execute[:status] end end Fix staticanalysys require "spec_helper" describe Projects::UpdatePagesService do set(:project) { create(:project, :repository) } set(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit('HEAD').sha) } set(:build) { create(:ci_build, pipeline: pipeline, ref: 'HEAD') } 
let(:invalid_file) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png') } let(:extension) { 'zip' } let(:file) { fixture_file_upload(Rails.root + "spec/fixtures/pages.#{extension}") } let(:empty_file) { fixture_file_upload(Rails.root + "spec/fixtures/pages_empty.#{extension}") } let(:metadata) do filename = Rails.root + "spec/fixtures/pages.#{extension}.meta" fixture_file_upload(filename) if File.exist?(filename) end subject { described_class.new(project, build) } before do project.remove_pages end context 'legacy artifacts' do %w(tar.gz zip).each do |format| let(:extension) { format } context "for valid #{format}" do before do build.update_attributes(legacy_artifacts_file: file) build.update_attributes(legacy_artifacts_metadata: metadata) end describe 'pages artifacts' do context 'with expiry date' do before do build.artifacts_expire_in = "2 days" build.save! end it "doesn't delete artifacts" do expect(execute).to eq(:success) expect(build.reload.artifacts?).to eq(true) end end context 'without expiry date' do it "does delete artifacts" do expect(execute).to eq(:success) expect(build.reload.artifacts?).to eq(false) end end end it 'succeeds' do expect(project.pages_deployed?).to be_falsey expect(execute).to eq(:success) expect(project.pages_deployed?).to be_truthy # Check that all expected files are extracted %w[index.html zero .hidden/file].each do |filename| expect(File.exist?(File.join(project.public_pages_path, filename))).to be_truthy end end it 'limits pages size' do stub_application_setting(max_pages_size: 1) expect(execute).not_to eq(:success) end it 'removes pages after destroy' do expect(PagesWorker).to receive(:perform_in) expect(project.pages_deployed?).to be_falsey expect(execute).to eq(:success) expect(project.pages_deployed?).to be_truthy project.destroy expect(project.pages_deployed?).to be_falsey end it 'fails if sha on branch is not latest' do build.update_attributes(ref: 'feature') expect(execute).not_to eq(:success) end it 'fails for empty 
file fails' do build.update_attributes(legacy_artifacts_file: empty_file) expect(execute).not_to eq(:success) end end end end context 'for new artifacts' do context "for a valid job" do before do create(:ci_job_artifact, file: file, job: build) create(:ci_job_artifact, file_type: :metadata, file: metadata, job: build) build.reload end describe 'pages artifacts' do context 'with expiry date' do before do build.artifacts_expire_in = "2 days" build.save! end it "doesn't delete artifacts" do expect(execute).to eq(:success) expect(build.artifacts?).to eq(true) end end context 'without expiry date' do it "does delete artifacts" do expect(execute).to eq(:success) expect(build.reload.artifacts?).to eq(false) end end end it 'succeeds' do expect(project.pages_deployed?).to be_falsey expect(execute).to eq(:success) expect(project.pages_deployed?).to be_truthy # Check that all expected files are extracted %w[index.html zero .hidden/file].each do |filename| expect(File.exist?(File.join(project.public_pages_path, filename))).to be_truthy end end it 'limits pages size' do stub_application_setting(max_pages_size: 1) expect(execute).not_to eq(:success) end it 'removes pages after destroy' do expect(PagesWorker).to receive(:perform_in) expect(project.pages_deployed?).to be_falsey expect(execute).to eq(:success) expect(project.pages_deployed?).to be_truthy project.destroy expect(project.pages_deployed?).to be_falsey end it 'fails if sha on branch is not latest' do build.update_attributes(ref: 'feature') expect(execute).not_to eq(:success) end it 'fails for empty file fails' do build.job_artifacts_archive.update_attributes(file: empty_file) expect(execute).not_to eq(:success) end context 'when timeout happens by DNS error' do before do allow_any_instance_of(described_class) .to receive(:extract_zip_archive!).and_raise(SocketError) end it 'raises an error' do expect { execute }.to raise_error(SocketError) end end end end it 'fails to remove project pages when no pages is deployed' do 
expect(PagesWorker).not_to receive(:perform_in) expect(project.pages_deployed?).to be_falsey project.destroy end it 'fails if no artifacts' do expect(execute).not_to eq(:success) end it 'fails for invalid archive' do build.update_attributes(legacy_artifacts_file: invalid_file) expect(execute).not_to eq(:success) end describe 'maximum pages artifacts size' do let(:metadata) { spy('metadata') } before do file = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip') metafile = fixture_file_upload(Rails.root + 'spec/fixtures/pages.zip.meta') build.update_attributes(legacy_artifacts_file: file) build.update_attributes(legacy_artifacts_metadata: metafile) allow(build).to receive(:artifacts_metadata_entry) .and_return(metadata) end shared_examples 'pages size limit exceeded' do it 'limits the maximum size of gitlab pages' do subject.execute expect(deploy_status.description) .to match(/artifacts for pages are too large/) expect(deploy_status).to be_script_failure end end context 'when maximum pages size is set to zero' do before do stub_application_setting(max_pages_size: 0) end context 'when page size does not exceed internal maximum' do before do allow(metadata).to receive(:total_size).and_return(200.megabytes) end it 'updates pages correctly' do subject.execute expect(deploy_status.description).not_to be_present end end context 'when pages size does exceed internal maximum' do before do allow(metadata).to receive(:total_size).and_return(2.terabytes) end it_behaves_like 'pages size limit exceeded' end end context 'when pages size is greater than max size setting' do before do stub_application_setting(max_pages_size: 200) allow(metadata).to receive(:total_size).and_return(201.megabytes) end it_behaves_like 'pages size limit exceeded' end context 'when max size setting is greater than internal max size' do before do stub_application_setting(max_pages_size: 3.terabytes / 1.megabyte) allow(metadata).to receive(:total_size).and_return(2.terabytes) end it_behaves_like 
'pages size limit exceeded' end end def deploy_status GenericCommitStatus.find_by(name: 'pages:deploy') end def execute subject.execute[:status] end end
# frozen_string_literal: true

module JSONAPI
  module Serializer
    # Gem release version (bumped 0.12.0 -> 0.13.0).
    VERSION = '0.13.0'
  end
end
require 'rails_helper'

# Specs for the base routing constraint:
# - #matches? must accept every request,
# - #as_param! merges values into the request's path parameters,
# - #query_params / #positional_params split a fullpath query string into
#   key=value pairs vs. bare positional tokens (mapped to their index).
describe Specroutes::Constraints::BaseConstraint do
  let(:constraint) { described_class.new }

  context '#matches' do
    [nil, 'other', 1, Object.new].each do |val|
      it "should always return true (e.g. with #{val.inspect})" do
        expect(constraint.matches?(val)).to be(true)
      end
    end
  end

  context '#as_param!' do
    let(:params) { {} }
    let(:env) { {'action_dispatch.request.path_parameters' => params} }
    # Shadows the describe-level request: #as_param! only needs the env.
    let(:request) { double('request', env: env) }

    it 'should contain no params in the env before' do
      expect(params).to be_empty
    end

    it 'should merge values into the params-hash' do
      constraint.send(:as_param!, request, key: 'value')
      expect(params).to include(key: 'value')
    end
  end

  # Mixed query string: positional tokens interleaved with key=value pairs.
  let(:uri) { '/simple?positional;key=value;other_positional;id=1' }
  let(:request) { double('request', original_fullpath: uri) }

  context '#query_params' do
    let(:query_params) { constraint.send(:query_params, request) }

    it 'should not contain "positional"' do
      expect(query_params.keys).to_not include('positional')
    end

    it 'should contain the key => value mapping' do
      expect(query_params).to include('key' => 'value')
    end

    it 'should not contain "other_positional"' do
      expect(query_params.keys).to_not include('other_positional')
    end

    it 'should contain the id => 1 mapping' do
      expect(query_params).to include('id' => '1')
    end
  end

  context '#positional_params' do
    let(:positional_params) { constraint.send(:positional_params, request) }

    it 'should contain "positional" as a to-index mapping' do
      expect(positional_params).to include('positional' => 0)
    end

    it 'should not contain "key" as a key' do
      expect(positional_params.keys).to_not include('key')
    end

    it 'should contain "other_positional" as a to-index mapping' do
      expect(positional_params).to include('other_positional' => 2)
    end

    it 'should not contain "id" as a key' do
      expect(positional_params.keys).to_not include('id')
    end
  end
end
require 'csv'
require 'yaml'
require 'rchardet'
require 'htmlentities'

module LeesToolbox
  # Entry point: translate a source file (CSV or TXT) of product
  # descriptions into HTML-formatted output.
  def self.run(params)
    md = Markdown.new(params)
    md.translate
  end

  class Markdown
    # Entities (and stray carriage returns) converted back to plain
    # characters after HTML-encoding, so only genuinely special characters
    # remain as entities in the output.
    SPECIAL_CHARS = {
      "&acute;"  => "'",   "&amp;"    => "&",   "&apos;"  => "'",
      "&copy;"   => "",    "&ldquo;"  => '"',   "&lsquo;" => "'",
      "&nbsp;"   => " ",   "&ndash;"  => "-",   "&mdash;" => "—",
      "&rdquo;"  => '"',   "&reg;"    => "",    "&rsquo;" => "'",
      "&trade;"  => "",    "&quot;"   => '"',   "&lt;"    => "<",
      "&gt;"     => ">",   "&frac12;" => "1/2", "&frac14;" => "1/4",
      "&frac34;" => "3/4", "&sup1;"   => "1",   "&sup2;"  => "2",
      "&sup3;"   => "3",   "\r\n"     => "\n",  "\r"      => "\n"
    }

    # params: { :type => file extension (".csv" or ".txt"),
    #           :source => path to the input file }
    def initialize(params)
      @type = params[:type]
      @file = params[:source]
      path = File.dirname(params[:source])
      filename = File.basename(params[:source], @type)
      # FIX: interpolate the computed base filename (was a garbled literal,
      # leaving `filename` unused).
      @target = "#{path}/#{filename}-FORMATTED#{@type}"
    end

    # Detect the source encoding, read + parse the input, and write the
    # formatted result to @target.
    def translate
      # Detect file encoding; anything that isn't UTF-8 (case-insensitive,
      # nil-safe) is treated as Windows-1252 and transcoded to UTF-8.
      detected = CharDet.detect(File.read(@file))["encoding"]
      if detected.to_s.upcase != "UTF-8"
        encoding = "Windows-1252:UTF-8"
      else
        encoding = "UTF-8:UTF-8"
      end

      # Open file
      if @type == ".csv"
        nospaces = Proc.new { |head| head.gsub(" ", "_") } # Special header filter
        data = CSV.open(@file, :headers => true,
                               :header_converters => [:downcase, nospaces],
                               :skip_blanks => true,
                               :encoding => encoding)
      elsif @type == ".txt"
        data = File.open(@file, "r", :encoding => encoding)
      end

      write_to_file(parse(data)) # Parse data and write it to file
    end

    private

    ##
    # METHOD: parse(data)
    # CSV: format each description row (blanks pass through empty).
    # TXT: format the whole file body.
    def parse(data)
      if @type == ".csv"
        descriptions = get_descriptions(data)
        output = ["Desc"]
        descriptions.each do |row|
          if row.nil?
            output << "" # Don't do blanks
          else
            output << format(row)
          end
        end
      elsif @type == ".txt"
        output = format(data.read)
      end
      output # And don't forget to return it
    end

    ##
    # METHOD: write_to_file(text)
    # Write formatted data to @target as UTF-8.
    def write_to_file(data)
      if @type == ".csv"
        CSV.open(@target, "w", :encoding => "UTF-8", :headers => true) do |csv|
          data.each do |row|
            csv << [row]
          end
        end
      elsif @type == ".txt"
        File.open(@target, "w", :encoding => "UTF-8") do |file|
          file << data
        end
      end
    end

    ##
    # METHOD: format
    # Divide text into named sections, filter each, and wrap in divs.
    def format(text)
      output = "<ECI>\n<div><font face='verdana'>\n"
      sections = sectionize(text).to_a.map! { |section| filter(section) }
      sections.each do |section|
        # product_name gets no underlined header; other sections do.
        header = section[0] == "product_name" ? "" : "\t<u>#{section[0].capitalize!}</u>\n"
        output << "<div id=\"#{section[0]}\">\n"
        output << header
        output << "\t#{section[1]}\n"
        output << "</div>\n"
      end
      output << "</font></div>"
    end

    ##
    # METHOD: filter(section)
    # Route a [name, body] section to its formatter. A "#rule" suffix on
    # the name (graf/table/list) overrides the default (list).
    def filter(section)
      section[0] = section[0].split("#")
      head = section[0][0]
      rule = section[0][1]
      if head == "product_name"
        body = form_of_title(section[1])   # product_name has but one format
      elsif head == "description"
        body = form_of_graf(section[1])    # And description is always a graf
      else
        case rule
        when "graf"  then body = form_of_graf(section[1])
        when "table" then body = form_of_table(section[1])
        when "list"  then body = form_of_list(section[1])
        else              body = form_of_list(section[1])
        end
      end
      [head, body] # Return a binary array
    end

    ##
    # METHOD: form_of_graf(text)
    # Format text as one <p> element per line.
    def form_of_graf(text)
      text = sanitize(text)
      output = text.split("\n")
      output.map! do |line|
        line.strip!
        line.insert(0, "<p>")
        line.insert(-1, "</p>")
      end
      output.join("\n")
    end

    ##
    # METHOD: form_of_table(text)
    # Format delimited text as an HTML table (first row becomes <th>).
    def form_of_table(text)
      text.gsub!(/\r\n/, "\n") # Clean up newlines
      # Whichever separator occurs more often wins; tabs are normalized
      # to pipes first so a single split character can be used.
      # FIX: the original ternary (`a ? sep="," : ...; sep="|"`)
      # unconditionally overwrote sep with "|", making commas unreachable.
      commas = text.scan(",").length
      tabs = text.scan("\t").length
      if commas > tabs
        sep = ","
      else
        text.gsub!("\t", "|")
        sep = "|"
      end
      text.strip!
      table = text.split("\n").map! { |row| row.split(sep) }
      # Widest row determines the column count (for colspan padding).
      columns = 0
      table.each do |row|
        columns = row.length if row.length > columns
      end
      output = "<table>\n"
      r = 1 # Row counter
      table.each do |row|
        next if row.join.length < 1 # Skip empty rows
        output << "\t<tr>\n"
        c = 0 # Column counter
        colspan = row.length < columns ? columns - row.length + 1 : false
        code = r == 1 ? "th" : "td" # Header cells on the first row
        row.each do |field|
          c += 1
          output << "\t\t<#{code}"
          if colspan && c == row.length # Pad short rows on the last cell
            output << " colspan=\"#{colspan}\">"
          else
            output << ">"
          end
          output << sanitize(field)
          output << "</#{code}>\n"
        end
        output << "\t</tr>\n"
        r += 1
      end
      output << "</table>\n"
    end

    ##
    # METHOD: form_of_list(text)
    # Format a block of text as a (possibly nested) <ul> list.
    # Lines starting with "*" open a sublist; "-" lines continue it.
    def form_of_list(text)
      output = "<ul>\n"
      listdepth = 1 # Counter for sublists
      text.gsub!(/[:]+[ \n\t]*/, ": ") # Collapse space/newlines after colons
      text = text.split("\n")
      text.each do |line|
        next if line.length < 2 # Skip empty line
        line.strip!
        if line[0] == "*" # Start sublist
          line = sanitize(line)
          line.sub!("*", "\t\t<li style=\"list-style:none\"><strong>")
          output << "#{line}</strong>\n"
          output << "\t\t\t<ul>\n"
          listdepth += 1
        elsif line[0] == "-" # Continue sublist
          line = sanitize(line)
          line.sub!("-", "\t\t\t\t<li>")
          output << "#{line}</li>\n"
        else # Plain item; close any open sublist first
          if listdepth > 1
            listdepth -= 1
            output << "\t\t\t</ul>\n\t\t</li>\n"
          end
          output << "\t\t<li>#{sanitize(line)}</li>\n"
        end
      end
      # Close a sublist left open at the end of input.
      if listdepth > 1
        output << "\t\t\t</ul>\n\t\t</li>\n"
      end
      output << "\t</ul>\n"
    end

    ##
    # METHOD: form_of_title(text)
    # Title-case the text, preserving the first word (vendor name),
    # digits, "*"-prefixed words, and lowercasing small words.
    def form_of_title(text)
      no_cap = ["a","an","the","with","and","but","or","on","in","at","to"]
      title = text.split
      title.map! do |word|
        if word != title[0] # Skip: first word is Vendor name
          if word[0] == "*" # "*" marks a word to leave untouched
            thisword = word.sub!("*", "")
          elsif word[0] =~ /[\.\d]/ # Skip: digits
            thisword = word
          elsif word.include?('-') && word.length > 1
            # Capitalize both sides of hyphens (-)
            thisword = word.split('-').each { |i| i.capitalize! }.join('-')
          elsif no_cap.include?(word.downcase)
            thisword = word.downcase
          else
            thisword = word.downcase.capitalize
          end
        else
          thisword = word
        end
        sanitize(thisword)
      end
      "<h2> #{title.join(' ')} </h2>"
    end

    ##
    # METHOD: get_descriptions(data)
    # Return the descriptions column from the CSV: the "desc" column if
    # present, otherwise prompt the user to pick a column on STDIN.
    def get_descriptions(data)
      data = data.read # Read data from CSV
      headers = data.headers
      if headers.include?("desc")
        descriptions = data["desc"]
      else
        header = ""
        while !headers.include?(header)
          puts "Which column has product descriptions?"
          headers.each { |h| puts "\t#{h}" } # List column heads
          print "#: "
          header = STDIN.gets.chomp
          descriptions = data[header]
        end
      end
      descriptions
    end

    ##
    # METHOD: sectionize(text)
    # Divide "{name}body"-formatted text into a name => body hash.
    def sectionize(text)
      sections = {}
      splits = text.split("{")
      splits.delete_at(0) # Text before the first "{" is discarded
      splits.each do |splitted|
        part = splitted.split("}")
        sections[part[0]] = part[1]
      end
      sections
    end

    ##
    # METHOD: sanitize(input)
    # HTML-encode special characters, then restore the plain forms listed
    # in SPECIAL_CHARS; collapse doubled newlines/spaces and strip edges.
    def sanitize(input)
      encoder = HTMLEntities.new(:html4)
      output = encoder.encode(input, :named)
      SPECIAL_CHARS.each do |k, v|
        output.gsub!(k, v)
      end
      while output.include?("\n\n")
        output.gsub!("\n\n", "\n")
      end
      while output.include?("  ")
        output.gsub!("  ", " ")
      end
      output.strip
    end
  end
end
# Homebrew formula for ignition-transport 1.0.1.
class IgnitionTransport < Formula
  desc "Transport middleware for robotics"
  homepage "http://ignitionrobotics.org"
  url "http://gazebosim.org/distributions/ign-transport/releases/ignition-transport-1.0.1.tar.bz2"
  sha256 "584b9b2ff29c349893e8b97761acfea70097f45374554d5ed0b57abac1691e23"
  head "https://bitbucket.org/ignitionrobotics/ign-transport",
       :branch => "default", :using => :hg

  bottle do
    root_url "http://gazebosim.org/distributions/ign-transport/releases"
    cellar :any
    revision 2
    sha256 "3c59f42ce7a1f34a0d74cbce9d27e47f52e761dd0193f8e2b61aa48e0a458e34" => :yosemite
  end

  depends_on "cmake" => :build
  depends_on "doxygen" => [:build, :optional]
  depends_on "pkg-config" => :build
  depends_on "osrf/simulation/ignition-tools"
  depends_on "protobuf"
  depends_on "protobuf-c" => :build
  depends_on "ossp-uuid"
  depends_on "zeromq"
  depends_on "cppzmq"

  patch do
    # Fix for ignition-tools library suffix
    url "https://bitbucket.org/ignitionrobotics/ign-transport/commits/69e61049a4b15a93625a7dde4838a71354ed9551/raw/"
    sha256 "7e3135aa4ef1f884c2c8ab476f40974b0c9b687896115140eab5d34d07c06125"
  end

  def install
    system "cmake", ".", *std_cmake_args
    system "make", "install"
  end

  test do
    # NOTE(review): placeholder test — `false` always exits non-zero, so
    # `brew test` always fails. Replace with a real smoke test.
    system "false"
  end
end
require 'formhub_ruby'
require 'spec_helper'
require 'yaml'

# Specs for the FormHub API connector: connection/fetch behavior and the
# query-string the connector builds for query/start/limit options.
# Uses the modern RSpec expect(...).to syntax throughout.
describe FormhubRuby::ApiConnector do
  before :each do
    credentials = YAML.load_file('spec/fixtures/test_credentials.yml')
    FormhubRuby.configure do |config|
      config.username = credentials['username']
      config.password = credentials['password']
    end
  end

  context 'when connecting to the API' do
    it 'successfully connects to the FormHub API and retrieve JSON Data' do
      VCR.use_cassette 'successful_connection' do
        connection = FormhubRuby::ApiConnector.new(formname: 'survey')
        connection.fetch
        expect(connection.data).to be_a_kind_of(Array)
        expect(connection.data[0]).to be_a_kind_of(Object)
      end
    end

    it 'displays appropriate message if the JSON data is not successfully retrieved' do
      stub_request(:any, "http://formhub.org/#{FormhubRuby.configuration.username}/forms/survey/api").to_return(:body => 'NO JSON HERE')
      connection = FormhubRuby::ApiConnector.new(formname: 'fake')
      expect { connection.fetch }.to raise_error('API connection error')
    end
  end

  context 'when formulating a more complex query string' do
    let(:connection) { FormhubRuby::ApiConnector.new(formname: 'survey') }
    let(:username) { FormhubRuby.configuration.username }

    it "does not add any extraneaous query" do
      connection = FormhubRuby::ApiConnector.new(formname: 'survey')
      expect(connection.api_uri).to eq("http://formhub.org/#{username}/forms/survey/api")
    end

    it "does form a simple query" do
      connection.query = {age: 12}
      expect(connection.api_uri).to eq("http://formhub.org/#{username}/forms/survey/api?query=%7B%22age%22%3A%2212%22%7D")
      VCR.use_cassette 'age_query' do
        connection.fetch
        expect(connection.data.length).to eq(1)
      end
    end

    it "formulates a query with a start" do
      connection.start = 1
      expect(connection.api_uri).to eq("http://formhub.org/#{username}/forms/survey/api?start=1")
      VCR.use_cassette 'query_start' do
        connection.fetch
        expect(connection.data.length).to eq(1)
      end
    end

    it "formulates a query with a limit" do
      connection.limit = 1
      expect(connection.api_uri).to eq("http://formhub.org/#{username}/forms/survey/api?limit=1")
      VCR.use_cassette 'query_limit' do
        connection.fetch
        expect(connection.data.length).to eq(1)
      end
    end
  end
end
require "git" require_relative "../logging_module" require "securerandom" module FastlaneCI # Encapsulates all the data that is needed by GitRepo # We can have various provider_credentials, but they all need to be turned into `GitRepoAuth`s # This is because different git providers can have different needs for data # What github needs is an `api_token`, but a local git repo might only need a `password`. # We'll call both of these "auth_tokens" here, this way we can use GitRepoAuth # as a way to unify those, and prevent overloading names at the data source. # Otherwise, in the JSON we'd see "password" but for some repos that might be an auth_token, or an api_token, or password class GitRepoAuth attr_accessor :remote_host # in the case of github, this is usually `github.com` attr_accessor :username # whatever the git repo needs for a username, usually just an email, usually CI attr_accessor :full_name # whatever the git repo needs for a username, usually just an email, usually fastlane.CI attr_accessor :auth_token # usually an API key, but could be a password, usually fastlane.CI's auth_token def initialize(remote_host: nil, username: nil, full_name: nil, auth_token: nil) @remote_host = remote_host @username = username @full_name = full_name @auth_token = auth_token end end # Responsible for managing git repos # This includes the configs repo, but also the actual source code repos # This class makes sure to use the right credentials, does proper cloning, # pulling, pushing, git commit messages, etc. # TODO: @josh: do we need to move this somewhere? We only want to support git # so no need to have super class, etc, right? 
class GitRepo include FastlaneCI::Logging # @return [GitRepoConfig] attr_accessor :git_config # @return [GitRepoAuth] attr_accessor :repo_auth # whatever pieces of information that can change between git users attr_accessor :containing_path def initialize(git_config: nil, provider_credential: nil) self.validate_initialization_params!(git_config: git_config, provider_credential: provider_credential) @git_config = git_config # Ok, so now we need to pull the bit of information from the credentials that we know we need for git repos case provider_credential.type when FastlaneCI::ProviderCredential::PROVIDER_CREDENTIAL_TYPES[:github] # Package up the authentication parts that are required @repo_auth = GitRepoAuth.new( remote_host: provider_credential.remote_host, username: provider_credential.email, full_name: provider_credential.full_name, auth_token: provider_credential.api_token ) else # if we add another ProviderCredential type, we'll need to figure out what parts of the credential go where raise "unsupported credential type: #{provider_credential.type}" end if File.directory?(self.git_config.local_repo_path) # TODO: test if this crashes if it's not a git directory repo = Git.open(self.git_config.local_repo_path) if repo.index.writable? # Things are looking legit so far # Now we have to check if the repo is actually from the # same repo URL if repo.remote("origin").url == self.git_config.git_url self.pull else logger.debug("[#{self.git_config.id}] Repo URL seems to have changed... deleting the old directory and cloning again") clear_directory self.clone end else clear_directory self.clone end else self.clone end logger.debug("Using #{self.git_config.local_repo_path} for config repo") end # This is where we store the local git repo # fastlane.ci will also delete this directory if it breaks # and just re-clones. 
So make sure it's fine if it gets deleted
    def containing_path
      self.git_config.containing_path
    end

    # Guards the constructor arguments: raises when either the git config or
    # the provider credential is missing, or when the credential's type does
    # not match the type the git config declares it needs.
    def validate_initialization_params!(git_config: nil, provider_credential: nil)
      raise "No git config provided" if git_config.nil?
      raise "No provider_credential provided" if provider_credential.nil?

      credential_type = provider_credential.type
      git_config_credential_type = git_config.provider_credential_type_needed

      credential_mismatch = credential_type != git_config_credential_type
      raise "provider_credential.type and git_config.provider_credential_type_needed mismatch: #{credential_type} vs #{git_config_credential_type}" if credential_mismatch
    end

    # Wipes the local checkout entirely; callers are expected to re-clone.
    def clear_directory
      FileUtils.rm_rf(self.git_config.local_repo_path)
    end

    # Returns the absolute path to a file from inside the git repo
    def file_path(file_path)
      File.join(self.git_config.local_repo_path, file_path)
    end

    # Lazily opened handle to the local repository, memoized in @_git.
    def git
      if @_git.nil?
        @_git = Git.open(self.git_config.local_repo_path)
      end
      return @_git
    end

    # Responsible for setting the author information when committing a change
    def setup_author(full_name: self.repo_auth.full_name, username: self.repo_auth.username)
      # TODO: performance implications of settings this every time?
      # TODO: Set actual name + email here
      # TODO: see if we can set credentials here also
      logger.debug("Using #{full_name} with #{username} as author information")
      git.config("user.name", full_name)
      # NOTE(review): `username` is stored as user.email — presumably it is an
      # email address for GitHub credentials; confirm for other providers.
      git.config("user.email", username)
    end

    # Directory that holds the throwaway git credential-store files.
    # Created on first access and memoized.
    def temporary_git_storage
      @temporary_git_storage ||= File.expand_path("~/.fastlane/.tmp")
      FileUtils.mkdir_p(@temporary_git_storage)
      return @temporary_git_storage
    end

    # Responsible for using the auth token to be able to push/pull changes
    # from git remote
    # Writes the credentials to a unique temp file, points the repo's local
    # `credential.helper` at it, and returns that file's path so the caller
    # can delete it again via `unset_auth` (usually in an `ensure` block).
    def setup_auth(repo_auth: self.repo_auth)
      # More details: https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage
      storage_path = File.join(self.temporary_git_storage, "git-auth-#{SecureRandom.uuid}")
      local_repo_path = self.git_config.local_repo_path
      FileUtils.mkdir_p(local_repo_path) unless File.directory?(local_repo_path)

      store_credentials_command = "git credential-store --file #{storage_path.shellescape} store"
      content = [
        "protocol=https", # TODO: we should be able to figure this out, maybe stuff it in GitRepoAuth?
        "host=#{repo_auth.remote_host}",
        "username=#{repo_auth.username}",
        "password=#{repo_auth.auth_token}",
        ""
      ].join("\n")

      use_credentials_command = "git config --local credential.helper 'store --file #{storage_path.shellescape}'"

      Dir.chdir(local_repo_path) do
        cmd = TTY::Command.new
        cmd.run(store_credentials_command, input: content)
        cmd.run(use_credentials_command)
      end
      return storage_path
    end

    # Removes the temporary credential-store file created by `setup_auth`.
    def unset_auth(storage_path: nil)
      FileUtils.rm(storage_path)
    end

    # Pulls the latest changes from the remote; credentials are installed for
    # the duration of the pull and removed again in the `ensure` block.
    def pull(repo_auth: self.repo_auth)
      if ENV["super_verbose"] # because this repeats a ton
        logger.debug("[#{self.git_config.id}]: Pulling latest changes")
      end
      storage_path = self.setup_auth(repo_auth: repo_auth)
      git.pull
    ensure
      unset_auth(storage_path: storage_path)
    end

    # This method commits and pushes all changes
    # if `file_to_commit` is `nil`, all files will be added
    # TODO: this method isn't actually tested yet
    def commit_changes!(commit_message: nil, file_to_commit: nil, repo_auth: self.repo_auth)
      raise "file_to_commit not yet implemented" if file_to_commit
      commit_message ||= "Automatic commit by fastlane.ci"

      self.setup_author(full_name: repo_auth.full_name, username: repo_auth.username)

      git.add(all: true) # TODO: for now we only add all files
      git.commit(commit_message)
      # NOTE(review): ruby-git's `push` normally takes (remote, branch, opts);
      # verify these keyword arguments are actually honored here.
      git.push(
        remote_host: repo_auth.remote_host,
        username: repo_auth.username,
        auth_token: repo_auth.auth_token
      )
    end

    # Pushes local commits to the remote, cleaning up credentials afterwards.
    def push(repo_auth: self.repo_auth)
      self.setup_author(full_name: repo_auth.full_name, username: repo_auth.username)
      storage_path = self.setup_auth(repo_auth: repo_auth)
      # TODO: how do we handle branches
      self.git.push
    ensure
      unset_auth(storage_path: storage_path)
    end

    # Clones the repo into `containing_path`, into a directory named after the
    # config's id; credentials are removed again in the `ensure` block.
    def clone(repo_auth: self.repo_auth)
      raise "No containing path available" unless self.containing_path
      storage_path = self.setup_auth(repo_auth: repo_auth)
      logger.debug("[#{self.git_config.id}]: Cloning git repo #{self.git_config.git_url}")
      Git.clone(self.git_config.git_url, self.git_config.id, path: self.containing_path)
    ensure
      unset_auth(storage_path: storage_path)
    end
  end
end

Case in-sensitive URL comparision

require "git"
require_relative "../logging_module"
require "securerandom"

module FastlaneCI
  # Encapsulates all the data that is needed by GitRepo
  # We can have various provider_credentials, but they all need to be turned into `GitRepoAuth`s
  # This is because different git providers can have different needs for data
  # What github needs is an `api_token`, but a local git repo might only need a `password`.
  # We'll call both of these "auth_tokens" here, this way we can use GitRepoAuth
  # as a way to unify those, and prevent overloading names at the data source.
  # Otherwise, in the JSON we'd see "password" but for some repos that might be an auth_token, or an api_token, or password
  class GitRepoAuth
    attr_accessor :remote_host # in the case of github, this is usually `github.com`
    attr_accessor :username # whatever the git repo needs for a username, usually just an email, usually CI
    attr_accessor :full_name # whatever the git repo needs for a username, usually just an email, usually fastlane.CI
    attr_accessor :auth_token # usually an API key, but could be a password, usually fastlane.CI's auth_token

    # Simple value holder: all parameters are optional and stored as-is
    # (including nil), so callers can fill in whatever their provider supplies.
    def initialize(remote_host: nil, username: nil, full_name: nil, auth_token: nil)
      @remote_host = remote_host
      @username = username
      @full_name = full_name
      @auth_token = auth_token
    end
  end

  # Responsible for managing git repos
  # This includes the configs repo, but also the actual source code repos
  # This class makes sure to use the right credentials, does proper cloning,
  # pulling, pushing, git commit messages, etc.
  # TODO: @josh: do we need to move this somewhere? We only want to support git
  # so no need to have super class, etc, right?
  class GitRepo
    include FastlaneCI::Logging

    # @return [GitRepoConfig]
    attr_accessor :git_config

    # @return [GitRepoAuth]
    attr_accessor :repo_auth # whatever pieces of information that can change between git users

    # NOTE(review): this accessor appears to be shadowed by the
    # `containing_path` method defined below, which delegates to git_config.
    attr_accessor :containing_path

    # Sets up the local checkout: validates the arguments, derives a
    # GitRepoAuth from the provider credential, then clones or pulls as needed
    # so `local_repo_path` ends up holding a checkout of `git_url`.
    # Note this constructor performs network/filesystem I/O (clone/pull).
    def initialize(git_config: nil, provider_credential: nil)
      self.validate_initialization_params!(git_config: git_config, provider_credential: provider_credential)
      @git_config = git_config

      # Ok, so now we need to pull the bit of information from the credentials that we know we need for git repos
      case provider_credential.type
      when FastlaneCI::ProviderCredential::PROVIDER_CREDENTIAL_TYPES[:github]
        # Package up the authentication parts that are required
        @repo_auth = GitRepoAuth.new(
          remote_host: provider_credential.remote_host,
          username: provider_credential.email,
          full_name: provider_credential.full_name,
          auth_token: provider_credential.api_token
        )
      else
        # if we add another ProviderCredential type, we'll need to figure out what parts of the credential go where
        raise "unsupported credential type: #{provider_credential.type}"
      end

      if File.directory?(self.git_config.local_repo_path)
        # TODO: test if this crashes if it's not a git directory
        repo = Git.open(self.git_config.local_repo_path)
        if repo.index.writable?
          # Things are looking legit so far
          # Now we have to check if the repo is actually from the
          # same repo URL (compared case-insensitively via downcase)
          if repo.remote("origin").url.downcase == self.git_config.git_url.downcase
            self.pull
          else
            logger.debug("[#{self.git_config.id}] Repo URL seems to have changed... deleting the old directory and cloning again")
            clear_directory
            self.clone
          end
        else
          clear_directory
          self.clone
        end
      else
        self.clone
      end
      logger.debug("Using #{self.git_config.local_repo_path} for config repo")
    end

    # This is where we store the local git repo
    # fastlane.ci will also delete this directory if it breaks
    # and just re-clones.
So make sure it's fine if it gets deleted
    def containing_path
      self.git_config.containing_path
    end

    # Guards the constructor arguments: raises when either the git config or
    # the provider credential is missing, or when the credential's type does
    # not match the type the git config declares it needs.
    def validate_initialization_params!(git_config: nil, provider_credential: nil)
      raise "No git config provided" if git_config.nil?
      raise "No provider_credential provided" if provider_credential.nil?

      credential_type = provider_credential.type
      git_config_credential_type = git_config.provider_credential_type_needed

      credential_mismatch = credential_type != git_config_credential_type
      raise "provider_credential.type and git_config.provider_credential_type_needed mismatch: #{credential_type} vs #{git_config_credential_type}" if credential_mismatch
    end

    # Wipes the local checkout entirely; callers are expected to re-clone.
    def clear_directory
      FileUtils.rm_rf(self.git_config.local_repo_path)
    end

    # Returns the absolute path to a file from inside the git repo
    def file_path(file_path)
      File.join(self.git_config.local_repo_path, file_path)
    end

    # Lazily opened handle to the local repository, memoized in @_git.
    def git
      if @_git.nil?
        @_git = Git.open(self.git_config.local_repo_path)
      end
      return @_git
    end

    # Responsible for setting the author information when committing a change
    def setup_author(full_name: self.repo_auth.full_name, username: self.repo_auth.username)
      # TODO: performance implications of settings this every time?
      # TODO: Set actual name + email here
      # TODO: see if we can set credentials here also
      logger.debug("Using #{full_name} with #{username} as author information")
      git.config("user.name", full_name)
      # NOTE(review): `username` is stored as user.email — presumably it is an
      # email address for GitHub credentials; confirm for other providers.
      git.config("user.email", username)
    end

    # Directory that holds the throwaway git credential-store files.
    # Created on first access and memoized.
    def temporary_git_storage
      @temporary_git_storage ||= File.expand_path("~/.fastlane/.tmp")
      FileUtils.mkdir_p(@temporary_git_storage)
      return @temporary_git_storage
    end

    # Responsible for using the auth token to be able to push/pull changes
    # from git remote
    # Writes the credentials to a unique temp file, points the repo's local
    # `credential.helper` at it, and returns that file's path so the caller
    # can delete it again via `unset_auth` (usually in an `ensure` block).
    def setup_auth(repo_auth: self.repo_auth)
      # More details: https://git-scm.com/book/en/v2/Git-Tools-Credential-Storage
      storage_path = File.join(self.temporary_git_storage, "git-auth-#{SecureRandom.uuid}")
      local_repo_path = self.git_config.local_repo_path
      FileUtils.mkdir_p(local_repo_path) unless File.directory?(local_repo_path)

      store_credentials_command = "git credential-store --file #{storage_path.shellescape} store"
      content = [
        "protocol=https", # TODO: we should be able to figure this out, maybe stuff it in GitRepoAuth?
        "host=#{repo_auth.remote_host}",
        "username=#{repo_auth.username}",
        "password=#{repo_auth.auth_token}",
        ""
      ].join("\n")

      use_credentials_command = "git config --local credential.helper 'store --file #{storage_path.shellescape}'"

      Dir.chdir(local_repo_path) do
        cmd = TTY::Command.new
        cmd.run(store_credentials_command, input: content)
        cmd.run(use_credentials_command)
      end
      return storage_path
    end

    # Removes the temporary credential-store file created by `setup_auth`.
    def unset_auth(storage_path: nil)
      FileUtils.rm(storage_path)
    end

    # Pulls the latest changes from the remote; credentials are installed for
    # the duration of the pull and removed again in the `ensure` block.
    def pull(repo_auth: self.repo_auth)
      if ENV["super_verbose"] # because this repeats a ton
        logger.debug("[#{self.git_config.id}]: Pulling latest changes")
      end
      storage_path = self.setup_auth(repo_auth: repo_auth)
      git.pull
    ensure
      unset_auth(storage_path: storage_path)
    end

    # This method commits and pushes all changes
    # if `file_to_commit` is `nil`, all files will be added
    # TODO: this method isn't actually tested yet
    def commit_changes!(commit_message: nil, file_to_commit: nil, repo_auth: self.repo_auth)
      raise "file_to_commit not yet implemented" if file_to_commit
      commit_message ||= "Automatic commit by fastlane.ci"

      self.setup_author(full_name: repo_auth.full_name, username: repo_auth.username)

      git.add(all: true) # TODO: for now we only add all files
      git.commit(commit_message)
      # NOTE(review): ruby-git's `push` normally takes (remote, branch, opts);
      # verify these keyword arguments are actually honored here.
      git.push(
        remote_host: repo_auth.remote_host,
        username: repo_auth.username,
        auth_token: repo_auth.auth_token
      )
    end

    # Pushes local commits to the remote, cleaning up credentials afterwards.
    def push(repo_auth: self.repo_auth)
      self.setup_author(full_name: repo_auth.full_name, username: repo_auth.username)
      storage_path = self.setup_auth(repo_auth: repo_auth)
      # TODO: how do we handle branches
      self.git.push
    ensure
      unset_auth(storage_path: storage_path)
    end

    # Clones the repo into `containing_path`, into a directory named after the
    # config's id; credentials are removed again in the `ensure` block.
    def clone(repo_auth: self.repo_auth)
      raise "No containing path available" unless self.containing_path
      storage_path = self.setup_auth(repo_auth: repo_auth)
      logger.debug("[#{self.git_config.id}]: Cloning git repo #{self.git_config.git_url}")
      Git.clone(self.git_config.git_url, self.git_config.id, path: self.containing_path)
    ensure
      unset_auth(storage_path: storage_path)
    end
  end
end
require 'date'
require 'sinatra/cross_origin'

# Base class for all API routers: enables CORS, configures the public asset
# folder, and provides shared JSON response helpers.
class BaseRouter < Sinatra::Base
  configure do
    register Sinatra::CrossOrigin
    enable :cross_origin
  end

  set :root, PiggyBanker.root
  # NOTE(review): `<<` mutates settings.root in place (String#<<);
  # presumably intentional, but confirm PiggyBanker.root isn't reused elsewhere.
  set :public_folder, settings.root << PiggyBanker.settings['assetspath']

  not_found do
    status 404
    # erb :error # FIXME: how to stub erb
  end

  # Builds the standard JSON envelope (version + timestamp); `content`,
  # when given, is embedded under the :content key.
  def respond_with(content = nil)
    content_type :json
    response = {
      version: "#{PiggyBanker.settings['version']}",
      timestamp: Time.now.to_s
    }
    response[:content] = content unless content.nil?
    response.to_json
  end

  # Shared helper: responds with HTTP 500.
  def response_internal_error
    status 500
  end

  # Shared helper: responds with HTTP 404.
  def response_not_found
    status 404
  end
end

allow cors option

require 'date'
require 'sinatra/cross_origin'

# Base class for all API routers: enables CORS (including the preflight
# OPTIONS handler), configures the public asset folder, and provides shared
# JSON response helpers.
class BaseRouter < Sinatra::Base
  configure do
    register Sinatra::CrossOrigin
    enable :cross_origin
  end

  set :root, PiggyBanker.root
  # NOTE(review): `<<` mutates settings.root in place (String#<<);
  # presumably intentional, but confirm PiggyBanker.root isn't reused elsewhere.
  set :public_folder, settings.root << PiggyBanker.settings['assetspath']

  not_found do
    status 404
    # erb :error # FIXME: how to stub erb
  end

  # CORS preflight handler: advertises the allowed methods and request
  # headers for every route, then returns 200 with no body.
  options '*' do
    response.headers['Access-Control-Allow-Methods'] = 'GET, PATCH, OPTIONS'
    response.headers['Access-Control-Allow-Headers'] = 'X-Requested-With, X-HTTP-Method-Override, Content-Type, Cache-Control, Accept'
    200
  end

  # Builds the standard JSON envelope (version + timestamp); `content`,
  # when given, is embedded under the :content key.
  def respond_with(content = nil)
    content_type :json
    response = {
      version: "#{PiggyBanker.settings['version']}",
      timestamp: Time.now.to_s
    }
    response[:content] = content unless content.nil?
    response.to_json
  end

  # Shared helper: responds with HTTP 500.
  def response_internal_error
    status 500
  end

  # Shared helper: responds with HTTP 404.
  def response_not_found
    status 404
  end
end