Instruction stringlengths 14 778 | input_code stringlengths 0 4.24k | output_code stringlengths 1 5.44k |
|---|---|---|
Add the observation descriptions required for this display | require_dependency "renalware/pathology"
module Renalware
module Pathology
class ObservationsController < Pathology::BaseController
before_filter :load_patient
def index
query = Renalware::Pathology::ArchivedResultsQuery.new(patient: @patient).call
observation_descriptions = Renalware::Pathology::ObservationDescription.limit(10)
presenter = Renalware::Pathology::ArchivedResultsPresenter.new(
query, observation_descriptions
)
render :index, locals: { results: presenter, observation_descriptions: observation_descriptions }
end
end
end
end
| require_dependency "renalware/pathology"
module Renalware
module Pathology
class ObservationsController < Pathology::BaseController
before_filter :load_patient
def index
query = Renalware::Pathology::ArchivedResultsQuery.new(patient: @patient).call
observation_descriptions = Renalware::Pathology::ObservationDescription.for(description_codes.take(10))
presenter = Renalware::Pathology::ArchivedResultsPresenter.new(
query, observation_descriptions
)
render :index, locals: { results: presenter, observation_descriptions: observation_descriptions }
end
private
def description_codes
[
'AFP','ALB','ALT','AL','AMY','ALP','ASM','AST','B12','BIL','CAL', 'CCA',
'CHOL','CK','CRCL','CRE','CRP','CU','CYA','ESRR','FER','FIB','FOL','GGT',
'GLO','BGLU','HBA','HBAI','HB','BIC','HCO3','HDL','HYPO','LYM','NEUT','PHOS',
'RETA','POT','LDL','MCH', 'MCV','MG','NA','PGLU','PLT','PTHI','TP','TRIG',
'TSH','URAT','UREP','URE','URR',' WBC','ACRA','PCRAT'
]
end
end
end
end
|
Fix Auth::Abstract::Handler to return headers within spec. 401 and 400 responses must have Content-Type and Content-Length defined. | module Rack
module Auth
# Rack::Auth::AbstractHandler implements common authentication functionality.
#
# +realm+ should be set for all handlers.
class AbstractHandler
attr_accessor :realm
def initialize(app, &authenticator)
@app, @authenticator = app, authenticator
end
private
def unauthorized(www_authenticate = challenge)
return [ 401, { 'WWW-Authenticate' => www_authenticate.to_s }, [] ]
end
def bad_request
[ 400, {}, [] ]
end
end
end
end
| module Rack
module Auth
# Rack::Auth::AbstractHandler implements common authentication functionality.
#
# +realm+ should be set for all handlers.
class AbstractHandler
attr_accessor :realm
def initialize(app, &authenticator)
@app, @authenticator = app, authenticator
end
private
def unauthorized(www_authenticate = challenge)
return [ 401,
{ 'Content-Type' => 'text/plain',
'Content-Length' => '0',
'WWW-Authenticate' => www_authenticate.to_s },
[]
]
end
def bad_request
return [ 400,
{ 'Content-Type' => 'text/plain',
'Content-Length' => '0' },
[]
]
end
end
end
end
|
Set instance variables instead of calling instance methods for on!, off! | module CachedResource
# The Configuration class contains global configuration options
# for CachedResource as well as class specific options.
class Configuration
# options attributes
ATTRIBUTES = [:enabled, :ttl, :logger, :cache]
# default options for cached resource
DEFAULTS = {
:enabled => true,
:ttl => 604800,
:cache => defined?(Rails.cache) && Rails.cache || ActiveSupport::Cache::MemoryStore.new,
:logger => defined?(Rails.logger) && Rails.logger || ActiveSupport::BufferedLogger.new(StringIO.new)
}
# prefix for log messages
LOGGER_PREFIX = "[cached_resource]"
attr_accessor *ATTRIBUTES
# initialize a configuration with the specified options.
# Falls back to the global configuration if an option is not present.
def initialize(options={})
@enabled = options[:enabled] || CachedResource.enabled
@ttl = options[:ttl] || CachedResource.ttl
@cache = options[:cache] || CachedResource.cache
@logger = options[:logger] || CachedResource.logger
end
# enable caching
def on!
enabled = true
end
# disable caching
def off!
enabled = false
end
end
end
| module CachedResource
# The Configuration class contains global configuration options
# for CachedResource as well as class specific options.
class Configuration
# options attributes
ATTRIBUTES = [:enabled, :ttl, :logger, :cache]
# default options for cached resource
DEFAULTS = {
:enabled => true,
:ttl => 604800,
:cache => defined?(Rails.cache) && Rails.cache || ActiveSupport::Cache::MemoryStore.new,
:logger => defined?(Rails.logger) && Rails.logger || ActiveSupport::BufferedLogger.new(StringIO.new)
}
# prefix for log messages
LOGGER_PREFIX = "[cached_resource]"
attr_accessor *ATTRIBUTES
# initialize a configuration with the specified options.
# Falls back to the global configuration if an option is not present.
def initialize(options={})
@enabled = options[:enabled] || CachedResource.enabled
@ttl = options[:ttl] || CachedResource.ttl
@cache = options[:cache] || CachedResource.cache
@logger = options[:logger] || CachedResource.logger
end
# enable caching
def on!
@enabled = true
end
# disable caching
def off!
@enabled = false
end
end
end
|
Add glib dep to prevent compile errors | require 'formula'
class KdePhonon <Formula
url 'ftp://ftp.kde.org/pub/kde/stable/phonon/4.4.4/src/phonon-4.4.4.tar.bz2'
homepage 'http://phonon.kde.org/'
md5 '1deb14ecb2185e1f2fe2741a0bd46852'
depends_on 'cmake' => :build
depends_on 'automoc4' => :build
depends_on 'qt'
keg_only "This package is already supplied by Qt and is only needed by KDE packages."
def install
system "cmake . #{std_cmake_parameters}"
system "make install"
end
end
| require 'formula'
class KdePhonon <Formula
url 'ftp://ftp.kde.org/pub/kde/stable/phonon/4.4.4/src/phonon-4.4.4.tar.bz2'
homepage 'http://phonon.kde.org/'
md5 '1deb14ecb2185e1f2fe2741a0bd46852'
depends_on 'cmake' => :build
depends_on 'automoc4' => :build
depends_on 'qt'
depends_on 'glib' => :build
keg_only "This package is already supplied by Qt and is only needed by KDE packages."
def install
system "cmake . #{std_cmake_parameters}"
system "make install"
end
end
|
Change spec to accurately reflect test | require File.dirname(__FILE__) + '/fixtures/procs.rb'
require File.dirname(__FILE__) + '/../../spec_helper'
describe "Proc.new with an associated block" do
it "returns a proc that represents the block" do
Proc.new { }.call.should == nil
Proc.new { "hello" }.call.should == "hello"
end
it "raises a LocalJumpError when context of the block no longer exists" do
def some_method(&b) b end
a_proc = Proc.new { return }
res = some_method(&a_proc)
lambda { res.call }.should raise_error(LocalJumpError)
end
it "returns from within enclosing method when 'return' is used in the block" do
# we essentially verify that the created instance behaves like proc,
# not like lambda.
def some_method
Proc.new { return :proc_return_value }.call
:method_return_value
end
some_method.should == :proc_return_value
end
end
describe "Proc.new without a block" do
it "raises an ArgumentError" do
lambda { Proc.new }.should raise_error(ArgumentError)
end
it "raises an ArgumentError if invoked from within a method with no block" do
lambda {
ProcSpecs.new_proc_in_method
}.should raise_error(ArgumentError)
end
it "returns a new Proc instance from the block passed to the containing method" do
ProcSpecs.new_proc_in_method { "hello" }.call.should == "hello"
end
end
| require File.dirname(__FILE__) + '/fixtures/procs.rb'
require File.dirname(__FILE__) + '/../../spec_helper'
describe "Proc.new with an associated block" do
it "returns a proc that represents the block" do
Proc.new { }.call.should == nil
Proc.new { "hello" }.call.should == "hello"
end
it "raises a LocalJumpError when context of the block no longer exists" do
def some_method
Proc.new { return }
end
res = some_method()
lambda { res.call }.should raise_error(LocalJumpError)
end
it "returns from within enclosing method when 'return' is used in the block" do
# we essentially verify that the created instance behaves like proc,
# not like lambda.
def some_method
Proc.new { return :proc_return_value }.call
:method_return_value
end
some_method.should == :proc_return_value
end
end
describe "Proc.new without a block" do
it "raises an ArgumentError" do
lambda { Proc.new }.should raise_error(ArgumentError)
end
it "raises an ArgumentError if invoked from within a method with no block" do
lambda {
ProcSpecs.new_proc_in_method
}.should raise_error(ArgumentError)
end
it "returns a new Proc instance from the block passed to the containing method" do
ProcSpecs.new_proc_in_method { "hello" }.call.should == "hello"
end
end
|
Add an event handler for UnknownEvent | # frozen_string_literal: true
require 'discordrb/events/generic'
# Event classes and handlers
module Discordrb::Events
# Event raised when any dispatch is received
class RawEvent < Event
# @return [Symbol] the type of this dispatch.
attr_reader :type
alias_method :t, :type
# @return [Hash] the data of this dispatch.
attr_reader :data
alias_method :d, :data
def initialize(type, data, bot)
@type = type
@data = data
@bot = bot
end
end
# Event handler for {RawEvent}
class RawEventHandler < EventHandler
def matches?(event)
# Check for the proper event type
return false unless event.is_a? RawEvent
[
matches_all(@attributes[:type] || @attributes[:t], event.type) do |a, e|
if a.is_a? Regexp
# 24: update to matches?
match = a.match(e)
match ? (e == match[0]) : false
else
e.to_s.casecmp(a.to_s).zero?
end
end
].reduce(true, &:&)
end
end
# Event raised when an unknown dispatch is received
class UnknownEvent < RawEvent; end
end
| # frozen_string_literal: true
require 'discordrb/events/generic'
# Event classes and handlers
module Discordrb::Events
# Event raised when any dispatch is received
class RawEvent < Event
# @return [Symbol] the type of this dispatch.
attr_reader :type
alias_method :t, :type
# @return [Hash] the data of this dispatch.
attr_reader :data
alias_method :d, :data
def initialize(type, data, bot)
@type = type
@data = data
@bot = bot
end
end
# Event handler for {RawEvent}
class RawEventHandler < EventHandler
def matches?(event)
# Check for the proper event type
return false unless event.is_a? RawEvent
[
matches_all(@attributes[:type] || @attributes[:t], event.type) do |a, e|
if a.is_a? Regexp
# 24: update to matches?
match = a.match(e)
match ? (e == match[0]) : false
else
e.to_s.casecmp(a.to_s).zero?
end
end
].reduce(true, &:&)
end
end
# Event raised when an unknown dispatch is received
class UnknownEvent < RawEvent; end
# Event handler for {UnknownEvent}
class UnknownEventHandler < RawEventHandler; end
end
|
Use 409 to indicate that interface might be outdated | class Projects::RunnerProjectsController < Projects::ApplicationController
before_action :authorize_admin_build!
layout 'project_settings'
def create
@runner = Ci::Runner.find(params[:runner_project][:runner_id])
return head(403) if @runner.is_shared? || @runner.locked?
return head(403) unless current_user.ci_authorized_runners.include?(@runner)
path = runners_path(project)
runner_project = @runner.assign_to(project, current_user)
if runner_project.persisted?
redirect_to path
else
redirect_to path, alert: 'Failed adding runner to project'
end
end
def destroy
runner_project = project.runner_projects.find(params[:id])
runner_project.destroy
redirect_to runners_path(project)
end
end
| class Projects::RunnerProjectsController < Projects::ApplicationController
before_action :authorize_admin_build!
layout 'project_settings'
def create
@runner = Ci::Runner.find(params[:runner_project][:runner_id])
return head(409) if @runner.is_shared? || @runner.locked?
return head(409) unless current_user.ci_authorized_runners.include?(@runner)
path = runners_path(project)
runner_project = @runner.assign_to(project, current_user)
if runner_project.persisted?
redirect_to path
else
redirect_to path, alert: 'Failed adding runner to project'
end
end
def destroy
runner_project = project.runner_projects.find(params[:id])
runner_project.destroy
redirect_to runners_path(project)
end
end
|
Disable auth checks for basic pages | class PagesController < ApplicationController
def home
end
def error
redirect_to root_path if flash.empty?
end
end
| class PagesController < ApplicationController
skip_authorization_check
skip_before_action :authenticate_user!
def home
end
def error
redirect_to root_path if flash.empty?
end
end
|
Fix typo in user activity debug log message | module Users
class ActivityService
def initialize(author, activity)
@author = author.respond_to?(:user) ? author.user : author
@activity = activity
end
def execute
return unless @author && @author.is_a?(User)
record_activity
end
private
def record_activity
Gitlab::UserActivities.record(@author.id)
Rails.logger.debug("Recorded activity: #{@activity} for User ID: #{@author.id} (username: #{@author.username}")
end
end
end
| module Users
class ActivityService
def initialize(author, activity)
@author = author.respond_to?(:user) ? author.user : author
@activity = activity
end
def execute
return unless @author && @author.is_a?(User)
record_activity
end
private
def record_activity
Gitlab::UserActivities.record(@author.id)
Rails.logger.debug("Recorded activity: #{@activity} for User ID: #{@author.id} (username: #{@author.username})")
end
end
end
|
Add pending to feature spec | require 'rails_helper'
feature 'Upload data after creating a new app' do
background do
@token = FactoryGirl.create(:welcome_token)
end
scenario 'upload data and sign in'
visit welcome_path
expect(page).to redirect_to welcome_path(code: @token.code)
expect(page).to have_content 'Taxonomy'
expect(page).to have_content 'Organizations'
# attach_file('ok', File.absolute_path('./fileset/publisher/upload_pic.jpg'))
end
end
end | require 'rails_helper'
feature 'Upload data after creating a new app' do
background do
@token = FactoryGirl.create(:welcome_token)
end
scenario 'upload data and sign in'
pending
visit welcome_path
expect(page).to redirect_to welcome_path(code: @token.code)
expect(page).to have_content 'Taxonomy'
expect(page).to have_content 'Organizations'
# attach_file('ok', File.absolute_path('./fileset/publisher/upload_pic.jpg'))
end
end
end |
Remove trailing slash when getting mirrors | require 'yaml'
require 'yaml/store'
module Braid
class Config
class PathAlreadyInUse < BraidError
end
class MirrorDoesNotExist < BraidError
end
def initialize(config_file = CONFIG_FILE)
@db = YAML::Store.new(config_file)
end
def add_from_options(url, options)
mirror = Mirror.new_from_options(url, options)
add(mirror)
mirror
end
def mirrors
@db.transaction(true) do
@db.roots
end
end
def add(mirror)
@db.transaction do
raise PathAlreadyInUse if @db[mirror.path]
@db[mirror.path] = clean_attributes(mirror.attributes)
end
end
def get(path)
@db.transaction(true) do
if attributes = @db[path]
Mirror.new(path, attributes)
end
end
end
def get!(path)
mirror = get(path)
raise MirrorDoesNotExist unless mirror
mirror
end
def remove(mirror)
@db.transaction do
@db.delete(mirror.path)
end
end
def update(mirror)
@db.transaction do
raise MirrorDoesNotExist unless @db[mirror.path]
@db.delete(mirror)
@db[mirror.path] = clean_attributes(mirror.attributes)
end
end
private
def clean_attributes(hash)
(hash = hash.dup).each { |k,v| hash.delete(k) if v.nil? }
end
end
end
| require 'yaml'
require 'yaml/store'
module Braid
class Config
class PathAlreadyInUse < BraidError
end
class MirrorDoesNotExist < BraidError
end
def initialize(config_file = CONFIG_FILE)
@db = YAML::Store.new(config_file)
end
def add_from_options(url, options)
mirror = Mirror.new_from_options(url, options)
add(mirror)
mirror
end
def mirrors
@db.transaction(true) do
@db.roots
end
end
def add(mirror)
@db.transaction do
raise PathAlreadyInUse if @db[mirror.path]
@db[mirror.path] = clean_attributes(mirror.attributes)
end
end
def get(path)
@db.transaction(true) do
if attributes = @db[path.to_s.sub(/\/$/, '')]
Mirror.new(path, attributes)
end
end
end
def get!(path)
mirror = get(path)
raise MirrorDoesNotExist unless mirror
mirror
end
def remove(mirror)
@db.transaction do
@db.delete(mirror.path)
end
end
def update(mirror)
@db.transaction do
raise MirrorDoesNotExist unless @db[mirror.path]
@db.delete(mirror)
@db[mirror.path] = clean_attributes(mirror.attributes)
end
end
private
def clean_attributes(hash)
(hash = hash.dup).each { |k,v| hash.delete(k) if v.nil? }
end
end
end
|
Replace @@main_class through class instance variable | require 'shellwords'
require 'stringio'
require 'aruba/processes/basic_process'
module Aruba
module Processes
class InProcess < BasicProcess
class FakeKernel
attr_reader :exitstatus
def initialize
@exitstatus = 0
end
def exit(exitstatus)
@exitstatus = exitstatus
end
end
def self.main_class=(main_class)
@@main_class = main_class
end
def initialize(cmd, exit_timeout, io_wait, working_directory)
args = Shellwords.split(cmd)
@argv = args[1..-1]
@stdin = StringIO.new
@stdout = StringIO.new
@stderr = StringIO.new
@kernel = FakeKernel.new
super
end
def run!
raise "You need to call Aruba::InProcess.main_class = YourMainClass" unless @@main_class
Dir.chdir @working_directory do
before_run
@@main_class.new(@argv, @stdin, @stdout, @stderr, @kernel).execute!
after_run
yield self if block_given?
end
end
def stop(reader)
@kernel.exitstatus
end
def stdout
@stdout.string
end
def stderr
@stderr.string
end
end
end
end
| require 'shellwords'
require 'stringio'
require 'aruba/processes/basic_process'
module Aruba
module Processes
class InProcess < BasicProcess
class FakeKernel
attr_reader :exitstatus
def initialize
@exitstatus = 0
end
def exit(exitstatus)
@exitstatus = exitstatus
end
end
class << self
attr_accessor :main_class
end
def initialize(cmd, exit_timeout, io_wait, working_directory)
args = Shellwords.split(cmd)
@argv = args[1..-1]
@stdin = StringIO.new
@stdout = StringIO.new
@stderr = StringIO.new
@kernel = FakeKernel.new
super
end
def run!
raise "You need to call Aruba::InProcess.main_class = YourMainClass" unless self.class.main_class
Dir.chdir @working_directory do
before_run
self.class.main_class.new(@argv, @stdin, @stdout, @stderr, @kernel).execute!
after_run
yield self if block_given?
end
end
def stop(reader)
@kernel.exitstatus
end
def stdout
@stdout.string
end
def stderr
@stderr.string
end
end
end
end
|
Add an explicit check for MRI, don't define TS::Array or TS::Hash for other platforms. | require 'thread_safe/version'
module ThreadSafe
autoload :Cache, 'thread_safe/cache'
if defined?(JRUBY_VERSION)
require 'jruby/synchronized'
# A thread-safe subclass of Array. This version locks
# against the object itself for every method call,
# ensuring only one thread can be reading or writing
# at a time. This includes iteration methods like
# #each.
class Array < ::Array
include JRuby::Synchronized
end
# A thread-safe subclass of Hash. This version locks
# against the object itself for every method call,
# ensuring only one thread can be reading or writing
# at a time. This includes iteration methods like
# #each.
class Hash < ::Hash
include JRuby::Synchronized
end
else
# Because MRI never runs code in parallel, the existing
# non-thread-safe structures should usually work fine.
Array = ::Array
Hash = ::Hash
end
end | require 'thread_safe/version'
module ThreadSafe
autoload :Cache, 'thread_safe/cache'
if defined?(JRUBY_VERSION)
require 'jruby/synchronized'
# A thread-safe subclass of Array. This version locks
# against the object itself for every method call,
# ensuring only one thread can be reading or writing
# at a time. This includes iteration methods like
# #each.
class Array < ::Array
include JRuby::Synchronized
end
# A thread-safe subclass of Hash. This version locks
# against the object itself for every method call,
# ensuring only one thread can be reading or writing
# at a time. This includes iteration methods like
# #each.
class Hash < ::Hash
include JRuby::Synchronized
end
elsif defined?(RUBY_ENGINE) && RUBY_ENGINE == 'ruby'
# Because MRI never runs code in parallel, the existing
# non-thread-safe structures should usually work fine.
Array = ::Array
Hash = ::Hash
end
end |
Add Petite Chez Scheme, version 8.4 | class PetiteChezScheme < Cask
url 'http://www.scheme.com/download/pcsv8.4-ta6osx-1.pkg.tar.gz'
homepage 'http://www.scheme.com/petitechezscheme.html'
version '8.4'
sha256 '3ed7200c3e265d36d03129569a78bfde8aedaea1ec7565d3c7a4daca26389701'
install 'pcsv8.4-ta6osx-1.pkg'
uninstall :pkgutil => 'com.scheme.chezscheme'
end
| |
Add list hosts for given pool | require 'chef/knife'
require 'chef/knife/base_vsphere_command'
require 'rbvmomi'
require 'netaddr'
#list hosts belonging to pool
class Chef::Knife::VsphereHostsList < Chef::Knife::BaseVsphereCommand
banner "knife vsphere hosts list"
get_common_options
option :pool,
:long => "--pool pool",
:short => "-h",
:description => "Target pool"
def traverse_folders_for_pool(folder, poolname)
children = folder.children.find_all
children.each do |child|
if child.class == RbVmomi::VIM::ClusterComputeResource || child.class == RbVmomi::VIM::ComputeResource || child.class == RbVmomi::VIM::ResourcePool
if child.name == poolname then return child end
elsif child.class == RbVmomi::VIM::Folder
pool = traverse_folders_for_pool(child, poolname)
if pool then return pool end
end
end
return false
end
def run
poolname = config[:pool]
if poolname.nil?
show_usage
fatal_exit("You must specify a resource pool or cluster name (see knife vsphere pool list)")
end
vim = get_vim_connection
dc = get_datacenter
folder = dc.hostFolder
pool = traverse_folders_for_pool(folder, poolname) or abort "Pool #{poolname} not found"
hosts=pool.host
unless hosts.nil?
hosts.each do |hostc|
#puts "#{ui.color("#{hostc.class}, :cyan)}:"+"#{hostc.name}")"
#puts "#{ui.color(#{hostc.class}, :cyan)}"
puts "#{ui.color("Host", :cyan)}: #{hostc.name}"
end
end
end
end
| |
Initialize secret key base token from environment variable | # Be sure to restart your server when you modify this file.
# Your secret key for verifying the integrity of signed cookies.
# If you change this key, all old signed cookies will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
LocalSupport::Application.config.secret_token = ENV['MYAPP_SECRET_TOKEN'] || '98f7bafde9e9f5cab69c8e3f25a0f70bf8f97f69594f87fa3caa42e58aa0b1508f1c2efac3123a6975386fc8b9c742df9a844b0024eea553a7eb6808a4a9a02a'
| # Be sure to restart your server when you modify this file.
# Your secret key for verifying the integrity of signed cookies.
# If you change this key, all old signed cookies will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
LocalSupport::Application.config.secret_token = ENV['MYAPP_SECRET_TOKEN'] || '98f7bafde9e9f5cab69c8e3f25a0f70bf8f97f69594f87fa3caa42e58aa0b1508f1c2efac3123a6975386fc8b9c742df9a844b0024eea553a7eb6808a4a9a02a'
LocalSupport::Application.config.secret_key_base = ENV['MYAPP_SECRET_KEY_BASE'] || '20af600cc96db70dedfb207b9608c9ee0b0ad0dcfed270c02c870b0a6d45312821f996cc04aa43e7d0a504d6d6c3b514c07529bad3fb1e7f262f88df615b73b5'
|
Fix LibreOffice cask for older OS X versions | class Libreoffice < Cask
url 'http://download.documentfoundation.org/libreoffice/stable/4.2.3/mac/x86_64/LibreOffice_4.2.3_MacOS_x86-64.dmg'
homepage 'http://www.libreoffice.org/'
version '4.2.3'
sha256 'b54917f4784b17c2c6a0778559f3f9de03480bc4528b181681336d9cc2dba904'
link 'LibreOffice.app'
end
| class Libreoffice < Cask
if Hardware::CPU.is_64_bit? && OS::Mac.version >= '10.8'
url 'http://download.documentfoundation.org/libreoffice/stable/4.2.3/mac/x86_64/LibreOffice_4.2.3_MacOS_x86-64.dmg'
sha256 'b54917f4784b17c2c6a0778559f3f9de03480bc4528b181681336d9cc2dba904'
else
url 'http://download.documentfoundation.org/libreoffice/stable/4.2.3/mac/x86/LibreOffice_4.2.3_MacOS_x86.dmg'
sha256 '6b5bf833824076acf0cdb86b869ba418043295855f1a539fa0d7bf18eba3de13'
end
homepage 'http://www.libreoffice.org/'
version '4.2.3'
link 'LibreOffice.app'
end
|
Disable warming of the asset cache in Spinach tests under CI | require 'spinach/capybara'
require 'capybara/poltergeist'
# Give CI some extra time
timeout = (ENV['CI'] || ENV['CI_SERVER']) ? 90 : 15
Capybara.javascript_driver = :poltergeist
Capybara.register_driver :poltergeist do |app|
Capybara::Poltergeist::Driver.new(app, js_errors: true, timeout: timeout, window_size: [1366, 768])
end
Capybara.default_max_wait_time = timeout
Capybara.ignore_hidden_elements = false
unless ENV['CI'] || ENV['CI_SERVER']
require 'capybara-screenshot/spinach'
# Keep only the screenshots generated from the last failing test suite
Capybara::Screenshot.prune_strategy = :keep_last_run
end
Spinach.hooks.before_run do
TestEnv.warm_asset_cache
end
| require 'spinach/capybara'
require 'capybara/poltergeist'
# Give CI some extra time
timeout = (ENV['CI'] || ENV['CI_SERVER']) ? 90 : 15
Capybara.javascript_driver = :poltergeist
Capybara.register_driver :poltergeist do |app|
Capybara::Poltergeist::Driver.new(app, js_errors: true, timeout: timeout, window_size: [1366, 768])
end
Capybara.default_max_wait_time = timeout
Capybara.ignore_hidden_elements = false
unless ENV['CI'] || ENV['CI_SERVER']
require 'capybara-screenshot/spinach'
# Keep only the screenshots generated from the last failing test suite
Capybara::Screenshot.prune_strategy = :keep_last_run
end
Spinach.hooks.before_run do
TestEnv.warm_asset_cache unless ENV['CI'] || ENV['CI_SERVER']
end
|
Add an empty line item for new credit invoices. | class CreditInvoicesController < InvoicesController
# Actions
def new
# Allow pre-seeding some parameters
invoice_params = {
:customer_id => current_tenant.company.id,
:state => 'booked',
:value_date => Date.today,
:due_date => Date.today.in(30.days).to_date
}
# Set default parameters
invoice_params.merge!(params[:invoice]) if params[:invoice]
@credit_invoice = CreditInvoice.new(invoice_params)
# Prebuild an empty attachment instance
@credit_invoice.attachments.build
new!
end
def create
@credit_invoice = CreditInvoice.new(params[:credit_invoice])
@credit_invoice.build_booking
create!
end
end
| class CreditInvoicesController < InvoicesController
# Actions
def new
# Allow pre-seeding some parameters
invoice_params = {
:customer_id => current_tenant.company.id,
:state => 'booked',
:value_date => Date.today,
:due_date => Date.today.in(30.days).to_date
}
# Set default parameters
invoice_params.merge!(params[:invoice]) if params[:invoice]
@credit_invoice = CreditInvoice.new(invoice_params)
@credit_invoice.line_items.build(
:times => 1,
:quantity => 'x',
:vat_rate_code => 'vat:full'
)
# Prebuild an empty attachment instance
@credit_invoice.attachments.build
new!
end
def create
@credit_invoice = CreditInvoice.new(params[:credit_invoice])
@credit_invoice.build_booking
create!
end
end
|
Make ErrorDumper to notify Honeybadger | class ErrorDumper
include Callee
UNDEFINED_EXCEPTION = ''.freeze
option :exception, optional: true, default: -> { UNDEFINED_EXCEPTION }
option :file_name, optional: true, default: -> { location.try(:path).to_s }
option :label, optional: true, default: -> { location.try(:label).to_s }
option :line_number, optional: true, default: -> { location.try(:lineno) }
option :occured_at, optional: true, default: -> { DateTime.now }
option :target, optional: true, default: -> { nil }
option :context, optional: true, default: -> { {} }
option(
:message,
optional: true,
default: -> { exception.try(:message) || exception.to_s }
)
def call
Error.create!(
backtrace: backtrace,
context: context,
exception: exception_name,
file_name: file_name,
label: label,
line_number: line_number,
message: message,
occured_at: occured_at,
status: ErrorStatus.pending,
target: target
)
end
def location
exception.try(:locations).try(:first)
end
def backtrace
exception.try(:backtrace) || []
end
def exception_name
return exception.try(:class).try(:name) if exception.is_a?(Exception)
exception.to_s
end
end
| class ErrorDumper
include Callee
UNDEFINED_EXCEPTION = ''.freeze
option :exception, optional: true, default: -> { UNDEFINED_EXCEPTION }
option :file_name, optional: true, default: -> { location.try(:path).to_s }
option :label, optional: true, default: -> { location.try(:label).to_s }
option :line_number, optional: true, default: -> { location.try(:lineno) }
option :occured_at, optional: true, default: -> { DateTime.now }
option :target, optional: true, default: -> { nil }
option :context, optional: true, default: -> { {} }
option(
:message,
optional: true,
default: -> { exception.try(:message) || exception.to_s }
)
def call
persist_error
notify_honeybadger
end
def persist_error
Error.create!(
backtrace: backtrace,
context: context,
exception: exception_name,
file_name: file_name,
label: label,
line_number: line_number,
message: message,
occured_at: occured_at,
status: ErrorStatus.pending,
target: target
)
end
def notify_honeybadger
Honeybadger.notify(exception, error_message: message)
end
def location
exception.try(:locations).try(:first)
end
def backtrace
exception.try(:backtrace) || []
end
def exception_name
return exception.try(:class).try(:name) if exception.is_a?(Exception)
exception.to_s
end
end
|
Put back the redirection route from admin to refinery | ActionController::Routing::Routes.draw do |map|
# NB: Engine routes are loaded FIRST from Rails v2.3 onward.
# These routes are contained within vendor/plugins/engine_name/config/routes.rb
# The priority is based upon order of creation: first created -> highest priority.
map.root :controller => "pages", :action => "home"
map.namespace(:admin, :path_prefix => 'refinery') do |admin|
admin.root :controller => 'dashboard', :action => 'index'
end
# Install the default routes as the lowest priority.
map.connect ':controller/:action/:id'
map.connect ':controller/:action/:id.:format'
map.connect 'refinery/*path', :controller => 'admin/base', :action => 'error_404'
map.connect '*path', :controller => 'application', :action => 'error_404'
end
| ActionController::Routing::Routes.draw do |map|
# NB: Engine routes are loaded FIRST from Rails v2.3 onward.
# These routes are contained within vendor/plugins/engine_name/config/routes.rb
# The priority is based upon order of creation: first created -> highest priority.
map.root :controller => "pages", :action => "home"
map.namespace(:admin, :path_prefix => 'refinery') do |admin|
admin.root :controller => 'dashboard', :action => 'index'
end
# Install the default routes as the lowest priority.
map.connect ':controller/:action/:id'
map.connect ':controller/:action/:id.:format'
map.redirect 'admin/*path', :controller => 'admin/base'
map.connect 'refinery/*path', :controller => 'admin/base', :action => 'error_404'
map.connect '*path', :controller => 'application', :action => 'error_404'
end
|
Add default 'Portfolio' collection for user upon sign up | class UsersController < ApplicationController
def index
@artists = User.where(artist: true)
end
def show
@user = User.find_by(id: params[:id])
@collections = Collection.where(user_id: @user.id)
end
def new
@user = User.new
end
def create
params[:user].merge!(artist: params[:artist])
@user = User.new(user_params)
if @user.save
session[:user_id] = @user.id
flash[:message] = "Your account was saved successfully"
redirect_to artists_path
else
@errors = @user.errors.full_messages
render 'new'
end
end
def edit
@user = User.find(params[:id])
end
def update
@user = User.find_by(id: params[:id])
if @user.update_attributes(user_params)
redirect_to @user
else
@errors = @user.errors.full_messages
render 'edit'
end
end
def random_artist
artists = User.where(artist: true)
@user = artists.sample
redirect_to @user
end
private
def user_params
params.require(:user).permit(:name, :email, :password, :avatar, :artist)
end
end
| class UsersController < ApplicationController
def index
@artists = User.where(artist: true)
end
def show
@user = User.find_by(id: params[:id])
@collections = Collection.where(user_id: @user.id)
end
def new
@user = User.new
end
def create
params[:user].merge!(artist: params[:artist])
@user = User.new(user_params)
if @user.save
create_default_collection(@user.id)
session[:user_id] = @user.id
flash[:message] = "Your account was saved successfully"
redirect_to artists_path
else
@errors = @user.errors.full_messages
render 'new'
end
end
def edit
@user = User.find(params[:id])
end
def update
@user = User.find_by(id: params[:id])
if @user.update_attributes(user_params)
redirect_to @user
else
@errors = @user.errors.full_messages
render 'edit'
end
end
def random_artist
artists = User.where(artist: true)
@user = artists.sample
redirect_to @user
end
private
def user_params
params.require(:user).permit(:name, :email, :password, :avatar, :artist)
end
def create_default_collection(user_id)
Collection.create!(user_id: user_id, name: "Portfolio")
end
end
|
Add validation for email and password | class User < ActiveRecord::Base
# Include default devise modules. Others available are:
# :confirmable, :lockable, :timeoutable and :omniauthable
devise :database_authenticatable, :registerable,
:recoverable, :rememberable, :trackable, :validatable
end
| class User < ActiveRecord::Base
validates_presence_of :email, :encrypted_password
devise :database_authenticatable, :registerable,
:recoverable, :rememberable, :trackable, :validatable
end
|
Add route for comment creation | require 'json'
post '/questions/:id/comments/new' do
if question = Question.find(params[:id])
comment = Comment.new(content: params[:content])
question.comments << comment
end
if request.xhr?
id = comment.id
content = comment.content
content_type :json
JSON.generate(id: id, content: content)
else
redirect "/questions/#{question.id}"
end
end | |
Check if there's at least one payment in array | # require 'pry'
module QBIntegration
module Service
class PaymentMethod < Base
attr_reader :order
def initialize(config, payload)
super("PaymentMethod", config)
@order = payload[:order]
end
def augury_name
if order.has_key?("credit_cards") && !order["credit_cards"].empty?
order["credit_cards"].first["cc_type"]
elsif order["payments"]
order["payments"].first["payment_method"]
else
"None"
end
end
def qb_name
# NOTE due to bug which might send the mapping as a string. e.g.
#
# "[{\"visa\":\"credit-card\",\"master-card\":\"credit-card\"}]"
#
if config.fetch("quickbooks_payment_method_name").is_a? String
payment_method_name_mapping = JSON.parse(config.fetch("quickbooks_payment_method_name"))
else
payment_method_name_mapping = config.fetch("quickbooks_payment_method_name")
end
lookup_value!(payment_method_name_mapping.first, augury_name)
end
def matching_payment
quickbooks.fetch_by_name(qb_name) ||
(raise Exception.new("No PaymentMethod '#{qb_name}' defined in Quickbooks"))
end
end
end
end
| # require 'pry'
module QBIntegration
module Service
class PaymentMethod < Base
attr_reader :order
def initialize(config, payload)
super("PaymentMethod", config)
@order = payload[:order]
end
def augury_name
if order.has_key?("credit_cards") && !order["credit_cards"].empty?
order["credit_cards"].first["cc_type"]
elsif order["payments"] && order["payments"].first.is_a?(Hash)
order["payments"].first["payment_method"]
else
"None"
end
end
def qb_name
# NOTE due to bug which might send the mapping as a string. e.g.
#
# "[{\"visa\":\"credit-card\",\"master-card\":\"credit-card\"}]"
#
if config.fetch("quickbooks_payment_method_name").is_a? String
payment_method_name_mapping = JSON.parse(config.fetch("quickbooks_payment_method_name"))
else
payment_method_name_mapping = config.fetch("quickbooks_payment_method_name")
end
lookup_value!(payment_method_name_mapping.first, augury_name)
end
def matching_payment
quickbooks.fetch_by_name(qb_name) ||
(raise Exception.new("No PaymentMethod '#{qb_name}' defined in Quickbooks"))
end
end
end
end
|
Check in for Sagar to fix hamburger menu | class ApplicationGroupsController < ApplicationController
def index
@application_groups = ApplicationGroup.page(params[:page]).per(15)
respond_to do |format|
format.html # index.html.erb
format.json { render json: @employers }
end
end
def show
@application_group = ApplicationGroup.find(params[:id])
end
def edit
@edit_form = EditApplicationGroupForm.new(params)
end
def update
@application_group = ApplicationGroup.find(params[:id])
people_to_remove.each { |p| @application_group.people.delete(p) }
@application_group.save
end
private
def people_to_remove
ppl_hash = params[:edit_application_group_form].fetch(:people_attributes) { {} }
ids = []
ppl_hash.each_pair do |index, person|
ids << person[:person_id] if(person[:remove_selected] == "1")
end
@application_group.people.select { |p| ids.include?(p._id.to_s) }
end
end
| class ApplicationGroupsController < ApplicationController
def index
@application_groups = ApplicationGroup.page(params[:page]).per(15)
respond_to do |format|
format.html # index.html.erb
format.json { render json: @employers }
end
end
def show
@application_group = ApplicationGroup.find(params[:id])
@application_group = ApplicationGroup.first
@primary_applicant = @application_group.primary_applicant
end
def edit
@edit_form = EditApplicationGroupForm.new(params)
end
def update
@application_group = ApplicationGroup.find(params[:id])
people_to_remove.each { |p| @application_group.people.delete(p) }
@application_group.save
end
private
def people_to_remove
ppl_hash = params[:edit_application_group_form].fetch(:people_attributes) { {} }
ids = []
ppl_hash.each_pair do |index, person|
ids << person[:person_id] if(person[:remove_selected] == "1")
end
@application_group.people.select { |p| ids.include?(p._id.to_s) }
end
end
|
Use minitap for test output. | abort "Remove -realms from RUBYOPT before running these tests." if ENV['RUBYOPT'].index('-realms')
$realms_root = File.expand_path(File.dirname(__FILE__) + '/..')
#ENV['XDG_CONFIG_HOME'] = File.join($project_root, "tmp/config")
ENV['XDG_CACHE_HOME'] = File.join($realms_root, "tmp/cache")
ENV['RUBY_LIBRARY'] = File.join($realms_root, "spec/fixtures/projects/*")
# Link tmp/projects to spec/fixtures/projects.
require 'fileutils'
FileUtils.ln_sf(File.join($realms_root, 'spec/fixtures/projects'), 'tmp/projects')
# test from within tmp directoy
Dir.chdir('tmp')
# Make sure we use local version of files.
$:.unshift('../lib')
require 'realms'
#require 'realms/shell'
# pre-start checks
raise unless Realms::Library::Utils.tmpdir == File.join($realms_root, "tmp/cache/ruby")
# include Realms at top-level for convenience
include Realms
require 'minitest/autorun'
| abort "Remove -realms from RUBYOPT before running these tests." if ENV['RUBYOPT'].index('-realms')
$realms_root = File.expand_path(File.dirname(__FILE__) + '/..')
#ENV['XDG_CONFIG_HOME'] = File.join($project_root, "tmp/config")
ENV['XDG_CACHE_HOME'] = File.join($realms_root, "tmp/cache")
ENV['RUBY_LIBRARY'] = File.join($realms_root, "spec/fixtures/projects/*")
# Link tmp/projects to spec/fixtures/projects.
require 'fileutils'
FileUtils.ln_sf(File.join($realms_root, 'spec/fixtures/projects'), 'tmp/projects')
# test from within tmp directoy
Dir.chdir('tmp')
# Make sure we use local version of files.
$:.unshift('../lib')
require 'minitap'
require 'realms'
#require 'realms/shell'
# pre-start checks
raise unless Realms::Library::Utils.tmpdir == File.join($realms_root, "tmp/cache/ruby")
# include Realms at top-level for convenience
include Realms
require 'minitest/autorun'
MiniTest::Unit.runner = MiniTest::TapY.new
|
Add only to before_action in the product controller | class ProductsController < ApplicationController
before_action :product_find
def index
@users = Product.all
end
def show
render json: {product: @product}
end
def new
@product = Product.new
end
def create
@product = Product.new(id: params[:id], shopstyle_id: params[:shopstyle_id])
if @product.save
render json: { product: @product }, status: :created
else
render json: @product.errors.full_messages, status: :unprocessable_entity
end
end
def destroy
@product.destroy
end
private
def product_find
@product = Product.find(params[:id])
end
end
| class ProductsController < ApplicationController
before_action :product_find, only: [:show, :edit, :destroy]
def index
@products = Product.all
render json: @products
end
def show
render json: {product: @product}
end
def new
@product = Product.new
end
def create
@product = Product.new(id: params[:id], shopstyle_id: params[:shopstyle_id])
if @product.save
render json: { product: @product }, status: :created
else
render json: @product.errors.full_messages, status: :unprocessable_entity
end
end
def destroy
@product.destroy
end
private
def product_find
@product = Product.find(params[:id])
end
end
|
Fix CSVReader to handle the latest browscap.csv files. | # encoding: BINARY
if RUBY_VERSION.to_f < 1.9
require 'fastercsv'
else
require 'csv'
end
module Browscap
module CSVReader
CSV_ENGINE = if defined?(FasterCSV)
FasterCSV
else
CSV
end
class << self
include Reader
def load(file)
csv = CSV_ENGINE.open(file, 'r')
# skip header
2.times { csv.shift }
headers = csv.shift
entries = Hash.new
csv.each do |l|
entry = UserAgent.new
headers.each_with_index do |v, i|
entry[v] = case l[i]
when 'false'
false
when 'true'
true
when /^\d+$/
l[i].to_i
else
l[i]
end
end
entry.user_agent.sub!(/^\[(.+)\]$/, '\1')
entry.pattern = pattern_to_regexp(entry.user_agent)
if entries[entry.parent]
entry.merge!(entries[entry.parent])
end
entries[entry.user_agent] = entry
end
entries
end
end
end
end
| # encoding: BINARY
if RUBY_VERSION.to_f < 1.9
require 'fastercsv'
else
require 'csv'
end
module Browscap
module CSVReader
CSV_ENGINE = if defined?(FasterCSV)
FasterCSV
else
CSV
end
class << self
include Reader
def load(file)
csv = CSV_ENGINE.open(file, 'rb')
# skip header
2.times { csv.shift }
headers = csv.shift
entries = Hash.new
csv.each do |l|
entry = UserAgent.new
headers.each_with_index do |v, i|
entry[v] = case l[i]
when 'false'
false
when 'true'
true
when /^\d+$/
l[i].to_i
else
l[i]
end
end
entry.user_agent = if entry.user_agent
entry.user_agent.sub!(/^\[(.+)\]$/, '\1')
elsif entry.browser
entry.browser
end
entry.pattern = pattern_to_regexp(entry.user_agent.to_s)
if entries[entry.parent]
entry.merge!(entries[entry.parent])
end
entries[entry.user_agent] = entry
end
entries
end
end
end
end
|
Test "text" value of response hash | require 'json'
require 'uri'
require 'net/http'
#require_relative './dns'
get '/' do
erb :index
end
get '/oauth' do
auth_code = params['code']
client_id = "19358800983.40999931552"
client_secret = ENV['client_secret']
uri = URI("https://slack.com/api/oauth.access")
params = {:code => auth_code, :client_id=>client_id, :client_secret => client_secret}
uri.query = URI.encode_www_form(params)
res = Net::HTTP.get_response(uri)
#TODO: add a success view
"Success!" if res.is_a?(Net::HTTPSuccess)
end
post '/dns' do
# A, CNAME, AAAA, MX, NS
p params.to_json
respond_message "DNS Lookup"
end
post '/domain/' do
# is domain taken or not, suggest to use whois if not
respond_message "domain"
end
post '/whois' do
respond_message "whois"
end
post '/ping' do
respond_message "ping"
end
post '/net' do
respond_message "Help & feedback"
end
def respond_message message
content_type :json
{:text => message, :response_type => "in_channel"}.to_json
end
| require 'json'
require 'uri'
require 'net/http'
#require_relative './dns'
get '/' do
erb :index
end
get '/oauth' do
auth_code = params['code']
client_id = "19358800983.40999931552"
client_secret = ENV['client_secret']
uri = URI("https://slack.com/api/oauth.access")
params = {:code => auth_code, :client_id=>client_id, :client_secret => client_secret}
uri.query = URI.encode_www_form(params)
res = Net::HTTP.get_response(uri)
#TODO: add a success view
"Success!" if res.is_a?(Net::HTTPSuccess)
end
post '/dns' do
# A, CNAME, AAAA, MX, NS
p params["text"]
respond_message "DNS Lookup"
end
post '/domain/' do
# is domain taken or not, suggest to use whois if not
respond_message "domain"
end
post '/whois' do
respond_message "whois"
end
post '/ping' do
respond_message "ping"
end
post '/net' do
respond_message "Help & feedback"
end
def respond_message message
content_type :json
{:text => message, :response_type => "in_channel"}.to_json
end
|
Allow to pass options in nested_form method | module FormObjects
module Nesting
def nested_form(attribute, form)
attribute(attribute, form)
validates_associated(attribute)
define_nested_writer_method(attribute)
end
def define_nested_writer_method(method_name)
alias_method :"#{method_name}_attributes=", :"#{method_name}="
end
end
end
| module FormObjects
module Nesting
def nested_form(attribute, form, options = {})
attribute(attribute, form, options)
validates_associated(attribute)
define_nested_writer_method(attribute)
end
def define_nested_writer_method(method_name)
alias_method :"#{method_name}_attributes=", :"#{method_name}="
end
end
end
|
Add test for NOT finding PT story number | require 'git_tracker/commit_message'
require 'commit_message_helper'
describe GitTracker::CommitMessage do
include CommitMessageHelper
it "requires path to the temporary commit message file" do
-> { GitTracker::CommitMessage.new }.should raise_error ArgumentError
end
describe "#contains?" do
subject { described_class.new(file) }
let(:file) { "COMMIT_EDITMSG" }
before do
File.stub(:read).with(file) { example_commit_message("[#8675309]") }
end
context "commit message contains the special Pivotal Tracker story syntax" do
it { subject.should be_contains("[#8675309]") }
end
end
end
| require 'git_tracker/commit_message'
require 'commit_message_helper'
describe GitTracker::CommitMessage do
include CommitMessageHelper
it "requires path to the temporary commit message file" do
-> { GitTracker::CommitMessage.new }.should raise_error ArgumentError
end
describe "#contains?" do
subject { described_class.new(file) }
let(:file) { "COMMIT_EDITMSG" }
before do
File.stub(:read).with(file) { commit_message_text }
end
context "commit message contains the special Pivotal Tracker story syntax" do
let(:commit_message_text) { example_commit_message("[#8675309]") }
it { subject.should be_contains("[#8675309]") }
end
context "commit message doesn't contain the special Pivotal Tracker story syntax" do
let(:commit_message_text) { example_commit_message("[#not_it]") }
it { subject.should_not be_contains("[#8675309]") }
end
end
end
|
Remove useless variables assignments for @houndci | require "rake"
describe "trailmix:schedule_all_prompts", sidekiq: :inline do
before do
load "tasks/trailmix.rake"
Rake::Task.define_task(:environment)
end
it "sends prompts to all users that would like a prompt" do
Timecop.freeze(Time.utc(2014, 1, 1, 8)) do # 8AM UTC
utc_7am = create(:user, time_zone: "UTC", prompt_delivery_hour: 7)
utc_8am = create(:user, time_zone: "UTC", prompt_delivery_hour: 8)
arz_1am = create(:user, time_zone: "Arizona", prompt_delivery_hour: 1)
utc_9am = create(:user, time_zone: "UTC", prompt_delivery_hour: 9)
Rake::Task["trailmix:schedule_all_prompts"].invoke
expect(emailed_addresses).to eq([utc_8am.email, arz_1am.email])
end
end
def emailed_addresses
ActionMailer::Base.deliveries.map(&:to).flatten
end
end
| require "rake"
describe "trailmix:schedule_all_prompts", sidekiq: :inline do
before do
load "tasks/trailmix.rake"
Rake::Task.define_task(:environment)
end
it "sends prompts to all users that would like a prompt" do
Timecop.freeze(Time.utc(2014, 1, 1, 8)) do # 8AM UTC
create(:user, time_zone: "UTC", prompt_delivery_hour: 7)
utc_8am = create(:user, time_zone: "UTC", prompt_delivery_hour: 8)
arz_1am = create(:user, time_zone: "Arizona", prompt_delivery_hour: 1)
create(:user, time_zone: "UTC", prompt_delivery_hour: 9)
Rake::Task["trailmix:schedule_all_prompts"].invoke
expect(emailed_addresses).to eq([utc_8am.email, arz_1am.email])
end
end
def emailed_addresses
ActionMailer::Base.deliveries.map(&:to).flatten
end
end
|
Add news items controller specs. | require "spec_helper"
module Refinery
module News
describe ItemsController do
let!(:item) { FactoryGirl.create(:news_item) }
let(:page) { Refinery::Page.where(:link_url => "/news").first }
describe "#index" do
it "assigns items and page" do
get :index
assigns(:items).first.should eq(item)
assigns(:page).should eq(page)
end
it "renders 'index' template" do
get :index
response.should render_template(:index)
end
end
describe "#show" do
it "assigns item and page" do
get :show, :id => item.id
assigns(:item).should eq(item)
assigns(:page).should eq(page)
end
it "renders 'show' template" do
get :show, :id => item.id
response.should render_template(:show)
end
end
describe "#archive" do
context "when month is present" do
it "assigns archive_date and items" do
Refinery::News::Item.stub_chain(:live, :by_archive, :page).and_return(item)
get :archive, :month => 05, :year => 1999
assigns(:archive_date).should eq(Time.parse("05/1999"))
assigns(:items).should eq(item)
end
end
context "when month isnt present" do
it "assigns archive_date and items" do
Refinery::News::Item.stub_chain(:live, :by_year, :page).and_return(item)
get :archive, :year => 1999
assigns(:archive_date).should eq(Time.parse("01/1999"))
assigns(:items).should eq(item)
end
end
it "renders 'archive' template" do
get :archive, :year => 1999
response.should render_template(:archive)
end
it "assigns page" do
get :archive, :year => 1999
assigns(:page).should eq(page)
end
end
end
end
end
| |
Fix the bug who was raise cause the assigns didnt existed anymore. | require 'spec_helper'
shared_examples "customer actions" do
describe "show" do
it "the customer" do
get :show, :id => @customer.id
response.should render_template('show')
assigns(:customer).should_not be_nil
assigns(:customer).should be_an_instance_of(Customer)
end
it "the open debit invoices" do
get :show, :id => @customer.id
response.should render_template('show')
assigns(:open_debit_invoices).should_not be_empty
assigns(:open_debit_invoices).count.should be(3)
assigns(:open_debit_invoices).first.should be_an_instance_of(DebitInvoice)
end
it "the paid debit invoices" do
get :show, :id => @customer.id
response.should render_template('show')
assigns(:paid_debit_invoices).should_not be_empty
assigns(:paid_debit_invoices).count.should be(3)
assigns(:paid_debit_invoices).first.should be_an_instance_of(DebitInvoice)
end
end
end
describe CustomersController do
before(:all) do
@customer = Factory.create(:customer)
(0..2).each do
Factory.create(:open_debit_invoice, :customer => @customer)
Factory.create(:paid_debit_invoice, :customer => @customer)
end
end
context "as admin" do
login_admin
it_behaves_like "customer actions"
end
context "as accountant" do
login_accountant
it_behaves_like "customer actions"
end
end
| require 'spec_helper'
shared_examples "customer actions" do
describe "show" do
it "the customer" do
get :show, :id => 20000
response.should render_template('show')
assigns(:customer).should_not be_nil
assigns(:customer).should be_an_instance_of(Customer)
end
end
end
describe CustomersController do
before(:all) do
Factory.create(:customer, :id => 20000)
end
context "as admin" do
login_admin
it_behaves_like "customer actions"
end
context "as accountant" do
login_accountant
it_behaves_like "customer actions"
end
end
|
Revert "Set up temporary redirect" | Rails.application.routes.draw do
get "/government/uploads/system/uploads/attachment_data/file/724982/The_future_relationship_between_the_United_Kingdom_and_the_European_Union_WEB_VERSION.pdf" => redirect("/government/uploads/system/uploads/attachment_data/file/725288/The_future_relationship_between_the_United_Kingdom_and_the_European_Union.pdf")
resources :assets, only: %i(show create update destroy) do
member do
post :restore
end
end
resources :whitehall_assets, only: %i(create)
get '/whitehall_assets/*path' => 'whitehall_assets#show'
get "/media/:id/:filename" => "media#download", :constraints => { filename: /.*/ }
get "/government/uploads/*path" => "whitehall_media#download"
if AssetManager.s3.fake?
mount Rack::File.new(AssetManager.fake_s3.root), at: AssetManager.fake_s3.path_prefix, as: 'fake_s3'
end
get "/healthcheck", to: "healthcheck#check"
end
| Rails.application.routes.draw do
resources :assets, only: %i(show create update destroy) do
member do
post :restore
end
end
resources :whitehall_assets, only: %i(create)
get '/whitehall_assets/*path' => 'whitehall_assets#show'
get "/media/:id/:filename" => "media#download", :constraints => { filename: /.*/ }
get "/government/uploads/*path" => "whitehall_media#download"
if AssetManager.s3.fake?
mount Rack::File.new(AssetManager.fake_s3.root), at: AssetManager.fake_s3.path_prefix, as: 'fake_s3'
end
get "/healthcheck", to: "healthcheck#check"
end
|
Add age column to Character | class CreateCharacters < ActiveRecord::Migration
def change
create_table :characters do |t|
t.string :char_id
t.string :name
t.text :description
end
end
end
| class CreateCharacters < ActiveRecord::Migration
def change
create_table :characters do |t|
t.string :char_id
t.string :name
t.integer :age
t.text :description
end
end
end
|
Document the options for date filter | module Mutations
class DateFilter < InputFilter
@default_options = {
:nils => false, # true allows an explicit nil to be valid. Overrides any other options
}
def filter(data)
# Handle nil case
if data.nil?
return [nil, nil] if options[:nils]
return [nil, :nils]
end
begin
if not data.is_a?(Date)
if options[:format]
actual_date = Date.strptime(data, options[:format])
else
actual_date = Date.parse(data)
end
else
actual_date = data
end
rescue ArgumentError
return [nil, :date]
end
# Ok, its a valid date, check if it falls within the range
if options[:after]
if actual_date <= options[:after]
return [nil, :after]
end
end
if options[:before]
if actual_date >= options[:before]
return [nil, :before]
end
end
# We win, it's valid!
[actual_date, nil]
end
end
end
| module Mutations
class DateFilter < InputFilter
@default_options = {
:nils => false, # true allows an explicit nil to be valid. Overrides any other options
:format => nil, # If nil, Date.parse will be used for coercsion. If something like "%Y-%m-%d", Date.strptime is used
:after => nil, # A date object, representing the minimum date allowed, inclusive
:before => nil # A date object, representing the maximum date allowed, inclusive
}
def filter(data)
# Handle nil case
if data.nil?
return [nil, nil] if options[:nils]
return [nil, :nils]
end
begin
if !data.is_a?(Date)
if options[:format]
actual_date = Date.strptime(data, options[:format])
else
actual_date = Date.parse(data)
end
else
actual_date = data
end
rescue ArgumentError
return [nil, :date]
end
# Ok, its a valid date, check if it falls within the range
if options[:after]
if actual_date <= options[:after]
return [nil, :after]
end
end
if options[:before]
if actual_date >= options[:before]
return [nil, :before]
end
end
# We win, it's valid!
[actual_date, nil]
end
end
end
|
Change generated FGRs to match Jadu XSD. i.e, /\d{10}00/ | require 'json'
class FeeGroupReference < Struct.new(:postcode)
def status
error? ? 500 : 201
end
def payload
body.to_json
end
def body
error? ? error_body : success_body
end
def success_body
{
"fgr" => SecureRandom.hex(8),
"ETOfficeCode" => 22,
"ETOfficeName" => "Birmingham",
"ETOfficeAddress" => "Centre City Tower, 57 Hill Street, Birmingham B5 4UU",
"ETOfficeTelephone" => "0121 600 7780"
}
end
def error_body
{
"status" => "error",
"errorCode" => 1001,
"errorDescription" => "Unable to connect to ETFees database"
}
end
private def error?
postcode == 'ER0 0RR'
end
end
| require 'json'
class FeeGroupReference < Struct.new(:postcode)
def status
error? ? 500 : 201
end
def payload
body.to_json
end
def body
error? ? error_body : success_body
end
def success_body
{
"fgr" => fee_group_reference,
"ETOfficeCode" => 22,
"ETOfficeName" => "Birmingham",
"ETOfficeAddress" => "Centre City Tower, 57 Hill Street, Birmingham B5 4UU",
"ETOfficeTelephone" => "0121 600 7780"
}
end
def error_body
{
"status" => "error",
"errorCode" => 1001,
"errorDescription" => "Unable to connect to ETFees database"
}
end
private
def error?
postcode == 'ER0 0RR'
end
def fee_group_reference
"%010d00" % rand(9999999999)
end
end
|
Add back in empty migration methods | # This has now been replaced by a newer migration: 20180727140643_remove_change_note_v2
# Remove a public change note (dated 20 July 2018) for "VAT Notice 708: buildings and construction"
# Ticket: https://govuk.zendesk.com/agent/tickets/2927623
# Page: https://www.gov.uk/government/publications/vat-notice-708-buildings-and-construction
#
# Prior steps:
# Queried `Document` for the `content_id: "5f623c6e-7631-11e4-a3cb-005056011aef"`
| # This has now been replaced by a newer migration: 20180727140643_remove_change_note_v2
# Remove a public change note (dated 20 July 2018) for "VAT Notice 708: buildings and construction"
# Ticket: https://govuk.zendesk.com/agent/tickets/2927623
# Page: https://www.gov.uk/government/publications/vat-notice-708-buildings-and-construction
#
# Prior steps:
# Queried `Document` for the `content_id: "5f623c6e-7631-11e4-a3cb-005056011aef"`
class RemoveChangeNote < ActiveRecord::Migration[5.1]
def up
# This has now been replaced by a newer migration: 20180727140643_remove_change_note_v2
end
def down
# This migration is not reversible
end
end
|
Add spec for the handling of puts in step definitions. | require 'spec_helper'
require 'cucumber/formatter/spec_helper'
require 'cucumber/formatter/pretty'
module Cucumber
module RbSupport
describe RbWorld do
extend Cucumber::Formatter::SpecHelperDsl
include Cucumber::Formatter::SpecHelper
describe 'Handling puts in step definitions' do
before(:each) do
Cucumber::Term::ANSIColor.coloring = false
@out = StringIO.new
@formatter = Cucumber::Formatter::Pretty.new(runtime, @out, {})
run_defined_feature
end
describe 'when modifying the printed variable after the call to puts' do
define_feature <<-FEATURE
Feature: Banana party
Scenario: Monkey eats banana
When puts is called twice for the same variable
FEATURE
define_steps do
When(/^puts is called twice for the same variable$/) do
foo = 'a'
puts foo
foo.upcase!
puts foo
end
end
it 'prints the variable value at the time puts was called' do
expect( @out.string ).to include <<OUTPUT
When puts is called twice for the same variable
a
A
OUTPUT
end
end
end
end
end
end
| |
Delete an unexisted key does not raise an error | shared_examples "undo integration" do
subject { Undo }
let(:object) { Hash.new hello: :world }
it "stores and restores object" do
uuid = subject.store object
expect(subject.restore uuid).to eq object
end
it "deletes stored object" do
uuid = subject.store object
subject.delete uuid
expect { subject.restore uuid }.to raise_error(KeyError)
end
describe "special cases" do
it "stores and restores nil" do
uuid = subject.store nil
expect(subject.restore uuid).to eq nil
end
it "stores and restores array" do
uuid = subject.store [:foo]
expect(subject.restore uuid).to eq [:foo]
end
end
end
| shared_examples "undo integration" do
subject { Undo }
let(:object) { Hash.new hello: :world }
it "stores and restores object" do
uuid = subject.store object
expect(subject.restore uuid).to eq object
end
it "deletes stored object" do
uuid = subject.store object
subject.delete uuid
expect { subject.restore uuid }.to raise_error(KeyError)
end
it "delete an unexisted key does not raise an error" do
expect { subject.delete "does not exist" }.not_to raise_error
end
describe "special cases" do
it "stores and restores nil" do
uuid = subject.store nil
expect(subject.restore uuid).to eq nil
end
it "stores and restores array" do
uuid = subject.store [1,2,3]
expect(subject.restore uuid).to eq [1,2,3]
end
end
end
|
Remove more duplication from API application controller | class Api::ApplicationController < ApplicationController
skip_before_action :verify_authenticity_token
before_action :check_api_key
rescue_from ActiveRecord::RecordNotFound do |e|
render json: { message: e.message }, status: :not_found
end
rescue_from ActiveRecord::RecordInvalid do |e|
render json: { message: e.message }, status: :unprocessable_entity
end
private
def max_page
1000
end
def check_api_key
return true if params[:api_key].nil?
render :json => error_message, :status => :bad_request unless valid_api_key_present?
end
def require_api_key
render :json => error_message, :status => :bad_request unless valid_api_key_present?
end
def valid_api_key_present?
params[:api_key].present? && current_api_key
end
def current_api_key
return nil if params[:api_key].blank?
@current_api_key ||= ApiKey.active.find_by_access_token(params[:api_key])
end
def current_user
current_api_key.try(:user)
end
def error_message
{ error: "Error 403, you don't have permissions for this operation." }
end
def es_query(klass, query, filters)
klass.search(query, filters: filters,
sort: format_sort,
order: format_order, api: true).paginate(page: page_number, per_page: per_page_number)
end
end
| class Api::ApplicationController < ApplicationController
skip_before_action :verify_authenticity_token
before_action :check_api_key
rescue_from ActiveRecord::RecordNotFound do |e|
render json: { message: e.message }, status: :not_found
end
rescue_from ActiveRecord::RecordInvalid do |e|
render json: { message: e.message }, status: :unprocessable_entity
end
private
def max_page
1000
end
def check_api_key
return true if params[:api_key].nil?
require_api_key
end
def require_api_key
render json: { error: "Error 403, you don't have permissions for this operation." }, status: :bad_request unless valid_api_key_present?
end
def valid_api_key_present?
params[:api_key].present? && current_api_key
end
def current_api_key
return nil if params[:api_key].blank?
@current_api_key ||= ApiKey.active.find_by_access_token(params[:api_key])
end
def current_user
current_api_key.try(:user)
end
def es_query(klass, query, filters)
klass.search(query, filters: filters,
sort: format_sort,
order: format_order, api: true).paginate(page: page_number, per_page: per_page_number)
end
end
|
Set default :pty value to false | set :scm, :git
set :branch, :master
set :deploy_to, "/var/www/#{fetch(:application)}"
set :tmp_dir, "/tmp"
set :default_env, {}
set :keep_releases, 5
set :format, :pretty
set :log_level, :debug
set :pty, true
| set :scm, :git
set :branch, :master
set :deploy_to, "/var/www/#{fetch(:application)}"
set :tmp_dir, "/tmp"
set :default_env, {}
set :keep_releases, 5
set :format, :pretty
set :log_level, :debug
set :pty, false
|
Add sorting to category & album listing | class CategoriesController < ApplicationController
respond_to :html, :json, :js
load_and_authorize_resource find_by: :slug
def index
@categories = @categories.roots
respond_with(@categories)
end
def new
respond_with(@category)
end
def show
@child_categories = @category.children
@category_albums = @category.albums
respond_with(@category)
end
def edit
respond_with(@category)
end
def create
@category.parent = Category.find_by_slug(params[:category_id])
@category.save
respond_with(@category)
end
def update
@category.update_attributes(params[:category])
respond_with(@category)
end
def destroy
@category.destroy
respond_with(@category)
end
end | class CategoriesController < ApplicationController
respond_to :html, :json, :js
load_and_authorize_resource find_by: :slug
def index
@categories = @categories.roots.asc(:title)
respond_with(@categories)
end
def new
respond_with(@category)
end
def show
@child_categories = @category.children.asc(:title)
@category_albums = @category.albums.asc(:title)
respond_with(@category)
end
def edit
respond_with(@category)
end
def create
@category.parent = Category.find_by_slug(params[:category_id])
@category.save
respond_with(@category)
end
def update
@category.update_attributes(params[:category])
respond_with(@category)
end
def destroy
@category.destroy
respond_with(@category)
end
end |
Update React Native documentation (0.36) | module Docs
class ReactNative < React
self.name = 'React Native'
self.slug = 'react_native'
self.type = 'react'
self.release = '0.35'
self.base_url = 'https://facebook.github.io/react-native/docs/'
self.root_path = 'getting-started.html'
self.links = {
home: 'https://facebook.github.io/react-native/',
code: 'https://github.com/facebook/react-native'
}
html_filters.push 'react_native/clean_html'
options[:root_title] = 'React Native Documentation'
options[:only_patterns] = nil
options[:skip_patterns] = [/\Asample\-/]
options[:skip] = %w(
videos.html
transforms.html
troubleshooting.html
more-resources.html
)
options[:fix_urls] = ->(url) {
url.sub! 'docs/docs', 'docs'
url
}
options[:attribution] = <<-HTML
© 2016 Facebook Inc.<br>
Licensed under the Creative Commons Attribution 4.0 International Public License.
HTML
end
end
| module Docs
class ReactNative < React
self.name = 'React Native'
self.slug = 'react_native'
self.type = 'react'
self.release = '0.36'
self.base_url = 'https://facebook.github.io/react-native/docs/'
self.root_path = 'getting-started.html'
self.links = {
home: 'https://facebook.github.io/react-native/',
code: 'https://github.com/facebook/react-native'
}
html_filters.push 'react_native/clean_html'
options[:root_title] = 'React Native Documentation'
options[:only_patterns] = nil
options[:skip_patterns] = [/\Asample\-/]
options[:skip] = %w(
videos.html
transforms.html
troubleshooting.html
more-resources.html
)
options[:fix_urls] = ->(url) {
url.sub! 'docs/docs', 'docs'
url
}
options[:attribution] = <<-HTML
© 2016 Facebook Inc.<br>
Licensed under the Creative Commons Attribution 4.0 International Public License.
HTML
end
end
|
Add a tool to process logs looking for frequency of method calls | #!/usr/bin/env ruby
RAILS_ROOT = File.expand_path(File.join(__dir__, %w(.. ..)))
require 'manageiq-gems-pending'
require 'miq_logger_processor'
logfile = ARGV.shift if ARGV[0] && File.file?(ARGV[0])
logfile ||= File.join(RAILS_ROOT, "log/evm.log")
logfile = File.expand_path(logfile)
puts "Gathering method calls..."
method_call_hash = MiqLoggerProcessor.new(logfile).each_with_object({}) do |line, hash|
next unless line.fq_method
hash[line.fq_method] ||= 0
hash[line.fq_method] += 1
end
require 'pp'
puts method_call_hash.sort_by { |_key, value| value }.reverse.to_h.pretty_inspect
| |
Remove unneeded player query from invites | class Tournaments::InvitesController < ApplicationController
before_filter :authenticate_user!
before_filter :find_invite_for_tournament, :only => [:show, :update]
before_filter :find_tournament_with_current_user, :only => [:new, :create]
layout 'tournament_title', :only => [:new, :create]
def show
end
def update
if @invite.update_attributes(:user => current_user)
redirect_to tournament_path(@tournament)
else
render :show
end
end
def new
@invite = @tournament.invites.build
end
def create
@invite = @tournament.invites.build(params.require(:invite).permit(:email))
@invite.owner = current_user
if @invite.save
Notifications.tournament_invitation(@invite).deliver_now
redirect_to tournament_path(@tournament),
:notice => t('tournaments.invites.create.success', :email => @invite.email)
else
render :new
end
end
private
def find_invite_for_tournament
@tournament = Tournament.friendly.find(params[:tournament_id])
@invite = @tournament.invites.available.find_by_code!(params[:id])
end
def find_tournament_with_current_user
@tournament = Tournament.participant(current_user).friendly.find(params[:tournament_id])
@player = @tournament.players.active.find_by!(:user_id => current_user)
end
end
| class Tournaments::InvitesController < ApplicationController
before_filter :authenticate_user!
before_filter :find_invite_for_tournament, :only => [:show, :update]
before_filter :find_tournament, :only => [:new, :create]
layout 'tournament_title', :only => [:new, :create]
def show
end
def update
if @invite.update_attributes(:user => current_user)
redirect_to tournament_path(@tournament)
else
render :show
end
end
def new
@invite = @tournament.invites.build
end
def create
@invite = @tournament.invites.build(params.require(:invite).permit(:email))
@invite.owner = current_user
if @invite.save
Notifications.tournament_invitation(@invite).deliver_now
redirect_to tournament_path(@tournament),
:notice => t('tournaments.invites.create.success', :email => @invite.email)
else
render :new
end
end
private
def find_invite_for_tournament
@tournament = Tournament.friendly.find(params[:tournament_id])
@invite = @tournament.invites.available.find_by_code!(params[:id])
end
def find_tournament
@tournament = Tournament.participant(current_user).friendly.find(params[:tournament_id])
end
end
|
Make Tfl widget use async HTTP | require 'yajl'
require 'httparty'
module Sonia
module Widgets
class Tfl < Sonia::Widget
def initial_push
fetch_data
EventMachine::add_periodic_timer(150) { fetch_data }
end
def format_lines(line)
{
:status_requested => line["status_requested"],
:id => line["id"],
:name => line["name"],
:status => line["status"],
:messages => line["messages"].join("\n")
}
end
private
def fetch_data
lines = Yajl::Parser.parse(HTTParty.get(config[:url]).to_s)["response"]["lines"].map do |line|
format_lines(line)
end
push lines
end
end
end
end
| require 'yajl'
require 'em-http'
module Sonia
module Widgets
class Tfl < Sonia::Widget
def initial_push
fetch_data
EventMachine::add_periodic_timer(150) { fetch_data }
end
def format_lines(line)
{
:status_requested => line["status_requested"],
:id => line["id"],
:name => line["name"],
:status => line["status"],
:messages => line["messages"].join("\n")
}
end
private
def fetch_data
http = EventMachine::HttpRequest.new(config[:url]).get
http.callback {
lines = Yajl::Parser.parse(http.response)["response"]["lines"].map do |line|
format_lines(line)
end
push lines
}
end
end
end
end
|
Update AWS SDK to v3 | name 'aws'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache-2.0'
description 'Provides resources for managing AWS resources'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '7.2.2'
%w(ubuntu debian centos redhat amazon scientific fedora oracle freebsd windows suse opensuse opensuseleap).each do |os|
supports os
end
source_url 'https://github.com/chef-cookbooks/aws'
issues_url 'https://github.com/chef-cookbooks/aws/issues'
chef_version '>= 12.9' if respond_to?(:chef_version)
gem 'aws-sdk', '~> 2'
| name 'aws'
maintainer 'Chef Software, Inc.'
maintainer_email 'cookbooks@chef.io'
license 'Apache-2.0'
description 'Provides resources for managing AWS resources'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '7.2.2'
%w(ubuntu debian centos redhat amazon scientific fedora oracle freebsd windows suse opensuse opensuseleap).each do |os|
supports os
end
source_url 'https://github.com/chef-cookbooks/aws'
issues_url 'https://github.com/chef-cookbooks/aws/issues'
chef_version '>= 12.9' if respond_to?(:chef_version)
gem 'aws-sdk', '~> 3'
|
Fix "ERROR: Chef::Exceptions::InvalidCookbookVersion: '6' does not match 'x.y.z' or 'x.y'" | name "cernvm-fs"
maintainer "GSI, HPC Department"
maintainer_email "hpc@gsi.de"
license "Apache 2.0"
description "Deploy and configure CernVM-FS clients and servers."
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.3.0"
depends "sys", '>= 1.51.0'
depends "apache2", '< 6'
supports "debian", ">= 7.0"
| name "cernvm-fs"
maintainer "GSI, HPC Department"
maintainer_email "hpc@gsi.de"
license "Apache 2.0"
description "Deploy and configure CernVM-FS clients and servers."
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.3.0"
depends "sys", '>= 1.51.0'
depends "apache2", '< 6.0'
supports "debian", ">= 7.0"
|
Check RUBY_VERSION for 1.9 support | #
require('rubygems')
module Twitter; end
def require_local(suffix)
require(File.expand_path(File.join(File.dirname(__FILE__), suffix)))
end
# For better unicode support
$KCODE = 'u'
require 'jcode'
# External requires
require('yaml')
require('date')
require('time')
require('net/https')
require('uri')
require('cgi')
require('json')
require('yaml')
# Ordering matters...pay attention here!
require_local('twitter/ext')
require_local('twitter/version')
require_local('twitter/meta')
require_local('twitter/core')
require_local('twitter/model')
require_local('twitter/config')
require_local('twitter/client')
require_local('twitter/console')
| #
require('rubygems')
module Twitter; end
def require_local(suffix)
require(File.expand_path(File.join(File.dirname(__FILE__), suffix)))
end
# For better unicode support in 1.8
if RUBY_VERSION < '1.9'
$KCODE = 'u'
require 'jcode'
end
# External requires
require('yaml')
require('date')
require('time')
require('net/https')
require('uri')
require('cgi')
require('json')
require('yaml')
# Ordering matters...pay attention here!
require_local('twitter/ext')
require_local('twitter/version')
require_local('twitter/meta')
require_local('twitter/core')
require_local('twitter/model')
require_local('twitter/config')
require_local('twitter/client')
require_local('twitter/console')
|
Simplify page_entries_info by adding entry_name interface to each ORM | module Kaminari
module PluckyCriteriaMethods
include Kaminari::PageScopeMethods
delegate :default_per_page, :max_per_page, :max_pages, :to => :model
def limit_value #:nodoc:
options[:limit]
end
def offset_value #:nodoc:
options[:skip]
end
def total_count #:nodoc:
count
end
end
end
| module Kaminari
module PluckyCriteriaMethods
include Kaminari::PageScopeMethods
delegate :default_per_page, :max_per_page, :max_pages, :to => :model
def entry_name
model.model_name.human.downcase
end
def limit_value #:nodoc:
options[:limit]
end
def offset_value #:nodoc:
options[:skip]
end
def total_count #:nodoc:
count
end
end
end
|
Add localhost to page slug helper assertion | describe PagesHelper do
class Tester
include PagesHelper
end
context 'new record' do
let(:new_record) { true }
let(:expected) { { dough_component: 'MirrorInputValue Slugifier' } }
it 'returns a dough component hash' do
expect(Tester.new.dough_component(true, %w(MirrorInputValue Slugifier))).to eq(expected)
end
end
context 'existing record' do
let(:new_record) { false }
let(:expected) { {} }
it 'returns an empty hash' do
expect(Tester.new.dough_component(new_record)).to eq(expected)
end
end
context 'url preview' do
let(:site) { 'en' }
let(:page) { 'slug' }
let(:presenter) { 'articles' }
let(:expected) { 'www.moneyadviceservice.org.uk/en/<span data-dough-urlformatter-url-display="true">articles/slug</span>' }
it 'returns an article URL' do
expect(helper.page_slug(site, presenter, page)).to eq(expected)
end
end
end
| describe PagesHelper do
class Tester
include PagesHelper
end
context 'new record' do
let(:new_record) { true }
let(:expected) { { dough_component: 'MirrorInputValue Slugifier' } }
it 'returns a dough component hash' do
expect(Tester.new.dough_component(true, %w(MirrorInputValue Slugifier))).to eq(expected)
end
end
context 'existing record' do
let(:new_record) { false }
let(:expected) { {} }
it 'returns an empty hash' do
expect(Tester.new.dough_component(new_record)).to eq(expected)
end
end
context 'url preview' do
let(:site) { 'en' }
let(:page) { 'slug' }
let(:presenter) { 'articles' }
let(:expected) do
'localhost:5000/en/<span data-dough-urlformatter-url-display="true">articles/slug</span>'
end
it 'returns an article URL' do
expect(helper.page_slug(site, presenter, page)).to eq(expected)
end
end
end
|
Remove `:branch`, actually use `:revision`. | class Carthage < Formula
desc "Decentralized dependency manager for Cocoa"
homepage "https://github.com/Carthage/Carthage"
head "https://github.com/jdhealy/Carthage.git",
:branch => 'fix-git-ls-tree',
:revision => 'dcfd2444176dd42da3f4877faf95f28e8488d268',
:using => :git,
:shallow => false
depends_on :xcode => ["8.2", :build]
def install
system "make", "prefix_install", "PREFIX=#{prefix}"
bash_completion.install "Source/Scripts/carthage-bash-completion" => "carthage"
zsh_completion.install "Source/Scripts/carthage-zsh-completion" => "_carthage"
fish_completion.install "Source/Scripts/carthage-fish-completion" => "carthage.fish"
end
test do
(testpath/"Cartfile").write 'github "jspahrsummers/xcconfigs"'
system bin/"carthage", "update"
end
end
| class Carthage < Formula
desc "Decentralized dependency manager for Cocoa"
homepage "https://github.com/Carthage/Carthage"
head "https://github.com/jdhealy/Carthage.git",
:revision => 'dcfd2444176dd42da3f4877faf95f28e8488d268',
:using => :git,
:shallow => false
depends_on :xcode => ["8.2", :build]
def install
system "make", "prefix_install", "PREFIX=#{prefix}"
bash_completion.install "Source/Scripts/carthage-bash-completion" => "carthage"
zsh_completion.install "Source/Scripts/carthage-zsh-completion" => "_carthage"
fish_completion.install "Source/Scripts/carthage-fish-completion" => "carthage.fish"
end
test do
(testpath/"Cartfile").write 'github "jspahrsummers/xcconfigs"'
system bin/"carthage", "update"
end
end
|
Use ActionController::API instead of ::Base | module Toke
class ApplicationController < ActionController::Base
protect_from_forgery with: :null_session
include Toke::TokenAuthentication
end
end
| module Toke
class ApplicationController < ActionController::API
include ActionController::HttpAuthentication::Basic::ControllerMethods
include Toke::TokenAuthentication
end
end
|
Copy script after build and use build version | name 'coopr-cli'
default_version 'develop'
source :git => 'git://github.com/caskdata/coopr.git'
relative_path 'coopr-cli'
build do
mkdir "#{install_dir}/bin"
copy "#{project_dir}/coopr-cli/bin/*.sh", "#{install_dir}/bin"
command "chmod +x #{install_dir}/bin/*"
link "#{install_dir}/bin/coopr-cli.sh", "#{install_dir}/bin/coopr"
mkdir "#{install_dir}/lib"
command 'PATH=/usr/local/maven-3.1.1/bin:$PATH mvn clean package -DskipTests -pl coopr-cli -am'
%w(javadoc sources tests).each do |jar|
command "rm -f #{project_dir}/coopr-cli/target/coopr-cli-*-#{jar}.jar"
end
copy "#{project_dir}/coopr-cli/target/coopr-cli-*.jar", "#{install_dir}/lib"
command "rm -rf #{install_dir}/embedded"
command "find #{install_dir} -type f -name .gitkeep | xargs rm -f"
end
| name 'coopr-cli'
default_version 'develop'
source :git => 'git://github.com/caskdata/coopr.git'
relative_path 'coopr-cli'
build do
mkdir "#{install_dir}/bin"
mkdir "#{install_dir}/lib"
command 'PATH=/usr/local/maven-3.1.1/bin:$PATH mvn clean package -DskipTests -pl coopr-cli -am'
%w(javadoc sources tests).each do |jar|
command "rm -f #{project_dir}/coopr-cli/target/coopr-cli-*-#{jar}.jar"
end
copy "#{project_dir}/coopr-cli/target/coopr-cli-*.jar", "#{install_dir}/lib"
copy "#{project_dir}/coopr-cli/target/bin/*.sh", "#{install_dir}/bin"
command "chmod +x #{install_dir}/bin/*"
link "#{install_dir}/bin/coopr-cli.sh", "#{install_dir}/bin/coopr"
command "rm -rf #{install_dir}/embedded"
command "find #{install_dir} -type f -name .gitkeep | xargs rm -f"
end
|
Use " " as delimiter. | # lib/frecon/mongoid/criteria.rb
#
# Copyright (C) 2015 Christopher Cooper, Sam Craig, Tiger Huang, Vincent Mai, Sam Mercier, and Kristofer Rye
#
# This file is part of FReCon, an API for scouting at FRC Competitions, which is
# licensed under the MIT license. You should have received a copy of the MIT
# license with this program. If not, please see
# <http://opensource.org/licenses/MIT>.
require "mongoid"
module Mongoid
class Criteria
def psv_filter(psv_parameters = {})
collection = self
psv_parameters.each do |psv_string, comparison_value|
psv_keys = psv_string.split("+").map do |psv_key|
psv_key.to_sym
end.reverse
comparison_key = psv_keys.shift
comparison_hash = {comparison_key => comparison_value}
p comparison_hash
psv_keys.each do |model|
model_id = (model.to_s + '_id').to_sym
model_class = ("FReCon::" + model.to_s.capitalize).constantize
comparison_hash = {model_id => model_class.in(comparison_hash).map(&:id)}
p comparison_hash
end
collection = collection.in(comparison_hash)
end
collection
end
end
end
| # lib/frecon/mongoid/criteria.rb
#
# Copyright (C) 2015 Christopher Cooper, Sam Craig, Tiger Huang, Vincent Mai, Sam Mercier, and Kristofer Rye
#
# This file is part of FReCon, an API for scouting at FRC Competitions, which is
# licensed under the MIT license. You should have received a copy of the MIT
# license with this program. If not, please see
# <http://opensource.org/licenses/MIT>.
require "mongoid"
module Mongoid
class Criteria
def psv_filter(psv_parameters = {})
collection = self
psv_parameters.each do |psv_string, comparison_value|
psv_keys = psv_string.split(" ").map do |psv_key|
psv_key.to_sym
end.reverse
comparison_key = psv_keys.shift
comparison_hash = {comparison_key => comparison_value}
p comparison_hash
psv_keys.each do |model|
model_id = (model.to_s + '_id').to_sym
model_class = ("FReCon::" + model.to_s.capitalize).constantize
comparison_hash = {model_id => model_class.in(comparison_hash).map(&:id)}
p comparison_hash
end
collection = collection.in(comparison_hash)
end
collection
end
end
end
|
Fix spec around Phase title editing | class PhaseFragment < PageFragment
text_assertions :card, '.card'
def new_card(**params)
find('a', text: 'ADD NEW CARD').click
new_card = params[:overlay].launch(session)
new_card.create(params)
end
def remove_card(card_name)
container = find('.card', text: card_name)
container.hover
container.find('.card-remove').click
end
def card_count
find_all('.card').count
end
def has_remove_icon?
has_css? '.remove-icon', visible: false
end
def has_no_remove_icon?
has_no_css? '.remove-icon', visible: false
end
# add a phase AFTER this phase.
def add_phase
container = find('.add-column', visible: false)
container.hover
find('.add-column', visible: false).click
end
def remove_phase
retry_stale_element do
container = find('.column-title')
container.hover
remove = find('.remove-icon')
remove.click
end
end
def rename(new_name)
field = find('h2')
field.click
reversed_name = new_name.reverse
field.set(reversed_name)
synchronize_content!(new_name)
find('.column-header-update-save').click
end
end
| class PhaseFragment < PageFragment
text_assertions :card, '.card'
def new_card(**params)
find('a', text: 'ADD NEW CARD').click
new_card = params[:overlay].launch(session)
new_card.create(params)
end
def remove_card(card_name)
container = find('.card', text: card_name)
container.hover
container.find('.card-remove').click
end
def card_count
find_all('.card').count
end
def has_remove_icon?
has_css? '.remove-icon', visible: false
end
def has_no_remove_icon?
has_no_css? '.remove-icon', visible: false
end
# add a phase AFTER this phase.
def add_phase
container = find('.add-column', visible: false)
container.hover
find('.add-column', visible: false).click
end
def remove_phase
retry_stale_element do
container = find('.column-title')
container.hover
remove = find('.remove-icon')
remove.click
end
end
def rename(new_name)
field = find('h2')
field.click
reversed_name = new_name.reverse
field.set(reversed_name)
synchronize_content!(reversed_name)
find('.column-header-update-save').click
end
end
|
Add symbol to (now sorted) list of 'as-is' values | module Appsignal
class ParamsSanitizer
class << self
def sanitize(params)
ParamsSanitizerCopy.sanitize_value(params)
end
def sanitize!(params)
ParamsSanitizerDestructive.sanitize_value(params)
end
protected
def sanitize_value(value)
case value
when Hash
sanitize_hash(value)
when Array
sanitize_array(value)
when String, Fixnum
value
else
value.inspect
end
end
def sanitize_hash_with_target(source_hash, target_hash)
source_hash.each_pair do |key, value|
target_hash[key] = sanitize_value(value)
end
target_hash
end
def sanitize_array_with_target(source_array, target_array)
source_array.each_with_index do |item, index|
target_array[index] = sanitize_value(item)
end
target_array
end
end
end
class ParamsSanitizerCopy < ParamsSanitizer
class << self
protected
def sanitize_hash(hash)
sanitize_hash_with_target(hash, {})
end
def sanitize_array(array)
sanitize_array_with_target(array, [])
end
end
end
class ParamsSanitizerDestructive < ParamsSanitizer
class << self
protected
def sanitize_hash(hash)
sanitize_hash_with_target(hash, hash)
end
def sanitize_array(array)
sanitize_array_with_target(array, array)
end
end
end
end
| module Appsignal
class ParamsSanitizer
class << self
def sanitize(params)
ParamsSanitizerCopy.sanitize_value(params)
end
def sanitize!(params)
ParamsSanitizerDestructive.sanitize_value(params)
end
protected
def sanitize_value(value)
case value
when Hash
sanitize_hash(value)
when Array
sanitize_array(value)
when Fixnum, String, Symbol
value
else
value.inspect
end
end
def sanitize_hash_with_target(source_hash, target_hash)
source_hash.each_pair do |key, value|
target_hash[key] = sanitize_value(value)
end
target_hash
end
def sanitize_array_with_target(source_array, target_array)
source_array.each_with_index do |item, index|
target_array[index] = sanitize_value(item)
end
target_array
end
end
end
class ParamsSanitizerCopy < ParamsSanitizer
class << self
protected
def sanitize_hash(hash)
sanitize_hash_with_target(hash, {})
end
def sanitize_array(array)
sanitize_array_with_target(array, [])
end
end
end
class ParamsSanitizerDestructive < ParamsSanitizer
class << self
protected
def sanitize_hash(hash)
sanitize_hash_with_target(hash, hash)
end
def sanitize_array(array)
sanitize_array_with_target(array, array)
end
end
end
end
|
Update update method in gateway backend | module Gateways
class QuotesGatewayBackend < Backend
def initialize
super
@table = @database[:quotes]
end
def insert(quote)
ensure_valid!(quote)
@table.insert(quote)
end
def get(id)
@table.first(:id => id)
end
def update(quote)
ensure_persisted!(quote)
@table.update(quote)
end
def all
@table.all
end
private
def ensure_valid!(quote)
ensure_kind_of!(quote)
ensure_not_persisted!(quote)
end
def ensure_kind_of!(quote)
reason = "Only Hashes can be inserted"
unless quote.kind_of? Hash
raise_argument_error(reason, quote)
end
end
def ensure_not_persisted!(quote)
reason = "Quotes can't be added twice. Use #update instead"
raise_argument_error(reason, quote) unless quote[:id].nil?
end
def ensure_persisted!(quote)
reason = "Quotes must exist to update them. Use #insert instead"
raise_argument_error(reason, quote) if quote[:id].nil?
end
end
end | module Gateways
class QuotesGatewayBackend < Backend
def initialize
super
@table = @database[:quotes]
end
def insert(quote)
ensure_valid!(quote)
@table.insert(quote)
end
def get(id)
@table.first(:id => id)
end
def update(quote)
ensure_persisted!(quote)
@table.where(:id => quote[:id]).update(quote)
end
def all
@table.all
end
private
def ensure_valid!(quote)
ensure_kind_of!(quote)
ensure_not_persisted!(quote)
end
def ensure_kind_of!(quote)
reason = "Only Hashes can be inserted"
unless quote.kind_of? Hash
raise_argument_error(reason, quote)
end
end
def ensure_not_persisted!(quote)
reason = "Quotes can't be added twice. Use #update instead"
raise_argument_error(reason, quote) unless quote[:id].nil?
end
def ensure_persisted!(quote)
reason = "Quotes must exist to update them. Use #insert instead"
raise_argument_error(reason, quote) if quote[:id].nil?
end
end
end |
Add console script used to mass-change content expert for courses | # frozen_string_literal: true
# This can be used to shift all of one Content Expert's courses to another,
# for a certain campaign.
old_id = User.find_by(username: 'Ian (Wiki Ed)').id
new_id = User.find_by(username: 'Sage (Wiki Ed)').id
Campaign.find_by(slug: 'spring_2017').courses.map do |course|
ce = course.courses_users.find_by(user_id: old_id, role: 4)
next unless ce
ce.user_id = new_id
ce.save
end
| |
Make the single failing spec pending for now to see whether the auto deploy script does anything. | require 'rails_helper'
describe ChampaignQueue::Clients::Sqs do
context "with SQS_QUEUE_URL" do
it "delivers payload to AWS SQS Queue" do
expected_arguments = {
queue_url: "http://example.com",
message_body: {foo: :bar}.to_json
}
expect_any_instance_of(Aws::SQS::Client).to(
receive(:send_message).with( expected_arguments )
)
ChampaignQueue::Clients::Sqs.push({foo: :bar})
end
end
context "without SQS_QUEUE_URL" do
before do
allow(ENV).to receive(:[]).with("SQS_QUEUE_URL"){ nil }
end
it "does not deliver payload to AWS SQS Queue" do
expect_any_instance_of(Aws::SQS::Client).to_not receive(:send_message)
ChampaignQueue::Clients::Sqs.push({foo: :bar})
end
end
end
| require 'rails_helper'
describe ChampaignQueue::Clients::Sqs do
context "with SQS_QUEUE_URL" do
xit "delivers payload to AWS SQS Queue" do
expected_arguments = {
queue_url: "http://example.com",
message_body: {foo: :bar}.to_json
}
expect_any_instance_of(Aws::SQS::Client).to(
receive(:send_message).with( expected_arguments )
)
ChampaignQueue::Clients::Sqs.push({foo: :bar})
end
end
context "without SQS_QUEUE_URL" do
before do
allow(ENV).to receive(:[]).with("SQS_QUEUE_URL"){ nil }
end
it "does not deliver payload to AWS SQS Queue" do
expect_any_instance_of(Aws::SQS::Client).to_not receive(:send_message)
ChampaignQueue::Clients::Sqs.push({foo: :bar})
end
end
end
|
Make redis key less obscure | require 'redis'
module Travis
module Logs
class Existence
attr_reader :redis
def initialize
@redis = Redis.new(url: Logs.config.redis.url)
end
def occupied!(channel_name)
redis.set(key(channel_name), true)
end
def occupied?(channel_name)
redis.get(key(channel_name))
end
def vacant?(channel_name)
!occupied?(channel_name)
end
def vacant!(channel_name)
redis.del(key(channel_name))
end
def key(channel_name)
"logs:ch-occ:#{channel_name}"
end
end
end
end
| require 'redis'
module Travis
module Logs
class Existence
attr_reader :redis
def initialize
@redis = Redis.new(url: Logs.config.redis.url)
end
def occupied!(channel_name)
redis.set(key(channel_name), true)
end
def occupied?(channel_name)
redis.get(key(channel_name))
end
def vacant?(channel_name)
!occupied?(channel_name)
end
def vacant!(channel_name)
redis.del(key(channel_name))
end
def key(channel_name)
"logs:channel-occupied:#{channel_name}"
end
end
end
end
|
Add upload field configuration options. | require "formalist/element"
require "formalist/elements"
require "formalist/types"
module Formalist
class Elements
class UploadField < Field
attribute :presign_url, Types::String
attribute :render_uploaded_as, Types::String
attribute :upload_prompt, Types::String
attribute :upload_action_label, Types::String
end
register :upload_field, UploadField
end
end
| require "formalist/element"
require "formalist/elements"
require "formalist/types"
module Formalist
class Elements
class UploadField < Field
attribute :presign_url, Types::String
attribute :render_uploaded_as, Types::String
attribute :upload_prompt, Types::String
attribute :upload_action_label, Types::String
attribute :max_file_size, Types::String
attribute :max_file_size_message, Types::String
attribute :permitted_file_type_message, Types::String
attribute :permitted_file_type_regex, Types::String
end
register :upload_field, UploadField
end
end
|
Deal with Travis CI's out-of-date bundler | # coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "contribution-checker/version"
Gem::Specification.new do |spec|
spec.name = "contribution-checker"
spec.version = ContributionChecker::VERSION
spec.authors = ["James Dennes"]
spec.email = ["jdennes@gmail.com"]
spec.summary = %q{Check whether a commit is counted as a contribution.}
spec.description = %q{Check whether a GitHub commit is counted as a contribution for a specific GitHub user.}
spec.homepage = ""
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency "octokit", "~> 3.8"
spec.add_development_dependency "bundler", "~> 1.8"
spec.add_development_dependency "rake"
spec.add_development_dependency "rspec"
spec.add_development_dependency "webmock"
spec.add_development_dependency "simplecov"
spec.add_development_dependency "coveralls"
end
| # coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "contribution-checker/version"
Gem::Specification.new do |spec|
spec.name = "contribution-checker"
spec.version = ContributionChecker::VERSION
spec.authors = ["James Dennes"]
spec.email = ["jdennes@gmail.com"]
spec.summary = %q{Check whether a commit is counted as a contribution.}
spec.description = %q{Check whether a GitHub commit is counted as a contribution for a specific GitHub user.}
spec.homepage = ""
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency "octokit", "~> 3.8"
spec.add_development_dependency "bundler", "~> 1.7"
spec.add_development_dependency "rake"
spec.add_development_dependency "rspec"
spec.add_development_dependency "webmock"
spec.add_development_dependency "simplecov"
spec.add_development_dependency "coveralls"
end
|
Extend the contacts filtering specs to include filter by topic | require "spec_helper"
describe "Contacts" do
include Public::ContactSteps
let!(:contact) { create(:phone_number).contact }
let!(:contact2) { create(:phone_number).contact }
before { Department.first.contacts.count.should eq(2) }
before { ensure_on contacts_path(Department.first) }
context "list" do
it { verify contacts_exist([contact, contact2]) }
end
context "filter", js: true do
before { fill_in "Contains", with: contact.title } # filter
it { verify contacts_exist([contact]) }
it { expect(page).to_not have_content(contact2.title) }
end
end
| require "spec_helper"
describe "Contacts" do
include Public::ContactSteps
let!(:contact) { create(:contact, :with_phone_numbers, :with_contact_group) }
let!(:contact2) { create(:contact, :with_phone_numbers, :with_contact_group) }
before { ensure_on contacts_path(Department.first) }
context "list" do
it { verify contacts_exist([contact, contact2]) }
end
context "filter", js: true do
context "by title" do
before { fill_in "Contains", with: contact.title }
it { verify contacts_exist([contact]) }
it { expect(page).to_not have_content(contact2.title) }
end
context "by topic" do
before { select contact.contact_groups.first, from: "Topic" }
it { verify contacts_exist([contact]) }
it { expect(page).to_not have_content(contact2.title) }
end
context "by both title and topic" do
before {
fill_in "Contains", with: contact.title
select contact.contact_groups.first, from: "Topic"
}
it { verify contacts_exist([contact]) }
it { expect(page).to_not have_content(contact2.title) }
end
end
end
|
Change the output of Exif.creation_date to be a DateTime rather than a raw string | require_relative 'exif_reader'
require_relative 'exif_jpeg'
module FileData
# Convenience class for extracting exif data from a file or stream
class Exif
['.jpeg', '.jpg'].each { |e| FileInfo.info_maps[e] = Exif }
# Create methods that forward to ExifReader
# Each method requires the stream as a parameter to help the user
# fall into a "pit of success" by only opening and closing
# the stream once to get data
ExifReader.public_instance_methods(false).each do |method_name|
define_singleton_method(method_name) do |input, *other_args|
delegate_to_exif_reader(input, method_name, other_args)
end
end
def self.delegate_to_exif_reader(input, name, other_args)
streamify(input) do |stream|
exif = ExifJpeg.new(stream).exif
ExifReader.new(exif).send(name, *other_args)
end
end
def self.streamify(input)
if input.is_a?(String)
::File.open(input, 'rb') { |f| yield f }
else
yield input
end
end
def self.creation_date(input)
FileData::Exif.only_image_tag(input, [34_665, 36_867])
end
def self.origin_date(input)
creation_date(input)
end
end
end
| require_relative 'exif_reader'
require_relative 'exif_jpeg'
module FileData
# Convenience class for extracting exif data from a file or stream
class Exif
['.jpeg', '.jpg'].each { |e| FileInfo.info_maps[e] = Exif }
# Create methods that forward to ExifReader
# Each method requires the stream as a parameter to help the user
# fall into a "pit of success" by only opening and closing
# the stream once to get data
ExifReader.public_instance_methods(false).each do |method_name|
define_singleton_method(method_name) do |input, *other_args|
delegate_to_exif_reader(input, method_name, other_args)
end
end
def self.delegate_to_exif_reader(input, name, other_args)
streamify(input) do |stream|
exif = ExifJpeg.new(stream).exif
ExifReader.new(exif).send(name, *other_args)
end
end
def self.streamify(input)
if input.is_a?(String)
::File.open(input, 'rb') { |f| yield f }
else
yield input
end
end
def self.creation_date(input)
raw_tag = FileData::Exif.only_image_tag(input, [34_665, 36_867])
DateTime.strptime(raw_tag, '%Y:%m:%d %H:%M:%S')
end
def self.origin_date(input)
creation_date(input)
end
end
end
|
Add Sqlite3 adapter, analogues to the postgres one. | require 'do_sqlite3'
require 'data_mapper/support/veritas/adapter'
require 'data_mapper/support/veritas/adapter/data_objects'
module Veritas
module Adapter
# A veritas adapter for sqlite3
#
class Sqlite3 < DataObjects
uri_scheme :sqlite3
end # class Sqlite3
end # module Adapter
end # module Veritas
| |
Add event aggregate_id and sequence_number to spec | require 'spec_helper'
describe Sequent::Core::SerializesCommand do
class RecordMock
include Sequent::Core::SerializesCommand
attr_accessor :aggregate_id,
:created_at,
:user_id,
:command_type,
:command_json
end
class RecordValueObject < Sequent::Core::ValueObject
attrs value: String
end
class RecordCommand < Sequent::Core::Command
attrs value: RecordValueObject
end
let(:value_object) { RecordValueObject.new }
let(:command) { RecordCommand.new(aggregate_id: "1",
user_id: "ben en kim",
value: value_object) }
describe ".command" do
it 'only serializes declared attrs' do
# call valid to let AM generate @errors and @validation_context
command.valid?
record = RecordMock.new
record.command = command
payload = Sequent::Core::Oj.strict_load(record.command_json)
expect(payload).to have_key("aggregate_id")
expect(payload).to have_key("value")
expect(payload["value"]).to_not have_key("errors")
expect(payload["value"]).to have_key("value")
end
end
end
| require 'spec_helper'
describe Sequent::Core::SerializesCommand do
class RecordMock
include Sequent::Core::SerializesCommand
attr_accessor :aggregate_id,
:created_at,
:user_id,
:command_type,
:command_json,
:event_aggregate_id,
:event_sequence_number
end
class RecordValueObject < Sequent::Core::ValueObject
attrs value: String
end
class RecordCommand < Sequent::Core::Command
attrs value: RecordValueObject
end
let(:value_object) { RecordValueObject.new }
let(:command) { RecordCommand.new(aggregate_id: "1",
user_id: "ben en kim",
value: value_object) }
describe ".command" do
it 'only serializes declared attrs' do
# call valid to let AM generate @errors and @validation_context
command.valid?
record = RecordMock.new
record.command = command
payload = Sequent::Core::Oj.strict_load(record.command_json)
expect(payload).to have_key("aggregate_id")
expect(payload).to have_key("value")
expect(payload["value"]).to_not have_key("errors")
expect(payload["value"]).to have_key("value")
end
end
end
|
Remove unused to_s method for now | module Hector
class Request
attr_reader :line, :command, :args, :text
def initialize(line)
@line = line
parse
end
def to_s
line
end
def event_name
"on_#{command.downcase}"
end
protected
def parse
source = line.dup
@command = extract!(source, /^ *([^ ]+)/, "").upcase
@text = extract!(source, / :(.*)$/)
@args = source.strip.split(" ")
@text ||= @args.last
end
def extract!(line, regex, default = nil)
result = nil
line.gsub!(regex) do |match|
result = $~[1]
""
end
result || default
end
end
end
| module Hector
class Request
attr_reader :line, :command, :args, :text
def initialize(line)
@line = line
parse
end
def event_name
"on_#{command.downcase}"
end
protected
def parse
source = line.dup
@command = extract!(source, /^ *([^ ]+)/, "").upcase
@text = extract!(source, / :(.*)$/)
@args = source.strip.split(" ")
@text ||= @args.last
end
def extract!(line, regex, default = nil)
result = nil
line.gsub!(regex) do |match|
result = $~[1]
""
end
result || default
end
end
end
|
Improve 'some stats exist' step to handle 2 different steps equally | Given(/^that no stats exist$/) do
end
Given(/^that some stats exist$/) do
@stats_csv = "aardsda01,2009,AL,SEA,73,0,0,0,0,0,0,0,0,0"
end
When(/^I display the stats report$/) do
@report = CalculationReport.new(@stats_csv)
@report.display_report
end
Then(/^I should see that no stats are available to calculate$/) do
@report.message.should eq "There are no stats to calculate."
end
Then(/^I should see the calculated stats$/) do
@report.message.should include "Most improved batting average from 2009 to 2010:"
end
| Given(/^that no stats exist$/) do
end
Given(/^that some stats exist(?: for one player)$/) do
@stats_csv = "aardsda01,2009,AL,SEA,73,0,0,0,0,0,0,0,0,0"
end
When(/^I display the stats report$/) do
@report = CalculationReport.new(@stats_csv)
@report.display_report
end
Then(/^I should see that no stats are available to calculate$/) do
@report.message.should eq "There are no stats to calculate."
end
Then(/^I should see the calculated stats$/) do
@report.message.should include "Most improved batting average from 2009 to 2010:"
end
|
Use proper comparison for ruby version | # Disable XML parameter parsing, see:
# http://www.insinuator.net/2013/01/rails-yaml/
ActionDispatch::ParamsParser::DEFAULT_PARSERS.delete(Mime::XML)
# Patches for Ruby 2.4
if RUBY_VERSION >= "2.4.0"
# ActiveSupport dates, e.g: 3.days (fixed in Rails 5, and no sooner)
class ActiveSupport::Duration
def coerce(other)
[other, to_i]
end
end
# ActiveRecord 3.2. Fixed in Arel 7.1.0
require 'arel'
module Arel
module Visitors
class Dot
alias :visit_Integer :visit_String
end
class ToSql
alias :visit_Integer :literal
end
class DepthFirst
alias :visit_Integer :terminal
end
end
end
end
| # Disable XML parameter parsing, see:
# http://www.insinuator.net/2013/01/rails-yaml/
ActionDispatch::ParamsParser::DEFAULT_PARSERS.delete(Mime::XML)
# Patches for Ruby 2.4
if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new("2.4")
# ActiveSupport dates, e.g: 3.days (fixed in Rails 5, and no sooner)
class ActiveSupport::Duration
def coerce(other)
[other, to_i]
end
end
# ActiveRecord 3.2. Fixed in Arel 7.1.0
require 'arel'
module Arel
module Visitors
class Dot
alias :visit_Integer :visit_String
end
class ToSql
alias :visit_Integer :literal
end
class DepthFirst
alias :visit_Integer :terminal
end
end
end
end
|
Rename tests to reflect helper being tested | require 'test_helper'
class WickedPdfHelperTest < ActionView::TestCase
test 'should return the same as stylesheet_link_tag when passed a full path' do
assert_equal wicked_pdf_stylesheet_link_tag('pdf'),
stylesheet_link_tag('pdf', Rails.root.join('public','stylesheets','pdf').to_s)
end
test 'should return the same as image_tag when passed a full path' do
assert_equal wicked_pdf_image_tag('pdf'),
image_tag(Rails.root.join('public','images','pdf').to_s)
end
test 'should return the same as javascript_src_tag when passed a full path' do
assert_equal wicked_pdf_javascript_src_tag('pdf'),
javascript_src_tag(Rails.root.join('public','javascripts','pdf').to_s, {})
end
test 'should return many wicked_pdf_javascript_src_tags on wicked_pdf_javascript_include_tag' do
assert_equal wicked_pdf_javascript_include_tag('foo', 'bar'),
[wicked_pdf_javascript_src_tag('foo'), wicked_pdf_javascript_src_tag('bar')].join("\n")
end
end
| require 'test_helper'
class WickedPdfHelperTest < ActionView::TestCase
test 'wicked_pdf_stylesheet_link_tag should return the same as stylesheet_link_tag when passed a full path' do
assert_equal wicked_pdf_stylesheet_link_tag('pdf'),
stylesheet_link_tag('pdf', Rails.root.join('public','stylesheets','pdf').to_s)
end
test 'wicked_pdf_image_tag should return the same as image_tag when passed a full path' do
assert_equal wicked_pdf_image_tag('pdf'),
image_tag(Rails.root.join('public','images','pdf').to_s)
end
test 'wicked_pdf_javascript_src_tag should return the same as javascript_src_tag when passed a full path' do
assert_equal wicked_pdf_javascript_src_tag('pdf'),
javascript_src_tag(Rails.root.join('public','javascripts','pdf').to_s, {})
end
test 'wicked_pdf_include_tag should return many wicked_pdf_javascript_src_tags' do
assert_equal wicked_pdf_javascript_include_tag('foo', 'bar'),
[wicked_pdf_javascript_src_tag('foo'), wicked_pdf_javascript_src_tag('bar')].join("\n")
end
end
|
Update dev logging config to support unicorn | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.default_url_options = { host: ENV['MAIL_HOST'] }
# Location of mailer previews
config.action_mailer.preview_path = 'test/mailers/previews'
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
end
| Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Logging
config.log_level = :debug
config.logger = ActiveSupport::Logger.new(STDOUT)
config.colorize_logging = true
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.default_url_options = { host: ENV['MAIL_HOST'] }
# Location of mailer previews
config.action_mailer.preview_path = 'test/mailers/previews'
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
end
|
Exclude .specjour dir when rsyncing project | module Specjour
class RsyncDaemon
require 'fileutils'
attr_reader :project_path, :project_name
def initialize(project_path, project_name)
@project_path = project_path
@project_name = project_name
end
def config_directory
@config_directory ||= File.join(project_path, ".specjour")
end
def config_file
@config_file ||= File.join(config_directory, "rsyncd.conf")
end
def start
write_config
system("rsync", "--daemon", "--config=#{config_file}", "--port=8989")
at_exit { stop }
end
def stop
if pid
Process.kill("TERM", pid)
FileUtils.rm(pid_file)
end
end
protected
def write_config
unless File.exists? config_file
FileUtils.mkdir_p config_directory
File.open(config_file, 'w') do |f|
f.write config
end
end
end
def pid
if File.exists?(pid_file)
File.read(pid_file).strip.to_i
end
end
def pid_file
File.join(config_directory, "rsync_daemon.pid")
end
def config
<<-CONFIG
# global configuration
use chroot = no
timeout = 60
read only = yes
pid file = #{pid_file}
[#{project_name}]
path = #{project_path}
exclude = .git* doc tmp/* log script
CONFIG
end
end
end
| module Specjour
class RsyncDaemon
require 'fileutils'
attr_reader :project_path, :project_name
def initialize(project_path, project_name)
@project_path = project_path
@project_name = project_name
end
def config_directory
@config_directory ||= File.join(project_path, ".specjour")
end
def config_file
@config_file ||= File.join(config_directory, "rsyncd.conf")
end
def start
write_config
system("rsync", "--daemon", "--config=#{config_file}", "--port=8989")
at_exit { stop }
end
def stop
if pid
Process.kill("TERM", pid)
FileUtils.rm(pid_file)
end
end
protected
def write_config
unless File.exists? config_file
FileUtils.mkdir_p config_directory
File.open(config_file, 'w') do |f|
f.write config
end
end
end
def pid
if File.exists?(pid_file)
File.read(pid_file).strip.to_i
end
end
def pid_file
File.join(config_directory, "rsync_daemon.pid")
end
def config
<<-CONFIG
# global configuration
use chroot = no
timeout = 60
read only = yes
pid file = #{pid_file}
[#{project_name}]
path = #{project_path}
exclude = .git* .specjour doc tmp/* log script
CONFIG
end
end
end
|
Add new report class for MI provisional assessments | module Reports
class ProvisionalAssessments
NAME = 'provisional_assessment'.freeze
COLUMNS = %w[provider_name provider_type provider_claims supplier_number
supplier_claims claimed authorised percent].freeze
def self.call
new.call
end
def call
Stats::MIData.connection.execute(query).to_a
end
private
def query
%{SELECT supplier_data.provider_name, provider_type,
provider_claims, supplier_number,
supplier_claims, claimed,
authorised, percent
FROM (#{suppliers_totals_query}) as supplier_data
INNER JOIN (#{providers_totals_query}) as provider_data
ON (supplier_data.provider_name = provider_data.provider_name)}
end
def providers_totals_query
%{SELECT provider_name, count(provider_name) as provider_claims FROM mi_data GROUP BY provider_name}
end
def suppliers_totals_query
%{SELECT provider_name,
provider_type,
supplier_number,
count(supplier_number) as supplier_claims,
sum(amount_claimed) as claimed,
sum(amount_authorised) as authorised,
sum(amount_authorised)/sum(amount_claimed) as percent
FROM mi_data
GROUP BY provider_name, provider_type, supplier_number}
end
end
end
| |
Create user: aact when db dropped/re-created | class CreateAdminTables < ActiveRecord::Migration
def change
create_table "load_events", force: :cascade do |t|
t.string "event_type"
t.string "status"
t.text "description"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "completed_at"
t.string "load_time"
t.integer "new_studies"
t.integer "changed_studies"
end
create_table "sanity_checks", force: :cascade do |t|
t.text "report", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "statistics", force: :cascade do |t|
t.date "start_date"
t.date "end_date"
t.string "sponsor_type"
t.string "stat_category"
t.string "stat_value"
t.integer "number_of_studies"
end
create_table "study_xml_records", force: :cascade do |t|
t.xml "content"
t.string "nct_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
end
end
| class CreateAdminTables < ActiveRecord::Migration
def change
create_table "load_events", force: :cascade do |t|
t.string "event_type"
t.string "status"
t.text "description"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "completed_at"
t.string "load_time"
t.integer "new_studies"
t.integer "changed_studies"
end
create_table "sanity_checks", force: :cascade do |t|
t.text "report", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "statistics", force: :cascade do |t|
t.date "start_date"
t.date "end_date"
t.string "sponsor_type"
t.string "stat_category"
t.string "stat_value"
t.integer "number_of_studies"
end
create_table "study_xml_records", force: :cascade do |t|
t.xml "content"
t.string "nct_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
execute <<-SQL
CREATE USER aact WITH PASSWORD 'aact';
GRANT SELECT ON ALL TABLES IN SCHEMA public TO aact;
SQL
end
end
|
Update model_options in example app | # Be sure to restart your server when you modify this file. Action Cable runs in a loop that does not support auto reloading.
class TodoChannel < ApplicationCable::Channel
include ActionCableNotifications::Channel
def subscribed
stream_notifications_for Todo,
model_options: {
scope: {
select: [:id, :title, :completed],
where: {
completed: false
}
}
}
end
def unsubscribed
# Any cleanup needed when channel is unsubscribed
end
end
| # Be sure to restart your server when you modify this file. Action Cable runs in a loop that does not support auto reloading.
class TodoChannel < ApplicationCable::Channel
include ActionCableNotifications::Channel
def subscribed
stream_notifications_for Todo,
model_options: {
scope: {
select: [:id, :title, :completed]
}
}
end
def unsubscribed
# Any cleanup needed when channel is unsubscribed
end
end
|
Fix outdated reference to racers in migration | class AddStatusToRacers < ActiveRecord::Migration
def self.up
return if ASSOCIATION.short_name == "MBRA"
add_column :racers, :status, :string
end
def self.down
remove_column :racers, :status
end
end
| class AddStatusToRacers < ActiveRecord::Migration
def self.up
return if ASSOCIATION.short_name == "MBRA"
add_column :people, :status, :string
end
def self.down
remove_column :people, :status
end
end
|
Add middle name to license text | # Copyright 2007-2022 James (Jamie) Orchard-Hays
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module XTF
end
# Try loading from local environment, ie vendor/rails, if error, use rubygems
begin
require "active_support"
rescue LoadError => e
require "rubygems"
require "active_support"
end
require "active_support/core_ext/string"
# $:.unshift(File.dirname(__FILE__))
require_relative "xtf/xml"
require_relative "xtf/search"
require_relative "xtf/result"
| # Copyright 2007-2022 James (Jamie) Edward Orchard-Hays
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module XTF
end
# Try loading from local environment, ie vendor/rails, if error, use rubygems
begin
require "active_support"
rescue LoadError => e
require "rubygems"
require "active_support"
end
require "active_support/core_ext/string"
# $:.unshift(File.dirname(__FILE__))
require_relative "xtf/xml"
require_relative "xtf/search"
require_relative "xtf/result"
|
Use update_column to set :last_activity_at for project | class ProjectActivityCacheObserver < BaseObserver
observe :event
def after_create(event)
event.project.update_attribute(:last_activity_at, event.created_at) if event.project
end
end
| class ProjectActivityCacheObserver < BaseObserver
observe :event
def after_create(event)
event.project.update_column(:last_activity_at, event.created_at) if event.project
end
end
|
Revert "try deploy heroku removing add_providers" | class OmniauthCallbacksController < Devise::OmniauthCallbacksController
skip_before_filter :set_locale
#def self.add_providers
#OauthProvider.all.each do |p|
#define_method p.name.downcase do
#omniauth = request.env['omniauth.auth']
#@user = User.
#select('users.*').
#joins('JOIN authorizations ON authorizations.user_id = users.id').
#joins('JOIN oauth_providers ON oauth_providers.id = authorizations.oauth_provider_id').
#where("authorizations.uid = :uid AND oauth_providers.name = :provider", {uid: omniauth[:uid], provider: p.name}).
#first || User.create_with_omniauth(omniauth, current_user)
#flash[:notice] = I18n.t("devise.omniauth_callbacks.success", kind: p.name.capitalize)
#sign_in @user, event: :authentication
#if @user.email
#redirect_to(session[:return_to] || root_path)
#session[:return_to] = nil
#else
#render 'users/set_email'
#end
#end
#end
#end
#add_providers
end
| class OmniauthCallbacksController < Devise::OmniauthCallbacksController
skip_before_filter :set_locale
def self.add_providers
OauthProvider.all.each do |p|
define_method p.name.downcase do
omniauth = request.env['omniauth.auth']
@user = User.
select('users.*').
joins('JOIN authorizations ON authorizations.user_id = users.id').
joins('JOIN oauth_providers ON oauth_providers.id = authorizations.oauth_provider_id').
where("authorizations.uid = :uid AND oauth_providers.name = :provider", {uid: omniauth[:uid], provider: p.name}).
first || User.create_with_omniauth(omniauth, current_user)
flash[:notice] = I18n.t("devise.omniauth_callbacks.success", kind: p.name.capitalize)
sign_in @user, event: :authentication
if @user.email
redirect_to(session[:return_to] || root_path)
session[:return_to] = nil
else
render 'users/set_email'
end
end
end
end
add_providers
end
|
Add spec for user default address setter | # frozen_string_literal: true
require 'spec_helper'
describe UserDefaultAddressSetter do
let(:customer_address) { create(:address, address1: "customer road") }
let(:order_address) { create(:address, address1: "order road") }
let(:customer) do
create(:customer, bill_address: customer_address, ship_address: customer_address)
end
let(:order) do
create(:order, customer: customer, bill_address: order_address, ship_address: order_address)
end
let(:user) { create(:user) }
let(:setter) { UserDefaultAddressSetter.new(order, user) }
describe '#set_default_bill_address' do
it "sets the user and customer bill address to the order bill address" do
setter.set_default_bill_address
expect(user.bill_address).to eq order.bill_address
expect(order.customer.bill_address).to eq order.bill_address
end
end
describe '#set_default_ship_address' do
it "sets the user and customer ship address to the order ship address" do
setter.set_default_ship_address
expect(user.ship_address).to eq order.ship_address
expect(order.customer.ship_address).to eq order.ship_address
end
end
end
| |
Add specs for CloudObjectContainerStoreContainer queue method. | RSpec.describe CloudObjectStoreContainer do
let(:ems) { FactoryBot.create(:ems_vmware) }
let(:user) { FactoryBot.create(:user, :userid => 'test') }
context "queued methods" do
it 'queues a create task with cloud_object_store_container_create_queue' do
task_id = described_class.cloud_object_store_container_create_queue(user.userid, ems)
expect(MiqTask.find(task_id)).to have_attributes(
:name => "creating Cloud Object Store Container for user #{user.userid}",
:state => "Queued",
:status => "Ok"
)
expect(MiqQueue.where(:class_name => described_class.name).first).to have_attributes(
:class_name => described_class.name,
:method_name => 'cloud_object_store_container_create',
:role => 'ems_operations',
:queue_name => 'generic',
:zone => ems.my_zone,
:args => [ems.id, {}]
)
end
it 'requires a userid and ems for a queued create task' do
expect { described_class.cloud_object_store_container_create_queue }.to raise_error(ArgumentError)
expect { described_class.cloud_object_store_container_create_queue(user.userid) }.to raise_error(ArgumentError)
end
end
end
| |
Add in a note to the Blog::Output::Page class | require_relative 'file_base'
require_relative 'post_template'
module Everything
class Blog
module Output
class Page < Everything::Blog::Output::FileBase
def should_generate_output?
# TODO: Also need to know if the template changed, because we also
# want to generate in that case too.
markdown_newer_than_output? || metadata_newer_than_output?
end
def template_context
source_file.post
end
private
def markdown_newer_than_output?
content_mtime > output_mtime
end
def metadata_newer_than_output?
metadata_mtime > output_mtime
end
def output_mtime
@output_time ||=
if File.exist?(output_file_path)
File.mtime(output_file_path)
else
Time.at(0)
end
end
def content_mtime
@content_mtime ||= File.mtime(
source_file.post.piece.content.file_path
)
end
def metadata_mtime
@metadata_mtime ||= File.mtime(
source_file.post.piece.metadata.file_path
)
end
def template_klass
Everything::Blog::Output::PostTemplate
end
end
end
end
end
| require_relative 'file_base'
require_relative 'post_template'
module Everything
class Blog
module Output
class Page < Everything::Blog::Output::FileBase
def should_generate_output?
# TODO: Also need to know if the template changed, because we also
# want to generate in that case too.
# TODO: Having a way to force regeneration of all pages would also be
# nice to have.
markdown_newer_than_output? || metadata_newer_than_output?
# true
end
def template_context
source_file.post
end
private
def markdown_newer_than_output?
content_mtime > output_mtime
end
def metadata_newer_than_output?
metadata_mtime > output_mtime
end
def output_mtime
@output_time ||=
if File.exist?(output_file_path)
File.mtime(output_file_path)
else
Time.at(0)
end
end
def content_mtime
@content_mtime ||= File.mtime(
source_file.post.piece.content.file_path
)
end
def metadata_mtime
@metadata_mtime ||= File.mtime(
source_file.post.piece.metadata.file_path
)
end
def template_klass
Everything::Blog::Output::PostTemplate
end
end
end
end
end
|
Fix backtrace for running the bundle’s own specs. | if defined?(RSpec)
bundle_patterns = [%r{/RSpec\.tmbundle/}, %r{^/tmp/textmate-command}]
if RSpec.configuration.respond_to?(:backtrace_exclusion_patterns)
RSpec.configuration.backtrace_exclusion_patterns += bundle_patterns
elsif RSpec.configuration.respond_to?(:backtrace_clean_patterns)
RSpec.configuration.backtrace_clean_patterns += bundle_patterns
end
end
| if defined?(RSpec)
bundle_patterns = [%r{^/tmp/textmate-command}]
bundle_patterns << %r{/RSpec\.tmbundle/} unless ENV['TM_PROJECT_DIRECTORY'].include?("RSpec.tmbundle")
if RSpec.configuration.respond_to?(:backtrace_exclusion_patterns)
RSpec.configuration.backtrace_exclusion_patterns += bundle_patterns
elsif RSpec.configuration.respond_to?(:backtrace_clean_patterns)
RSpec.configuration.backtrace_clean_patterns += bundle_patterns
end
end
|
Add `:if_exists => true` to RemoveGeniModels Migration | class RemoveGeniModels < ActiveRecord::Migration[6.1]
def up
drop_table :geniverse_activities
drop_table :geniverse_articles
drop_table :geniverse_cases
drop_table :geniverse_dragons
drop_table :geniverse_help_messages
drop_table :geniverse_unlockables
drop_table :geniverse_users
end
def down
# no going back!
raise ActiveRecord::IrreversibleMigration
end
end
| class RemoveGeniModels < ActiveRecord::Migration[6.1]
def up
drop_table :geniverse_activities, :if_exists => true
drop_table :geniverse_articles, :if_exists => true
drop_table :geniverse_cases, :if_exists => true
drop_table :geniverse_dragons, :if_exists => true
drop_table :geniverse_help_messages, :if_exists => true
drop_table :geniverse_unlockables, :if_exists => true
drop_table :geniverse_users, :if_exists => true
end
def down
# no going back!
raise ActiveRecord::IrreversibleMigration
end
end
|
Use same 30 seconds satellite timeout for all actions | module Gitlab
module Satellite
class FileAction < Action
attr_accessor :file_path, :ref
def initialize(user, project, ref, file_path)
super user, project, git_timeout: 10.seconds
@file_path = file_path
@ref = ref
end
def safe_path?(path)
File.absolute_path(path) == path
end
def write_file(abs_file_path, content, file_encoding = 'text')
if file_encoding == 'base64'
File.open(abs_file_path, 'wb') { |f| f.write(Base64.decode64(content)) }
else
File.open(abs_file_path, 'w') { |f| f.write(content) }
end
end
end
end
end
| module Gitlab
module Satellite
class FileAction < Action
attr_accessor :file_path, :ref
def initialize(user, project, ref, file_path)
super user, project
@file_path = file_path
@ref = ref
end
def safe_path?(path)
File.absolute_path(path) == path
end
def write_file(abs_file_path, content, file_encoding = 'text')
if file_encoding == 'base64'
File.open(abs_file_path, 'wb') { |f| f.write(Base64.decode64(content)) }
else
File.open(abs_file_path, 'w') { |f| f.write(content) }
end
end
end
end
end
|
Add more log to send request | class SendRequestEndPoint < Goliath::API
include Neighborparrot::Connection
include Neighborparrot::Auth
# Default headers
HEADERS = { 'Access-Control-Allow-Origin' => '*',
'Content-Type' => 'text/event-stream',
'Cache-Control' => 'no-cache',
'Connection' => 'keep-alive',
'Transfer-Encoding' => 'chunked',
'X-STREAM' => 'Neighborparrot',
'SERVER' => 'Neighborparrot'
}
# on close action
def on_close(env)
end
# Prepare the event source connection
def response(env)
env.trace 'open send connection'
validate_send_params # Ensure required parameters
EM.next_tick do
auth_request do |app|
message_id = prepare_send_request env
env.chunked_stream_send message_id.to_s
env.chunked_stream_close
end
end
chunked_streaming_response(200, HEADERS)
end
end
| class SendRequestEndPoint < Goliath::API
include Neighborparrot::Connection
include Neighborparrot::Auth
# Default headers
HEADERS = { 'Access-Control-Allow-Origin' => '*',
'Content-Type' => 'text/event-stream',
'Cache-Control' => 'no-cache',
'Connection' => 'keep-alive',
'Transfer-Encoding' => 'chunked',
'X-STREAM' => 'Neighborparrot',
'SERVER' => 'Neighborparrot'
}
# on close action
def on_close(env)
end
# Prepare the event source connection
def response(env)
env.trace 'open send connection'
env.logger.debug "Begin send request"
validate_send_params # Ensure required parameters
EM.next_tick do
auth_request do |app|
message_id = prepare_send_request env
env.chunked_stream_send message_id.to_s
env.chunked_stream_close
end
end
chunked_streaming_response(200, HEADERS)
end
end
|
Make interface of Nothing the same as for Monad | module M
class Maybe < Monad
def self.return(value)
return value if value == Nothing
Just(value)
end
def self.join(value)
return Nothing if value == Nothing
super
end
end
class Just < Maybe; end
class Nothing < Maybe
class << self
def bind(_); self; end
def fmap(_); self; end
def >=(_); self; end
def to_s(*)
"Nothing"
end
def inspect
"Nothing"
end
end
end
def Maybe(v); Maybe.return(v); end
def Just(v); Just.new(v); end
end
| module M
class Maybe < Monad
def self.return(value)
return value if value == Nothing
Just(value)
end
def self.join(value)
return Nothing if value == Nothing
super
end
end
class Just < Maybe; end
class Nothing < Maybe
class << self
def bind(fn=nil, &block); self; end
def fmap(fn=nil, &block); self; end
def >=(fn=nil, &block); self; end
def to_s(*)
"Nothing"
end
def inspect
"Nothing"
end
end
end
def Maybe(v); Maybe.return(v); end
def Just(v); Just.new(v); end
end
|
Add spec for post forms on relative/absolute time | require 'spec_helper'
require './generate_flatfile'
describe "Post Timescales", :js => true do
it "Returns valid data using the GUI and POST searches" do
metric = ["thing_one"]
url_metrics = metric.map{|x| "#{x}~#{x}"}
nice_metrics = metric.map{|x| "#{x} - #{x}"}
backends = []
metric.each do |m|
backends << "{type: 'Flatfile', alias: '#{m}', settings: { file_name: 'public/flatfile_1s.csv', metric: '#{m}'}}"
end
add_config "backends: [#{backends.join(",")}]"
test_config metric[0]
visit "/?metric=#{url_metrics[0]}"
# Relative time is default
expect(page.body).to include "icon-calendar"
click_icon "calendar"
expect(current_url).to include "&time=absolute"
click_icon "rocket"
expect(current_url).to include "&time=relative"
# Relative Time
click_dropdown
fill_in "time_number", with: "20"
click_on "min"
expect(current_url).to include "&stop=20min"
click_dropdown
click_on "now"
expect(current_url).not_to include "&stop="
# Absolute Time
click_icon "calendar"
now = Time.now().to_i
start = now - 5400
stop = now - 600
fill_in "time_start_time", with: epoch_to_local_date(start)
fill_in "time_stop_time", with: epoch_to_local_date(stop)
click_on "Go"
expect(current_url).to include "&time=absolute"
expect(current_url).to include "&start=#{start}"
expect(current_url).to include "&stop=#{stop}"
end
end
def epoch_to_local_date d
Time.at(d).strftime("%d/%m/%Y %H:%M:%S %p")
end
def click_icon icon
# Using the font-awesome class "icon-<variable>", find that <i>
# element, and then walk up to the parent (`//..`), which is a button,
# and click it
first(:xpath, "//i[@class='icon-#{icon}']//..").click
end
def click_dropdown
first(:xpath, "//span[@class='caret']//..").click
end
| |
Exclude deauthorize spec from live test | require 'spec_helper'
shared_examples 'Account API' do
it 'retrieves a stripe account', live: true do
account = Stripe::Account.retrieve
expect(account).to be_a Stripe::Account
expect(account.id).to match /acct\_/
end
it 'all', live: true do
accounts = Stripe::Account.all
expect(accounts).to be_a Stripe::ListObject
expect(accounts.data).to eq []
end
it 'deauthorizes the stripe account', live: true do
account = Stripe::Account.retrieve
result = account.deauthorize('CLIENT_ID')
expect(result).to be_a Stripe::StripeObject
expect(result[:stripe_user_id]).to eq account[:id]
end
end
| require 'spec_helper'
shared_examples 'Account API' do
it 'retrieves a stripe account', live: true do
account = Stripe::Account.retrieve
expect(account).to be_a Stripe::Account
expect(account.id).to match /acct\_/
end
it 'all', live: true do
accounts = Stripe::Account.all
expect(accounts).to be_a Stripe::ListObject
expect(accounts.data).to eq []
end
it 'deauthorizes the stripe account', live: false do
account = Stripe::Account.retrieve
result = account.deauthorize('CLIENT_ID')
expect(result).to be_a Stripe::StripeObject
expect(result[:stripe_user_id]).to eq account[:id]
end
end
|
Fix pivotal-tracker version for now | # -*- encoding: utf-8 -*-
require File.expand_path('../lib/pt-flow/version', __FILE__)
Gem::Specification.new do |gem|
gem.authors = ["Jens Balvig"]
gem.email = ["jens@balvig.com"]
gem.description = %q{Some extra methods for the pt gem to use in our dev flow.}
gem.summary = %q{Some extra methods for the pt gem to use in our dev flow.}
gem.homepage = ""
gem.files = `git ls-files`.split($\)
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.name = "pt-flow"
gem.require_paths = ["lib"]
gem.version = PT::Flow::VERSION
gem.add_dependency 'pt'
gem.add_dependency 'hub'
gem.add_dependency 'rest-client'
gem.add_dependency 'active_support'
gem.add_dependency 'i18n'
gem.add_dependency 'hirb-colors'
gem.add_development_dependency 'rspec', '~> 2.9'
gem.add_development_dependency 'webmock'
gem.add_development_dependency 'guard'
gem.add_development_dependency 'guard-rspec'
gem.add_development_dependency 'rake'
end
| # -*- encoding: utf-8 -*-
require File.expand_path('../lib/pt-flow/version', __FILE__)
Gem::Specification.new do |gem|
gem.authors = ["Jens Balvig"]
gem.email = ["jens@balvig.com"]
gem.description = %q{Some extra methods for the pt gem to use in our dev flow.}
gem.summary = %q{Some extra methods for the pt gem to use in our dev flow.}
gem.homepage = ""
gem.files = `git ls-files`.split($\)
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.name = "pt-flow"
gem.require_paths = ["lib"]
gem.version = PT::Flow::VERSION
gem.add_dependency 'pt'
gem.add_dependency 'hub'
gem.add_dependency 'pivotal-tracker', '< 0.5.9'
gem.add_dependency 'active_support'
gem.add_dependency 'i18n'
gem.add_dependency 'hirb-colors'
gem.add_development_dependency 'rspec', '~> 2.9'
gem.add_development_dependency 'webmock'
gem.add_development_dependency 'guard'
gem.add_development_dependency 'guard-rspec'
gem.add_development_dependency 'rake'
end
|
Remove FIXME from primitive type validation | # -*- encoding: utf-8 -*-
require 'data_mapper/validation/rule'
module DataMapper
module Validation
class Rule
class PrimitiveType < Rule
def valid?(resource)
property = get_resource_property(resource, attribute_name)
value = resource.validation_property_value(attribute_name)
# FIXME: Deprecation message said Property#valid? was replaced
# with Property#value_dumped? but IMHO value_loaded? would be correct here?
value.nil? || property.value_dumped?(value)
end
def violation_type(resource)
:primitive
end
def violation_data(resource)
property = get_resource_property(resource, attribute_name)
[ [ :primitive, property.load_as ] ]
end
end # class PrimitiveType
end # class Rule
end # module Validation
end # module DataMapper
| # -*- encoding: utf-8 -*-
require 'data_mapper/validation/rule'
module DataMapper
module Validation
class Rule
class PrimitiveType < Rule
def valid?(resource)
property = get_resource_property(resource, attribute_name)
value = resource.validation_property_value(attribute_name)
value.nil? || property.value_dumped?(value)
end
def violation_type(resource)
:primitive
end
def violation_data(resource)
property = get_resource_property(resource, attribute_name)
[ [ :primitive, property.load_as ] ]
end
end # class PrimitiveType
end # class Rule
end # module Validation
end # module DataMapper
|
Define new tests to cover the case with redirection and setting cookies | require 'spec_helper'
module RackPassword
describe Block do
describe "success rack response" do
let(:block){ Block.new("app") }
it "return 200 status code" do
expect(block.success_rack_response[0]).to eq 200
end
it "return html" do
expect(block.success_rack_response[2][0]).to include("password")
end
it "fills in application name if used as Rails middleware" do
app_name = 'TestAppName'
class Rails; end
allow(Rails).to receive_message_chain(:application, :class, :parent_name) { app_name }
expect(block.success_rack_response[2][0]).to include(app_name)
end
end
end
end
| require 'spec_helper'
module RackPassword
describe Block do
let(:app_name) { 'TestAppName' }
before do
class Rails; end
allow(Rails).to receive_message_chain(:application, :class, :parent_name) { app_name }
allow(Rack::Request).to receive(:new).and_return(request)
end
describe 'for not post requests' do
let(:block) { Block.new('app', auth_codes: ['Janusz']) }
let(:request) do
double(
cookies: {},
host: 'localhost',
params: { 'code' => 'Janusz' },
path: '/',
post?: false
)
end
it 'returns 200 status code' do
expect(block.call(request)[0]).to eq(200)
end
it 'returns html' do
expect(block.call(request)[2][0]).to include('password')
end
it 'fills in application name if used as Rails middleware' do
expect(block.call(request)[2][0]).to include(app_name)
end
end
describe 'for post requests' do
let(:request) do
double(
cookies: {},
host: 'localhost',
params: { 'code' => 'Janusz' },
path: '/',
post?: true
)
end
context 'when requests contain proper auth code' do
let(:block) { Block.new('app', { auth_codes: ['Janusz'] }) }
it 'returns 301 status code' do
expect(block.call(request)[0]).to eq(301)
end
end
context 'when requests contain invalid auth code' do
let(:block) { Block.new('app', { auth_codes: ['Janusz123'] }) }
it 'returns html' do
expect(block.call(request)[2][0]).to include('password')
end
end
end
end
end
|
Use order serializer for order/show | class Sprangular::OrdersController < Sprangular::BaseController
before_filter :check_authorization
def show
authorize! :show, @user
@order = Spree::Order.where(number: params[:id]).first!
render 'spree/api/orders/show'
end
end
| class Sprangular::OrdersController < Sprangular::BaseController
before_filter :check_authorization
def show
authorize! :show, @user
@order = Spree::Order.where(number: params[:id]).first!
render json: @order,
scope: current_spree_user,
serializer: Sprangular::OrderSerializer,
root: false
end
end
|
Resolve deprecation warning in the chefspecs | require 'spec_helper'
describe 'aws::ec2_hints' do
let(:chef_run) { ChefSpec::SoloRunner.converge(described_recipe) }
it 'creates the ohai hint' do
expect(chef_run).to create_ohai_hint('ec2').at_compile_time
end
end
| require 'spec_helper'
describe 'aws::ec2_hints' do
let(:chef_run) { ChefSpec::SoloRunner.new(platform: 'ubuntu', version: '16.04').converge(described_recipe) }
it 'creates the ohai hint' do
expect(chef_run).to create_ohai_hint('ec2').at_compile_time
end
end
|
Add rake as a development dep | # -*- encoding: utf-8 -*-
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'henson/version'
Gem::Specification.new do |gem|
gem.name = "henson"
gem.version = Henson::VERSION
gem.platform = Gem::Platform::RUBY
gem.authors = ["Will Farrington"]
gem.email = ["wfarr@github.com"]
gem.description = %q{Bundler for Puppet modules.}
gem.summary = %q{Bundler for Puppet modules.}
gem.homepage = ""
gem.required_ruby_version = ">= 1.8.7"
gem.files = `git ls-files`.split($/)
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.require_paths = ["lib"]
# dependencies
gem.add_dependency "thor", "~> 0.16.0"
# development dependencies
gem.add_development_dependency "mocha", "~> 0.12.7"
gem.add_development_dependency "rspec", "~> 2.11"
gem.add_development_dependency "simplecov", "0.7.1"
end
| # -*- encoding: utf-8 -*-
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'henson/version'
Gem::Specification.new do |gem|
gem.name = "henson"
gem.version = Henson::VERSION
gem.platform = Gem::Platform::RUBY
gem.authors = ["Will Farrington"]
gem.email = ["wfarr@github.com"]
gem.description = %q{Bundler for Puppet modules.}
gem.summary = %q{Bundler for Puppet modules.}
gem.homepage = ""
gem.required_ruby_version = ">= 1.8.7"
gem.files = `git ls-files`.split($/)
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.require_paths = ["lib"]
# dependencies
gem.add_dependency "thor", "~> 0.16.0"
# development dependencies
gem.add_development_dependency "mocha", "~> 0.12.7"
gem.add_development_dependency "rspec", "~> 2.11"
gem.add_development_dependency "simplecov", "0.7.1"
gem.add_development_dependency "rake"
end
|
Use AWS installer for installing CodeDeploy agent | remote_file "#{Chef::Config[:file_cache_path]}/codedeploy-agent.rpm" do
source "https://s3.amazonaws.com/aws-codedeploy-us-east-1/latest/codedeploy-agent.noarch.rpm"
end
package "codedeploy-agent" do
action :install
source "#{Chef::Config[:file_cache_path]}/codedeploy-agent.rpm"
end
service "codedeploy-agent" do
action [:enable, :start]
end
| remote_file "#{Chef::Config[:file_cache_path]}/codedeploy-agent-install" do
source "https://s3.amazonaws.com/aws-codedeploy-us-east-1/latest/install"
mode 0755
end
bash "install-codedeploy-agent" do
code <<-EOH
#{Chef::Config[:file_cache_path]}/codedeploy-agent-install auto
EOH
end
service "codedeploy-agent" do
action [:enable, :start]
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.