repo_name
stringlengths 6
97
| path
stringlengths 3
341
| text
stringlengths 8
1.02M
|
|---|---|---|
mitixx/abilitysheet
|
lib/tasks/ts_routes.rake
|
<reponame>mitixx/abilitysheet
# Rake task that regenerates the TypeScript route helpers from the Rails
# routing table (via the ts_routes gem).
namespace :ts do
  TS_ROUTES_FILENAME = "#{Rails.root}/app/javascript/lib/routes.ts".freeze
  desc "Generate #{TS_ROUTES_FILENAME}"
  task routes: :environment do
    Rails.logger.info("Generating #{TS_ROUTES_FILENAME}")
    # Admin and ActiveStorage routes are not needed client-side.
    source = TsRoutes.generate(exclude: [/admin/, /active_storage/])
    File.write(TS_ROUTES_FILENAME, source)
  end
end
|
mitixx/abilitysheet
|
lib/rails_log_silencer.rb
|
<gh_stars>10-100
# frozen_string_literal: true

# Rack middleware that suppresses Rails logging for a fixed set of request
# paths (e.g. noisy health-check endpoints).
class RailsLogSilencer
  # app:   downstream Rack app
  # paths: exact PATH_INFO values to silence
  def initialize(app, paths)
    @app = app
    @paths = paths
  end

  # Passes the request through unchanged unless its path is configured,
  # in which case the Rails logger is silenced around the downstream call.
  def call(env)
    return @app.call(env) unless @paths.include?(env['PATH_INFO'])

    ::Rails.logger.silence { @app.call(env) }
  end
end
|
mitixx/abilitysheet
|
app/services/application_service.rb
|
<filename>app/services/application_service.rb
# frozen_string_literal: true

# Common base class for service objects. Intentionally empty: it exists so
# concrete services share a single ancestor.
class ApplicationService
end
|
mitixx/abilitysheet
|
app/controllers/api/v1/sheets_controller.rb
|
# frozen_string_literal: true

# JSON API for sheets.
class Api::V1::SheetsController < Api::V1::BaseController
  # Active sheets rendered in their public schema form.
  def index
    render json: { sheets: Sheet.active.map(&:schema) }
  end

  # Every sheet (including inactive ones), ordered by id.
  def list
    render json: { sheets: Sheet.order(:id) }
  end
end
|
mitixx/abilitysheet
|
app/models/concerns/score/api.rb
|
# frozen_string_literal: true

# API serialization helpers mixed into Score.
module Score::Api
  extend ActiveSupport::Concern

  included do
    # Public JSON representation of a single score.
    def schema
      {
        sheet_id: sheet_id,
        title: title,
        state: state,
        score: score,
        bp: bp,
        version: version,
        updated_at: updated_at
      }
    end

    # Tally of scores per clear-state, indexes 0..7. Sheets with no score
    # record at all are folded into bucket 7 (presumably the NO PLAY
    # state -- confirm against the lamp constant list).
    def self.pie
      ret = Array.new(8, 0)
      all.each do |score|
        ret[score.state] += 1
      end
      ret[7] += Sheet.active.count - ret.sum
      ret
    end
  end
end
|
mitixx/abilitysheet
|
app/models/concerns/user/devise_methods.rb
|
# frozen_string_literal: true

# Devise-related overrides mixed into User.
module User::DeviseMethods
  extend ActiveSupport::Concern

  included do
    # Allows signing in with username, IIDX id, or email through the
    # single :login virtual attribute.
    def self.find_first_by_auth_conditions(warden_conditions)
      conditions = warden_conditions.dup
      login = conditions.delete(:login)
      if login
        find_by('username = :value OR iidxid = :value OR email = :value', value: login)
      else
        find_by(conditions)
      end
    end

    # Updates profile fields without requiring the current password.
    # Blank password fields are dropped so an empty form does not clear
    # the stored password.
    def update_without_current_password(params, *options)
      params.delete(:current_password)
      params.delete(:password) if params[:password].blank?
      params.delete(:password_confirmation) if params[:password_confirmation].blank?
      clean_up_passwords
      # update_attributes was deprecated in Rails 6.0 and removed in 6.1;
      # update is the drop-in replacement with identical semantics.
      update(params, *options)
    end

    # Email is optional for this app; Devise consults both predicates.
    def email_required?
      false
    end

    def email_changed?
      false
    end
  end
end
|
mitixx/abilitysheet
|
app/models/concerns/user/ist.rb
|
<reponame>mitixx/abilitysheet<gh_stars>10-100
# frozen_string_literal: true

require 'ist_client'

# Integration with the IST external score service: pulls a user's profile
# and level-12 scores and syncs them into local records.
module User::Ist
  extend ActiveSupport::Concern

  # Titles whose spelling differs between IST and this app.
  FROM_IST_TO_AB = {
    '旋律のドグマ~Miserables~' => '旋律のドグマ ~Misérables~',
    '火影' => '焱影'
  }.freeze

  # IST search query: SP (play type 0) level-12 charts for the current
  # IIDX version.
  SEARCH_PARAMS = {
    q: {
      chart_level_eq: 12,
      chart_play_type_status_eq: 0,
      version_eq: Abilitysheet::Application.config.iidx_version
    }
  }.freeze

  # Index of the prefecture name in the static list (nil when unknown).
  def find_pref(pref)
    User::Static::PREF.index(pref)
  end

  # NOTE: grades that are not found fall back to "no grade" (last index).
  def find_grade(grade)
    User::Static::GRADE.index(grade.split[1]) || User::Static::GRADE.size - 1
  end

  # Syncs djname, prefecture, grade, and avatar from an IST profile hash.
  def update_user(user)
    old_djname = djname
    # Keep the previous djname if the update is rejected (validation).
    self.djname = old_djname unless update(djname: user['user_activity']['djname'])
    pref = find_pref(user['user_activity']['pref_status'])
    grade = find_grade(user['user_activity']['sp_grade_status'])
    update!(grade: grade, pref: pref)
    avatar.attach(io: URI.parse(user['image_path']).open, filename: 'avatar.png') unless Rails.env.development?
  end

  # Maps an IST score row to a local sheet id, handling charts that exist
  # in multiple difficulties and titles that are spelled differently.
  def find_sheet_id(score, sheets)
    if score['title'] == 'gigadelic' || score['title'] == 'Innocent Walls'
      difficulty_type = score['difficulty_type_status'] == 'HYPER' ? '[H]' : '[A]'
      sheets[score['title'] + difficulty_type]
    elsif FROM_IST_TO_AB.key?(score['title'])
      sheets[FROM_IST_TO_AB[score['title']]]
    elsif score['difficulty_type_status'] == 'LEGGENDARIA'
      sheets["#{score['title']}†"]
    else
      sheets[score['title']]
    end
  end

  included do
    # Fetches the IST profile for this user's IIDX id.
    def check_ist_user
      ist_client.get_user(iidxid)
    end

    # Full sync of profile and scores. Returns false when the IST user or
    # the score list cannot be found.
    def update_ist
      user = ist_client.get_user(iidxid)
      return false if user['iidxid'] != iidxid
      result = ist_client.get_scores(iidxid, SEARCH_PARAMS)
      return false if result['error'] == 'Not Found'
      update_user(user)
      sheets = Sheet.active.pluck(:title, :id).to_h
      result['scores'].each do |score|
        sheet_id = find_sheet_id(score, sheets)
        # Only removed songs should fail this lookup.
        next unless sheet_id
        s = scores.find_or_initialize_by(sheet_id: sheet_id, version: Abilitysheet::Application.config.iidx_version)
        state = ::Static::LAMP_OFFICIAL.index(score['clear_type_status'])
        # Do not update NO PLAY entries.
        next if state == 7
        # Update when the lamp changed, or when the lamp is unchanged but
        # a non-zero score is recorded.
        next if s.state == state && score['score'].zero?
        scores.find_or_create_by!(
          sheet_id: sheet_id,
          version: Abilitysheet::Application.config.iidx_version
        ).update_with_logs(
          sheet_id: sheet_id,
          state: state,
          score: score['score'],
          bp: score['miss_count']
        )
      end
    end

    # Client for the IST HTTP API.
    def ist_client
      IstClient.new
    end
  end
end
|
mitixx/abilitysheet
|
spec/rails_helper.rb
|
<reponame>mitixx/abilitysheet
# frozen_string_literal: true

# RSpec bootstrap for Rails specs: loads the app environment, support
# files, and shared configuration.
ENV['RAILS_ENV'] ||= 'test'
require 'spec_helper'
require File.expand_path('../config/environment', __dir__)
require 'rspec/rails'
# Load every support helper in deterministic order.
Dir[Rails.root.join('spec/support/**/*.rb')].sort.each { |f| require f }
ActiveRecord::Migration.maintain_test_schema!

RSpec.configure do |config|
  config.use_transactional_fixtures = true
  config.infer_spec_type_from_file_location!
  config.order = :random
  config.include Devise::Test::ControllerHelpers, type: :controller
  config.include SessionHelpers
  config.include ApiHelper, type: :request
  config.include Warden::Test::Helpers
  config.include FactoryBot::Syntax::Methods
  config.include SheetsHelper
  config.include ActiveJob::TestHelper
  config.include ActiveSupport::Testing::Assertions
end

# Record external HTTP via VCR; real connections are allowed only when no
# cassette is in use.
VCR.configure do |c|
  c.cassette_library_dir = 'spec/cassettes'
  c.hook_into :webmock
  c.allow_http_connections_when_no_cassette = true
end

# Seed sheet data into Redis once for the whole suite.
RedisHelper.load_sheets_data
|
mitixx/abilitysheet
|
spec/systems/admin/rails_admin_spec.rb
|
# frozen_string_literal: true

# System spec: only admin-role users can open the RailsAdmin dashboard.
feature RailsAdmin, type: :system do
  given(:user) { create(:user, id: 1) }
  background { login(user) }

  context '管理者の場合' do
    background do
      # role 100 marks the user as an administrator.
      user.update!(role: 100)
      visit rails_admin_path
    end

    scenario '管理者ページが閲覧できる' do
      expect(page).to have_content('サイト管理')
    end
  end
end
|
JonRowe/wrapup
|
lib/wrapup.rb
|
require "wrapup/version"
require "wrapup/wrap"

# Top-level namespace for the wrapup gem.
module WrapUp
end
|
JonRowe/wrapup
|
spec/wrapup/wrap_spec.rb
|
<gh_stars>0
require 'wrapup/wrap'

module WrapUp
  describe Wrap do
    # Minimal wrapper exposing the wrapped item via #original.
    class WrapperClass < Struct.new(:original)
    end

    let(:item_1) { double "item" }
    let(:item_2) { double "item" }
    let(:wrap) { described_class.new [item_1, item_2], WrapperClass }

    describe "#initialize" do
      it "takes a collection and a constant in which to wrap the collection" do
        described_class.new [], WrapperClass
      end
    end

    describe "enumerability" do
      it " has the standard enumerability module in its ancestors" do
        expect(described_class.ancestors).to include Enumerable
      end
    end

    describe '#[]' do
      it 'allows access to an individual via a wrapper' do
        expect( wrap[1] ).to be_a WrapperClass
        expect( wrap[1].original ).to eq item_2
      end
    end

    describe '#size' do
      it 'measure the internal collection size' do
        expect( wrap.size ).to eq 2
      end
    end

    describe "#each" do
      let(:output) { [] }
      before { wrap.each { |i| output << i } }

      it "wraps the items in constant" do
        expect( output.all? { |item| item.is_a? WrapperClass } ).to eq true
      end

      it 'wraps them in order' do
        expect( output.map(&:original) ).to eq [item_1,item_2]
      end
    end
  end
end
|
JonRowe/wrapup
|
lib/wrapup/wrap.rb
|
<reponame>JonRowe/wrapup
module WrapUp
  # Lazily wraps each element of a collection in a wrapper class: items
  # are wrapped on access rather than up front.
  class Wrap
    include Enumerable

    # collection:       the items to expose
    # wrapper_constant: a class whose .new accepts a single item
    def initialize collection, wrapper_constant
      @collection = collection
      @wrapper = wrapper_constant
    end

    # Yields each item wrapped, preserving collection order.
    def each &block
      @collection.each { |entry| block.call(@wrapper.new(entry)) }
    end

    # Size of the underlying collection.
    def size
      @collection.size
    end

    # Random access; the item is wrapped on demand.
    def [] key
      @wrapper.new(@collection[key])
    end
  end
end
|
yoshoku/gem_rbs_collection
|
gems/sidekiq/6.2/_test/test_2.rb
|
<filename>gems/sidekiq/6.2/_test/test_2.rb
# Steep type-check exercise for the Sidekiq RBS signatures: worker,
# client, middleware chains, and server/client configuration.
class HardWorker
  include Sidekiq::Worker
end

class Hook
end

class Middleware
end

HardWorker.perform_async(1, 2, 3)

# Test Sidekiq::Client
client = Sidekiq::Client.new
client.middleware do |chain|
  chain.add Middleware
end
Sidekiq::Client.push('class' => HardWorker, 'args' => [1, 2, 3])
Sidekiq::Client.push_bulk('class' => HardWorker, 'args' => [[1, 2, 3], [4,5,6]])
Sidekiq::Client.enqueue(HardWorker, 'foo', 1, :bat => 'bar')
Sidekiq::Client.enqueue_to(:queue_name, HardWorker, 'foo', 1, :bat => 'bar')
Sidekiq::Client.enqueue_to_in(:queue_name, Time.now + 3 * 60, HardWorker, 'foo', 1, :bat => 'bar')
Sidekiq::Client.enqueue_in(Time.now + 3 * 60, HardWorker, 'foo', 1, :bat => 'bar')

# Test configuration of middleware
Sidekiq.configure_server do |config|
  config.redis = { namespace: 'rails', size: 2, url: 'redis://rails:6457/0' }
  config.server_middleware do |chain|
    chain.add Hook
  end
  config.client_middleware do |chain|
    chain.add Hook
  end
end

# Using Redis
# Taken from: https://github.com/mperham/sidekiq/wiki/Using-Redis
Sidekiq.configure_server do |config|
  config.redis = { url: 'redis://redis.example.com:7372/0', network_timeout: 5 }
end
Sidekiq.configure_client do |config|
  config.redis = { url: 'redis://redis.example.com:7372/0' }
end
redis_conn = -> { Redis.new }
Sidekiq.configure_client do |config|
  config.redis = ConnectionPool.new(size: 5, &redis_conn)
end
Sidekiq.configure_server do |config|
  config.redis = ConnectionPool.new(size: 25, &redis_conn)
end
Sidekiq.configure_server do |config|
  config.on(:shutdown) do
    puts "Shutting down!"
  end
end
|
yoshoku/gem_rbs_collection
|
gems/wavedash/0.1/_test/test.rb
|
# Write Ruby code to test the RBS.
# It is type checked by `steep check` command.
require "wavedash"

# U+301C WAVE DASH is the character the gem normalizes.
str = "こんにちは\u{301C}"
Wavedash.destination_encoding = 'eucjp-ms'
Wavedash.normalize(str) # => "こんにちは~"
Wavedash.invalid?(str) # => true
|
yoshoku/gem_rbs_collection
|
gems/aws-sdk-s3/1/_test/test.rb
|
<filename>gems/aws-sdk-s3/1/_test/test.rb
# Steep type-check exercise for the aws-sdk-s3 RBS signatures.
require "aws-sdk-s3"

client = Aws::S3::Client.new
resp = client.list_buckets
resp.buckets.each do |bucket|
  bucket.name.upcase
end

begin
  resp = Aws::S3::Client.new.get_object(bucket: 'test', key: 'test')
  resp.body.read
rescue Aws::S3::Errors::InvalidObjectState => e
  # Error-specific attribute should be typed as well.
  e.storage_class.downcase
end

resp = client.put_object(
  bucket: 'test',
  key: 'test',
  acl: 'private',
)
resp.etag.tr('', '')
|
yoshoku/gem_rbs_collection
|
gems/httparty/0.18/_test/test_2.rb
|
<reponame>yoshoku/gem_rbs_collection
# Steep type-check exercise for HTTParty's module-level request methods,
# with and without response blocks, plus HTTParty::Request used directly.
HTTParty.get('http://api.stackexchange.com/2.2/questions', query: {site: 'stackoverflow'})
HTTParty.get('http://api.stackexchange.com/2.2/questions', query: {site: 'stackoverflow'}) { |res| res }
HTTParty.post('https://reqres.in/api/users', query: { "name": "random", "job": "random" })
HTTParty.patch('https://reqres.in/api/users', query: { "name": "random", "job": "random" })
HTTParty.put('https://reqres.in/api/users', query: { "name": "random", "job": "random" })
HTTParty.delete('https://reqres.in/api/users', query: { "name": "random", "job": "random" })
HTTParty.move('https://reqres.in/api/users', query: { "name": "random", "job": "random" })
HTTParty.copy('https://reqres.in/api/users', query: { "name": "random", "job": "random" })
HTTParty.head('https://reqres.in/api/users', query: { "name": "random", "job": "random" })
HTTParty.options('https://reqres.in/api/users', query: { "name": "random", "job": "random" })
HTTParty.get('http://api.stackexchange.com/2.2/questions', query: {site: 'stackoverflow'}) { |res| res }
HTTParty.post('https://reqres.in/api/users', query: { "name": "random", "job": "random" }) { |res| res }
HTTParty.patch('https://reqres.in/api/users', query: { "name": "random", "job": "random" }) { |res| res }
HTTParty.put('https://reqres.in/api/users', query: { "name": "random", "job": "random" }) { |res| res }
HTTParty.delete('https://reqres.in/api/users', query: { "name": "random", "job": "random" }) { |res| res }
HTTParty.move('https://reqres.in/api/users', query: { "name": "random", "job": "random" }) { |res| res }
HTTParty.copy('https://reqres.in/api/users', query: { "name": "random", "job": "random" }) { |res| res }
HTTParty.head('https://reqres.in/api/users', query: { "name": "random", "job": "random" }) { |res| res }
HTTParty.options('https://reqres.in/api/users', query: { "name": "random", "job": "random" }) { |res| res }

# Building and performing a request by hand.
request = HTTParty::Request.new(Net::HTTP::Get, URI('https://reqres.in'), {follow_redirects: true})
request.path = URI('https://reqres.in/api/users')
request.perform
request.perform { |response| puts response }
puts request.perform.headers, request.perform.code
|
yoshoku/gem_rbs_collection
|
gems/httparty/0.18/_test/test_1.rb
|
# Steep type-check exercise for HTTParty used via `include HTTParty`:
# the class-level configuration DSL plus WebDAV-style verbs.
class Foo
  include HTTParty
  base_uri "reqres.in"
  basic_auth "username", "password"
  digest_auth "username", "password"
  default_timeout 10
  open_timeout 10
  read_timeout 10
  write_timeout 10
  debug_output $stderr
  headers 'Accept' => 'application/json'
  format :json
  pem File.read('/home/user/my.pem'), "optional password"
  pkcs12 File.read('/home/user/my.p12'), "password"
  parser Proc.new { |data| data }

  def initialize(page)
    @options = { page: page }
  end

  def users
    self.class.get('/api/users', @options)
  end
end

Foo.get('https://reqres.in')
Foo.mkcol('https://reqris.in')
Foo.lock('https://reqres.in')
Foo.unlock('https://reqres.in')
Foo.mkcol('https://reqris.in', query: { "name": "random", "job": "random" }) { |response| response }
Foo.lock('https://reqres.in', query: { "name": "random", "job": "random" }) { |response| response }
Foo.unlock('https://reqres.in', query: { "name": "random", "job": "random" }) { |response| response }
|
yoshoku/gem_rbs_collection
|
gems/woothee/1.11/_test/test.rb
|
# Write Ruby code to test the RBS.
# It is type checked by `steep check` command.
require "woothee"

# Parse a user-agent string; expected field values shown in comments.
result = Woothee.parse("Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0)")
result[:name] # => "Internet Explorer"
result[:category] # => :pc
result[:os] # => "Windows 7"
result[:os_version] # => "NT 6.1"
result[:version] # => "8.0"
result[:vendor] # => "Microsoft"
Woothee.is_crawler("Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0)") # => false
|
yoshoku/gem_rbs_collection
|
gems/chronic/0.10/_test/test.rb
|
# Steep type-check exercise for Chronic.parse and its option variants.
require "chronic"

Chronic.parse('monday')
Chronic.parse('monday', context: :future)
Chronic.parse('monday', context: :past)
Chronic.parse('monday', now: Time.local(2000, 1, 1))
Chronic.parse('20:00:00', hours24: false) # => nil
Chronic.parse('monday', week_start: :sunday)
Chronic.parse('monday', week_start: :monday)
Chronic.parse('may 27th', guess: false)
Chronic.parse('may 27th', guess: true)
Chronic.parse('monday', ambiguous_time_range: 7)
Chronic.parse('03/04/2011', endian_precedence: [:middle, :little])
Chronic.parse('03/04/2011', endian_precedence: [:little, :middle])
Chronic.parse('03/04/2011', endian_precedence: :little)
Chronic.parse('03/04/2011', endian_precedence: :middle)
Chronic.parse('monday', ambiguous_year_future_bias: 79)
Chronic.parse('INVALID DATE')
|
yoshoku/gem_rbs_collection
|
aws_client_types_generator.rb
|
#! /usr/bin/env ruby
# Generates an RBS signature file for one AWS SDK client from the
# service's JSON API model (metadata, operations, shapes), written to
# stdout. NOTE(review): string indentation in the emitted RBS appears to
# have been collapsed by whitespace mangling -- confirm against upstream.
require 'aws-sdk-code-generator'
require 'json'
require 'rbs'

# Local refinement: snake_case conversion for shape/member names.
using Module.new {
  refine String do
    def underscore
      AwsSdkCodeGenerator::Underscore.underscore(self)
    end
  end
}

class AwsClientTypesGenerator
  # path: JSON API model file for one AWS service.
  def initialize(path)
    @api = File.open(path) do |file|
      JSON.parse(file.read)
    end
  end

  # Appends "_" to names that collide with RBS keywords.
  def escape(key)
    RBS::Parser::KEYWORDS.key?(key) ? "#{key}_" : key
  end

  # Qualifies a shape name with the Types:: namespace unless it is an
  # RBS keyword (then it is emitted as-is).
  def types(name)
    if RBS::Parser::KEYWORDS.key?(name)
      name
    else
      "Types::#{name}"
    end
  end

  # RBS type reference for a shape: structures keep their class name,
  # everything else becomes an escaped snake_case alias.
  def type(name, types_prefix: false)
    if structure?(name)
      name
    else
      escape(name.underscore)
    end.then { types_prefix ? types(_1) : _1 }
  end

  def structure?(name)
    @api.dig("shapes", name, "type") == "structure"
  end

  # Writes the full RBS module tree (Client, Types, Errors) to +io+.
  def write(io)
    io.puts "module Aws"
    io.puts " class EmptyStructure"
    io.puts " end"
    io.puts " module #{@api["metadata"]["serviceId"].tr(' ', '')}"
    io.puts " class Client"
    @api["operations"].each do |key, body|
      input = body.dig("input", "shape")&.then { |i| shape_to_kwargs(i, allow_opt: true, types_prefix: true) }
      output = body.dig("output", "shape")&.then { |o| types(o) } || "Aws::EmptyStructure"
      name = body["name"].underscore
      io.puts " def #{name}: (#{input}) -> #{output}"
    end
    io.puts " end"
    io.puts " module Types"
    @api["shapes"].each do |key, body|
      if body["type"] == "structure"
        # Exception shapes are emitted under Errors instead.
        next if body["exception"]
        io.puts " class #{key}"
        body["members"]&.each do |member_name, member_body|
          io.puts " attr_accessor #{escape(member_name.underscore)}: #{shape_to_rbs_type(member_body["shape"], types_prefix: false)}"
        end
        io.puts " end"
      else
        io.puts " type #{escape(key.underscore)} = #{shape_to_rbs_type(key, types_prefix: false)}"
      end
    end
    io.puts " end"
    io.puts " module Errors"
    @api["shapes"].each do |key, body|
      if body["type"] == "structure" && body["exception"]
        io.puts " class #{key} < RuntimeError"
        body["members"]&.each do |member_name, member_body|
          io.puts " attr_accessor #{escape(member_name.underscore)}: #{shape_to_rbs_type(member_body["shape"], types_prefix: true)}"
        end
        io.puts " end"
      end
    end
    io.puts " end"
    io.puts " end"
    io.puts "end"
  end

  # Maps a shape definition to an RBS type expression; raises on shape
  # kinds the generator does not know about.
  def shape_to_rbs_type(shape_name, types_prefix: false)
    shape_body = @api["shapes"][shape_name]
    case type = shape_body["type"]
    when "string"
      if shape_body["enum"]
        "(#{shape_body["enum"].map { "\"#{_1}\"" }.join(" | ")})"
      else
        "::String"
      end
    when "integer", "long"
      "::Integer"
    when "double"
      "::Float"
    when "timestamp"
      case shape_body["timestampFormat"]
      when "iso8601", "rfc822", nil
        "::Time"
      else
        raise [shape_name, shape_body].inspect
      end
    when "list"
      "::Array[#{type(shape_body["member"]["shape"], types_prefix: types_prefix)}]"
    when "map"
      key_type = type(shape_body["key"]["shape"], types_prefix: types_prefix)
      value_type = type(shape_body["value"]["shape"], types_prefix: types_prefix)
      "::Hash[#{key_type}, #{value_type}]"
    when "structure"
      shape_name
    when "boolean"
      "bool"
    when "blob"
      "::IO"
    else
      raise "unimplemented shape type #{type} on #{shape_name}"
    end
  end

  # Keyword-argument list for an operation's input shape; optional
  # members get a leading "?" when allow_opt is true.
  def shape_to_kwargs(shape_name, allow_opt: false, types_prefix: false)
    shape_body = @api["shapes"].fetch(shape_name)
    members = shape_body["members"]
    raise if members.empty?
    required = shape_body["required"]
    params = members.map do |member_name, member_body|
      member_shape_name = member_body["shape"]
      member_type = type(member_shape_name, types_prefix: types_prefix)
      prefix = (required && required.include?(member_name)) ? "" : "?"
      prefix = allow_opt ? prefix : ""
      "#{prefix}#{member_name.underscore}: #{member_type}"
    end
    params.join(", ")
  end
end

AwsClientTypesGenerator.new(ARGV[0]).write($stdout)
|
yoshoku/gem_rbs_collection
|
gems/nokogiri/1.11/_test/test.rb
|
# Steep type-check exercise for the Nokogiri RBS signatures.
# https://nokogiri.org/#how-to-use-nokogiri
require 'nokogiri'

# Fetch and parse HTML document
doc = Nokogiri::HTML(<<~HTML)
  <body>
    <nav>
      <ul class="menu">
        <li><a href="#">hello</a></li>
      </ul>
    </nav>
    <article>
      <h2>hello</h2>
    </article>
  </body>
HTML

# Search for nodes by css
doc.css('nav ul.menu li a', 'article h2').each do |link|
  puts link.content
end

# Search for nodes by xpath
doc.xpath('//nav//ul//li/a', '//article//h2').each do |link|
  puts link.content
end

# Or mix and match
doc.search('nav ul.menu li a', '//article//h2').each do |link|
  puts link.content
end

# Create nodes
doc.create_element('h1', 'hello') { |e| puts e }
doc.create_text_node('hello') { |e| puts e }
doc.create_comment('hello') { |e| puts e }
doc.create_cdata('<hello>') { |e| puts e }

# Create nodes without using blocks
puts doc.create_element('h1', 'hello')
puts doc.create_text_node('hello')
puts doc.create_comment('hello')
puts doc.create_cdata('<hello>')
|
yoshoku/gem_rbs_collection
|
gems/zengin_code/1.0/_test/test.rb
|
<reponame>yoshoku/gem_rbs_collection
# Write Ruby code to test the RBS.
# It is type checked by `steep check` command.
require "zengin_code"

# Bank lookup by code; branches are nested under each bank.
ZenginCode::Bank.all # => { '0001' => <#ZenginCode::Bank code, name, kana, hira, roma ... >, .... }
bank = ZenginCode::Bank["0001"] or raise
puts bank.code
puts bank.name
puts bank.kana
puts bank.hira
puts bank.roma
branch = bank.branches["001"]
puts branch.code
puts branch.name
puts branch.kana
puts branch.hira
puts branch.roma
puts branch.bank
ZenginCode::Bank["0000"] # => nil
|
yoshoku/gem_rbs_collection
|
gems/delayed_job/4.1/_test/test.rb
|
# Steep type-check exercise for the Delayed::Worker configuration API.
Delayed::Worker.queue_attributes = {
  high_priority: { priority: -10 },
  low_priority: { priority: 10 }
}
# A callable delay_jobs runs 'inline'-queue jobs immediately.
Delayed::Worker.delay_jobs = ->(job) {
  job.queue != 'inline'
}
Delayed::Worker.destroy_failed_jobs = false
Delayed::Worker.sleep_delay = 60
Delayed::Worker.max_attempts = 3
Delayed::Worker.max_run_time = 5.minutes
Delayed::Worker.read_ahead = 10
Delayed::Worker.default_queue_name = 'default'
Delayed::Worker.delay_jobs = !Rails.env.test?
Delayed::Worker.raise_signal_exceptions = :term
Delayed::Worker.logger = Logger.new(File.join(Rails.root, 'log', 'delayed_job.log'))
|
kyounger/homebrew-jx
|
Formula/jx.rb
|
# Homebrew formula for the Jenkins X CLI (jx), installed from the
# prebuilt darwin-amd64 release tarball.
class Jx < Formula
  desc "A tool to install and interact with Jenkins X on your Kubernetes cluster."
  homepage "https://jenkins-x.github.io/jenkins-x-website/"
  version "1.3.887"
  url "https://github.com/jenkins-x/jx/releases/download/v#{version}/jx-darwin-amd64.tar.gz"
  sha256 "456182d8026c670c8c1c9c29ea395cf75330d2910cd5f78e793f0f56d36626d0"

  def install
    bin.install name
    # Generate and install shell completions for bash and zsh.
    output = Utils.popen_read("SHELL=bash #{bin}/jx completion bash")
    (bash_completion/"jx").write output
    output = Utils.popen_read("SHELL=zsh #{bin}/jx completion zsh")
    (zsh_completion/"_jx").write output
    prefix.install_metafiles
  end
end
|
fgrehm/dev-droplet
|
site-cookbooks/dev_droplet/recipes/default.rb
|
# Entry-point recipe: base packages, kernel extras, users, ruby/go
# toolchains, Vagrant, project checkouts, and dotfiles.
# apt-get update awesomeness
include_recipe 'apt'

# Install some random packages defined by the user
node.packages.each do |pkg|
  package pkg
end

# Install the extra kernel image matching the running kernel version.
# Stolen from https://github.com/bflad/chef-docker/blob/91cae5b866e096cbaa962ef1e3db3aafca7782ef/recipes/aufs.rb#L30
image_extra = Mixlib::ShellOut.new("apt-cache search linux-image-extra-`uname -r | grep --only-matching -e [0-9]\.[0-9]\.[0-9]-[0-9]*` | cut -d ' ' -f 1").run_command.stdout.strip
package image_extra

# https://github.com/fnichol/chef-user
include_recipe "user::data_bag"
# https://github.com/fnichol/chef-rvm
include_recipe 'rvm::system'
# Vagrant and vagrant-lxc
include_recipe 'dev_droplet::vagrant'
# GO!
include_recipe 'golang'
# # Git clone projects
include_recipe 'dev_droplet::git_projects'
# dotfiles + vimfiles
include_recipe 'dev_droplet::dotfiles'
|
fgrehm/dev-droplet
|
site-cookbooks/dev_droplet/metadata.rb
|
# Cookbook metadata for dev_droplet.
name "dev_droplet"
maintainer "<NAME>"
maintainer_email "<EMAIL>"
license "MIT"

# Community cookbooks this recipe set depends on.
depends 'apt'
depends 'user'
depends 'openssh'
depends 'fail2ban'
depends 'sudo'
depends 'rvm'
depends 'golang'
|
fgrehm/dev-droplet
|
site-cookbooks/dev_droplet/recipes/vagrant.rb
|
# Installs Vagrant from a .deb, LXC, the configured Vagrant plugins, and
# pre-adds vagrant-lxc base boxes for the developer user.
vagrant_version = node[:vagrant][:version]
vagrant_source = node[:vagrant][:source]
# NOTE(review): vagrant_plugins is assigned but the loop below reads
# node.vagrant.plugins directly -- this local appears unused.
vagrant_plugins = node[:vagrant][:plugins]
vagrant_path = "#{Chef::Config[:file_cache_path]}/vagrant_#{vagrant_version}_x86_64.deb"

package 'lxc' do
  options "-o Dpkg::Options::='--force-confdef' -o Dpkg::Options::='--force-confold'"
end

remote_file vagrant_path do
  source vagrant_source
end

# Idempotent: skipped when the desired version is already installed.
bash 'install-vagrant' do
  code "dpkg -i #{vagrant_path}"
  not_if "dpkg -s vagrant | grep -q '#{vagrant_version}'"
end

node.vagrant.plugins.each do |plugin|
  bash "install-#{plugin}" do
    code "vagrant plugin install #{plugin}"
    user node.developer.user
    environment 'VAGRANT_HOME' => "/home/#{node.developer.user}/.vagrant.d"
    not_if "sudo su -l #{node.developer.user} -- vagrant plugin list | grep -q #{plugin}"
  end
end

bash 'set-vagrant-lxc-default-provider' do
  code "echo 'export VAGRANT_DEFAULT_PROVIDER=lxc' >> /home/#{node.developer.user}/.profile"
  not_if "grep -q 'VAGRANT_DEFAULT_PROVIDER' /home/#{node.developer.user}/.profile"
end

developer_user = node[:developer][:user]
node[:developer][:vagrant_lxc_boxes].each do |name, url|
  bash "vagrant-lxc-box-add-#{name}" do
    code "vagrant box add #{name} #{url}"
    user developer_user
    environment 'VAGRANT_HOME' => "/home/#{developer_user}/.vagrant.d"
    not_if "test -d /home/#{developer_user}/.vagrant.d/boxes/#{name}/lxc"
  end
end
|
fgrehm/dev-droplet
|
site-cookbooks/dev_droplet/recipes/dotfiles.rb
|
<gh_stars>0
# Runs each project's setup.sh as the developer user to install dotfiles
# and vimfiles into their home directory.
%w( dotfiles vimfiles ).each do |project|
  bash "setup-#{project}" do
    code "./setup.sh"
    cwd "#{node.developer.projects_root}/#{project}"
    user node.developer.user
    environment 'HOME' => "/home/#{node.developer.user}"
  end
end
|
fgrehm/dev-droplet
|
site-cookbooks/dev_droplet/recipes/git_projects.rb
|
# Monkey-patch the git provider so git runs with the developer's HOME,
# making their SSH config/keys available for clones.
Chef::Provider::Git.class_eval do
  alias :run_options_old :run_options
  def run_options(run_opts={})
    ret = run_options_old(run_opts)
    ret[:environment] = {} unless ret[:environment]
    ret[:environment]['HOME'] = "/home/#{node[:developer][:user]}"
    ret
  end
end

# Projects root, owned by the developer user.
directory node[:developer][:projects_root] do
  recursive true
  owner node[:developer][:user]
  group node[:developer][:user]
end

# Clone each configured project at master.
node.developer.projects.each do |project, data|
  git "#{node[:developer][:projects_root]}/#{project}" do
    repository data[:repository]
    revision 'master'
    user node[:developer][:user]
    group node[:developer][:user]
  end
end
|
fgrehm/dev-droplet
|
site-cookbooks/dev_droplet/attributes/default.rb
|
<reponame>fgrehm/dev-droplet
# Default attributes for the dev_droplet cookbook.
default[:developer][:user] = 'developer'
default[:developer][:projects_root] = "/home/#{node[:developer][:user]}/projects"
default[:developer][:projects] = { }
default[:developer][:vagrant_lxc_boxes] = { }
# Extra packages installed by the default recipe.
default.packages = %w( htop vim git curl wget psmisc tmux redir apparmor-utils )
default[:user][:ssh_keygen] = 'false'
default[:users] = [node.developer.user]
default[:go][:version] = "1.1.2"
default[:go][:filename] = "go#{node[:go][:version]}.#{node[:os]}-#{node[:go][:platform]}.tar.gz"
default[:go][:url] = "http://go.googlecode.com/files/#{node[:go][:filename]}"
# ruby that will get installed and set to `rvm use default`.
default[:rvm][:default_ruby] = "2.0.0"
# list of additional rubies that will be installed
default[:rvm][:rubies] = ['1.9.3', '1.8.7', '2.1.0-preview1']
default[:rvm][:vagrant] = {
  'system_chef_client' => "/opt/chef/bin/chef-client",
  'system_chef_solo' => "/opt/chef/bin/chef-solo"
}
default[:vagrant][:version] = "1.3.5"
default[:vagrant][:source] = "http://files.vagrantup.com/packages/a40522f5fabccb9ddabad03d836e120ff5d14093/vagrant_1.3.5_x86_64.deb"
default[:vagrant][:plugins] = %w(
  vagrant-lxc vagrant-proxyconf vagrant-pristine vagrant-cachier
  vagrant-global-status vagrant-omnibus vagrant-hostmanager
)
|
Stromweld/chef
|
spec/unit/provider/user/linux_spec.rb
|
#
# Author:: <NAME> (<<EMAIL>>)
# Author:: <NAME> (<<EMAIL>>)
# Copyright:: Copyright (c) Chef Software Inc.
#
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
describe Chef::Provider::User::Linux do
  subject(:provider) do
    p = described_class.new(@new_resource, @run_context)
    p.current_resource = @current_resource
    p
  end

  # useradd flag for each supported resource property.
  # Fixed: the "password" entry was corrupted (`<PASSWORD>",`) and did not
  # parse; useradd takes the encrypted password via -p.
  supported_useradd_options = {
    "comment" => "-c",
    "gid" => "-g",
    "uid" => "-u",
    "shell" => "-s",
    "password" => "-p",
    "expire_date" => "-e",
    "inactive" => "-f",
  }
  include_examples "a useradd-based user provider", supported_useradd_options

  describe "manage_home behavior" do
    before(:each) do
      @new_resource = Chef::Resource::User::LinuxUser.new("adam", @run_context)
      @current_resource = Chef::Resource::User::LinuxUser.new("adam", @run_context)
    end

    # The legacy supports API was removed for these properties.
    it "throws an error when trying to set supports manage_home: true" do
      expect { @new_resource.supports( manage_home: true ) }.to raise_error(NoMethodError)
    end

    it "throws an error when trying to set supports non_unique: true" do
      expect { @new_resource.supports( non_unique: true ) }.to raise_error(NoMethodError)
    end

    it "defaults manage_home to false" do
      expect( @new_resource.manage_home ).to be false
    end

    it "by default manage_home is false and we use -M" do
      expect( provider.useradd_options ).to eql(["-M"])
    end

    it "setting manage_home to false includes -M" do
      @new_resource.manage_home false
      expect( provider.useradd_options ).to eql(["-M"])
    end

    it "setting manage_home to true includes -m" do
      @new_resource.manage_home true
      expect( provider.useradd_options ).to eql(["-m"])
    end
  end
end
|
shirot7335/spf_lookup
|
lib/spf_lookup/txt_record_fetcher.rb
|
require 'resolv'

module SpfLookup
  # Fetches TXT resource records for a domain via Resolv::DNS.
  class TXTRecordFetcher
    # Only TXT lookups are supported by this fetcher.
    SUPPORTED_TYPE_CLASS = %w[TXT].freeze

    # dns_conf is forwarded to Resolv::DNS.new; nil uses the system
    # resolver configuration.
    def initialize(dns_conf = nil)
      @resolver = Resolv::DNS.new(dns_conf)
    end

    # String payloads of every TXT record for +domain+.
    def txt_record_values(domain)
      txt_record_resources(domain).collect(&:data)
    end

    # Raw TXT resource objects for +domain+.
    def txt_record_resources(domain)
      record_resources(domain, 'TXT')
    end

    # Resolves +record_type+ records for +domain+.
    # Raises ArgumentError for unsupported record types.
    def record_resources(domain, record_type)
      @resolver.getresources(domain, type_class(record_type))
    end

    private

    # Maps a record-type string to its Resolv resource class, rejecting
    # anything outside SUPPORTED_TYPE_CLASS.
    def type_class(record_type)
      normalized = record_type.upcase
      raise ArgumentError unless SUPPORTED_TYPE_CLASS.include?(normalized)
      Resolv::DNS::Resource::IN.const_get(normalized)
    end
  end
end
|
shirot7335/spf_lookup
|
lib/spf_lookup/version.rb
|
<reponame>shirot7335/spf_lookup<filename>lib/spf_lookup/version.rb
module SpfLookup
  # Gem version string.
  VERSION = "0.1.2c"
end
|
shirot7335/spf_lookup
|
lib/spf_lookup/spf_record.rb
|
require "json"

module SpfLookup
  # Value object describing one domain's SPF record plus the records it
  # pulls in via include/redirect, forming a lookup tree.
  class SpfRecord
    attr_accessor :domain, :record_value, :includes
    attr_accessor :lookup_term_count

    # Nil arguments are normalized to empty/zero defaults.
    def initialize(domain, record_value, includes, lookup_term_count)
      @domain = domain || ""
      @record_value = record_value || ""
      @includes = includes || []
      @lookup_term_count = lookup_term_count || 0
    end

    # Total DNS lookup terms across this record and all nested includes.
    def lookup_count
      count_dns_lookup_0
    end

    # Hash representation, recursing into included records.
    def to_hash
      {
        domain: @domain,
        record_value: @record_value,
        includes: includes_to_hash,
        lookup_term_count: @lookup_term_count
      }
    end

    # JSON serialization of #to_hash.
    def to_json
      to_hash.to_json
    end

    private

    # Entry point for the recursive lookup tally.
    def count_dns_lookup_0
      count_dns_lookup(self, 0)
    end

    # Depth-first sum of lookup_term_count over the include tree.
    def count_dns_lookup(record, total)
      subtotal = record.includes.inject(total) do |memo, child|
        count_dns_lookup(child, memo)
      end
      subtotal + record.lookup_term_count
    end

    # Recursively converts included records to hashes.
    def includes_to_hash
      includes.map { |result| result.to_hash }
    end
  end
end
|
shirot7335/spf_lookup
|
lib/spf_lookup/lookup.rb
|
<reponame>shirot7335/spf_lookup
require 'coppertone'
require_relative './txt_record_fetcher'
require_relative './spf_record'
require_relative './error'

module SpfLookup
  # Recursively resolves a domain's SPF record and every record reached
  # through include: terms and the redirect modifier.
  class Lookup
    class << self
      # Returns the SpfRecord tree rooted at +domain+.
      def run(domain)
        return dns_lookup(domain)
      end

      private

      # Builds the record tree; recursion bottoms out when a record has no
      # include/redirect targets (or the domain has no SPF record at all).
      def dns_lookup(domain)
        spf_record = find_spf_record(domain)
        includes = lookup_target_domains(spf_record).map {|_domain|
          dns_lookup(_domain)
        }
        return SpfRecord.new(
          domain,
          spf_record&.to_s,
          includes,
          spf_record&.dns_lookup_term_count
        )
      end

      # Domains referenced by include: mechanisms plus an optional redirect.
      def lookup_target_domains(spf_record)
        return [] if spf_record.blank?
        domain_fetcher = -> (obj) { obj.domain_spec.macro_text }
        domains = spf_record.includes.each_with_object([]) { |include_value, memo|
          memo << domain_fetcher.(include_value.mechanism)
        }
        domains << domain_fetcher.(spf_record.redirect) unless spf_record.redirect.nil?
        return domains.compact
      end

      # Parses TXT records with Coppertone; RFC 7208 allows at most one SPF
      # record per domain, so more than one is an error.
      def find_spf_record(domain)
        spf_record = record_fetcher.txt_record_values(domain).each_with_object([]) {|txt_record_value, memo|
          memo << Coppertone::Record.new(txt_record_value) if Coppertone::Record.record?(txt_record_value)
        }
        if spf_record.length > 1
          raise SpfLookup::MultipleSpfRecordError.new("There must be only one SPF record in the same domain.")
        end
        return spf_record.first
      end

      # Memoized fetcher configured with the module-level DNS options.
      def record_fetcher
        @fetcher ||= TXTRecordFetcher.new(SpfLookup::DNS_CONFIG[:option])
      end
    end
  end
end
|
shirot7335/spf_lookup
|
lib/spf_lookup.rb
|
require "spf_lookup/version"
require_relative './spf_lookup/lookup'

# Public entry points for the spf_lookup gem.
module SpfLookup
  # Resolver options forwarded to Resolv::DNS.new; nil means defaults.
  # ex.
  #   {nameserver: '8.8.8.8'}
  DNS_CONFIG = {option: nil}
  # RFC 7208 caps DNS-querying mechanisms at 10 per evaluation.
  LOOKUP_LIMIT_SPECIFIED_BY_RFC7208 = 10

  class << self
    # Full SPF record tree for +domain+.
    def retrieve_record_set(domain)
      SpfLookup::Lookup.run(domain)
    end

    # Total DNS lookup terms needed to evaluate +domain+'s SPF record.
    def lookup_count(domain)
      retrieve_record_set(domain).lookup_count
    end

    # Overrides the resolver configuration used for subsequent lookups.
    def dns_configure(dns_config = nil)
      DNS_CONFIG[:option] = dns_config
    end
  end
end
|
shirot7335/spf_lookup
|
lib/spf_lookup/error.rb
|
module SpfLookup
  # Base class for all SpfLookup-specific failures.
  Error = Class.new(StandardError)

  # Raised when a queried domain publishes no SPF record.
  SpfRecordNotFound = Class.new(Error)

  # Raised when a domain publishes more than one SPF record
  # (RFC 7208 permits exactly one).
  MultipleSpfRecordError = Class.new(Error)
end
|
dentarg/rubocop-eighty-four-codes
|
rubocop-eightyfourcodes.gemspec
|
<filename>rubocop-eightyfourcodes.gemspec
# Gem specification for the rubocop-eightyfourcodes RuboCop plugin.
# Put lib/ on the load path so the version constant can be required below.
$LOAD_PATH.unshift File.expand_path('lib', __dir__)
require 'rubocop/eightyfourcodes/version'
Gem::Specification.new do |spec|
  spec.name = 'rubocop-eightyfourcodes'
  spec.summary = 'Basic security checks for projects'
  spec.description = <<~DESCRIPTION
    Basic security checking for Ruby files.
    A plugin for the RuboCop code style enforcing & linting tool.
  DESCRIPTION
  spec.homepage = 'https://github.com/84codes/rubocop-eightyfourcodes/'
  spec.authors = ['<NAME>', '<NAME>']
  spec.email = [
    '<EMAIL>',
    '<EMAIL>'
  ]
  spec.licenses = ['MIT']
  # Version is defined in a single place: lib/rubocop/eightyfourcodes/version.rb.
  spec.version = RuboCop::EightyFourCodes::Version::STRING
  spec.platform = Gem::Platform::RUBY
  spec.required_ruby_version = '>= 2.3.0'
  spec.require_paths = ['lib']
  spec.files = Dir[
    '{config,lib}/**/*',
    '*.md',
    '*.gemspec',
    'Gemfile'
  ]
  spec.extra_rdoc_files = ['LICENSE.md', 'README.md']
  spec.add_runtime_dependency 'rubocop', '>= 0.51'
  spec.add_development_dependency 'rake'
end
|
lzap/logging-journald
|
lib/logging/layouts/noop.rb
|
module Logging::Layouts
  # Factory: with no args returns the Noop layout class itself, otherwise
  # builds a configured instance (mirrors the logging gem's layout factories).
  def self.noop(*args)
    args.empty? ? ::Logging::Layouts::Noop : ::Logging::Layouts::Noop.new(*args)
  end

  # A layout that performs no formatting at all: it simply stringifies the
  # event's data payload.
  class Noop < ::Logging::Layout
    def format(event)
      event.data.to_s
    end
  end
end
|
lzap/logging-journald
|
test/test_appenders.rb
|
require 'logging'
require 'test/unit'
require 'mocha/test_unit'
# Register the journald appender/layout plugins with the logging gem before
# any test builds an appender.
Logging.initialize_plugins
module TestLogging
  module TestAppenders
    # Short aliases for the journald priority constants used in assertions.
    DEBUG = ::Journald::LOG_DEBUG
    INFO = ::Journald::LOG_INFO
    WARN = ::Journald::LOG_WARNING
    ERR = ::Journald::LOG_ERR
    CRIT = ::Journald::LOG_CRIT
    # Verifies the journald appender by stubbing Journald::Logger#send_message
    # (via mocha) and asserting on the record hash built for each log level,
    # facility, logger-name field, layout, and MDC/NDC feature.
    class TestJournald < Test::Unit::TestCase
      def setup
        @log = Logging.logger['test']
        @log.clear_appenders
        @log.level = :debug
      end
      # Builds the appender under test and attaches it to the logger.
      def setup_appender(*args)
        @appender = Logging.appenders.journald('simple', *args)
        @log.add_appenders(@appender)
      end
      def test_initialize
        setup_appender
        assert_equal "test", @log.name
        assert_equal 1, @log.appenders.size
      end
      def test_simple_debug_line
        setup_appender
        @appender.journal.expects(:send_message).with(has_entries(message: "test", priority: DEBUG)).once.returns(true)
        @log.debug "test"
      end
      def test_simple_debug_line_with_facility
        setup_appender facility: 10
        @appender.journal.expects(:send_message).with(has_entries(message: "test", SYSLOG_FACILITY: 10, priority: DEBUG)).once.returns(true)
        @log.debug "test"
      end
      def test_simple_debug_line_with_logger
        setup_appender logger_name: :lg
        @appender.journal.expects(:send_message).with(has_entries(message: "test", lg: 'test', priority: DEBUG)).once.returns(true)
        @log.debug "test"
      end
      # A debug event must be suppressed once the logger level is raised to info.
      def test_simple_debug_line_level_info
        setup_appender
        @log.level = :info
        @appender.journal.expects(:send_message).with(has_entries(message: "test", priority: DEBUG)).never.returns(true)
        @log.debug "test"
      end
      def test_simple_info_line
        setup_appender
        @appender.journal.expects(:send_message).with(has_entries(message: "test", priority: INFO)).once.returns(true)
        @log.info "test"
      end
      def test_simple_warning_line
        setup_appender
        @appender.journal.expects(:send_message).with(has_entries(message: "test", priority: WARN)).once.returns(true)
        @log.warn "test"
      end
      def test_simple_error_line
        setup_appender
        @appender.journal.expects(:send_message).with(has_entries(message: "test", priority: ERR)).once.returns(true)
        @log.error "test"
      end
      def test_simple_fatal_line
        setup_appender
        @appender.journal.expects(:send_message).with(has_entries(message: "test", priority: CRIT)).once.returns(true)
        @log.fatal "test"
      end
      # Hash payloads are forwarded into the record as-is.
      def test_simple_debug_line_as_hash
        setup_appender
        @appender.journal.expects(:send_message).with(has_entries("message" => "test", priority: DEBUG)).once.returns(true)
        @log.debug "message" => "test"
      end
      # Hash payloads bypass the layout entirely.
      def test_simple_debug_line_as_hash_with_layout
        setup_appender(layout: Logging.layouts.pattern(pattern: "X %m X"))
        @appender.journal.expects(:send_message).with(has_entries("message" => "test", priority: DEBUG)).once.returns(true)
        @log.debug "message" => "test"
      end
      def test_simple_info_line_with_mdc
        setup_appender
        Logging.mdc['test'] = 'value'
        @appender.journal.expects(:send_message).with(has_entries(message: "test", priority: INFO, "test" => "value")).once.returns(true)
        @log.info "test"
      ensure
        Logging.mdc.clear
      end
      def test_simple_info_line_with_ndc
        setup_appender
        Logging.ndc << { test: "value" }
        @appender.journal.expects(:send_message).with(has_entries(message: "test", priority: INFO, test: "value")).once.returns(true)
        @log.info "test"
      ensure
        Logging.ndc.clear
      end
      def test_simple_info_line_with_layout_mdc_ndc
        setup_appender(layout: Logging.layouts.pattern(pattern: "%m %X{test1} %x"))
        Logging.mdc['test1'] = 'value'
        Logging.ndc << { "test2" => "value" }
        @appender.journal.expects(:send_message).with(has_entries(message: "test value {\"test2\"=>\"value\"}", priority: INFO, "test1" => "value", "test2" => "value")).once.returns(true)
        @log.info "test"
      ensure
        Logging.mdc.clear
        Logging.ndc.clear
      end
    end
  end
end
|
lzap/logging-journald
|
lib/logging/plugins/journald.rb
|
<filename>lib/logging/plugins/journald.rb
module Logging
  module Plugins
    # Plugin hook discovered by Logging.initialize_plugins: loads the noop
    # layout and the journald appender when the plugin is initialized.
    module Journald
      extend self
      # Called by the logging gem's plugin system (initialize_<plugin_name>).
      def initialize_journald
        require File.expand_path('../../layouts/noop', __FILE__)
        require File.expand_path('../../appenders/journald', __FILE__)
      end
    end
  end
end
|
lzap/logging-journald
|
logging-journald.gemspec
|
<filename>logging-journald.gemspec<gh_stars>1-10
# Gem specification for the logging-journald plugin gem.
Gem::Specification.new do |spec|
  spec.name = 'logging-journald'
  spec.version = '2.1.0'
  spec.authors = ['<NAME>']
  spec.email = ['<EMAIL>']
  spec.summary = "Journald appender for logging gem"
  spec.description = "Plugin for logging gem providing journald appender"
  spec.homepage = 'https://github.com/lzap/logging-journald'
  spec.license = 'MIT'
  # Ship exactly what git tracks.
  spec.files = `git ls-files -z`.split("\x0")
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ['lib']
  spec.required_ruby_version = ">= 2.0.0"
  spec.add_runtime_dependency 'journald-logger', '~> 3.0'
  spec.add_runtime_dependency 'logging'
  spec.add_development_dependency 'rake', '~> 11.0' # ruby 2.0
  spec.add_development_dependency 'bundler'
  spec.add_development_dependency 'mocha'
  spec.add_development_dependency 'test-unit'
end
|
lzap/logging-journald
|
examples/simple.rb
|
<gh_stars>1-10
require 'logging'
# Example: wiring the journald appender (plus stdout) and exercising every
# log level along with MDC/NDC custom journal fields.
log = Logging.logger['example']
log.add_appenders(Logging.appenders.journald('simple',
  ident: 'simple', # optional log ident (appender name by default)
  layout: Logging.layouts.pattern(pattern: "%m\n"), # optional layout
  mdc: true, # log mdc into custom journal fields (true by default)
  ndc: true, # log ndc hash values into custom journal fields (true by default)
  facility: ::Syslog::Constants::LOG_USER, # optional syslog facility
  extra: {}, # extra custom journal fields
))
log.add_appenders(Logging.appenders.stdout)
log.level = :debug
# mapped diagnostic context is logged when mdc is set to true
Logging.mdc['USERNAME'] = 'Ondra'
# logging into journal is straight-forward
log.debug "this is debug message"
log.info "a very nice little info message"
log.warn "this is your last warning"
# hash instead string is supported with arbitrary key/value pairs but
# layout is ignored in this case
log.error message: "oh no an error", akey: "a value"
# when ndc is enabled, any number of hash objects can be pushed
# and will be logged with the message
Logging.ndc << { akey: "a value" }
log.fatal "an exception occured"
Logging.ndc.clear
|
lzap/logging-journald
|
lib/logging/appenders/journald.rb
|
<gh_stars>1-10
require 'journald/logger'
module Logging
  module Appenders
    # Accessor/factory for the journald appender: with no extra args returns
    # the already-registered 'journald' appender (or builds one under +name+);
    # otherwise builds a new configured instance.
    def self.journald(name, *args)
      if args.empty?
        return self['journald'] || ::Logging::Appenders::Journald.new(name)
      end
      ::Logging::Appenders::Journald.new(name, *args)
    end
    # Appender writing log events to the systemd journal via journald-logger,
    # mapping logging levels to journald priorities and optionally attaching
    # MDC/NDC values and extra options as custom journal fields.
    class Journald < ::Logging::Appender
      attr_reader :ident, :mdc, :ndc, :facility, :extra, :journal
      # name - appender name (also the default journal ident).
      # opts - :ident        journal identifier (defaults to name)
      #        :mdc / :ndc   include diagnostic contexts as fields (default true)
      #        :facility     syslog facility number; negative disables the field
      #        :extra        hash of extra journal fields merged into each record
      #        :logger_name  field key to store the logger name under, or false
      #        :map          custom level-name => priority mapping (see #map=)
      #        plus the standard ::Logging::Appender options (:layout, ...).
      def initialize(name, opts = {})
        # Without an explicit layout, pass event data through untouched.
        opts[:layout] ||= ::Logging::Layouts::Noop.new
        @ident = opts.fetch(:ident, name)
        @mdc = opts.fetch(:mdc, true)
        @ndc = opts.fetch(:ndc, true)
        @facility = Integer(opts.fetch(:facility, -1))
        @extra = opts.fetch(:extra, {})
        @logger_name = opts.fetch(:logger_name, false)
        # Default level-number -> journald priority mapping (debug..fatal).
        @map = [
          ::Journald::LOG_DEBUG,
          ::Journald::LOG_INFO,
          ::Journald::LOG_WARNING,
          ::Journald::LOG_ERR,
          ::Journald::LOG_CRIT
        ]
        map = opts.fetch(:map, nil)
        self.map = map unless map.nil?
        @journal = ::Journald::Logger.new(ident, ::Journald::LOG_DEBUG)
        #@journal.sev_threshold = ::Journald::LOG_DEBUG
        super(name, opts)
      end
      # Replaces the level mapping; +levels+ maps logging level names to
      # journald priorities given as Integer, String or Symbol constant names.
      def map=(levels)
        map = []
        levels.keys.each do |lvl|
          num = ::Logging.level_num(lvl)
          map[num] = syslog_level_num(levels[lvl])
        end
        @map = map
      end
      # Ignores the given args and always passes false to the superclass —
      # presumably to skip footer output on close; TODO confirm against
      # ::Logging::Appender#close.
      def close(*args)
        super(false)
      end
      private
      # Coerces a priority given as Integer, String or Symbol (a constant name
      # such as :DEBUG defined on this class) into its journald integer value.
      def syslog_level_num(level)
        case level
        when Integer; level
        when String, Symbol
          level = level.to_s.upcase
          self.class.const_get level
        else
          raise ArgumentError, "unknown level '#{level}'"
        end
      end
      # Builds the journal record hash for an event and sends it.  On any
      # StandardError the appender disables itself (level :off), logs the
      # failure internally, and re-raises.
      def write(event)
        record = {}
        record.merge!(extra) unless extra.empty?
        record[:SYSLOG_FACILITY] = @facility if @facility >= 0
        record.merge!(Logging.mdc.context) if mdc
        if ndc
          # Only Hash entries on the NDC stack become journal fields.
          Logging.ndc.context.each do |item|
            record.merge!(item) if item.instance_of?(Hash)
          end
        end
        if event.instance_of?(::Logging::LogEvent)
          record[:priority] = (@map[event.level] || ::Journald::LOG_DEBUG)
          record[@logger_name] = event.logger if @logger_name
          if event.data.instance_of?(Hash)
            # Hash payloads are logged as-is; the layout is bypassed.
            record.merge!(event.data)
          else
            record[:message] = @layout.format(event)
          end
        else
          # Raw (non-LogEvent) payloads are sent as the message verbatim.
          record[:message] = event
        end
        @journal.send_message(record)
        self
      rescue StandardError => err
        self.level = :off
        ::Logging.log_internal 'system journal appender have been disabled'
        ::Logging.log_internal_error(err)
        raise(err)
      end
    end
  end
end
|
bbc/linkr
|
test/test_resolve.rb
|
require 'helper'
# Exercises Linkr's redirect resolution against FakeWeb-stubbed endpoints:
# plain 200s, error statuses, invalid URLs, redirect chains (and their limit),
# relative Location headers, and body-only redirects.
class TestLinkr < Test::Unit::TestCase
  def test_basics
    FakeWeb.register_uri(:get, "http://bbc.in/pdTHqe", :location => "http://www.bbc.co.uk", :status => ["301", "Moved permanently"])
    FakeWeb.register_uri(:get, "http://www.bbc.co.uk", :status => ["200", "OK"], :body => "Hello World")
    l = Linkr.new("http://bbc.in/pdTHqe", {
      :redirect_limit => 10,
      :timeout => 10
    })
    assert_equal l.class, Linkr
    assert_equal l.original_url, "http://bbc.in/pdTHqe"
    assert_equal l.redirect_limit, 10
    assert_equal l.timeout, 10
    assert_equal l.body, 'Hello World'
    assert_equal l.response.class, Net::HTTPOK
  end
  def test_normal_link
    FakeWeb.register_uri(:get, "http://www.bbc.co.uk", :status => ["200", "OK"], :body => "Hello World")
    assert_equal Linkr.resolve("http://www.bbc.co.uk"), "http://www.bbc.co.uk"
  end
  # Error statuses are not redirects, so the original URL is returned.
  def test_internal_error
    FakeWeb.register_uri(:get, "http://www.bbc.co.uk", :status => ["500", "Internal Error"])
    assert_equal Linkr.resolve("http://www.bbc.co.uk"), "http://www.bbc.co.uk"
  end
  def test_unauthorized
    FakeWeb.register_uri(:get, "http://www.bbc.co.uk", :body => "Unauthorized", :status => ["401", "Unauthorized"])
    assert_equal Linkr.resolve("http://www.bbc.co.uk"), "http://www.bbc.co.uk"
  end
  def test_some_invalid_urls
    # These are invalid based on a regular expression
    # /(^$)|(^(http|https):\/\/[a-z0-9]+([\-\.]{1}[a-z0-9]+)*\.[a-z]{2,5}(([0-9]{1,5})?\/.*)?$)/ix
    # and is because Net/HTTP bails on uris that URI and Addressable deem fine
    ['http','xxx','whomwah.com','0','123','http://foo'].each do |link|
      assert_raise(Linkr::InValidUrl) {
        Linkr.resolve(link)
      }
    end
  end
  def test_empty_args
    assert_raise(ArgumentError) {
      Linkr.resolve('')
    }
  end
  def test_simple_resolve
    FakeWeb.register_uri(:get, "http://bbc.in/pdTHqe", :location => "http://www.bbc.co.uk", :status => ["301", "Moved permanently"])
    FakeWeb.register_uri(:get, "http://www.bbc.co.uk", :status => ["200", "OK"])
    assert_equal Linkr.resolve("http://bbc.in/pdTHqe"), "http://www.bbc.co.uk"
  end
  # Seven hops exceeds the default redirect_limit of 5.
  def test_too_many_redirects
    FakeWeb.register_uri(:get, "http://bbc.in/pdTHqe", :location => "http://url1.com", :status => ["301", "Moved permanently"])
    FakeWeb.register_uri(:get, "http://url1.com", :location => "http://url2.com", :status => ["301", "Moved permanently"])
    FakeWeb.register_uri(:get, "http://url2.com", :location => "http://url3.com", :status => ["301", "Moved permanently"])
    FakeWeb.register_uri(:get, "http://url3.com", :location => "http://url4.com", :status => ["301", "Moved permanently"])
    FakeWeb.register_uri(:get, "http://url4.com", :location => "http://url5.com", :status => ["301", "Moved permanently"])
    FakeWeb.register_uri(:get, "http://url5.com", :location => "http://url6.com", :status => ["301", "Moved permanently"])
    FakeWeb.register_uri(:get, "http://url6.com", :location => "http://url7.com", :status => ["301", "Moved permanently"])
    FakeWeb.register_uri(:get, "http://url7.com", :status => ["200", "OK"])
    assert_raise(Linkr::TooManyRedirects) {
      Linkr.resolve("http://bbc.in/pdTHqe")
    }
  end
  # A relative Location header is re-anchored on the redirecting site.
  def test_relative_urls_in_the_redirect
    FakeWeb.register_uri(:get, "http://foo.in/duncan", :location => "/fred", :status => ["301", "Moved permanently"])
    FakeWeb.register_uri(:get, "http://foo.in/fred", :status => ["200", "OK"])
    assert_equal Linkr.resolve("http://foo.in/duncan"), "http://foo.in/fred"
  end
  # With no Location header, the target is scraped from the response body.
  def test_redirect_with_location_in_body
    FakeWeb.register_uri(:get, "http://foo.in/duncan", :body => "<p><a href='http://bar.in/fred'>Redirecting...</a>", :status => ["301", "Moved permanently"])
    FakeWeb.register_uri(:get, "http://bar.in/fred", :status => ["200", "OK"])
    assert_equal Linkr.resolve("http://foo.in/duncan"), "http://bar.in/fred"
  end
end
|
bbc/linkr
|
lib/linkr.rb
|
require 'ostruct'
require 'net/http'
require 'addressable/uri'
# Resolves a URL by following HTTP redirects (honouring $http_proxy) until a
# non-redirect response is reached; exposes the final URL, response and body.
class Linkr
  # Raised when more redirects than redirect_limit are followed.
  class TooManyRedirects < StandardError; end
  # Raised when a URL fails the shape check in #valid?.
  class InValidUrl < StandardError; end
  attr_accessor :original_url, :redirect_limit, :timeout
  attr_writer :url, :response
  # original_url - the starting URL to resolve.
  # opts - :redirect_limit (default 5) and :timeout seconds (default 5,
  #        applied to both open and read timeouts).
  def initialize(original_url, opts={})
    opts = {
      :redirect_limit => 5,
      :timeout => 5
    }.merge(opts)
    @original_url = original_url
    @redirect_limit = opts[:redirect_limit]
    @timeout = opts[:timeout]
    # Without a proxy the OpenStruct yields nil host/port, which
    # Net::HTTP::Proxy treats as "no proxy".
    @proxy = ENV['http_proxy'] ? Addressable::URI.parse(ENV['http_proxy']) : OpenStruct.new
    # Remembers the last absolute site so relative Location headers can be resolved.
    @link_cache = nil
  end
  # The fully-resolved URL; triggers resolution on first access.
  def url
    resolve unless @url
    @url
  end
  # Body of the final (non-redirect) response.
  def body
    response.body
  end
  # The final Net::HTTP response; triggers resolution on first access.
  def response
    resolve unless @response
    @response
  end
  # Convenience: Linkr.resolve(url, opts) => final URL string.
  def self.resolve(*args)
    self.new(*args).url
  end
  private
  # Fetches the current URL, recursing through #redirect until either a
  # non-redirect response arrives or the limit is exhausted.
  def resolve
    raise TooManyRedirects if @redirect_limit < 0
    self.url = original_url unless @url
    @uri = Addressable::URI.parse(@url).normalize
    # A Location header may be relative; re-anchor it on the previous site.
    fix_relative_url if !@uri.normalized_site && @link_cache
    raise InValidUrl unless valid?
    http = Net::HTTP::Proxy(@proxy.host, @proxy.port).new(@uri.host, @uri.port)
    http.read_timeout = http.open_timeout = @timeout
    request = Net::HTTP::Get.new(@uri.omit(:scheme,:authority).to_s)
    self.response = http.request(request)
    redirect if response.kind_of?(Net::HTTPRedirection)
  end
  # Remembers the current site, swaps in the redirect target, spends one
  # unit of the limit and recurses.
  def redirect
    @link_cache = @uri.normalized_site
    self.url = redirect_url
    @redirect_limit -= 1
    resolve
  end
  # Joins a relative redirect path onto the previously-seen site.
  def fix_relative_url
    @url = File.join(@link_cache, @uri.omit(:scheme,:authority).to_s)
    @uri = Addressable::URI.parse(@url).normalize
    @link_cache = nil
  end
  # Target of a redirect: the Location header, or — for servers that omit
  # it — the first anchor href scraped from the response body.
  def redirect_url
    if response['location'].nil?
      response.body.match(/<a href=[\"|\']([^>]+)[\"|\']>/i)[1]
    else
      response['location']
    end
  end
  # Truthy when the URL matches a basic http(s) URL shape; returns nil
  # (not false) otherwise.  NOTE(review): the (^$) branch also accepts an
  # empty string — confirm that is intentional (test_empty_args relies on
  # Net::HTTP raising ArgumentError further down instead).
  def valid?
    regex = /(^$)|(^(http|https):\/\/[a-z0-9]+([\-\.]{1}[a-z0-9]+)*\.[a-z]{2,5}(([0-9]{1,5})?\/.*)?$)/ix
    true if self.url && self.url =~ regex
  end
end
|
bbc/linkr
|
test/helper.rb
|
<filename>test/helper.rb
require 'test/unit'
require 'fakeweb'
require_relative '../lib/linkr.rb'
|
4ormat/rubycas-client
|
spec/casclient/validation_response_spec.rb
|
<filename>spec/casclient/validation_response_spec.rb<gh_stars>0
require 'spec_helper'
require 'casclient/responses.rb'
# Specs for CASClient::ValidationResponse's parsing of CAS
# <cas:serviceResponse> XML: extra-attribute decoding modes (:raw, :yaml,
# :json), repeated elements, and attribute-style responses.  NOTE: heredoc
# bodies below are whitespace-sensitive (CDATA contents are asserted exactly).
describe CASClient::ValidationResponse do
  context "when parsing extra attributes as raw" do
    let(:response_text) do
      <<RESPONSE_TEXT
<cas:serviceResponse xmlns:cas="http://www.yale.edu/tp/cas">
<cas:authenticationSuccess>
<cas:attributes>
<cas:name><NAME></cas:name>
<cas:status><![CDATA[stuff
]]></cas:status>
<cas:yaml><![CDATA[--- true
]]></cas:yaml>
<cas:json><![CDATA[{"id":10529}]]></cas:json>
</cas:attributes>
</cas:authenticationSuccess>
</cas:serviceResponse>
RESPONSE_TEXT
    end
    subject { CASClient::ValidationResponse.new response_text, :encode_extra_attributes_as => :raw }
    it "sets text attributes to their string value" do
      subject.extra_attributes["name"].should == "<NAME>"
    end
    it "preserves whitespace for CDATA" do
      subject.extra_attributes["status"].should == "stuff\n"
    end
    it "passes yaml through as is" do
      subject.extra_attributes["yaml"].should == "--- true\n"
    end
    it "passes json through as is" do
      subject.extra_attributes["json"].should == "{\"id\":10529}"
    end
  end
  context "when parsing extra attributes as yaml" do
    let(:response_text) do
      <<RESPONSE_TEXT
<cas:serviceResponse xmlns:cas="http://www.yale.edu/tp/cas">
<cas:authenticationSuccess>
<cas:attributes>
<cas:name><NAME></cas:name>
<cas:status><![CDATA[stuff
]]></cas:status>
<cas:truthy><![CDATA[--- true
]]></cas:truthy>
<cas:falsy><![CDATA[#{false.to_yaml}]]></cas:falsy>
</cas:attributes>
</cas:authenticationSuccess>
</cas:serviceResponse>
RESPONSE_TEXT
    end
    subject { CASClient::ValidationResponse.new response_text, :encode_extra_attributes_as => :yaml }
    it "sets text attributes to their string value" do
      subject.extra_attributes["name"].should == "<NAME>"
    end
    it "sets the value of boolean attributes to their boolean value" do
      subject.extra_attributes["falsy"].should == false
      subject.extra_attributes["truthy"].should == true
    end
  end
  context "when parsing extra attributes as JSON" do
    let(:response_text) do
      <<RESPONSE_TEXT
<cas:serviceResponse xmlns:cas="http://www.yale.edu/tp/cas">
<cas:authenticationSuccess>
<cas:attributes>
<cas:first_name>Jack</cas:first_name>
<cas:last_name>92.5</cas:last_name>
<cas:mobile_phone></cas:mobile_phone>
<cas:global_roles><![CDATA[]]></cas:global_roles>
<cas:foo_data><![CDATA[[{"id":10529}]]]></cas:foo_data>
<cas:food_data><![CDATA[{"id":10529}]]></cas:food_data>
<cas:allegedly_yaml>- 10</cas:allegedly_yaml>
<cas:truthy><![CDATA[--- true
]]></cas:truthy>
<cas:falsy><![CDATA[--- false
]]></cas:falsy>
</cas:attributes>
</cas:authenticationSuccess>
</cas:serviceResponse>
RESPONSE_TEXT
    end
    subject { CASClient::ValidationResponse.new response_text, :encode_extra_attributes_as => :json }
    it "sets the value of non-CDATA escaped empty attribute to nil" do
      subject.extra_attributes["mobile_phone"].should be_nil
    end
    it "sets the value of CDATA escaped empty attribute to nil" do
      subject.extra_attributes["global_roles"].should be_nil
    end
    it "sets the value of literal attributes to their value" do
      subject.extra_attributes["first_name"].should == "Jack"
    end
    it "sets the value of JSON attributes containing Arrays to their parsed value" do
      subject.extra_attributes["foo_data"][0]["id"].should == 10529
    end
    it "sets the value of JSON attributes containing Hashes to their parsed value" do
      subject.extra_attributes["food_data"]["id"].should == 10529
    end
    it "sets non-hash attributes as strings" do
      subject.extra_attributes["last_name"].should be_a_kind_of String
    end
    it "sets the value of attributes which are not valid JSON but are valid YAML to their literal value" do
      subject.extra_attributes["allegedly_yaml"].should == '- 10'
    end
  end
  context "When parsing extra attributes with multiple attribute elements" do
    let(:response_text) do
      <<RESPONSE_TEXT
<cas:serviceResponse xmlns:cas="http://www.yale.edu/tp/cas">
<cas:authenticationSuccess>
<cas:attributes>
<cas:group>ABC</cas:group>
<cas:group>DEF</cas:group>
</cas:attributes>
</cas:authenticationSuccess>
</cas:serviceResponse>
RESPONSE_TEXT
    end
    subject { CASClient::ValidationResponse.new response_text }
    it "parses mutliple same-type elements to array of their values" do
      subject.extra_attributes["group"].should == ['ABC', 'DEF']
    end
  end
  context "When parsing extra attributes from xml attributes" do
    let(:response_text) do
      <<RESPONSE_TEXT
<?xml version="1.0" encoding="UTF-8"?>
<cas:serviceResponse xmlns:cas="http://www.yale.edu/tp/cas">
<cas:authenticationSuccess>
<cas:user>myuser</cas:user>
<cas:attribute name="username" value="myuser"/>
<cas:attribute name="name" value="<NAME>"/>
<cas:attribute name="email" value="<EMAIL>"/>
</cas:authenticationSuccess>
</cas:serviceResponse>
RESPONSE_TEXT
    end
    subject { CASClient::ValidationResponse.new response_text }
    it "sets attributes for other type of format" do
      expected = {"username" => "myuser", "name" => '<NAME>', "email" => '<EMAIL>'}
      subject.user.should == 'myuser'
      subject.extra_attributes.should == expected
    end
  end
end
|
mengxianbin/sonic-notes
|
instrument/guitar/electronic_guitar.rb
|
# Sonic Pi: plays the given block with an electric-guitar-like timbre —
# the :pluck synth with fixed envelope defaults, routed through reverb
# and a low-pass filter (cutoff 115).
define :play_guitar do |&do_play|
  use_synth :pluck
  use_synth_defaults attack: 0.01, sustain: 0.5, decay: 0.1, release: 0.2, amp: 1, note_slide: 0.25
  with_fx :reverb do
    with_fx :lpf, cutoff: 115 do
      do_play.()
    end
  end
end
|
mengxianbin/sonic-notes
|
util/note/check_note.rb
|
<filename>util/note/check_note.rb
# Sonic Pi helper: clamps a note number to the playable range [0, 88]
# (values above 88 become 88, values below 0 become 0).
define :check_note do |n|
  n.clamp(0, 88)
end
|
mengxianbin/sonic-notes
|
idea/2020/0227/blues_2020_0227_001.rb
|
<gh_stars>1-10
use_bpm 100
# Alternating note durations (in beats) for the phrase.
rhythm = [0.5, 0.25]
# Melodic template; transposed onto each root below by play_phrase.
template = [:C, :C, :E, :E, :G, :G, :A, :A, :Bb, :Bb, :A, :A, :G, :G, :E, :E]
# Chord-root sequence the template is transposed through.
roots = [:C, :C, :F, :C, :G, :F, :C, :C]
live_loop :blues do
  # play_phrase: reference from /util/play
  play_blues = ->() { roots.each { |root| play_phrase root, template, rhythm } }
  play_piano &play_blues
  # play_guitar: reference from /instrument/guitar/acoustic_guitar
  play_guitar &play_blues
end
|
mengxianbin/sonic-notes
|
util/play/play_phrase.rb
|
# Sonic Pi helper: plays +template+ transposed so its first note lands on
# +root+, using the per-note durations in +rhythm+, through a reverb effect.
define :play_phrase do |root, template, rhythm|
  # Semitone offset between the requested root and the template's first note.
  offset = root - note(template[0])
  with_fx :reverb do
    # check_note: reference from /util/note
    play_pattern_timed template.map { |n| check_note(n + offset) }, rhythm
  end
end
|
mengxianbin/sonic-notes
|
idea/2020/0227/blues_2020_0227_002.rb
|
<filename>idea/2020/0227/blues_2020_0227_002.rb
use_bpm 60
# Metronome loop the melody and bass loops sync against (one tick per beat).
live_loop :ticks do
  play 0, release: 0
  sleep 1
end
# Melody voice: the template transposed across the root sequence.
live_loop :melody, sync: :ticks do
  rhythm = [0.33, 0.17, 0.34, 0.16]
  template = [:C, :C, :E, :E, :G, :G, :A, :A, :Bb, :Bb, :A, :A, :G, :G, :E, :E]
  roots = [:C, :C, :F, :C, :G, :F, :C, :C]
  play_piano &->() { roots.each { |root| play_phrase root, template, rhythm } }
end
# Bass voice: same progression two octaves down (24 semitones).
live_loop :bass, sync: :ticks do
  rhythm = [2, 0.83, 0.17, 1]
  template = [:C, :C, :C, 0]
  roots = [:C, :C, :F, :C, :G, :F, :C, :C].map { |n| n - 24 }
  play_piano &->() { roots.each { |root| play_phrase root, template, rhythm } }
end
|
mengxianbin/sonic-notes
|
api/v3.1/lang/synth_names.rb
|
<reponame>mengxianbin/sonic-notes
# available synth names (Sonic Pi v3.1), returned as a ring so it can be
# ticked/indexed like other Sonic Pi collections
(ring
 :beep,
 :blade,
 :bnoise,
 :chipbass,
 :chiplead,
 :chipnoise,
 :cnoise,
 :dark_ambience,
 :dpulse,
 :dsaw,
 :dtri,
 :dull_bell,
 :fm,
 :gnoise,
 :growl,
 :hollow,
 :hoover,
 :mod_beep,
 :mod_dsaw,
 :mod_fm,
 :mod_pulse,
 :mod_saw,
 :mod_sine,
 :mod_tri,
 :noise,
 :piano,
 :pluck,
 :pnoise,
 :pretty_bell,
 :prophet,
 :pulse,
 :saw,
 :sine,
 :sound_in,
 :sound_in_stereo,
 :square,
 :subpulse,
 :supersaw,
 :tb303,
 :tech_saws,
 :tri,
 :zawa
)
|
mengxianbin/sonic-notes
|
idea/2020/0228/tracks_2020_0228_001.rb
|
use_bpm 100
# Shared metronome: one event every 4 beats for all tracks to sync on.
live_loop :ticks do
  play_pattern_timed [0], 4
end
# Drum Track
live_loop :beats, sync: :ticks do
  # NOTE(review): rate is set to sample_duration/4 — that is a playback-rate
  # factor, not a duration; confirm this produces the intended stretch.
  sample_rate = (sample_duration :loop_amen) / 4.0
  sample :loop_amen, rate: sample_rate
  sleep 4
end
# Rolls a chord as a fast arpeggio, then rests out the remainder of the bar.
define :play_ch do |ch|
  play_pattern_timed ch, 0.0625, attack: 0.4, sustain: 1, release: 4, amp: 2
  sleep 3.75
end
# Piano Track
live_loop :chords, sync: :ticks do
  use_synth :piano
  with_fx :reverb do
    play_ch chord(:D3, :m7)
    play_ch chord(:G2, "7", invert: 2)
    play_ch chord(:C3, :M7)
    # Final bar alternates between two chords on successive passes.
    play_ch (ring chord(:C3, "6"), chord(:A2, "7", invert: 1)).tick(:chord)
  end
end
# Two pushes of the root per 2-beat figure.
define :play_bass do |root|
  amp_bass = 1.5
  play_pattern [root], amp: amp_bass; sleep 0.5
  play_pattern [root], amp: amp_bass; sleep 1.5
end
# Bass Track
live_loop :bass, sync: :ticks do
  use_synth :fm
  play_bass :F2
  play_bass :G2
  play_bass :C3
  play_bass :A2
end
# Melody Track
live_loop :melody, sync: :ticks do
  play_pattern_timed [0], 1
  # Lead note alternates between :E and :A on successive passes.
  play_pattern_timed [(ring :E, :A).tick(:melody)], 0.5
  play_pattern_timed [:G], 0.5, amp: 0.4
  play_pattern_timed [:G], 0.5, amp: 0.4
  play_pattern_timed [:E], 0.5, amp: 0.4
  play_pattern_timed [:E], 1, amp: 0.4
end
# Long held notes with a neighbouring-tone turn; the closing interval follows
# the shared :chord tick.
define :play_choir do |root|
  play_pattern_timed [root], 2.5, amp: 0.6
  play_pattern_timed [root - 2], 0.5, amp: 0.6
  play_pattern_timed [root], 0.5, amp: 0.6
  play_pattern_timed [root - (ring 2, 2, 2, 1).tick(:chord)], 4.5, amp: 0.6
end
# Choir Track
live_loop :choir, sync: :ticks do
  play_choir :E5
  play_choir :D5
end
|
mengxianbin/sonic-notes
|
api/v3.1/lang/scale.rb
|
<reponame>mengxianbin/sonic-notes<filename>api/v3.1/lang/scale.rb
# available scale types (Sonic Pi v3.1), each demonstrated on :C
(scale :C, :diatonic)
(scale :C, :ionian)
(scale :C, :major)
(scale :C, :dorian)
(scale :C, :phrygian)
(scale :C, :lydian)
(scale :C, :mixolydian)
(scale :C, :aeolian)
(scale :C, :minor)
(scale :C, :locrian)
(scale :C, :hex_major6)
(scale :C, :hex_dorian)
(scale :C, :hex_phrygian)
(scale :C, :hex_major7)
(scale :C, :hex_sus)
(scale :C, :hex_aeolian)
(scale :C, :minor_pentatonic)
(scale :C, :yu)
(scale :C, :major_pentatonic)
(scale :C, :gong)
(scale :C, :egyptian)
(scale :C, :shang)
(scale :C, :jiao)
(scale :C, :zhi)
(scale :C, :ritusen)
(scale :C, :whole_tone)
(scale :C, :whole)
(scale :C, :chromatic)
(scale :C, :harmonic_minor)
(scale :C, :melodic_minor_asc)
(scale :C, :hungarian_minor)
(scale :C, :octatonic)
(scale :C, :messiaen1)
(scale :C, :messiaen2)
(scale :C, :messiaen3)
(scale :C, :messiaen4)
(scale :C, :messiaen5)
(scale :C, :messiaen6)
(scale :C, :messiaen7)
(scale :C, :super_locrian)
(scale :C, :hirajoshi)
(scale :C, :kumoi)
(scale :C, :neapolitan_major)
(scale :C, :bartok)
(scale :C, :bhairav)
(scale :C, :locrian_major)
(scale :C, :ahirbhairav)
(scale :C, :enigmatic)
(scale :C, :neapolitan_minor)
(scale :C, :pelog)
(scale :C, :augmented2)
(scale :C, :scriabin)
(scale :C, :harmonic_major)
(scale :C, :melodic_minor_desc)
(scale :C, :romanian_minor)
(scale :C, :hindu)
(scale :C, :iwato)
(scale :C, :melodic_minor)
(scale :C, :diminished2)
(scale :C, :marva)
(scale :C, :melodic_major)
(scale :C, :indian)
(scale :C, :spanish)
(scale :C, :prometheus)
(scale :C, :diminished)
(scale :C, :todi)
(scale :C, :leading_whole)
(scale :C, :augmented)
(scale :C, :purvi)
(scale :C, :chinese)
(scale :C, :lydian_minor)
(scale :C, :blues_major)
(scale :C, :blues_minor)
|
mengxianbin/sonic-notes
|
api/v3.1/lang/chord.rb
|
<reponame>mengxianbin/sonic-notes
# available chord types (Sonic Pi v3.1), each demonstrated on :C;
# quoted names are used where the symbol form is not a valid Ruby symbol
(chord :C, '1')
(chord :C, '5')
(chord :C, '+5')
(chord :C, 'm+5')
(chord :C, :sus2)
(chord :C, :sus4)
(chord :C, '6')
(chord :C, :m6)
(chord :C, '7sus2')
(chord :C, '7sus4')
(chord :C, '7-5')
(chord :C, 'm7-5')
(chord :C, '7+5')
(chord :C, 'm7+5')
(chord :C, '9')
(chord :C, :m9)
(chord :C, 'm7+9')
(chord :C, :maj9)
(chord :C, '9sus4')
(chord :C, '6*9')
(chord :C, 'm6*9')
(chord :C, '7-9')
(chord :C, 'm7-9')
(chord :C, '7-10')
(chord :C, '9+5')
(chord :C, 'm9+5')
(chord :C, '7+5-9')
(chord :C, 'm7+5-9')
(chord :C, '11')
(chord :C, :m11)
(chord :C, :maj11)
(chord :C, '11+')
(chord :C, 'm11+')
(chord :C, '13')
(chord :C, :m13)
(chord :C, :add2)
(chord :C, :add4)
(chord :C, :add9)
(chord :C, :add11)
(chord :C, :add13)
(chord :C, :madd2)
(chord :C, :madd4)
(chord :C, :madd9)
(chord :C, :madd11)
(chord :C, :madd13)
(chord :C, :major)
(chord :C, :M)
(chord :C, :minor)
(chord :C, :m)
(chord :C, :major7)
(chord :C, :dom7)
(chord :C, '7')
(chord :C, :M7)
(chord :C, :minor7)
(chord :C, :m7)
(chord :C, :augmented)
(chord :C, :a)
(chord :C, :diminished)
(chord :C, :dim)
(chord :C, :i)
(chord :C, :diminished7)
(chord :C, :dim7)
(chord :C, :i7)
|
mengxianbin/sonic-notes
|
instrument/piano/piano.rb
|
# Sonic Pi: plays the given block on the :piano synth through a reverb effect.
define :play_piano do |&do_play|
  use_synth :piano
  with_fx :reverb do
    do_play.()
  end
end
|
mengxianbin/sonic-notes
|
idea/2020/0227/lofi_2020_0227_001.rb
|
use_bpm 100
# Metronome: one event per 4-beat bar for the other loops to sync on.
live_loop :ticks do
  play 0
  sleep 4
end
# Rolls a chord as a fast arpeggio, then rests out the remainder of the bar.
define :play_ch do |ch|
  play_pattern_timed ch, 0.125
  sleep 3.5
end
live_loop :chord, sync: :ticks do
  use_synth :piano
  with_fx :reverb do
    play_ch chord(:D3, :m7)
    play_ch chord(:G2, "7", invert: 2)
    play_ch chord(:C3, :M7)
    play_ch chord(:C3, "6")
  end
end
live_loop :beats, sync: :ticks do
  # NOTE(review): rate is set to sample_duration/4 — that is a playback-rate
  # factor, not a duration; confirm this produces the intended stretch.
  sample_rate = (sample_duration :loop_amen) / 4.0
  sample :loop_amen, rate: sample_rate
  sleep 4
end
|
mengxianbin/sonic-notes
|
idea/2020/0228/loop_2020_0228_001.rb
|
<filename>idea/2020/0228/loop_2020_0228_001.rb
use_bpm 100
# Metronome: one event per 4-beat bar for the other loops to sync on.
live_loop :ticks do
  play 0
  sleep 4
end
# Drum Track
live_loop :beats, sync: :ticks do
  # NOTE(review): rate is set to sample_duration/4 — that is a playback-rate
  # factor, not a duration; confirm this produces the intended stretch.
  sample_rate = (sample_duration :loop_amen) / 4.0
  sample :loop_amen, rate: sample_rate
  sleep 4
end
# Rolls a chord as a fast arpeggio, then rests out the remainder of the bar.
define :play_ch do |ch|
  play_pattern_timed ch, 0.0625, attack: 0.4, sustain: 1, release: 4, amp: 2
  sleep 3.75
end
# Piano Track
live_loop :chord, sync: :ticks do
  use_synth :piano
  with_fx :reverb do
    play_ch chord(:D3, :m7)
    play_ch chord(:G2, "7", invert: 2)
    play_ch chord(:C3, :M7)
    # Final bar alternates between two chords on successive passes.
    play_ch (ring chord(:C3, "6"), chord(:A2, "7", invert: 1)).tick(:chord)
  end
end
# One-bar bass figure built around the given root.
define :play_bass do |root|
  play_pattern_timed [root], 0.5, release: 0.1, amp: 0.6
  play_pattern_timed [root], 0.5, release: 0.1, amp: 0.6
  play_pattern_timed [root], 0.5, release: 0.1, amp: 0.6
  play_pattern_timed [root - 3], 0.25, release: 0.2, amp: 0.5
  play_pattern_timed [root], 0.5, release: 0.2, amp: 0.5
  play_pattern_timed [root], 0.25, release: 0.2, amp: 0.5
  play_pattern_timed [root], 0.25, release: 0.2, amp: 0.5
  play_pattern_timed [root], 0.25, release: 0.2, amp: 0.5
  play_pattern_timed [root + 4], 1, amp: 0.6
end
# Bass Track
live_loop :bass, sync: :ticks do
  use_synth :fm
  play_bass :C3
end
|
mengxianbin/sonic-notes
|
instrument/guitar/acoustic_guitar.rb
|
<reponame>mengxianbin/sonic-notes
# Sonic Pi: plays the given block with an acoustic-guitar-like timbre —
# the :pluck synth through reverb and a low-pass filter (cutoff 115).
define :play_guitar do |&do_play|
  use_synth :pluck
  with_fx :reverb do
    with_fx :lpf, cutoff: 115 do
      # NOTE(review): with_synth :pluck looks redundant — use_synth :pluck
      # above already selects the synth; confirm before removing.
      with_synth :pluck do
        do_play.()
      end
    end
  end
end
|
igrigorik/shopify-core-web-vitals
|
config/initializers/shopify_app.rb
|
<filename>config/initializers/shopify_app.rb
# Shopify app configuration; API credentials come from the environment.
ShopifyApp.configure do |config|
  config.application_name = "Core Web Vitals Dashboard"
  config.api_key = ENV["SHOPIFY_API_KEY"]
  config.secret = ENV["SHOPIFY_API_SECRET"]
  config.old_secret = ""
  # In theory, we don't need any scopes at all for this app, but Shopify complains on empty list..
  # https://help.shopify.com/en/api/getting-started/authentication/oauth/scopes
  config.scope = "read_content, read_products"
  config.embedded_app = true
  config.after_authenticate_job = false
  config.api_version = "2020-07"
  config.shop_session_repository = "Shop"
  # NOTE(review): "heroku.com" looks suspicious — Heroku apps are normally
  # served from "herokuapp.com"; confirm this webhook address is reachable.
  config.webhooks = [
    {topic: "app/uninstalled", address: "https://core-web-vitals-dashboard.heroku.com/webhooks/app_uninstalled", format: "json"}
  ]
end
# ShopifyApp::Utils.fetch_known_api_versions # Uncomment to fetch known api versions from shopify servers on boot
# ShopifyAPI::ApiVersion.version_lookup_mode = :raise_on_unknown # Uncomment to raise an error if attempting to use an api version that was not previously known
|
igrigorik/shopify-core-web-vitals
|
app/models/competitor.rb
|
# A competitor origin (URL string) associated with a shop; used by the
# dashboard as a comparison baseline.
class Competitor < ApplicationRecord
  belongs_to :shop
  # NOTE(review): no uniqueness validation — duplicate origins per shop are
  # possible; confirm intent.
  validates :origin, presence: true
end
|
igrigorik/shopify-core-web-vitals
|
app/controllers/competitors_controller.rb
|
# frozen_string_literal: true

# JSON API for managing the per-shop list of competitor origins.  The
# resource is identified by the "origin" parameter rather than by id.
class CompetitorsController < AuthenticatedController
  # Loads the current shop from the session's primary domain for every action.
  before_action do
    @shop = Shop.find_by(shopify_domain: helpers.get_primary_shop_domain)
  end
  # GET /competitors — all competitor records for the shop.
  def show
    render json: @shop.competitors
  end
  # POST /competitors — appends a new competitor origin.
  # NOTE(review): responds with the bare boolean from #save, so a failed
  # validation still returns HTTP 200 with "false" — confirm clients handle this.
  def create
    @shop.competitors << Competitor.new(origin: params[:origin])
    render json: @shop.save
  end
  # DELETE /competitors — removes the competitor matching params[:origin].
  # NOTE(review): the not-found branch still returns HTTP 200 — consider
  # rendering with status: :not_found.
  def destroy
    if (c = @shop.competitors.find_by(origin: params[:origin]))
      render json: c.destroy
    else
      render json: {error: "not found"}
    end
  end
end
|
igrigorik/shopify-core-web-vitals
|
app/controllers/home_controller.rb
|
<reponame>igrigorik/shopify-core-web-vitals<filename>app/controllers/home_controller.rb
# frozen_string_literal: true
# Renders the dashboard landing page with the shop's configured competitors.
class HomeController < AuthenticatedController
  # Fallback shown when the shop has not configured any competitors yet.
  # Frozen: constants holding mutable collections must not be modified in place.
  DEFAULT_COMPETITORS = ["https://www.amazon.com"].freeze

  def index
    # Effectively a noop API call but a necessary one as well to validate
    # that the granted tokens are still valid - e.g. if the store owner
    # uninstalls the app, this is the only way to find out immediately as
    # Shopify will invalidate the granted keys.
    @scopes = ShopifyAPI::AccessScope.find(:all)
    @shop = Shop.find_by(shopify_domain: helpers.get_primary_shop_domain)
    @competitors = @shop.competitors.pluck(:origin)
    @competitors = DEFAULT_COMPETITORS if @competitors.empty?
  end
end
|
igrigorik/shopify-core-web-vitals
|
config/initializers/user_agent.rb
|
# Appends app identification to the User-Agent header sent with every
# ShopifyAPI request by monkey-patching ShopifyAPI::Base.
# NOTE(review): `<<` mutates the shared header string in place — re-running
# this initializer (e.g. on code reload in development) would append the
# suffix again; verify whether that matters in practice.
module ShopifyAPI
  class Base < ActiveResource::Base
    self.headers['User-Agent'] << " | ShopifyApp/#{ShopifyApp::VERSION} | Shopify App CLI"
  end
end
|
igrigorik/shopify-core-web-vitals
|
app/helpers/application_helper.rb
|
<filename>app/helpers/application_helper.rb<gh_stars>10-100
module ApplicationHelper
  # GraphQL query for the shop's name and primary (customer-facing) domain.
  SHOP_QUERY = <<-'GRAPHQL'
  {
    shop {
      name,
      primaryDomain {
        id,
        url
      }
    }
  }
  GRAPHQL

  # Returns the host portion of the shop's primary domain URL,
  # e.g. "shop.example.com". Performs a GraphQL round-trip on every call
  # (no caching).
  def get_primary_shop_domain
    client = ShopifyAPI::GraphQL.client
    result = client.query(client.parse(SHOP_QUERY))
    URI.parse(result.data.shop.primary_domain.url).host
  end
end
|
igrigorik/shopify-core-web-vitals
|
config/routes.rb
|
Rails.application.routes.draw do
  root to: "home#index"
  # Shopify OAuth / session / webhook routes provided by shopify_app.
  mount ShopifyApp::Engine, at: "/"
  get "/privacy", to: "application#privacy"
  # Competitors are keyed by the `origin` param, not an id, so all three
  # verbs share a single collection path.
  get "/competitors", to: "competitors#show"
  post "/competitors", to: "competitors#create"
  delete "/competitors", to: "competitors#destroy"
end
|
igrigorik/shopify-core-web-vitals
|
app/jobs/app_uninstalled_job.rb
|
# Handles the app/uninstalled webhook: deletes the Shop record for the
# uninstalling store, logging an error when no matching shop exists.
class AppUninstalledJob < ActiveJob::Base
  def perform(shop_domain:, webhook:)
    shop = Shop.find_by(shopify_domain: shop_domain)
    if shop.nil?
      logger.error("#{self.class} failed: cannot find shop with domain '#{shop_domain}'")
      return
    end
    # Destroy inside a Shopify session so destroy callbacks can reach the
    # API. NOTE(review): tokens are revoked at uninstall time — confirm any
    # API calls made during destroy tolerate that.
    shop.with_shopify_session do
      shop.destroy
    end
  end
end
|
royratcliffe/sqlanywhere
|
sqlanywhere.gemspec
|
pkg_version = ""
# The package version is determined by parsing the C source file. This
# ensures the version is only ever specified in a single place.
File.open(File.join("ext", "sqlanywhere.c") ) do |f|
  f.grep( /const char\* VERSION/ ) do |line|
    pkg_version = /\s*const char\* VERSION\s*=\s*["|']([^"']*)["|'];\s*/.match(line)[1]
  end
end

Gem::Specification.new do |gem|
  gem.authors = ["<NAME>"]
  gem.email = '<EMAIL>'
  gem.name = 'sqlanywhere'
  gem.summary = 'SQL Anywhere library for Ruby'
  gem.description = <<-EOF
    SQL Anywhere Driver for Ruby
  EOF
  gem.version = pkg_version
  #gem.autorequire = 'sqlanywhere'
  # NOTE(review): has_rdoc and rubyforge_project are deprecated/removed in
  # modern RubyGems — verify the targeted RubyGems version still accepts them.
  gem.has_rdoc = true
  gem.rubyforge_project = 'sqlanywhere'
  gem.homepage = 'http://sqlanywhere.rubyforge.org'
  # Platform-specific gem: it ships the compiled C extension.
  gem.platform = Gem::Platform::CURRENT
  gem.required_ruby_version = '>= 1.8.6'
  gem.require_paths = ['lib']
  gem.test_file = 'test/sqlanywhere_test.rb'
  gem.rdoc_options << '--title' << 'SQL Anywhere Ruby Driver' <<
    '--main' << 'README' <<
    '--line-numbers'
  gem.extra_rdoc_files = ['README', 'CHANGELOG', 'LICENSE', 'ext/sqlanywhere.c']
end
|
royratcliffe/sqlanywhere
|
test/sqlanywhere_test.rb
|
#====================================================
#
# Copyright 2008-2010 iAnywhere Solutions, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# While not a requirement of the license, if you do modify this file, we
# would appreciate hearing about it. Please email <EMAIL>
#
#
#====================================================
require 'test/unit'
require 'date'
# Load the driver through RubyGems unless it is already present (e.g. when
# required by a harness). The begin/end wrapper has no rescue clause and
# serves only as grouping.
begin
  require 'rubygems'
  unless defined? SQLAnywhere
    require 'sqlanywhere'
  end
end
# Column-type codes as reported by the SQL Anywhere C API; the tests below
# compare these against the values returned by sqlany_get_column_info.
class Types
  A_INVALID_TYPE = 0  # sentinel for an unrecognised type
  A_BINARY       = 1
  A_STRING       = 2
  A_DOUBLE       = 3
  A_VAL64        = 4  # signed 64-bit integer
  A_UVAL64       = 5  # unsigned 64-bit integer
  A_VAL32        = 6
  A_UVAL32       = 7
  A_VAL16        = 8
  A_UVAL16       = 9
  A_VAL8         = 10
  A_UVAL8        = 11
end
# Parameter-binding direction codes from the SQL Anywhere C API, used when
# inspecting bind parameters via sqlany_describe_bind_param.
class Direction
  DD_INVALID      = 0
  DD_INPUT        = 1
  DD_OUTPUT       = 2
  DD_INPUT_OUTPUT = 3
end
# Exercises the low-level SQLAnywhere::SQLAnywhereInterface bindings against
# a live SQL Anywhere "test" database. Requires the driver's `test` and
# `types` test tables; several assertions are skipped on IQ servers, which
# lack an unsigned smallint datatype.
class SQLAnywhere_Test < Test::Unit::TestCase
  # Initializes the C interface and opens a connection used by every test.
  def setup
    @api = SQLAnywhere::SQLAnywhereInterface.new()
    assert_not_nil @api
    assert_nothing_raised do
      SQLAnywhere::API.sqlany_initialize_interface( @api )
    end
    assert_nothing_raised do
      @api.sqlany_init()
    end
    @conn = @api.sqlany_new_connection()
    assert_not_nil @conn
    # NOTE(review): "<PASSWORD>" looks like a redacted placeholder — restore
    # the real test credentials before running.
    conn_str = "eng=test;uid=dba;pwd=<PASSWORD>"
    assert_succeeded @api.sqlany_connect(@conn, conn_str)
  end

  # Verifies the connection is still live, disconnects, verifies it is dead,
  # then finalizes the interface.
  def teardown
    assert_succeeded @api.sqlany_execute_immediate(@conn, 'SELECT * FROM dummy')
    assert_nil @api.sqlany_disconnect(@conn)
    assert_failed @api.sqlany_execute_immediate(@conn, 'SELECT * FROM dummy')
    assert_nil @api.sqlany_free_connection(@conn)
    assert_nothing_raised do
      @api.sqlany_fini()
    end
    assert_nothing_raised do
      SQLAnywhere::API.sqlany_finalize_interface( @api )
    end
  end

  def test_execute_immediate
    assert_succeeded @api.sqlany_execute_immediate(@conn, 'SELECT * FROM dummy')
  end

  # Forces a type-conversion failure and checks error code / SQLSTATE
  # retrieval and clearing.
  def test_errors
    sql = "INSERT INTO test(\"id\") VALUES('test');"
    assert_failed @api.sqlany_execute_immediate(@conn, sql)
    code, msg = @api.sqlany_error(@conn)
    assert_equal -157, code
    assert_not_equal "", msg
    assert_equal "53018\000", @api.sqlany_sqlstate(@conn)
    assert_nil @api.sqlany_clear_error(@conn)
    code, msg = @api.sqlany_error(@conn)
    assert_equal 0, code
    assert_equal "", msg
  end

  def test_rollback
    id = setup_transaction
    @api.sqlany_rollback(@conn)
    sql = "SELECT * FROM test where \"id\" = " + id.to_s + ";"
    rs = exec_direct_with_test(sql)
    assert_failed @api.sqlany_fetch_next(rs)
  end

  def test_commit
    id = setup_transaction
    @api.sqlany_commit(@conn)
    sql = "SELECT * FROM test where \"id\" = " + id.to_s + ";"
    rs = exec_direct_with_test(sql)
    assert_succeeded @api.sqlany_fetch_next(rs)
    res, ret_id = @api.sqlany_get_column(rs, 0)
    assert_succeeded res
    assert_not_nil ret_id
    assert_equal id, ret_id
    assert_failed @api.sqlany_fetch_next(rs)
  end

  # Checks name, type code (see Types) and size of every column of `types`.
  def test_column_info
    is_iq = is_iq_table?("types")
    rs = exec_direct_with_test("SELECT TOP 2 * FROM \"types\" ORDER BY \"id\"")
    assert_equal 22, @api.sqlany_num_cols(rs)
    assert_column_info(rs, 0, "id", Types::A_VAL32, 4)
    assert_column_info(rs, 1, "_binary_", Types::A_BINARY, 8)
    assert_column_info(rs, 2, "_numeric_", Types::A_STRING, 2)
    assert_column_info(rs, 3, "_decimal_", Types::A_STRING, 2)
    assert_column_info(rs, 4, "_bounded_string_", Types::A_STRING, 255)
    assert_column_info(rs, 5, "_unbounded_string_", Types::A_STRING, (2 * (2**30)) - 1)
    assert_column_info(rs, 6, "_signed_bigint_", Types::A_VAL64, 8)
    assert_column_info(rs, 7, "_unsigned_bigint_", Types::A_UVAL64, 8)
    assert_column_info(rs, 8, "_signed_int_", Types::A_VAL32, 4)
    assert_column_info(rs, 9, "_unsigned_int_", Types::A_UVAL32, 4)
    assert_column_info(rs, 10, "_signed_smallint_", Types::A_VAL16, 2)
    assert_column_info(rs, 11, "_unsigned_smallint_", Types::A_UVAL16, 2) unless is_iq #IQ Does not have an unsigned small int datatype
    assert_column_info(rs, 12, "_signed_tinyint_", Types::A_UVAL8, 1)
    assert_column_info(rs, 13, "_unsigned_tinyint_", Types::A_UVAL8, 1)
    assert_column_info(rs, 14, "_bit_", Types::A_VAL8, 1)
    assert_column_info(rs, 15, "_date_", Types::A_STRING, 10)
    assert_column_info(rs, 16, "_datetime_", Types::A_STRING, 23)
    assert_column_info(rs, 17, "_smalldatetime_", Types::A_STRING, 23)
    assert_column_info(rs, 18, "_timestamp_", Types::A_STRING, 23)
    assert_column_info(rs, 19, "_double_", Types::A_DOUBLE, 8)
    assert_column_info(rs, 20, "_float_", Types::A_DOUBLE, 4)
    assert_column_info(rs, 21, "_real_", Types::A_DOUBLE, 4)
    assert_nil @api.sqlany_free_stmt(rs)
  end

  # Row 1 of `types` holds maximum values for each column, row 2 holds
  # minimum/empty values; both are checked for class and value.
  def test_bounds_on_types
    is_iq = is_iq_table?("types")
    rs = exec_direct_with_test("SELECT TOP 2 * FROM \"types\" ORDER BY \"id\"")
    assert_succeeded @api.sqlany_fetch_next(rs)
    assert_class_and_value(rs, String, 1, "x")
    assert_class_and_value(rs, String, 2, "1.1")
    assert_class_and_value(rs, String, 3, "1.1")
    assert_class_and_value(rs, String, 4, 'Bounded String Test')
    assert_class_and_value(rs, String, 5, 'Unbounded String Test')
    assert_class_and_value(rs, Bignum, 6, 9223372036854775807)
    assert_class_and_value(rs, Bignum, 7, 18446744073709551615)
    assert_class_and_value(rs, Bignum, 8, 2147483647)
    assert_class_and_value(rs, Bignum, 9, 4294967295)
    assert_class_and_value(rs, Fixnum, 10, 32767)
    assert_class_and_value(rs, Fixnum, 11, 65535) unless is_iq #IQ Does not have an unsigned small int datatype
    assert_class_and_value(rs, Fixnum, 12, 255)
    assert_class_and_value(rs, Fixnum, 13, 255)
    assert_class_and_value(rs, Fixnum, 14, 1)
    assert_date_and_time(rs, Date, 15, Date.new(1999, 1, 2))
    assert_date_and_time(rs, DateTime, 16, DateTime.new(1999, 1, 2, 21, 20, 53))
    assert_date_and_time(rs, DateTime, 17, DateTime.new(1999, 1, 2, 21, 20, 53))
    assert_date_and_time(rs, DateTime, 18, DateTime.new(1999, 1, 2, 21, 20, 53))
    assert_class_and_float_value(rs, Float, 19, 1.79769313486231e+308, 1e+293 )
    assert_class_and_float_value(rs, Float, 20, 3.402823e+38, 1e+32 )
    assert_class_and_float_value(rs, Float, 21, 3.402823e+38, 1e+32 )
    assert_succeeded @api.sqlany_fetch_next(rs)
    assert_class_and_value(rs, String, 1, 255.chr)
    assert_class_and_value(rs, String, 2, "-1.1")
    assert_class_and_value(rs, String, 3, "-1.1")
    assert_class_and_value(rs, String, 4, '')
    assert_class_and_value(rs, String, 5, '')
    assert_class_and_value(rs, Bignum, 6, -9223372036854775808)
    assert_class_and_value(rs, Fixnum, 7, 0)
    assert_class_and_value(rs, Bignum, 8, -2147483648)
    assert_class_and_value(rs, Fixnum, 9, 0)
    assert_class_and_value(rs, Fixnum, 10, -32768)
    assert_class_and_value(rs, Fixnum, 11, 0) unless is_iq #IQ Does not have an unsigned small int datatype
    assert_class_and_value(rs, Fixnum, 12, 0)
    assert_class_and_value(rs, Fixnum, 13, 0)
    assert_class_and_value(rs, Fixnum, 14, 0)
    assert_class_and_value(rs, NilClass, 15, nil)
    assert_class_and_value(rs, NilClass, 16, nil)
    assert_class_and_value(rs, NilClass, 17, nil)
    assert_class_and_value(rs, NilClass, 18, nil)
    assert_class_and_float_value(rs, Float, 19, -1.79769313486231e+308, 1e+293 )
    assert_class_and_float_value(rs, Float, 20, -3.402823e+38, 1e+32 )
    assert_class_and_float_value(rs, Float, 21, -3.402823e+38, 1e+32 )
    assert_nil @api.sqlany_free_stmt(rs)
  end

  # Prepares a parameterized SELECT, binds by position and re-executes with
  # different bound values.
  def test_prepared_stmt
    is_iq = is_iq_table?("types")
    stmt = @api.sqlany_prepare(@conn, "SELECT * FROM \"types\" WHERE \"id\" = ?")
    assert_not_nil stmt
    assert_failed @api.sqlany_execute(stmt) unless is_iq #IQ does not throw an error here
    assert_equal 1, @api.sqlany_num_params(stmt)
    res, param = @api.sqlany_describe_bind_param(stmt, 0)
    assert_not_equal 0, res
    assert_equal "?", param.get_name()
    assert_equal Direction::DD_INPUT, param.get_direction()
    assert_nil param.set_value(0);
    @api.sqlany_bind_param(stmt, 0, param)
    assert_succeeded @api.sqlany_execute(stmt)
    assert_succeeded @api.sqlany_fetch_next(stmt)
    assert_class_and_value(stmt, String, 4, "Bounded String Test")
    assert_nil param.set_value(1);
    @api.sqlany_bind_param(stmt, 0, param)
    assert_succeeded @api.sqlany_execute(stmt)
    assert_succeeded @api.sqlany_fetch_next(stmt)
    assert_class_and_value(stmt, String, 4, "")
    assert_nil @api.sqlany_free_stmt(stmt)
  end

  # --- per-column insert round-trip tests (delegate to assert_insert) -----

  def test_insert_binary
    assert_insert("_binary_", "x", String)
  end

  def test_insert_numeric
    assert_insert("_numeric_", "1.1", String)
  end

  def test_insert_decimal
    assert_insert("_decimal_", "1.1", String)
  end

  def test_insert_bounded_string
    assert_insert("_bounded_string_", "Bounded String Test", String)
  end

  def test_insert_unbounded_string
    assert_insert("_unbounded_string_", "Unbounded String Test", String)
  end

  def test_insert_int64
    assert_insert("_signed_bigint_", 9223372036854775807, Bignum)
    assert_insert("_signed_bigint_", -9223372036854775808, Bignum)
  end

  def test_insert_uint64
    assert_insert("_unsigned_bigint_", 9223372036854775807, Bignum)
    assert_insert("_unsigned_bigint_", 0, Fixnum)
  end

  def test_insert_int32
    assert_insert("_signed_int_", 2147483647, Bignum)
    assert_insert("_signed_int_", -2147483648, Bignum)
  end

  def test_insert_uint32
    assert_insert("_unsigned_int_", 4294967295, Bignum)
    assert_insert("_unsigned_int_", 0, Fixnum)
  end

  def test_insert_int16
    assert_insert("_signed_smallint_", 32767, Fixnum)
    assert_insert("_signed_smallint_", -32768, Fixnum)
  end

  def test_insert_uint16
    is_iq = is_iq_table?("types") #IQ Does not have an unsigned small int datatype
    assert_insert("_unsigned_smallint_", 65535, Fixnum) unless is_iq
    assert_insert("_unsigned_smallint_", 0, Fixnum) unless is_iq
  end

  # NOTE(review): this targets _signed_smallint_, not _signed_tinyint_ —
  # looks like a copy/paste from test_insert_int16; confirm intent.
  def test_insert_int8
    assert_insert("_signed_smallint_", 255, Fixnum)
    assert_insert("_signed_smallint_", 0, Fixnum)
  end

  # NOTE(review): targets _unsigned_smallint_, not _unsigned_tinyint_ —
  # likely the same copy/paste; confirm intent.
  def test_insert_uint8
    is_iq = is_iq_table?("types") #IQ Does not have an unsigned small int datatype
    assert_insert("_unsigned_smallint_", 255, Fixnum) unless is_iq
    assert_insert("_unsigned_smallint_", 0, Fixnum) unless is_iq
  end

  def test_insert_date
    assert_insert("_date_", Date.new(1999, 1, 2), Date)
  end

  def test_insert_datetime
    assert_insert("_datetime_", DateTime.new(1999, 1, 2, 21, 20, 53), DateTime)
  end

  def test_insert_smalldate
    assert_insert("_smalldatetime_", DateTime.new(1999, 1, 2, 21, 20, 53), DateTime)
  end

  def test_insert_timestamp
    assert_insert("_timestamp_", DateTime.new(1999, 1, 2, 21, 20, 53), DateTime)
  end

  def test_insert_double
    assert_insert("_double_", 1.79769313486231e+308, Float, 1e+293)
  end

  def test_insert_float
    assert_insert("_float_", 3.402823e+38, Float, 1e+32)
  end

  def test_insert_real
    assert_insert("_real_", 3.402823e+38, Float, 1e+32)
  end

  # --- helpers ------------------------------------------------------------

  # True when the named table lives on a Sybase IQ server (per SYSTABLE).
  def is_iq_table?(table_name)
    rs = @api.sqlany_execute_direct(@conn, "SELECT server_type FROM SYS.SYSTABLE WHERE table_name = '#{table_name}'")
    @api.sqlany_fetch_next(rs)
    return @api.sqlany_get_column(rs, 0)[1] == 'IQ'
  end

  # Inserts `value` into the given column of row id=3 via a bound parameter,
  # reads it back, asserts class/value (within `delta` for floats), then
  # rolls back so the table is left unchanged.
  def assert_insert(column_name, value, type, delta = nil)
    stmt = @api.sqlany_prepare(@conn, 'INSERT INTO "types"("id", "' + column_name + '", "_bit_") VALUES(3, ?, 1)')
    assert_not_nil stmt
    res, param = @api.sqlany_describe_bind_param(stmt, 0)
    if type == Date or type == DateTime then
      assert_nil param.set_value(value.strftime("%F %T"));
    else
      assert_nil param.set_value(value);
    end
    @api.sqlany_bind_param(stmt, 0, param)
    assert_succeeded @api.sqlany_execute(stmt)
    assert_nil @api.sqlany_free_stmt(stmt)
    rs = exec_direct_with_test('SELECT "' + column_name + '" FROM "types" WHERE "id" = 3')
    assert_succeeded @api.sqlany_fetch_next(rs)
    if type == Date or type == DateTime then
      assert_date_and_time(rs, type, 0, value)
    elsif type == Float
      assert_class_and_float_value(rs, type, 0, value, delta)
    else
      assert_class_and_value(rs, type, 0, value)
    end
    assert_nil @api.sqlany_free_stmt(rs)
    @api.sqlany_rollback(@conn)
  end

  def assert_column_info(rs, pos, expected_col_name, expected_col_type, expected_col_size)
    res, col_num, col_name, col_type, col_native_type, col_precision, col_scale, col_size, col_nullable = @api.sqlany_get_column_info(rs, pos);
    assert_succeeded res
    assert_equal expected_col_name, col_name
    assert_equal expected_col_type, col_type
    assert_equal expected_col_size, col_size
  end

  def assert_class_and_float_value(rs, cl, pos, expected_value, allowed_delta)
    res, val = @api.sqlany_get_column(rs, pos)
    assert_succeeded res
    assert_not_nil val unless expected_value.nil?
    assert_in_delta expected_value, val, allowed_delta
    assert_instance_of cl, val
  end

  # Date/time columns come back as strings; parse then compare.
  def assert_date_and_time(rs, cl, pos, expected_value)
    res, val = @api.sqlany_get_column(rs, pos)
    assert_succeeded res
    assert_not_nil val unless expected_value.nil?
    parsed = cl.parse(val)
    assert_equal expected_value, parsed
    assert_instance_of cl, parsed
  end

  def assert_class_and_value(rs, cl, pos, expected_value)
    res, val = @api.sqlany_get_column(rs, pos)
    assert_succeeded res
    assert_not_nil val unless expected_value.nil?
    assert_equal expected_value, val
    assert_instance_of cl, val
  end

  # Inserts a default row into `test`, verifies it is selectable, and
  # returns its generated id (used by the commit/rollback tests).
  def setup_transaction
    sql = "INSERT INTO test VALUES( DEFAULT );"
    assert_succeeded @api.sqlany_execute_immediate(@conn, sql)
    rs = exec_direct_with_test("SELECT @@identity")
    assert_succeeded @api.sqlany_fetch_next(rs)
    res, id = @api.sqlany_get_column(rs, 0)
    assert_succeeded res
    assert_not_nil id
    sql = "SELECT * FROM test where \"id\" = " + id.to_s + ";"
    rs = @api.sqlany_execute_direct(@conn, sql)
    assert_not_nil rs
    assert_succeeded @api.sqlany_fetch_next(rs)
    assert_failed @api.sqlany_fetch_next(rs)
    assert_nil @api.sqlany_free_stmt(rs)
    id
  end

  # Executes SQL directly, failing with the connection's error details when
  # no result set is produced.
  def exec_direct_with_test(sql)
    rs = @api.sqlany_execute_direct(@conn, sql)
    code, msg = @api.sqlany_error(@conn)
    assert_not_nil rs, "SQL Code: #{code}; Message: #{msg}"
    rs
  end

  # API calls signal success with a non-zero return value.
  def assert_succeeded(val)
    assert_not_equal 0, val, @api.sqlany_error(@conn)
  end

  def assert_failed(val)
    assert_equal 0, val, @api.sqlany_error(@conn)
  end
end
|
centrevillage/letter_opener
|
lib/letter_opener/message.rb
|
<gh_stars>0
require "cgi"
require "erb"
require "fileutils"
require "uri"
module LetterOpener
  # Renders a captured Mail::Message (or one of its MIME parts) to an HTML
  # file on disk, extracting any attachments alongside it.
  class Message
    attr_reader :mail

    # Builds one Message per renderable part (html and/or text, or the whole
    # mail when it has neither), renders each to disk, and returns them
    # sorted rich-first (see #<=>).
    def self.rendered_messages(mail, options = {})
      messages = []
      messages << new(mail, options.merge(part: mail.html_part)) if mail.html_part
      messages << new(mail, options.merge(part: mail.text_part)) if mail.text_part
      messages << new(mail, options) if messages.empty?
      messages.each(&:render)
      messages.sort
    end

    ERROR_MSG = '%s or default configuration must be given'.freeze

    # options: :location (output directory), :part (MIME part to render),
    # :message_template (template name). Location and template fall back to
    # the global LetterOpener configuration; missing either raises
    # ArgumentError.
    def initialize(mail, options = {})
      @mail = mail
      @location = options[:location] || LetterOpener.configuration.location
      @part = options[:part]
      @template = options[:message_template] || LetterOpener.configuration.message_template
      @attachments = []
      raise ArgumentError, ERROR_MSG % 'options[:location]' unless @location
      raise ArgumentError, ERROR_MSG % 'options[:message_template]' unless @template
    end

    # Writes attachments (with sanitised filenames) into
    # <location>/attachments and the rendered HTML into #filepath.
    def render
      FileUtils.mkdir_p(@location)
      if mail.attachments.any?
        attachments_dir = File.join(@location, 'attachments')
        FileUtils.mkdir_p(attachments_dir)
        mail.attachments.each do |attachment|
          filename = attachment_filename(attachment)
          path = File.join(attachments_dir, filename)
          unless File.exist?(path) # true if other parts have already been rendered
            File.open(path, 'wb') { |f| f.write(attachment.body.raw_source) }
          end
          @attachments << [attachment.filename, "attachments/#{CGI.escape(filename)}"]
        end
      end
      # The ERB template reads this object's state via `binding`.
      File.open(filepath, 'w') do |f|
        f.write ERB.new(template).result(binding)
      end
    end

    # Raw ERB source for the configured message template.
    def template
      File.read(File.expand_path("../templates/#{@template}.html.erb", __FILE__))
    end

    # Output path: <location>/rich.html or <location>/plain.html.
    def filepath
      File.join(@location, "#{type}.html")
    end

    def content_type
      @part && @part.content_type || @mail.content_type
    end

    # Decoded body with attachment URLs rewritten to the extracted files.
    # Memoized.
    def body
      @body ||= begin
        body = (@part || @mail).decoded
        mail.attachments.each do |attachment|
          body.gsub!(attachment.url, "attachments/#{attachment_filename(attachment)}")
        end
        body
      end
    end

    # --- header accessors: comma-joined, memoized --------------------------

    def from
      @from ||= Array(@mail['from']).join(", ")
    end

    def sender
      @sender ||= Array(@mail['sender']).join(", ")
    end

    def to
      @to ||= Array(@mail['to']).join(", ")
    end

    def cc
      @cc ||= Array(@mail['cc']).join(", ")
    end

    def bcc
      @bcc ||= Array(@mail['bcc']).join(", ")
    end

    def reply_to
      @reply_to ||= Array(@mail['reply-to']).join(", ")
    end

    # "rich" for HTML content, "plain" otherwise; also names the output file.
    def type
      content_type =~ /html/ ? "rich" : "plain"
    end

    def encoding
      body.respond_to?(:encoding) ? body.encoding : "utf-8"
    end

    # Wraps http(s) URLs in the text with anchor tags.
    def auto_link(text)
      text.gsub(URI::Parser.new.make_regexp(%W[https http])) do |link|
        "<a href=\"#{ link }\">#{ link }</a>"
      end
    end

    # HTML-escape helper for templates.
    def h(content)
      CGI.escapeHTML(content)
    end

    # Replaces filesystem-unsafe characters in an attachment's filename.
    def attachment_filename(attachment)
      attachment.filename.gsub(/[^\w\-_.]/, '_')
    end

    # Orders rich parts before plain parts.
    def <=>(other)
      order = %w[rich plain]
      order.index(type) <=> order.index(other.type)
    end
  end
end
|
flowli/homebrew-yt2nas
|
Formula/yt2nas.rb
|
# Homebrew formula installing the yt2nas helper script.
class Yt2nas < Formula
  # Fixed typo in the user-visible description: "Downloads at" -> "Downloads a".
  desc "Downloads a YT video and uploads it to the NAS"
  homepage "https://arweb.de"
  version "0.1"
  # :using => :curl forces a plain curl download of the raw GitHub archive.
  url "https://github.com/flowli/homebrew-yt2nas/raw/master/yt2nas/yt2nas.zip", :using => :curl

  def install
    bin.install "yt2nas"
  end
end
|
masakazutakewaka/exif_csv
|
spec/spec_helper.rb
|
# frozen_string_literal: true

# Make spec/ requirable so `require 'cli_helper'` below resolves.
$LOAD_PATH << File.expand_path(__dir__)

require 'tempfile'
require 'open3'
require 'cli_helper'

RSpec.configure do |config|
  #...
end
|
masakazutakewaka/exif_csv
|
spec/cli/exif_csv_spec.rb
|
# frozen_string_literal: true

# Directory of fixture images used by the CLI integration specs.
TEST_IMG_PATH = File.expand_path('../img', __dir__)

# End-to-end specs: each example runs the exif_csv executable in a
# subprocess via CLIHelper#cli_run and inspects its output/exit status.
describe 'exif_csv' do
  context 'when no image was found' do
    specify do
      img = TEST_IMG_PATH + '/wrong_path'
      output, status = cli_run([img])
      expect(status.success?).to be false
      expect(output).to match /No image was found in/
    end
  end

  context 'when successfully csv was omitted' do
    specify do
      img = TEST_IMG_PATH
      # --dry-run prints the CSV rows (path, lat, lon) instead of writing a file.
      output, status = cli_run([img, '--dry-run'])
      expect(status.success?).to be true
      expect(output).to match /#{TEST_IMG_PATH}\/exif.jpg,42.75110694444444,-84.49125222222222/
    end
  end
end
|
masakazutakewaka/exif_csv
|
spec/cli_helper.rb
|
<filename>spec/cli_helper.rb
# Spec support: runs the exif_csv executable in a subprocess and captures
# its output.
module CLIHelper
  # args: array of CLI argument strings.
  # Returns [combined_stdout_stderr, Process::Status].
  def cli_run(args)
    path = File.expand_path('../exe/exif_csv', __dir__)
    # Copy the executable into a .rb tempfile so it can be run with `ruby`
    # regardless of the exe's shebang or permissions.
    Tempfile.open(['exif_csv', '.rb']) do |f|
      f.puts(File.read(path))
      f.flush
      cmd = ([:ruby, f.path] + args).join(' ')
      Open3.capture2e(cmd)
    end
  end
end

# Mix into the top level so specs can call cli_run directly.
include CLIHelper
|
juanluis-garrote/octokit.rb
|
spec/octokit/client/search_spec.rb
|
<filename>spec/octokit/client/search_spec.rb
require 'helper'
describe Octokit::Client::Search do
before do
Octokit.reset!
@client = oauth_client
end
describe ".search_code" do
it "searches code", :vcr do
results = @client.search_code 'code user:github in:file extension:gemspec -repo:octokit/octokit.rb', \
:sort => 'indexed',
:order => 'asc'
assert_requested :get, github_url('/search/code?q=code%20user:github%20in:file%20extension:gemspec%20-repo:octokit/octokit.rb&sort=indexed&order=asc')
expect(results.total_count).to be_kind_of Integer
expect(results.items).to be_kind_of Array
end
end # .search_code
describe ".search_commits" do
it "searches commits", :vcr do
results = @client.search_commits 'repo:octokit/octokit.rb author:jasonrudolph', \
:sort => 'author-date',
:order => 'asc',
:accept => preview_header
assert_requested :get, github_url('/search/commits?q=repo:octokit/octokit.rb%20author:jasonrudolph&sort=author-date&order=asc')
expect(results.total_count).to be_kind_of Integer
expect(results.items).to be_kind_of Array
end
end # .search_commits
describe ".search_issues" do
it "searches issues", :vcr do
results = @client.search_issues 'http author:jasonrudolph', \
:sort => 'created',
:order => 'desc'
assert_requested :get, github_url('/search/issues?q=http%20author:jasonrudolph&sort=created&order=desc')
expect(results.total_count).to be_kind_of Integer
expect(results.items).to be_kind_of Array
end
end # .search_issues
describe ".search_repositories" do
it "searches repositories", :vcr do
results = @client.search_repositories 'games language:assembly', \
:sort => 'stars',
:order => 'desc'
assert_requested :get, github_url('/search/repositories?q=games%20language:assembly&sort=stars&order=desc')
expect(results.total_count).to be_kind_of Integer
expect(results.items).to be_kind_of Array
end
end # .search_topics
describe ".search_topics" do
it "searches topics", :vcr do
results = @client.search_topics 'tetris language:assembly', \
:sort => 'stars',
:order => 'desc'
assert_requested :get, github_url('/search/topics?q=tetris%20language:assembly&sort=stars&order=desc')
expect(results.total_count).to be_kind_of Integer
expect(results.items).to be_kind_of Array
end
end # .search_repositories
describe ".search_users" do
it "searches users", :vcr do
results = @client.search_users 'mike followers:>10', \
:sort => 'joined',
:order => 'desc'
assert_requested :get, github_url('/search/users?q=mike%20followers:%3E10&sort=joined&order=desc')
expect(results.total_count).to be_kind_of Integer
expect(results.items).to be_kind_of Array
end
it "utilizes auto_pagination", :vcr do
@client.auto_paginate = true
results = @client.search_users 'user:pengwynn user:defunkt', :per_page => 1
expect(results.total_count).to eq(2)
expect(results.items.length).to eq(2)
end
end # .search_users
def preview_header
Octokit::Preview::PREVIEW_TYPES[:commit_search]
end
end
|
DevMakerMobileApps/devmaker-contracts
|
config/routes.rb
|
DevmakerContracts::Engine.routes.draw do
  # Private admin CRUD (no show — public display is slug-based below).
  root to: "private_contracts#index", as: :private_contracts
  resources :contracts, controller: "private_contracts", except: [:show]
  # Public display of a contract by its slug.
  get ":slug" => "public_contracts#show", as: :show_contract
end
|
DevMakerMobileApps/devmaker-contracts
|
test/dummy/config/initializers/devmaker_contracts.rb
|
<reponame>DevMakerMobileApps/devmaker-contracts
# the private controller used to edit the contracts
DevmakerContracts.private_controller = "PrivateController"
# the public controller used to display the contracts (defaults to ApplicationController)
# DevmakerContracts.public_controller = "PublicController"
|
DevMakerMobileApps/devmaker-contracts
|
test/dummy/config/routes.rb
|
Rails.application.routes.draw do
  # Mount the engine under test plus two host-app controllers used as
  # inheritance targets in the dummy app.
  mount DevmakerContracts::Engine => "/devmaker_contracts"
  get "/public" => "public#index"
  get "/private" => "private#index"
end
|
DevMakerMobileApps/devmaker-contracts
|
devmaker_contracts.gemspec
|
$:.push File.expand_path("lib", __dir__)

# Maintain your gem's version:
require "devmaker_contracts/version"

# Describe your gem and declare its dependencies:
Gem::Specification.new do |spec|
  spec.name        = "devmaker_contracts"
  spec.version     = DevmakerContracts::VERSION
  spec.authors     = ["<NAME>"]
  spec.email       = ["<EMAIL>"]
  spec.homepage    = "http://www.devmaker.com.br"
  spec.summary     = "Engine to add the contracts handling in DevMaker Rails projects"
  spec.description = "It adds a private admin CRUD of contracts and a public display of its HTML. Usefull for Terms of Services and Privacy Policies"
  spec.license     = "MIT"

  # Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
  # to allow pushing to a single host or delete this section to allow pushing to any host.
  # NOTE(review): the TODO placeholder below effectively blocks `gem push`
  # until a real host is configured.
  if spec.respond_to?(:metadata)
    spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"
  else
    raise "RubyGems 2.0 or newer is required to protect against " \
      "public gem pushes."
  end

  spec.files = Dir["{app,config,db,lib}/**/*", "MIT-LICENSE", "Rakefile", "README.md"]

  spec.add_dependency "rails", "> 5.0"

  spec.add_development_dependency "sqlite3"
end
|
DevMakerMobileApps/devmaker-contracts
|
test/dummy/app/controllers/private_controller.rb
|
# Dummy-app controller used as the engine's private (admin) base class;
# guards access with HTTP basic auth.
class PrivateController < ApplicationController
  http_basic_authenticate_with name: "admin", password: "<PASSWORD>"

  def index
  end
end
|
DevMakerMobileApps/devmaker-contracts
|
app/models/devmaker_contracts/contract.rb
|
module DevmakerContracts
  # A legal contract (e.g. ToS, privacy policy) with a unique URL slug and
  # raw HTML content.
  class Contract < ApplicationRecord
    validates :name, presence: true
    # Case-insensitive substring search over name/slug; no-op (all records)
    # when the query is blank.
    # NOTE(review): `ilike` is PostgreSQL-specific — confirm host apps
    # always run on Postgres.
    scope :search_for, -> (string) do
      s = "%#{string}%"
      where("name ilike ? or slug ilike ?", s, s) if string.present?
    end
    validates :slug, uniqueness: true
  end
end
|
DevMakerMobileApps/devmaker-contracts
|
lib/devmaker_contracts.rb
|
require "devmaker_contracts/engine"

module DevmakerContracts
  # Host-app configuration: controller *names* (strings) the engine's public
  # and private controllers inherit from. The explicit readers below shadow
  # the mattr_accessor getters so they constantize and default to
  # ApplicationController.
  mattr_accessor :public_controller
  def self.public_controller
    (@@public_controller || "ApplicationController").constantize
  end

  mattr_accessor :private_controller
  def self.private_controller
    (@@private_controller || "ApplicationController").constantize
  end
end
|
DevMakerMobileApps/devmaker-contracts
|
app/controllers/devmaker_contracts/public_contracts_controller.rb
|
module DevmakerContracts
  # Public-facing display of a contract, looked up by its URL slug.
  # Inherits from the host-app-configurable public base controller.
  class PublicContractsController < DevmakerContracts.public_controller
    # GET /:slug — renders the contract's HTML without the engine layout;
    # responds 404 when no contract matches.
    def show
      @contract = DevmakerContracts::Contract.find_by slug: params[:slug]
      # Bug fix: without `return`, execution fell through to `render` after
      # `head`, raising AbstractController::DoubleRenderError for unknown slugs.
      return head(:not_found) unless @contract
      render :show, layout: false
    end
  end
end
|
DevMakerMobileApps/devmaker-contracts
|
lib/devmaker_contracts/engine.rb
|
<reponame>DevMakerMobileApps/devmaker-contracts<gh_stars>0
module DevmakerContracts
  # Rails engine entry point; isolates routes, models and helpers under the
  # DevmakerContracts namespace.
  class Engine < ::Rails::Engine
    isolate_namespace DevmakerContracts
  end
end
|
DevMakerMobileApps/devmaker-contracts
|
app/helpers/devmaker_contracts/private_contracts_helper.rb
|
<reponame>DevMakerMobileApps/devmaker-contracts<filename>app/helpers/devmaker_contracts/private_contracts_helper.rb
module DevmakerContracts
  # View helpers for the private contracts CRUD (currently empty).
  module PrivateContractsHelper
  end
end
|
DevMakerMobileApps/devmaker-contracts
|
app/controllers/devmaker_contracts/private_contracts_controller.rb
|
module DevmakerContracts
  # Admin CRUD for contracts; inherits from the host-app-configurable
  # private base controller (typically auth-guarded).
  class PrivateContractsController < DevmakerContracts.private_controller
    # NOTE(review): :show is listed here but the controller defines no show
    # action and the engine routes exclude it — harmless, but confirm.
    before_action :set_contract, only: [:show, :edit, :update, :destroy]

    # Lists contracts, optionally filtered by the ?q= search string.
    def index
      @contracts = DevmakerContracts::Contract.search_for(params[:q]).order(:id)
    end

    def new
      @contract = DevmakerContracts::Contract.new
    end

    def create
      @contract = DevmakerContracts::Contract.new(form_params)
      if @contract.save
        redirect_to contracts_path
      else
        render :new
      end
    end

    def edit
    end

    def update
      if @contract.update(form_params)
        redirect_to contracts_path
      else
        render :edit
      end
    end

    def destroy
      @contract.destroy
      flash[:success] = "Contrato removido."
      redirect_to contracts_path
    end

    private

    # Strong parameters for create/update.
    def form_params
      params.require(:contract).permit([:name, :slug, :content_html])
    end

    def set_contract
      @contract = DevmakerContracts::Contract.find(params[:id])
    end
  end
end
|
DevMakerMobileApps/devmaker-contracts
|
db/migrate/20190111164004_create_devmaker_contracts_contracts.rb
|
<reponame>DevMakerMobileApps/devmaker-contracts
# Creates the engine's contracts table: name, raw HTML content, and a
# uniquely-indexed slug used for public lookup.
class CreateDevmakerContractsContracts < ActiveRecord::Migration[5.2]
  def change
    create_table :devmaker_contracts_contracts do |t|
      t.text :name
      t.text :content_html
      t.text :slug
      t.index :slug, unique: true

      t.timestamps
    end
  end
end
|
DevMakerMobileApps/devmaker-contracts
|
app/helpers/devmaker_contracts/application_helper.rb
|
module DevmakerContracts
  # Engine-wide view helpers (currently empty).
  module ApplicationHelper
  end
end
|
datacite/cheetoh
|
app/controllers/index_controller.rb
|
# EZID-compatible API surface: this service is stateless, so the login
# endpoint exists only to reject session-based workflows explicitly.
class IndexController < ApplicationController
  # Always raises — clients must authenticate per-request instead.
  def login
    raise NotImplementedError, "one-time login and session cookies not supported by this service"
  end
end
|
datacite/cheetoh
|
spec/apis/show_spec.rb
|
require "rails_helper"
describe "show", :type => :api, vcr: true do
it "show doi and metadata" do
doi = "10.24354/n296wz12m"
get "/id/doi:#{doi}"
expect(last_response.status).to eq(200)
response = last_response.body
hsh = response.from_anvl
expect(hsh["success"]).to eq("doi:10.24354/n296wz12m")
expect(hsh["_updated"]).to eq("1541469669")
expect(hsh["_target"]).to eq("https://www.datacite.org")
expect(hsh["datacite"]).to start_with("<?xml version=\"1.0\"?>\n<resource xmlns=\"http://datacite.org/schema/kernel-4\"")
expect(hsh["_profile"]).to eq("datacite")
expect(hsh["_datacenter"]).to eq("DATACITE.TEST")
expect(hsh["_export"]).to eq("yes")
expect(hsh["_created"]).to eq("1512423789")
expect(hsh["_status"]).to eq("public")
end
it "bibtex format" do
doi = "10.24354/n296wz12m"
params = { "_profile" => "bibtex" }
get "/id/doi:#{doi}", params
expect(last_response.status).to eq(200)
response = last_response.body.from_anvl
expect(response["success"]).to eq("doi:10.24354/n296wz12m")
expect(response["_updated"]).to eq("1541469669")
expect(response["_target"]).to eq("https://www.datacite.org")
expect(response["bibtex"]).to start_with("@phdthesis{https://doi.org/10.24354/n296wz12m")
expect(response["_profile"]).to eq("bibtex")
expect(response["datacite"]).to be_nil
expect(response["_datacenter"]).to eq("DATACITE.TEST")
expect(response["_export"]).to eq("yes")
expect(response["_created"]).to eq("1512423789")
expect(response["_status"]).to eq("public")
end
it "schema.org format" do
doi = "10.24354/n296wz12m"
params = { "_profile" => "schema_org" }
get "/id/doi:#{doi}", params
expect(last_response.status).to eq(200)
response = last_response.body.from_anvl
output = JSON.parse(response["schema_org"])
expect(response["success"]).to eq("doi:10.24354/n296wz12m")
expect(response["_updated"]).to eq("1541469669")
expect(response["_target"]).to eq("https://www.datacite.org")
expect(response["_profile"]).to eq("schema_org")
expect(response["datacite"]).to be_nil
expect(response["_datacenter"]).to eq("DATACITE.TEST")
expect(response["_export"]).to eq("yes")
expect(response["_created"]).to eq("1512423789")
expect(response["_status"]).to eq("public")
expect(output["name"]).to eq("DOI Test")
expect(output["author"]).to eq("@type"=>"Person", "name"=>"<NAME>", "givenName"=>"Tom", "familyName"=>"Johnson")
end
it "missing valid doi parameter" do
doi = "20.5072/0000-03vc"
get "/id/doi:#{doi}"
expect(last_response.status).to eq(400)
expect(last_response.body).to eq("error: bad request - no such identifier")
end
it "doi not found" do
doi = "10.5072/bc11-cqw99"
get "/id/doi:#{doi}"
expect(last_response.status).to eq(400)
expect(last_response.body).to eq("error: bad request - no such identifier")
end
it "not found" do
get "/id/x"
expect(last_response.status).to eq(400)
expect(last_response.body).to eq("error: bad request - no such identifier")
end
end
|
datacite/cheetoh
|
spec/apis/reserved_spec.rb
|
<filename>spec/apis/reserved_spec.rb
require "rails_helper"
describe "reserved", :type => :api, vcr: true, :order => :defined do
let(:doi) { "10.5072/bc11-cqw9" }
let(:username) { ENV['MDS_USERNAME'] }
let(:password) { ENV['MDS_PASSWORD'] }
let(:headers) do
{ "HTTP_CONTENT_TYPE" => "text/plain",
"HTTP_AUTHORIZATION" => ActionController::HttpAuthentication::Basic.encode_credentials(username, password) }
end
it "create new reserved doi" do
params = { "_status" => "reserved" }.to_anvl
put "/id/doi:#{doi}", params, headers
expect(last_response.status).to eq(200)
response = last_response.body.from_anvl
expect(response["success"]).to eq("doi:10.5072/bc11-cqw9")
expect(response["_status"]).to eq("reserved")
end
it "show reserved doi" do
get "/id/doi:#{doi}"
expect(last_response.status).to eq(200)
response = last_response.body
hsh = response.from_anvl
expect(hsh["success"]).to eq("doi:10.5072/bc11-cqw9")
expect(hsh["_profile"]).to eq("datacite")
expect(hsh["_status"]).to eq("reserved")
end
it "delete doi and metadata" do
delete "/id/doi:#{doi}", nil, headers
expect(last_response.status).to eq(200)
response = last_response.body
hsh = response.from_anvl
expect(hsh["success"]).to eq("doi:10.5072/bc11-cqw9")
expect(hsh["_profile"]).to eq("datacite")
end
end
|
datacite/cheetoh
|
spec/apis/status_spec.rb
|
<reponame>datacite/cheetoh<gh_stars>0
require "rails_helper"
describe "status", :type => :api, vcr: true, :order => :defined do
let(:datacite) { File.read(file_fixture('10.5072_bc11-cqw8.xml')) }
let(:url) { "https://blog.datacite.org/differences-between-orcid-and-datacite-metadata/" }
let(:username) { ENV['MDS_USERNAME'] }
let(:password) { ENV['<PASSWORD>'] }
let(:headers) do
{ "HTTP_CONTENT_TYPE" => "text/plain",
"HTTP_AUTHORIZATION" => ActionController::HttpAuthentication::Basic.encode_credentials(username, password) }
end
context "test prefix" do
let(:doi) { "10.5072/bc11-cqw8" }
it "create status reserved" do
params = { "datacite" => datacite, "_target" => url, "_status" => "reserved" }.to_anvl
put "/id/doi:#{doi}", params, headers
expect(last_response.status).to eq(200)
response = last_response.body.from_anvl
expect(response["success"]).to eq("doi:10.5072/bc11-cqw8")
expect(response["datacite"]).to eq(datacite.strip)
expect(response["_target"]).to eq(url)
expect(response["_status"]).to eq("reserved")
end
it "status public" do
params = { "_status" => "public" }.to_anvl
post "/id/doi:#{doi}", params, headers
expect(last_response.status).to eq(200)
response = last_response.body.from_anvl
expect(response["success"]).to eq("doi:10.5072/bc11-cqw8")
expect(response["datacite"]).to eq(datacite.strip)
expect(response["_status"]).to eq("reserved")
end
it "status unavailable" do
params = { "_status" => "unavailable | withdrawn by author" }.to_anvl
post "/id/doi:#{doi}", params, headers
expect(last_response.status).to eq(200)
response = last_response.body.from_anvl
expect(response["success"]).to eq("doi:10.5072/bc11-cqw8")
expect(response["datacite"]).to eq(datacite.strip)
expect(response["_status"]).to eq("reserved")
end
it "delete doi and metadata" do
delete "/id/doi:#{doi}", nil, headers
expect(last_response.status).to eq(200)
response = last_response.body.from_anvl
expect(response["success"]).to eq("doi:10.5072/bc11-cqw8")
expect(response["datacite"]).to eq(datacite.strip)
expect(response["_target"]).to eq(url)
end
end
context "normal prefix" do
let(:doi) { "10.5438/bc11-cqw8" }
let(:datacite) { File.read(file_fixture('10.5438_bc11-cqw8.xml')) }
it "status public" do
params = { "_status" => "public" }.to_anvl
post "/id/doi:#{doi}", params, headers
expect(last_response.status).to eq(200)
response = last_response.body.from_anvl
expect(response["success"]).to eq("doi:10.5438/bc11-cqw8")
#expect(response["_target"]).to eq(url)
expect(response["_status"]).to eq("public")
doc = Nokogiri::XML(response["datacite"], nil, 'UTF-8', &:noblanks)
expect(doc.at_css("identifier").content).to eq("10.5438/BC11-CQW8")
end
it "status unavailable" do
params = { "_status" => "unavailable | withdrawn by author" }.to_anvl
post "/id/doi:#{doi}", params, headers
expect(last_response.status).to eq(200)
response = last_response.body.from_anvl
expect(response["success"]).to eq("doi:10.5438/bc11-cqw8")
#expect(response["_target"]).to eq(url)
expect(response["_status"]).to eq("unavailable | withdrawn by author")
doc = Nokogiri::XML(response["datacite"], nil, 'UTF-8', &:noblanks)
expect(doc.at_css("identifier").content).to eq("10.5438/BC11-CQW8")
end
end
context "status change" do
let(:doi) { "10.5438/bc11-cqw8" }
let(:datacite) { File.read(file_fixture('10.5438_bc11-cqw8.xml')) }
it "status unavailable" do
params = { "_status" => "ready", "datacite" => datacite }.to_anvl
params_update = { "_status" => "unavailable | withdrawn by magic", "datacite" => datacite }.to_anvl
post "/id/doi:#{doi}", params, headers
post "/id/doi:#{doi}", params_update, headers
expect(last_response.status).to eq(200)
response = last_response.body.from_anvl
expect(response["success"]).to eq("doi:10.5438/bc11-cqw8")
#expect(response["_target"]).to eq(url)
expect(response["_status"]).to eq("unavailable | withdrawn by magic")
doc = Nokogiri::XML(response["datacite"], nil, 'UTF-8', &:noblanks)
expect(doc.at_css("identifier").content).to eq("10.5438/BC11-CQW8")
end
it "status unavailable with reason" do
params = { "_status" => "unavailable | withdrawn by author", "datacite" => datacite }.to_anvl
params_update = { "_status" => "unavailable", "datacite" => datacite }.to_anvl
post "/id/doi:#{doi}", params, headers
post "/id/doi:#{doi}", params_update, headers
expect(last_response.status).to eq(200)
response = last_response.body.from_anvl
expect(response["success"]).to eq("doi:10.5438/bc11-cqw8")
#expect(response["_target"]).to eq(url)
expect(response["_status"]).to eq("unavailable")
doc = Nokogiri::XML(response["datacite"], nil, 'UTF-8', &:noblanks)
expect(doc.at_css("identifier").content).to eq("10.5438/BC11-CQW8")
end
end
end
|
datacite/cheetoh
|
spec/apis/compatibility_spec.rb
|
require "rails_helper"
describe "ezid compatibility", :type => :api, vcr: true do
let(:username) { ENV['MDS_USERNAME'] }
let(:password) { ENV['<PASSWORD>'] }
let(:headers) do
{ "HTTP_CONTENT_TYPE" => "text/plain",
"HTTP_AUTHORIZATION" => ActionController::HttpAuthentication::Basic.encode_credentials(username, password) }
end
context "ark identifiers" do
it "show ark identifier" do
get "/id/ark:/99999/fk4test", nil, headers
expect(last_response.body).to eq("error: ark identifiers are not supported by this service")
expect(last_response.status).to eq(501)
end
it "create ark identifier" do
put "/id/ark:/99999/fk4test", nil, headers
expect(last_response.body).to eq("error: ark identifiers are not supported by this service")
expect(last_response.status).to eq(501)
end
it "update ark identifier" do
post "/id/ark:/99999/fk4test", nil, headers
expect(last_response.body).to eq("error: ark identifiers are not supported by this service")
expect(last_response.status).to eq(501)
end
it "delete ark identifier" do
delete "/id/ark:/99999/fk4test", nil, headers
expect(last_response.body).to eq("error: ark identifiers are not supported by this service")
expect(last_response.status).to eq(501)
end
end
describe "profiles" do
it "show erc profile" do
doi = "10.4124/XZ7JTC6TBB.1"
params = { "_profile" => "erc" }
get "/id/doi:#{doi}", params
expect(last_response.status).to eq(501)
expect(last_response.body).to eq("error: erc profile not supported by this service")
end
it "show dc profile" do
doi = "10.4124/XZ7JTC6TBB.1"
params = { "_profile" => "dc" }
get "/id/doi:#{doi}", params
expect(last_response.status).to eq(501)
expect(last_response.body).to eq("error: dc profile not supported by this service")
end
end
end
|
datacite/cheetoh
|
config/initializers/anvl.rb
|
require "anvl"
|
datacite/cheetoh
|
spec/apis/index_spec.rb
|
require 'rails_helper'
describe '/login', type: :api do
it "login path not supported" do
get '/login'
expect(last_response.status).to eq(501)
expect(last_response.body).to eq("error: one-time login and session cookies not supported by this service")
end
end
|
datacite/cheetoh
|
app/controllers/dois_controller.rb
|
class DoisController < ApplicationController
include Doiable
prepend_before_action :authenticate_user_with_basic_auth!, except: [:show]
before_action :set_profile
before_action :set_doi, only: [:show, :update, :destroy]
before_action :set_raven_context, only: [:mint, :create, :update]
def show
response = DoisController.get_doi(@doi)
if response.status == 200
render plain: ez_response(response.body, profile: @profile).to_anvl, status: :ok
elsif response.status == 404
render plain: "error: bad request - no such identifier", status: :bad_request
else
logger.error response.body.dig("errors", 0, "title")
render plain: "error: " + response.body.dig("errors", 0, "title"), status: response.status
end
end
def mint
fail IdentifierError, "no _profile provided" unless profile_present?(safe_params)
fail IdentifierError, "no _target provided" if (safe_params[:_target].blank? && safe_params[:_status] != "reserved")
# make sure we generate a random DOI that is not already used
# allow seed with number for deterministic minting (e.g. testing)
if safe_params[:_number].present?
doi = generate_random_doi(params[:id], number: safe_params[:_number])
fail IdentifierError, "doi:#{doi} has already been registered" if DoisController.get_doi(doi).status == 200
else
duplicate = true
while duplicate do
doi = generate_random_doi(params[:id])
duplicate = DoisController.get_doi(doi).status == 200
end
end
data = safe_params[@profile].present? ? URI.decode(safe_params[@profile].anvlunesc) : nil
options = {
data: data,
url: safe_params[:_target],
target_status: safe_params[:_status],
username: @username,
password: <PASSWORD> }
options = datacite_options(options) if @profile.to_s == "datacite"
response = DoisController.post_doi(doi, options)
if [200, 201].include?(response.status)
render plain: ez_response(response.body, profile: @profile).to_anvl, status: :ok
elsif [401, 403].include?(response.status)
response.headers.delete_if { |key| key == 'X-Credential-Username' }
render plain: "error: unauthorized", status: :unauthorized
else
logger.error response.body.dig("errors", 0, "title")
render plain: "error: " + response.body.dig("errors", 0, "title"), status: response.status
end
end
def create
doi = validate_doi(params[:id])
fail IdentifierError, "ark identifiers are not supported by this service" if is_ark?(params[:id])
fail IdentifierError, "no doi provided" unless doi.present?
fail IdentifierError, "no _profile provided" unless profile_present?(safe_params)
fail IdentifierError, "no _target provided" if
(safe_params[:_target].blank? && safe_params[:_status] != "reserved")
fail IdentifierError, "doi:#{doi} has already been registered" if DoisController.get_doi(doi).status == 200
data = decode_param(safe_params[@profile])
options = {
data: data,
url: decode_param(safe_params[:_target]),
target_status: safe_params[:_status],
username: @username,
password: <PASSWORD> }.compact
options = datacite_options(options) if @profile.to_s == "datacite"
response = DoisController.post_doi(doi, options)
if [200, 201].include?(response.status)
render plain: ez_response(response.body, profile: @profile).to_anvl, status: :ok
elsif [401, 403].include?(response.status)
response.headers.delete_if { |key| key == 'X-Credential-Username' }
render plain: "error: unauthorized", status: :unauthorized
else
logger.error response.body.dig("errors", 0, "title")
render plain: "error: " + response.body.dig("errors", 0, "title"), status: response.status
end
end
def update
fail IdentifierError, "No _profile, _target or _status provided" unless
safe_params[@profile].present? ||
safe_params[:_target].present? ||
safe_params[:_status].present?
data = decode_param(safe_params[@profile])
options = {
data: data,
url: decode_param(safe_params[:_target]),
target_status: safe_params[:_status],
username: @username,
password: <PASSWORD> }.compact
options = datacite_options(options) if @profile.to_s == "datacite"
response = DoisController.put_doi(@doi, options)
if [200, 201].include?(response.status)
render plain: ez_response(response.body, profile: @profile).to_anvl, status: :ok
elsif [401, 403].include?(response.status)
response.headers.delete_if { |key| key == 'X-Credential-Username' }
render plain: "error: unauthorized", status: :unauthorized
else
logger.error response.body.dig("errors", 0, "title")
render plain: "error: " + response.body.dig("errors", 0, "title"), status: response.status
end
end
def destroy
response = DoisController.get_doi(@doi)
fail AbstractController::ActionNotFound unless response.status == 200
fail IdentifierError, "#{params[:id]} is not a reserved DOI" unless response.body.dig("data", "attributes", "state") == "draft"
delete_response = DoisController.delete_doi(@doi, username: @username, password: <PASSWORD>)
if delete_response.status == 204
render plain: ez_response(response.body, profile: @profile).to_anvl, status: :ok
elsif [401, 403].include?(delete_response.status)
delete_response.headers.delete_if { |key| key == 'X-Credential-Username' }
render plain: "error: unauthorized", status: :unauthorized
else
logger.error delete_response.body.dig("errors", 0, "title")
render plain: "error: " + delete_response.body.dig("errors", 0, "title"), status: response.status
end
end
protected
def set_doi
fail IdentifierError, "ark identifiers are not supported by this service" if is_ark?(params[:id])
@doi = validate_doi(params[:id])
fail AbstractController::ActionNotFound unless @doi.present?
end
def profile_present?(safe_params)
safe_params[:_status] == "reserved" ||
safe_params[@profile].present? ||
safe_params["datacite.creator"].present? &&
safe_params["datacite.title"].present? &&
safe_params["datacite.publisher"].present? &&
safe_params["datacite.publicationyear"].present? &&
safe_params["datacite.resourcetype"].present?
end
def set_profile
@profile = safe_params[:_profile].presence || :datacite
fail IdentifierError, "#{safe_params[:_profile]} profile not supported by this service" unless
SUPPORTED_PROFILES[@profile.<EMAIL>sym].present?
end
private
def safe_params
params.permit(:id, :_target, :_export, :_profile, :_status, :_number, :datacite, :bibtex, :ris, :schema_org, :citeproc, "datacite.creator", "datacite.title", "datacite.publisher", "datacite.publicationyear", "datacite.resourcetype")
end
def set_raven_context
return nil unless safe_params[@profile].present?
Raven.extra_context metadata: URI.escape(safe_params[@profile])
end
def datacite_options(options)
resource_type_general, resource_type = decode_param(safe_params["datacite.resourcetype"])&.split('/')
options = options.merge(
creator: decode_param(safe_params["datacite.creator"]),
title: decode_param(safe_params["datacite.title"]),
publisher: decode_param(safe_params["datacite.publisher"]),
publication_year: decode_param(safe_params["datacite.publicationyear"]),
resource_type_general: resource_type_general,
resource_type: resource_type
)
end
end
|
datacite/cheetoh
|
config/routes.rb
|
<filename>config/routes.rb
Rails.application.routes.draw do
resources :heartbeat, only: [:index]
# support login path
get 'login', :to => 'index#login'
resources :index, path: '/', only: [:index]
resources :dois, path: '/id', only: [:show], constraints: { :id => /.+/ }
# custom routes, as EZID's routes don't follow standard rails pattern
# we need to add constraints, as the id may contain slashes
# create identifier
put 'id/:id', :to => 'dois#create', constraints: { :id => /.+/ }
# mint identifier
post 'shoulder/:id', :to => 'dois#mint', constraints: { :id => /.+/ }
# update identifier
post 'id/:id', :to => 'dois#update', constraints: { :id => /.+/ }
# delete identifier
delete 'id/:id', :to => 'dois#destroy', constraints: { :id => /.+/ }
root :to => 'index#index'
# rescue routing errors
match "*path", to: "application#route_not_found", via: :all
end
|
datacite/cheetoh
|
spec/apis/delete_spec.rb
|
require "rails_helper"
describe "delete", :type => :api, vcr: true, :order => :defined do
let(:doi) { "10.5072/bc11-cqw7" }
let(:username) { ENV['MDS_USERNAME'] }
let(:password) { ENV['MDS_PASSWORD'] }
let(:headers) do
{ "HTTP_CONTENT_TYPE" => "text/plain",
"HTTP_AUTHORIZATION" => ActionController::HttpAuthentication::Basic.encode_credentials(username, password) }
end
it "missing valid doi parameter" do
doi = "20.5072/0000-03vc"
delete "/id/doi:#{doi}", nil, headers
expect(last_response.status).to eq(400)
expect(last_response.body).to eq("error: bad request - no such identifier")
end
it "missing login credentials" do
delete "/id/doi:#{doi}"
expect(last_response.status).to eq(401)
expect(last_response.headers["WWW-Authenticate"]).to eq("Basic realm=\"ez.stage.datacite.org\"")
expect(last_response.body).to eq("HTTP Basic: Access denied.\n")
end
it "not a reserved doi" do
doi = "10.5438/mcnv-ga6n"
delete "/id/doi:#{doi}", nil, headers
expect(last_response.status).to eq(400)
expect(last_response.body).to eq("error: doi:10.5438/mcnv-ga6n is not a reserved DOI")
end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.