repo_name (stringlengths 6-97) | path (stringlengths 3-341) | text (stringlengths 8-1.02M)
|---|---|---|
michaelvobrien/furigana
|
lib/furigana/cli.rb
|
require 'furigana'
require 'singleton'
require 'ostruct'
require 'optparse'
module Furigana
class CLI
include Singleton
def initialize
@settings = OpenStruct.new
@settings.format = :text
end
def parse_options
OptionParser.new do |opts|
opts.banner = "Usage: furigana [options] [file]"
opts.on("--text", "Add furigana and output text (default)") do
@settings.format = :text
end
opts.on("--html", "Add furigana and output HTML") do
@settings.format = :html
end
opts.on("--yomikata", "Output yomikata only") do
@settings.format = :yomikata
end
opts.on("--json", "Add furigana and output JSON") do
@settings.format = :json
end
opts.on_tail("-h", "--help", "Show this message") do
puts opts
exit
end
opts.on_tail("--version", "Show version") do
puts Furigana::VERSION
exit
end
end.parse!
end
def start
parse_options
input = ARGF.read
case @settings.format
when :text
puts Formatter::Text.new(input, Reader.new.reading(input)).render
when :html
puts Formatter::HTML.new(input, Reader.new.reading(input)).render
when :yomikata
puts Formatter::Yomikata.new(input, Reader.new.reading(input)).render
when :json
puts Formatter::JSON.new(input, Reader.new.reading(input)).render
end
end
end
end
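# Invocation sketch: the gemspec installs bin/furigana, which drives this
# class; input is a file argument or stdin via ARGF, and the flags mirror the
# OptionParser definitions above:
#
#   furigana --html article.txt
#   echo '食べる' | furigana --yomikata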
|
michaelvobrien/furigana
|
furigana.gemspec
|
# Ensure we require the local version and not one we might have installed already
require File.join([File.dirname(__FILE__),'lib','furigana','version.rb'])
spec = Gem::Specification.new do |s|
s.name = 'furigana'
s.version = Furigana::VERSION
s.author = "<NAME>"
s.email = '<EMAIL>'
s.homepage = 'https://github.com/michaelvobrien/furigana'
s.platform = Gem::Platform::RUBY
s.summary = 'Add furigana to text'
s.files = %w(
bin/furigana
lib/furigana.rb
lib/furigana/formatter/base.rb
lib/furigana/formatter/html.rb
lib/furigana/formatter/yomikata.rb
lib/furigana/formatter/json.rb
lib/furigana/formatter/text.rb
lib/furigana/formatters.rb
lib/furigana/cli.rb
lib/furigana/mecab.rb
lib/furigana/reader.rb
lib/furigana/version.rb
)
s.require_paths << 'lib'
s.has_rdoc = true
s.rdoc_options << '--title' << 'furigana' << '-ri'
s.bindir = 'bin'
s.executables << 'furigana'
s.add_runtime_dependency('diff-lcs')
s.add_development_dependency('rake')
s.add_development_dependency('rdoc')
s.add_development_dependency('pry')
s.add_development_dependency('pry-doc')
end
|
michaelvobrien/furigana
|
lib/furigana/formatter/yomikata.rb
|
require 'nkf'
module Furigana
module Formatter
class Yomikata < Formatter::Base
def replacement(surface_form, reading)
reading
end
def render
k2h(super)
end
private
def k2h(k)
return nil if k.nil?
NKF.nkf("-h1 -w", k)
end
end
end
end
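# Quick sketch of the k2h step above: NKF's -h1 flag converts katakana to
# hiragana and -w forces UTF-8 output, so a katakana reading comes back as
# hiragana:
#
#   require 'nkf'
#   NKF.nkf("-h1 -w", "タベル") # => "たべる"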
|
michaelvobrien/furigana
|
lib/furigana.rb
|
require 'furigana/version'
require 'furigana/mecab'
require 'furigana/reader'
require 'furigana/formatters'
|
michaelvobrien/furigana
|
test/mecab_test.rb
|
# -*- coding: utf-8 -*-
require 'test_helper'
class MecabTest < Test::Unit::TestCase
test "tokenize 食べる" do
text = '食べる'
expected = [{ surface_form: "食べる", reading: "タベル" }]
assert_equal expected, Furigana::Mecab.tokenize(text)
end
test "tokenize alphanumeric with 食べる" do
text = 'This is "食べる" 123'
expected = [
{:surface_form => "This", :reading => "This"},
{:surface_form => "is", :reading => "is"},
{:surface_form => "\"", :reading => "\""},
{:surface_form => "食べる", :reading => "タベル"},
{:surface_form => "\"", :reading => "\""},
{:surface_form => "123", :reading => "123"}
]
assert_equal expected, Furigana::Mecab.tokenize(text)
end
test "tokenize バルト海の新しい読み物。" do
text = "バルト海の新しい読み物。"
expected = [
{:surface_form => "バルト海", :reading => "バルトカイ"},
{:surface_form => "の", :reading => "ノ"},
{:surface_form => "新しい", :reading => "アタラシイ"},
{:surface_form => "読み物", :reading => "ヨミモノ"},
{:surface_form => "。", :reading => "。"}
]
assert_equal expected, Furigana::Mecab.tokenize(text)
end
test "paragraph" do
text = <<-END
私は2年前に日本に移住しました。カメラで写真を撮りました。私は食べ物が好
きです。SNS123567。
END
expected = [
{:surface_form => "私", :reading => "ワタシ"},
{:surface_form => "は", :reading => "ハ"},
{:surface_form => "2", :reading => "2"},
{:surface_form => "年", :reading => "ネン"},
{:surface_form => "前", :reading => "マエ"},
{:surface_form => "に", :reading => "ニ"},
{:surface_form => "日本", :reading => "ニッポン"},
{:surface_form => "に", :reading => "ニ"},
{:surface_form => "移住", :reading => "イジュウ"},
{:surface_form => "し", :reading => "シ"},
{:surface_form => "まし", :reading => "マシ"},
{:surface_form => "た", :reading => "タ"},
{:surface_form => "。", :reading => "。"},
{:surface_form => "カメラ", :reading => "カメラ"},
{:surface_form => "で", :reading => "デ"},
{:surface_form => "写真", :reading => "シャシン"},
{:surface_form => "を", :reading => "ヲ"},
{:surface_form => "撮り", :reading => "トリ"},
{:surface_form => "まし", :reading => "マシ"},
{:surface_form => "た", :reading => "タ"},
{:surface_form => "。", :reading => "。"},
{:surface_form => "私", :reading => "ワタシ"},
{:surface_form => "は", :reading => "ハ"},
{:surface_form => "食べ物", :reading => "タベモノ"},
{:surface_form => "が", :reading => "ガ"},
{:surface_form => "好き", :reading => "スキ"},
{:surface_form => "です", :reading => "デス"},
{:surface_form => "。", :reading => "。"},
{:surface_form => "SNS", :reading => "SNS"},
{:surface_form => "1", :reading => "イチ"},
{:surface_form => "23567", :reading => "23567"},
{:surface_form => "。", :reading => "。"}
]
assert_equal expected, Furigana::Mecab.tokenize(text)
end
end
|
michaelvobrien/furigana
|
lib/furigana/mecab.rb
|
require 'open3'
module Furigana
class Mecab
class << self
def tokenize(text)
surface_form, reading = 0, 1
stdout, stderr, status = Open3.capture3("mecab -Ochasen", :stdin_data => sanitize_text(text))
lines = split_stdout(stdout)
lines.reduce([]) do |kanji_tokens, line|
return kanji_tokens if line == 'EOS'
columns = line.split("\t")
kanji_tokens << {
:surface_form => columns[surface_form],
:reading => columns[reading]
}
end
end
private
def sanitize_text(text)
format("%s\n", text.tr("\n", ""))
end
def split_stdout(stdout)
# Avoid `ArgumentError - invalid byte sequence in UTF-8`
if stdout.valid_encoding?
stdout.split("\n")
else
stdout.encode!("UTF-8", "UTF-8", :invalid => :replace, :undef => :replace, :replace => "�")
stdout.split("\n")
end
end
end
end
end
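# Minimal usage sketch (assumes the `mecab` binary with -Ochasen output is
# installed and on PATH); the expected shape matches the assertions in
# test/mecab_test.rb:
#
#   Furigana::Mecab.tokenize('食べる')
#   # => [{ surface_form: "食べる", reading: "タベル" }]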
|
DanThiffault/rails-lastmile
|
recipes/default.rb
|
#
# Cookbook Name:: rails-lastmile
# Recipe:: default
#
# Copyright 2013, 119 Labs LLC
#
# See license.txt for details
#
class Chef::Recipe
# mix in recipe helpers
include Chef::RubyBuild::RecipeHelpers
end
app_dir = node['rails-lastmile']['app_dir']
include_recipe "rails-lastmile::setup"
include_recipe "nginx"
include_recipe "unicorn"
directory "/var/run/unicorn" do
owner "root"
group "root"
mode "777"
action :create
end
file "/var/run/unicorn/master.pid" do
owner "root"
group "root"
mode "666"
action :create_if_missing
end
file "/var/log/unicorn.log" do
owner "root"
group "root"
mode "666"
action :create_if_missing
end
unicorn_config "/etc/unicorn.cfg" do
listen( { node[:unicorn][:listen] => node[:unicorn][:options] })
pid node[:unicorn][:pid]
working_directory app_dir
worker_timeout node[:unicorn][:worker_timeout]
stdout_path node[:unicorn][:stdout_path]
stderr_path node[:unicorn][:stderr_path]
preload_app node[:unicorn][:preload_app]
worker_processes node[:unicorn][:worker_processes]
before_fork node[:unicorn][:before_fork]
end
rbenv_script "run-rails" do
rbenv_version node['rails-lastmile']['ruby_version']
cwd app_dir
if node['rails-lastmile']['reset_db']
code <<-EOT1
bundle install #{node['rails-lastmile']['bundler_args']}
bundle exec rake db:reset
bundle exec rake db:test:load
ps -p `cat /var/run/unicorn/master.pid` &>/dev/null || bundle exec unicorn -c /etc/unicorn.cfg -D --env #{node['rails-lastmile']['environment']}
EOT1
else
code <<-EOT2
bundle install #{node['rails-lastmile']['bundler_args']}
bundle exec rake db:create
bundle exec rake db:migrate
bundle exec rake db:test:load
ps -p `cat /var/run/unicorn/master.pid` &>/dev/null || bundle exec unicorn -c /etc/unicorn.cfg -D --env #{node['rails-lastmile']['environment']}
EOT2
end
end
template "/etc/nginx/sites-enabled/default" do
owner "root"
group "root"
mode "644"
source "nginx.erb"
variables( :static_root => "#{app_dir}/public")
notifies :restart, "service[nginx]"
end
service "unicorn"
service "nginx"
|
DanThiffault/rails-lastmile
|
attributes/default.rb
|
default['rails-lastmile']['app_dir'] = "/vagrant"
default['rails-lastmile']['ruby_version'] = "1.9.3-p385"
# when true, the run-rails script resets the db with rake db:reset
default['rails-lastmile']['reset_db'] = false
default['rails-lastmile']['environment'] = 'development'
default['rails-lastmile']['bundler_args'] = ''
node.default[:unicorn][:worker_timeout] = 30
node.default[:unicorn][:preload_app] = true
node.default[:unicorn][:worker_processes] = 2
node.default[:unicorn][:listen] = '/tmp/unicorn.todo.sock'
node.default[:unicorn][:pid] = '/var/run/unicorn/master.pid'
node.default[:unicorn][:stdout_path] = '/var/log/unicorn.log'
node.default[:unicorn][:stderr_path] = '/var/log/unicorn.log'
node.set[:unicorn][:options] = { :backlog => 100 }
|
DanThiffault/rails-lastmile
|
recipes/setup.rb
|
#
# Cookbook Name:: rails-lastmile
# Recipe:: setup
#
# Copyright 2013, 119 Labs LLC
#
# See license.txt for details
#
class Chef::Recipe
# mix in recipe helpers
include Chef::RubyBuild::RecipeHelpers
end
node.default['rbenv']['rubies'] = [ node['rails-lastmile']['ruby_version'] ]
include_recipe "apt"
package "build-essential"
include_recipe "ruby_build"
include_recipe "rbenv::system"
include_recipe "rbenv::vagrant"
rbenv_global node['rails-lastmile']['ruby_version']
rbenv_gem "bundler"
rbenv_gem "rails"
|
DanThiffault/rails-lastmile
|
metadata.rb
|
name "rails-lastmile"
maintainer "119 Labs LLC"
maintainer_email "<EMAIL>"
license "All rights reserved"
description "Sets up a server for rails"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.1.0"
|
travis-ci/pupcycler
|
lib/pupcycler/packet_client.rb
|
# frozen_string_literal: true
require 'faraday'
require 'faraday_middleware'
require 'pupcycler'
module Pupcycler
class PacketClient
def initialize(auth_token: '', project_id: '')
@auth_token = auth_token
@project_id = project_id
end
attr_reader :auth_token, :project_id
private :auth_token
private :project_id
def devices
accum = []
next_page = "/projects/#{project_id}/devices?page=1"
loop do
resp = conn.get(next_page)
raise resp.body.fetch('errors', ['bork!']).first unless resp.success?
accum += resp.body.fetch('devices').map do |h|
Pupcycler::PacketDevice.from_api_hash(h)
end
meta_next = resp.body.fetch('meta', {}).fetch('next', nil)
break if meta_next.nil?
next_page = meta_next.fetch('href')
end
accum.uniq(&:id)
end
def device(device_id: '')
resp = conn.get("/devices/#{device_id}")
raise resp.body.fetch('errors', ['ugh!']).first unless resp.success?
Pupcycler::PacketDevice.from_api_hash(resp.body)
end
def reboot(device_id: '')
device_action(device_id, 'reboot')
end
private def device_action(device_id, action)
device_id = device_id.to_s.strip
action = action.to_s.strip
raise 'missing device id' if device_id.empty?
raise 'missing action type' if action.empty?
resp = conn.post do |req|
req.url "/devices/#{device_id}/actions", 'type' => action
end
raise resp.body.fetch('errors', ['bork!']).first unless resp.success?
resp.body
end
private def conn
@conn ||= Faraday.new(url: 'https://api.packet.net') do |c|
c.headers = {
'Accept' => 'application/json',
'X-Auth-Token' => auth_token
}
c.response :json, content_type: /\bjson$/
c.adapter :net_http
end
end
end
end
|
travis-ci/pupcycler
|
lib/pupcycler/worker.rb
|
# frozen_string_literal: true
require 'redis'
require 'redlock'
require 'time'
require 'pupcycler'
module Pupcycler
class Worker
def self.run
new(
loop_sleep: Pupcycler.config.worker_loop_sleep,
once: Pupcycler.config.worker_run_once,
redis_url: Pupcycler.config.redis_url
).run
end
def initialize(loop_sleep: 60, once: false, redis_url: '')
@loop_sleep = loop_sleep
@once = once
@redis_url = redis_url
end
attr_reader :loop_sleep, :once, :redis_url
private :loop_sleep
private :once
private :redis_url
def run
loop do
logger.info 'running tick'
run_tick
break if once
logger.info 'sleeping', seconds: loop_sleep
sleep loop_sleep
end
end
private def run_tick
lock_manager.lock!('worker_tick', loop_sleep * 1_000) do
upcycler.upcycle!
end
rescue Redlock::LockError => e
logger.error 'failed to lock', error: e
rescue StandardError => e
logger.error 'boomsies', error: e
end
private def upcycler
@upcycler ||= Pupcycler.upcycler
end
private def lock_manager
@lock_manager ||= Redlock::Client.new([Redis.new(url: redis_url)])
end
private def logger
@logger ||= Pupcycler.logger
end
end
end
|
travis-ci/pupcycler
|
spec/pupcycler/worker_spec.rb
|
# frozen_string_literal: true
describe Pupcycler::Worker do
let :fake_upcycler do
double('fake upcycler', upcycle!: nil)
end
let :fake_lock_manager do
flm = double('fake lock manager')
allow(flm).to receive(:lock!) do |*_, &b|
b.call
end
flm
end
before do
Pupcycler.logger.level = Logger::FATAL
Pupcycler.config.worker_run_once = true
allow_any_instance_of(described_class).to receive(:upcycler)
.and_return(fake_upcycler)
allow_any_instance_of(described_class).to receive(:lock_manager)
.and_return(fake_lock_manager)
end
it 'runs' do
described_class.run
end
it 'upcycles stale workers' do
expect(fake_upcycler).to receive(:upcycle!)
described_class.run
end
it 'rescues StandardError and descendants within each run tick' do
allow(fake_upcycler).to receive(:upcycle!).and_raise(KaboomsError)
expect { described_class.run }.not_to raise_error
end
it 'rescues redis locking errors within each run tick' do
allow(fake_lock_manager).to receive(:lock!).and_raise(Redlock::LockError)
expect { described_class.run }.not_to raise_error
end
end
KaboomsError = Class.new(StandardError)
|
travis-ci/pupcycler
|
lib/pupcycler/config.rb
|
# frozen_string_literal: true
require 'hashr'
require 'travis/config'
module Pupcycler
class Config < Travis::Config
extend Hashr::Env
self.env_namespace = 'PUPCYCLER'
define(
auth_tokens: ENV.fetch(
'PUPCYCLER_AUTH_TOKENS',
ENV.fetch(
'AUTH_TOKENS', 'notset'
)
).split(',').map(&:strip),
environment: ENV.fetch(
'PUPCYCLER_ENVIRONMENT',
ENV.fetch(
'ENVIRONMENT', 'notset'
)
),
log_level: 'info',
logger: { format_type: 'l2met', thread_id: true },
packet_auth_token: ENV.fetch(
'PUPCYCLER_PACKET_AUTH_TOKEN',
ENV.fetch(
'PACKET_AUTH_TOKEN', 'notset'
)
),
packet_project_id: ENV.fetch(
'PUPCYCLER_PACKET_PROJECT_ID',
ENV.fetch(
'PACKET_PROJECT_ID', 'notset'
)
),
pool: ENV.fetch('PUPCYCLER_POOL', ENV.fetch('POOL', '0')),
redis_url: ENV.fetch(
ENV.fetch('REDIS_PROVIDER', 'REDIS_URL'), 'redis://localhost:6379/0'
),
redis_pool_options: { size: 5, timeout: 3 },
upcycler_rebooting_disabled: false,
upcycler_cooldown_threshold: 900,
upcycler_staleness_threshold: 43_200,
upcycler_unresponsiveness_threshold: 3_600,
worker_loop_sleep: 60,
worker_run_once: false
)
end
end
|
travis-ci/pupcycler
|
spec/pupcycler_spec.rb
|
# frozen_string_literal: true
describe Pupcycler do
%w[
App
Config
PacketClient
PacketDevice
Store
Upcycler
Worker
].each do |sym|
describe Pupcycler.const_get(sym) do
it 'exists' do
expect(described_class).to_not be_nil
end
end
end
end
|
travis-ci/pupcycler
|
spec/support.rb
|
# frozen_string_literal: true
require 'simplecov'
require 'rack/test'
require 'rspec'
require 'webmock/rspec'
require 'fakeredis' unless ENV['INTEGRATION_SPECS'] == '1'
require 'pupcycler'
RSpec.configure do |c|
c.include Rack::Test::Methods
end
WebMock.disable_net_connect!
|
travis-ci/pupcycler
|
spec/pupcycler/store_spec.rb
|
# frozen_string_literal: true
describe Pupcycler::Store do
subject { Pupcycler.store }
it 'knows how to now' do
expect(subject.send(:now)).to_not be_nil
end
describe 'cleaning up' do
let :device_hashes do
[
{
boop: '2018-07-15 03:32:01 UTC',
heartbeat: nil,
hostname: 'fancy-1-worker-org-07-packet',
reboot: nil,
shutdown: nil,
startup: nil,
state: nil,
id: 'ffffffff-aaaa-ffff-aaaa-fffffffffff0'
},
{
boop: '2018-07-15 03:32:01 UTC',
heartbeat: nil,
hostname: 'fancy-1-worker-org-17-packet',
reboot: nil,
shutdown: nil,
startup: nil,
state: nil,
id: 'ffffffff-aaaa-ffff-aaaa-fffffffffff1'
},
{
boop: '2018-07-15 03:32:01 UTC',
heartbeat: nil,
hostname: 'fancy-1-worker-org-06-packet',
reboot: nil,
shutdown: nil,
startup: nil,
state: nil,
id: 'ffffffff-aaaa-ffff-aaaa-fffffffffff2'
}
]
end
before do
allow(subject).to receive(:fetch_devices)
.and_return(device_hashes)
end
it 'wipes device records' do
expect(subject).to receive(:wipe_device)
.exactly(device_hashes.length).times
subject.cleanup!
end
end
end
|
travis-ci/pupcycler
|
spec/pupcycler/packet_client_spec.rb
|
# frozen_string_literal: true
describe Pupcycler::PacketClient do
subject { Pupcycler.packet_client }
let(:nowish) { Time.parse(Time.now.utc.iso8601(0)) }
let :device_id do
"fafafaf-afafafa-fafafafafafaf-#{rand(100_000..1_000_000)}-afafafafaf"
end
let :response_page_1 do
{
'devices' => [
{
'updated_at' => (nowish - 84_000).to_s,
'hostname' => 'fafafaf-testing-1-buh',
'id' => device_id,
'state' => 'running',
'tags' => %w[worker notset pool-0],
'created_at' => (nowish - 7200).to_s
},
{
'updated_at' => (nowish - 3600).to_s,
'hostname' => 'fafafaf-testing-1-qhu',
'id' => 'not-' + device_id,
'state' => 'running',
'tags' => %w[bloop testing],
'created_at' => (nowish - 7200).to_s
}
],
'meta' => {
'next' => {
'href' => '/projects/notset/devices?page=2'
}
}
}
end
let :response_page_2 do
{
'devices' => [
{
'updated_at' => (nowish - 84_000).to_s,
'hostname' => 'fafafaf-testing-3-buh',
'id' => 'also-not' + device_id,
'state' => 'running',
'tags' => %w[worker notset pool-0],
'created_at' => (nowish - 7200).to_s
},
{
'updated_at' => (nowish - 3600).to_s,
'hostname' => 'fafafaf-testing-8-zil',
'id' => 'def-not-' + device_id,
'state' => 'running',
'tags' => %w[bloop testing],
'created_at' => (nowish - 7200).to_s
}
],
'meta' => {
'next' => nil
}
}
end
before do
stub_request(
:get,
%r{api\.packet\.net/projects/[^/]+/devices\?page=1$}
).to_return(
status: 200,
headers: {
'Content-Type' => 'application/json'
},
body: JSON.generate(response_page_1)
)
stub_request(
:get,
%r{api\.packet\.net/projects/[^/]+/devices\?page=2$}
).to_return(
status: 200,
headers: {
'Content-Type' => 'application/json'
},
body: JSON.generate(response_page_2)
)
end
context 'when fetching devices' do
it 'fetches all of them' do
expect(subject.devices.map(&:id).length).to eql(4)
end
end
end
|
travis-ci/pupcycler
|
lib/pupcycler/app.rb
|
# frozen_string_literal: true
require 'pupcycler'
require 'sinatra/base'
require 'sinatra/json'
module Pupcycler
class App < Sinatra::Base
BOOT_TIME = Time.now
helpers do
def protect!
return if authorized?
headers['WWW-Authenticate'] = 'token'
content_type :json
halt 401, '{"no":"not you"}'
end
def authorized?
@auth ||= request.env.fetch('HTTP_AUTHORIZATION', 'notset')
valid_auth?(@auth)
end
def device_id
params.fetch('device_id')
end
def valid_auth?(auth_value)
auth_value.start_with?('token ') &&
Pupcycler.config.auth_tokens.include?(auth_value.sub(/^token /, ''))
end
end
get '/__meta__' do
status :ok
json message: 'hello, human',
uptime: uptime,
version: Pupcycler.version
end
get '/heartbeats/:device_id' do
protect!
store.save_heartbeat(device_id: device_id)
state = store.fetch_state(device_id: device_id)
status :ok
json state: state
end
post '/startups/:device_id' do
protect!
store.save_startup(device_id: device_id)
store.save_state(device_id: device_id, state: 'up')
state = store.fetch_state(device_id: device_id)
status :created
json state: state
end
post '/shutdowns/:device_id' do
protect!
store.save_shutdown(device_id: device_id)
upcycler.reboot(device_id: device_id) unless rebooting_disabled?
store.save_state(device_id: device_id, state: 'down')
state = store.fetch_state(device_id: device_id)
status :created
json state: state
end
get '/devices' do
protect!
status :ok
json data: store.fetch_devices
end
private def uptime
Time.now - BOOT_TIME
end
private def store
@store ||= Pupcycler.store
end
private def upcycler
@upcycler ||= Pupcycler.upcycler
end
private def rebooting_disabled?
Pupcycler.config.upcycler_rebooting_disabled?
end
end
end
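# Request sketch for the protected endpoints above; the Authorization header
# must use the "token " prefix checked by valid_auth? (the host and token
# value here are hypothetical):
#
#   curl -H 'Authorization: token s3cr3t' https://pupcycler.example.org/devices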
|
travis-ci/pupcycler
|
config.ru
|
# frozen_string_literal: true
libdir = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(libdir) unless $LOAD_PATH.include?(libdir)
require 'pupcycler'
run Pupcycler::App
|
travis-ci/pupcycler
|
spec/pupcycler/upcycler_spec.rb
|
# frozen_string_literal: true
describe Pupcycler::Upcycler do
subject { Pupcycler.upcycler }
let(:nowish) { Time.parse(Time.now.utc.iso8601(0)) }
let(:store) { subject.send(:store) }
let(:packet_client) { subject.send(:packet_client) }
let(:last_heartbeat) { nowish - 300 }
let(:last_startup) { nowish - 14_400 }
let(:worker_updated_at) { nowish - 14_400 }
let :device_id do
"fafafaf-afafafa-fafafafafafaf-#{rand(100_000..1_000_000)}-afafafafaf"
end
let :api_response_hash do
{
'devices' => [
{
'updated_at' => worker_updated_at.to_s,
'hostname' => 'fafafaf-testing-1-buh',
'id' => device_id,
'state' => 'running',
'tags' => %w[worker notset pool-0],
'created_at' => (nowish - 7200).to_s
},
{
'updated_at' => (nowish - 3600).to_s,
'hostname' => 'fafafaf-testing-1-qhu',
'id' => 'not-' + device_id,
'state' => 'running',
'tags' => %w[bloop testing],
'created_at' => (nowish - 7200).to_s
}
]
}
end
let :device do
Pupcycler::PacketDevice.from_api_hash(
api_response_hash.fetch('devices').first
)
end
before do
Pupcycler.redis_pool.with do |redis|
keys = redis.scan_each(match: 'device:*').to_a.uniq
redis.multi do |conn|
keys.each { |k| conn.del(k) }
end
end
allow(store).to receive(:now).and_return(nowish)
allow(store).to receive(:fetch_heartbeat)
.with(device_id: device_id).and_return(last_heartbeat)
allow(store).to receive(:fetch_startup)
.with(device_id: device_id).and_return(last_startup)
stub_request(
:get,
%r{api\.packet\.net/projects/[^/]+/devices(\?page=.+|)$}
).to_return(
status: 200,
headers: {
'Content-Type' => 'application/json'
},
body: JSON.generate(api_response_hash)
)
stub_request(
:get,
%r{api\.packet\.net/devices/fafafaf-.+-afafafafaf$}
).to_return(
status: 200,
headers: {
'Content-Type' => 'application/json'
},
body: JSON.generate(api_response_hash.fetch('devices').first)
)
stub_request(
:post,
%r{api\.packet\.net/devices/[^/]+/actions\?type=reboot$}
).to_return(status: 200)
end
describe 'upcycling' do
it 'can upcycle' do
subject.upcycle!
end
context 'when device is deleted' do
before do
allow(subject).to receive(:deleted?).and_return(true)
end
it 'wipes record of the device' do
expect(store).to receive(:wipe_device).with(device_id: device_id)
subject.upcycle!
end
it 'does not check for staleness' do
expect(subject).to_not receive(:stale?)
subject.upcycle!
end
end
context 'when device is unresponsive' do
before do
allow(subject).to receive(:unresponsive?).and_return(true)
end
it 'reboots the device' do
expect(subject).to receive(:reboot).with(device_id: device_id)
subject.upcycle!
end
it 'does not check for staleness' do
expect(subject).to_not receive(:stale?)
subject.upcycle!
end
end
context 'when device is stale' do
before do
allow(subject).to receive(:stale?).and_return(true)
end
it 'gracefully shuts down the device' do
expect(subject).to receive(:graceful_shutdown)
.with(device_id: device_id)
subject.upcycle!
end
end
context 'when store contains devices unknown to packet' do
before do
allow(subject).to receive(:packet_known_worker_devices).and_return([])
allow(store).to receive(:fetch_devices).and_return(
[
{
boop: '2018-07-15 03:32:01 UTC',
heartbeat: nil,
hostname: 'fancy-1-worker-org-07-packet',
reboot: nil,
shutdown: nil,
startup: nil,
state: nil,
id: device_id
}
]
)
end
it 'upcycles' do
expect(subject).to receive(:upcycle_device!)
.with(device_id: device_id, hostname: 'fancy-1-worker-org-07-packet')
subject.upcycle!
end
end
end
describe 'rebooting' do
it 'reboots via Packet API' do
expect(packet_client).to receive(:reboot).with(device_id: device_id)
subject.reboot(device_id: device_id)
end
it 'stores a reboot timestamp' do
subject.reboot(device_id: device_id)
expect(store.fetch_reboot(device_id: device_id)).to eql(nowish)
end
context 'when the device has not cooled down' do
let(:worker_updated_at) { nowish - 300 }
it 'refuses to reboot' do
expect { subject.reboot(device_id: device_id) }
.to raise_error(StandardError)
end
end
end
describe 'gracefully shutting down' do
it 'changes the device state to "down"' do
expect do
subject.graceful_shutdown(device_id: device_id)
end.to change {
store.fetch_state(device_id: device_id)
}.from('up').to('down')
end
end
describe 'deletion detection' do
context 'when device exists' do
before do
allow(packet_client).to receive(:device)
.with(device_id: device_id).and_return(device)
end
it 'reports false' do
expect(subject.send(:deleted?, device_id, nowish - 7200)).to be false
end
end
context 'when device does not exist' do
before do
allow(packet_client).to receive(:device)
.with(device_id: device_id).and_raise(StandardError.new('ugh!'))
end
context 'when device is unresponsive' do
before do
allow(subject).to receive(:unresponsive?).and_return(true)
end
it 'reports true' do
expect(subject.send(:deleted?, device_id, nowish - 7200)).to be true
end
end
context 'when device is still responsive' do
before do
allow(subject).to receive(:unresponsive?).and_return(false)
end
end
end
end
end
|
travis-ci/pupcycler
|
lib/pupcycler/packet_device.rb
|
# frozen_string_literal: true
require 'time'
require 'pupcycler'
module Pupcycler
class PacketDevice
def self.from_api_hash(api_hash)
new(
created_at: Time.parse(api_hash.fetch('created_at')),
hostname: api_hash.fetch('hostname'),
id: api_hash.fetch('id'),
state: api_hash.fetch('state'),
tags: Array(api_hash.fetch('tags')),
updated_at: Time.parse(api_hash.fetch('updated_at'))
)
end
def initialize(id: '', hostname: '', state: '', tags: [],
created_at: nil, updated_at: nil)
@created_at = created_at || Time.now.utc
@hostname = hostname
@id = id
@state = state
@tags = Array(tags)
@updated_at = updated_at || Time.now.utc
end
attr_reader :created_at, :hostname, :id, :state, :tags, :updated_at
end
end
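# Construction sketch using the same field names the Packet API hashes carry
# in the specs (values below are illustrative, not real devices):
#
#   dev = Pupcycler::PacketDevice.from_api_hash(
#     'id'         => 'ffffffff-aaaa-ffff-aaaa-fffffffffff0',
#     'hostname'   => 'fafafaf-testing-1-buh',
#     'state'      => 'running',
#     'tags'       => %w[worker notset pool-0],
#     'created_at' => '2018-07-15 01:32:01 UTC',
#     'updated_at' => '2018-07-15 03:32:01 UTC'
#   )
#   dev.tags # => ["worker", "notset", "pool-0"]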
|
travis-ci/pupcycler
|
lib/pupcycler.rb
|
# frozen_string_literal: true
require 'connection_pool'
require 'redis'
require 'redis-namespace'
require 'travis/logger'
module Pupcycler
autoload :App, 'pupcycler/app'
autoload :Config, 'pupcycler/config'
autoload :PacketClient, 'pupcycler/packet_client'
autoload :PacketDevice, 'pupcycler/packet_device'
autoload :Store, 'pupcycler/store'
autoload :Upcycler, 'pupcycler/upcycler'
autoload :Worker, 'pupcycler/worker'
def version
ENV.fetch(
'HEROKU_SLUG_COMMIT',
`git rev-parse HEAD 2>/dev/null`
).strip
end
module_function :version
def config
@config ||= Pupcycler::Config.load
end
module_function :config
def logger
@logger ||= Travis::Logger.new(@logdev || $stdout, config)
end
module_function :logger
attr_writer :logdev
module_function :logdev=
def redis_pool
@redis_pool ||= ConnectionPool.new(config.redis_pool_options) do
Redis::Namespace.new(
:pupcycler, redis: Redis.new(url: config.redis_url)
)
end
end
module_function :redis_pool
def upcycler
@upcycler ||= Pupcycler::Upcycler.new(
cooldown_threshold: config.upcycler_cooldown_threshold,
environment: config.environment,
pool: config.pool,
staleness_threshold: config.upcycler_staleness_threshold,
unresponsiveness_threshold: config.upcycler_unresponsiveness_threshold
)
end
module_function :upcycler
def store
@store ||= Pupcycler::Store.new(redis_pool: redis_pool)
end
module_function :store
def packet_client
@packet_client ||= Pupcycler::PacketClient.new(
auth_token: Pupcycler.config.packet_auth_token,
project_id: Pupcycler.config.packet_project_id
)
end
module_function :packet_client
end
|
travis-ci/pupcycler
|
lib/pupcycler/upcycler.rb
|
# frozen_string_literal: true
require 'pupcycler'
module Pupcycler
class Upcycler
def initialize(cooldown_threshold: 900, environment: 'test',
pool: 0, staleness_threshold: 43_200,
unresponsiveness_threshold: 3_600)
@cooldown_threshold = cooldown_threshold
@matching_tags = %W[worker #{environment} pool-#{pool}]
@staleness_threshold = staleness_threshold
@unresponsiveness_threshold = unresponsiveness_threshold
end
attr_reader :cooldown_threshold, :matching_tags, :staleness_threshold
attr_reader :unresponsiveness_threshold
private :cooldown_threshold
private :matching_tags
private :staleness_threshold
private :unresponsiveness_threshold
def upcycle!
seen = []
packet_known_worker_devices.each do |dev|
seen << dev.id
upcycle_device!(device_id: dev.id, hostname: dev.hostname)
end
store.fetch_devices.each do |dev_hash|
next if seen.include?(dev_hash.fetch(:id))
upcycle_device!(
device_id: dev_hash.fetch(:id), hostname: dev_hash.fetch(:hostname)
)
end
end
def reboot(device_id: '')
assert_device_cooled_down!(device_id: device_id)
logger.warn 'rebooting', device_id: device_id
packet_client.reboot(device_id: device_id)
store.save_reboot(device_id: device_id)
end
def graceful_shutdown(device_id: '')
logger.info 'gracefully shutting down', device_id: device_id
store.save_state(device_id: device_id, state: 'down')
end
private def upcycle_device!(device_id: '', hostname: '')
store.save_hostname(device_id: device_id, hostname: hostname)
store.save_boop(device_id: device_id)
if deleted?(device_id, store.fetch_heartbeat(device_id: device_id))
store.wipe_device(device_id: device_id)
return
end
if unresponsive?(store.fetch_heartbeat(device_id: device_id))
reboot(device_id: device_id)
return
end
return unless stale?(store.fetch_startup(device_id: device_id))
graceful_shutdown(device_id: device_id)
end
private def assert_device_cooled_down!(device_id: '')
dev = packet_client.device(device_id: device_id)
uptime = (now - dev.updated_at)
return if uptime > cooldown_threshold
raise 'device still cooling down ' \
"uptime=#{uptime}s threshold=#{cooldown_threshold}"
end
private def deleted?(device_id, last_heartbeat)
packet_client.device(device_id: device_id)
false
rescue StandardError => e
logger.info(
'failed to fetch possibly deleted device',
device_id: device_id, err: e
)
unresponsive?(last_heartbeat)
end
private def unresponsive?(last_heartbeat)
return false if last_heartbeat.nil?
(now - last_heartbeat) > unresponsiveness_threshold
end
private def stale?(startup)
return false if startup.nil?
(now - startup) > staleness_threshold
end
private def packet_known_worker_devices
packet_client.devices.select do |dev|
(dev.tags & matching_tags) == matching_tags
end
end
private def now
Time.now.utc
end
private def store
@store ||= Pupcycler.store
end
private def logger
@logger ||= Pupcycler.logger
end
private def packet_client
@packet_client ||= Pupcycler.packet_client
end
end
end
|
travis-ci/pupcycler
|
lib/pupcycler/store.rb
|
# frozen_string_literal: true
require 'time'
require 'pupcycler'
module Pupcycler
class Store
TIME_COERCE = ->(s) { Time.parse(s) }
private_constant :TIME_COERCE
NAMESPACES = {
devices: 'device:'
}.freeze
private_constant :NAMESPACES
DEVICE_KEYS_COERCIONS = {
boop: TIME_COERCE,
heartbeat: TIME_COERCE,
hostname: nil,
reboot: TIME_COERCE,
shutdown: TIME_COERCE,
startup: TIME_COERCE,
state: nil
}.freeze
private_constant :DEVICE_KEYS_COERCIONS
EMPTY_DEVICE_RECORD = Hash[
DEVICE_KEYS_COERCIONS.keys.map { |k| [k, nil] }
].freeze
private_constant :EMPTY_DEVICE_RECORD
def initialize(redis_pool: nil)
@redis_pool = redis_pool
end
attr_accessor :redis_pool
def save_heartbeat(device_id: '')
save_for_device('heartbeats', device_id)
end
def fetch_heartbeat(device_id: '')
fetch_for_device('heartbeats', device_id, default_value: nil,
coerce: TIME_COERCE)
end
def save_reboot(device_id: '')
save_for_device('reboots', device_id)
end
def fetch_reboot(device_id: '')
fetch_for_device('reboots', device_id, default_value: nil,
coerce: TIME_COERCE)
end
def save_startup(device_id: '')
save_for_device('startups', device_id)
end
def fetch_startup(device_id: '')
fetch_for_device('startups', device_id, default_value: nil,
coerce: TIME_COERCE)
end
def save_shutdown(device_id: '')
save_for_device('shutdowns', device_id)
end
def fetch_shutdown(device_id: '')
fetch_for_device('shutdowns', device_id, default_value: nil,
coerce: TIME_COERCE)
end
def save_state(device_id: '', state: '')
save_for_device('states', device_id, value: state, count: false)
end
def fetch_state(device_id: '')
fetch_for_device('states', device_id, default_value: 'up')
end
def save_boop(device_id: '')
save_for_device('boops', device_id, value: now, count: false)
end
def save_hostname(device_id: '', hostname: '')
save_for_device(
'hostnames', device_id, value: hostname, count: false
)
end
def fetch_devices
devices_by_id = {}
ns = NAMESPACES.fetch(:devices)
DEVICE_KEYS_COERCIONS.each do |subkey, coerce|
hgetall_coerce(
"#{ns}#{subkey}s", coerce: coerce
).each do |device_id, value|
devices_by_id[device_id] ||= EMPTY_DEVICE_RECORD.merge(id: device_id)
devices_by_id[device_id][subkey] = value
end
hgetall_coerce(
"#{ns}#{subkey}s:count", coerce: ->(s) { s.to_i }
).each do |device_id, count|
devices_by_id[device_id] ||= EMPTY_DEVICE_RECORD.merge(id: device_id)
devices_by_id[device_id]["#{subkey}_count".to_sym] = count
end
end
devices_by_id.values.sort do |a, b|
(a[:startup] || now).to_s <=> (b[:startup] || now).to_s
end
end
def wipe_device(device_id: '')
ns = NAMESPACES.fetch(:devices)
redis_pool.with do |redis|
redis.multi do |conn|
DEVICE_KEYS_COERCIONS.keys.each do |subkey|
conn.hdel("#{ns}#{subkey}s", device_id)
conn.hdel("#{ns}#{subkey}s:count", device_id)
end
end
end
end
def cleanup!(nil_check_keys: %i[heartbeat reboot shutdown startup state])
fetch_devices.each do |dev|
next unless nil_check_keys.map { |k| dev.fetch(k).nil? }.all?
wipe_device(device_id: dev.fetch(:id))
yield dev.fetch(:id) if block_given?
end
end
private def fetch_for_device(key, device_id,
default_value: nil, coerce: ->(v) { v })
key = key.to_s.strip
raise 'missing key' if key.empty?
device_id = device_id.to_s.strip
raise 'missing device id' if device_id.empty?
ret = { value: default_value }
ns = NAMESPACES.fetch(:devices)
redis_pool.with do |redis|
value = redis.hget("#{ns}#{key}", device_id).to_s.strip
ret[:value] = coerce.call(value) unless value.empty?
end
ret.fetch(:value)
end
private def save_for_device(key, device_id, value: nil, count: true)
key = key.to_s.strip
raise 'missing key' if key.empty?
device_id = device_id.to_s.strip
raise 'missing device id' if device_id.empty?
ns = NAMESPACES.fetch(:devices)
redis_pool.with do |redis|
redis.multi do |conn|
conn.hset("#{ns}#{key}", device_id, value || now)
next unless count
conn.hincrby("#{ns}#{key}:count", device_id, 1)
end
end
end
private def hgetall_coerce(ns_key, coerce: nil)
ret = {}
coerce = ->(s) { s } if coerce.nil?
redis_pool.with do |redis|
ret.merge!(
Hash[
redis.hgetall(ns_key).map { |i, t| [i, coerce.call(t)] }
]
)
end
ret
end
private def now
Time.now.utc.iso8601(3)
end
end
end
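# Round-trip sketch (assumes the Redis-backed pool wired up by
# Pupcycler.store); timestamps are stored as ISO8601 strings and coerced back
# to Time by TIME_COERCE on fetch:
#
#   store = Pupcycler.store
#   store.save_heartbeat(device_id: 'ffffffff-aaaa-ffff-aaaa-fffffffffff0')
#   store.fetch_heartbeat(device_id: 'ffffffff-aaaa-ffff-aaaa-fffffffffff0')
#   # => a Time instance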
|
travis-ci/pupcycler
|
spec/pupcycler/app_spec.rb
|
# frozen_string_literal: true
describe Pupcycler::App do
subject { described_class }
def app
Pupcycler::App
end
let :store do
Pupcycler.store
end
let :nowish do
Time.parse(Time.now.utc.iso8601(0))
end
let :device_id do
"fafafaf-afafafa-fafafafafafaf-#{rand(100_000..1_000_000)}-afafafafaf"
end
let :body do
JSON.parse(last_response.body)
end
before do
Pupcycler.config.auth_tokens = %w[<PASSWORD>]
store.wipe_device(device_id: device_id)
allow_any_instance_of(Pupcycler::Store).to receive(:now)
.and_return(nowish)
end
it 'has a non-zero boot time' do
expect(subject::BOOT_TIME).to_not be_nil
end
describe 'GET /__meta__' do
before do
get '/__meta__'
end
it 'is friendly' do
expect(last_response).to be_ok
expect(body).to include('message')
expect(body.fetch('message')).to match(/hello/i)
end
it 'provides uptime' do
expect(last_response).to be_ok
expect(body).to include('uptime')
expect(body.fetch('uptime')).to be > -1
end
it 'provides version' do
expect(last_response).to be_ok
expect(body).to include('version')
expect(body.fetch('version')).to_not be_empty
end
end
describe 'GET /heartbeats/{device_id}' do
before do
get "/heartbeats/#{device_id}", nil,
'HTTP_AUTHORIZATION' => 'token f<PASSWORD>'
end
it 'is ok' do
expect(last_response.status).to eql(200)
end
it 'records heartbeat' do
expect(store.fetch_heartbeat(device_id: device_id)).to eql(nowish)
end
it 'responds with the state' do
expect(body.fetch('state')).to eql('up')
end
end
describe 'POST /startups/{device_id}' do
before do
post "/startups/#{device_id}", nil,
'HTTP_AUTHORIZATION' => 'token <PASSWORD>'
end
it 'creates' do
expect(last_response.status).to eql(201)
end
it 'records startup' do
expect(store.fetch_startup(device_id: device_id)).to eql(nowish)
end
it 'saves state as up' do
expect(store.fetch_state(device_id: device_id)).to eql('up')
end
it 'responds with the state' do
expect(body.fetch('state')).to eql('up')
end
end
describe 'POST /shutdowns/{device_id}' do
before do
stub_request(
:get,
%r{api\.packet\.net/devices/#{device_id}$}
).to_return(
status: 200,
headers: {
'Content-Type' => 'application/json'
},
body: JSON.generate(
'updated_at' => (nowish - 3600).to_s,
'hostname' => 'fafafaf-testing-1-buh',
'id' => device_id,
'state' => 'running',
'tags' => %w[worker testing],
'created_at' => (nowish - 7200).to_s
)
)
stub_request(
:post,
%r{api\.packet\.net/devices/#{device_id}/actions\?type=reboot}
).to_return(status: 200)
post "/shutdowns/#{device_id}", nil,
'HTTP_AUTHORIZATION' => 'token <PASSWORD>'
end
it 'creates' do
expect(last_response.status).to eql(201)
end
it 'saves shutdown' do
expect(store.fetch_shutdown(device_id: device_id)).to eql(nowish)
end
it 'reboots' do
expect(WebMock).to have_requested(
:post,
%r{api\.packet\.net/devices/#{device_id}/actions\?type=reboot}
)
end
it 'saves state as down' do
expect(store.fetch_state(device_id: device_id)).to eql('down')
end
it 'responds with state' do
expect(body.fetch('state')).to eql('down')
end
end
describe 'GET /devices' do
before do
get '/devices', nil,
'HTTP_AUTHORIZATION' => 'token <PASSWORD>'
end
it 'is ok' do
expect(last_response.status).to eql(200)
end
it 'responds with data' do
expect(body.key?('data')).to be true
end
end
end
|
350org/strikes-poster-generator
|
config.rb
|
# Reload the browser automatically whenever files change
activate :livereload
config[:livereload_css_target] = nil
###
# Helpers
###
helpers do
def get_url
absolute_prefix + url_prefix
end
end
###
# Config
###
config[:css_dir] = 'stylesheets'
config[:js_dir] = 'javascripts'
config[:images_dir] = 'images'
config[:url_prefix] = '/'
config[:absolute_prefix] = 'http://localhost:4567'
# Build-specific configuration
configure :build do
puts "local build"
config[:url_prefix] = ''
config[:absolute_prefix] = ''
activate :asset_hash
activate :minify_javascript
activate :minify_css
end
|
jtimberman/netatalk
|
recipes/build_deb.rb
|
#
# Cookbook Name:: netatalk
# Recipe:: build_deb
#
# Copyright 2009, Opscode
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
%w{libcrack2-dev fakeroot libssl-dev}.each do |pkg|
package pkg
end
directory "#{Chef::Config[:file_cache_path]}/netatalk" do
owner "root"
group "root"
end
bash "install_netatalk" do
cwd "#{Chef::Config[:file_cache_path]}/netatalk"
code <<-EOF
apt-get build-dep -y netatalk && apt-get source netatalk
(cd netatalk-2* && DEB_BUILD_OPTIONS=ssl dpkg-buildpackage -rfakeroot)
dpkg -i netatalk*.deb && echo 'netatalk hold' | dpkg --set-selections
EOF
not_if "dpkg -s netatalk | grep -qx 'Status: hold ok installed'"
end
|
jtimberman/netatalk
|
metadata.rb
|
name "netatalk"
maintainer "<NAME>"
maintainer_email "<EMAIL>"
license "Apache 2.0"
description "Installs netatalk source package on Debian/Ubuntu w/ SSL support"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "1.1.1"
recipe "netatalk::default", <<-DESC
Includes the appropriate package prep recipe (if any), creates the base share directory, creates AFP config files and manages the netatalk service(s)
DESC
recipe "netatalk::backport_deb", <<-DESC
Enables the Netatalk backport PPA repository
DESC
recipe "netatalk::build_deb", "Builds Netatalk from the source package with OpenSSL support enabled"
recipe "netatalk::build_aur", "Builds Netatalk package from Arch User Repository (AUR) w/ OpenSSL support enabled"
supports "debian", ">= 6.0"
supports "ubuntu", ">= 10.04"
supports "fedora", ">= 19.0"
supports "redhat", ">= 5.0"
supports "centos", ">= 5.0"
supports "scientific", ">= 5.0"
depends "yum-epel"
attribute("netatalk/shares",
:display_name => "Shares",
:description => "Array of shares that populates AppleVolumes.default, one line per entry",
:type => "array",
:default => [])
attribute("netatalk/share_base",
:display_name => "Base Directory",
:description => "Base directory to share via AFP",
:type => "string",
:default => "/srv/afp_share")
attribute("netatalk/share_base_owner",
:display_name => "Base Directory Owner",
:description => "User that should own the share_base",
:type => "string",
:default => "root")
attribute("netatalk/share_base_group",
:display_name => "Base Directory Group",
:description => "Group that should own the share_base",
:type => "string",
:default => "root")
attribute("netatalk/afpd_options",
:display_name => "AFPD Config Options",
:description => "String of options written to afpd.conf",
:type => "string")
attribute("netatalk/package_recipe",
:display_name => "Package Preparation Recipe",
:description => "The name of a recipe that is used to set up the package for installation",
:calculated => true)
|
jtimberman/netatalk
|
attributes/default.rb
|
#
# Cookbook Name:: netatalk
# Attributes:: default
#
# Copyright 2009-2012, Opscode
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
default['netatalk']['shares'] = []
default['netatalk']['share_base'] = '/srv/afp_share'
default['netatalk']['share_base_owner'] = 'root'
default['netatalk']['share_base_group'] = 'root'
default['netatalk']['afpd_options'] = nil
default['netatalk']['package_recipe'] = nil
# need the backported package for older Debian/Ubuntu
case node['platform']
when 'debian'
default['netatalk']['package_recipe'] = 'backport_deb'
when 'ubuntu'
if node['platform_version'].to_f < 11.10
default['netatalk']['package_recipe'] = 'backport_deb'
end
when 'arch'
default['netatalk']['package_recipe'] = 'build_aur'
end
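# Illustrative override for a wrapper cookbook or role (the share line syntax
# is whatever your AppleVolumes.default expects; this entry is a made-up
# example):
#
#   default['netatalk']['share_base'] = '/srv/afp_share'
#   default['netatalk']['shares'] = ['/srv/afp_share/media "Media"']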
|
jtimberman/netatalk
|
recipes/build_aur.rb
|
#
# Cookbook Name:: netatalk
# Recipe:: build_aur
#
# Copyright 2010, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe "build-essential"
%w{ libcups avahi }.each do |pkg|
package pkg
end
pacman_aur "netatalk" do
pkgbuild_src "PKGBUILD"
action [:build,:install]
end
# cnid is a separate service on Arch.
service "cnid" do
action [:enable,:start]
end
|
jtimberman/netatalk
|
recipes/default.rb
|
#
# Cookbook Name:: netatalk
# Recipe:: default
#
# Copyright 2009-2012, Opscode
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if node['netatalk']['package_recipe']
include_recipe "netatalk::#{node['netatalk']['package_recipe']}"
end
package 'netatalk' if platform_family?('debian') || platform_family?('fedora') || platform_family?('rhel')
package 'cracklib' do
package_name value_for_platform_family(
'arch' => 'cracklib',
'fedora' => 'cracklib',
'rhel' => 'cracklib',
'default' => 'libpam-cracklib'
)
end
directory node['netatalk']['share_base'] do
owner node['netatalk']['share_base_owner']
group node['netatalk']['share_base_group']
mode 02775
end
['afpd.conf', 'AppleVolumes.default'].each do |conf|
template "/etc/netatalk/#{conf}" do
source "#{conf}.erb"
mode 00644
owner "root"
group "root"
variables :shares => node['netatalk']['shares']
notifies :restart, 'service[netatalk]'
end
end
service 'netatalk' do
supports :restart => true
case node['platform_family']
when 'debian'
pattern '/usr/sbin/(afpd|cnid_metad)'
when 'arch'
service_name 'afpd'
end
action [:enable, :start]
end
|
kastner/hasher
|
test/spec_hasher.rb
|
# require 'test/spec'
#
# describe "Hasher" do
# it "should hash 17614 to 1 with a hash size of 90917" do
# h = Hasher.new(90917)
# h.hash(17614).should == 1
# end
# end
|
kastner/hasher
|
features/steps/env.rb
|
require File.expand_path(File.dirname(__FILE__) + '/../../lib/hasher.rb')
require 'cucumber'
require 'test/unit/assertions'
require 'test/spec'
World(Test::Unit::Assertions)
|
kastner/hasher
|
features/steps/hasher_steps.rb
|
require 'cucumber/formatter/unicode'
Given /^a hash container size of (\d+)$/ do |size|
@hasher = Hasher.new(size.to_i)
end
When /^the method hash is invoked with (.+?)$/ do |string|
@output = @hasher.hash(string)
end
Then /^the number (\d+) is returned$/ do |result|
@output.should == result.to_i
end
Given /^a hash container with a non\-prime size$/ do
@size = 55
end
Then /^a (.+?) error should be raised$/ do |error|
klass, exception = error.split(/::/)
klass = Object.const_get(klass)
lambda { Hasher.new(@size) }.should.raise(klass.const_get(exception))
end
|
kastner/hasher
|
lib/hasher.rb
|
class Hasher
def initialize(size)
@size = size
end
def hash(input, length=input.length)
hash = 0
length.times do |i|
# shift-and-xor rolling hash; getbyte keeps this working on Ruby >= 1.9,
# where String#[] returns a one-character string instead of a byte value
new_hash = hash << 0x7 ^ input.getbyte(i)
overflow = hash >> 0x15 & 0x1fc
hash = new_hash ^ overflow
end
hash &= 0x7fffffff
hash % @size
end
class NonPrimeContainer < StandardError; end
end
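# Usage sketch, mirroring the cucumber steps in features/steps/hasher_steps.rb:
#
#   h = Hasher.new(90917)
#   h.hash("some key") # => an Integer bucket index in 0...90917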
|
pocheptsov/packable
|
lib/packable/extensions/integer.rb
|
module Packable
module Extensions #:nodoc:
module Integer #:nodoc:
NEEDS_REVERSAL = Hash.new{|h, endian| raise ArgumentError, "Endian #{endian} is not valid. It must be one of #{h.keys.join(', ')}"}.
merge!(:little => true, :big => false, :network => false, :native => "*\x00\x00\x00".unpack('L').first == 42).freeze
def self.included(base)
base.class_eval do
include Packable
extend ClassMethods
packers do |p|
p.set :merge_all , :bytes=>4, :signed=>true, :endian=>:big
p.set :default , :long
p.set :long , {}
p.set :short , :bytes=>2
p.set :char , :bytes=>1, :signed=>false
p.set :byte , :bytes=>1
p.set :unsigned_long , :bytes=>4, :signed=>false
p.set :unsigned_short , :bytes=>2, :signed=>false
end
end
end
def write_packed(io, options)
val = self
chars = (0...options[:bytes]).collect do
byte = val & 0xFF
val >>= 8
byte.chr
end
chars.reverse! unless NEEDS_REVERSAL[options[:endian]]
io << chars.join
end
module ClassMethods #:nodoc:
def unpack_string(s,options)
s = s.reverse if NEEDS_REVERSAL[options[:endian]]
r = 0
s.each_byte {|b| r = (r << 8) + b}
r -= 1 << (8 * options[:bytes]) if options[:signed] && (1 == r >> (8 * options[:bytes] - 1))
r
end
end
end
end
end
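# Rough byte-level sketch of write_packed (assumes the extension has been
# included into Integer, as the packers block above implies; StringIO stands
# in for any IO):
#
#   require 'stringio'
#   io = StringIO.new
#   258.write_packed(io, :bytes => 2, :signed => false, :endian => :big)
#   io.string.bytes # => [1, 2]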
|
pocheptsov/packable
|
lib/packable/version.rb
|
module Packable
VERSION = "1.3.9"
end
|
dropheaven/bookery
|
db/migrate/20190305111956_change_release_date_to_release_year_in_books.rb
|
class ChangeReleaseDateToReleaseYearInBooks < ActiveRecord::Migration[5.2]
def change
change_column :books, :release_date, :integer
rename_column :books, :release_date, :release_year
end
end
|
dropheaven/bookery
|
app/controllers/books_controller.rb
|
class BooksController < ApplicationController
before_action :set_book, only: [:show, :edit, :update, :destroy]
before_action :sign_in_to_access, only: [:new, :edit, :destroy]
def index
# raise params.inspect
if params[:author_id]
@books = Author.find(params[:author_id]).books
else
@books = Book.all
@latest_book = Book.latest.first
end
end
def show
@author = Author.find(params[:author_id])
if @author.id != @book.author_id
redirect_to root_path
flash[:notice] = "Could not find that page!"
else
respond_to do |format|
format.html
format.json { render json: @book }
end
end
end
def new
@book = Book.new(author_id: params[:author_id])
end
def create
@book = Book.new(book_params)
if @book.save
redirect_to author_book_path(@book.author_id, @book)
else
render :new
end
end
def edit
end
def update
if @book.update(book_params)
flash[:notice] = "Book updated!"
redirect_to author_book_path(@book.author, @book)
else
render :edit
end
end
def destroy
@book.destroy
flash[:notice] = "#{@book.title} has been deleted!"
redirect_to root_path
end
private
def book_params
params.require(:book).permit(:title, :release_year, :author_name, :genre_name)
end
def set_book
@book = Book.find(params[:id])
end
end
|
dropheaven/bookery
|
app/controllers/users_controller.rb
|
class UsersController < ApplicationController
before_action :set_user, only: [:show, :edit, :update, :destroy]
def index
end
def show
end
def new
@user = User.new
end
def create
@user = User.new(user_params)
if @user.save
log_in @user
flash[:success] = "Registered successfully. You are now logged in #{@user.username}"
redirect_to root_path
else
flash.now[:danger] = "There was a problem with your entry"
render :new
end
end
def edit
end
def update
if @user.update(user_params)
redirect_to user_path(@user)
else
render :edit
end
end
def destroy
end
private
def user_params
params.require(:user).permit(:username, :email, :password, :password_confirmation)
end
def set_user
@user = User.find(params[:id])
end
end
|
dropheaven/bookery
|
app/controllers/application_controller.rb
|
class ApplicationController < ActionController::Base
include SessionsHelper
private
def sign_in_to_access
if !logged_in?
flash[:notice] = "Sign in to complete this request"
redirect_to login_path
end
end
end
|
dropheaven/bookery
|
app/serializers/book_author_serializer.rb
|
class BookAuthorSerializer < ActiveModel::Serializer
attributes :full_name, :bio
end
|
dropheaven/bookery
|
app/models/user.rb
|
class User < ApplicationRecord
has_secure_password
has_many :comments
has_many :books, -> { distinct }, through: :comments
validates :username, presence: true, length: { in: 2..20 }, uniqueness: { case_sensitive: false }
validates :email, presence: true, length: { maximum: 255 }, uniqueness: { case_sensitive: false },
format: { with: /\A[\w+\-.]+@[a-z\d\-]+(\.[a-z\d\-]+)*\.[a-z]+\z/i }
validates :password, presence: true, length: { minimum: 6 }
end
|
dropheaven/bookery
|
app/models/author.rb
|
class Author < ApplicationRecord
has_many :books
has_many :genres, -> { distinct }, through: :books
before_validation :downcase_name
validates :full_name, presence: true, uniqueness: true
private
def downcase_name
full_name.downcase! if full_name
end
end
|
dropheaven/bookery
|
app/serializers/book_comment_serializer.rb
|
class BookCommentSerializer < ActiveModel::Serializer
attributes :username, :content, :posted_at
end
|
dropheaven/bookery
|
app/controllers/authors_controller.rb
|
class AuthorsController < ApplicationController
before_action :set_author, only: [:edit, :update, :destroy]
def index
@authors = Author.all
end
def show
@author = Author.find(params[:id])
render json: @author, include: 'books.comments'
end
def new
end
def create
end
def edit
end
def update
end
def destroy
end
private
def author_params
params.require(:author).permit(:first_name, :middle_name, :last_name)
end
def set_author
@author = Author.find(params[:id])
end
end
|
dropheaven/bookery
|
app/serializers/book_serializer.rb
|
class BookSerializer < ActiveModel::Serializer
attributes :id, :title, :release_year, :author_name, :genre_name
belongs_to :author, serializer: BookAuthorSerializer
belongs_to :genre, serializer: BookGenreSerializer
has_many :comments, serializer: BookCommentSerializer
end
|
dropheaven/bookery
|
app/helpers/authors_helper.rb
|
module AuthorsHelper
def titleize(author)
author.full_name.titleize
end
end
|
dropheaven/bookery
|
app/models/comment.rb
|
class Comment < ApplicationRecord
belongs_to :user
belongs_to :book
validates :content, presence: true
def username
User.find(user_id).username
end
def posted_at
created_at.strftime("%A, %b %e, at %l:%M %p")
end
end
|
dropheaven/bookery
|
app/serializers/author_serializer.rb
|
class AuthorSerializer < ActiveModel::Serializer
attributes :id, :full_name, :bio
has_many :books
end
|
dropheaven/bookery
|
app/controllers/comments_controller.rb
|
class CommentsController < ApplicationController
before_action :sign_in_to_access, only: [:create]
def create
book = Book.find(params[:comment][:book_id])
comment = book.comments.build(content: params[:comment][:content], user: current_user)
if comment.save
render json: book, status: 201
else
redirect_to author_book_path(book.author, book)
end
end
private
def comment_params
params.require(:comment).permit(:content, :id)
end
end
|
dropheaven/bookery
|
db/seeds.rb
|
################################## AUTHORS #########################################
nk_jemisin = Author.create(full_name: "<NAME>", bio: "<NAME> is an American science fiction and fantasy writer and a psychologist. Her fiction explores a wide variety of themes, including cultural conflict and oppression. She has won several awards for her work, including the Locus Award.")
yuval = Author.create(full_name: "<NAME>", bio: "<NAME> is an Israeli historian and a tenured professor in the Department of History at the Hebrew University of Jerusalem. He is the author of the international bestsellers Sapiens: A Brief History of Humankind, Homo Deus: A Brief History of Tomorrow, and 21 Lessons for the 21st Century")
james_clear = Author.create(full_name: "<NAME>")
patrick_rothfuss = Author.create(full_name: "<NAME>", bio: "<NAME> is an American writer of epic fantasy. He is best known for his series The Kingkiller Chronicle, which won him several awards, including the 2007 Quill Award for his debut novel, The Name of the Wind. Its sequel, The Wise Man's Fear, topped The New York Times Best Seller list")
dan_brown = Author.create(full_name: "<NAME>", bio: "<NAME> is an American author most well known for his thriller novels, including the Robert Langdon stories, Angels & Demons, The Da Vinci Code, The Lost Symbol, Inferno and Origin.")
################################## GENRES #########################################
non_fiction = Genre.create(name: "Non fiction")
self_dev = Genre.create(name: "Self Development")
fantasy = Genre.create(name: "Fantasy")
thriller = Genre.create(name: "Thriller")
################################## BOOKS #########################################
nk_jemisin.books.create(title: "The Killing Moon", release_year: 2012, genre: fantasy)
nk_jemisin.books.create(title: "The Fifth Season", release_year: 2015, genre: fantasy)
nk_jemisin.books.create(title: "The Obelisk Gate", release_year: 2016, genre: fantasy)
nk_jemisin.books.create(title: "The Stone Sky", release_year: 2017, genre: fantasy)
yuval.books.create(title: "Sapiens", release_year: 2011, genre: non_fiction)
yuval.books.create(title: "Homo Deus", release_year: 2015, genre: non_fiction)
yuval.books.create(title: "21 Lessons for the 21st Century", release_year: 2018, genre: non_fiction)
james_clear.books.create(title: "Atomic Habits", release_year: 2018, genre: self_dev)
patrick_rothfuss.books.create(title: "The Name of the Wind", release_year: 2007, genre: fantasy)
patrick_rothfuss.books.create(title: "The Wise Man's Fear", release_year: 2011, genre: fantasy)
patrick_rothfuss.books.create(title: "The Slow Regard of Silent Things", release_year: 2014, genre: fantasy)
dan_brown.books.create(title: "Origin", release_year: 2017, genre: thriller)
dan_brown.books.create(title: "The Da Vinci Code", release_year: 2003, genre: thriller)
dan_brown.books.create(title: "Angels & Demons", release_year: 2000, genre: thriller)
dan_brown.books.create(title: "The Lost Symbol", release_year: 2009, genre: thriller)
################################## USERS #########################################
saad = User.create(username: "saad", email: "<EMAIL>", password: "password")
maha = User.create(username: "maha", email: "<EMAIL>", password: "password")
james = User.create(username: "james", email: "<EMAIL>", password: "password")
lisa = User.create(username: "lisa", email: "<EMAIL>", password: "password")
zahran = User.create(username: "zahran", email: "<EMAIL>", password: "password")
ayaan = User.create(username: "ayaan", email: "<EMAIL>", password: "password")
anaya = User.create(username: "anaya", email: "<EMAIL>", password: "password")
################################## COMMENTS #########################################
Comment.create(user_id: 1, book_id: 1, content: "loved this!")
Comment.create(user_id: 2, book_id: 1, content: "want to read!")
Comment.create(user_id: 3, book_id: 1, content: "fav")
Comment.create(user_id: 4, book_id: 1, content: "need to buy this")
Comment.create(user_id: 4, book_id: 2, content: "not a fan")
Comment.create(user_id: 5, book_id: 3, content: "highly recommend!")
Comment.create(user_id: 5, book_id: 4, content: "love her work")
Comment.create(user_id: 6, book_id: 5, content: "a must read!")
Comment.create(user_id: 6, book_id: 6, content: ":O")
Comment.create(user_id: 4, book_id: 8, content: "so helpful")
Comment.create(user_id: 5, book_id: 8, content: "yes^^")
Comment.create(user_id: 1, book_id: 12, content: "love dan brown")
Comment.create(user_id: 5, book_id: 13, content: "woowwo")
Comment.create(user_id: 6, book_id: 14, content: "will share with friends")
|
dropheaven/bookery
|
app/models/book.rb
|
class Book < ApplicationRecord
belongs_to :author
belongs_to :genre
has_many :comments
has_many :users, -> { distinct }, through: :comments
before_validation :make_title_case
validates :title, presence: true, uniqueness: { scope: :author_id, message: "already exists" }
scope :latest, -> { order("created_at desc").limit(1) }
scope :alpha, -> { order("title asc") }
def genre_name=(name)
genre = Genre.find_or_create_by(name: name.downcase)
self.genre = genre
end
def genre_name
self.genre ? self.genre.name : nil
end
def author_name=(full_name)
author = Author.find_or_create_by(full_name: full_name)
self.author = author
end
def author_name
self.author ? self.author.full_name : nil
end
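# Illustrative usage of the virtual attributes above (assumed example, not from the original file):
# Book.create(title: "some title", author_name: "Jane Doe", genre_name: "Fantasy")
# finds or creates the Author and Genre (genre names are stored downcased) and assigns both
# associations before the book is saved.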
private
def make_title_case
# downcase first to avoid cases like "whaT IS this".titlecase => "Wha T Is This"
self.title = self.title.downcase.titlecase if self.title.present?
end
end
|
dropheaven/bookery
|
config/routes.rb
|
Rails.application.routes.draw do
root 'books#index'
resources :books, only: [:new, :create, :update, :destroy]
resources :authors, only: [:index, :show] do
resources :books, only: [:index, :show, :new, :edit]
end
get '/login', to: 'sessions#new'
post '/login', to: 'sessions#create'
delete '/logout', to: 'sessions#destroy'
get '/auth/github/callback', to: 'sessions#create'
resources :users
resources :comments, only: :create
resources :genres, only: :show
end
|
dropheaven/bookery
|
db/migrate/20190304011052_remove_name_from_authors.rb
|
class RemoveNameFromAuthors < ActiveRecord::Migration[5.2]
def change
remove_column :authors, :first_name, :string
remove_column :authors, :middle_name, :string
remove_column :authors, :last_name, :string
add_column :authors, :full_name, :string
end
end
|
dropheaven/bookery
|
app/models/genre.rb
|
class Genre < ApplicationRecord
has_many :books
has_many :authors, -> { distinct }, through: :books
before_validation :make_downcase
validates :name, presence: true, uniqueness: true
private
def make_downcase
self.name = name.downcase if name.present?
end
end
|
Milstein/spinone
|
app/models/concerns/authorable.rb
|
module Authorable
extend ActiveSupport::Concern
require "namae"
included do
def get_one_author(author)
name = cleanup_author(author)
if is_personal_name?(name)
names = Namae.parse(name)
parsed_name = names.first
if parsed_name.present?
given_name = parsed_name.given
family_name = parsed_name.family
else
given_name = nil
family_name = nil
end
if (given_name.present? || family_name.present?)
{ "given" => given_name,
"family" => family_name }.compact
else
{ "name" => name }.compact
end
else
{ "literal" => name }.compact
end
end
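# Illustrative (assumed examples, not from the original file; parsing depends on the namae gem):
# get_one_author("Doe J.") #=> { "given" => "J.", "family" => "Doe" } (cleanup_author turns it into "Doe, J.")
# get_one_author("Project THOR") #=> { "literal" => "Project THOR" } (not comma-delimited, so treated as a literal)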
def cleanup_author(author)
# detect trailing initials, e.g. "Ball M." or "Ball M.-P.", and convert them to the comma form "Ball, M." / "Ball, M.-P." (skipped when a comma is already present)
author = author.gsub(/[[:space:]]([A-Z]\.)?(-?[A-Z]\.)$/, ', \1\2') unless author.include?(",")
# remove spaces around hyphens
author = author.gsub(" - ", "-")
# titleize strings
# remove non-standard space characters
author.my_titleize.gsub(/[[:space:]]/, ' ')
end
def is_personal_name?(author)
author.include?(",") && author.exclude?(";")
end
# parse array of author strings into CSL format
def get_authors(authors)
Array.wrap(authors).map { |author| get_one_author(author) }
end
end
end
|
Milstein/spinone
|
app/controllers/milestones_controller.rb
|
class MilestonesController < ApplicationController
def index
@milestones = Milestone.where(params.merge(github_token: ENV['GITHUB_PERSONAL_ACCESS_TOKEN']))
options = {}
options[:meta] = @milestones[:meta]
@milestones = @milestones[:data]
render json: MilestoneSerializer.new(@milestones, options).serialized_json, status: :ok
end
def show
@milestone = Milestone.where({ id: params[:id] }.merge(github_token: ENV['GITHUB_PERSONAL_ACCESS_TOKEN']))
fail AbstractController::ActionNotFound unless @milestone.present?
@milestone = @milestone[:data]
render json: MilestoneSerializer.new(@milestone).serialized_json, status: :ok
end
end
|
Milstein/spinone
|
app/serializers/user_story_serializer.rb
|
class UserStorySerializer
include FastJsonapi::ObjectSerializer
set_key_transform :dash
set_type "user-stories"
attributes :id, :url, :title, :description, :comments, :projects, :stakeholders, :state, :inactive, :milestone, :created, :updated, :closed
end
|
Milstein/spinone
|
app/controllers/datasets_controller.rb
|
class DatasetsController < ApplicationController
before_action :set_include
def set_include
if params[:include].present?
@include = params[:include].split(",").map { |i| i.downcase.underscore }.join(",")
@include = [@include]
else
@include = nil
end
end
def index
@works = Work.where(params)
options = {}
options[:meta] = @works[:meta]
options[:include] = @include
@works = @works[:data]
render json: DatasetSerializer.new(@works, options).serialized_json, status: :ok
end
def show
@work = Work.where(id: params[:id])
fail AbstractController::ActionNotFound unless @work.present?
options = {}
options[:include] = @include
@work = @work[:data]
render json: DatasetSerializer.new(@work, options).serialized_json, status: :ok
end
end
|
Milstein/spinone
|
vendor/middleman/config.ru
|
require 'middleman-core/load_paths'
::Middleman.setup_load_paths
require 'middleman-core'
require 'middleman-core/rack'
require 'fileutils'
app = ::Middleman::Application.new
run ::Middleman::Rack.new(app).to_app
|
Milstein/spinone
|
spec/models/user_story_spec.rb
|
require 'rails_helper'
describe UserStory, type: :model, vcr: true do
it "get_total" do
total = UserStory.get_total
expect(total).to eq(51)
end
it "user_stories" do
user_stories = UserStory.all[:data]
expect(user_stories.size).to eq(25)
user_story = user_stories.first
expect(user_story.title).to eq("Add labels for prefixes")
end
it "user_story" do
user_story = UserStory.where(id: "59")[:data]
expect(user_story.title).to eq("Automatic DOI suffix generation")
expect(user_story.description).to start_with("<p>As a data center, I want the option")
expect(user_story.milestone).to eq("Develop new DOI Fabrica service")
expect(user_story.projects).to eq(["DOI Fabrica"])
expect(user_story.stakeholders).to eq(["data center"])
expect(user_story.state).to eq("done")
end
end
|
Milstein/spinone
|
app/controllers/index_controller.rb
|
class IndexController < ApplicationController
def index
meta = { meta: { name: ENV['SITE_TITLE'] }}.to_json
render json: meta
end
def routing_error
fail AbstractController::ActionNotFound
end
end
|
Milstein/spinone
|
spec/rails_helper.rb
|
# set ENV variables for testing
ENV["RAILS_ENV"] = "test"
# set up Code Climate
require 'simplecov'
SimpleCov.start
require File.expand_path('../../config/environment', __FILE__)
Dir[Rails.root.join('spec/support/**/*.rb')].each { |f| require f }
# only load rspec modules that don't use ActiveRecord
require 'rspec/rails/view_rendering'
require 'rspec/rails/matchers'
require 'rspec/rails/file_fixture_support'
require 'rspec/rails/fixture_file_upload_support'
require "shoulda-matchers"
require "webmock/rspec"
require "rack/test"
require "colorize"
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[Rails.root.join("spec/support/**/*.rb")].each {|f| require f}
WebMock.disable_net_connect!(
allow: ['codeclimate.com:443', ENV['PRIVATE_IP'], ENV['HOSTNAME']],
allow_localhost: true
)
VCR.configure do |c|
c.cassette_library_dir = "spec/fixtures/vcr_cassettes"
c.hook_into :webmock
c.ignore_localhost = true
c.ignore_hosts "codeclimate.com"
c.filter_sensitive_data("<GITHUB_PERSONAL_ACCESS_TOKEN>") { ENV["GITHUB_PERSONAL_ACCESS_TOKEN"] }
c.configure_rspec_metadata!
end
RSpec.configure do |config|
# config.include WebMock::API
config.include Rack::Test::Methods, :type => :request
# add custom json method
config.include RequestSpecHelper, type: :request
def app
Rails.application
end
end
|
Milstein/spinone
|
lib/custom_error.rb
|
module CustomError
# source is either inactive or disabled
class AgentInactiveError < StandardError; end
# more requests than rate-limits allow
class TooManyRequestsError < StandardError; end
# we have received too many errors (and will disable the source)
class TooManyErrorsBySourceError < StandardError; end
# we don't have enough available workers for this source
class NotEnoughWorkersError < StandardError; end
# something went wrong with Active Job
class ActiveJobError < StandardError; end
# the API returned an unexpected or malformed response
class ApiResponseError < StandardError; end
end
|
Milstein/spinone
|
app/serializers/milestone_serializer.rb
|
class MilestoneSerializer
include FastJsonapi::ObjectSerializer
set_key_transform :dash
attributes :id, :url, :title, :description, :open_issues, :closed_issues, :year, :quarter, :created, :updated, :closed, :released
end
|
Milstein/spinone
|
app/models/user_story.rb
|
class UserStory < Base
attr_reader :id, :url, :title, :description, :state, :milestone, :comments, :projects, :stakeholders, :inactive, :created, :updated, :closed
LABEL_COLORS = {
"project" => "006b75",
"stakeholder" => "f9cfb9",
"state" => "ededed",
"inactive" => "c8d1da"
}
def initialize(attributes, options={})
@id = attributes.fetch("number", nil)
@title = attributes.fetch("title", nil)
@description = attributes.fetch("body", nil)
@description = GitHub::Markdown.render_gfm(@description) if @description.present?
state = attributes.fetch("state", nil) == "closed" ? "done" : "inbox"
@comments = attributes.fetch("comments", nil)
labels = Array.wrap(attributes.fetch("labels", nil))
.select { |l| l["name"] != "user story" }
@projects = labels
.select { |l| l["color"] == LABEL_COLORS["project"] }
.map { |l| l["name"] }
@stakeholders = labels
.select { |l| l["color"] == LABEL_COLORS["stakeholder"] }
.map { |l| l["name"] }
@state = labels
.select { |l| l["color"] == LABEL_COLORS["state"] }
.map { |l| l["name"] }.first || state
@inactive = labels
.select { |l| l["color"] == LABEL_COLORS["inactive"] }
.map { |l| l["name"] }
@milestone = attributes.dig("milestone", "title")
@created = attributes.fetch("created_at", nil)
@updated = attributes.fetch("updated_at", nil)
@closed = attributes.fetch("closed_at", nil)
end
def self.get_query_url(options={})
if options[:id].present?
"#{ENV["GITHUB_MILESTONES_URL"]}/issues/#{options[:id]}?github_token=#{ENV['GITHUB_PERSONAL_ACCESS_TOKEN']}"
else
label = ["user story", options[:project], options[:stakeholder]].compact
.map { |l| "label:\"#{l}\"" }.join(" ")
milestone = [options[:milestone]].compact
.map { |m| "milestone:\"#{m}\"" }.first
if options[:state] == "done"
state = "state:closed"
elsif options[:state] == "open"
state = "state:open"
elsif options[:state] == "inbox"
state = "state:open -label:discussion -label:planning -label:ready -label:\"in progress\" -label:\"needs review\""
else
state = [options[:state]].compact
.map { |m| "label:\"#{m}\"" }.first
end
params = { q: ["repo:datacite/datacite", label, milestone, state, options[:query]].compact.join(" "),
page: options[:page] || 1,
per_page: options[:per_page] || 100,
sort: "created",
order: "desc",
github_token: ENV['GITHUB_PERSONAL_ACCESS_TOKEN'] }.compact
url + "?" + URI.encode_www_form(params)
end
end
def self.get_total(options={})
query_url = get_query_url(options.merge(per_page: 0))
result = Maremma.get(query_url, options)
result.body.dig("data", "total_count").to_i
end
def self.get_data(options={})
if options[:id].present?
query_url = get_query_url(options)
Maremma.get(query_url, options)
else
total = get_total(options)
data = []
if total > 0
# walk through paginated results
total_pages = (total.to_f / 100).ceil
(1..total_pages).each do |page|
options[:page][:number] = page
query_url = get_query_url(options)
result = Maremma.get(query_url, options)
data += (result.body.dig("data", "items") || [])
end
end
{ data: data, total: total }
end
end
def self.parse_data(result, options={})
return nil if result.blank? || result['errors']
if options[:id].present?
item = result.body.fetch("data", {})
return {} unless item.present?
{ data: parse_item(item) }
else
data = parse_items(result.fetch(:data, []))
meta = { total: result[:total],
projects: parse_meta(data, "projects"),
milestones: parse_meta(data, "milestone"),
stakeholders: parse_meta(data, "stakeholders"),
state: parse_meta(data, "state") }
page = (options.dig(:page, :number) || 1).to_i
per_page = (options.dig(:page, :size) || 25).to_i
offset = (page - 1) * per_page
data = data[offset...offset + per_page] || []
{ data: data, meta: meta }
end
end
def self.parse_meta(items, label)
it = items.reduce({}) do |sum, i|
Array.wrap(i.send(label)).each { |tag| sum[tag] = sum[tag].to_i + 1 }
sum
end
if label == "state"
{ "inbox" => it["inbox"].to_i,
"discussion" => it["discussion"].to_i,
"planning" => it["planning"].to_i,
"ready" => it["ready"].to_i,
"needs review" => it["needs review"].to_i,
"done" => it["done"].to_i }.select { |k,v| v > 0 }
else
it.sort_by {|_key, value| -value}[0..10].to_h
end
end
def url
"#{ENV["GITHUB_ISSUES_REPO_URL"]}/issues/#{id}"
end
def self.url
"https://api.github.com/search/issues"
end
def is_closed?
state == "closed"
end
end
|
Milstein/spinone
|
spec/concerns/authorable_spec.rb
|
require 'rails_helper'
describe Work do
describe "get_one_author" do
it 'should handle author in comma-delimited format' do
author = "<NAME>."
result = subject.get_one_author(author)
expect(result).to eq("given"=>"<NAME>.", "family"=>"Zaranek")
end
it 'should handle authors with incomplete names' do
author = "Zaranek"
result = subject.get_one_author(author)
expect(result).to eq("literal"=>"Zaranek")
end
it 'should ignore names that are not comma-delimited' do
author = "Zaranek University"
result = subject.get_one_author(author)
expect(result).to eq("literal"=>"Zaranek University")
end
end
describe "cleanup_author" do
it 'should titleize names' do
author = "<NAME>"
result = subject.cleanup_author(author)
expect(result).to eq("<NAME>")
end
it 'should handle hyphens in names' do
author = "<NAME>. ball-regiers"
result = subject.cleanup_author(author)
expect(result).to eq("<NAME>")
end
it 'should handle camel case in names' do
author = "<NAME>"
result = subject.cleanup_author(author)
expect(result).to eq("<NAME>")
end
it 'should detect name initials' do
author = "Ball M.P."
result = subject.cleanup_author(author)
expect(result).to eq("Ball, M.P.")
end
it 'should detect name initials with hypen' do
author = "Ball M.-P."
result = subject.cleanup_author(author)
expect(result).to eq("Ball, M.-P.")
end
it 'should handle special whitespace characters in names' do
author = "Pampel, Heinz"
result = subject.cleanup_author(author)
expect(result).to eq("<NAME>")
end
end
describe "is_personal_name?" do
it 'should detect personal name' do
author = "Zaranek, <NAME>."
result = subject.is_personal_name?(author)
expect(result).to be true
end
it 'should detect institution name' do
author = "Project THOR"
result = subject.is_personal_name?(author)
expect(result).to be false
end
end
end
|
Milstein/spinone
|
app/models/data_center.rb
|
class DataCenter < Base
attr_reader :id, :name, :prefixes, :member_id, :ids, :year, :created, :updated, :member, :cache_key
# include helper module for caching infrequently changing resources
include Cacheable
def initialize(item, options={})
attributes = item.fetch('attributes', {})
@id = item.fetch("id", nil).downcase
@name = attributes.fetch("name", nil)
@year = attributes.fetch("year", nil)
@created = attributes.fetch("created", nil)
@updated = attributes.fetch("updated", nil)
@prefixes = attributes.fetch("prefixes", [])
@member_id = @id.split('.', 2).first
@member_id = @member_id.downcase if @member_id.present?
@cache_key = "data-center/#{@id}-#{@updated}"
end
alias_attribute :title, :name
# associations
def member
cached_member_response(member_id.to_s.upcase)
end
def self.get_query_url(options={})
if options[:id].present?
"#{url}/#{options[:id]}"
else
params = { query: options.fetch(:query, nil),
ids: options.fetch(:ids, nil),
year: options.fetch(:year, nil),
"provider-id": options.fetch(:member_id, nil),
"page[size]" => options.dig(:page, :size),
"page[number]" => options.dig(:page, :number),
include: "provider" }.compact
url + "?" + URI.encode_www_form(params)
end
end
def self.parse_data(result, options={})
return nil if result.body.blank? || result.body['errors']
if options[:id].present?
item = result.body.fetch("data", {})
return nil unless item.present?
{ data: parse_item(item) }
else
items = result.body.fetch("data", [])
meta = result.body.fetch("meta", {})
{ data: parse_items(items), meta: meta }
end
end
def self.url
"#{ENV["API_URL"]}/clients"
end
end
|
Milstein/spinone
|
app/serializers/dataset_serializer.rb
|
class DatasetSerializer
include FastJsonapi::ObjectSerializer
set_key_transform :dash
attributes :identifiers, :title, :types, :creators, :dates, :container_title, :description
set_type :dats
end
|
Milstein/spinone
|
app/serializers/resource_type_serializer.rb
|
class ResourceTypeSerializer
include FastJsonapi::ObjectSerializer
set_key_transform :dash
set_type "resource-types"
cache_options enabled: true, cache_length: 1.month
attributes :title, :updated
end
|
Milstein/spinone
|
app/models/work.rb
|
class Work < Base
attr_reader :id, :doi, :identifier, :cache_key, :url, :author, :title, :container_title, :description, :resource_type_subtype, :data_center_id, :member_id, :resource_type_id, :data_center, :member, :resource_type, :license, :version, :results, :related_identifiers, :schema_version, :xml, :media, :checked, :published, :registered, :updated
# include author methods
include Authorable
# include helper module for extracting identifier
include Identifiable
# include metadata helper methods
include Metadatable
# include helper module for caching infrequently changing resources
include Cacheable
# include helper module for date calculations
include Dateable
def initialize(attributes={}, options={})
@doi = attributes.fetch("doi", "").downcase.presence
@identifier = attributes.fetch("id", nil).presence || doi_as_url(attributes.fetch("doi", nil))
@id = @identifier
@xml = attributes.fetch('xml', "PGhzaD48L2hzaD4=\n")
@media = attributes.fetch('media', nil)
@media = @media.map { |m| { media_type: m.split(":", 2).first, url: m.split(":", 2).last }} if @media.present?
@author = get_authors(attributes.fetch("creator", nil))
@url = attributes.fetch("url", nil)
@title = ActionController::Base.helpers.sanitize(attributes.fetch("title", []).first, tags: %w(strong em b i code pre sub sup br))
@container_title = attributes.fetch("publisher", nil)
@description = ActionController::Base.helpers.sanitize(attributes.fetch("description", []).first, tags: %w(strong em b i code pre sub sup br)).presence || nil
@published = attributes.fetch("publicationYear", nil)
@registered = attributes.fetch("minted", nil)
@updated = attributes.fetch("updated", nil)
@checked = attributes.fetch("checked", nil)
@resource_type_subtype = attributes.fetch("resourceType", nil).presence || nil
@license = normalize_license(attributes.fetch("rightsURI", []))
@version = attributes.fetch("version", nil)
@schema_version = attributes.fetch("schema_version", nil)
@related_identifiers = attributes.fetch('relatedIdentifier', [])
.select { |id| id =~ /:DOI:.+/ }
.map do |i|
relation_type, _related_identifier_type, related_identifier = i.split(':', 3)
{ "relation-type-id" => relation_type,
"related-identifier" => doi_as_url(related_identifier.upcase) }
end
@results = @related_identifiers.reduce({}) do |sum, i|
k = i["relation-type-id"]
v = sum[k].to_i + 1
sum[k] = v
sum
end.map { |k,v| { id: k, title: k.underscore.humanize, count: v } }
.sort { |a, b| b[:count] <=> a[:count] }
@data_center_id = attributes.fetch("datacentre_symbol", nil)
@data_center_id = @data_center_id.downcase if @data_center_id.present?
@member_id = attributes.fetch("allocator_symbol", nil)
@member_id = @member_id.downcase if @member_id.present?
@resource_type_id = attributes.fetch("resourceTypeGeneral", nil)
@resource_type_id = @resource_type_id.underscore.dasherize if @resource_type_id.present?
@cache_key = "work/#{@id}-#{@updated}"
end
# associations
def data_center
cached_data_center_response(data_center_id.to_s.upcase) if data_center_id.present?
end
def member
cached_member_response(member_id.to_s.upcase) if member_id.present?
end
def resource_type
cached_resource_type_response(resource_type_id) if resource_type_id.present?
end
def identifiers
[{ "identifier" => "doi:#{doi}",
"identifier-source" => "DataCite" }]
end
def types
[{ "information" => { "value" => resource_type } }]
end
def creators
author.map { |a| { "first-name" => a["given"], "last-name" => a["family"] } }
end
def dates
[{ "date" => published,
"type" => { "ontologyTermIRI" => "http://schema.datacite.org/meta/kernel-3.1/metadata.xsd", "value" => "publicationYear" }
},
{ "date" => registered,
"type" => { "ontologyTermIRI" => "http://schema.datacite.org/meta/kernel-3.1/metadata.xsd", "value" => "Issued" }
},
{ "date" => updated,
"type" => { "ontologyTermIRI" => "http://schema.datacite.org/meta/kernel-3.1/metadata.xsd", "value" => "Updated" }
}]
end
def self.get_query_url(options={})
if options[:id].present?
params = { q: "doi:#{options[:id]}",
wt: "json" }
elsif options[:work_id].present?
params = { q: "relatedIdentifier:#{options[:work_id]}",
fl: "doi,relatedIdentifier",
wt: "json" }
else
if options[:ids].present?
ids = options[:ids].split(",")[0..99]
options[:query] = options[:query].to_s + " " + ids.join(" ")
options[:rows] = ids.length
options[:sort] = "registered"
options[:mm] = 1
end
if options[:sample].present?
sort = Rails.env.test? ? "random_1234" : "random_#{rand(1...100000)}"
elsif options[:sort].present?
sort = case options[:sort]
when "registered" then "minted"
when "published" then "publicationYear"
when "updated" then "updated"
else "score"
end
else
sort = options[:query].present? ? "score" : "minted"
end
order = options[:order] == "asc" ? "asc" : "desc"
# grouping for sampling
group = nil
group_field = nil
group_ngroups = nil
group_format = nil
group_limit = nil
if options[:sample].present? && options[:sample_group].present?
group_field = case options[:sample_group]
when "client" then "datacentre_symbol"
when "data-center" then "datacentre_symbol"
when "provider" then "allocator_symbol"
when "resource-type" then "resourceTypeGeneral"
else nil
end
if group_field.present?
group = "true"
group_ngroups = "true"
group_format = "simple"
group_limit = (1..100).include?(options[:sample].to_i) ? options[:sample].to_i : 10
else
options.delete(:sample_group)
end
end
page = (options.dig(:page, :number) || 1).to_i
if options[:sample].present? && options[:sample_group].present?
samples_per_page = (1..100).include?(options[:sample].to_i) ? options[:sample].to_i : 10
per_page = options.dig(:page, :size).to_i * samples_per_page
per_page = (1..1000).include?(per_page) ? per_page : 1000
elsif options[:sample].present? && options[:sample_group].blank?
per_page = (1..100).include?(options[:sample].to_i) ? options[:sample].to_i : 10
else
per_page = options.dig(:page, :size) && (1..1000).include?(options.dig(:page, :size).to_i) ? options.dig(:page, :size).to_i : 25
end
offset = (page - 1) * per_page
created_date = options[:from_created_date].present? || options[:until_created_date].present?
created_date = get_solr_date_range(options[:from_created_date], options[:until_created_date]) if created_date
update_date = options[:from_update_date].present? || options[:until_update_date].present?
update_date = get_solr_date_range(options[:from_update_date], options[:until_update_date]) if update_date
registered = get_solr_date_range(options[:registered], options[:registered]) if options[:registered].present?
checked = "(checked:[* TO #{get_datetime_from_input(options[:checked])}] OR (*:* NOT checked:[* TO *]))" if options[:checked].present?
fq = %w(has_metadata:true is_active:true)
fq << "resourceTypeGeneral:#{options[:resource_type_id].underscore.camelize}" if options[:resource_type_id].present?
fq << "datacentre_symbol:#{options[:data_center_id].upcase}" if options[:data_center_id].present?
fq << "allocator_symbol:#{options[:member_id].upcase}" if options[:member_id].present?
fq << "nameIdentifier:ORCID\\:#{options[:person_id]}" if options[:person_id].present?
fq << "minted:#{created_date}" if created_date
fq << "updated:#{update_date}" if update_date
fq << "checked:#{checked}" if checked
fq << "minted:#{registered}" if registered
fq << "publicationYear:#{options[:year]}" if options[:year].present?
fq << "schema_version:#{options[:schema_version]}" if options[:schema_version].present?
if options[:url].present?
q = "url:#{options[:url]}"
elsif options[:query].present?
q = options[:query]
else
q = "*:*"
end
params = { q: q,
start: offset,
rows: per_page,
fl: "doi,url,title,creator,description,publisher,publicationYear,resourceType,resourceTypeGeneral,rightsURI,version,datacentre_symbol,allocator_symbol,schema_version,xml,media,minted,updated,checked",
fq: fq.join(" AND "),
facet: "true",
'facet.field' => %w(publicationYear datacentre_facet resourceType_facet schema_version minted),
'facet.limit' => 15,
'facet.mincount' => 1,
'facet.range' => 'minted',
'f.minted.facet.range.start' => '2004-01-01T00:00:00Z',
'f.minted.facet.range.end' => '2024-01-01T00:00:00Z',
'f.minted.facet.range.gap' => '+1YEAR',
group: group,
'group.field' => group_field,
'group.ngroups' => group_ngroups,
'group.format' => group_format,
'group.limit' => group_limit,
sort: "#{sort} #{order}",
defType: "edismax",
bq: "updated:[NOW/DAY-1YEAR TO NOW/DAY]",
mm: options[:mm],
wt: "json" }.compact
end
url + "?" + URI.encode_www_form(params)
end
def self.get_data(options={})
# sometimes don't query DataCite MDS
return {} if (options[:data_center_id].present? && options[:data_center_id].exclude?("."))
query_url = get_query_url(options)
Maremma.get(query_url, options)
end
def self.parse_data(result, options={})
return result if result['errors']
data = nil
if options[:id].present?
return nil if result.body.blank?
items = result.body.fetch("data", {}).fetch('response', {}).fetch('docs', [])
return nil if items.blank?
item = items.first
meta = result[:meta]
resource_type = nil
resource_type_id = item.fetch("resourceTypeGeneral", nil)
resource_type = ResourceType.where(id: resource_type_id.downcase.underscore.dasherize) if resource_type_id.present?
resource_type = resource_type[:data] if resource_type.present?
data_center = nil
data_center_id = item.fetch("datacentre_symbol", nil)
data_center = DataCenter.where(id: data_center_id.downcase) if data_center_id.present?
data_center = data_center[:data] if data_center.present?
data = parse_item(item)
{ data: data, meta: meta }
else
if options[:work_id].present?
return { data: [], meta: [] } if result.body.blank?
items = result.body.fetch("data", {}).fetch('response', {}).fetch('docs', [])
return { data: [], meta: [] } if items.blank?
item = items.first
related_doi_identifiers = item.fetch('relatedIdentifier', [])
.select { |id| id =~ /:DOI:.+/ }
.map { |i| i.split(':', 3).last.strip.upcase }
return { data: [], meta: [] } if related_doi_identifiers.blank?
options = options.except(:work_id)
query_url = get_query_url(options.merge(ids: related_doi_identifiers.join(",")))
result = Maremma.get(query_url, options)
end
# check for grouped samples
if result.body.dig("data", "grouped").present?
grouped = result.body.dig("data", "grouped")
items = grouped.values[0].dig("doclist", "docs") || []
total = grouped.values[0].fetch("ngroups", 0)
else
response = result.body.dig("data", "response")
items = response.fetch('docs', [])
total = response.fetch("numFound", 0)
end
facets = result.body.fetch("data", {}).fetch("facet_counts", {})
page = (options.dig(:page, :number) || 1).to_i
if options[:sample].present? && options[:sample_group].present?
samples_per_page = (1..100).include?(options[:sample].to_i) ? options[:sample].to_i : 10
per_page = options.dig(:page, :size).to_i * samples_per_page
per_page = (1..1000).include?(per_page) ? per_page : 1000
elsif options[:sample].present? && options[:sample_group].blank?
per_page = (1..100).include?(options[:sample].to_i) ? options[:sample].to_i : 10
end
if options.dig(:page, :size).present?
per_page = [options.dig(:page, :size).to_i, 1000].min
max_number = per_page > 0 ? 10000/per_page : 1
else
per_page = 25
max_number = 10000/per_page
end
page = page.to_i > 0 ? [page.to_i, max_number].min : 1
offset = (page - 1) * per_page
total_pages = (total.to_f / per_page).ceil
meta = parse_facet_counts(facets, options)
meta = meta.merge(total: total, "total-pages" => total_pages, page: page)
data_centers = facets.fetch("facet_fields", {}).fetch("datacentre_facet", [])
.each_slice(2)
.map do |p|
id, title = p.first.split(' - ', 2)
[DataCenter, { "id" => id, "title" => title }]
end
data_centers = Array(data_centers).map do |item|
parse_include(item.first, item.last)
end
data = parse_items(items)
{ data: data, meta: meta }
end
end
def self.parse_facet_counts(facets, options={})
resource_types = Array.wrap(facets.dig("facet_fields", "resourceType_facet"))
.each_slice(2)
.map { |k,v| { id: k.underscore.dasherize, title: k.underscore.humanize, count: v } }
registered = Array.wrap(facets.dig("facet_ranges", "minted", "counts"))
.each_slice(2)
.sort { |a, b| b.first <=> a.first }
.map { |i| { id: i[0][0..3], title: i[0][0..3], count: i[1] } }
data_centers = Array.wrap(facets.dig("facet_fields", "datacentre_facet"))
.each_slice(2)
.map do |p|
id, title = p.first.split(' - ', 2)
[id, p.last]
end.to_h
data_centers = get_data_center_facets(data_centers)
schema_versions = facets.fetch("facet_fields", {}).fetch("schema_version", [])
.each_slice(2)
.sort { |a, b| b.first <=> a.first }
.map { |i| { id: i[0], title: "Schema #{i[0]}", count: i[1] } }
if options[:data_center_id].present? && data_centers.empty?
dc = DataCenter.where(id: options[:data_center_id])
return [] unless dc[:data].present?
data_centers = [{ "id" => options[:data_center_id].upcase,
"title" => dc[:data].name,
"count" => 0 }]
end
{ "resource-types" => resource_types,
"registered" => registered,
"data-centers" => data_centers,
"schema-versions" => schema_versions }
end
def self.get_data_center_facets(data_centers, options={})
return [] unless data_centers.present?
response = DataCenter.where(ids: data_centers.keys.join(","))
response.fetch(:data, [])
.map { |p| { id: p.id.downcase, title: p.name, count: data_centers.fetch(p.id.upcase, 0) } }
.sort { |a, b| b[:count] <=> a[:count] }
end
def self.url
"#{ENV["SOLR_URL"]}"
end
# find Creative Commons or OSI license in rightsURI array
def normalize_license(licenses)
uri = licenses.map { |l| URI.parse(l) }.find { |l| l.host && l.host[/(creativecommons.org|opensource.org)$/] }
return nil unless uri.present?
# use HTTPS
uri.scheme = "https"
# use host name without subdomain
uri.host = Array(/(creativecommons.org|opensource.org)/.match uri.host).last
# normalize URLs
if uri.host == "creativecommons.org"
uri.path = uri.path.split('/')[0..-2].join("/") if uri.path.split('/').last == "legalcode"
uri.path << '/' unless uri.path.end_with?('/')
else
uri.path = uri.path.gsub(/(-license|\.php|\.html)/, '')
uri.path = uri.path.sub(/(mit|afl|apl|osl|gpl|ecl)/) { |match| match.upcase }
uri.path = uri.path.sub(/(artistic|apache)/) { |match| match.titleize }
uri.path = uri.path.sub(/([^0-9\-]+)(-)?([1-9])?(\.)?([0-9])?$/) do
m = Regexp.last_match
text = m[1]
if m[3].present?
version = [m[3], m[5].presence || "0"].join(".")
[text, version].join("-")
else
text
end
end
end
uri.to_s
rescue URI::InvalidURIError
nil
end
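# Illustrative (assumed examples, not from the original file):
# normalize_license(["http://creativecommons.org/licenses/by/4.0/legalcode"])
#   #=> "https://creativecommons.org/licenses/by/4.0/"
# normalize_license(["http://www.opensource.org/licenses/mit-license.php"])
#   #=> "https://opensource.org/licenses/MIT"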
end
|
Milstein/spinone
|
app/models/ability.rb
|
class Ability
include CanCan::Ability
# To simplify, all admin permissions are linked to the Notification resource
def initialize(user)
user ||= User.new(nil) # Guest user
if user.role == "staff_admin"
can :manage, :all
elsif user.role == "staff_user"
can :read, :all
can [:update, :show], User, :id => user.id
elsif %w(member_admin member_user datacenter_admin datacenter_user user).include?(user.role )
can [:read], User
can [:update, :show], User, :id => user.id
end
end
end
|
Milstein/spinone
|
app/models/concerns/dateable.rb
|
module Dateable
extend ActiveSupport::Concern
module ClassMethods
def get_solr_date_range(from_date, until_date)
from_date_string = get_datetime_from_input(from_date) || "*"
until_date_string = get_datetime_from_input(until_date, until_date: true) || "*"
until_date_string = get_datetime_from_input(from_date, until_date: true) if until_date_string != "*" && until_date_string < from_date_string
"[" + from_date_string + " TO " + until_date_string + "]"
end
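# Illustrative (assumed examples, not from the original file; exact output depends on the iso8601 gem):
# get_solr_date_range("2015-04", "2015-06") #=> "[2015-04-01T00:00:00Z TO 2015-06-30T23:59:59Z]"
# get_solr_date_range(nil, "2015")          #=> "[* TO 2015-12-31T23:59:59Z]"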
def get_datetime_from_input(iso8601_time, options={})
return nil unless iso8601_time.present?
time = get_datetime_from_iso8601(iso8601_time, options)
return nil unless time.present?
time.iso8601
end
# parsing of incomplete iso8601 timestamps such as 2015-04 is broken
# in standard library
# return nil if invalid iso8601 timestamp
def get_datetime_from_iso8601(iso8601_time, options={})
if options[:until_date]
if iso8601_time[8..9].present?
ISO8601::DateTime.new(iso8601_time).to_time.utc.at_end_of_day
elsif iso8601_time[5..6].present?
ISO8601::DateTime.new(iso8601_time).to_time.utc.at_end_of_month
else
ISO8601::DateTime.new(iso8601_time).to_time.utc.at_end_of_year
end
else
ISO8601::DateTime.new(iso8601_time).to_time.utc
end
rescue
nil
end
end
end
|
Milstein/spinone
|
config/application.rb
|
require_relative 'boot'
require "rails"
require "active_model/railtie"
require "action_controller/railtie"
require "rails/test_unit/railtie"
require 'syslog/logger'
require 'securerandom'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# load ENV variables from .env file if it exists
env_file = File.expand_path("../../.env", __FILE__)
if File.exist?(env_file)
require 'dotenv'
Dotenv.load! env_file
end
# load ENV variables from container environment if json file exists
# see https://github.com/phusion/baseimage-docker#envvar_dumps
env_json_file = "/etc/container_environment.json"
if File.exist?(env_json_file)
env_vars = JSON.parse(File.read(env_json_file))
env_vars.each { |k, v| ENV[k] = v }
end
# default values for some ENV variables
ENV['APPLICATION'] ||= "spinone"
ENV['HOSTNAME'] ||= "api.local"
ENV['MEMCACHE_SERVERS'] ||= "memcached:11211"
ENV['SITE_TITLE'] ||= "DataCite REST API"
ENV['LOG_LEVEL'] ||= "info"
ENV['BRACCO_URL'] ||= "https://doi.test.datacite.org"
ENV['API_URL'] ||= "https://api.test.datacite.org"
ENV['SOLR_URL'] ||= "https://solr.test.datacite.org/api"
ENV['LAGOTTO_URL'] ||= "https://eventdata.test.datacite.org/api"
ENV['VOLPINO_URL'] ||= "https://profiles.test.datacite.org/api"
ENV['BLOG_URL'] ||= "https://blog.test.datacite.org"
ENV['SCHEMA_URL'] ||= "https://schema.test.datacite.org"
ENV['GITHUB_URL'] ||= "https://github.com/datacite/spinone"
ENV['GITHUB_ISSUES_REPO_URL'] ||= "https://github.com/datacite/datacite"
ENV['GITHUB_MILESTONES_URL'] ||= "https://api.github.com/repos/datacite/datacite"
ENV['TRUSTED_IP'] ||= "127.0.0.0/8"
module Spinone
class Application < Rails::Application
config.api_only = true
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
config.autoload_paths += Dir["#{config.root}/app/models/**/**", "#{config.root}/app/controllers/**/"]
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# serve assets via web server
config.public_file_server.enabled = false
# configure caching
config.cache_store = :dalli_store, nil, { :namespace => ENV['APPLICATION'] }
# Configure the default encoding used in templates for Ruby.
config.encoding = "utf-8"
# secret_key_base is not used by Rails API, as there are no sessions
config.secret_key_base = 'blipblapblup'
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password, :authentication_token, :jwt]
# Skip validation of locale
I18n.enforce_available_locales = false
# Disable IP spoofing check
config.action_dispatch.ip_spoofing_check = false
# compress responses with deflate or gzip
config.middleware.use Rack::Deflater
# parameter keys that are not explicitly permitted will raise error
config.action_controller.action_on_unpermitted_parameters = :raise
end
end
|
Milstein/spinone
|
app/models/base.rb
|
require 'cgi'
class Base
extend ActiveModel::Naming
include ActiveModel::Serialization
DEFAULT_ROWS = 1000
def self.all
collect_data
end
def self.where(options={})
collect_data(options)
end
def self.collect_data(options = {})
options[:page] ||= {}
data = get_data(options)
parse_data(data, options)
end
def self.get_data(options={})
query_url = get_query_url(options)
Maremma.get(query_url, options)
end
def self.parse_item(item, options={})
self.new(item, options)
end
def self.parse_items(items, options={})
Array(items).map do |item|
parse_item(item, options)
end
end
def self.parse_include(klass, params)
klass.new(params)
end
end
|
Milstein/spinone
|
spec/concerns/identifiable_spec.rb
|
require 'rails_helper'
describe Work, type: :model, vcr: true do
context "from url" do
let(:url) { "http://doi.org/10.5061/dryad.8515" }
it "doi_from_url" do
response = subject.doi_from_url(url)
expect(response).to eq("10.5061/DRYAD.8515")
end
it "doi_from_url https" do
url = "https://doi.org/10.5061/dryad.8515"
response = subject.doi_from_url(url)
expect(response).to eq("10.5061/DRYAD.8515")
end
it "doi_from_url dx.doi.org" do
url = "http://dx.doi.org/10.5061/dryad.8515"
response = subject.doi_from_url(url)
expect(response).to eq("10.5061/DRYAD.8515")
end
it "doi_from_url not a doi" do
url = "https://handle.net/10.5061/dryad.8515"
response = subject.doi_from_url(url)
expect(response).to be_nil
end
it "orcid_from_url" do
url = "http://orcid.org/0000-0002-2590-225X"
response = subject.orcid_from_url(url)
expect(response).to eq("0000-0002-2590-225X")
end
it "orcid_from_url https" do
url = "https://orcid.org/0000-0002-2590-225X"
response = subject.orcid_from_url(url)
expect(response).to eq("0000-0002-2590-225X")
end
it "validate_orcid" do
orcid = "http://orcid.org/0000-0002-2590-225X"
response = subject.validate_orcid(orcid)
expect(response).to eq("0000-0002-2590-225X")
end
it "validate_orcid https" do
orcid = "https://orcid.org/0000-0002-2590-225X"
response = subject.validate_orcid(orcid)
expect(response).to eq("0000-0002-2590-225X")
end
it "validate_orcid id" do
orcid = "0000-0002-2590-225X"
response = subject.validate_orcid(orcid)
expect(response).to eq("0000-0002-2590-225X")
end
it "validate_orcid wrong id" do
orcid = "0000 0002 1394 3097"
response = subject.validate_orcid(orcid)
expect(response).to be_nil
end
it "github_from_url release" do
url = "https://github.com/Troy-Wilson/ASV-Autonomous-Bathymetry/tree/v0.1"
response = subject.github_from_url(url)
expect(response).to eq(owner: "Troy-Wilson", repo: "ASV-Autonomous-Bathymetry", release: "v0.1")
end
it "github_from_url repo" do
url = "https://github.com/Troy-Wilson/ASV-Autonomous-Bathymetry"
response = subject.github_from_url(url)
expect(response).to eq(owner: "Troy-Wilson", repo: "ASV-Autonomous-Bathymetry")
end
it "github_from_url owner" do
url = "https://github.com/Troy-Wilson"
response = subject.github_from_url(url)
expect(response).to eq(owner: "Troy-Wilson")
end
it "github_release_from_url" do
url = "https://github.com/Troy-Wilson/ASV-Autonomous-Bathymetry/tree/v0.1"
response = subject.github_release_from_url(url)
expect(response).to eq("v0.1")
end
it "github_repo_from_url" do
url = "https://github.com/Troy-Wilson/ASV-Autonomous-Bathymetry/tree/v0.1"
response = subject.github_repo_from_url(url)
expect(response).to eq("ASV-Autonomous-Bathymetry")
end
it "github_owner_from_url" do
url = "https://github.com/Troy-Wilson/ASV-Autonomous-Bathymetry/tree/v0.1"
response = subject.github_owner_from_url(url)
expect(response).to eq("Troy-Wilson")
end
end
context "to url" do
let(:doi) { "10.5061/DRYAD.8515" }
let(:github_hash) {{ owner: "Troy-Wilson", repo: "ASV-Autonomous-Bathymetry", release: "v0.1" }}
it "doi_as_url" do
response = subject.doi_as_url(doi)
expect(response).to eq("https://handle.test.datacite.org/10.5061/dryad.8515")
end
it "doi_as_url with invalid doi" do
doi = "(:tba)"
response = subject.doi_as_url(doi)
expect(response).to eq(nil)
end
it "github_as_owner_url" do
response = subject.github_as_owner_url(github_hash)
expect(response).to eq("https://github.com/Troy-Wilson")
end
it "github_as_repo_url" do
response = subject.github_as_repo_url(github_hash)
expect(response).to eq("https://github.com/Troy-Wilson/ASV-Autonomous-Bathymetry")
end
it "github_as_release_url" do
response = subject.github_as_release_url(github_hash)
expect(response).to eq("https://github.com/Troy-Wilson/ASV-Autonomous-Bathymetry/tree/v0.1")
end
end
end
|
Milstein/spinone
|
app/models/concerns/cacheable.rb
|
module Cacheable
extend ActiveSupport::Concern
included do
def cached_member_response(id)
Rails.cache.fetch("member_response/#{id}", expires_in: 12.hours) do
member = Member.where(id: id)
member.present? ? member[:data] : nil
end
end
def cached_resource_type_response(id)
Rails.cache.fetch("resource_type_response/#{id}", expires_in: 1.month) do
resource_type = ResourceType.where(id: id)
resource_type.present? ? resource_type[:data] : nil
end
end
def cached_data_center_response(id)
Rails.cache.fetch("data_center_response/#{id}", expires_in: 12.hours) do
data_center = DataCenter.where(id: id)
data_center.present? ? data_center[:data] : nil
end
end
end
end
|
Milstein/spinone
|
app/controllers/user_stories_controller.rb
|
class UserStoriesController < ApplicationController
def index
@user_stories = UserStory.where(params)
options = {}
options[:meta] = @user_stories[:meta]
@user_stories = @user_stories[:data]
render json: UserStorySerializer.new(@user_stories, options).serialized_json, status: :ok
end
def show
@user_story = UserStory.where(id: params[:id])
fail AbstractController::ActionNotFound unless @user_story.present?
@user_story = @user_story[:data]
render json: UserStorySerializer.new(@user_story).serialized_json, status: :ok
end
end
|
Milstein/spinone
|
app/controllers/pages_controller.rb
|
class PagesController < ApplicationController
def index
@pages = Page.where(params)
options = {}
options[:meta] = @pages[:meta]
@pages = @pages[:data]
render json: PageSerializer.new(@pages, options).serialized_json, status: :ok
end
def show
@page = Page.where(id: params[:id])
fail AbstractController::ActionNotFound unless @page.present?
@page = @page[:data]
render json: PageSerializer.new(@page).serialized_json, status: :ok
end
end
|
Milstein/spinone
|
spec/models/page_spec.rb
|
require 'rails_helper'
describe Page, type: :model, vcr: true do
it "pages" do
pages = Page.all[:data]
expect(pages.length).to eq(25)
page = pages.first
expect(page.title).to eq("DOI Registrations for Software")
end
it "query" do
pages = Page.where(query: "thor")[:data]
expect(pages.length).to eq(5)
page = pages.first
expect(page.title).to eq("PIDs for conferences - your comments are welcome!")
end
end
|
Milstein/spinone
|
app/models/concerns/authenticable.rb
|
module Authenticable
extend ActiveSupport::Concern
require 'jwt'
included do
# encode token with RS256 (RSA signature using SHA-256)
def encode_token(payload)
# replace newline characters with actual newlines
private_key = OpenSSL::PKey::RSA.new(ENV['JWT_PRIVATE_KEY'].to_s.gsub('\n', "\n"))
JWT.encode(payload, private_key, 'RS256')
end
# decode and verify token with RS256 (RSA signature using SHA-256)
def decode_token(token)
public_key = OpenSSL::PKey::RSA.new(ENV['JWT_PUBLIC_KEY'].to_s.gsub('\n', "\n"))
payload = (JWT.decode token, public_key, true, { :algorithm => 'RS256' }).first
# check whether token has expired
return {} unless Time.now.to_i < payload["exp"]
payload
rescue JWT::DecodeError => error
Rails.logger.error "JWT::DecodeError: " + error.message + " for " + token
return {}
rescue OpenSSL::PKey::RSAError => error
public_key = ENV['JWT_PUBLIC_KEY'].presence || "nil"
Rails.logger.error "OpenSSL::PKey::RSAError: " + error.message + " for " + public_key
return {}
end
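# Illustrative round trip (assumed example, not from the original file; requires JWT_PRIVATE_KEY and
# JWT_PUBLIC_KEY to hold a matching RSA key pair):
# token = encode_token("uid" => "0000-0001-2345-6789", "exp" => Time.now.to_i + 24 * 3600)
# decode_token(token) #=> { "uid" => "0000-0001-2345-6789", "exp" => ... }, or {} once expired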
end
end
|
Milstein/spinone
|
spec/models/resource_type_spec.rb
|
require 'rails_helper'
describe ResourceType, type: :model, vcr: true do
it "all" do
resource_types = ResourceType.all[:data]
expect(resource_types.length).to eq(15)
resource_type = resource_types.first
expect(resource_type.title).to eq("Audiovisual")
end
it "one" do
resource_type = ResourceType.where(id: "text")[:data]
expect(resource_type.title).to eq("Text")
end
end
|
Milstein/spinone
|
app/models/milestone.rb
|
class Milestone < Base
attr_reader :id, :title, :description, :open_issues, :closed_issues, :state, :due_on, :year, :quarter, :created, :updated, :closed
def initialize(attributes, options={})
@id = attributes.fetch("number", nil)
@title = attributes.fetch("title", nil)
@description = attributes.fetch("description", nil)
@description = GitHub::Markdown.render_gfm(@description) if @description.present?
@open_issues = attributes.fetch("open_issues", nil)
@closed_issues = attributes.fetch("closed_issues", nil)
@state = attributes.fetch("state", nil)
@due_on = attributes.fetch("due_on", nil)
@created = attributes.fetch("created_at", nil)
@updated = attributes.fetch("updated_at", nil)
@closed = attributes.fetch("closed_at", nil)
@cache_key = "milestones/#{@id}/#{@updated}"
end
def self.get_query_url(options={})
if options[:id].present?
"#{url}/#{options[:id]}?github_token=#{ENV['GITHUB_PERSONAL_ACCESS_TOKEN']}"
elsif options[:state] == "closed"
url + "?github_token=#{ENV['GITHUB_PERSONAL_ACCESS_TOKEN']}&state=closed"
else
url + "?github_token=#{ENV['GITHUB_PERSONAL_ACCESS_TOKEN']}"
end
end
def self.parse_data(result, options={})
return nil if result.body.blank? || result.body['errors']
if options[:id].present?
item = result.body.fetch("data", {})
return {} unless item.present?
{ data: parse_item(item) }
else
items = result.body.fetch("data", [])
data = parse_items(items)
.select { |m| m.due_on.present? }
.sort_by { |m| m.due_on }
data = data.select { |m| m.year == options[:year].to_i } if options[:year].present?
{ data: data, meta: { total: data.length } }
end
end
def url
"#{ENV["GITHUB_ISSUES_REPO_URL"]}/milestone/#{id}"
end
def self.url
"#{ENV["GITHUB_MILESTONES_URL"]}/milestones"
end
def is_closed?
state == "closed"
end
def year
return nil unless due_on.present?
Time.parse(due_on).year
end
def quarter
return nil unless due_on.present?
(Time.parse(due_on).month / 3.to_f).ceil
end
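# Illustrative (assumed): a milestone due in May gives (5 / 3.0).ceil #=> 2, i.e. Q2;
# one due in December gives (12 / 3.0).ceil #=> 4, i.e. Q4.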
def released
is_closed? ? due_on : nil
end
end
|
Milstein/spinone
|
app/models/concerns/metadatable.rb
|
module Metadatable
extend ActiveSupport::Concern
included do
# remove non-printing whitespace
def clean_doi(doi)
doi.gsub(/\u200B/, '')
end
def github_from_url(url)
return {} unless /\Ahttps:\/\/github\.com\/(.+)(?:\/)?(.+)?(?:\/tree\/)?(.*)\z/.match(url)
words = URI.parse(url).path[1..-1].split('/')
{ owner: words[0],
repo: words[1],
release: words[3] }.compact
end
def github_repo_from_url(url)
github_from_url(url).fetch(:repo, nil)
end
def github_release_from_url(url)
github_from_url(url).fetch(:release, nil)
end
def github_owner_from_url(url)
github_from_url(url).fetch(:owner, nil)
end
def github_as_owner_url(github_hash)
"https://github.com/#{github_hash[:owner]}" if github_hash[:owner].present?
end
def github_as_repo_url(github_hash)
"https://github.com/#{github_hash[:owner]}/#{github_hash[:repo]}" if github_hash[:repo].present?
end
def github_as_release_url(github_hash)
"https://github.com/#{github_hash[:owner]}/#{github_hash[:repo]}/tree/#{github_hash[:release]}" if github_hash[:release].present?
end
end
end
|
Milstein/spinone
|
config/initializers/lograge.rb
|
Rails.application.configure do
config.lograge.enabled = true
config.lograge.formatter = Lograge::Formatters::Logstash.new
config.lograge.logger = LogStashLogger.new(type: :stdout)
config.lograge.ignore_actions = ['HeartbeatController#index', 'IndexController#index']
config.lograge.base_controller_class = 'ActionController::API'
config.log_level = ENV['LOG_LEVEL'].to_sym
config.lograge.custom_options = lambda do |event|
exceptions = %w(controller action format id)
{
params: event.payload[:params].except(*exceptions)
}
end
end
|
Milstein/spinone
|
app/serializers/data_center_serializer.rb
|
class DataCenterSerializer
include FastJsonapi::ObjectSerializer
set_key_transform :dash
set_type "data-centers"
attributes :title, :member_id, :year, :created, :updated
belongs_to :member, record_type: :members, serializer: :Member
end
|
Milstein/spinone
|
app/serializers/page_serializer.rb
|
class PageSerializer
include FastJsonapi::ObjectSerializer
set_key_transform :dash
attributes :id, :author, :title, :container_title, :description, :license, :url, :image_url, :tags, :issued, :updated
end
|
Milstein/spinone
|
spec/models/member_spec.rb
|
require 'rails_helper'
describe Member, type: :model, vcr: true do
it "members" do
members = Member.all[:data]
expect(members.length).to eq(25)
member = members.first
expect(member.title).to eq("ALBA Synchrotron Light Source")
end
it "member" do
member = Member.where(id: "ands")[:data]
expect(member.title).to eq("Australian National Data Service")
end
end
|
Milstein/spinone
|
spec/requests/data_centers_spec.rb
|
require 'rails_helper'
describe "DataCenters", type: :request, vcr: true do
it "data centers" do
get '/data-centers'
expect(last_response.status).to eq(200)
meta = json["meta"]
expect(meta["total"]).to eq(1559)
expect(meta["members"].size).to eq(15)
expect(meta["members"].first).to eq("id"=>"cdl", "title"=>"California Digital Library", "count"=>234)
expect(meta["years"].size).to eq(9)
expect(meta["years"].first).to eq("count"=>6, "id"=>"2010", "title"=>"2010")
expect(json["data"].size).to eq(25)
data_center = json["data"].first
expect(data_center["id"]).to eq("ethz.ubasojs")
expect(data_center.dig("attributes", "title")).to eq("027.7 - Zeitschrift für Bibliothekskultur")
end
it "data centers with query" do
get '/data-centers?query=california'
expect(last_response.status).to eq(200)
meta = json["meta"]
expect(meta["total"]).to eq(5)
expect(meta["members"].size).to eq(2)
expect(meta["members"].first).to eq("id"=>"cdl", "title"=>"California Digital Library", "count"=>4)
expect(meta["years"].size).to eq(4)
expect(meta["years"].first).to eq("count"=>1, "id"=>"2012", "title"=>"2012")
expect(json["data"].size).to eq(5)
data_center = json["data"].first
expect(data_center["id"]).to eq("cdl.ucsdcca")
expect(data_center.dig("attributes", "title")).to eq("California Coastal Atlas")
end
it "data center" do
get '/data-centers/ethz.ubasojs'
expect(last_response.status).to eq(200)
data_center = json["data"]
expect(data_center["id"]).to eq("ethz.ubasojs")
expect(data_center.dig("attributes", "title")).to eq("027.7 - Zeitschrift für Bibliothekskultur")
end
end
|
Milstein/spinone
|
app/serializers/member_serializer.rb
|
class MemberSerializer
include FastJsonapi::ObjectSerializer
set_key_transform :dash
set_type :members
attributes :title, :description, :member_type, :region, :country, :year, :logo_url, :email, :website, :phone, :created, :updated
end
|
Milstein/spinone
|
spec/requests/pages_spec.rb
|
require 'rails_helper'
describe "Pages", type: :request, vcr: true do
  it "pages" do
    get '/pages'
    expect(last_response.status).to eq(200)
    meta = json["meta"]
    expect(meta["total"]).to eq(91)
    expect(meta["tags"].size).to eq(15)
    expect(meta["tags"].first).to eq(["datacite", 20])
    expect(json["data"].size).to eq(25)
    page = json["data"].first
    expect(page["id"]).to eq("https://doi.org/10.5438/1nmy-9902")
    expect(page.dig("attributes", "title")).to eq("DOI Registrations for Software")
  end
  it "pages query" do
    get '/pages?query=orcid'
    expect(last_response.status).to eq(200)
    meta = json["meta"]
    expect(meta["total"]).to eq(9)
    expect(meta["tags"].size).to eq(8)
    expect(meta["tags"].first).to eq(["orcid", 8])
    expect(json["data"].size).to eq(9)
    page = json["data"].first
    expect(page["id"]).to eq("https://doi.org/10.5438/spfw-5q39")
    expect(page.dig("attributes", "title")).to eq("Next steps for the Organization ID Initiative: Report from the Stakeholder Meeting")
  end
  it "pages page 2" do
    get '/pages?page[number]=2'
    expect(last_response.status).to eq(200)
    meta = json["meta"]
    expect(meta["total"]).to eq(91)
    expect(meta["tags"].size).to eq(15)
    expect(meta["tags"].first).to eq(["datacite", 20])
    expect(json["data"].size).to eq(25)
    page = json["data"].first
    expect(page["id"]).to eq("https://doi.org/10.5438/0000-9g41")
    expect(page.dig("attributes", "title")).to eq("Welcoming Children’s Hospital of Philadelphia to the community!")
  end
  it "pages query by tag" do
    get '/pages?tag=orcid'
    expect(last_response.status).to eq(200)
    meta = json["meta"]
    expect(meta["total"]).to eq(14)
    expect(meta["tags"].size).to eq(9)
    expect(meta["tags"].first).to eq(["orcid", 14])
    expect(json["data"].size).to eq(14)
    page = json["data"].first
    expect(page["id"]).to eq("https://doi.org/10.5438/spfw-5q39")
    expect(page.dig("attributes", "title")).to eq("Next steps for the Organization ID Initiative: Report from the Stakeholder Meeting")
  end
  it "single page" do
    get '/pages/10.5438/zwsf-4y7y'
    expect(last_response.status).to eq(200)
    page = json["data"]
    expect(page["id"]).to eq("https://doi.org/10.5438/zwsf-4y7y")
    expect(page.dig("attributes", "title")).to eq("2016 in review")
  end
end
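# Note: the `json` helper used throughout these request specs is defined
# outside this file; a typical (hypothetical) definition in spec support code
# would be:
#
#   def json
#     JSON.parse(last_response.body)
#   end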
|
Milstein/spinone
|
app/models/resource_type.rb
|
class ResourceType < Base
  attr_reader :id, :title, :updated, :cache_key
  def initialize(attributes, options={})
    @id = attributes.fetch("id").underscore.dasherize
    @title = attributes.fetch("title", nil)
    @updated = DATACITE_SCHEMA_DATE + "T00:00:00Z"
    @cache_key = "resource-type/#{@id}-#{@updated}"
  end
  def self.get_query_url(options={})
    RESOURCE_TYPES_URL
  end
  def self.parse_data(result, options={})
    return nil if result.body.blank? || result.body['errors']
    # the resource types are the enumeration values of a simpleType restriction
    # in the schema document returned by RESOURCE_TYPES_URL
    items = result.body.fetch("data", {}).fetch("schema", {}).fetch("simpleType", {}).fetch('restriction', {}).fetch('enumeration', [])
    items = items.map do |item|
      id = item.fetch("value").underscore.dasherize
      { "id" => id, "title" => id.underscore.humanize }
    end
    if options[:id]
      item = items.find { |i| i["id"] == options[:id] }
      return nil if item.nil?
      { data: parse_item(item) }
    else
      { data: parse_items(items), meta: { total: items.length } }
    end
  end
end
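# Worked example of the id/title derivation above, using resource type values
# from the DataCite schema ("Audiovisual" and "PhysicalObject" are real
# enumeration values; the exact list depends on the schema version):
#
#   "PhysicalObject".underscore.dasherize   # => "physical-object"  (id)
#   "physical-object".underscore.humanize   # => "Physical object"  (title)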
|
Milstein/spinone
|
config/initializers/string.rb
|
class String
  def my_titleize
    self.gsub(/(\b|_)(.)/) { "#{$1}#{$2.upcase}" }
  end
end
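# Behaviour sketch (illustrative, not part of the original file): the character
# after a word boundary or an underscore is upcased, and underscores are kept,
# unlike ActiveSupport's `titleize`:
#
#   "data_center".my_titleize  # => "Data_Center"
#   "data center".my_titleize  # => "Data Center"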
|
Milstein/spinone
|
app/models/page.rb
|
class Page < Base
  attr_reader :id, :author, :title, :container_title, :description, :license, :url, :image_url, :tags, :issued, :updated
  def initialize(attributes, options={})
    @id = attributes.fetch("@id")
    @author = attributes.fetch("author", []).map { |a| { "given" => a["givenName"],
                                                         "family" => a["familyName"],
                                                         "orcid" => a["@id"] } }
    @title = attributes.fetch("name", nil)
    @container_title = attributes.fetch("publisher", nil)
    @description = attributes.fetch("description", nil)
    @license = attributes.fetch("license", nil)
    @url = attributes.fetch("url", nil)
    @image_url = attributes.fetch("image", nil)
    @tags = attributes.fetch("keywords", "").split(", ")
    @issued = attributes.fetch("datePublished", nil)
    @updated = @issued
  end
  def self.get_query_url(options={})
    url
  end
  def self.parse_data(result, options={})
    return nil if result.body.blank? || result.body['errors']
    items = result.body.fetch("data", [])
    if options[:id]
      item = items.find { |i| i["@id"] == "https://doi.org/" + options[:id] }
      return nil if item.nil?
      { data: parse_item(item) }
    else
      # filter by free-text query and/or tag, then paginate in memory
      items = items.select { |i| (i.fetch("title", "").downcase + i.fetch("description", "").downcase).include?(options[:query]) } if options[:query]
      items = items.select { |i| i.fetch("keywords", "").split(", ").include?(options[:tag]) } if options[:tag]
      # page size defaults to 25 and is capped at 1,000
      page = (options.dig(:page, :number) || 1).to_i
      per_page = options.dig(:page, :size) && (1..1000).include?(options.dig(:page, :size).to_i) ? options.dig(:page, :size).to_i : 25
      total_pages = (items.length.to_f / per_page).ceil
      meta = { total: items.length, "total-pages" => total_pages, page: page, tags: parse_meta(items) }
      offset = (page - 1) * per_page
      items = items[offset...offset + per_page] || []
      { data: parse_items(items), meta: meta }
    end
  end
  # count posts per keyword and keep the 15 most frequent tags
  def self.parse_meta(items)
    items.reduce({}) do |sum, i|
      i.fetch("keywords", "").split(", ").each { |tag| sum[tag] = sum[tag].to_i + 1 }
      sum
    end.sort_by { |_key, value| -value }[0..14].to_h
  end
  def self.url
    "#{ENV["BLOG_URL"]}/posts.json"
  end
end
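# Worked example of the in-memory paging above (hypothetical call, `result`
# being the fetched blog response): with the default page size of 25 and
# `page[number]=2`, the offset is (2 - 1) * 25 = 25, so posts 25..49 of the
# filtered list are returned, and 91 posts give (91 / 25.0).ceil == 4 pages:
#
#   Page.parse_data(result, page: { number: "2" })[:meta]
#   # => { total: 91, "total-pages" => 4, page: 2, tags: { ... } }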
|
Milstein/spinone
|
app/serializers/work_serializer.rb
|
class WorkSerializer
  include FastJsonapi::ObjectSerializer
  set_key_transform :dash
  set_type :works
  cache_options enabled: true, cache_length: 8.hours
  attributes :doi, :identifier, :url, :author, :title, :container_title, :description, :resource_type_subtype, :data_center_id, :member_id, :resource_type_id, :version, :license, :schema_version, :results, :related_identifiers, :published, :registered, :checked, :updated, :media, :xml
  belongs_to :data_center, record_type: "data-centers", serializer: :DataCenter
  belongs_to :member, record_type: :members, serializer: :Member
  belongs_to :resource_type, record_type: "resource-types", serializer: :ResourceType
end
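# A minimal usage sketch, not part of the original file: with `cache_options`
# enabled above, serialized works are cached for 8 hours, and related records
# can be side-loaded via the `include` option, e.g.
#
#   WorkSerializer.new(works, include: [:data_center, :resource_type]).serializable_hash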
|
Milstein/spinone
|
spec/concerns/dateable_spec.rb
|
require 'rails_helper'
describe Work do
  describe "get_datetime_from_input" do
    it 'year' do
      date = "2015"
      result = Work.get_datetime_from_input(date)
      expect(result).to eq("2015-01-01T00:00:00Z")
    end
    it 'year month' do
      date = "2015-10"
      result = Work.get_datetime_from_input(date)
      expect(result).to eq("2015-10-01T00:00:00Z")
    end
    it 'year month day' do
      date = "2015-10-12"
      result = Work.get_datetime_from_input(date)
      expect(result).to eq("2015-10-12T00:00:00Z")
    end
    it 'year month day until date' do
      date = "2015-10-12"
      result = Work.get_datetime_from_input(date, until_date: true)
      expect(result).to eq("2015-10-12T23:59:59Z")
    end
    it 'invalid date' do
      date = "x"
      result = Work.get_datetime_from_input(date)
      expect(result).to be_nil
    end
  end
  describe "get_solr_date_range" do
    it 'year' do
      from_date = "2015"
      until_date = "2015"
      result = Work.get_solr_date_range(from_date, until_date)
      expect(result).to eq("[2015-01-01T00:00:00Z TO 2015-12-31T23:59:59Z]")
    end
    it 'year month' do
      from_date = "2015-02"
      until_date = "2015-03"
      result = Work.get_solr_date_range(from_date, until_date)
      expect(result).to eq("[2015-02-01T00:00:00Z TO 2015-03-31T23:59:59Z]")
    end
    it 'year month day' do
      from_date = "2015-02-04"
      until_date = "2015-03-15"
      result = Work.get_solr_date_range(from_date, until_date)
      expect(result).to eq("[2015-02-04T00:00:00Z TO 2015-03-15T23:59:59Z]")
    end
    it 'year month day until_date before' do
      from_date = "2015-02-04"
      until_date = "2015-01-15"
      result = Work.get_solr_date_range(from_date, until_date)
      expect(result).to eq("[2015-02-04T00:00:00Z TO 2015-02-04T23:59:59Z]")
    end
    it 'year no from date' do
      from_date = nil
      until_date = "2015"
      result = Work.get_solr_date_range(from_date, until_date)
      expect(result).to eq("[* TO 2015-12-31T23:59:59Z]")
    end
    it 'year month no until date' do
      from_date = "2015-02"
      until_date = nil
      result = Work.get_solr_date_range(from_date, until_date)
      expect(result).to eq("[2015-02-01T00:00:00Z TO *]")
    end
  end
end
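# A minimal sketch of the class method under test, consistent with the
# expectations above (hypothetical; the real Dateable concern may differ):
#
#   def self.get_datetime_from_input(input, options={})
#     return nil unless input.to_s =~ /\A\d{4}(-\d{2})?(-\d{2})?\z/
#     year, month, day = input.split("-").map(&:to_i)
#     if options[:until_date]
#       month ||= 12
#       day ||= Date.civil(year, month, -1).day   # last day of the period
#       Date.civil(year, month, day).strftime("%Y-%m-%dT23:59:59Z")
#     else
#       Date.civil(year, month || 1, day || 1).strftime("%Y-%m-%dT00:00:00Z")
#     end
#   end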
|
Milstein/spinone
|
spec/models/milestone_spec.rb
|
require 'rails_helper'
describe Milestone, type: :model, vcr: true do
  it "milestones" do
    milestones = Milestone.all[:data]
    expect(milestones.length).to eq(8)
    milestone = milestones.first
    expect(milestone.title).to eq("Public API to manage clients and prefixes")
  end
  it "milestone" do
    milestone = Milestone.where(id: "10")[:data]
    expect(milestone.title).to eq("Develop new DOI Fabrica service")
  end
end
|
Milstein/spinone
|
app/models/user.rb
|
class User
  # include jwt encode and decode
  include Authenticable
  attr_accessor :name, :uid, :email, :role, :jwt, :orcid, :member_id, :datacenter_id
  def initialize(token)
    if token.present?
      payload = decode_token(token)
      @jwt = token
      @uid = payload.fetch("uid", nil)
      @name = payload.fetch("name", nil)
      @email = payload.fetch("email", nil)
      @role = payload.fetch("role", nil)
      @member_id = payload.fetch("member_id", nil)
      @datacenter_id = payload.fetch("datacenter_id", nil)
    else
      @role = "anonymous"
    end
  end
  alias_method :orcid, :uid
  alias_method :id, :uid
  # Helper method to check for admin user
  def is_admin?
    role == "staff_admin"
  end
  # Helper method to check for admin or staff user
  def is_admin_or_staff?
    ["staff_admin", "staff_user"].include?(role)
  end
end
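# A minimal usage sketch, not part of the original file (the token would
# normally come from the request's Authorization header):
#
#   user = User.new(bearer_token)
#   user.role               # => e.g. "staff_admin", or "anonymous" without a token
#   user.is_admin?          # => true only for "staff_admin"
#   user.is_admin_or_staff? # => true for "staff_admin" or "staff_user"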
|