deivid-rodriguez/activeadmin_addons
|
activeadmin_addons.gemspec
|
# rubocop:disable Metrics/LineLength
$:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "activeadmin_addons/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = "activeadmin_addons"
s.version = ActiveadminAddons::VERSION
s.authors = ["Platanus", "<NAME>", "<NAME>", "<NAME>"]
s.email = ["<EMAIL>", "<EMAIL>", "<EMAIL>", "<EMAIL>"]
s.homepage = "https://github.com/platanus/activeadmin_addons"
s.summary = "Set of addons to help with the activeadmin ui"
s.description = "Set of addons to help with the activeadmin ui"
s.license = "MIT"
s.files = Dir["{app,config,db,lib,vendor}/**/*", "MIT-LICENSE", "Rakefile", "README.rdoc"]
s.add_dependency "railties"
s.add_dependency "sass-rails"
s.add_dependency "select2-rails", "~> 4.0"
s.add_dependency "xdan-datetimepicker-rails", "~> 2.5.1"
s.add_dependency "require_all"
s.add_development_dependency "rails", "~> 4.2"
s.add_development_dependency "coffee-rails"
s.add_development_dependency "sqlite3"
s.add_development_dependency "enumerize", "~> 2.0"
s.add_development_dependency "paperclip"
s.add_development_dependency "aasm"
s.add_development_dependency "rspec-rails"
s.add_development_dependency "pry-rails"
s.add_development_dependency "factory_bot_rails"
s.add_development_dependency "shoulda-matchers"
s.add_development_dependency "guard"
s.add_development_dependency "guard-rspec"
s.add_development_dependency "capybara-selenium"
s.add_development_dependency "database_cleaner"
end
|
nikukyugamer/character-recognition-by-vision-api
|
lib/character_recognition_by_vision_api.rb
|
require "character_recognition_by_vision_api/version"
require 'rest-client'
require 'base64'
require 'json'
module CharacterRecognitionByVisionApi
extend self
def extract_to_text(image_file, api_key)
JSON.parse(response_json(image_file, api_key))['responses'][0]['fullTextAnnotation']['text']
end
def payload(image_file)
{
requests: [
{
image:
{
content: Base64.encode64(File.binread(image_file))
},
features: [
{
type: "TEXT_DETECTION",
maxResults: 10,
}
]
}
]
}
end
def headers_for_post
{
'Content-Type': 'application/json',
}
end
def request_uri(api_key)
"https://vision.googleapis.com/v1/images:annotate?key=#{api_key}"
end
def response_json(image_file, api_key)
begin
RestClient.post(
request_uri(api_key),
payload(image_file).to_json,
headers_for_post,
)
rescue => e
puts e
exit(1)
end
end
end
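# Usage sketch (not part of the library; run with `ruby -Ilib` so the version
# require resolves). 'sample.png' and the GOOGLE_VISION_API_KEY environment
# variable are placeholders for illustration only.
if __FILE__ == $PROGRAM_NAME
  text = CharacterRecognitionByVisionApi.extract_to_text('sample.png', ENV['GOOGLE_VISION_API_KEY'])
  puts text
end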
|
nikukyugamer/character-recognition-by-vision-api
|
spec/character_recognition_by_vision_api_spec.rb
|
RSpec.describe CharacterRecognitionByVisionApi do
it "has a version number" do
expect(CharacterRecognitionByVisionApi::VERSION).not_to be nil
end
before do
@image_file = 'spec/sample_image.png'
end
it 'the return value is correct headers' do
expect(CharacterRecognitionByVisionApi.headers_for_post).to eq(
{
'Content-Type': 'application/json',
}
)
end
it 'the return value is correct uri' do
expect(CharacterRecognitionByVisionApi.request_uri('abcdefghijklmnopqrstuvwxyz')).to eq("https://vision.googleapis.com/v1/images:annotate?key=abcdefghijklmnopqrstuvwxyz")
end
it 'the return value is correct json for post' do
expect(CharacterRecognitionByVisionApi.payload(@image_file)).to eq(
{
requests: [
{
image:
{
content: Base64.encode64(File.binread(@image_file))
},
features: [
{
type: "TEXT_DETECTION",
maxResults: 10,
}
]
}
]
}
)
end
# oops... 'extract_to_text' and 'response_json' method are NOT tested...
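# A minimal sketch of how the untested methods could be covered, assuming
# rspec-mocks is available; the stubbed response body below is hypothetical
# and only mirrors the structure 'extract_to_text' expects.
it 'extracts text from a stubbed Vision API response' do
  stubbed_body = { responses: [{ fullTextAnnotation: { text: 'hello' } }] }.to_json
  allow(RestClient).to receive(:post).and_return(stubbed_body)
  expect(CharacterRecognitionByVisionApi.extract_to_text(@image_file, 'dummy-key')).to eq('hello')
end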
end
|
JimmyLTS/JLLayoutConstraint
|
JLLayoutConstraint.podspec
|
Pod::Spec.new do |s|
s.name = "JLLayoutConstraint"
s.version = "0.0.2"
s.summary = "A utility for layout constraints"
s.description = <<-DESC
It is a utility to add layout constraints.
DESC
s.homepage = "https://github.com/JimmyLTS/JLLayoutConstraint"
# s.screenshots = "www.example.com/screenshots_1", "www.example.com/screenshots_2"
s.license = 'MIT'
s.author = { "吕同生" => "<EMAIL>" }
s.source = { :git => "https://github.com/JimmyLTS/JLLayoutConstraint.git", :tag => s.version.to_s }
s.platform = :ios, '8.0'
s.requires_arc = true
s.source_files = 'JLLayoutConstraint/JLLayoutConstraint/*.{h,m}'
end
|
machine23/cobalt2-vim-theme
|
test_files/test.rb
|
require "gem"
string = "base16"
symbol = :base16
fixnum = 0
float = 0.00
array = Array.new
array = ['chris', 85]
hash = {"test" => "test"}
regexp = /^\b1?[-.\s]?(\d{3})|([A-z])[-.\s]?\d{3}[-.\s]?\d{4}\?\b$/
# TODO This is a comment
module Earth
class Person < Creature
attr_accessor :name
def initialize(attributes = {})
@name = attributes[:name]
end
def self.greet
"hello"
end
def foreach
begin
[].each do |item|
puts item
end
end
end
def while
$i = 0
$num = 5
while $i < $num
puts("Inside the loop i = #$i")
$i += 1
end
end
def interpolation
test = "ab"
out = "My name is #{ test }"
end
def compare_numbers
1 < 2
1 <= 2
1 == 2
1 >= 2
1 > 2
test = 2
end
def operators
test = 1 + 2
test = 1 - 2
test = 1 / 2
test = 1 * 2
test = 1 % 2
test -= 1
test += 2
end
def switch_case
test = 1
case test
when 0
"zero"
when 1
"one"
when 2
"two"
else
"test"
end
end
def debugger
debugger
binding.pry
byebug
end
end
end
person1 = Earth::Person.new(:name => "Chris")
print Earth::Person::greet, " ", person1.name, "\n"
puts "another #{Person::greet} #{person1.name}"
|
jgarcia/recommendify
|
lib/recommendify/base.rb
|
class Recommendify::Base
attr_reader :similarity_matrix, :input_matrices, :max_neighbors
def initialize(input_matrices = nil, opts = {})
@max_neighbors = opts[:max_neighbors] || Recommendify::DEFAULT_MAX_NEIGHBORS
@input_matrices = if input_matrices
Hash[input_matrices.map{ |key, opts|
opts.merge!(:key => key, :redis_prefix => redis_prefix)
[ key, Recommendify::InputMatrix.create(opts) ]
}]
else
{}
end
@similarity_matrix = Recommendify::SimilarityMatrix.new(
:max_neighbors => max_neighbors,
:key => :similarities,
:redis_prefix => redis_prefix
)
end
def redis_prefix
"recommendify"
end
def method_missing(method, *args)
if @input_matrices.has_key?(method)
@input_matrices[method]
else
raise NoMethodError.new(method.to_s)
end
end
def respond_to?(method)
@input_matrices.has_key?(method) ? true : super
end
def all_items
@input_matrices.map{ |k,m| m.all_items }.flatten.uniq
end
def for(item_id)
similarity_matrix[item_id].map do |other_item_id, similarity|
Recommendify::Neighbor.new(
:item_id => other_item_id,
:similarity => similarity
)
end.sort
end
def process!
all_items.each{ |item_id| process_item!(item_id) }
end
def process_item!(item_id)
input_matrices.map do |k,m|
neighbors = m.similarities_for(item_id).map do |i,w|
[i,w*m.weight]
end
similarity_matrix.update(item_id, neighbors)
end
similarity_matrix.commit_item!(item_id)
end
def delete_item!(item_id)
input_matrices.map do |k,m|
m.delete_item(item_id)
end
end
end
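# Usage sketch (illustrative, assuming the rest of the gem is loaded and Redis
# is running; the :similarity_func and :weight options are assumptions about
# InputMatrix.create, inferred from the weight/similarities_for calls above):
#
#   recommender = Recommendify::Base.new(
#     :order_items => { :similarity_func => :jaccard, :weight => 5.0 }
#   )
#   recommender.process!
#   recommender.for("item-42").each { |n| puts "#{n.item_id}: #{n.similarity}" }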
|
bogdanmatasaru/Keychain.swift
|
Simple-KeychainSwift.podspec
|
#
# Be sure to run `pod lib lint Simple-KeychainSwift.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'Simple-KeychainSwift'
s.version = '4.1.0'
s.summary = 'A simple drop in Swift wrapper class for the Keychain'
s.homepage = 'https://github.com/ashleymills/Keychain.swift'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { '<NAME>' => '<EMAIL>' }
s.source = { :git => "https://github.com/ashleymills/Keychain.swift.git", :tag => "v"+s.version.to_s }
s.social_media_url = "http://twitter.com/ashleymills"
s.swift_versions = ['4.0','4.2','5']
s.ios.deployment_target = '9.0'
s.osx.deployment_target = '10.11'
s.source_files = 'Simple-KeychainSwift/Classes/Keychain.swift'
end
|
qiniu/ruby-sdk
|
lib/qiniu/resumable_upload.rb
|
# -*- encoding: utf-8 -*-
require 'zlib'
require 'yaml'
require 'tmpdir'
require 'fileutils'
require 'mime/types'
require 'digest/sha1'
require 'qiniu/abstract'
require 'qiniu/exceptions'
require 'json'
module Qiniu
module Storage
module AbstractClass
class ChunkProgressNotifier
include Qiniu::Abstract
abstract_methods :notify
# def notify(block_index, block_put_progress); end
end
class BlockProgressNotifier
include Qiniu::Abstract
abstract_methods :notify
# def notify(block_index, checksum); end
end
end # module AbstractClass
class ChunkProgressNotifier < AbstractClass::ChunkProgressNotifier
def notify(index, progress)
logmsg = "chunk #{progress['offset']/Config.settings[:chunk_size]} in block #{index} successfully uploaded.\n" + progress.to_s
Utils.debug(logmsg)
end
end # class ChunkProgressNotifier
class BlockProgressNotifier < AbstractClass::BlockProgressNotifier
def notify(index, checksum)
Utils.debug "block #{index}: {ctx: #{checksum}} successfully uploaded."
Utils.debug "block #{index}: {checksum: #{checksum}} successfully uploaded."
end
end # class BlockProgressNotifier
class << self
include Utils
def resumable_upload_with_token(uptoken,
local_file,
bucket,
key = nil,
mime_type = nil,
custom_meta = nil,
customer = nil,
callback_params = nil,
rotate = nil,
resume_record_file = nil,
version = :v1,
part_size = Config.settings[:block_size])
File.open(local_file, 'rb') do |ifile|
fh = FileData.new(ifile)
fsize = fh.data_size
key = Digest::SHA1.hexdigest(local_file + fh.mtime.to_s) if key.nil?
if mime_type.nil? || mime_type.empty?
mime_type = MIME::Types.type_for(local_file).first || 'application/octet-stream'
end
code, data = _resumable_upload(uptoken, fh, fsize, bucket, key, mime_type.to_s, custom_meta, customer, callback_params, rotate, resume_record_file, version, part_size)
[code, data]
end
end # resumable_upload_with_token
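# Usage sketch (illustrative values only; the bucket, key, paths and resume
# record file are placeholders, and the record file enables restart after a
# failed upload):
#
#   uptoken = Qiniu::Auth.generate_uptoken(Qiniu::Auth::PutPolicy.new('my-bucket', 'big-file.bin'))
#   code, data = Qiniu::Storage.resumable_upload_with_token(
#     uptoken, '/tmp/big-file.bin', 'my-bucket', 'big-file.bin',
#     nil, nil, nil, nil, nil, 'upload.record'
#   )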
private
class FileData
attr_accessor :fh
def initialize(fh)
@fh = fh
end
def data_size
@fh.stat.size
end
def get_data(offset, length)
@fh.seek(offset)
@fh.read(length)
end
def path
@fh.path
end
def mtime
@fh.mtime
end
end # class FileData
def _new_block_put_progress_data
{'ctx' => nil, 'offset' => 0, 'restsize' => nil, 'status_code' => nil, 'host' => nil}
end # _new_block_put_progress_data
def _new_block_put_progress_data_v2
{'etag' => nil, 'offset' => 0, 'restsize' => nil, 'partNumber' => 0, 'status_code' => nil, 'host' => nil}
end
def _record_upload_progress
{'ctx' => nil, 'offset' => 0, 'upload_extra' => {}}
end
def get_upload_record(resume_record_file)
JSON.load(File.read(resume_record_file))
rescue
nil
end
def _call_binary_with_token(uptoken, url, data, content_type = nil, retry_times = 0)
options = {
:headers => {
:content_type => 'application/octet-stream',
'Authorization' => 'UpToken ' + uptoken
}
}
if content_type && !content_type.empty? then
options[:headers][:content_type] = content_type
end
# Keep the original request body in `data` and capture the response separately,
# so a retry re-sends the request body (with the same content type), not the response.
code, resp_body, raw_headers = HTTP.api_post(url, data, options)
unless HTTP.is_response_ok?(code)
retry_times += 1
if Config.settings[:auto_reconnect] && retry_times < Config.settings[:max_retry_times]
return _call_binary_with_token(uptoken, url, data, content_type, retry_times)
end
end
return code, resp_body, raw_headers
end # _call_binary_with_token
def _mkblock(bucket, uptoken, block_size, body)
url = Config.up_host(bucket) + "/mkblk/#{block_size}"
_call_binary_with_token(uptoken, url, body)
end # _mkblock
def _putblock(uphost, uptoken, ctx, offset, body)
url = uphost + "/bput/#{ctx}/#{offset}"
_call_binary_with_token(uptoken, url, body)
end # _putblock
def _resumable_put_block(bucket,
uptoken,
fh,
block_index,
block_size,
chunk_size,
progress,
retry_times,
notifier)
code, data = 0, {}
fpath = fh.path
# this block has never been uploaded.
if progress['ctx'].nil? || progress['ctx'].empty?
progress['offset'] = 0
progress['restsize'] = block_size
# choose the smaller one
body_length = [block_size, chunk_size].min
for i in 1..retry_times
seek_pos = block_index * Config.settings[:block_size]
body = fh.get_data(seek_pos, body_length)
result_length = body.length
if result_length != body_length
raise FileSeekReadError.new(fpath, block_index, seek_pos, body_length, result_length)
end
code, data, raw_headers = _mkblock(bucket, uptoken, block_size, body)
Utils.debug "Mkblk : #{code.inspect} #{data.inspect} #{raw_headers.inspect}"
body_crc32 = Zlib.crc32(body)
if HTTP.is_response_ok?(code) && data["crc32"] == body_crc32
progress['ctx'] = data["ctx"]
progress['offset'] = body_length
progress['restsize'] = block_size - body_length
progress['status_code'] = code
progress['host'] = data["host"]
if notifier && notifier.respond_to?("notify")
notifier.notify(block_index, progress)
end
break
elsif i == retry_times && data["crc32"] != body_crc32
Log.logger.error %Q(Uploading block error. Expected crc32: #{body_crc32}, but got: #{data["crc32"]})
return code, data, raw_headers
end
end
elsif progress['offset'] + progress['restsize'] != block_size
raise BlockSizeNotMathchError.new(fpath, block_index, progress['offset'], progress['restsize'], block_size)
end
# loop uploading other chunks except the first one
while progress['restsize'].to_i > 0 && progress['restsize'] < block_size
# choose the smaller one
body_length = [progress['restsize'], chunk_size].min
for i in 1..retry_times
seek_pos = block_index*Config.settings[:block_size] + progress['offset']
body = fh.get_data(seek_pos, body_length)
result_length = body.length
if result_length != body_length
raise FileSeekReadError.new(fpath, block_index, seek_pos, body_length, result_length)
end
code, data, raw_headers = _putblock(progress['host'], uptoken, progress['ctx'], progress['offset'], body)
Utils.debug "Bput : #{code.inspect} #{data.inspect} #{raw_headers.inspect}"
body_crc32 = Zlib.crc32(body)
if HTTP.is_response_ok?(code) && data["crc32"] == body_crc32
progress['ctx'] = data["ctx"]
progress['offset'] += body_length
progress['restsize'] -= body_length
progress['status_code'] = code
progress['host'] = data["host"]
if notifier && notifier.respond_to?("notify")
notifier.notify(block_index, progress)
end
break
elsif i == retry_times && data["crc32"] != body_crc32
Log.logger.error %Q(Uploading block error. Expected crc32: #{body_crc32}, but got: #{data["crc32"]})
return code, data, raw_headers
end
end
end
# return
return code, data, raw_headers
end # _resumable_put_block
def _resumeble_put_block_v2(bucket,
uptoken,
fh,
block_index,
progress,
retry_times,
notifier,
part_size,
upload_extra,
restsize)
fpath = fh.path
if restsize > part_size
for i in 1..retry_times
seek_positon = block_index * part_size
body = fh.get_data(seek_positon, part_size)
if body.length != part_size
raise FileSeekReadError.new(fpath, block_index, seek_positon, body.length, body.length)
end
body_md5 = Digest::MD5.hexdigest(body)
code, data, raw_headers = _upload_part(upload_extra['host'], uptoken, body, bucket, block_index + 1, upload_extra['upload_id'], upload_extra['encoded_object_name'])
if HTTP.is_response_ok?(code) && data["md5"] == body_md5
progress['etag'] = data["etag"]
progress['offset'] = seek_positon
progress['restsize'] = restsize - part_size * block_index
progress['status_code'] = code
progress['host'] = data["host"]
if notifier && notifier.respond_to?("notify")
notifier.notify(block_index, progress)
end
break
elsif i == retry_times && data["md5"] != body_md5
Log.logger.error %Q(Uploading block error. Expected md5: #{body_md5}, but got: #{data["md5"]})
break
end
end
else
for i in 1..retry_times
seek_positon = block_index * part_size
body = fh.get_data(seek_positon, restsize)
if body.length != restsize
raise FileSeekReadError.new(fpath, block_index, seek_positon, body.length, body.length)
end
body_md5 = Digest::MD5.hexdigest(body)
code, data, raw_headers = _upload_part(upload_extra['host'], uptoken, body, bucket, block_index + 1, upload_extra['upload_id'], upload_extra['encoded_object_name'])
if HTTP.is_response_ok?(code) && data["md5"] == body_md5
progress['etag'] = data["etag"]
progress['offset'] = seek_positon
progress['restsize'] = restsize - part_size
progress['status_code'] = code
progress['host'] = data["host"]
if notifier && notifier.respond_to?("notify")
notifier.notify(block_index, progress)
end
break
elsif i == retry_times && data["md5"] != body_md5
Log.logger.error %Q(Uploading block error. Expected md5: #{body_md5}, but got: #{data["md5"]})
break
end
end
end
return code, data, raw_headers
end
def _block_count(fsize)
((fsize + Config.settings[:block_size] - 1) / Config.settings[:block_size]).to_i
end # _block_count
def _resumable_put(bucket,
uptoken,
fh,
checksums,
progresses,
block_notifier = nil,
chunk_notifier = nil,
complete_block = nil,
version = :v1,
part_size = Config.settings[:block_size],
upload_record = nil,
resume_record_file = nil)
upload_extra = upload_record['upload_extra']
code, data = 0, {}
fsize = fh.data_size
block_count = _block_count(fsize)
progress_count = progresses.length
if progress_count != block_count
checksums = []
progresses = []
complete_block = 0
end
complete_block.upto(block_count-1).each do |block_index|
if checksums[block_index].nil? || checksums[block_index].empty?
block_size = part_size
if block_index == block_count - 1
block_size = fsize - block_index * part_size
end
if version == :v1
progresses[block_index] ||= _new_block_put_progress_data
else
progresses[block_index] ||= _new_block_put_progress_data_v2
end
if version == :v1
code, data = _resumable_put_block(bucket, uptoken, fh, block_index, block_size, block_size, progresses[block_index], Config.settings[:max_retry_times], chunk_notifier)
else
restsize = fsize - part_size * block_index
code, data = _resumeble_put_block_v2(bucket, uptoken, fh, block_index, progresses[block_index], Config.settings[:max_retry_times], chunk_notifier, part_size, upload_extra, restsize)
end
if HTTP.is_response_ok?(code)
if version == :v1
checksums[block_index] = data["ctx"]
else
checksums[block_index] = {'etag' => data["etag"], 'partNumber' => block_index + 1}
end
upload_record['ctx'] = checksums
if resume_record_file && !resume_record_file.empty?
File.write(resume_record_file, upload_record.to_json)
end
if block_notifier && block_notifier.respond_to?("notify")
block_notifier.notify(block_index, checksums[block_index])
end
end
end
end
return [code, data]
end # _resumable_put
def _mkfile(uphost,
uptoken,
entry_uri,
fsize,
checksums,
mime_type = nil,
custom_meta = nil,
customer = nil,
callback_params = nil,
rotate = nil)
path = '/rs-mkfile/' + Utils.urlsafe_base64_encode(entry_uri) + "/fsize/#{fsize}"
path += '/mimeType/' + Utils.urlsafe_base64_encode(mime_type) if mime_type && !mime_type.empty?
path += '/meta/' + Utils.urlsafe_base64_encode(custom_meta) if custom_meta && !custom_meta.empty?
path += '/customer/' + customer if customer && !customer.empty?
callback_query_string = HTTP.generate_query_string(callback_params) if callback_params && !callback_params.empty?
path += '/params/' + Utils.urlsafe_base64_encode(callback_query_string) if callback_query_string && !callback_query_string.empty?
path += '/rotate/' + rotate if rotate && rotate.to_i >= 0
url = uphost + path
body = checksums.join(',')
_call_binary_with_token(uptoken, url, body, 'text/plain')
end # _mkfile
def _resumable_upload(uptoken,
fh,
fsize,
bucket,
key,
mime_type = nil,
custom_meta = nil,
customer = nil,
callback_params = nil,
rotate = nil,
resume_record_file = nil,
version = :v1,
part_size = Config.settings[:block_size])
if part_size.nil?
part_size = Config.settings[:block_size]
end
upload_extra = {}
host = Config.up_host(bucket)
upload_extra['host'] = host
if version == :v2
encoded_object_name = _encode_object_name(key)
upload_extra['encoded_object_name'] = encoded_object_name
end
block_count = _block_count(fsize)
chunk_notifier = ChunkProgressNotifier.new()
block_notifier = BlockProgressNotifier.new()
progresses = []
checksums = []
complete_block = 0
upload_record = _record_upload_progress
record = get_upload_record(resume_record_file) if resume_record_file
if record.nil?
upload_record['upload_extra'] = upload_extra
if version == :v1
progresses = block_count.times.map { _new_block_put_progress_data }
else
data = _init_req(host, encoded_object_name, bucket, uptoken)
upload_extra['upload_id'] = data['uploadId']
upload_extra['expired'] = data['expireAt']
progresses = block_count.times.map { _new_block_put_progress_data_v2 }
end
else
ctx = record['ctx']
complete_block = ctx.length
if version == :v1
upload_record = record
checksums = upload_record['ctx']
progresses = block_count.times.map { _new_block_put_progress_data }
elsif version == :v2
extra = record['upload_extra']
if extra['upload_id'] && extra['expired'] > Time.now.to_i
upload_extra = extra
upload_record = record
checksums = upload_record['ctx']
progresses = block_count.times.map { _new_block_put_progress_data_v2 }
else
data = _init_req(host, encoded_object_name, bucket, uptoken)
upload_extra['upload_id'] = data['uploadId']
upload_extra['expired'] = data['expireAt']
upload_record['upload_extra'] = upload_extra
progresses = block_count.times.map { _new_block_put_progress_data_v2 }
end
else
Log.logger.error 'only support :v1 / :v2 now!'
end
end
code, data, raw_headers = _resumable_put(bucket, uptoken, fh, checksums, progresses, block_notifier, chunk_notifier, complete_block, version, part_size, upload_record, resume_record_file)
FileUtils.rm_f(resume_record_file) if resume_record_file
if HTTP.is_response_ok?(code)
uphost = data["host"]
if version == :v1
entry_uri = bucket + ':' + key
code, data, raw_headers = _mkfile(uphost, uptoken, entry_uri, fsize, checksums, mime_type, custom_meta, customer, callback_params, rotate)
Utils.debug "Mkfile : #{code.inspect} #{data.inspect} #{raw_headers.inspect}"
else
code, data, raw_headers = _complete_parts(key, uptoken, upload_extra, bucket, checksums, mime_type, customer)
end
end
if HTTP.is_response_ok?(code)
Utils.debug "File #{fh.path} {size: #{fsize}} successfully uploaded."
end
return code, data, raw_headers
end # _resumable_upload
def _init_req(host,
encoded_object_name,
bucket,
uptoken)
url = host + '/buckets/' + bucket + '/objects/' + encoded_object_name + '/uploads'
options = {
:headers => {
:content_type => 'application/json',
'Authorization' => 'UpToken ' + uptoken
}
}
_, body, _ = HTTP.api_post(url, '', options)
return body
end # init request
def _upload_part(host,
uptoken,
block,
bucket,
part_number,
upload_id,
encoded_object_name)
options = {
:headers => {
'Authorization' => 'UpToken ' + uptoken,
'Content-Type' => 'application/octet-stream',
'Content-MD5' => Digest::MD5.hexdigest(block)
}
}
url = "#{host}/buckets/#{bucket}/objects/#{encoded_object_name}/uploads/#{upload_id}/#{part_number}"
code, body, raw_headers = HTTP.api_put(url, block, options)
return code, body, raw_headers
end # upload block
def _complete_parts(fname,
uptoken,
upload_extra,
bucket,
etags,
mime_type = nil,
customer = nil)
options = {
:headers => {
'Authorization' => 'UpToken ' + uptoken,
'Content-Type' => 'application/json'
}
}
body = {
'fname' => fname,
'mimeType' => mime_type,
'customVars' => customer,
'parts' => etags
}
json_body = body.to_json
url = "#{upload_extra["host"]}/buckets/#{bucket}/objects/#{upload_extra["encoded_object_name"]}/uploads/#{upload_extra["upload_id"]}"
code, data, raw_headers = HTTP.api_post(url, json_body, options)
return code, data, raw_headers
end # _complete_parts
def _encode_object_name(key)
return '~' if key.nil?
Utils.urlsafe_base64_encode(key)
end
end # self class
end # module Storage
end # module Qiniu
|
qiniu/ruby-sdk
|
lib/qiniu/host_manager.rb
|
require 'thread'
require 'cgi'
module Qiniu
# @deprecated
class BucketIsMissing < RuntimeError; end
class HostManager
def initialize(config)
@config = config
@mutex = Mutex.new
@hosts = {}
end
def up_host(bucket, opts = {})
if !multi_region_support?
"#{extract_protocol(opts)}://up.qiniup.com"
elsif bucket
host = hosts(bucket)
"#{extract_protocol(opts)}://" + host.dig('up', 'acc', 'main', 0) rescue "#{extract_protocol(opts)}://" + host.dig('up', 'src', 'main', 0)
end
end
def fetch_host(bucket, opts = {})
if !multi_region_support?
"#{extract_protocol(opts)}://iovip.qbox.me"
elsif bucket
host = hosts(bucket)
"#{extract_protocol(opts)}://" + host.dig('io', 'acc', 'main', 0) rescue "#{extract_protocol(opts)}://" + host.dig('io', 'src', 'main', 0)
end
end
def up_hosts(bucket, opts = {})
if multi_region_support?
host = hosts(bucket)['up']
multi_region_hosts = []
multi_region_hosts |= host.dig('acc', 'main') || []
multi_region_hosts |= host.dig('src', 'main') || []
return multi_region_hosts
else
raise 'HostManager#up_hosts: multi_region must be enabled'
end
end
def global(bucket, opts = {})
if multi_region_support?
!!hosts(bucket)['global']
else
raise 'HostManager#global: multi_region must be enabled'
end
end
private
def extract_protocol(opts)
(opts[:protocol] || @config[:protocol]).to_s
end
def multi_region_support?
@config[:multi_region]
end
def hosts(bucket)
host = read_host(bucket)
if host
if host_expired?(host)
delete_host(bucket)
else
return host
end
end
url = @config[:uc_host] + '/v2/query?' + HTTP.generate_query_string(ak: @config[:access_key], bucket: bucket)
status, body = HTTP.api_get(url)
if HTTP.is_response_ok?(status)
Utils.debug("Query #{bucket} hosts Success: #{body}")
host = body.merge(:time => Time.now)
write_host(bucket, host)
host
else
Utils.debug("Query #{bucket} hosts Error: #{body}")
raise "Host query is failed"
end
end
def host_expired?(host)
host[:time] + host['ttl'] < Time.now
end
def read_host(bucket)
@mutex.synchronize do
@hosts[bucket]
end
end
def write_host(bucket, host)
@mutex.synchronize do
@hosts[bucket] = host
end
end
def delete_host(bucket)
@mutex.synchronize do
@hosts.delete(bucket)
end
end
end
end
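# Usage sketch (illustrative config; only keys read by this class are shown).
# With multi_region disabled no network request is made and the default
# upload/IO hosts are returned.
if __FILE__ == $PROGRAM_NAME
  manager = Qiniu::HostManager.new(:protocol => 'https', :multi_region => false)
  puts manager.up_host('my-bucket')    # => "https://up.qiniup.com"
  puts manager.fetch_host('my-bucket') # => "https://iovip.qbox.me"
end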
|
qiniu/ruby-sdk
|
lib/qiniu/upload.rb
|
# -*- encoding: utf-8 -*-
# vim: sw=2 ts=2
require 'stringio'
module Qiniu
module Storage
class << self
include Utils
def upload_with_token(uptoken,
local_file,
bucket,
key = nil,
mime_type = nil,
custom_meta = nil,
callback_params = nil,
enable_crc32_check = false,
rotate = nil)
action_params = _generate_action_params(
local_file,
bucket,
key,
mime_type,
custom_meta,
enable_crc32_check,
rotate
)
if callback_params.nil?
callback_params = {:bucket => bucket, :key => key, :mime_type => mime_type}
end
callback_query_string = HTTP.generate_query_string(callback_params)
File.open(local_file, 'rb') do |file|
url = Config.up_host(bucket) + '/upload'
post_data = {
:params => callback_query_string,
:action => action_params,
:file => file,
:multipart => true
}
if !uptoken.nil? then
post_data[:auth] = uptoken unless uptoken.nil?
end
return HTTP.api_post(url, post_data)
end
end # upload_with_token
def upload_with_token_2(uptoken,
local_file,
key = nil,
x_vars = nil,
opts = {})
### Build the URL
_, _, _, bucket = Auth.decode_uptoken(uptoken)
url = Config.up_host(bucket)
url[/\/*$/] = ''
url += '/'
### Build the HTTP body
File.open(local_file, 'rb') do |file|
if opts[:content_type]
file.define_singleton_method("content_type") do
opts[:content_type]
end
end
post_data = {
:file => file,
:multipart => true,
:crc32 => Utils::crc32checksum(local_file),
}
post_data[:token] = uptoken if uptoken
post_data[:key] = key if key
if x_vars.is_a?(Hash)
post_data.merge!(x_vars)
end
### Send the request
HTTP.api_post(url, post_data)
end
end # upload_with_token_2
def upload_buffer_with_token(uptoken,
buf,
key = nil,
x_vars = nil,
opts = {})
### Build the URL
_, _, _, bucket = Auth.decode_uptoken(uptoken)
url = Config.up_host(bucket)
url[/\/*$/] = ''
url += '/'
### Build the HTTP body
if buf.is_a?(String)
data = StringIO.new(buf)
elsif buf.respond_to?(:read)
data = buf
end
data.define_singleton_method("path") do
'NO-PATH'
end
data.define_singleton_method("original_filename") do
'A-MASS-OF-DATA'
end
data.define_singleton_method("content_type") do
opts[:content_type] || 'application/octet-stream'
end
post_data = {
:file => data,
:multipart => true,
}
post_data[:token] = uptoken if uptoken
post_data[:key] = key if key
if x_vars.is_a?(Hash)
post_data.merge!(x_vars)
end
### Send the request
HTTP.api_post(url, post_data)
end # upload_buffer_with_token
### Authorization examples
# put_policy.bucket | put_policy.key | key     | semantics | authorization
# :---------------- | :------------- | :------ | :-------- | :------------
# trivial_bucket    | <nil>          | <nil>   | create    | allowed; the final key is 1) the value generated from put_policy.save_key or 2) the hash of the resource content
# trivial_bucket    | <nil>          | foo.txt | create    | allowed
# trivial_bucket    | <nil>          | bar.jpg | create    | allowed
# trivial_bucket    | foo.txt        | <nil>   | overwrite | allowed; the SDK assigns put_policy.key to key
# trivial_bucket    | foo.txt        | foo.txt | overwrite | allowed
# trivial_bucket    | foo.txt        | bar.jpg | overwrite | forbidden; put_policy.key and key do not match
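# Usage sketch (illustrative values): mirrors the overwrite rows above, where
# put_policy.key and key refer to the same object.
#
#   pp = Qiniu::Auth::PutPolicy.new('trivial_bucket', 'foo.txt')
#   code, data, headers = Qiniu::Storage.upload_with_put_policy(pp, '/tmp/foo.txt')
#   # key defaults to put_policy.key ('foo.txt') when not passed explicitly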
def upload_with_put_policy(put_policy,
local_file,
key = nil,
x_vars = nil,
opts = {})
uptoken = Auth.generate_uptoken(put_policy)
if key.nil? then
key = put_policy.key
end
return upload_with_token_2(uptoken, local_file, key, x_vars, opts)
end # upload_with_put_policy
def upload_buffer_with_put_policy(put_policy,
buf,
key = nil,
x_vars = nil,
opts = {})
uptoken = Auth.generate_uptoken(put_policy)
if key.nil? then
key = put_policy.key
end
return upload_buffer_with_token(uptoken, buf, key, x_vars, opts)
end # upload_buffer_with_put_policy
private
def _generate_action_params(local_file,
bucket,
key = nil,
mime_type = nil,
custom_meta = nil,
enable_crc32_check = false,
rotate = nil)
raise NoSuchFileError, local_file unless File.exist?(local_file)
if key.nil?
key = Digest::SHA1.hexdigest(local_file + Time.now.to_s)
end
entry_uri = bucket + ':' + key
if mime_type.nil? || mime_type.empty?
mime = MIME::Types.type_for local_file
mime_type = mime.empty? ? 'application/octet-stream' : mime[0].content_type
end
action_params = '/rs-put/' + Utils.urlsafe_base64_encode(entry_uri) + '/mimeType/' + Utils.urlsafe_base64_encode(mime_type)
action_params += '/meta/' + Utils.urlsafe_base64_encode(custom_meta) unless custom_meta.nil?
action_params += '/crc32/' + Utils.crc32checksum(local_file).to_s if enable_crc32_check
action_params += '/rotate/' + rotate if !rotate.nil? && rotate.to_i >= 0
action_params
end # _generate_action_params
end # class << self
end # module Storage
end # module Qiniu
|
qiniu/ruby-sdk
|
spec/qiniu/pfop_spec.rb
|
# -*- encoding: utf-8 -*-
# vim: sw=2 ts=2
require 'spec_helper'
require 'qiniu/auth'
require 'qiniu'
require 'qiniu/fop'
module Qiniu
module Fop
module Persistance
describe Persistance do
before :all do
@bucket = 'rubysdk'
pic_fname = "image_logo_for_test.png"
@key = make_unique_key_in_bucket(pic_fname)
local_file = File.expand_path('../' + pic_fname, __FILE__)
### Check that the test file exists
code, body, headers = Qiniu::Storage.stat(@bucket, @key)
if code == 404 || code == 612 then
# The file does not exist; try to upload it
pp = Qiniu::Auth::PutPolicy.new(@bucket, @key)
code, body, headers = Qiniu::Storage.upload_with_put_policy(
pp,
local_file,
nil,
nil,
bucket: @bucket
)
puts "Put a test file for Persistance cases"
puts code.inspect
puts body.inspect
puts headers.inspect
end
end
context ".pfop" do
it "should works" do
pp = Persistance::PfopPolicy.new(
@bucket,
@key,
'imageView2/1/w/80/h/80', # fops
'www.baidu.com' # notify_url
)
code, data, headers = Qiniu::Fop::Persistance.pfop(pp)
code.should == 200
puts data.inspect
end
end
context ".prefop" do
it "should works" do
code, data, headers = Qiniu::Fop::Persistance.prefop('fakePersistentId')
code.should == 404
puts code.inspect
puts data.inspect
puts headers.inspect
end
end
context ".p1" do
it "should works" do
url = 'http://fake.qiniudn.com/fake.jpg'
fop = 'imageView2/1/w/80/h/80'
target_url = "#{url}?p/1/#{CGI.escape(fop).gsub('+', '%20')}"
p1_url = Qiniu::Fop::Persistance.generate_p1_url(url, fop)
p1_url.should == target_url
puts p1_url.inspect
end
end
end
end # module Persistance
end # module Fop
end # module Qiniu
|
qiniu/ruby-sdk
|
spec/qiniu/auth_spec.rb
|
# -*- encoding: utf-8 -*-
# vim: sw=2 ts=2
require 'spec_helper'
require 'qiniu/auth'
require 'qiniu/config'
require 'qiniu/storage'
require 'digest/sha1'
module Qiniu
module Auth
describe Auth do
before :all do
@bucket = 'rubysdk'
end
after :all do
end
### Test downloading a private resource
context ".download_private_file" do
it "should work" do
### Generate a key
key = 'a_private_file'
key = make_unique_key_in_bucket(key)
puts "key=#{key}"
### Upload a test file
pp = Auth::PutPolicy.new(@bucket, key)
code, data, raw_headers = Qiniu::Storage.upload_with_put_policy(
pp,
__FILE__,
nil,
nil,
bucket: @bucket
)
code.should == 200
puts data.inspect
puts raw_headers.inspect
### Get the download URL
code, domains, = Qiniu::Storage.domains(@bucket)
code.should == 200
domains.should_not be_empty
domain = domains.first['domain']
url = "http://#{domain}/#{key}"
### Sign the download URL (without parameters)
download_url = Qiniu::Auth.authorize_download_url(url)
puts "download_url=#{download_url}"
result = RestClient.get(download_url)
result.code.should == 200
result.body.should_not be_empty
### Sign the download URL (with parameters)
download_url = Qiniu::Auth.authorize_download_url(url, fop: 'qhash/md5')
puts "download_url=#{download_url}"
result = RestClient.get(download_url)
result.code.should == 200
result.body.should_not be_empty
### Delete the file
code, data = Qiniu::Storage.delete(@bucket, key)
puts data.inspect
code.should == 200
end
it "should generate uphosts and global for multi_region" do
origin_multi_region = Config.settings[:multi_region]
begin
Config.settings[:multi_region] = true
### Generate a key
key = 'a_private_file'
key = make_unique_key_in_bucket(key)
puts "key=#{key}"
### Build a PutPolicy
pp = Auth::PutPolicy.new(@bucket, key)
expect(pp.instance_variable_get(:@uphosts)).to eq ["upload.qiniup.com", "up.qiniup.com"]
expect(pp.instance_variable_get(:@global)).to be false
ensure
Config.settings[:multi_region] = origin_multi_region
end
end
end
end
### Test the callback signature
context ".authenticate_callback_request" do
it "should work" do
url = '/test.php'
body = 'name=xxx&size=1234'
false.should == Qiniu::Auth.authenticate_callback_request('ABCD', url, body)
false.should == Qiniu::Auth.authenticate_callback_request(Config.settings[:access_key], url, body)
false.should == Qiniu::Auth.authenticate_callback_request('QBox ' + Config.settings[:access_key] + ':', url, body)
false.should == Qiniu::Auth.authenticate_callback_request('QBox ' + Config.settings[:access_key] + ':????', url, body)
acctoken = Qiniu::Auth.generate_acctoken(url, body)
auth_str = 'QBox ' + acctoken
false.should == Qiniu::Auth.authenticate_callback_request(auth_str + ' ', url, body)
true.should == Qiniu::Auth.authenticate_callback_request(auth_str, url, body)
true.should == Qiniu::Auth.authenticate_callback_request(acctoken, url, body)
end
end
end # module Auth
module Exception_Auth
describe Exception_Auth, :not_set_ak_sk => true do
### Test that exceptions are raised when access_key/secret_key are not set
context ".not_set_ak_sk" do
it "should work" do
puts Qiniu::Config.instance_variable_get("@settings").inspect
begin
uptoken = Qiniu::Auth.generate_uptoken({})
rescue => e
e.message.should == "Please set Qiniu's access_key and secret_key before authorize any tokens."
else
fail "Not raise any exception."
end
begin
download_url = Qiniu::Auth.authorize_download_url("http://test.qiniudn.com/a_private_file")
rescue => e
e.message.should == "Please set Qiniu's access_key and secret_key before authorize any tokens."
else
fail "Not raise any exception."
end
begin
acctoken = Qiniu::Auth.generate_acctoken("http://rsf.qbox.me/list")
rescue => e
e.message.should == "Please set Qiniu's access_key and secret_key before authorize any tokens."
else
fail "Not raise any exception."
end
end
end
end
end # module Exception_Auth
end # module Qiniu
|
sous-chefs/nano
|
test/fixtures/cookbooks/test/recipes/default.rb
|
nano_install 'nano' do
end
colour = {}
options = {}
colour['yellow'] = '\\<(BEGIN|END|alias|and|begin|break|case|class|def|defined\\?|do|else|elsif|end|ensure|false|for|if|in|module|next|nil|not|or|redo|rescue|retry|return|self|super|then|true|undef|unless|until|when|while|yield)\\>'
options['header'] = '"^#!.*/(env +)?ruby( |$)"'
nano_config 'ruby' do
filename_regex '"\\.rb$" "Gemfile" "config.ru" "Rakefile" "Capfile" "Vagrantfile"'
colour colour
options options
end
|
sous-chefs/nano
|
resources/config.rb
|
unified_mode true
property :syntax_name, String, name_property: true
property :filename_regex, String, required: true
property :colour, Hash, default: {}
property :options, Hash, default: {}
property :config_directory, String, default: '/etc/nanorc.d'
property :config_file, String, default: '/etc/nanorc'
property :cookbook, String, default: 'nano'
property :source, String, default: 'nanorc_conf.erb'
action :create do
file_name = ::File.join(new_resource.config_directory, "#{new_resource.syntax_name}.nanorc")
directory new_resource.config_directory do
end
with_run_context :root do
edit_resource(:template, new_resource.config_file) do |new_resource|
node.run_state['nano'] ||= { 'conf_template_source' => {}, 'conf_cookbook' => {} }
source 'nanorc.erb'
cookbook lazy { node.run_state['nano']['conf_cookbook'][new_resource.cookbook] ||= 'nano' }
variables['global'] ||= {}
variables['global']['nano'] ||= {}
variables['global']['nano']['rcfiles'] ||= {}
variables['global']['nano']['rcfiles'][new_resource.syntax_name] ||= file_name
action :nothing
delayed_action :create
end
end
template file_name do
source new_resource.source
cookbook new_resource.cookbook
variables(
syntax_name: new_resource.syntax_name,
filename_regex: new_resource.filename_regex,
colour: new_resource.colour,
options: new_resource.options
)
end
end
action :delete do
file ::File.join(new_resource.config_directory, "#{new_resource.syntax_name}.nanorc") do
action :delete
end
end
|
sous-chefs/nano
|
test/integration/default/default_spec.rb
|
describe file('/etc/nanorc') do
it { should be_a_file }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
its('content') { should match(%r{include /etc/nanorc.d/ruby.nanorc}) }
end
describe file('/etc/nanorc.d') do
it { should be_a_directory }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
end
describe file('/etc/nanorc.d/ruby.nanorc') do
it { should be_a_file }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
its('content') { should match(/syntax \"ruby\" \"\\\.rb\$\" \"Gemfile\" \"config.ru\" \"Rakefile\" \"Capfile\" \"Vagrantfile\"/) }
its('content') { should match(%r{header \"\^\#\!\.\*\/\(env \+\)\?ruby\( \|\$\)\"}) }
its('content') { should match(/color yellow \"\\\<\(BEGIN\|END\|/) }
end
|
sous-chefs/nano
|
resources/install.rb
|
unified_mode true
property :package_name, String, name_property: true
action :install do
package new_resource.package_name do
action :install
end
end
action :remove do
package new_resource.package_name do
action :remove
end
end
|
sous-chefs/nano
|
metadata.rb
|
name 'nano'
maintainer '<NAME>'
maintainer_email '<EMAIL>'
license 'Apache-2.0'
description 'Installs and configures nano'
version '3.0.0'
source_url 'https://github.com/sous-chefs/nano'
issues_url 'https://github.com/sous-chefs/nano/issues'
chef_version '>= 15.3'
%w(debian ubuntu arch redhat centos fedora scientific oracle amazon suse opensuseleap).each do |os|
supports os
end
|
nWoKcir33/programming-univbasics-3-labs-with-tdd-online-web-prework
|
calculator.rb
|
# Add your variables here
first_number = 1
second_number = 2
sum = 3
difference = -1
product = 2
quotient = 0.5
|
everydayhero/money
|
lib/money/version.rb
|
class Money
VERSION = "6.1.0.beta1"
end
|
lisk-builders/lisk.rb
|
lib/lisk/api.rb
|
require "time"
# The Lisk API Ruby wrapper gem.
module Lisk
# Helper functions to wrap raw legacy APIs into meaningful methods.
class API < Legacy
# Returns true if chain is syncing.
def is_syncing?
synced = self.loader_status_sync
if synced["success"]
return synced["syncing"]
else
return nil
end
end
# Returns true if chain is loaded.
def is_chain_loaded?
loaded = self.loader_status
if loaded["success"]
return loaded["loaded"]
else
return nil
end
end
# Get the Lisk node version string.
def get_version
version = self.peers_version
if version["success"]
return version["version"]
else
return nil
end
end
# Get the Lisk node version build date.
def get_version_build
version = self.peers_version
if version["success"]
return version["build"]
else
return nil
end
end
# Get the Lisk node version commit.
def get_version_commit
version = self.peers_version
if version["success"]
return version["commit"]
else
return nil
end
end
# Get the height of the local best known block.
def get_best_block
synced = self.loader_status_sync
if synced["success"]
return synced["height"]
else
return nil
end
end
# Get the number of remaining local sync blocks.
def get_remaining_blocks
synced = self.loader_status_sync
if synced["success"]
return synced["blocks"]
else
return nil
end
end
# Get a block by its id.
def get_block id
block = self.blocks_get_by_id id
if block["success"]
return block["block"]
else
return nil
end
end
# Get block ping, returns true if block received in 120 seconds.
def get_block_health
healthy = self.loader_status_ping
return healthy["success"]
end
# Get the global best block in the network.
def get_chain_best_block
blocks = self.blocks_get_height
if blocks["success"]
return blocks["height"]
else
return nil
end
end
# Get the broad hash.
def get_broadhash
blocks = self.blocks_get_status
if blocks["success"]
return blocks["broadhash"]
else
return nil
end
end
# Get the net hash.
def get_nethash
blocks = self.blocks_get_nethash
if blocks["success"]
return blocks["nethash"]
else
return nil
end
end
# Get the current epoch date.
def get_epoch
blocks = self.blocks_get_status
if blocks["success"]
epoch = Time.parse blocks["epoch"]
return epoch
else
return nil
end
end
# Get the current milestone.
def get_milestone
blocks = self.blocks_get_milestone
if blocks["success"]
return blocks["milestone"]
else
return nil
end
end
# Get the current block reward.
def get_block_reward
blocks = self.blocks_get_reward
if blocks["success"]
return blocks["reward"]
else
return nil
end
end
# Get the available supply.
def get_available_supply
blocks = self.blocks_get_supply
if blocks["success"]
return blocks["supply"]
else
return nil
end
end
# Get fees for sending transactions
def get_fee_send
fee = self.blocks_get_fees
if fee["success"]
return fee["fees"]["send"]
else
return nil
end
end
# Get fees for voting
def get_fee_vote
fee = self.blocks_get_fees
if fee["success"]
return fee["fees"]["vote"]
else
return nil
end
end
# Get fees for second signatures
def get_fee_secondsignature
fee = self.blocks_get_fees
if fee["success"]
return fee["fees"]["secondsignature"]
else
return nil
end
end
# Get fees for delegate registration
def get_fee_delegate
fee = self.blocks_get_fees
if fee["success"]
return fee["fees"]["delegate"]
else
return nil
end
end
# Get fees for multisignature registration
def get_fee_multisignature
fee = self.blocks_get_fees
if fee["success"]
return fee["fees"]["multisignature"]
else
return nil
end
end
# Get fees for dapps registration
def get_fee_dapp
fee = self.blocks_get_fees
if fee["success"]
return fee["fees"]["dapp"]
else
return nil
end
end
# Get an array of all known peers.
def get_peers
peers = self.peers
end
# Get the number of all known peers.
def get_peer_count
peers = self.get_peers
if not peers.nil?
count = peers.count
else
count = 0
end
end
# Get an array of all connected peers.
def get_connected_peers
filter_by_state = { :state => 2 }
connected = self.peers filter_by_state
end
# Get an array of all disconnected peers.
def get_disconnected_peers
filter_by_state = { :state => 1 }
disconnected = self.peers filter_by_state
end
# Get an array of all banned peers.
def get_banned_peers
filter_by_state = { :state => 0 }
banned = self.peers filter_by_state
end
# Handles unimplemented methods
def method_missing name, *args, &block
todo "#{self}::#{name} METHOD MISSING"
end
end
end
|
lisk-builders/lisk.rb
|
examples/legacy_api.rb
|
#!/usr/bin/env ruby
require 'lisk'
# Try to connect a local Lisk client on test network.
# Warning: Think twice and test thoroughly before enabling this on main network!
client = Lisk::Client.new "127.0.0.1", 7000
legacy_api = Lisk::Legacy.new client
# Test data
_user = "4fryn_lorem_ipsum_42"
_secret = "lorem ipsum dolor sit amet et semper aperiam est duo modus zril"
_second_secret = "amet aperiam dolor duo est et ipsum lorem modus semper sit zril"
_address = "6824694156543443160L"
_public_key = "<KEY>"
_votes = {
:secret => _secret,
:public_key => _public_key,
# :secondSecret => _second_secret,
:delegates => [
"+473c354cdf627b82e9113e02a337486dd3afc5615eb71ffd311c5a0beda37b8c",
"+eaa049295d96618c51eb30deffe7fc2cc8bfc13190cb97f3b513dd060b000a46",
"+961d1a1057a09f865291873e9ba3d0af7b2a3a1e971bb7576a2aab1c526acbcd",
"+71e1e34dd0529d920ee6c38497b028352c57b7130d55737c8a778ff3974ec29f",
<KEY>",
<KEY>",
<KEY>",
<KEY>",
"+e683da7b4fe46164b9db3fd599481ad0630d2d892546c1ac63e59a5acb903140",
<KEY>",
"+0911107983da4b581a109b5fac9579d89e29f06f10d803370f88a41100c3374e",
"+6beaa7c569c1000f4fcef4ce3133b18609aea52adf95d5992970ea5e0cedda87",
"+19bdab59b24f7ef2a9d0b1b0942cff450875302e0c59c437a372eb6bb27a0b43",
"+279320364fc3edd39b77f1fa29594d442e39220b165956fa729f741150b0dc4d",
"+f4871371ff27f467e71087dd6bb38c975c5a49bdff02de6b5ca5e43bbf5b3c3b",
"+5fb6b29846e95c88d464e003c835ca4f88260eaa57cd8deded8fc8038dfcbc60",
"+8a1a3df89bf87e6c7bd29e06aaf7e3d7f1eef45f2058413a70bed0f4e3cb37f8",
"+2dc40508f548b405fa2a64a24e91c9b6ea80ccf28f4cd80686627e55a91efc4b",
"+6089206bdd49e8e6c824b4896f5b3c2d71207c30c6bf056d430ba0d8838e7c51",
"+e8720600afd888455fe9eea4c859d08efd8122f4f732bba94504cfefc318de55",
"+d4ce34592854e06370a79ee95e4bdf8eeb9d0d37dd0c802d9ad2357fd4cb9ec7"
]
}
_tx_filter = {
:blockId => "39391848391772781",
:senderId => "14815133512790761431L",
:recipientId => "10020978176543317477L",
:limit => 1, :offset => 0,
:orderBy => "blockId"
}
_raw_tx = {
:secret => _secret,
:publicKey => _public_key,
# :secondSecret => _second_secret,
:recipientId => "15709494141295217973L",
:amount => 1e7
}
_tx_id = "17278680718005275020"
_peer_filter = {
:state => 2,
:os => "linux3.10.0-042stab123.6",
:version => "0.9.10b",
:limit => 10,
:offset => 0,
:orderBy => "version"
}
_ip_filter = {
:ip => "172.16.17.32",
:port => 7000
}
_block_filter = {
:generatorPublicKey => "5ad8b74e39ad7502c8eeea080c8627b3aa9bb28651b988ef38112f21367b132d",
:height => 3481431,
:previousBlock => "8589373032001092432",
:totalAmount => 0,
:totalFee => 0,
:limit => 1,
:offset => 0,
:orderBy => "height"
}
_block_id = "8589373032001092432"
_secrets = {
:secret => _secret,
:secondSecret => _second_secret,
:publicKey => _public_key
}
_ms_secrets = {
:secret => _secret,
# :secondSecret => _second_secret,
:lifetime => 72,
:min => 2,
:keysgroup => [
<KEY>",
"+e8720600afd888455fe9eea4c859d08efd8122f4f732bba94504cfefc318de55",
"+d4ce34592854e06370a79ee95e4bdf8eeb9d0d37dd0c802d9ad2357fd4cb9ec7"
]
}
_ms_signature = {
:secret => _secret,
:publicKey => _public_key,
:transactionId => _tx_id
}
_delegate = {
:secret => _secret,
# :secondSecret => _second_secret,
:username => _user
}
_delegate_filter = {
:limit => 3,
:offset => 0,
:orderBy => "username"
}
_delegate_query = {
:q => "4fryn",
:orderBy => "producedblocks:desc"
}
# Testing legacy API against https://github.com/lisk-builders/lisk.rb/issues/4
account = legacy_api.accounts_open _secret
p account
balance = legacy_api.accounts_get_balance _address
p balance
public_key = legacy_api.accounts_get_public_key _address
p public_key
public_key = legacy_api.accounts_generate_public_key _secret
p public_key
account = legacy_api.accounts _address
p account
delegate = legacy_api.accounts_delegates_get_by_address _address
p delegate
votes = legacy_api.accounts_delegates_put _votes
p votes
syncing = legacy_api.loader_status_sync
p syncing
status = legacy_api.loader_status
p status
ping = legacy_api.loader_status_ping
p ping
transactions = legacy_api.transactions
p transactions
transactions = legacy_api.transactions _tx_filter
p transactions
transaction = legacy_api.transactions_put _raw_tx
p transaction
transaction = legacy_api.transactions_get_by_id _tx_id
p transaction
transaction = legacy_api.transactions_unconfirmed_get_by_id _tx_id
p transaction
transaction = legacy_api.transactions_unconfirmed
p transaction
transaction = legacy_api.transactions_queued
p transaction
transaction = legacy_api.transactions_queued_get_by_id _tx_id
p transaction
peers = legacy_api.peers
p peers
peers = legacy_api.peers _peer_filter
p peers
peer = legacy_api.peers_get _ip_filter
p peer
version = legacy_api.peers_version
p version
blocks = legacy_api.blocks
p blocks
blocks = legacy_api.blocks _block_filter
p blocks
block = legacy_api.blocks_get_by_id _block_id
p block
fee = legacy_api.blocks_get_fee
p fee
fees = legacy_api.blocks_get_fees
p fees
reward = legacy_api.blocks_get_reward
p reward
supply = legacy_api.blocks_get_supply
p supply
height = legacy_api.blocks_get_height
p height
status = legacy_api.blocks_get_status
p status
nethash = legacy_api.blocks_get_nethash
p nethash
milestone = legacy_api.blocks_get_milestone
p milestone
fee = legacy_api.signatures_fee
p fee
transaction = legacy_api.signatures_put _secrets
p transaction
delegate = legacy_api.delegates_put _delegate
p delegate
delegates = legacy_api.delegates
p delegates
delegates = legacy_api.delegates _delegate_filter
p delegates
delegate = legacy_api.delegates_get_by_key _public_key
p delegate
delegate = legacy_api.delegates_get_by_name _user
p delegate
delegates = legacy_api.delegates_search _delegate_query
p delegates
count = legacy_api.delegates_count
p count
account = legacy_api.accounts_delegates _address
p account
voters = legacy_api.delegates_voters _public_key
p voters
address = legacy_api.delegates_forging_enable _secret
p address
address = legacy_api.delegates_forging_disable _secret
p address
forged = legacy_api.delegates_forging_get_forged_by_account _public_key
p forged
forgers = legacy_api.delegates_get_next_forgers
p forgers
forgers = legacy_api.delegates_get_next_forgers 3
p forgers
#legacy_api.dapps_put ### UNIMPLEMENTED (#4)
#legacy_api.dapps filter ### UNIMPLEMENTED (#4)
#legacy_api.dapps_get_by_id id ### UNIMPLEMENTED (#4)
#legacy_api.dapps_search query ### UNIMPLEMENTED (#4)
#legacy_api.dapps_install ### UNIMPLEMENTED (#4)
#legacy_api.dapps_installed ### UNIMPLEMENTED (#4)
#legacy_api.dapps_installed_ids ### UNIMPLEMENTED (#4)
#legacy_api.dapps_uninstall ### UNIMPLEMENTED (#4)
#legacy_api.dapps_launch ### UNIMPLEMENTED (#4)
#legacy_api.dapps_installing ### UNIMPLEMENTED (#4)
#legacy_api.dapps_uninstalling ### UNIMPLEMENTED (#4)
#legacy_api.dapps_launched ### UNIMPLEMENTED (#4)
#legacy_api.dapps_categories ### UNIMPLEMENTED (#4)
#legacy_api.dapps_stop ### UNIMPLEMENTED (#4)
transaction = legacy_api.multisignatures_put _ms_secrets
p transaction
accounts = legacy_api.multisignatures_accounts _public_key
p accounts
transaction = legacy_api.multisignatures_sign _ms_signature
p transaction
transactions = legacy_api.multisignatures_pending _public_key
p transactions
|
lisk-builders/lisk.rb
|
examples/status.rb
|
#!/usr/bin/env ruby
require 'lisk'
# Try to connect a local Lisk client.
node = Lisk::Client.new
# Configure host and port of the Lisk client.
node = node.configure "127.0.0.1", 7000
# Same as above, just in one line, let's stick to test network for now.
node = Lisk::Client.new "127.0.0.1", 7000
# Lisk tools wraps the raw API in meaningful methods.
api = Lisk::API.new node
# Only proceed if the client is connected.
if node.is_alive? or api.is_syncing?
# Lisk version API example.
version = api.get_version
commit = api.get_version_commit
build = api.get_version_build
p "Lisk node version #{version} commit #{commit} build #{build}..."
# Lisk node status API example.
connected = node.is_alive?
loaded = api.is_chain_loaded?
p "Lisk node is connected: #{connected}... Blockchain loaded: #{loaded}..."
# Lisk node syncing API example.
synced = api.is_syncing?
blocks = api.get_remaining_blocks
height = api.get_best_block
p "Lisk node is syncing: #{synced}... #{blocks} remaining blocks to latest block #{height}..."
# Lisk node peers API example.
cond = api.get_connected_peers.count
disd = api.get_disconnected_peers.count
band = api.get_banned_peers.count
all = api.get_peer_count
p "Lisk node saw #{all} peers... #{cond} connected, #{disd} disconnected, #{band} banned..."
# Lisk blockchain API example.
chain_height = api.get_chain_best_block
block_reward = api.get_block_reward
total_supply = api.get_available_supply
p "Lisk chain latest block: #{chain_height}... total supply: #{total_supply / 1e8}... block reward: #{block_reward / 1e8}"
else
p 'Lisk node disconnected ...'
end
|
lisk-builders/lisk.rb
|
lib/lisk/legacy.rb
|
# The Lisk API Ruby wrapper gem.
module Lisk
# Implements raw legacy APIs of the Lisk Core pre-1.0.0 node.
class Legacy
# A "lisk/client" connecting to a Lisk Core API node.
attr_accessor :client
# Initializing the legacy API with a legacy Lisk Core API client.
def initialize client
if not client.nil?
@client = client
return self
else
return nil
end
end
# Request information about an account.
# `POST /accounts/open`
def accounts_open secret
params = { :secret => secret }
account = @client.query_post "accounts/open", params
if account["success"]
return account["account"]
else
return nil
end
end
# Request the balance of an account.
# `GET /accounts/getBalance?address=address`
def accounts_get_balance address
params = { :address => address }
balance = @client.query_get "accounts/getBalance", params
end
# Get the public key of an account. If the account does not exist the API call will return an error.
# `GET /accounts/getPublicKey?address=address`
def accounts_get_public_key address
params = { :address => address }
public_key = @client.query_get "accounts/getPublicKey", params
end
# Returns the public key of the provided secret key.
# `POST /accounts/generatePublicKey`
def accounts_generate_public_key secret
params = { :secret => secret }
public_key = @client.query_post "accounts/generatePublicKey", params
end
# Returns account information of an address.
# `GET /accounts?address=address`
def accounts address
params = { :address => address }
account = @client.query_get "accounts", params
end
# Returns delegate vote accounts by address.
# `GET /accounts/delegates?address=address`
def accounts_delegates_get_by_address address
params = { :address => address }
delegate = @client.query_get "accounts/delegates", params
end
# Vote for the selected delegates. Maximum of 33 delegates at once.
# `PUT /accounts/delegates`
def accounts_delegates_put votes
delegate = @client.query_put "accounts/delegates", votes
end
# Get the synchronization status of the client.
# `GET /loader/status/sync`
def loader_status_sync
sync = @client.query_get "loader/status/sync"
end
# Returns the loading status of the blockchain.
# `GET /loader/status`
def loader_status
status = @client.query_get "loader/status"
end
# Get the status of last received block.
# Returns true if block was received in the past 120 seconds.
# `GET /loader/status/ping`
def loader_status_ping
ping = @client.query_get "loader/status/ping"
end
# List of transactions matched by provided parameters.
# `GET /transactions?blockId=blockId&senderId=senderId&recipientId=recipientId&limit=limit&offset=offset&orderBy=field`
def transactions filter = nil
transactions = @client.query_get "transactions", filter
if transactions["success"]
return transactions["transactions"]
else
return nil
end
end
# Send transaction to broadcast network.
# `PUT /transactions`
def transactions_put transaction
transaction = @client.query_put "transactions", transaction
end
# Get transaction that matches the provided id.
# `GET /transactions/get?id=id`
def transactions_get_by_id id
params = { :id => id }
transaction = @client.query_get "transactions/get", params
end
# Get unconfirmed transaction that matches the provided id.
# `GET /transactions/unconfirmed/get?id=id`
def transactions_unconfirmed_get_by_id id
params = { :id => id }
transaction = @client.query_get "transactions/unconfirmed/get", params
end
# Gets a list of unconfirmed transactions.
# `GET /transactions/unconfirmed`
def transactions_unconfirmed
transaction = @client.query_get "transactions/unconfirmed"
end
# Gets a list of queued transactions.
# `GET /transactions/queued`
def transactions_queued
transaction = @client.query_get "transactions/queued"
end
# Get queued transaction that matches the provided id.
# `GET /transactions/queued/get?id=id`
def transactions_queued_get_by_id id
params = { :id => id }
transaction = @client.query_get "transactions/queued/get", params
end
# Gets list of peers.
# `GET /peers?state=state&os=os&version=version&limit=limit&offset=offset&orderBy=orderBy`
def peers filter = nil
peers = @client.query_get "peers", filter
if peers["success"]
return peers["peers"]
else
return nil
end
end
# Gets peer by IP address and port.
# `GET /peers/get?ip=ip&port=port`
def peers_get filter = nil
peer = @client.query_get "peers/get", filter
end
# Gets version and build time.
# `GET /peers/version`
def peers_version
version = @client.query_get "peers/version"
end
# Gets all blocks by provided filter(s).
# `GET /blocks?generatorPublicKey=generatorPublicKey&height=height&previousBlock=previousBlock&totalAmount=totalAmount&totalFee=totalFee&limit=limit&offset=offset&orderBy=orderBy`
def blocks filter = nil
blocks = @client.query_get "blocks", filter
if blocks["success"]
return blocks["blocks"]
else
return nil
end
end
# Gets block by provided id.
# `GET /blocks/get?id=id`
def blocks_get_by_id id
params = { :id => id }
block = @client.query_get "blocks/get", params
end
# Get transaction fee for sending "normal" transactions.
# `GET /blocks/getFee`
def blocks_get_fee
fee = @client.query_get "blocks/getFee"
end
# Get transaction fee for all types of transactions.
# `GET /blocks/getFees`
def blocks_get_fees
fees = @client.query_get "blocks/getFees"
end
# Gets the forging reward for blocks.
# `GET /blocks/getReward`
def blocks_get_reward
reward = @client.query_get "blocks/getReward"
end
# Gets the total amount of Lisk in circulation
# `GET /blocks/getSupply`
def blocks_get_supply
supply = @client.query_get "blocks/getSupply"
end
# Gets the blockchain height of the client.
# `GET /blocks/getHeight`
def blocks_get_height
height = @client.query_get "blocks/getHeight"
end
# Gets status of height, fee, milestone, blockreward and supply.
# `GET /blocks/getStatus`
def blocks_get_status
status = @client.query_get "blocks/getStatus"
end
# Gets the nethash of the blockchain on a client.
# `GET /blocks/getNethash`
def blocks_get_nethash
nethash = @client.query_get "blocks/getNethash"
end
# Gets the milestone of the blockchain on a client.
# `GET /blocks/getMilestone`
def blocks_get_milestone
milestone = @client.query_get "blocks/getMilestone"
end
# Gets the fee for registering a second signature on an account.
# `GET /signatures/fee`
def signatures_fee
fee = @client.query_get "signatures/fee"
end
# Add a second signature to an account.
# `PUT /signatures`
def signatures_put secrets
transaction = @client.query_put "signatures", secrets
end
# Puts request to create a delegate.
# `PUT /delegates`
def delegates_put delegate
delegate = @client.query_put "delegates", delegate
end
# Gets list of delegates by provided filter.
# `GET /delegates?limit=limit&offset=offset&orderBy=orderBy`
def delegates filter = nil
delegates = @client.query_get "delegates", filter
if delegates["success"]
return delegates["delegates"]
else
return nil
end
end
# Gets delegate by public key.
# `GET /delegates/get?publicKey=publicKey`
def delegates_get_by_key public_key
params = { :publicKey => public_key }
delegate = @client.query_get "delegates/get", params
if delegate["success"]
return delegate["delegate"]
else
return nil
end
end
# Gets delegate by username.
# `GET /delegates/get?username=username`
def delegates_get_by_name user_name
params = { :username => user_name }
delegate = @client.query_get "delegates/get", params
if delegate["success"]
return delegate["delegate"]
else
return nil
end
end
# Search for Delegates by "fuzzy" username.
# `GET /delegates/search?q=username&orderBy=producedblocks:desc`
def delegates_search query
delegates = @client.query_get "delegates/search", query
if delegates["success"]
return delegates["delegates"]
else
return nil
end
end
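# Illustrative call (not from the original source; query values are placeholders):
#   api.delegates_search(:q => "gen", :orderBy => "username:asc")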
# Get total count of registered delegates.
# `GET /delegates/count`
def delegates_count
count = @client.query_get "delegates/count"
end
# Get votes by account wallet address.
# `GET /accounts/delegates/?address=address`
def accounts_delegates address
params = { :address => address }
account = @client.query_get "accounts/delegates", params
end
# Gets voters of delegate.
# `GET /delegates/voters?publicKey=publicKey`
def delegates_voters public_key
params = { :publicKey => public_key }
voters = @client.query_get "delegates/voters", params
if voters["success"]
return voters["accounts"]
else
return nil
end
end
# Enables forging for a delegate on the client node.
# `POST /delegates/forging/enable`
def delegates_forging_enable secret
params = { :secret => secret }
count = @client.query_post "delegates/forging/enable", params
end
# Disables forging for a delegate on the client node.
# `POST /delegates/forging/disable`
def delegates_forging_disable secret
params = { :secret => secret }
count = @client.query_post "delegates/forging/disable", params
end
# Get amount of Lisk forged by an account.
# `GET /delegates/forging/getForgedByAccount?generatorPublicKey=generatorPublicKey`
def delegates_forging_get_forged_by_account public_key
params = { :generatorPublicKey => public_key }
forged = @client.query_get "delegates/forging/getForgedByAccount", params
end
# Get next delegate lining up to forge.
# `GET /delegates/getNextForgers?limit=limit`
def delegates_get_next_forgers limit = 10
params = { :limit => limit }
forgers = @client.query_get "delegates/getNextForgers", params
end
#####################################################
# https://github.com/lisk-builders/lisk.rb/issues/4 #
#####################################################
# `PUT /dapps`
def dapps_put
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# `GET /dapps?category=category&name=name&type=type&link=link&limit=limit&offset=offset&orderBy=orderBy`
def dapps filter
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# `GET /dapps/get?id=id`
def dapps_get_by_id id
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# `GET /dapps/search?q=q&category=category&installed=installed`
def dapps_search query
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# `POST /dapps/install`
def dapps_install
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# `GET /dapps/installed`
def dapps_installed
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# `GET /dapps/installedIds`
def dapps_installed_ids
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# `POST /dapps/uninstall`
def dapps_uninstall
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# `POST /dapps/launch`
def dapps_launch
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# `GET /dapps/installing`
def dapps_installing
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# `GET /dapps/uninstalling`
def dapps_uninstalling
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# `GET /dapps/launched`
def dapps_launched
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# `GET /dapps/categories`
def dapps_categories
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# `POST /dapps/stop`
def dapps_stop
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
#####################################################
# Create a multi-signature account.
# `PUT /multisignatures`
def multisignatures_put secrets
transaction = @client.query_put "multisignatures", secrets
end
# Gets a list of accounts that belong to a multi-signature account.
# `GET /multisignatures/accounts?publicKey=publicKey`
def multisignatures_accounts public_key
params = { :publicKey => public_key }
accounts = @client.query_get "multisignatures/accounts", params
end
# Signs a transaction that is awaiting signature.
# `POST /multisignatures/sign`
def multisignatures_sign signature
transaction = @client.query_post "multisignatures/sign", signature
end
# Returns a list of multi-signature transactions that are waiting for a signature by publicKey.
# `GET /multisignatures/pending?publicKey=publicKey`
def multisignatures_pending public_key
params = { :publicKey => public_key }
transactions = @client.query_get "multisignatures/pending", params
end
# Handles unimplemented methods
def method_missing name, *args, &block
todo "#{self}::#{name} METHOD MISSING"
end
end
end
|
lisk-builders/lisk.rb
|
lib/lisk/delegate.rb
|
module Lisk
class Delegate
attr_accessor :api
attr_accessor :delegate_name
attr_accessor :address
attr_accessor :public_key
attr_accessor :secret
attr_accessor :secret_secondary
attr_accessor :registered
def initialize api, delegate_name
@api = api
@delegate_name = delegate_name
delegate = @api.delegates_get_by_name delegate_name
if delegate.nil?
@registered = false
else
@registered = true
@address = delegate["address"]
@public_key = delegate["publicKey"]
end
end
def is_registered?
return registered
end
def set_secrets secret, second_secret = nil
@secret = secret
@secret_secondary = second_secret
end
def register
if not self.is_registered?
delegate = {}
delegate[:username] = @delegate_name
if not @secret_secondary.nil?
delegate[:secondSecret] = @secret_secondary
end
if not @secret.nil?
delegate[:secret] = @secret
registration = api.delegates_put delegate
return registration
else
return false
end
else
return false
end
end
end
end
|
lisk-builders/lisk.rb
|
lib/lisk/raw.rb
|
# The Lisk API Ruby wrapper gem.
module Lisk
# Implements raw APIs of the Lisk Core node.
class Raw
# A "lisk/client" connecting to a Lisk Core API node.
attr_accessor :client
# Initializing the API with a Lisk Core API client.
def initialize client
if not client.nil?
@client = client
return self
else
return nil
end
end
#####################################################
# https://github.com/lisk-builders/lisk.rb/issues/1 #
#####################################################
# The "accounts" API
def accounts
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "blocks" API
def blocks
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "dapps" API
def dapps
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "delegates" API
def delegates
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "delegates/forgers" API
def delegates_forgers
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "delegates/forging" API
def delegates_forging
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "node/constants" API
def node_constants
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "node/status" API
def node_status
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "peers" API
def peers
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "signatures" API
def signatures
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "transactions" API
def transactions
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "transactions/unsigned" API
def transactions_unsigned
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "transactions/unconfirmed" API
def transactions_unconfirmed
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "transactions/unprocessed" API
def transactions_unprocessed
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "votes" API
def votes
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "voters" API
def voters
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "accounts" API
def accounts
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "blocks" API
def blocks
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "dapps" API
def dapps
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "delegates" API
def delegates
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "delegates/forgers" API
def delegates_forgers
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "delegates/forging" API
def delegates_forging
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "node/constants" API
def node_constants
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "node/status" API
def node_status
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "peers" API
def peers
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "signatures" API
def signatures
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "transactions" API
def transactions
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "transactions/unsigned" API
def transactions_unsigned
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "transactions/unconfirmed" API
def transactions_unconfirmed
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "transactions/unprocessed" API
def transactions_unprocessed
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "votes" API
def votes
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# The "voters" API
def voters
todo "#{self}::#{__method__} UNIMPLEMENTED"
end
# Handles unimplemented methods
def method_missing name, *args, &block
todo "#{self}::#{name} METHOD MISSING"
end
end
end
|
lisk-builders/lisk.rb
|
examples/delegates.rb
|
#!/usr/bin/env ruby
require 'lisk'
node = Lisk::Client.new
api = Lisk::API.new node
delegate = Lisk::Delegate.new api, "4fryn"
p delegate.is_registered?
delegate = Lisk::Delegate.new api, "not_4fryn"
p delegate.is_registered?
delegate.set_secrets "lorem ipsum foo bar", "foo bar lorem ipsum"
p delegate.register
|
lisk-builders/lisk.rb
|
lib/lisk/client.rb
|
require "net/http"
require "uri"
require "json"
# The Lisk API Ruby wrapper gem.
module Lisk
# A simple HTTP client connecting to a Lisk Core API node.
class Client
# Host and port of the API endpoint.
attr_accessor :host, :port, :ssl, :active
# Initializes the Lisk HTTP client and defaults to localhost port 7000.
def initialize host = "127.0.0.1", port = 7000
@host = host
@port = port
@ssl = false
if self.is_alive?
@active = true
return self
else
@active = false
return nil
end
end
# Allows reconfiguring of the Lisk HTTP client's host and port.
def configure host, port
if not host.to_s.empty? and not port.to_s.empty?
@host = host
@port = port
@ssl = false
if self.is_alive?
@active = true
return self
else
@active = false
return nil
end
else
@active = false
return nil
end
end
# Get the status of last received block.
# Returns true if block was received in the past 120 seconds.
def is_alive?
connected = self.query_get "loader/status/ping"
@active = connected["success"]
end
# Handles GET requests to the given Lisk Core API endpoint
def query_get endpoint, params = nil
if not @ssl
# fixme "#{self}::#{__method__} Allow HTTPS requests"
begin
node = ::Net::HTTP.new @host, @port
uri = URI.parse "http://#{host}:#{port}/api/#{endpoint}"
if not params.nil?
uri.query = URI.encode_www_form params
end
request = ::Net::HTTP::Get.new uri
response = node.request request
@active = true
result = JSON::parse response.body
rescue Timeout::Error => e
@active = false
p "Can't connect to the Lisk node: Timeout!"
rescue Errno::EHOSTUNREACH => e
@active = false
p "Can't connect to the Lisk node: Host Unreachable!"
rescue Errno::ECONNREFUSED => e
@active = false
p "Can't connect to the Lisk node: Connection Refused!"
end
end
end
# Handles POST requests to the given Lisk Core API endpoint
def query_post endpoint, params
if not @ssl
# fixme "#{self}::#{__method__} Allow HTTPS requests"
begin
node = ::Net::HTTP.new @host, @port
header = {'Content-Type': 'application/json'}
uri = URI.parse "http://#{host}:#{port}/api/#{endpoint}"
request = ::Net::HTTP::Post.new uri, header
request.body = params.to_json
response = node.request request
@active = true
result = JSON::parse response.body
rescue Timeout::Error => e
@active = false
p "Can't connect to the Lisk node: Timeout!"
rescue Errno::EHOSTUNREACH => e
@active = false
p "Can't connect to the Lisk node: Host Unreachable!"
rescue Errno::ECONNREFUSED => e
@active = false
p "Can't connect to the Lisk node: Connection Refused!"
end
end
end
# Handles PUT requests to the given Lisk Core API endpoint
def query_put endpoint, params
if not @ssl
# fixme "#{self}::#{__method__} Allow HTTPS requests"
begin
node = ::Net::HTTP.new @host, @port
header = {'Content-Type': 'application/json'}
uri = URI.parse "http://#{host}:#{port}/api/#{endpoint}"
uri.query = URI.encode_www_form params
request = ::Net::HTTP::Put.new uri, header
request.body = params.to_json
response = node.request request
@active = true
result = JSON::parse response.body
rescue Timeout::Error => e
@active = false
p "Can't connect to the Lisk node: Timeout!"
rescue Errno::EHOSTUNREACH => e
@active = false
p "Can't connect to the Lisk node: Host Unreachable!"
rescue Errno::ECONNREFUSED => e
@active = false
p "Can't connect to the Lisk node: Connection Refused!"
end
end
end
# Handles unimplemented methods
def method_missing name, *args, &block
todo "#{self}::#{name} METHOD MISSING"
end
end
end
|
lisk-builders/lisk.rb
|
examples/lsk-shorty.rb
|
#!/usr/bin/env ruby
require "lisk"
require "bip_mnemonic"
require "securerandom"
@counter = 0
@node = Lisk::Client.new "127.0.0.1", 7000
@lisk = Lisk::API.new @node
@start = Time.now.to_i.to_f
def runner id
target = 18
while true
@counter += 1
entropy = SecureRandom.hex
phrase = BipMnemonic.to_mnemonic(entropy: entropy)
account = @lisk.accounts_open(phrase)
address = account["address"]
length = address.length
if length < target or length < 10
snapshot = Time.now.to_i.to_f
velocity = @counter.to_f / (snapshot - @start)
printf "\##{id}\t#{length}\t#{address}\t#{phrase}\t#{@counter}\t#{'%.1f' % velocity}/s\n"
target = length
end
end
end
t0 = Thread.new{ runner(0) }
t1 = Thread.new{ runner(1) }
t2 = Thread.new{ runner(2) }
t3 = Thread.new{ runner(3) }
t0.join
t1.join
t2.join
t3.join
# ...
|
lisk-builders/lisk.rb
|
lisk.gemspec
|
# coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "lisk/version"
Gem::Specification.new do |lisk|
lisk.name = "lisk"
lisk.version = Lisk::VERSION
lisk.authors = ["<NAME>"]
lisk.email = ["<EMAIL>"]
lisk.description = 'A Ruby wrapper for the Lisk blockchain platform API.'
lisk.homepage = 'https://github.com/lisk-builders/lisk.rb'
lisk.summary = 'Lisk API Ruby wrapper.'
lisk.license = "MIT"
lisk.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
lisk.bindir = "exe"
lisk.executables = lisk.files.grep(%r{^exe/}) { |f| File.basename(f) }
lisk.require_paths = ["lib", "bin"]
lisk.add_development_dependency "bundler", "~> 1.15"
lisk.add_development_dependency "rake", "~> 10.0"
lisk.add_development_dependency "rspec", "~> 3.0"
lisk.add_dependency "todonotes", "~> 0.2.2"
end
|
lisk-builders/lisk.rb
|
examples/payout.rb
|
#!/usr/bin/env ruby
require 'lisk'
# Try to connect a local Lisk client on test network.
# Warning: Think twice and test thoroughly before enabling this on main network!
client = Lisk::Client.new "127.0.0.1", 7000
# The pre-1.0.0 legacy API connected to the client.
legacy_api = Lisk::Legacy.new client
# Only proceed if the client is connected, active, and fully synchronized.
if legacy_api.loader_status_ping
# Get the desired delegate by name.
delegate = legacy_api.delegates_get_by_name "4fryn"
if not delegate.nil?
p "Delegate #{delegate["username"]} is rank \##{delegate["rank"]} with #{delegate["approval"]}\% approval and #{delegate["productivity"]}\% productivity."
# Get a list of voters for a delegate.
delegate_public_key = delegate["publicKey"]
delegate_address = delegate["address"]
delegate_voters = legacy_api.delegates_voters delegate_public_key
# Get the total vote weight of our delegate
delegate_total_weight = delegate["vote"].to_f
# Get the forging rewards from balance
payout_balance = legacy_api.accounts_get_balance delegate_address
payout_balance = payout_balance["balance"].to_f
# Our pool is sharing 80%, and keeping 20%
pool_share = 0.80
# Filter out voters without any Lisk
voter_threshold = 0
_debug_payout_sum = 0
# Iterate all voters
delegate_voters.each do | voter |
voter_balance = voter["balance"].to_f
# Only handle voters with balance above threshold
if voter_balance > voter_threshold # and not voter["address"].eql? delegate_address
voter_share = voter["balance"].to_f / delegate_total_weight
payout = voter_share * payout_balance * pool_share
# @TODO: do some handling of dust amounts
# @TODO: do sanity checks and create transactions here ...
p "Sending #{payout / 1e8} LSK to voter #{voter["address"]}..."
_debug_payout_sum += payout
end
end
# @TODO: sending 20% delegate share to her private address
p "Sending #{payout_balance * 0.20 / 1e8} LSK to delegate private funds..."
# _debug_payout_diff should be 0
_debug_payout_sum += payout_balance * 0.20
_debug_payout_diff = payout_balance - _debug_payout_sum
p "#{_debug_payout_diff === 0}"
end
else
p 'Lisk node disconnected, inactive, or not fully synchronized ...'
end
|
lisk-builders/lisk.rb
|
lib/lisk.rb
|
require "lisk/version"
require "todonotes"
# The Lisk API Ruby wrapper gem.
module Lisk
require "lisk/client"
require "lisk/legacy"
require "lisk/raw"
require "lisk/api"
require "lisk/delegate"
# Handles unimplemented methods
def method_missing name, *args, &block
todo "#{self}::#{name} METHOD MISSING"
end
end
|
jibberia/rediscluster
|
lib/crc16.rb
|
# Copyright (C) 2013 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# -----------------------------------------------------------------------------
#
# This is the CRC16 algorithm used by Redis Cluster to hash keys.
# Implementation according to CCITT standards.
#
# This is actually the XMODEM CRC 16 algorithm, using the
# following parameters:
#
# Name : "XMODEM", also known as "ZMODEM", "CRC-16/ACORN"
# Width : 16 bit
# Poly : 1021 (That is actually x^16 + x^12 + x^5 + 1)
# Initialization : 0000
# Reflect Input byte : False
# Reflect Output CRC : False
# Xor constant to output CRC : 0000
# Output for "123456789" : 31C3
module RedisClusterCRC16
def RedisClusterCRC16.crc16(bytes)
crc = 0
bytes.each_byte{|b|
crc = ((crc<<8) & 0xffff) ^ XMODEMCRC16Lookup[((crc>>8)^b) & 0xff]
}
crc
end
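# Illustrative check (derived from the reference output documented above):
#   RedisClusterCRC16.crc16("123456789") #=> 0x31C3 (12739)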
private
XMODEMCRC16Lookup = [
0x0000,0x1021,0x2042,0x3063,0x4084,0x50a5,0x60c6,0x70e7,
0x8108,0x9129,0xa14a,0xb16b,0xc18c,0xd1ad,0xe1ce,0xf1ef,
0x1231,0x0210,0x3273,0x2252,0x52b5,0x4294,0x72f7,0x62d6,
0x9339,0x8318,0xb37b,0xa35a,0xd3bd,0xc39c,0xf3ff,0xe3de,
0x2462,0x3443,0x0420,0x1401,0x64e6,0x74c7,0x44a4,0x5485,
0xa56a,0xb54b,0x8528,0x9509,0xe5ee,0xf5cf,0xc5ac,0xd58d,
0x3653,0x2672,0x1611,0x0630,0x76d7,0x66f6,0x5695,0x46b4,
0xb75b,0xa77a,0x9719,0x8738,0xf7df,0xe7fe,0xd79d,0xc7bc,
0x48c4,0x58e5,0x6886,0x78a7,0x0840,0x1861,0x2802,0x3823,
0xc9cc,0xd9ed,0xe98e,0xf9af,0x8948,0x9969,0xa90a,0xb92b,
0x5af5,0x4ad4,0x7ab7,0x6a96,0x1a71,0x0a50,0x3a33,0x2a12,
0xdbfd,0xcbdc,0xfbbf,0xeb9e,0x9b79,0x8b58,0xbb3b,0xab1a,
0x6ca6,0x7c87,0x4ce4,0x5cc5,0x2c22,0x3c03,0x0c60,0x1c41,
0xedae,0xfd8f,0xcdec,0xddcd,0xad2a,0xbd0b,0x8d68,0x9d49,
0x7e97,0x6eb6,0x5ed5,0x4ef4,0x3e13,0x2e32,0x1e51,0x0e70,
0xff9f,0xefbe,0xdfdd,0xcffc,0xbf1b,0xaf3a,0x9f59,0x8f78,
0x9188,0x81a9,0xb1ca,0xa1eb,0xd10c,0xc12d,0xf14e,0xe16f,
0x1080,0x00a1,0x30c2,0x20e3,0x5004,0x4025,0x7046,0x6067,
0x83b9,0x9398,0xa3fb,0xb3da,0xc33d,0xd31c,0xe37f,0xf35e,
0x02b1,0x1290,0x22f3,0x32d2,0x4235,0x5214,0x6277,0x7256,
0xb5ea,0xa5cb,0x95a8,0x8589,0xf56e,0xe54f,0xd52c,0xc50d,
0x34e2,0x24c3,0x14a0,0x0481,0x7466,0x6447,0x5424,0x4405,
0xa7db,0xb7fa,0x8799,0x97b8,0xe75f,0xf77e,0xc71d,0xd73c,
0x26d3,0x36f2,0x0691,0x16b0,0x6657,0x7676,0x4615,0x5634,
0xd94c,0xc96d,0xf90e,0xe92f,0x99c8,0x89e9,0xb98a,0xa9ab,
0x5844,0x4865,0x7806,0x6827,0x18c0,0x08e1,0x3882,0x28a3,
0xcb7d,0xdb5c,0xeb3f,0xfb1e,0x8bf9,0x9bd8,0xabbb,0xbb9a,
0x4a75,0x5a54,0x6a37,0x7a16,0x0af1,0x1ad0,0x2ab3,0x3a92,
0xfd2e,0xed0f,0xdd6c,0xcd4d,0xbdaa,0xad8b,0x9de8,0x8dc9,
0x7c26,0x6c07,0x5c64,0x4c45,0x3ca2,0x2c83,0x1ce0,0x0cc1,
0xef1f,0xff3e,0xcf5d,0xdf7c,0xaf9b,0xbfba,0x8fd9,0x9ff8,
0x6e17,0x7e36,0x4e55,0x5e74,0x2e93,0x3eb2,0x0ed1,0x1ef0
]
end
|
jibberia/rediscluster
|
lib/rediscluster.rb
|
# Copyright (C) 2013 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
require 'rediscluster/version'
require 'rubygems'
require 'redis'
require_relative 'crc16.rb'
class RedisCluster
RedisClusterHashSlots = 16384
RedisClusterRequestTTL = 16
RedisClusterDefaultTimeout = 1
def initialize(startup_nodes,connections,opt={})
@startup_nodes = startup_nodes
@max_connections = connections
@connections = {}
@opt = opt
@refresh_table_asap = false
initialize_slots_cache
end
def get_redis_link(host,port)
timeout = @opt[:timeout] || RedisClusterDefaultTimeout
Redis.new(:host => host, :port => port, :timeout => timeout)
end
# Given a node (that is just a Ruby hash) give it a name just
# concatenating the host and port. We use the node name as a key
# to cache connections to that node.
def set_node_name!(n)
if !n[:name]
n[:name] = "#{n[:host]}:#{n[:port]}"
end
end
# Contact the startup nodes and try to fetch the hash slots -> instances
# map in order to initialize the @slots hash.
def initialize_slots_cache
@startup_nodes.each{|n|
begin
@slots = {}
@nodes = []
r = get_redis_link(n[:host],n[:port])
r.cluster("slots").each {|r|
(r[0]..r[1]).each{|slot|
ip,port = r[2]
name = "#{ip}:#{port}"
node = {
:host => ip, :port => port,
:name => name
}
@nodes << node
@slots[slot] = node
}
}
populate_startup_nodes
@refresh_table_asap = false
rescue
# Try with the next node on error.
next
end
# Exit the loop as soon as the first node replies
break
}
end
# Use @nodes to populate @startup_nodes, so that we have more chances
# if a subset of the cluster fails.
def populate_startup_nodes
# Make sure every node already has a name, so that later the
# Array uniq! method will work reliably.
@startup_nodes.each{|n| set_node_name! n}
@nodes.each{|n| @startup_nodes << n}
@startup_nodes.uniq!
end
# Flush the cache, mostly useful for debugging when we want to force
# redirection.
def flush_slots_cache
@slots = {}
end
# Return the hash slot from the key.
def keyslot(key)
# Only hash what is inside {...} if there is such a pattern in the key.
# Note that the specification requires hashing the content between
# the first { and the first } that follows it. If we find {} with
# nothing in the middle, the whole key is hashed as usual.
s = key.index "{"
if s
e = key.index "}",s+1
if e && e != s+1
key = key[s+1..e-1]
end
end
RedisClusterCRC16.crc16(key) % RedisClusterHashSlots
end
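# Illustrative example of the hash-tag rule above: "{user1000}.following" and
# "{user1000}.followers" both hash only "user1000", so they map to the same slot.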
# Return the first key in the command arguments.
#
# Currently we just return argv[1], that is, the first argument
# after the command name.
#
# This is indeed the key for most commands, and when it is not true
# the cluster redirection will point us to the right node anyway.
#
# For commands we explicitly flag as bad, because they don't make sense
# in the context of a cluster, nil is returned.
def get_key_from_command(argv)
case argv[0].to_s.downcase
when "info","multi","exec","slaveof","config","shutdown"
return nil
else
# Unknown commands, and all the commands having the key
# as first argument are handled here:
# set, get, ...
return argv[1]
end
end
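# Illustrative (not from the original source): get_key_from_command(["get", "foo"])
# returns "foo", while get_key_from_command(["info"]) returns nil.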
# If the current number of connections is already the maximum number
# allowed, close a random connection. This should be called every time
# we cache a new connection in the @connections hash.
def close_existing_connection
while @connections.length >= @max_connections
@connections.each{|n,r|
@connections.delete(n)
begin
r.client.disconnect
rescue
end
break
}
end
end
# Return a link to a random node, or raise an error if no node can be
# contacted. This function is only called when we can't reach the node
# associated with a given hash slot, or when we don't know the right
# mapping.
#
# The function will try to get a successful reply to the PING command,
# otherwise the next node is tried.
def get_random_connection
e = ""
@startup_nodes.shuffle.each{|n|
begin
set_node_name!(n)
conn = @connections[n[:name]]
if !conn
# Connect the node if it is not connected
conn = get_redis_link(n[:host],n[:port])
if conn.ping == "PONG"
close_existing_connection
@connections[n[:name]] = conn
return conn
else
# If the connection is not good close it ASAP in order
# to avoid waiting for the GC finalizer. File
# descriptors are a rare resource.
conn.client.disconnect
end
else
# The node was already connected, test the connection.
return conn if conn.ping == "PONG"
end
rescue => e
# Just try with the next node.
end
}
raise "Can't reach a single startup node. #{e}"
end
# Given a slot return the link (Redis instance) to the mapped node.
# Make sure to create a connection with the node if we don't have
# one.
def get_connection_by_slot(slot)
node = @slots[slot]
# If we don't know what the mapping is, return a random node.
return get_random_connection if !node
set_node_name!(node)
if not @connections[node[:name]]
begin
close_existing_connection
@connections[node[:name]] =
get_redis_link(node[:host],node[:port])
rescue
# This will probably never happen with recent redis-rb
# versions because the connection is established in a lazy
# way only when a command is called. However it is wise to
# handle an instance creation error of some kind.
return get_random_connection
end
end
@connections[node[:name]]
end
# Dispatch commands.
def send_cluster_command(argv)
initialize_slots_cache if @refresh_table_asap
ttl = RedisClusterRequestTTL; # Max number of redirections
e = ""
asking = false
try_random_node = false
while ttl > 0
ttl -= 1
key = get_key_from_command(argv)
raise "No way to dispatch this command to Redis Cluster." if !key
slot = keyslot(key)
if try_random_node
r = get_random_connection
try_random_node = false
else
r = get_connection_by_slot(slot)
end
begin
# TODO: use pipelining to send asking and save a rtt.
r.asking if asking
asking = false
return r.send(argv[0].to_sym,*argv[1..-1])
rescue Errno::ECONNREFUSED, Redis::TimeoutError, Redis::CannotConnectError, Errno::EACCES
try_random_node = true
sleep(0.1) if ttl < RedisClusterRequestTTL/2
rescue => e
errv = e.to_s.split
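# Redirection errors look like "MOVED 3999 127.0.0.1:6381" or
# "ASK 3999 127.0.0.1:6381" (slot number followed by host:port).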
if errv[0] == "MOVED" || errv[0] == "ASK"
if errv[0] == "ASK"
asking = true
else
# Server replied with MOVED. It's better for us to
# ask for CLUSTER NODES the next time.
@refresh_table_asap = true
end
newslot = errv[1].to_i
node_ip,node_port = errv[2].split(":")
if !asking
@slots[newslot] = {:host => node_ip,
:port => node_port.to_i}
end
else
raise e
end
end
end
raise "Too many Cluster redirections? (last error: #{e})"
end
# Currently we handle all the commands using method_missing for
# simplicity. For a Cluster client actually it will be better to have
# every single command as a method with the right arity and possibly
# additional checks (example: RPOPLPUSH with same src/dst key, SORT
# without GET or BY, and so forth).
def method_missing(*argv)
send_cluster_command(argv)
end
def conns
@connections
end
def nodes
@nodes
end
def slots
@slots
end
end
|
danhixon/rails-brochure
|
app/controllers/home_controller.rb
|
class HomeController < ApplicationController
def four_zero_four
render 'four_zero_four', :status=>404
end
end
|
danhixon/rails-brochure
|
lib/rails-brochure.rb
|
require File.expand_path(File.dirname(__FILE__)) + '/rails-brochure/home_content'
require File.expand_path(File.dirname(__FILE__)) + '/rails-brochure/route_reloader'
require File.expand_path(File.dirname(__FILE__)) + '/rails-brochure/engine'
|
danhixon/rails-brochure
|
config/routes.rb
|
Rails.application.routes.draw do
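# Illustrative (not from the original source): a template named "about-us" maps to
# GET /about-us => home#about-us with the named route :about_us, per the gsub below.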
Rails::Brochure::HomeContent.templates.each do |pg|
get "/#{pg}" => "home##{pg}", :as => pg.gsub(/(\/|-)/,'_').to_sym
end
get '*a', :to => 'home#four_zero_four' unless defined?(NONBROCHURE404) && Rails.application.config.consider_all_requests_local
end
|
danhixon/rails-brochure
|
rails-brochure.gemspec
|
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "rails-brochure/version"
Gem::Specification.new do |s|
s.name = "rails-brochure"
s.version = Rails::Brochure::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ["<NAME>"]
s.email = ["<EMAIL>"]
s.homepage = "http://github.com/danhixon/rails-brochure"
s.summary = %q{Rails engine for brochure pages. Similar to High Voltage but with named routes.}
s.description = %q{Rails engine for brochure pages. Similar to High Voltage but with named routes.
Brochure pages are the semi-static pages like "home", "about us", "FAQ", "pricing", "contact us", etc.
Most of the designers I have worked with really appreciate the power and convenience this plugin provides. They are able to simply create erb files in folders like they are used to with static html or php and it just works. No futzing with routes, controllers etc.}
s.add_dependency('rails', '>= 3.0.0')
s.rubyforge_project = "rails-brochure"
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.require_paths = ["lib"]
end
|
danhixon/rails-brochure
|
lib/rails-brochure/engine.rb
|
# Engine injects some middleware to reload
# routes if new files have been added
module Rails
module Brochure
class Engine < Rails::Engine
initializer "brochure routes" do |app|
app.middleware.use "Rails::Brochure::RouteReloader"
end if Rails.env.development?
end
end
end
|
paul/uri_template
|
spec/spec_helper.rb
|
require File.expand_path(File.dirname(__FILE__) + "/../lib/uri_template")
|
paul/uri_template
|
spec/examples_from_draft_spec.rb
|
require 'spec_helper'
# These examples are taken directly from the draft spec
# http://tools.ietf.org/html/draft-gregorio-uritemplate-07
describe "Examples given in the Draft" do
let(:params) do
{
"dom" => "example.com",
"dub" => "me/too",
"hello" => "Hello World!",
"half" => "50%",
"var" => "value",
"who" => "fred",
"base" => "http://example.com/home/",
"path" => "/foo/bar",
"list" => [ "red", "green", "blue" ],
"keys" => {"semi"=>";", "dot"=>".", "comma" =>","},
"v" => "6",
"x" => "1024",
"y" => "768",
"empty" => "",
"empty_keys" => [],
"undef" => nil
}
end
examples = {}
section = ""
File.read(File.dirname(__FILE__) + "/examples_from_draft.txt").each_line do |line|
next if line.strip.empty?
if line =~ /^##/
section = line.gsub('## ', '').to_s
examples[section] = []
else
template, expected = line.strip.split(/\s+/,2)
examples[section] << [template, expected]
end
end
examples.each do |section, data|
describe section do
data.each do |template, expected|
it "should render #{template.inspect} as #{expected.inspect}" do
UriTemplate.new(template).expand(params).should == expected
end
end
end
end
end
|
paul/uri_template
|
lib/uri_template.rb
|
require 'addressable/uri'
__dir__ = File.dirname(__FILE__)
require __dir__ + "/uri_template/version"
require __dir__ + "/uri_template/parser"
require __dir__ + "/uri_template/transformer"
class UriTemplate
class ParseError < StandardError; end
def initialize(uri_template)
@uri_template = uri_template
tree = Parser.new.parse(@uri_template)
@template = Transformer.new.apply(tree)
rescue Parslet::ParseFailed => ex
raise ParseError, "invalid template: #{uri_template}"
end
def expand(params)
output = ""
@template.each do |part|
case part
when String then output << part
when Hash then output << Expression.new(part).expand(params)
end
end
output
end
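# Illustrative usage (not from the original source; the template and values are examples):
#   UriTemplate.new("http://example.com/search{?q,lang}").expand("q" => "ruby", "lang" => "en")
#   #=> "http://example.com/search?q=ruby&lang=en"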
class Expression
def initialize(tree)
@tree = tree
end
def expand(params = {})
out = var_list.map do |var|
Var.new(self, var[:var]).value(params)
end.compact
return "" if out.empty?
first_char + out.join(separator)
end
def var_list
@tree[:var_list]
end
def operator
@tree[:operator]
end
def separator
case operator
when nil, "+", "#" then ","
when "." then "."
when "/" then "/"
when ";" then ";"
when "?", "&" then "&"
end
end
def first_char
case operator
when nil, "+" then ""
else operator
end
end
def named?
[";", "?", "&"].include? operator
end
def str_if_empty
case operator
when "?", "&" then "="
else ""
end
end
def encode_set
set = Addressable::URI::CharacterClasses::UNRESERVED
set += Addressable::URI::CharacterClasses::RESERVED if ["+", "#"].include? operator
set
end
end
class Var
attr_reader :exp
def initialize(exp, tree)
@exp, @tree = exp, tree
end
# Algorithm taken directly from the spec.
# TODO: refactor to be Ruby-like
def value(params)
value = params[name] || params[name.to_sym]
out = ""
case value
when nil
return nil
when String
if exp.named?
out << "#{encode(name)}#{value.empty? ? exp.str_if_empty : "="}"
end
out << encode(trim(value))
when Array
if !explode?
if exp.named?
out << "#{encode(name)}#{value.empty? ? exp.str_if_empty : "="}"
end
return if value.empty?
out << value.map{|v| encode(v)}.join(",")
else
return if value.empty?
out << value.map{|v| encode(v)}.join(exp.separator)
end
when Hash
if !explode?
if exp.named?
out << "#{encode(name)}#{value.empty? ? exp.str_if_empty : "="}"
end
return if value.empty?
out << value.map{|k,v| [encode(k), encode(v)]}.flatten.join(",")
else
return if value.empty?
out << value.map{|k,v| "#{encode(k)}=#{encode(v)}"}.join(exp.separator)
end
end
out
end
protected
def name
@tree[:name]
end
def explode?
@tree.has_key?(:explode)
end
def max_length
@tree[:max_length]
end
def trim(str)
max_length ? str[0..(max_length-1)] : str
end
def encode(str)
Addressable::URI.encode_component(str, exp.encode_set)
end
end
end
|
paul/uri_template
|
spec/errors_spec.rb
|
require 'spec_helper'
describe UriTemplate do
describe "with good uris" do
good = [
"{var}",
"http://example.com/",
"http://example.com/{foo}",
"http://example.com/search{?q}",
"http://example.com/search{?q,list}"
]
good.each do |tmpl|
it "should parse #{tmpl.inspect}" do
lambda { UriTemplate.new(tmpl) }.should_not raise_error(UriTemplate::ParseError)
end
end
end
describe "with invaldi uris" do
bad = [
"http://example.com/{",
"http://example.com/{^foo}"
]
bad.each do |tmpl|
it "should not parse #{tmpl.inspect}" do
lambda { UriTemplate.new(tmpl) }.should raise_error(UriTemplate::ParseError)
end
end
end
end
|
paul/uri_template
|
lib/uri_template/parser.rb
|
require 'parslet'
class UriTemplate
class Parser < Parslet::Parser
rule(:uri_template) do
(literals | expression).repeat
end
rule(:expression) do
str('{') >> operator.maybe.as(:operator) >> var_list.as(:var_list) >> str('}')
end
rule(:operator) do
str("+") | str("#") | str(".") | str("/") | str(";") | str("?") | str("&")
end
rule(:var_list) do
(varspec >> ( str(",") >> varspec ).repeat).maybe.as(:array)
end
rule(:varspec) do
(varname >> modifier.maybe).as(:var)
end
rule(:varname) do
((varchar >> (varchar | str(".")).repeat).as(:string)).as(:name)
end
rule(:varchar) do
(alphanumeric | str("_") | pct_encoded)
end
rule(:modifier) do
prefix | explode
end
rule(:prefix) do
str(':') >> number.as(:max_length)
end
rule(:explode) do
str('*').as(:explode)
end
rule(:literals) do
(unreserved | reserved | pct_encoded).repeat(1).as(:literals)
end
rule(:unreserved) do
alphanumeric | match("[-._~]")
end
rule(:reserved) do
match("[:/?#\\[\\]@!$&'()*+,;=]")
end
rule(:pct_encoded) do
str('%') >> hex >> hex
end
rule(:alphanumeric) do
match('[A-Za-z0-9]')
end
rule(:number) do
match('[0-9]').repeat(1).as(:number)
end
rule(:hex) do
match('[a-fA-F0-9]')
end
root(:uri_template)
end
end
|
paul/uri_template
|
spec/params_spec.rb
|
require 'spec_helper'
describe UriTemplate do
describe "params" do
let(:template) { UriTemplate.new("{foo}") }
it "should handle string keys" do
template.expand("foo" => "bar").should == "bar"
end
it "should handle symbol keys" do
template.expand(:foo => "bar").should == "bar"
end
end
end
|
paul/uri_template
|
lib/uri_template/transformer.rb
|
require 'parslet'
class UriTemplate
class Transformer < Parslet::Transform
rule(:literals => simple(:l)) { l.to_s }
rule(:string => simple(:s)) { s.to_s }
rule(:array => subtree(:ar)) { ar.is_a?(Array) ? ar : [ar] }
rule(:explode) { true }
#rule(:name => simple(:name)) { name.to_s }
rule(:number => simple(:x)) { Integer(x) }
end
end
|
Anthonyntilelli/Number_guessing
|
lib/challenge_number.rb
|
################################################################################
#: Title : ChallengeNumber
#: Author : <NAME>
#: Description : Generates a random number for the user to guess
#: Main Methods :
#: #guess - returns if guess is higher,lower or correct and tracks tries
#: #tell - end game with loss and returns hidden_number
#: #new_game! - reset hidden_number, tries and win
#: #tries - count of attempt made to guess hidden_number
#: #win - is game over (win/loss) or ongoing
#: #convert_integer - converts input to base 10 integer for use in game
#: returns 'nil' if invalid input
#: Win/Loss :
#: game ongoing when win == nil
#: win when win == true
#: Loss when win == false
################################################################################
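# Illustrative usage (not from the original source; values are arbitrary):
#   game = ChallengeNumber.new(0, 100)
#   game.guess(50) #=> -1, 0 or 1, depending on the hidden number
#   game.tell      #=> ends the game as a loss and reveals the hidden number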
class ChallengeNumber
attr_reader :min_num, :max_num, :tries, :win
def initialize(min_num, max_num, override = nil)
# use override to force hidden_number
self.min_num = min_num
self.max_num = max_num
raise RangeError.new("#{@min_num} < #{@max_num}") unless @min_num < @max_num
new_game!(override)
end
# method used by the user to attempt to find hidden_number and track tries
# return 1 when integer is lower than hidden_number
# return -1 when integer is higher than hidden_number
# return 0 when integer == hidden_number
# return false when integer is out of range (doesn't increment tries) or invalid
# finding hidden_number ends game
# will not increment tries after game end
# will still return -1,0,1 after game end
def guess(integer)
# converts from string if not an integer
# Does not enforce base 10 when integer
integer = convert_integer(integer) unless integer.is_a? Integer
# also checks for nil
return false unless integer&.between?(@min_num, @max_num)
@tries += 1 if @win.nil?
compare = @hidden_number <=> integer
@win = true if compare.zero?
compare
end
# end game with loss and returns hidden_number
def tell
@win = false
@hidden_number
end
# reset hidden_number, tries, and win
def new_game!(override = nil)
if override # use override to force hidden_number
ensure_i(override)
@hidden_number = override
else
@hidden_number = rand(@min_num..@max_num)
end
@tries = 0
@win = nil
end
private
# converts to base 10 integer or returns nil
def convert_integer(var)
Integer(var, 10) # allow base 10 only
rescue ArgumentError
nil
end
def ensure_i(integer)
raise TypeError, 'Not an Integer' unless integer.is_a? Integer
end
def min_num=(integer)
ensure_i(integer)
@min_num = integer
end
def max_num=(integer)
ensure_i(integer)
@max_num = integer
end
end
|
Anthonyntilelli/Number_guessing
|
number_guessing.rb
|
#!/usr/bin/env ruby
################################################################################
#: Title : Number_guessing
#: Author : <NAME>
#: Description : Commandline game where the user guesses a hidden number within a
#: : range. The game provides hints telling the user if their guess is
#: : higher or lower than the hidden number
################################################################################
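# Usage (illustrative): ./number_guessing.rb --min 0 --Max 50
# In-game commands: 'Q!' quits and reveals the answer, 'N!' starts a new game.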
require 'optparse'
require_relative 'lib/challenge_number.rb'
# Defaults
lowest_number = nil
highest_number = nil
debug = nil
begin
OptionParser.new do |parser|
parser.banner = 'Usage: number_guessing [options]'
parser.on('-m', '--min INTEGER', Integer, 'Set Minimum number') do |min|
lowest_number = min
end
parser.on('-M', '--Max INTEGER', Integer, 'Set Maximum number') do |max|
highest_number = max
lowest_number = max - 100 if lowest_number.nil?
end
parser.on('-D', '--Debug INTEGER', Integer, 'Set hidden number') do |secret|
debug = secret
end
parser.on('-h', '--help', 'Prints this help') do
puts parser
exit
end
end.parse!
# catch all for non-switch inputs
raise ArgumentError, "Invalid command Line options #{ARGV}" unless ARGV.empty?
rescue StandardError => e
$stdout.puts e.message
exit 3
end
lowest_number = 0 if lowest_number.nil?
highest_number = lowest_number + 100 if highest_number.nil?
game = ChallengeNumber.new(lowest_number, highest_number, debug)
puts 'Do you want to play a game?'
while game.win.nil?
puts "Select a integer between #{game.min_num} and #{game.max_num}"
input = $stdin.gets.chomp
case input
when 'Q!'
puts "Game over, Answer was: #{game.tell}" # quit game
exit
when 'N!'
highest_number = lowest_number + 100
puts 'New Game'
game.new_game!(debug)
next
else
input = game.guess(input)
unless input
puts 'Try again: Invalid Input (base 10 integer only) or out of range'
next
end
end
hint = case input
when -1 then "Try a lower number, attempt# #{game.tries}"
when 0 then "You win, attempt# #{game.tries}"
when 1 then "Try a higher number, attempt# #{game.tries}"
else
# condition should never be invoked
$stdout.puts "Invalid input after guess #{input}"
exit 4
end
puts hint
end
exit
|
Anthonyntilelli/Number_guessing
|
spec/challenge_number_spec.rb
|
require_relative '../lib/challenge_number.rb'
RSpec.describe 'ChallengeNumber ' do
describe '#initialize' do
context 'when initialize' do
subject(:valid) { ChallengeNumber.new(0, 100) }
it 'create new game with valid range' do
expect(valid.min_num).to eql(0)
expect(valid.max_num).to eql(100)
expect(valid.tries).to eql(0)
expect(valid.win).to eql(nil)
end
it 'raise error on invalid input' do
expect { _bad_arg = ChallengeNumber.new(0, 'invalid') }.to raise_error(TypeError)
expect { _bad_arg = ChallengeNumber.new(0, 100, 'invalid') }.to raise_error(TypeError)
expect { _bad_range = ChallengeNumber.new(0, -5) }.to raise_error(RangeError)
end
end
end
describe '#guess' do
context 'when provided a higher number' do
subject(:higher_tester) { ChallengeNumber.new(0, 50, 25) }
it 'returns -1, tries == 1 and win == nil' do
expect(higher_tester.guess('49')).to eql(-1)
expect(higher_tester.win).to eql(nil)
expect(higher_tester.tries).to eql(1)
end
end
context 'when provided a lower number' do
subject(:lower_tester) { ChallengeNumber.new(0, 50, 25) }
it 'returns 1, tries == 1 and win == nil' do
expect(lower_tester.guess(1)).to eql(1)
expect(lower_tester.win).to eql(nil)
expect(lower_tester.tries).to eql(1)
end
end
context 'when provided correct number' do
subject(:tester) { ChallengeNumber.new(0, 100, 50) }
it 'returns 0 and win == true' do
expect(tester.guess('50')).to eql(0)
expect(tester.win).to eql(true)
end
end
context 'when provided invalid number' do
subject(:invalid_tester) { ChallengeNumber.new(0, 100, 50) }
it 'returns false, tries == 0 and win == nil' do
expect(invalid_tester.guess('999')).to eql(false)
expect(invalid_tester.guess('BATMAN')).to eql(false)
expect(invalid_tester.win).to eql(nil)
expect(invalid_tester.tries).to eql(0)
end
end
end
describe '#tell' do
context 'when the player gives up' do
subject(:quitter) { ChallengeNumber.new(0, 50, 35) }
it 'returns 35 and win == false' do
expect(quitter.win).to eql(nil)
expect(quitter.tell).to eql(35)
expect(quitter.win).to eql(false)
end
end
end
describe '#new_game!' do
context 'when the game is reset' do
subject(:reset) { ChallengeNumber.new(0, 50, 33) }
it 'return tries == 0 and win == nil' do
reset.guess(33)
expect(reset.win).to eql(true) # start with win
reset.new_game!
expect(reset.win).to eql(nil)
expect(reset.tries).to eql(0)
end
end
end
end
|
danmayer/server_builder
|
lib/server_builder.rb
|
require 'logger'
require 'server_builder/version'
require 'server_builder/multi_io'
require 'server_builder/fog_builder'
require 'server_builder/ecs_builder'
require 'server_builder/verifier'
module ServerBuilder
class Builder
attr_accessor :logger
def self.run(opts)
puts "server builder run with #{opts.inspect}"
if opts.is_a?(Array)
opts_to_convert = opts
opts = {'cmd' => opts_to_convert[0]}
opts_to_convert.shift
opts_to_convert.each do |el|
puts "el is #{el}"
key, val = el.split('=')
opts.merge!({key => val})
end
end
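# Illustrative (values are made up): ["verify", "host=example.com", "redis=6379"]
# becomes {'cmd' => 'verify', 'host' => 'example.com', 'redis' => '6379'}.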
puts "server builder converted opts #{opts.inspect}"
case opts['cmd']
when 'cluster'
builder = ServerBuilder::EcsBuilder.new(opts)
builder.build(opts)
when 'verify'
builder = Builder.new(opts)
builder.verify_server(opts)
when 'ssh'
builder = Builder.new(opts)
builder.ssh(opts)
when 'redis'
builder = Builder.new(opts)
builder.redis_server(opts)
when 'elastic_search'
builder = Builder.new(opts)
builder.elastic_search_server(opts)
when 'open_street_map'
builder = Builder.new(opts)
builder.open_street_map_server(opts)
when 'docker_registry'
builder = Builder.new(opts)
builder.docker_registry_server(opts)
when 'jenkins'
builder = Builder.new(opts)
builder.jenkins_server(opts)
when 'yeoman_example'
builder = Builder.new(opts)
builder.yeoman_example_server(opts)
when 'app'
builder = Builder.new(opts)
builder.app_on_server(opts)
when 'basics'
builder = Builder.new(opts)
builder.basics_on_server(opts)
when 'build'
builder = Builder.new(opts)
builder.build_server(opts)
when 'stop'
builder = Builder.new(opts)
builder.stop_server(opts)
when 'destroy'
builder = Builder.new(opts)
builder.destroy_server(opts)
else
puts "invalid builder command, run like: server_builder build"
end
end
def initialize(opts = {})
@host = opts.fetch('host'){ "utils.picoappz.com" }
@logger = opts.fetch('logger'){
log_file = File.open("logs/server_builder.log", "a")
Logger.new MultiIO.new(STDOUT, log_file)
}
end
def elastic_search_server(opts = {})
ssh(:execute => 'docker run -d -p 9200:9200 -p 9300:9300 dockerfile/elasticsearch')
end
def redis_server(opts = {})
ssh(:execute => 'docker run -d -p 6379:6379 dockerfile/redis')
end
def open_street_map_server(opts = {})
ssh(:execute => 'docker run -d -p 80:8888 -p 5432:5588 homme/openstreetmap-tiles')
end
def basics_on_server(opts = {})
logger.info ssh(:execute => "sudo apt-get -y update")
logger.info ssh(:execute => "sudo apt-get install -y git emacs wget curl ")
end
def install_docker_to_registry(opts = {})
logger.info ssh(:execute => "mkdir -p /home/ubuntu/apps")
end
# depends on docker registry
# depends on basics_on_server
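# Example invocation (illustrative; the repo URL and port are placeholders):
#   server_builder app repo_url=https://github.com/example/app.git port=3000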
def app_on_server(opts = {})
repo_url = opts['repo_url']
port = opts['port']
raise "port is a required option to start an app" if port.nil?
raise "`server_builder app repo_url=some_git_repo.git` is required to install an app." if repo_url.nil?
raise "use public accessiable git like https not git@ which asks for auth" if repo_url.match(/git@/)
repo_name = repo_url.split('/').last.gsub('.git','')
app_dir = "/home/ubuntu/apps/#{repo_name}"
logger.info ssh(:execute => "mkdir -p /home/ubuntu/apps")
logger.info ssh(:execute => "git clone #{repo_url} #{app_dir}")
logger.info ssh(:execute => "cd #{app_dir} && docker build -t #{repo_name} .")
logger.info ssh(:execute => "docker run -d -p #{port}:#{port} #{repo_name}")
end
def yeoman_example_server(opts = {})
# server to run
# http://yeoman.io/codelab.html
# Something is wrong: if I use this yeoman install and add generators, they don't appear.
# Need to try again and perhaps try this http://stackoverflow.com/questions/18081125/why-are-my-yeoman-generators-installing-in-the-wrong-place
cmds = [
"sudo apt-get update -y",
"sudo apt-get install -y python-software-properties python g++ make",
"sudo add-apt-repository -y ppa:chris-lea/node.js",
"sudo apt-get update -y ",
"sudo apt-get install -y nodejs",
"sudo npm install --global yo",
"sudo npm install -y --global yo"
]
cmds.each do |cmd|
logger.info ssh(:execute => cmd)
end
end
def jenkins_server(opts = {})
# could keep this around, but docker jenkins doesn't support dockerized builds
# ssh(:execute => 'docker run -p 8080:8080 -d bacongobbler/jenkins')
cmds = [
"wget -q -O - http://pkg.jenkins-ci.org/debian/jenkins-ci.org.key | sudo apt-key add -",
"sudo sh -c 'echo deb http://pkg.jenkins-ci.org/debian binary/ > /etc/apt/sources.list.d/jenkins.list'",
"sudo apt-get update -y",
"sudo DEBIAN_FRONTEND=noninteractive apt-get -y install jenkins",
"sudo mkdir /home/jenkins/",
"sudo chmod +xr /home/jenkins/",
"sudo usermod -a -G docker jenkins",
"sudo wget -q -O - http://mirrors.jenkins-ci.org/war-stable/latest/jenkins.war > /tmp/jenkins.war",
"sudo cp /tmp/jenkins.war /home/jenkins/",
"sudo curl https://ci.jenkins-ci.org/jnlpJars/jenkins-cli.jar > /tmp/jenkins-cli.jar",
"sudo cp /tmp/jenkins-cli.jar /home/jenkins/",
"sudo chown jenkins:docker /home/jenkins/jenkins.war",
"sudo chown jenkins:docker /home/jenkins/jenkins-cli.jar",
"curl -L http://mirror.xmission.com/jenkins/updates/update-center.json | sed '1d;$d' | curl -X POST -H 'Accept: application/json' -d @- http://127.0.0.1:8080/updateCenter/byId/default/postBack",
"sudo -Hu jenkins java -jar /home/jenkins/jenkins-cli.jar -s http://127.0.0.1:8080/ safe-restart",
"sleep 35",
"sudo -Hu jenkins java -jar /home/jenkins/jenkins-cli.jar -s http://127.0.0.1:8080/ install-plugin Git; true",
"sudo -Hu jenkins java -jar /home/jenkins/jenkins-cli.jar -s http://127.0.0.1:8080/ safe-restart; true"
]
cmds.each do |cmd|
logger.info ssh(:execute => cmd)
end
end
def docker_registry_server(opts = {})
ssh(:execute => 'docker run -d -p 5000:5000 samalba/docker-registry')
end
def ssh(opts = {})
# As opposed to using vagrant ssh, it is probably faster to connect directly:
# ssh -i ~/.ssh/dans.pem ubuntu@ec2-54-225-37-243.compute-1.amazonaws.com
# use vagrant ssh to recent vagrant built server
if opts[:execute]
logger.info "connecting to server to run: #{opts[:execute]}"
`cd config/docker_vagrant && vagrant ssh --command '#{opts[:execute]}'`
else
logger.info "connecting to server..."
Kernel.exec("cd config/docker_vagrant && vagrant ssh")
end
end
def stop_server(opts = {})
logger.info "building a base docker server..."
# use vagrant to install docker on EC2 with offical docker vagrant script
logger.info `cd config/docker_vagrant && vagrant halt`
end
def destroy_server(opts = {})
logger.info "destroying server..."
# use vagrant to install docker on EC2 with offical docker vagrant script
logger.info `cd config/docker_vagrant && vagrant destroy -f`
end
def build_server(opts = {})
logger.info "building a base docker server..."
# use vagrant to install docker on EC2 with offical docker vagrant script
logger.info `cd config/docker_vagrant && vagrant up --provider=aws`
# use fog and bootstrap
# fog_builder = FogBuilder.new(opts, logger)
# fog_builder.build
end
def verify_server(opts = {})
verifier = Verifier.new(opts, logger)
verifier.verify
end
end
end
|
danmayer/server_builder
|
lib/server_builder/fog_builder.rb
|
module ServerBuilder
class FogBuilder
attr_accessor :logger
def initialize(opts, logger)
@logger = logger
end
def build
logger.info "building with fog"
end
end
end
|
danmayer/server_builder
|
lib/server_builder/ecs_builder.rb
|
module ServerBuilder
class EcsBuilder
attr_accessor :logger, :cluster_name, :service_name, :desired_count,
:task_definition
def initialize(opts)
@logger = opts.fetch('logger'){
log_file = File.open("logs/server_builder.log", "a")
Logger.new MultiIO.new(STDOUT, log_file)
}
@cluster_name = opts.fetch('name') { 'auto_cluster' }
@service_name = opts.fetch('service_name') { 'auto_graphite-statsd' }
@desired_count = opts.fetch('desired_count') { 1 }
@task_definition = opts.fetch('task_definition') { 'graphite-statsd:3' }
end
def build(opts)
logger.info "building with EcsBuilder"
#build_cluster
#create_ecs_instance
#add_service
end
private
def build_cluster
`aws ecs create-cluster --cluster-name #{cluster_name}`
end
def add_service
`aws ecs create-service --cluster #{cluster_name} --service-name #{service_name} --desired-count #{desired_count} --task-definition #{task_definition}`
end
end
end
|
danmayer/server_builder
|
lib/server_builder/verifier.rb
|
module ServerBuilder
class Verifier
attr_accessor :logger, :opts, :host
def initialize(opts, logger)
@opts = opts
@host = opts.fetch('host'){ "utils.picoappz.com" }
@logger = logger
end
def verify
logger.info "verifying server with: #{opts.inspect}"
verify_graphite(opts['graphite']) if opts['graphite']
verify_statsd(opts['statsd']) if opts['statsd']
verify_logstash(opts['logstash']) if opts['logstash']
verify_elasticsearch(opts['elasticsearch']) if opts['elasticsearch']
verify_redis(opts['redis']) if opts['redis']
end
protected
def verify_jenkins(port)
port = port.to_i
port = 8080 if port==0
logger.info "verifying jenkins"
output = `curl #{host}:#{port}`
output.match(/jenkins/)
end
def verify_graphite(port)
port = port.to_i
port = 2003 if port==0
logger.info "verifying graphite"
require 'simple-graphite'
g = Graphite.new({:host => host, :port => port})
10.times {
g.push_to_graphite do |graphite|
graphite.puts "server_builder.test.graphite 3.1415926 #{g.time_now}"
end
sleep(0.01)
}
end
# https://gist.github.com/amoslanka/6245043
def verify_statsd(port)
port = port.to_i
port = 8125 if port==0
logger.info "verifying statsd"
require 'statsd-ruby'
statsd = Statsd.new(host, port).tap{|sd| sd.namespace = 'server_builder'}
20.times {
statsd.increment 'test.statsd'
sleep(0.01)
}
end
def verify_logstash(port)
port = port.to_i
port = 49175 if port==0
logger.info "verifying logstash"
require 'logstash-logger'
# LogStashLogger defaults to UDP; pass :tcp to use TCP instead
logger = LogStashLogger.new(host, port, :tcp)
logger.info 'server_builder test logstash logging'
end
def verify_redis(port)
port = port.to_i
port = 6379 if port==0
logger.info "verifying redis on host #{host} port #{port}"
require "redis"
redis = Redis.new(:host => host, :port => port)
test_val = "setting redis"
redis.set("server_builder_test", test_val)
if test_val == redis.get("server_builder_test")
return
else
logger.error "redis validation failed didn't receive same data set"
exit 1
end
end
# ./bin/server_builder verify host=elastic-search.myhost.com elasticsearch=80
def verify_elasticsearch(port)
port = port.to_i
port = 9200 if port==0
logger.info "verifying elasticsearch"
require 'elasticsearch'
# Connect to the cluster at host:port, sniff nodes and round-robin between them
es = Elasticsearch::Client.new hosts: ["#{host}:#{port}"], reload_connections: true
# Index a document:
es.index index: 'server_builder',
type: 'test_post',
id: 1,
body: {
title: "Elasticsearch clients",
content: "Interesting content...",
date: "2013-09-24"
}
# Get the document:
doc = es.get index: 'server_builder', type: 'test_post', id: 1
puts doc
# Search:
doc = es.search index: 'server_builder',
body: { query: { match: { title: 'elasticsearch' } } }
puts doc
end
end
end
|
quantierra/rgeo-activerecord
|
lib/rgeo/active_record/common_adapter_elements.rb
|
# frozen_string_literal: true
module RGeo
module ActiveRecord
# Return a feature type module given a string type.
def self.geometric_type_from_name(name)
case name.to_s
when /^geometrycollection/i then Feature::GeometryCollection
when /^geometry/i then Feature::Geometry
when /^linestring/i then Feature::LineString
when /^multilinestring/i then Feature::MultiLineString
when /^multipoint/i then Feature::MultiPoint
when /^multipolygon/i then Feature::MultiPolygon
when /^point/i then Feature::Point
when /^polygon/i then Feature::Polygon
end
end
end
end
|
quantierra/rgeo-activerecord
|
lib/rgeo/active_record/geometry_mixin.rb
|
<gh_stars>10-100
# frozen_string_literal: true
module RGeo
module ActiveRecord
# This module is mixed into all geometry objects. It provides an
# as_json method so that ActiveRecord knows how to generate JSON
# for a geometry-valued field.
module GeometryMixin
# The default JSON generator Proc. Renders geometry fields as WKT.
DEFAULT_JSON_GENERATOR = Proc.new(&:to_s)
@json_generator = DEFAULT_JSON_GENERATOR
# Set the style of JSON generation used for geometry fields in an
# ActiveRecord model by default. You may pass nil to use
# DEFAULT_JSON_GENERATOR, a proc that takes a geometry as the
# argument and returns an object that can be converted to JSON
# (i.e. usually a hash or string), or one of the following symbolic
# values:
#
# <tt>:wkt</tt>::
# Well-known text format. (Same as DEFAULT_JSON_GENERATOR.)
# <tt>:geojson</tt>::
# GeoJSON format. Requires the rgeo-geojson gem.
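#
# Illustrative usage sketch (not from the original source; the :geojson
# style assumes the rgeo-geojson gem is installed):
#
#   RGeo::ActiveRecord::GeometryMixin.set_json_generator(:geojson)
#   # or with a custom block:
#   RGeo::ActiveRecord::GeometryMixin.set_json_generator { |geom| geom.as_text }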
def self.set_json_generator(value = nil, &block)
if block && !value
value = block
elsif value == :geojson
require "rgeo/geo_json"
value = proc { |geom| GeoJSON.encode(geom) }
end
@json_generator = value.is_a?(Proc) ? value : DEFAULT_JSON_GENERATOR
end
# Given a feature, returns an object that can be serialized as JSON
# (i.e. usually a hash or string), using the current json_generator.
# This is used to generate JSON for geometry-valued ActiveRecord
# fields by default.
def self.generate_json(geom)
@json_generator.call(geom)
end
# Serializes this object as JSON for ActiveRecord.
def as_json(opts = nil)
GeometryMixin.generate_json(self)
end
end
# include this module in every RGeo feature type
[
Geographic::ProjectedGeometryCollectionImpl,
Geographic::ProjectedLinearRingImpl,
Geographic::ProjectedLineImpl,
Geographic::ProjectedLineStringImpl,
Geographic::ProjectedMultiLineStringImpl,
Geographic::ProjectedMultiPointImpl,
Geographic::ProjectedMultiPolygonImpl,
Geographic::ProjectedPointImpl,
Geographic::ProjectedPolygonImpl,
Geographic::SphericalGeometryCollectionImpl,
Geographic::SphericalLinearRingImpl,
Geographic::SphericalLineImpl,
Geographic::SphericalLineStringImpl,
Geographic::SphericalMultiLineStringImpl,
Geographic::SphericalMultiPointImpl,
Geographic::SphericalMultiPolygonImpl,
Geographic::SphericalPointImpl,
Geographic::SphericalPolygonImpl,
Geos::ZMGeometryCollectionImpl,
Geos::ZMGeometryImpl,
Geos::ZMLinearRingImpl,
Geos::ZMLineImpl,
Geos::ZMLineStringImpl,
Geos::ZMMultiLineStringImpl,
Geos::ZMMultiPointImpl,
Geos::ZMMultiPolygonImpl,
Geos::ZMPointImpl,
Geos::ZMPolygonImpl,
Cartesian::GeometryCollectionImpl,
Cartesian::LinearRingImpl,
Cartesian::LineImpl,
Cartesian::LineStringImpl,
Cartesian::MultiLineStringImpl,
Cartesian::MultiPointImpl,
Cartesian::MultiPolygonImpl,
Cartesian::PointImpl,
Cartesian::PolygonImpl
].each { |klass| klass.include(GeometryMixin) }
if RGeo::Geos.capi_supported?
[
Geos::CAPIGeometryCollectionImpl,
Geos::CAPIGeometryImpl,
Geos::CAPILinearRingImpl,
Geos::CAPILineImpl,
Geos::CAPILineStringImpl,
Geos::CAPIMultiLineStringImpl,
Geos::CAPIMultiPointImpl,
Geos::CAPIMultiPolygonImpl,
Geos::CAPIPointImpl,
Geos::CAPIPolygonImpl,
].each { |klass| klass.include(GeometryMixin) }
end
if RGeo::Geos.ffi_supported?
[
Geos::FFIGeometryCollectionImpl,
Geos::FFIGeometryImpl,
Geos::FFILinearRingImpl,
Geos::FFILineImpl,
Geos::FFILineStringImpl,
Geos::FFIMultiLineStringImpl,
Geos::FFIMultiPointImpl,
Geos::FFIMultiPolygonImpl,
Geos::FFIPointImpl,
Geos::FFIPolygonImpl,
].each { |klass| klass.include(GeometryMixin) }
end
end
end
|
quantierra/rgeo-activerecord
|
lib/rgeo/active_record.rb
|
# frozen_string_literal: true
require "rgeo"
require "active_record"
require "rgeo/active_record/version"
require "rgeo/active_record/spatial_expressions"
require "rgeo/active_record/spatial_factory_store"
require "rgeo/active_record/arel_spatial_queries"
require "rgeo/active_record/common_adapter_elements"
require "rgeo/active_record/geometry_mixin"
|
quantierra/rgeo-activerecord
|
lib/rgeo/active_record/spatial_expressions.rb
|
<filename>lib/rgeo/active_record/spatial_expressions.rb
# frozen_string_literal: true
module RGeo
module ActiveRecord
# Returns true if spatial expressions (i.e. the methods in the
# SpatialExpressions module) are supported.
def self.spatial_expressions_supported?
defined?(Arel::Nodes::NamedFunction)
end
# A set of spatial expression builders.
# These methods can be chained off other spatial expressions to form
# complex expressions.
module SpatialExpressions
#--
# Generic functions
#++
def st_function(function, *args)
spatial_info = args.last.is_a?(::Array) ? args.pop : []
SpatialNamedFunction.new(function, [self] + args, spatial_info)
end
#--
# Geometry functions
#++
def st_dimension
SpatialNamedFunction.new("ST_Dimension", [self], [false, true])
end
def st_geometrytype
SpatialNamedFunction.new("ST_GeometryType", [self], [false, true])
end
def st_astext
SpatialNamedFunction.new("ST_AsText", [self], [false, true])
end
def st_asbinary
SpatialNamedFunction.new("ST_AsBinary", [self], [false, true])
end
def st_srid
SpatialNamedFunction.new("ST_SRID", [self], [false, true])
end
def st_isempty
SpatialNamedFunction.new("ST_IsEmpty", [self], [false, true])
end
def st_issimple
SpatialNamedFunction.new("ST_IsSimple", [self], [false, true])
end
def st_boundary
SpatialNamedFunction.new("ST_Boundary", [self], [true, true])
end
def st_envelope
SpatialNamedFunction.new("ST_Envelope", [self], [true, true])
end
def st_equals(rhs)
SpatialNamedFunction.new("ST_Equals", [self, rhs], [false, true, true])
end
def st_disjoint(rhs)
SpatialNamedFunction.new("ST_Disjoint", [self, rhs], [false, true, true])
end
def st_intersects(rhs)
SpatialNamedFunction.new("ST_Intersects", [self, rhs], [false, true, true])
end
def st_touches(rhs)
SpatialNamedFunction.new("ST_Touches", [self, rhs], [false, true, true])
end
def st_crosses(rhs)
SpatialNamedFunction.new("ST_Crosses", [self, rhs], [false, true, true])
end
def st_within(rhs)
SpatialNamedFunction.new("ST_Within", [self, rhs], [false, true, true])
end
def st_contains(rhs)
SpatialNamedFunction.new("ST_Contains", [self, rhs], [false, true, true])
end
def st_overlaps(rhs)
SpatialNamedFunction.new("ST_Overlaps", [self, rhs], [false, true, true])
end
def st_relate(rhs, matrix = nil)
args = [self, rhs]
args << matrix.to_s if matrix
SpatialNamedFunction.new("ST_Relate", args, [false, true, true, false])
end
def st_distance(rhs, units = nil)
args = [self, rhs]
args << units.to_s if units
SpatialNamedFunction.new("ST_Distance", args, [false, true, true, false])
end
def st_intersection(rhs)
SpatialNamedFunction.new("ST_Intersection", [self, rhs], [true, true, true])
end
def st_difference(rhs)
SpatialNamedFunction.new("ST_Difference", [self, rhs], [true, true, true])
end
def st_union(rhs)
SpatialNamedFunction.new("ST_Union", [self, rhs], [true, true, true])
end
def st_symdifference(rhs)
SpatialNamedFunction.new("ST_SymDifference", [self, rhs], [true, true, true])
end
def st_buffer(distance, units = nil)
args = [self, distance.to_f]
args << units.to_s if units
SpatialNamedFunction.new("ST_Buffer", args, [true, true, false])
end
def st_convexhull
SpatialNamedFunction.new("ST_ConvexHull", [self], [true, true])
end
#--
# Point functions
#++
def st_x
SpatialNamedFunction.new("ST_X", [self], [false, true])
end
def st_y
SpatialNamedFunction.new("ST_Y", [self], [false, true])
end
def st_z
SpatialNamedFunction.new("ST_Z", [self], [false, true])
end
def st_m
SpatialNamedFunction.new("ST_M", [self], [false, true])
end
#--
# Curve functions
#++
def st_startpoint
SpatialNamedFunction.new("ST_StartPoint", [self], [true, true])
end
def st_endpoint
SpatialNamedFunction.new("ST_EndPoint", [self], [true, true])
end
def st_isclosed
SpatialNamedFunction.new("ST_IsClosed", [self], [false, true])
end
def st_isring
SpatialNamedFunction.new("ST_IsRing", [self], [false, true])
end
def st_length(units = nil)
args = [self]
args << units.to_s if units
SpatialNamedFunction.new("ST_Length", args, [false, true, false])
end
#--
# LineString functions
#++
def st_numpoints
SpatialNamedFunction.new("ST_NumPoints", [self], [false, true])
end
def st_pointn(n)
SpatialNamedFunction.new("ST_PointN", [self, n.to_i], [true, true, false])
end
#--
# Surface functions
#++
def st_area(units = nil)
args = [self]
args << units.to_s if units
SpatialNamedFunction.new("ST_Area", args, [false, true, false])
end
def st_centroid
SpatialNamedFunction.new("ST_Centroid", [self], [true, true])
end
def st_pointonsurface
SpatialNamedFunction.new("ST_PointOnSurface", [self], [true, true])
end
#--
# Polygon functions
#++
def st_exteriorring
SpatialNamedFunction.new("ST_ExteriorRing", [self], [true, true])
end
def st_numinteriorrings
# Note: the name difference is intentional. The standard
# names this function incorrectly.
SpatialNamedFunction.new("ST_NumInteriorRing", [self], [false, true])
end
def st_interiorringn(n)
SpatialNamedFunction.new("ST_InteriorRingN", [self, n.to_i], [true, true, false])
end
#--
# GeometryCollection functions
#++
def st_numgeometries
SpatialNamedFunction.new("ST_NumGeometries", [self], [false, true])
end
def st_geometryn(n)
SpatialNamedFunction.new("ST_GeometryN", [self, n.to_i], [true, true, false])
end
end
end
end
# Add tools to build spatial structures in the AST.
# Allow chaining of spatial expressions from attributes
Arel::Attribute.send :include, RGeo::ActiveRecord::SpatialExpressions
module Arel
# Create a spatial constant node.
# This node wraps a spatial value (such as an RGeo feature or a text
# string in WKT format). It supports chaining with the functions
# defined by RGeo::ActiveRecord::SpatialExpressions.
def self.spatial(arg)
RGeo::ActiveRecord::SpatialConstantNode.new(arg)
end
end
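# Illustrative chaining sketch (not from the original source; assumes a
# model `Place` with a spatial `geom` column on an adapter that supports
# these expressions):
#
#   region = Arel.spatial("POLYGON((0 0, 0 10, 10 10, 10 0, 0 0))")
#   Place.where(Place.arel_table[:geom].st_within(region))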
|
quantierra/rgeo-activerecord
|
lib/rgeo/active_record/version.rb
|
<reponame>quantierra/rgeo-activerecord
# frozen_string_literal: true
module RGeo
module ActiveRecord
VERSION = "7.0.0"
end
end
|
quantierra/rgeo-activerecord
|
lib/rgeo-activerecord.rb
|
<gh_stars>10-100
# frozen_string_literal: true
require "rgeo/active_record"
|
quantierra/rgeo-activerecord
|
lib/rgeo/active_record/spatial_factory_store.rb
|
# frozen_string_literal: true
module RGeo
module ActiveRecord
class SpatialFactoryStore
include Singleton
Entry = Struct.new(:attrs, :factory)
attr_accessor :registry
def initialize
@registry = []
@default = nil
end
def register(factory, attrs = {})
registry.push(Entry.new(filter_attrs(attrs), factory))
end
def default(attrs = {})
@default || default_for_attrs(attrs)
end
def default=(factory)
@default = factory
end
def factory(attrs)
closest_factory(attrs) || default(attrs)
end
def clear
@registry = []
end
private
def default_for_attrs(attrs)
if attrs[:sql_type] =~ /geography/
Geographic.spherical_factory(to_factory_attrs(attrs))
else
Cartesian.preferred_factory(to_factory_attrs(attrs))
end
end
def to_factory_attrs(attrs)
{
has_m_coordinate: attrs[:has_m],
has_z_coordinate: attrs[:has_z],
srid: (attrs[:srid] || 0),
}
end
def filter_attrs(attrs)
attrs.slice(:geo_type, :has_m, :has_z, :sql_type, :srid)
end
##
# Match attrs to the closest equal to or less specific factory
#
# That means that attrs can at most be matched to an Entry with the same
# number of keys as it. But could match with a factory with only 1 key
# in its attrs.
#
# Examples:
# attrs = {sql_type: "geometry" }, entry_attrs = {sql_type: "geometry", geo_type: "point"}
# is not a match because the entry is more specific than attrs
#
# attrs = {sql_type: "geometry", geo_type: "point"}, entry_attrs = {sql_type: "geometry"}
# is a match because the entry is less specific than attrs and would be the fallback for all "geometry" types
#
# attrs = {sql_type: "geometry", geo_type: "point"}, entry_attrs = {sql_type: "geometry", geo_type: "linestring"}
# is not a match because there are mismatched keys
#
# If there is no match, nil is returned
def closest_factory(attrs)
max_matches = 0
registry.reduce(nil) do |selected_fac, entry|
cmp = cmp_attrs(attrs, entry.attrs)
if cmp > max_matches
max_matches = cmp
entry.factory
else
selected_fac
end
end
end
##
# Returns number of common key/values
# or -1 if oth is bigger than attrs, or they have a mismatched key/value pair
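#
# Illustrative examples (not from the original source):
#   cmp_attrs({ sql_type: "geometry", srid: 3857 }, { sql_type: "geometry" }) # => 1
#   cmp_attrs({ sql_type: "geometry" }, { sql_type: "geography" })            # => -1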
def cmp_attrs(attrs, oth)
return -1 if oth.size > attrs.size
matches = 0
attrs.each do |k, v|
next if oth[k].nil?
return -1 unless v == oth[k]
matches += 1
end
matches
end
end
end
end
|
quantierra/rgeo-activerecord
|
test/common_elements_test.rb
|
# frozen_string_literal: true
require "test_helper"
class CommonElementsTest < Minitest::Test
class Feature
def test
9
end
end
class Point
def test
8
end
end
def test_geometric_type_from_name
assert_equal RGeo::ActiveRecord.geometric_type_from_name(:point), RGeo::Feature::Point
assert_equal RGeo::ActiveRecord.geometric_type_from_name(:polygon), RGeo::Feature::Polygon
assert_equal RGeo::ActiveRecord.geometric_type_from_name(:geometry), RGeo::Feature::Geometry
assert_equal RGeo::ActiveRecord.geometric_type_from_name(:linestring), RGeo::Feature::LineString
assert_equal RGeo::ActiveRecord.geometric_type_from_name(:geometrycollection), RGeo::Feature::GeometryCollection
assert_equal RGeo::ActiveRecord.geometric_type_from_name(:multipoint), RGeo::Feature::MultiPoint
assert_equal RGeo::ActiveRecord.geometric_type_from_name(:multilinestring), RGeo::Feature::MultiLineString
assert_equal RGeo::ActiveRecord.geometric_type_from_name(:multipolygon), RGeo::Feature::MultiPolygon
end
def test_no_namespace_confusion
assert_equal 9, Feature.new.test
end
end
|
quantierra/rgeo-activerecord
|
test/spatial_factory_store_test.rb
|
<reponame>quantierra/rgeo-activerecord
# frozen_string_literal: true
require "test_helper"
class SpatialFactoryStoreTest < Minitest::Test
def test_default
store.default = nil
assert RGeo::Cartesian.preferred_factory === store.default
end
def test_set_default
store.clear
default_factory = Object.new
store.default = default_factory
assert_equal default_factory, store.default
end
def test_register
store.clear
default_factory = Object.new
store.default = default_factory
point_factory = Object.new
store.register point_factory, geo_type: "point", srid: 4326
assert_equal point_factory, store.factory(geo_type: "point", srid: 4326)
assert_equal 1, store.registry.size
assert_equal point_factory, store.factory(geo_type: "point", srid: 4326)
assert_equal 1, store.registry.size
polygon_factory = Object.new
store.register polygon_factory, geo_type: "polygon"
assert_equal polygon_factory, store.factory(geo_type: "polygon")
assert_equal 2, store.registry.size
z_point_factory = Object.new
store.register z_point_factory, geo_type: "point", has_z: true
assert_equal z_point_factory, store.factory(geo_type: "point", has_z: true)
assert_equal default_factory, store.factory(geo_type: "linestring")
end
def test_register_filter_attrs
store.clear
factory = Object.new
store.register(factory, { geo_type: "point", my_custom_field: "data" })
assert_equal(store.registry.first.attrs, { geo_type: "point" })
end
def test_fetch_factory
store.clear
default_factory = Object.new
store.default = default_factory
# test fallback
geom_factory = Object.new
store.register(geom_factory, { sql_type: "geometry" })
assert_equal(geom_factory, store.factory({ sql_type: "geometry", srid: 3857 }))
# test exact match
geom_merc_factory = Object.new
store.register(geom_merc_factory, { sql_type: "geometry", srid: 3857 })
assert_equal(geom_merc_factory, store.factory({ sql_type: "geometry", srid: 3857 }))
# test mismatched params
assert_equal(default_factory, store.factory({ sql_type: "geography", srid: 3857 }))
end
def test_fetch_factory_specificity
store.clear
default_factory = Object.new
store.default = default_factory
geom_factory = Object.new
store.register(geom_factory, { sql_type: "geometry", srid: 3857, geo_type: "point" })
assert_equal(default_factory, store.factory({ sql_type: "geometry", srid: 3857 }))
end
def test_fetch_factory_order
store.clear
default_factory = Object.new
store.default = default_factory
fac1 = Object.new
store.register(fac1, { sql_type: "geometry" })
fac2 = Object.new
store.register(fac2, { sql_type: "geometry" })
assert_equal(fac1, store.factory({ sql_type: "geometry" }))
end
private
def store
RGeo::ActiveRecord::SpatialFactoryStore.instance
end
end
|
jcppkkk/redmine_live_relative_time
|
init.rb
|
<reponame>jcppkkk/redmine_live_relative_time
require 'redmine'
require 'application_helper_patch'
require_dependency 'hooks/append_javascript_head_hook'
Redmine::Plugin.register :redmine_live_relative_time do
name 'Redmine Live Relative Time'
author '<NAME>, <EMAIL>'
url 'https://github.com/igloonet/redmine_live_relative_time.git'
author_url 'https://igloonet.cz'
description 'Little plugin which auto update relative times (1 minute ago) as times go.'
version '1.1'
Rails.configuration.to_prepare do
ApplicationHelper.send(:include, RedmineLiveRelativeTime)
end
end
|
jcppkkk/redmine_live_relative_time
|
lib/application_helper_patch.rb
|
module RedmineLiveRelativeTime
def self.included(base) # :nodoc:
base.send(:include, InstanceMethods)
base.class_eval do
unloadable
alias_method :time_tag_dead, :time_tag
alias_method :time_tag, :time_tag_live
end
end
module InstanceMethods
def time_tag_live(time)
text = distance_of_time_in_words(Time.now, time)
if @project
link_to(text, {:controller => 'activities', :action => 'index', :id => @project, :from => User.current.time_to_date(time)}, :title => format_time(time), :class => 'time-tag', data: { livestamp: time.to_i.to_s })
else
content_tag('abbr', text, :title => format_time(time), :class => 'time-tag', data: { livestamp: time.to_i.to_s })
end
end
end
end
|
thefrontiergroup/thefrontiergroup-matchers
|
spec/spec_helper.rb
|
require 'byebug'
$LOAD_PATH << './lib/'
|
thefrontiergroup/thefrontiergroup-matchers
|
lib/thefrontiergroup/matchers/be_paginated.rb
|
<gh_stars>1-10
RSpec::Matchers.define :be_paginated do
match do |object|
object.respond_to?(:current_page) && object.respond_to?(:total_pages)
end
description do
"be paginated"
end
failure_message do |object|
"expected #{object.inspect} to be a paginated collection"
end
failure_message_when_negated do
"expected #{object.inspect} not to be a paginated collection"
end
end
|
thefrontiergroup/thefrontiergroup-matchers
|
spec/matchers/order_spec.rb
|
<filename>spec/matchers/order_spec.rb
require 'spec_helper'
require 'thefrontiergroup/matchers/order'
describe 'order matcher' do
subject { elements }
let(:elements) { [ first_element, second_element, third_element ] }
let(:first_element) { 'A' }
let(:second_element) { 'B' }
let(:third_element) { 'C' }
it { is_expected.to order(second_element).after(first_element) }
it { is_expected.not_to order(first_element).after(second_element) }
it { is_expected.to order(third_element).after(first_element) }
it { is_expected.to order(third_element).after(second_element) }
end
|
thefrontiergroup/thefrontiergroup-matchers
|
spec/matchers/be_paginated_spec.rb
|
require 'spec_helper'
require 'thefrontiergroup/matchers/be_paginated'
class PaginatedUser
class << self
def current_page
1
end
def total_pages
1
end
end
end
class NonPaginatedUser
end
describe 'be_paginated matcher' do
subject { resource }
context "when resource responds to pagination methods" do
let(:resource) { PaginatedUser }
it { is_expected.to be_paginated }
end
context "when resource does not respond to pagination methods" do
let(:resource) { NonPaginatedUser }
it { is_expected.not_to be_paginated }
end
end
|
thefrontiergroup/thefrontiergroup-matchers
|
lib/thefrontiergroup/matchers/order.rb
|
RSpec::Matchers.define :order do |origin|
chain :after do |other|
@after = other
end
match do |actual|
@result = true
if @after
actual.each do |item|
if item == origin
@result = false
break
end
break if item == @after
end
end
@result
end
description do
"be in the correct order"
end
failure_message do |actual|
"expected\n'#{origin.inspect}'\n to be after:\n'#{@after.inspect}'\n in:\n'#{actual.inspect}'"
end
failure_message_when_negated do |actual|
"expected\n'#{origin.inspect}'\n to be before:\n'#{@after.inspect}'\n in:\n'#{actual.inspect}'"
end
end
|
thefrontiergroup/thefrontiergroup-matchers
|
lib/thefrontiergroup/matchers/set_flash.rb
|
<reponame>thefrontiergroup/thefrontiergroup-matchers
RSpec::Matchers.define :set_flash do |flash_type|
chain :to do |message|
@message = message
end
match do |response|
if @message.nil?
flash.has_key? flash_type
else
@message === flash[flash_type]
end
end
description do
"set flash[:#{flash_type}] to #{@message}"
end
failure_message do
if @message.nil?
"expected flash[:#{flash_type}] to be set"
else
"expected flash[:#{flash_type}] to be #{@message}, got #{flash[flash_type].to_s}"
end
end
failure_message_when_negated do
if @message.nil?
"expected flash[:#{flash_type}] not to be set"
else
"expected flash[:#{flash_type}] not to be #{@message}"
end
end
end
|
thefrontiergroup/thefrontiergroup-matchers
|
spec/matchers/response_status_matchers_spec.rb
|
<gh_stars>1-10
require 'spec_helper'
require 'thefrontiergroup/matchers/response_status_matchers'
require 'ostruct'
[
[ "be_bad_request", 400 ],
[ "be_forbidden", 403 ],
[ "be_not_found", 404 ],
[ "be_unauthorised", 401 ],
[ "be_unprocessable_entity", 422 ],
[ "be_internal_server_error", 500 ]
].each do |matcher, status|
describe "#{matcher} matcher" do
subject { request }
let(:request) { double }
let(:response) { OpenStruct.new(status: returned_status) }
before { allow(request).to receive(:status) }
context "when response status is #{status}" do
let(:returned_status) { status }
it { is_expected.to eval(matcher) }
end
context "when response status is not #{status}" do
let(:returned_status) { 123 }
it { is_expected.not_to eval(matcher) }
end
end
end
|
thefrontiergroup/thefrontiergroup-matchers
|
lib/thefrontiergroup/matchers.rb
|
require "thefrontiergroup/matchers/be_paginated"
require "thefrontiergroup/matchers/order"
require "thefrontiergroup/matchers/response_status_matchers"
require "thefrontiergroup/matchers/set_flash"
require "thefrontiergroup/matchers/version"
|
thefrontiergroup/thefrontiergroup-matchers
|
spec/matchers/set_flash_spec.rb
|
<gh_stars>1-10
require 'spec_helper'
require 'thefrontiergroup/matchers/set_flash'
describe 'set_flash matcher' do
subject { double(flash: {notice: 'foobar'}) }
it { is_expected.to set_flash(:notice) }
it { is_expected.not_to set_flash(:alert) }
it { is_expected.to set_flash(:notice).to('foobar') }
it { is_expected.to set_flash(:notice).to(/foo/) }
it { is_expected.not_to set_flash(:notice).to('not foobar') }
it { is_expected.not_to set_flash(:notice).to(/not/) }
# When in a Rails app, `flash` in the matcher is defined on ActionDispatch::TestProcess
# and `response` in the matcher is an ActionController::TestResponse
# This mimics that
def flash
subject.flash
end
end
|
thefrontiergroup/thefrontiergroup-matchers
|
lib/thefrontiergroup/matchers/response_status_matchers.rb
|
<gh_stars>1-10
def define_status_code_matcher(name, status)
RSpec::Matchers.define "be_#{name}" do
match do
subject
response.status == status
end
description do
"have the HTTP status code #{status}"
end
failure_message do |actual|
"expected HTTP status code to be #{status} but was #{actual.status}"
end
failure_message_when_negated do
"expected HTTP status code not to be #{status}"
end
end
end
[
[ "bad_request", 400 ],
[ "forbidden", 403 ],
[ "not_found", 404 ],
[ "unauthorised", 401 ],
[ "unprocessable_entity", 422 ],
[ "internal_server_error", 500 ]
].each do |name, status|
define_status_code_matcher(name, status)
end
|
philbritton/azure-sync
|
azure-sync.rb
|
require 'waz-blobs'
#
# Constants for accessing your azure account
#
# access key is the really long guid that is generated
@access_key = ""
# account name is the name of the storage. So buddystore.blob.core.windows.net
# the account name would be buddystore
@account_name = ""
# what is the name of the container you want to put "stuff" in
# I usually use the name of the computer I am on
@main_container_name = ""
#
# helper methods
#
def all_filesystem_files
`ls`.split("\n")
end
def file_type(filename)
`file -Ib #{filename}`.gsub(/\n/,"").split('; ')[0]
end
WAZ::Storage::Base.establish_connection!(:account_name => @account_name, :access_key => @access_key)
#
# Gets the container if it doesn't exist it creates it
#
container = WAZ::Blobs::Container.find(@main_container_name)
if container.nil? then
WAZ::Blobs::Container.create(@main_container_name)
container = WAZ::Blobs::Container.find(@main_container_name)
end
#
# Gets a list of all the new files on the file system
# and is ready to upload them to azure
#
azure_files = container.blobs.map(&:name)
local_files = all_filesystem_files
intersection = azure_files & local_files
final_files = local_files - intersection
final_files.delete("azure-sync.rb")
final_files.delete("README")
final_files.delete("Gemfile")
final_files.delete("Gemfile.lock")
#
# Uploads the new files to azure
#
if final_files.size > 0 then
final_files.each do |f|
container.store(f, File.open(f), file_type(f))
end
puts 'save was successful'
else
puts 'no files to save'
end
|
thepartisan/asciidoctor-kroki
|
ruby/lib/asciidoctor/extensions/asciidoctor_kroki/version.rb
|
<gh_stars>10-100
# frozen_string_literal: true
module Asciidoctor
module AsciidoctorKroki
VERSION = '0.5.0'
end
end
|
thepartisan/asciidoctor-kroki
|
ruby/asciidoctor-kroki.gemspec
|
# frozen_string_literal: true
require_relative 'lib/asciidoctor/extensions/asciidoctor_kroki/version'
Gem::Specification.new do |s|
s.name = 'asciidoctor-kroki'
s.version = Asciidoctor::AsciidoctorKroki::VERSION
s.summary = 'Asciidoctor extension to convert diagrams to images using Kroki'
s.description = 'An extension for Asciidoctor to convert diagrams to images using https://kroki.io'
s.authors = ['<NAME>']
s.email = ['<EMAIL>']
s.homepage = 'https://github.com/Mogztter/asciidoctor-kroki'
s.license = 'MIT'
s.metadata = {
'bug_tracker_uri' => 'https://github.com/Mogztter/asciidoctor-kroki/issues',
'source_code_uri' => 'https://github.com/Mogztter/asciidoctor-kroki'
}
s.files = `git ls-files`.split($RS)
s.test_files = s.files.grep(%r{^(test|spec|features|tasks)/})
s.require_paths = ['lib']
s.add_runtime_dependency 'asciidoctor', '~> 2.0'
s.add_development_dependency 'rake', '~> 13.0.6'
s.add_development_dependency 'rspec', '~> 3.10.0'
s.add_development_dependency 'rubocop', '~> 1.21'
end
|
fernandofeoli/ApiCaelumEAP
|
app/controllers/interpretacoes_controller.rb
|
<gh_stars>0
class InterpretacoesController < ApplicationController
wrap_parameters :interpretacao, include: %i[nome valor_minimo valor_maximo]
before_action :set_escala
before_action :set_escala_interpretacao, only: [:show, :update, :destroy]
# GET /interpretacoes
def index
json_response(@escala.interpretacoes)
end
# GET /interpretacoes/1
def show
json_response(@interpretacao)
end
# POST /interpretacoes
def create
@interpretacao = @escala.interpretacoes.create!(interpretacao_params)
json_response(@interpretacao, :created)
end
# PATCH/PUT /interpretacoes/1
def update
@interpretacao.update(interpretacao_params)
json_response(@interpretacao, :updated)
end
# DELETE /interpretacoes/1
def destroy
@interpretacao.destroy
head :no_content
end
private
# Use callbacks to share common setup or constraints between actions.
def set_escala
@escala = Escala.find_by(id: params[:escala_id])
end
def set_escala_interpretacao
@interpretacao = @escala.interpretacoes.find_by!(id: params[:id])
end
# Only allow a trusted parameter "white list" through.
def interpretacao_params
params.require(:interpretacao).permit(:nome, :valor_minimo, :valor_maximo)
end
end
|
fernandofeoli/ApiCaelumEAP
|
app/models/resposta.rb
|
<reponame>fernandofeoli/ApiCaelumEAP
class Resposta < ApplicationRecord
belongs_to :parametro
has_many :resultados
has_many :registros, through: :resultados
end
|
fernandofeoli/ApiCaelumEAP
|
app/models/registro.rb
|
<filename>app/models/registro.rb
class Registro < ApplicationRecord
belongs_to :paciente
belongs_to :escala
has_many :resultados
has_many :parametros, through: :resultados
end
|
fernandofeoli/ApiCaelumEAP
|
app/models/paciente.rb
|
class Paciente < ApplicationRecord
validates :cpf, presence: { message: 'Invalid: Field can not be empty.' }
validates :nome, presence: { message: 'Invalid: Field can not be empty.' }
belongs_to :user
has_many :registros
has_many :escalas, through: :registros
end
|
fernandofeoli/ApiCaelumEAP
|
app/controllers/escalas_controller.rb
|
<reponame>fernandofeoli/ApiCaelumEAP<filename>app/controllers/escalas_controller.rb
class EscalasController < ApplicationController
before_action :set_escala, only: [:show, :update, :destroy]
# GET /escalas
def index
@escalas = Escala.all
json_response(@escalas)
end
# POST /escalas
def create
@escala = Escala.create(escala_params)
json_response(@escala, :created)
end
# GET /escalas/:id
def show
json_response(@escala)
end
# PUT /escalas/1
def update
@escala.update(escala_params)
json_response(@escala, :updated)
end
# DELETE /escalas/1
def destroy
@escala.destroy
head :no_content
end
private
# Use callbacks to share common setup or constraints between actions.
def set_escala
@escala = Escala.find(params[:id])
end
# Only allow a trusted parameter "white list" through.
def escala_params
params.require(:escala).permit(:nome)
end
end
|
fernandofeoli/ApiCaelumEAP
|
app/controllers/respostas_controller.rb
|
class RespostasController < ApplicationController
wrap_parameters :resposta, include: %i[nome pontuacao]
before_action :set_escala
before_action :set_escala_parametro
before_action :set_parametro_resposta, only: [:show, :update, :destroy]
# GET /respostas
def index
json_response(@parametro.respostas)
end
# GET /respostas/1
def show
json_response(@resposta)
end
# POST /respostas
def create
@parametro.respostas.create!(resposta_params)
json_response(@parametro, :created)
end
# PATCH/PUT /respostas/1
def update
@resposta.update(resposta_params)
json_response(@resposta, :updated)
end
# DELETE /respostas/1
def destroy
@resposta.destroy
head :no_content
end
private
# Use callbacks to share common setup or constraints between actions.
def set_escala
@escala = Escala.find_by(id: params[:escala_id])
end
def set_escala_parametro
@parametro = @escala.parametros.find_by!(id: params[:parametro_id])
end
def set_parametro_resposta
@resposta = @parametro.respostas.find_by!(id: params[:id])
end
# Only allow a trusted parameter "white list" through.
def resposta_params
params.require(:resposta).permit(:nome, :pontuacao)
end
end
|
fernandofeoli/ApiCaelumEAP
|
config/initializers/session_store.rb
|
if Rails.env == "production"
Rails.application.config.session_store :cookie_store, key: "_ApiCaelumEAP", domain: "api-caelum-eap.herokuapp.com"
else
Rails.application.config.session_store :cookie_store, key: "_ApiCaelumEAP"
end
|
fernandofeoli/ApiCaelumEAP
|
db/migrate/20191104074052_create_registros.rb
|
<reponame>fernandofeoli/ApiCaelumEAP
class CreateRegistros < ActiveRecord::Migration[6.0]
def change
create_table :registros do |t|
t.integer :pontuacao
t.references :paciente, null: false, foreign_key: true
t.references :escala, null: false, foreign_key: true
t.timestamps
end
end
end
|
fernandofeoli/ApiCaelumEAP
|
spec/requests/sessions_spec.rb
|
<reponame>fernandofeoli/ApiCaelumEAP
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe 'user correct login', type: :request do
headers = {
'ACCEPT' => 'application/json'
}
before do
@user = build(:user)
post '/sessions', params: { user: {
email: @user.email,
password: <PASSWORD>
} }, headers: headers
end
it 'returns a created status' do
expect(response).to have_http_status(:ok)
end
end
RSpec.describe 'user error login', type: :request do
headers = {
'ACCEPT' => 'application/json'
}
before do
@user = build(:user)
post '/sessions', params: { user: {
email: '',
password: <PASSWORD>
} }, headers: headers
end
it 'returns an unauthorized status' do
expect(response).to have_http_status(:unauthorized)
end
end
|
fernandofeoli/ApiCaelumEAP
|
app/models/interpretacao.rb
|
class Interpretacao < ApplicationRecord
belongs_to :escala
end
|
fernandofeoli/ApiCaelumEAP
|
spec/requests/registrations_spec.rb
|
# frozen_string_literal: true
require 'rails_helper'
RSpec.describe 'create new user', type: :request do
before do
@user = build(:user)
post '/registrations', params: { user: {
nome: @user.nome,
email: @user.email,
password: <PASSWORD>,
password_confirmation: <PASSWORD>
} }
end
it 'returns a created status' do
expect(response).to have_http_status(:ok)
end
end
|
fernandofeoli/ApiCaelumEAP
|
app/controllers/resultados_controller.rb
|
<gh_stars>0
class ResultadosController < ApplicationController
wrap_parameters :resultado, include: %i[resposta_id parametro_id]
before_action :set_registro
before_action :set_registro_resultado, only: [:show, :update, :destroy]
# GET /resultados
def index
json_response(@registro.resultados)
end
# GET /resultados/1
def show
json_response(@resultado)
end
# POST /resultado
def create
@registro.resultados.create(resultado_params)
json_response(@registro.resultados.last, :created)
end
# PATCH/PUT /resultados/1
def update
@resultado.update(resultado_params)
json_response(@resultado, :updated)
end
# DELETE /resultados/1
def destroy
@resultado.destroy
head :no_content
end
private
# Use callbacks to share common setup or constraints between actions.
def set_registro
@registro = Registro.find_by!(id: params[:registro_id])
end
def set_registro_resultado
@resultado = @registro.resultados.find_by!(id: params[:id])
end
# Only allow a trusted parameter "white list" through.
def resultado_params
params.require(:resultado).permit(:resposta_id, :parametro_id)
end
end
|
fernandofeoli/ApiCaelumEAP
|
app/controllers/parametros_controller.rb
|
<reponame>fernandofeoli/ApiCaelumEAP
class ParametrosController < ApplicationController
wrap_parameters :parametro, include: %i[nome]
before_action :set_escala
before_action :set_escala_parametro, only: [:show, :update, :destroy]
# GET /parametros
def index
json_response(@escala.parametros)
end
# GET /parametros/1
def show
json_response(@parametro)
end
# POST /parametros
def create
@escala.parametros.create!(parametro_params)
json_response(@escala, :created)
end
# PATCH/PUT /parametros/1
def update
@parametro.update(parametro_params)
json_response(@parametro, :updated)
end
# DELETE /parametros/1
def destroy
@parametro.destroy
head :no_content
end
private
# Use callbacks to share common setup or constraints between actions.
def set_escala
@escala = Escala.find_by(id: params[:escala_id])
end
def set_escala_parametro
@parametro = @escala.parametros.find_by!(id: params[:id])
end
# Only allow a trusted parameter "white list" through.
def parametro_params
params.require(:parametro).permit(:nome)
end
end
|
fernandofeoli/ApiCaelumEAP
|
db/migrate/20191102221110_create_interpretacoes.rb
|
<gh_stars>0
class CreateInterpretacoes < ActiveRecord::Migration[6.0]
def change
create_table :interpretacoes do |t|
t.string :nome
t.integer :valor_maximo
t.integer :valor_minimo
t.references :escala, null: false, foreign_key: true
t.timestamps
end
end
end
|
fernandofeoli/ApiCaelumEAP
|
db/migrate/20191031041327_add_details_to_user.rb
|
<filename>db/migrate/20191031041327_add_details_to_user.rb<gh_stars>0
class AddDetailsToUser < ActiveRecord::Migration[6.0]
def change
add_column :users, :nome, :string
add_column :users, :cargo, :string
add_column :users, :documento, :string
end
end
|
fernandofeoli/ApiCaelumEAP
|
config/routes.rb
|
Rails.application.routes.draw do
root to: "static#home"
resources :sessions, only: [:create]
resources :registrations, only: [:create]
delete :logout, to: 'sessions#logout'
get :logged_in, to: 'sessions#logged_in'
resources :pacientes do
resources :registros do
resources :resultados
end
end
resources :escalas do
resources :interpretacoes
resources :parametros do
resources :respostas
end
end
# For details on the DSL available within this file, see https://guides.rubyonrails.org/routing.html
end
|
fernandofeoli/ApiCaelumEAP
|
app/models/parametro.rb
|
<filename>app/models/parametro.rb
class Parametro < ApplicationRecord
belongs_to :escala
has_many :respostas
has_many :resultados
has_many :registros, through: :resultados
end
|
fernandofeoli/ApiCaelumEAP
|
app/controllers/pacientes_controller.rb
|
class PacientesController < ApplicationController
include CurrentUserConcern
before_action :set_paciente, only: [:show, :update, :destroy]
# GET /pacientes
def index
@pacientes = @current_user.pacientes
json_response(@pacientes)
end
# GET /pacientes/1
def show
render json: @paciente
end
# POST /pacientes
def create
@paciente = @current_user.pacientes.create(paciente_params)
json_response(@paciente, :created)
end
# PATCH/PUT /pacientes/1
def update
@paciente.update(paciente_params)
json_response(@paciente, :updated)
end
# DELETE /pacientes/1
def destroy
@paciente.destroy
head :no_content
end
private
# Use callbacks to share common setup or constraints between actions.
def set_paciente
@paciente = Paciente.find(params[:id])
end
# Only allow a trusted parameter "white list" through.
def paciente_params
params.require(:paciente).permit(:nome, :cpf, :genero, :hip_diag, :idade)
end
end
|
malloc3/YG_Harmonization
|
libraries/PlateReaderMethods.rb
|
# By: <NAME>
# <EMAIL>
needs "Yeast Display/YeastDisplayHelper"
needs "Tissue Culture Libs/CollectionDisplay"
needs 'Standard Libs/AssociationManagement'
needs "YG_Harmonization/Upload_PlateReader_Data"
# needs "YG_Harmonization/BiotekPlateReaderCalibration"
module PlateReaderMethods
include YeastDisplayHelper
include CollectionDisplay
include Upload_PlateReader_Data
# include BiotekPlateReaderCalibration
include AssociationManagement
SAVING_DIRECTORY = "_UWBIOFAB"
# Experimental Default volume
DEFAULT_VOLUME = { qty: 300, units: 'µl' }
PLT_READER_LOC = "A10.530"
PLT_READER_TYPE = 'Biotek Synergy HT'
# Plate Reader Calibration
CAL_TEMPLATE_FILENAME = "calibration_template_v1"
CAL_MEASUREMENTS = ['cal_od', 'cal_gfp']
# Directs technician to set up biotek plate reader software
#
# @params collection [collection obj] the collection that is being measured
# @params template_filename [string] the name of the biotek protcol/measurement template
def set_up_plate_reader(collection, template_filename)
if template_filename.include? 'calibration_template'
experiment_filename = "experiment_calibration_plate_#{todays_date}"
else
experiment_filename = "experiment_#{collection.id}_#{todays_date}"
end
# Open Biotek software
# Set up plate reader workspace and taking measurements
# select new exp and save
img1 = "Actions/Yeast_Gates/plateReaderImages/open_biotek.PNG"
img2 = "Actions/Yeast_Gates/plateReaderImages/begin_plate_reader.PNG"
show do
title "Setting Up Plate Reader Workspace"
separator
note "<b>The next steps should be done on the plate reader computer</b>."
note "<b>1.</b> Open BioTek Gen5 software by clicking the icon shown below."
image img1
note "<b>2.</b> Under <b>'Create a New Item'</b> click <b>'Experiment'</b> "
# select template
note "<b>3.</b> From the list select: <b>#{template_filename}</b>"
note "<b>4.</b> Click Read Plate icon shown below"
image img2
note "<b>5.</b> Click <b>'READ'</b> on the pop-up window."
bullet "Name experiment file: <b>#{experiment_filename}</b>"
bullet "<b>Save</b> it under the <b>#{SAVING_DIRECTORY}</b> folder."
note "<b>6.</b> Load plate and click <b>'OK'</b>"
end
# show do
# title "Setting Up Plate Reader Workspace"
# note "Take 96 well plate to the plate reader computer, under cabinet <b>#{PLT_READER_LOC}</b>."
# note "Open BioTek Gen5 software by clicking the icon shown below."
# image img1
# note "Under 'Create a New Item' click <b>'Experiment'</b> "
# # select template
# note "From the list select <b>#{template_filename}</b>"
# note "Next, click Read Plate icon shown below and click <b>'READ'</b> on the pop-up window."
# image img2
# note "Name experiment file: <b>#{experiment_filename}</b>"
# note "Finally, save it under the <b>#{SAVING_DIRECTORY}</b> folder."
# note "Load plate and click <b>'OK'</b>"
# end
end
def add_blanks(volume={}, media)
volume = DEFAULT_VOLUME unless volume.present?
show do
title "Add Blanks to Plate"
note "Prior to our measurement, we must add a blank to get a true OD reading."
check "Fill the last three wells of the 96 Well plate <b>H10, H11, H12</b> with #{qty_display(volume)} of <b>#{media}</b> liquid media."
end
end
def load_plate
show do
title "Load Plate Reader"
note "Load plate on to the plate reader and click <b>'OK'</b>"
end
end
# Exports data from plate reader (BioTek Gen 5)
#
# @params collection [collection obj] collection that is being measured
# @params timepoint [integer] what hour into the experiment is this data being collected
# @params method [string] what is being measured on the plate reader can also be gfp
# @return filename [string] filename generated with information for downstream processing
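#
# Illustrative call (not from the original source; jid and todays_date come
# from the surrounding protocol context):
#   filename = export_data(collection, 6, 'gfp')
#   # => "jid_<job id>_item_<collection id>_6hr_<MMDDYYYY>_gfp"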
def export_data(collection, timepoint, method='od')
if method.include? 'cal'
filename = "jid_#{jid}_item_#{collection.id}_#{todays_date}_#{method}"
else
filename = "jid_#{jid}_item_#{collection.id}_#{timepoint}hr_#{todays_date}_#{method}"
end
img1 = "Actions/Yeast_Gates/plateReaderImages/exporting_data_new.GIF"
img2 = "Actions/Yeast_Gates/plateReaderImages/excel_export_button_new.png"
img3 = "Actions/Yeast_Gates/plateReaderImages/saving_export_csv_new.png"
case method
when 'od'
dtype = 'Blank Read 1:600'
desc = 'Optical Density'
when 'gfp'
dtype = 'Blank Read 2:485/20,516/20'
desc = 'Fluorescence'
when 'cal_od'
dtype = 'Read 1:600'
desc = 'Calibration Optical Density'
when 'cal_gfp'
dtype = 'Read 2:485/20,516/20'
desc = 'Calibration Fluorescence'
else
dtype = ''
end
# Exporting single file (csv)
show do
title "Export #{desc} Measurements from Plate Reader"
warning "Make sure that no other Excel sheets are open before exporting!"
separator
image img1
bullet "Select the <b>'Statistics'</b> tab"
bullet "Select Data: <b>#{dtype}</b>"
separator
note "Next, click the Excel sheet export button. <b>The sheet will appear on the menu bar below</b>."
image img2
warning "Make sure to save file as a .CSV file!"
note "Go to sheet and <b>'Save as'</b> ==> <b>#{filename}</b> under the <b>#{SAVING_DIRECTORY}</b> folder."
image img3
end
# show do
# title "Export #{desc} Measurements from Plate Reader"
# warning "Make sure that no other Excel sheets are open before exporting!"
# separator
# note "After measurements have been taken, be sure to select the <b>'Statistics'</b> tab"
# note "Select Data: <b>#{dtype}</b>"
# image img1
# note "Next, click the Excel sheet export button. The sheet will appear on the menu bar below."
# image img2
# note "Go to sheet and 'Save as' <b>#{filename}</b> under the <b>#{SAVING_DIRECTORY}</b> folder."
# warning "Make sure to save file as a .CSV file!"
# image img3
# end
return filename
end
def todays_date
DateTime.now.strftime("%m%d%Y")
end
# Give an introduction to the sync by OD protocol
#
# @param wavelength [integer] the type of light measured, 0 to 900
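#
# Illustrative call (not from the original source):
#   intro_sync_OD(600)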
def intro_sync_OD(wavelength)
show do
title "Sychronization of Cultures by OD"
note "In this protocol you will be measuring the cell concentration of cultures by Optical Density."
note "Then we will normalize all cultures to a similar cellular concentration to begin our growth experiment."
note "This allows researchers to observe discrepencies in growth rates of different strains and allows us to compare conditions."
note "<b>1.</b> Setup Plate Reader (Biotek) & measure OD#{wavelength}."
note "<b>2.</b> Take a calculated volume from each well and dilute into a 96 Deep Well Plate."
end
end
# Associates the actual ODs calculated from the well OD and the average culture volume diluted into the final volume
#
# @params out_coll [collection obj] output collection that the od_mat will be associated to
# @params od_mat [2-D Array] matrix containing ODs of the output collection
def associate_true_ods_item(out_coll,od_mat)
timepoint = 0 # timepoint now at t = 0 since we have diluted our cultures to the necessary starting ODs
k = 'optical_density'
method = 'od'
od_hsh = Hash.new(0)
od_hsh["#{timepoint}_hr_#{method}"] = od_mat
Item.find(out_coll.id).associate(k, od_hsh)
end
# # This function directs tech to pre-fill deep well plate with required media
# #
# # @params out_coll [collection obj] the output collection object
# # @params type_of_media [string] describes the type of media to be used in the experiment
# # @params media_vol_mat [2-D Array] is a matrix of the media volume per well in ul
# def aliquot_media(out_coll, media_vol_mat, media)
# # Direct tech to fill output plate with media
# tot_media_vol = 0
# media_vol_mat.flatten.each {|vol| tot_media_vol += vol}
# # Where controls are to be placed in the experimental plate
# gfp_input_cult_coord = 'H9'
# wt_no_stain_coord = 'H7'
# wt_stain_coord = 'H8'
# rc_list = out_coll.get_non_empty
# rc_list.push(find_rc_from_alpha_coord(alpha_coord=gfp_input_cult_coord).first)
# rc_list.push(find_rc_from_alpha_coord(alpha_coord=wt_no_stain_coord).first)
# rc_list.push(find_rc_from_alpha_coord(alpha_coord=wt_stain_coord).first)
# log_info 'rc_list', rc_list
# show do
# title "Filling New 96 Deep Well Plate #{out_coll}"
# note "For this step you will need:"
# check "96 Well Deep U-bottom Plate and label with <b>#{out_coll.id}</b>"
# check "Multichannel Reservoir"
# check "Breathable AeraSeal Plate Cover"
# check "<b>#{((tot_media_vol/1000) + 1).round(1)} mLs</b> of <b>#{media}</b> liquid growth media."
# end
# show do
# title "Filling 96 Deep Well Plate #{out_coll}"
# note "Follow the table bellow to aliquot the appropriate amount of <b>#{media}</b> media to the respective well:"
# table highlight_rc(out_coll, rc_list) { |r, c| "#{(media_vol_mat[r][c]).round(1)} µl" }
# end
# end
# -------------------------------------------------PlateReaderControl------------------------------------------------------------------#
# def calibration_plate_chk()
# # is plate made already?
# check_cal_plate = show do
# title "Calibrating the #{PLT_READER_TYPE} Plate Reader"
# separator
# select [ "Yes", "No"], var: "cal_plate", label: "Is there a calibration plate that is less than 2 weeks old? If not, select 'No' and proceed to the next step."
# # note "If yes, take the calibration plate and place on the plate shaker in the 30°C incubator for 5 mins."
# end
# return (check_cal_plate[:cal_plate] == "No" ? false : true)
# end
# # Creates a calibration plate for the plate reader with a flourescence dye and a optical density reagent
# #
# # @params flour [string]
# # @params ludox [string]
# # @params collection [collection obj] container of plate reader cal solutions
# def create_cal_plate(cal_coll)
# flour_samp = Sample.find_by_name("Fluorescein Sodium Salt" )
# ludox_samp = Sample.find_by_name("LUDOX Stock")
# # Items and materials required for calibration plate
# flour_item = find(:item, { sample: { name: flour_samp.name }, object_type: { name: "1mM Fluorescein Stock" } } ).first
# ludox_item = find(:item, { sample: { name: ludox_samp.name }, object_type: { name: "1X LUDOX Aliquot" } } ).first
# cal_items = [flour_item, ludox_item]
# take cal_items, interactive: true
# h2o_type = "Nuclease-free water" # Change in Production Aq to Mol grade H2O
# h2o_samp = Sample.find_by_name(h2o_type)
# cal_plt_mats = {'1X PBS'=>'Bench', 'Mol. Grade H2O'=>'Media Bay', '96 Well Flat Bottom (black)'=>'Bench'}
# show do
# title "Creating a New Calibration Plate"
# separator
# note "<b>Gather the following:</b>"
# cal_plt_mats.each {|mat, loc| check "#{mat} at #{loc}"}
# end
# show do
# title "Creating a New Calibration Plate"
# separator
# note "Vortex 1mM Fluorescein Stock and make sure there are no precipitates."
# check "In a fresh 1.5mL Eppendorf tube, dilute 50µl of 1mM Fluorescein Stock into 950µl of 1X PBS - Final Concentration [50µM]"
# note "Make sure to vortex."
# end
# dims = cal_coll.dimensions
# # log_info 'dims', dims
# rows = dims[0]
# cols = dims[1]
# new_coll_mat = Array.new(rows) { Array.new(cols) { -1 } }
# rows.times do |r|
# cols.times do |c|
# if r < 4
# new_coll_mat[r][c] = flour_samp.id
# elsif r == 4
# new_coll_mat[r][c] = ludox_samp.id
# elsif r == 5
# new_coll_mat[r][c] = h2o_samp.id
# end
# end
# end
# cal_plate = cal_coll
# cal_plate.matrix = new_coll_mat
# cal_plate.save
# # log_info 'new_coll_mat', new_coll_mat
# # log_info 'cal_plate matrix', cal_plate.matrix
# pbs_wells = cal_plate.select {|well| well == flour_samp.id}.select {|r, c| c != 0}
# # direct tech to fill new calibration plate
# show do
# title "Creating a New Calibration Plate"
# separator
# note "You will need <b>#{(pbs_wells.length * 0.1) + 0.1}mL</b> of 1X PBS for the next step."
# note "Follow the table below to dispense 1X PBS in the appropriate wells:"
# table highlight_rc(cal_plate, pbs_wells) {|r,c| "100µl"}
# end
# flour_serial_image = "Actions/Yeast_Gates/plateReaderImages/flour_serial_dilution.png"
# show do
# title "Serial Dilution of Flourescein"
# separator
# note "From the 50µM Fluorescein solution, dispense <b>200µl</b> in wells <b>A1, B1, C1, D1</b>"
# note "Following the image below, transfer <b>100µl</b> of 50µM Fluorescein solution in Column 1 to Column 2"
# note "Resuspend by pipetting up and down 3X"
# note "Repeat until column 11 and discard the remaining <b>100µl</b>."
# image flour_serial_image
# end
# ludox_wells = cal_plate.select {|well| well == ludox_samp.id}
# show do
# title "Creating a New Calibration Plate"
# separator
# note "Follow the table below to dispense #{ludox_samp.name} into the appropriate wells."
# table highlight_rc(cal_plate, ludox_wells) {|r,c| ludox_vol(r, c)}
# end
# h2o_wells = cal_plate.select {|well| well == h2o_samp.id}
# show do
# title "Creating a New Calibration Plate"
# separator
# note "Follow the table below to dispense #{h2o_type} into the appropriate wells."
# table highlight_rc(cal_plate, h2o_wells) {|r,c| ludox_vol(r, c)}
# end
# # Assocaite todays_date with item
# Item.find(cal_plate.id).associate('date_created', todays_date)
# release cal_items, interactive: true
# return cal_plate
# end
# def ludox_vol(row, col)
# if col < 4
# return "100µl"
# elsif col.between?(4, 7)
# return "200µl"
# else col.between?(7, 11)
# return "300µl"
# end
# end
# # This function directs tech to measure calibration plate on plate reader and export data; it also associates data from plate reader
# #
# # @params cal_plates [Array] an array of item objects
# #
# def measure_cal_plate(cal_plates)
# cal_plate = cal_plates.first
# # measure on plate reader
# set_up_plate_reader(cal_plate, CAL_TEMPLATE_FILENAME)
# # Export a file for each measurement - Can the plate Reader export in xml?
# CAL_MEASUREMENTS.each do |method|
# timepoint = nil # Is nil since it is not being measured along with this experiment
# filename = export_data(cal_plate, timepoint, method=method)
# # Show block upload button and retrieval of file uploaded
# up_show, up_sym = upload_show(filename)
# if (up_show[up_sym].nil?)
# show {warning "No upload found for calibration measurement. Try again!!!"}
# up_show, up_sym = upload_show(filename)
# else
# upload = find_upload_from_show(up_show, up_sym)
# key = "#{todays_date}_#{method}"
# # Need to associate data to all plans that are batched in job
# associate_to_plan(upload, key)
# # Associates upload to calibration plate and plan
# cal_plates.each do |cal_plate|
# associate_to_item(cal_plate, key, upload)
# # Associates data hash of measurements to item/collection - extract info from plate reader upload and associate with item
# associate_PlateReader_Data(upload, cal_plate, method, timepoint)
# end
# end
# end
# cal_plates.shift.location = '4°C Fridge'
# cal_plates.each {|plt| plt.mark_as_deleted}
# end
#-------------------------------------------------PlateReaderControl------------------------------------------------------------------#
end # module
|
malloc3/YG_Harmonization
|
libraries/YG_Controls.rb
|
# By: <NAME>
# <EMAIL>
# Updated: 08/15/18
# This library helps with creating additional controls for the YG_Harmonization workflow
needs 'Standard Libs/AssociationManagement'
module YG_Controls
include AssociationManagement
include PartProvenance
def creating_neg_pos_wt_staining_control(in_collection, out_collection, output_cult_dest, cult_vol_mat, media_vol_mat, input_cult_coords, samp_id=22544) #Sync by OD
if debug
samp_id = 22544 # WT 22544 is not in Nursery
end
# Finding where WT is on the input collection in order to copy cult and media vol for synchronization of WT controls
input_wt_cult_coord = find_input_wt_cult_coord(collection=in_collection)
# log_info 'input_wt_cult_coord creating control', input_wt_cult_coord
# Found the coordinate in which wt is in now copy the media vol and the culture vol
neg_pos_wt_cult_coord_destination = ['H7', 'H8'] # Where I want control cults to be in the output plate
neg_pos_wt_cult_coord_destination = find_rc_from_alpha_coord(alpha_coord=neg_pos_wt_cult_coord_destination)
neg_pos_stn_wt_cult_vol = input_wt_cult_coord.map {|r,c| cult_vol_mat[r][c]}.first
neg_pos_stn_wt_media_vol = input_wt_cult_coord.map {|r,c| media_vol_mat[r][c]}.first
neg_pos_wt_cult_coord_destination.each {|r,c|
cult_vol_mat[r][c] = neg_pos_stn_wt_cult_vol
media_vol_mat[r][c] = neg_pos_stn_wt_media_vol
input_cult_coords.push([r, c])
out_coll_matrix = out_collection.matrix
out_coll_matrix[r][c] = samp_id # sample_id Diploid WT
out_collection.matrix = out_coll_matrix
out_collection.save
}
return input_cult_coords, cult_vol_mat, media_vol_mat
end
def creating_pos_gfp_control(out_collection, input_plate_ods, final_output_vol, cult_vol_mat, media_vol_mat, input_cult_coords, samp_id=6390) # Sync by OD
gfp_input_cult_coord = 'H9' # gfp culture is in the same place that it will be in the deep well experimental plate
gfp_input_cult_coord_destination = find_rc_from_alpha_coord(alpha_coord=gfp_input_cult_coord)
gfp_input_cult_vol, gfp_input_media_vol = sync_gfp_control(gfp_input_cult_coord=gfp_input_cult_coord, gfp_output_cult_coord=gfp_input_cult_coord, input_plate_ods, final_output_vol) #YG_Controls
log_info 'gfp_input_cult_vol', gfp_input_cult_vol, 'gfp_input_media_vol',gfp_input_media_vol
gfp_input_cult_coord_destination.each {|r,c|
cult_vol_mat[r][c] = gfp_input_cult_vol
media_vol_mat[r][c] = gfp_input_media_vol
input_cult_coords.push([r, c])
out_coll_matrix = out_collection.matrix
out_coll_matrix[r][c] = samp_id # sample_id NOR00 1.0
out_collection.matrix = out_coll_matrix
out_collection.save
}
return input_cult_coords, cult_vol_mat, media_vol_mat
end
# Will find where in a collection diploid WT is located and return [[r,c]]
def find_input_wt_cult_coord(collection)
wt_cult_coord = []
collection.matrix.each_with_index.map {|row, r_idx|
row.each_with_index.map {|col, c_idx|
wt_sample_id = []
if debug
wt_sample_id = [1, 30, 22544, 22801]
else
wt_sample_id = [30, 22544, 22801]
end
# Once wt sample_id is found in the collection return [[r,c]]
if wt_sample_id.include? col
wt_cult_coord.push([r_idx, c_idx])
break
end
}
}
return wt_cult_coord
end
# Will add a positive gfp colony (NOR00) to a desired well in a collection
#
# @params collection [collection obj] the collection to which the gfp control will be added
# @params well [string] the alpha numeric coordinate that the gfp colony will be added to
# @returns need_to_create_new_control_plate[:make_new_plate] [boolean] will return true or false based on user input
def adding_positive_gfp_control(collection, well='H9')
strain_sample_id = 6390 # NOR_00 1.0
obj_type = "Yeast Plate"
# Find the plate created for the gfp positive control
positive_gfp_control_plate = find(:item, { sample: { id: strain_sample_id }, object_type: { name: obj_type } } ).select {|item|
item.get('YG_Control') == 'positive_gfp'
}.first # Key: YG_Control, Value: 'positive_gfp' - previously associated value to sample and item
take [positive_gfp_control_plate], interactive: true
display_rc_list = find_rc_from_alpha_coord(well)
show do
title "Adding Positive GFP Control"
separator
note "To 96 Flat Bottom Plate <b>#{collection.id}</b>:"
bullet "Fill <b>#{well}</b> with 200µl of liquid SC media"
bullet "Pick colony from Yeast Plate <b>#{positive_gfp_control_plate.id}</b> & resuspend in the well highlighted below"
table highlight_alpha_rc(collection, display_rc_list) {|r,c| "#{positive_gfp_control_plate.id}"}
check "<b>Finally, place clear lid on top and tape shut before placing it on the plate shaker.</b>"
end
# Associate provenance data between control plate and collection
control_plate_associations = AssociationMap.new(positive_gfp_control_plate)
display_rc_list.each do |r, c|
# Add control strain to collection
collection_sample_matrix = collection.matrix
collection_sample_matrix[r][c] = strain_sample_id
collection.matrix = collection_sample_matrix
part = collection.part(r,c)
part_associations = AssociationMap.new(part)
add_provenance({
from: positive_gfp_control_plate,
from_map: control_plate_associations,
to: part,
to_map: part_associations,
additional_relation_data: { source_colony: 1, process: "resuspension" }
})
# Associate additional data to this part
part_associations.put('control', "positive_gfp")
part_associations.save
# Add control strain to collection
# collection_sample_matrix = collection.matrix
# collection_sample_matrix[r][c] = strain_sample_id
# collection.matrix = collection_sample_matrix
end
control_plate_associations.save
need_to_create_new_control_plate = show do
title "Checking Control Plate #{positive_gfp_control_plate}"
separator
select ["Yes", "No"], var: "make_new_plate" , label: "Are there colonies left to be picked?" , default: 1
end
release [positive_gfp_control_plate], interactive: true
return need_to_create_new_control_plate[:make_new_plate].to_s
end
# Finds where an alpha_coordinate is in a 96 Well plate
#
# @params alpha_coord [array or string] can be a single alpha_coordinate or a list of alpha_coordinate strings ie: 'A1' or ['A1','H7']
# @return rc_list [Array] a list of [r,c] coordinates that describe where the alpha_coord(s) are in a 96 well matrix
def find_rc_from_alpha_coord(alpha_coord)
# look for where alpha coord is 2-D array coord
coordinates_96 = ('A'..'H').to_a.map {|row| (1..12).to_a.map {|col| row + col.to_s}}
rc_list = []
if alpha_coord.instance_of? Array
# alpha_coord = alpha_coord.map {|a| a.upcase}
alpha_coord.each {|a_coord|
coordinates_96.map.each_with_index { |row, r_idx| row.each_index.select {|col| row[col] == a_coord.upcase}.each { |c_idx| rc_list.push([r_idx, c_idx]) } }
}
else
coordinates_96.map.each_with_index { |row, r_idx| row.each_index.select {|col| row[col] == alpha_coord.upcase}.each { |c_idx| rc_list.push([r_idx, c_idx]) } }
end
return rc_list
end
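# Usage sketch for find_rc_from_alpha_coord (illustrative, not part of the original library):
#   find_rc_from_alpha_coord('H7')          #=> [[7, 6]]
#   find_rc_from_alpha_coord(['A1', 'H9'])  #=> [[0, 0], [7, 8]]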
def diluting_gfp_control(in_collection, out_collection, final_od=0.0003)
# Build an association map for the input collection so per-part data can be read before looping
in_coll_map = AssociationMap.new(in_collection)
in_data_matrix = in_coll_map.get_data_matrix
in_data_matrix.each_with_index do |row, r_idx|
row.each_with_index do |part_data, c_idx|
if !part_data.nil?
if part_data['control'] == 'positive_gfp'
# record relation between input and output parts, well position is the same
# for this transfer
from_part = in_collection.part(r_idx, c_idx)
to_part = out_collection.part(r_idx, c_idx)
in_map = AssociationMap.new(from_part)
out_map = AssociationMap.new(to_part)
add_provenance({
from: from_part,
from_map: in_map,
to: to_part,
to_map: out_map,
additional_relation_data: { process: "dilution" }
})
# Associate additional data to this part on output collection
out_map.put('control', "positive_gfp")
out_map.put('od600', "#{final_od}")
in_map.save
out_map.save
# manually populate sample_id matrix of output collection
out_matrix = out_collection.matrix
out_matrix[r_idx][c_idx] = in_collection.matrix[r_idx][c_idx]
out_collection.matrix = out_matrix
out_collection.save
end
end
end
end
end
def sync_gfp_control(gfp_input_cult_coord, gfp_output_cult_coord, input_plate_ods, final_output_vol)
gfp_input_cult_coord = find_rc_from_alpha_coord(alpha_coord=gfp_input_cult_coord)
gfp_input_cult_od = gfp_input_cult_coord.map {|r,c| input_plate_ods[r][c]}.first * 10 #dilution 1:10
gfp_input_cult_vol = ((0.0003*final_output_vol)/gfp_input_cult_od) * 1000 # converting to ul
gfp_input_media_vol = (1000.0 - gfp_input_cult_vol).round(2)
return gfp_input_cult_vol, gfp_input_media_vol
end
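# Worked example for sync_gfp_control (illustrative numbers): if the OD measured at H9 is 0.05
# (a 1:10 dilution, so a true OD of 0.5) and final_output_vol is 1 mL, then
# cult vol = ((0.0003 * 1) / 0.5) * 1000 = 0.6 µl and media vol = 1000 - 0.6 = 999.4 µl.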
end # Module
|
malloc3/YG_Harmonization
|
libraries/HighThroughput_Lib.rb
|
# By: <NAME>
# <EMAIL>
# This library contains functions that aid in yeast high throughput screening.
# Ie: Calculations, formatting collections, etc...
needs "Standard Libs/AssociationManagement"
needs "Standard Libs/MatrixTools"
module HighThroughput_Lib
include AssociationManagement, MatrixTools
include PartProvenance
# Finds where an alpha_coordinate is in a 96 Well plate
#
# @params alpha_coord [array or string] can be a single alpha_coordinate or a list of alpha_coordinate strings ie: 'A1' or ['A1','H7']
# @return rc_list [Array] a list of [r,c] coordinates that describe where the alpha_coord(s) are in a 96 well matrix
def find_rc_from_alpha_coord(alpha_coord)
# look for where alpha coord is 2-D array coord
coordinates_96 = ('a'..'h').to_a.map {|row| (1..12).to_a.map {|col| row + col.to_s}}
rc_list = []
if alpha_coord.instance_of? Array
alpha_coord = alpha_coord.map {|a| a.downcase}
alpha_coord.each {|a_coord|
coordinates_96.map.each_with_index { |row, r_idx| row.each_index.select {|col| row[col] == a_coord}.each { |c_idx| rc_list.push([r_idx, c_idx]) } }
}
else
coordinates_96.map.each_with_index { |row, r_idx| row.each_index.select {|col| row[col] == alpha_coord.downcase}.each { |c_idx| rc_list.push([r_idx, c_idx]) } }
end
return rc_list
end
# Fills collection matrix with sample_ids based on how many biological replicates requested
#
# @params collection [collection] collection to be filled with biological replicates
# @params items [array] an array of items that biological replicates will be taken from
# @params bio_reps [integer] comes from protocol parameter altered to an integer
# @return collection [collection] filled collection with same dimensions
def fill_collection_mat(collection, items, bio_reps)
items.each do |item|
colony_num = 0
item_associations = AssociationMap.new(item)
bio_reps.times do
r, c, x = collection.add_one(item.sample_id)
part = collection.part(r, c)
part_associations = AssociationMap.new(part)
# record historical relation between item and target collection part, using PartProvenance
add_provenance({
from: item,
from_map: item_associations,
to: part,
to_map: part_associations,
additional_relation_data: { source_colony: colony_num, process: "resuspension" }
})
part_associations.save
colony_num += 1
end
item_associations.save
end
return collection
end
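# Usage sketch for fill_collection_mat (illustrative; the item ids below are made up):
#   collection = Collection.find(411551)          # an empty 96-well collection
#   items = [Item.find(1001), Item.find(1002)]    # yeast plates to pick colonies from
#   fill_collection_mat(collection, items, 3)     # each item's sample_id fills 3 consecutive wells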
def alpha_coords_96_matrix()
('a'..'h').to_a.map {|row| (1..12).to_a.map {|col| row + col.to_s}}
end
# Empty collection matrix
#
# @params collection [collection] collection you wish to empty
# @return collection [collection] emptied collection with same dimensions
def blank_collection_mat(collection)
# empty out_coll
rc_list = collection.get_non_empty
rc_list.map {|r,c| collection.set(r,c,-1)}
return collection
end
end # Module
|
malloc3/YG_Harmonization
|
libraries/SynchronizationLib.rb
|
module SynchronizationLib
FINAL_OD = [0.0003, 0.00015, 0.000075]
FINAL_OUTPUT_VOL = 1 # mL
ROWS = ('A'..'H').to_a
COLS = (1..12).to_a
def sync_experimental_cultures(in_collection, out_collection, input_plate_ods, bio_reps)
# Coordinates of wells in input collection that have experimental cultures
# input_cult_coords = in_collection.get_non_empty.select {|r,c| r != 7 && !in_collection.matrix[r][c].nil?}
input_cult_coords = in_collection.get_non_empty.select {|r,c| r != 7}
# log_info 'input_cult_coords', input_cult_coords, in_collection.get_non_empty
# Create an array of ods from non empty wells
input_plate_ods = input_cult_coords.map {|r, c| input_plate_ods[r][c]}
# log_info 'input_plate_ods', input_plate_ods
# Calculates the average OD by slices of 6 wells - PlateReaderMethods
average_ods = average_ods(input_cult_coords, input_plate_ods, bio_reps) # bio_rep well average ODs of plate
# log_info 'average ods', average_ods
# Calculates volumes from culture and media to obtain necessary final OD
ave_cult_vols_ul, ave_media_vols_ul = ave_cult_media_vol(average_ods) # 6 well average vols
# log_info 'ave_cult_vols_ul', ave_cult_vols_ul
# log_info 'ave_media_vols_ul', ave_media_vols_ul
# Creates a matrix based on the length of row averages and number of wells measured - Used to display volumes for tech
cult_vol_mat = matrix_mapping(input_plate_ods, ave_cult_vols_ul, bio_reps)
media_vol_mat = matrix_mapping(input_plate_ods, ave_media_vols_ul, bio_reps)
# log_info 'cult_vol_mat', cult_vol_mat
# log_info 'media_vol_mat', media_vol_mat
# 4. Calc true ODs based on cult_vol_ul and original ODs
# Calculates the actual OD of each well based on the input well/cult OD and the average cult vol used for that row
actual_ods_mat = actual_ods(input_plate_ods, ave_cult_vols_ul, bio_reps)
# log_info 'actual_ods_mat', actual_ods_mat
# 5. Associate new actual ODs to output collection - t = 0
associate_true_ods_item(out_collection, actual_ods_mat)
out_collection.matrix = out_coll_samp_id_mat(in_collection)
return input_cult_coords, cult_vol_mat, media_vol_mat
end
# This function directs tech to pre-fill deep well plate with required media
#
# @params out_coll [collection obj] the output collection object
# @params media_vol_mat [2-D Array] is a matrix of the media volume per well in µl
# @params media [string] describes the type of media to be used in the experiment
def aliquot_media(out_coll, media_vol_mat, media)
# Direct tech to fill output plate with media
tot_media_vol = 0
# show do
# title 'media vol mat - aliquot_media()'
# note "#{media_vol_mat}"
# end
media_vol_mat.flatten.select {|vol| vol != -1 }.each {|vol|
tot_media_vol += vol if vol
}
rc_list = out_coll.get_non_empty.select {|r,c| !out_coll.matrix[r][c].nil? }
log_info 'rc_list sync lib', rc_list
show do
title "Filling New 96 Deep Well Plate #{out_coll}"
separator
note "For this step you will need:"
check "96 Well Deep U-bottom Plate and label with <b>#{out_coll.id}</b>"
check "Multichannel Reservoir"
check "Breathable AeraSeal Plate Cover"
check "<b>#{((tot_media_vol/1000) + 1).round(1)}mLs</b> of <b>#{media}</b> liquid growth media."
end
show do
title "Filling 96 Deep Well Plate #{out_coll}"
separator
note "Follow the table bellow to aliquot the appropriate amount of <b>#{media}</b> media to the respective well:"
table highlight_alpha_rc(out_coll, rc_list) { |r, c| "#{(media_vol_mat[r][c]).round(1) if media_vol_mat[r][c]} µl" }
end
end
# Directs tech and inoculates output collection with cultures from the input collection
#
# @param in_coll [collection obj] the input collection
# @param out_coll [collection obj] the output collection
# @param input_cult_coords [Array] one dimensional array that contains the coordinates of the input collection cultures
# @param cult_vol_mat [2-D Array] matrix that contains the volume required to inoculate the output collection
def inoculate_plate(in_coll, out_coll, input_cult_coords, cult_vol_mat)
# Creates a matrix with row column coordinates from the input collection - Will be used to direct tech which input wells to dilute in the output collection
in_out_map = input_cult_coords.map {|r, c| (ROWS[r] + COLS[c].to_s)}.select {|coord| !coord.include? "H"} # [0,0] --> "A1"
in_out_map_mat = FINAL_OD.map {|f_od| in_out_map }.flatten.each_slice(12).to_a
in_out_map_mat.each {|arr|
if arr.length != 12
(12-arr.length).times do
arr.push(-1)
end
end
}
if in_out_map_mat.length != 8
(8 - in_out_map_mat.length).times do
in_out_map_mat.push(Array.new(12) {-1})
end
end
# Adding alpha numeric coordinates for controls
input_control_cults_coords = []
input_control_cults_coords.push(find_input_wt_cult_coord(collection=in_coll).map {|r, c| (ROWS[r] + COLS[c].to_s)}.first) # Creating 2 WT control cultures
input_control_cults_coords.push(find_input_wt_cult_coord(collection=in_coll).map {|r, c| (ROWS[r] + COLS[c].to_s)}.first)
gfp_control_coord = 'H9'
input_control_cults_coords.push(gfp_control_coord)
# input_control_cults_coords.each {|control_coord| in_out_map_mat[in_out_map_mat.length - 1].push(control_coord)}
# in_out_map_mat.push(input_control_cults_coords)
in_out_map_mat[7][6] = input_control_cults_coords[0]
in_out_map_mat[7][7] = input_control_cults_coords[1]
in_out_map_mat[7][8] = input_control_cults_coords[2]
display_coords = out_coll.get_non_empty.select {|r,c| !out_coll.matrix[r][c].nil? }.each_slice(in_out_map.select{|coord| !coord.include? "H"}.length).to_a
# Diluting cultures 1:10 before transferring
show do
title "Dilute Cultures in Item #{in_coll}"
separator
check "Perform a 1:10 dilution on cultures"
bullet "10ul of culture to 90ul of media"
end
if debug
show do
title "Debugging"
note "out_coll_#{out_coll}"
note "display_coords_#{display_coords}"
note "in_out_map_mat_#{in_out_map_mat}"# ***
note "cult_vol_mat_#{cult_vol_mat}"
end
end
display_coords.each do |rc_list|
show do
title "Innoculating New 96 Deep Well Plate #{out_coll}"
separator
bullet "The coordinates correspond to wells from 96 Flat Bottom Plate <b>#{in_coll.id}</b>."
note "Follow the table below to inoculate the filled 96 Deep Well Plate with the appropriate volume and culture:"
table highlight_alpha_rc(out_coll, rc_list) {|r, c| "#{in_out_map_mat[r][c] if in_out_map_mat[r][c]}\n#{cult_vol_mat[r][c].round(1) if cult_vol_mat[r][c]}µl"}
end
end
group_by_collection = operations.group_by {|op| op.input("96 Well Flat Bottom").collection}
growth_temperature = group_by_collection[in_coll].first.input("Growth Temperature (°C)").val
Item.find(out_coll.id).associate('growth_temperature', growth_temperature)
# Move output plate (96DW Plate to incubator)
out_coll.location = "#{growth_temperature}C Incubator Shaker @ 800 rpm"
out_coll.save
### IF USING THE SAME PLATE FOR MULTIPLE SYNCS THEN WHEN SHOULD WE DELETE INCOLLECTION
# in_coll.mark_as_deleted
release([out_coll], interactive: true)
end
# Based on the number of dilutions (Final ODs) create a new matrix with sample ids in the correct organization
#
# @params in_coll [collection] the input collection in order to obtain the sample id matrix
# @return out_samp_id_mat [2-D Array] matrix containing new sample id matrix; spread out input collection sample ids
def out_coll_samp_id_mat(in_coll)
if debug
in_coll = Collection.find(411551)
end
output_samp_ids = []
(FINAL_OD.length).times do
in_coll.matrix.each_with_index do |row, r_idx|
row.each_with_index do |well, c_idx|
if !well.nil?
if r_idx != 7
output_samp_ids.push(well) if well > -1
end
end
end
end
end
# Filling in blank/empty wells with -1
out_samp_id_mat = output_samp_ids.each_slice(12).to_a
out_samp_id_mat.each {|row|
if row.length != 12
(12 - row.length).times do
row.push(-1)
end
end
}
if out_samp_id_mat.length != 8
(8 - out_samp_id_mat.length).times do
out_samp_id_mat.push(Array.new(12) {-1})
end
end
return out_samp_id_mat
end
# Finds volume needed from input culture and the necessary media volume for the output culture/well
#
# @params row_od_aves [array] array of the average ODs by row
# @return cult_vols_ul [array] array of culture volumes found based on the average row ods
# @return media_vols_ul [array] array of media minus the culture volume
def ave_cult_media_vol(row_od_aves)
cult_vols_ul = []
media_vols_ul = []
FINAL_OD.each do |f_od|
c_vol = row_od_aves.map {|ave_od| ave_od == 0.0 ? 0.0 : ((f_od * FINAL_OUTPUT_VOL)/ave_od) * 10000.0} # 10,000 includes the 1:10 dilution
m_vol = c_vol.map {|vol| vol == 0.0 ? 0.0 : (1000.0 - vol).round()}
cult_vols_ul.push(c_vol)
media_vols_ul.push(m_vol)
end
return cult_vols_ul, media_vols_ul
end
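# Worked example for ave_cult_media_vol (illustrative): with FINAL_OD = [0.0003, 0.00015, 0.000075]
# and a single average OD of 0.25,
#   ave_cult_media_vol([0.25])
#   #=> culture vols [[12.0], [6.0], [3.0]] (µl) and media vols [[988], [994], [997]] (µl)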
# Averages 2-D array across rows. Turns 2-D array into 1-D array of averages
#
# @param input_plate_ods [2-D Array] matrix containing the ODs of a 96 well plate measured on the BioTek Plate Reader
# @param
# @return row_od_aves [Array] array of averages across rows of the input matrix
def average_ods(input_cult_coords, input_plate_ods, bio_reps)
# slice = 6
average_ods = []
input_plate_ods.each_slice(bio_reps).to_a.map do |arr|
tot_od = 0.0
arr.each {|od| tot_od += od}
average_ods.push(tot_od/arr.length)
end
return average_ods
end
# Creates a matrix based on the number of row averages and number of wells measured
#
# @params input_plate_ods [Array] one dimensional array of non empty wells from input collection
# @params ave_arr [2-D Array] matrix created from ave vol calculated based on the ave od of each slice(6 wells)
# @return matrix [2-D Array] matrix with all volumes needed for display onto a 8x12 matrix
def matrix_mapping(input_plate_ods, ave_arr, bio_reps)
matrix = []
input_slices = input_plate_ods.each_slice(bio_reps).to_a
ave_arr.each do |arr|
input_slices.each_with_index do |slice, i|
slice.each {|well| matrix.push(arr[i])}
end
end
matrix = matrix.flatten
if matrix.length != 96
(96 - matrix.length).times do
matrix.push(-1)
end
end
return matrix.each_slice(12).to_a
end
# Calculates the actual OD of each well based on the input well/cult OD and the average cult vol for that row
#
# @params input_plate_ods [array] 1 dim array with all the ODs that were measured from the input collection
# @params cult_vols_ul [2-D Array] a matrix of average culture volume needed to reach requested final OD
# @return actual_ods_mat [2-D Array] a matrix of the calculated actual OD in slices of 12 to fit the 96 well format
def actual_ods(input_plate_ods, cult_vols_ul, bio_reps)
actual_ods = []
well_ods_slices = input_plate_ods.each_slice(bio_reps).to_a
cult_vols_ul.each_with_index do |ave_cult_vol, i|
ave_cult_vol.each_with_index do |c_vol, ii|
well_ods_slices[ii].each {|w_od| actual_ods.push(((w_od * (c_vol/1000.0))/FINAL_OUTPUT_VOL).round(6))}
end
end
actual_ods_mat = actual_ods.each_slice(12).to_a
return actual_ods_mat
end
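# Worked example for actual_ods (illustrative numbers): with measured ODs [0.25, 0.25, 0.25],
# bio_reps = 3 and average culture vols [[12.0], [6.0], [3.0]] (see ave_cult_media_vol above),
# each well's actual OD is (0.25 * (vol / 1000)) / FINAL_OUTPUT_VOL, giving a first row of
# [0.003, 0.003, 0.003, 0.0015, 0.0015, 0.0015, 0.00075, 0.00075, 0.00075].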
# given a r,c from one of the first 30 wells and an od level, figures out the location
# of the replicate on the output plate
def get_rc_out_from_rc_in_and_od_no(r,c,od, num_input_samples)
absolute_rc = r * 12 + c
if absolute_rc > 30
raise "rc in is not one of the first 30 samples"
end
adjusted_absolute = absolute_rc + od * num_input_samples
r_out = adjusted_absolute / 12
c_out = adjusted_absolute % 12
return r_out, c_out
end
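# Worked example for get_rc_out_from_rc_in_and_od_no: get_rc_out_from_rc_in_and_od_no(0, 5, 1, 30)
#   absolute_rc = 0 * 12 + 5 = 5; adjusted = 5 + 1 * 30 = 35  #=> r_out = 2, c_out = 11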
def get_bio_reps_from_outgrowth_plate(collection)
if debug
return 3
else
return Item.find(collection.id).get('bio_reps')
end
end
end #Module
|
malloc3/YG_Harmonization
|
libraries/Upload_PlateReader_Data.rb
|
# By: <NAME>
# <EMAIL>
# This library contains functions that aid in uploading data that comes from yeast high throughput screening measurements
# Ie: Plate reader measurements
needs 'Standard Libs/MatrixTools'
needs 'Standard Libs/AssociationManagement'
module Upload_PlateReader_Data
require 'csv'
require 'open-uri'
include MatrixTools
include AssociationManagement
# Takes in a csv upload file in a tabular format, extracts the information on it
# into a datamatrix object which is returned.
# Specifically tuned to the output file of the biotek plate reader.
#
# @param upload [Upload] the object which can be resolved to calibration csv
# @return [WellMatrix] a WellMatrix holding the measurement for each well
def extract_measurement_matrix_from_csv(upload)
url = upload.url
table = []
CSV.new(open(url)).each { |line| table.push(line) }
dm = WellMatrix.create_empty(96, -1) if table.size > 25
dm = WellMatrix.create_empty(24, -1) if table.size <= 25
table.each_with_index do |row, idx|
next if idx.zero?
well_coord = row[2]
next if well_coord.nil?
next if row[3].nil?
measurement = row[3].to_f
dm.set(well_coord, measurement)
end
dm
end
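# Usage sketch for extract_measurement_matrix_from_csv (illustrative; assumes `upload` resolves to a
# plate reader CSV with the well coordinate in the third column and the value in the fourth, as parsed above):
#   dm = extract_measurement_matrix_from_csv(upload)
#   dm[0, 0]   #=> the measurement recorded for well A1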
# Returns the average OD measurement for different dilutions and well volumes.
# The plotted result of this method can be fit to a curve
# to be used for calibrating the plate reader. This is very specific to
# Eriberto's calibration of the biotek plate reader.
#
# @param upload [Upload] the object which can be resolved to calibration csv
# @return [Hash] a hash containing averaged measurements for
# every concentration and volume tested
def get_calibration_data_hash(upload)
method = upload.name
dm = extract_measurement_matrix_from_csv(upload)
result = {}
data_by_conc = Hash.new { |h, key| h[key] = [0, 0] }
if method.include? 'gfp'
# show {note "#{method}"}
starting_concentration = 50.0#uM
# first 4 rows are serial dilutions
for i in 0...4
12.times do |j|
# each column is a 2x dilution of the previous, starting at 50uM
this_conc = starting_concentration / (2**j)
data = data_by_conc[this_conc]
data[0] += dm[i, j].to_f
data[1] += 1
data_by_conc[this_conc] = data
end
end
# add serial dilution averages to result hash
data_by_conc.each_key do |k|
data = data_by_conc[k]
result[k] = data[0] / data[1]
end
return result
elsif method.include? 'od'
# row 5, 6 are lud dilutions and pure solution respectively
for i in 4...6
for j in 0...4
data_by_conc["100_#{i}"][0] += dm[i, j].to_f
data_by_conc["100_#{i}"][1] += 1
end
for j in 4...8
data_by_conc["200_#{i}"][0] += dm[i, j].to_f
data_by_conc["200_#{i}"][1] += 1
end
for j in 8...12
data_by_conc["300_#{i}"][0] += dm[i, j].to_f
data_by_conc["300_#{i}"][1] += 1
end
end
# add lud averages to result hash
for i in 1..3
lud_avg = data_by_conc["#{i}00_4"][0] / data_by_conc["#{i}00_4"][1]
sol_avg = data_by_conc["#{i}00_5"][0] / data_by_conc["#{i}00_5"][1]
result["lud#{i}00"] = (lud_avg - sol_avg).round(5) # Returns blanked averages
end
end
result
end
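# Shape of the hash returned by get_calibration_data_hash (values are illustrative, not real measurements):
#   'cal_gfp' upload #=> { 50.0 => 2400.0, 25.0 => 1250.0, 12.5 => 640.0, ... }  (uM => averaged fluorescence)
#   'cal_od' upload  #=> { "lud100" => 0.04, "lud200" => 0.08, "lud300" => 0.12 } (blanked LUDOX averages)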
# Provides an upload button in a show block in order to upload a single file
#
# @params upload_filename [string] can be the name of the file that you want tech to upload
# @return up_show [hash] is the upload hash created in the upload show block
# @return up_sym [symbol] is the symbol created in upload show block that will be used to access upload
def upload_show(upload_filename)
upload_var = "file"
up_sym = upload_var.to_sym
up_show = show do
title "Upload Your Measurements"
note "Select and Upload: #{upload_filename}"
upload var: "#{upload_var}"
end
return up_show, up_sym
end
# Retrieves the upload object from upload show block
#
# @params up_show [hash] is the hash that is created in the upload show block
# @params up_sym [symbol] is the symbol created in the upload show block and used to access file uploaded
# @return upload [upload_object] is the file that was uploaded in the upload show block
def find_upload_from_show(up_show, up_sym)
# Makes a query to find the uploaded file by its default :id
upload = up_show[up_sym].map {|up_hash| Upload.find(up_hash[:id])}.shift
return upload
end
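# Usage sketch showing how upload_show and find_upload_from_show are typically chained
# (illustrative; the filename is made up):
#   up_show, up_sym = upload_show("2018_10_01_od.csv")
#   upload = find_upload_from_show(up_show, up_sym) unless up_show[up_sym].nil?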
# Associates an upload to an item - DEPRECATED
# #
# # @params collection [collection obj] can be the collection that you wish to associate upload to
# # @params upload [upload_obj] the file that you wish to associate to item
# # @params key [string] the key to the association it will also appear as description when looking at item
# def associate_to_item(collection, upload, key)
# Item.find(collection.id).associate key.to_sym, "item_#{collection.id}", upload
# end
# Associates an upload to an item
#
# @params in_obj [obj] can be the collection that you wish to associate upload to
# @params upload [upload_obj] the file that you wish to associate to item
# @params key [string] the key to the association it will also appear as description when looking at item
def associate_to_item(in_obj, key, upload)
item_assoc = AssociationMap.new(in_obj)
item_assoc.put(key.to_sym, upload)
item_assoc.save
end
# Associates an upload to the plan that it was uploaded in - Still needed for YG_Harmonization calibration associations
#
# @params upload [upload_obj] the file that you wish to associate to plan
# @params key [string] the key to the association it will also appear as description when looking at item
def associate_to_plan(upload, key)
plan = operations.map {|op| op.plan}.first
plan.associate key.to_sym, "plan_#{plan.id}", upload
end
# Associates an upload to the plans that it was uploaded in
#
# @params data [obj] the thing that you wish to associate to plan
# @params key [string] the key to the association it will also appear as description when looking at item
def associate_to_plans(key, data)
# iterate over ops, find all unique plans, associate to each plan, ensure copying
plans = operations.map { |op| op.plan }.uniq
plans.each do |plan|
plan_associations = AssociationMap.new(plan)
plan_associations.put(key.to_sym, data)
plan_associations.save
end
end
# Opens file using its url and stores it line by line in a matrix
#
# @params upload [upload_obj] the file that you wish to read from
# @return matrix [2D-Array] is the array of arrays of the rows read from file, if csv
def read_url(upload)
url = upload.url
matrix = []
CSV.new(open(url)).each {|line| matrix.push(line)}
# open(url).each {|line| matrix.push(line.split(',')}
return matrix
end
# Takes csv matrix and formats data for OD measurements - Biotek Plate reader
#
# @params matrix [2D-Array] can be array of arrays containing od measurements
# @return hash [hash] is hash created from matrix parameter
def matrix_to_hash(matrix)
hash = Hash.new(0)
cols = matrix.shift.select {|col| col != nil}
rows = []
data = []
ods = matrix.map do |arr|
rows.push(arr.shift) # first index is row letter
arr.pop() # Strips off last index
arr.map! {|str| str.to_f} # converts strings to float to include dilution factor
data.push(arr)
arr.map {|od| od} # Good place to include dilution factor
end
hash["cols"] = cols
hash["rows"] = rows
hash["data"] = data
# hash["optical_density"] = ods
return hash
end
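# Usage sketch for matrix_to_hash (illustrative CSV-style matrix; values are made up):
#   matrix_to_hash([[nil, "1", "2"], ["A", "0.12", "0.30", "x"], ["B", "0.15", "0.28", "x"]])
#   #=> { "cols" => ["1", "2"], "rows" => ["A", "B"], "data" => [[0.12, 0.30], [0.15, 0.28]] }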
# Reads uploaded file and associates data to a given item/collection
#
# @params upload [upload obj] upload (csv) that is going to be read and processed
# @params collection [collection obj] collection that the data will be associated to
# @params method [string] the type of measurement that was taken (od or gfp)
# @params timepoint [integer] the number of hours that data was collected at
def associate_PlateReader_Data(upload, collection, method, timepoint)
up_name = upload.name.downcase
up_ext = up_name.split('.')[1]
if up_ext.downcase == 'csv'
# If calibration measurement will be associated with item and plan
collection_associations = AssociationMap.new(collection)
if up_name.include? 'cal'
key = method == 'cal_gfp' ? 'cal_fluorescence' : 'cal_optical_density'
cal_hash = get_calibration_data_hash(upload) # from BiotekPlateReader Lib
data_hash = Hash.new(0)
if method == 'cal_gfp'
# Fluorescence std curve & r-sq value
slope, yint, x_arr, y_arr = gfp_standard_curve(cal_hash)
r_sq = r_squared_val(slope, yint, x_arr, y_arr)
trendline = "y = #{slope}x + #{yint} (R^2 = #{r_sq})"
# Associating flour calibration data hash
data_hash['uM_to_data'] = cal_hash
collection_associations.put(key, data_hash)
# ie: 'cal_fluorescence' : {'uM_to_data'=>{50=>2400,25=>1234...}}
collection_associations.put('Fluorescence Standard Curve', trendline)
associate_to_plans('Fluorescence_Standard_Curve', trendline)
else
correction_val_hash = ludox_correction_factors(cal_hash)
data_hash['vol_to_correction_factor'] = correction_val_hash
collection_associations.put(key, data_hash) # ie: 'cal_od'=>{'vol_to_correction_factor'=>{"100"=>1.88,"200"=>0.955}}
associate_to_plans('vol_to_correction_factor', correction_val_hash)
end
else
# matrix = read_url(upload)
matrix = (extract_measurement_matrix_from_csv(upload)).to_a # Uses BiotekPlateReaderCalibration/PlateReaderMethods
# hash = matrix_to_hash(matrix) # Upload_Data Lib - May change if I change data format
log_info 'csv matrix', matrix
# take hash and slice up to associate to input collections - that way matrix always gets formatted to the same dimensions as in_collection
in_cols = collection.object_type.columns
in_rows = collection.object_type.rows
# 'data' - known beforehand, created in matrix_to_OD_hash(matrix)
# slices = hash['data'].flatten.each_slice(in_cols).map {|slice| slice} # 2-D Array with similar dims as collection
slices = matrix.flatten.each_slice(in_cols).map {|slice| slice} # 2-D Array with similar dims as collection
log_info 'sliced up csv', slices
#### left off here attempting to create hashes for GFP or optical density at different timepoints if necessary
key = method == 'od' ? 'optical_density': 'gfp_fluorescence'
data_hash = collection_associations.get(key)
log_info slices.first(in_rows) # log without consuming the slices needed below
data_hash = Hash.new(0) if data_hash.nil?
data_hash["#{timepoint}_hr"] = slices.shift(in_rows)
collection_associations.put(key, data_hash)
end
collection_associations.save
end
# should produce ie: 'optical_density': {'16h_od'=>[[][][][]...[]]}
end
# For associating a matrix to an item
#
# Association scheme: key => { desc => mat }
# @params item [object] item object that data will be associated to
# @params key [string] key to the data hash associated to the item
# @params desc [string] describes the matrix data that it is paired with in the data hash
# @params mat [2D-Array] is the matrix of data being associated
def associate_mat_to_item(item, key, desc, mat)
data_hsh = Hash.new(0)
data_hsh[desc] = mat
item.associate(key, data_hsh)
end
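# Usage sketch for associate_mat_to_item (illustrative; the key, desc and matrix are made up):
#   associate_mat_to_item(item, 'optical_density', '0_hr', od_matrix)
#   # associates { '0_hr' => od_matrix } to the item under the key 'optical_density'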
# This function uses a reference od600 measurement to calculate the correction factor for different vols (100ul, 200, 300)
#
# @params hash [hash] is the hash of averaged blanked LUDOX samples at different volumes
#
# @returns correction_val_hash [hash] is the hash containing the correction factor for the optical density (600nm) for this experiment
def ludox_correction_factors(hash)
ref_od600 = 0.0425 #Taken from iGEM protocol - is the ref val of another spectrophotometer
# ref/corrected vals
correction_val_hash = Hash.new(0)
hash.each do |vol, ave|
correction_val_hash[vol[3..6]] = (ref_od600/ave).round(4)
end
return correction_val_hash
end
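# Worked example for ludox_correction_factors (illustrative blanked averages):
#   ludox_correction_factors({ "lud100" => 0.085, "lud200" => 0.17 })
#   #=> { "100" => 0.5, "200" => 0.25 }   (0.0425 / blanked average, rounded to 4 places)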
# This function creates a standard curve from the fluorescein calibration plate
#
# @params coordinates [hash or 2D-Array] can be a hash or [[x,y],..] where x is known concentration & y is measurement of fluorescence
#
# @returns slope [float] float representing the slope of the regression line
# @returns yint [float] float representing where the line intercepts the y-axis
# @returns x_arr [Array] a 1D array for all x coords
# @returns y_arr [Array] a 1D array for all y coords
def gfp_standard_curve(coordinates)
# Calculating Std Curve for GFP
num_of_pts = 0
a = 0
x_sum = 0
y_sum = 0
x_sq_sum = 0
x_arr = []
y_arr = []
coordinates.each do |x, y|
if x < 25 # Above 25uM is out of linear range of our instrument
a += (x * y)
x_sum += x
x_sq_sum += (x**2)
y_sum += y
x_arr.push(x)
y_arr.push(y)
num_of_pts += 1
end
end
a *= num_of_pts
b = x_sum * y_sum
c = num_of_pts * x_sq_sum
d = x_sum**2
slope = (a - b)/(c - d)
f = slope * (x_sum)
yint = (y_sum - f)/num_of_pts
# show{note "y = #{(slope).round(2)}x + #{(yint).round(2)}"}
return (slope).round(3), (yint).round(3), x_arr, y_arr
end
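# Worked example for gfp_standard_curve (illustrative points on a perfect line):
#   gfp_standard_curve({ 1.0 => 2.0, 2.0 => 4.0, 3.0 => 6.0 })
#   #=> 2.0, 0.0, [1.0, 2.0, 3.0], [2.0, 4.0, 6.0]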
# This function calculates how much the points deviate from a regression line - R-squared Value
# The closer it is to 1 or -1 the less deviation there is
#
# @params slope [float] float representing the slope of the regression line
# @params yint [float] float representing where the line intercepts the y-axis
# @params x_arr [Array] a 1D array for all x coords
# @params y_arr [Array] a 1D array for all y coords
#
# @returns rsq_val [float] float representing the R-squared Value
def r_squared_val(slope, yint, x_arr, y_arr)
y_mean = y_arr.sum/y_arr.length.to_f
# Deviation of y coordinate from the y_mean
y_mean_devs = y_arr.map {|y| (y - y_mean)**2}
dist_mean = y_mean_devs.sum # the sq distance from the mean
# Finding y-hat using regression line
y_estimate_vals = x_arr.map {|x| (slope * x) + yint }
# Deviation of y-hat values from the y_mean
y_estimate_dev = y_estimate_vals.map {|y| (y - y_mean)**2}
dist_regres = y_estimate_dev.sum # the sq distance from regress. line
rsq_val = (dist_regres/dist_mean).round(4)
return rsq_val
end
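# Worked example for r_squared_val: a perfect fit returns 1.0
#   r_squared_val(2.0, 0.0, [1.0, 2.0, 3.0], [2.0, 4.0, 6.0]) #=> 1.0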
end # Module
|
malloc3/YG_Harmonization
|
libraries/YG_Measure_OD_GFP.rb
|
# By: <NAME>
# <EMAIL>
# 10/01/18
module YG_Measure_OD_GFP
def transfer_cultures(in_item, out_item)
if debug
in_item = Item.find(276614) # Contains new part_associations 100118
end
in_collection = Collection.find(in_item.id)
out_collection = Collection.find(out_item.id)
rc_list = in_collection.get_non_empty
vol_display_matrix = get_vol_transfer_matrix(in_collection)
log_info 'vol_display_matrix', vol_display_matrix
show do
title "Transfer Culture Aliquots to #{out_item.object_type.name} for Plate Reader"
separator
check "Grab a clean <b>#{out_item.object_type.name}</b>."
check "Label the #{out_item.object_type.name} => <b>#{out_item.id}</b>."
check "Use a multi-channel pipettor to transfer the correct volume from <b>#{in_item.id}</b> to the <b>#{out_item.object_type.name}</b>."
note "<b>Follow the table below to transfer the correct volume:</b>"
table highlight_alpha_rc(in_item, rc_list) {|r,c| "#{vol_display_matrix[r][c]}µl"}
end
positive_sytox_rc = get_pos_sytox_rc(in_collection)
show do
title "Adding Ethanol to Positive SYTOX Control"
separator
check "Get #{150*positive_sytox_rc.length}µl of 100% EtOH"
bullet "Mix throughly by pipetting"
note "<b>Follow the table below to add 150ul of EtOH to the correct well</b>"
table highlight_alpha_rc(in_item, positive_sytox_rc){|r,c| "150µl"}
end
# Ensure that collection sample matrices get transferred
in_coll_matrix = in_collection.matrix
out_collection.matrix = in_coll_matrix
out_collection.save
end
def get_vol_transfer_matrix(in_collection)
vol_display_matrix = Array.new(in_collection.object_type.rows) { Array.new(in_collection.object_type.columns) {-1}}
rc_list = in_collection.get_non_empty
rc_list.each {|r,c|
control_check = in_collection.get_part_data(:control, r, c)
if control_check == 'negative_sytox'
vol_display_matrix[r][c] = 150
else
vol_display_matrix[r][c] = 300
end
}
return vol_display_matrix
# Old part_data association matrix - 100118
# vol_display_matrix = input_part_data_matrix.each_with_index.map {|row, r_idx|
# row.each_with_index.map {|part_data_obj, c_idx|
# # ie: part_data_obj => {"source"=>[{"id"=>291209, "row"=>0, "column"=>0, "process"=>"dilution"}], "od600"=>0.0003}
# obj_keys = part_data_obj.keys
# if !obj_keys.empty?
# if obj_keys.include? 'control'
# (part_data_obj[:control] == 'negative_sytox') ? trans_vol = 150 : trans_vol = 300
# else
# trans_vol = 300
# end
# else
# trans_vol = -1
# end
# trans_vol
# }
# }
end
def get_pos_sytox_rc(in_collection)
positive_sytox_rc = []
rc_list = in_collection.get_non_empty
rc_list.each {|r,c|
control_check = in_collection.get_part_data(:control, r, c)
if control_check == 'negative_sytox'
positive_sytox_rc.push([r,c])
end
}
# Old part_data association matrix - 100118
# vol_display_matrix = input_part_data_matrix.each_with_index.map {|row, r_idx|
# row.each_with_index.map {|part_data_obj, c_idx|
# # ie: part_data_obj => {"source"=>[{"id"=>291209, "row"=>0, "column"=>0, "process"=>"dilution"}], "od600"=>0.0003}
# obj_keys = part_data_obj.keys
# if !obj_keys.empty?
# if obj_keys.include? 'control'
# (part_data_obj[:control] == 'positive_sytox') ? positive_sytox_rc.push([r_idx, c_idx]) : nil
# end
# end
# }
# }
return positive_sytox_rc
end
def get_timepoint(op, tpoint_param)
return op.input(tpoint_param).val.to_i
end
def get_media_type(in_item)
media = in_item.get('type_of_media')
media = media.nil? ? 'SC' : media
return media
end
end # Module YG_Measure_OD_GFP
|