Instruction stringlengths 14 778 | input_code stringlengths 0 4.24k | output_code stringlengths 1 5.44k |
|---|---|---|
Add connect and query method | class Midori::Postgres
def initialize(socket)
@db = EM.connect_unix_domain(socket)
end
def connect(dbname, username, password)
Promise.new(->(resolve, _reject) {
@db.connect(dbname, username, password).callback do |status|
resolve.call(status)
end
})
end
end
# EM.run do
# db = EM.connect_unix_domain( "/tmp/.s.PGSQL.5432", EM::P::Postgres3 )
# db.connect( dbname, username, psw ).callback do |status|
# if status
# db.query( "select * from some_table" ).callback do |status, result, errors|
# if status
# result.rows.each do |row|
# p row
# end
# end
# end
# end
# end
# end
| class Midori::Postgres
def initialize(socket)
@db = EM.connect_unix_domain(socket)
end
def connect(dbname, username, password)
await(Promise.new(->(resolve, _reject) {
@db.connect(dbname, username, password).callback do |status|
resolve.call(status)
end
}))
end
def query(str)
await(Promise.new(->(resolve, _reject) {
@db.query(str).callback do |status, result, errors|
resolve.call([status, result, errors])
end
}))
end
end
|
Extend lib to accept a hash for scripts and a hash for options. | module GoogleAjaxLibraries
module Helper
def google_javascript_include_tag(*ajax_libraries)
html = "<script src=\"http://www.google.com/jsapi\"></script>\n"
html << "<script>\n"
ajax_libraries.each do |al|
begin
html << " google.load(\"#{al[0]}\", \"#{al[1] || 1 }\", {uncompressed:#{al[2] || false }});\n"
rescue
html << " google.load(\"#{al}\", \"1\");\n"
end
end
html << "</script>"
return html
end
end
end
ActionView::Base.send(:include, GoogleAjaxLibraries::Helper) | module GoogleAjaxLibraries
module Helper
def google_javascript_include_tag(scripts={}, opts={}, *backward_compat)
key = opts[:key] || nil
ajax_libraries = *scripts
html = ""
if debug
html << "<pre>Options = #{opts.to_json}</pre>\n"
html << "<pre>key = #{key.to_json}</pre>\n"
html << "<pre>Lib to load = #{ajax_libraries.to_json}</pre>\n"
end
html << "<script type=\"text/javascript\" src=\"http://www.google.com/jsapi"
html << "?key=#{key}" if key
html << "\"></script>\n"
html << "<script>\n"
ajax_libraries.each do |al|
begin
html << " google.load(\"#{al[0]}\", \"#{al[1] || 1 }\", {uncompressed:#{al[2] || false }});\n"
rescue
html << " google.load(\"#{al}\", \"1\");\n"
end
end
html << "</script>"
return html
end
end
end
ActionView::Base.send(:include, GoogleAjaxLibraries::Helper)
|
Use :format option in property declaration rather than validates_format_of | class Language
include DataMapper::Resource
# properties
property :id, Serial
property :code, String, :required => true, :unique => true
property :name, String, :required => true
# locale string like 'en-US'
validates_format_of :code, :with => /\A[a-z]{2}-[A-Z]{2}\z/
def self.[](code)
return nil if code.nil?
first :code => code.to_s.gsub('_', '-')
end
end
| class Language
include DataMapper::Resource
# properties
property :id, Serial
property :code, String, :required => true, :unique => true, :format => /\A[a-z]{2}-[A-Z]{2}\z/
property :name, String, :required => true
def self.[](code)
return nil if code.nil?
first :code => code.to_s.gsub('_', '-')
end
end
|
Use 0-9 not 0-8 for random number | class BenchController < ApplicationController
before_filter :find_records
private
def find_records
@records = (1..500).map do |n|
rand(9)
end
end
end
| class BenchController < ApplicationController
before_filter :find_records
private
def find_records
@records = (1..500).map do |n|
rand(10)
end
end
end
|
Add charset to theme stylesheets/javascripts, as the test indicates it should be there | class ThemeController < ContentController
caches_page :stylesheets, :javascript, :images
session :off
def stylesheets
render_theme_item(:stylesheets, params[:filename], 'text/css')
end
def javascript
render_theme_item(:javascript, params[:filename], 'text/javascript')
end
def images
render_theme_item(:images, params[:filename])
end
def error
render :nothing => true, :status => 404
end
def static_view_test
end
private
def render_theme_item(type, file, mime = nil)
mime ||= mime_for(file)
if file.split(%r{[\\/]}).include?("..")
render :text => "Not Found", :status => 404
return
end
send_file(this_blog.current_theme.path + "/#{type}/#{file}",
:type => mime, :disposition => 'inline', :stream => false)
end
def mime_for(filename)
case filename.downcase
when /\.js$/
'text/javascript'
when /\.css$/
'text/css'
when /\.gif$/
'image/gif'
when /(\.jpg|\.jpeg)$/
'image/jpeg'
when /\.png$/
'image/png'
when /\.swf$/
'application/x-shockwave-flash'
else
'application/binary'
end
end
end
| class ThemeController < ContentController
caches_page :stylesheets, :javascript, :images
session :off
def stylesheets
render_theme_item(:stylesheets, params[:filename], 'text/css; charset=utf-8')
end
def javascript
render_theme_item(:javascript, params[:filename], 'text/javascript; charset=utf-8')
end
def images
render_theme_item(:images, params[:filename])
end
def error
render :nothing => true, :status => 404
end
def static_view_test
end
private
def render_theme_item(type, file, mime = nil)
mime ||= mime_for(file)
if file.split(%r{[\\/]}).include?("..")
render :text => "Not Found", :status => 404
return
end
send_file(this_blog.current_theme.path + "/#{type}/#{file}",
:type => mime, :disposition => 'inline', :stream => false)
end
def mime_for(filename)
case filename.downcase
when /\.js$/
'text/javascript'
when /\.css$/
'text/css'
when /\.gif$/
'image/gif'
when /(\.jpg|\.jpeg)$/
'image/jpeg'
when /\.png$/
'image/png'
when /\.swf$/
'application/x-shockwave-flash'
else
'application/binary'
end
end
end
|
Remove grouping method from helper | require 'htmlentities'
module TravelAdviceHelper
def group_by_initial_letter(countries)
groups = countries.group_by do |country|
country.name[0] if country&.name
end
groups.sort_by { |name, _| name }
end
def format_atom_change_description(text)
# Encode basic entities([<>&'"]) as named, the rest as decimal
simple_format(HTMLEntities.new.encode(text, :basic, :decimal))
end
end
| require 'htmlentities'
module TravelAdviceHelper
def format_atom_change_description(text)
# Encode basic entities([<>&'"]) as named, the rest as decimal
simple_format(HTMLEntities.new.encode(text, :basic, :decimal))
end
end
|
Remove 'vendor' from s.files in gemspec | # -*- encoding: utf-8 -*-
lib = File.expand_path('../lib/', __FILE__)
$:.unshift lib unless $:.include?(lib)
require 'git-up/version'
Gem::Specification.new do |s|
s.name = "git-up"
s.version = GitUp::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ["Aanand Prasad", "Elliot Crosby-McCullough", "Adrian Irving-Beer", "Joshua Wehner"]
s.email = ["aanand.prasad@gmail.com", "elliot.cm@gmail.com"]
s.homepage = "http://github.com/aanand/git-up"
s.summary = "git command to fetch and rebase all branches"
s.license = "MIT"
s.add_dependency "colored", ">= 1.2"
s.add_dependency "grit"
s.files = Dir.glob("{bin,lib,vendor,man}/**/*") + %w(LICENSE README.md)
s.require_path = 'lib'
s.executables = Dir.glob("bin/*").map(&File.method(:basename))
end
| # -*- encoding: utf-8 -*-
lib = File.expand_path('../lib/', __FILE__)
$:.unshift lib unless $:.include?(lib)
require 'git-up/version'
Gem::Specification.new do |s|
s.name = "git-up"
s.version = GitUp::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ["Aanand Prasad", "Elliot Crosby-McCullough", "Adrian Irving-Beer", "Joshua Wehner"]
s.email = ["aanand.prasad@gmail.com", "elliot.cm@gmail.com"]
s.homepage = "http://github.com/aanand/git-up"
s.summary = "git command to fetch and rebase all branches"
s.license = "MIT"
s.add_dependency "colored", ">= 1.2"
s.add_dependency "grit"
s.files = Dir.glob("{bin,lib,man}/**/*") + %w(LICENSE README.md)
s.require_path = 'lib'
s.executables = Dir.glob("bin/*").map(&File.method(:basename))
end
|
Fix issue with frozen constants in RailsAdmin at v1.0.0 | module RailsAdmin
module Extensions
module PaperTrail
class VersionProxy
def message
@message = @version.event
end
def version_id
@version.id
end
end
class AuditingAdapter
COLUMN_MAPPING[:rollback] = :id
end
end
end
end
| module RailsAdmin
module Extensions
module PaperTrail
class VersionProxy
def message
@message = @version.event
end
def version_id
@version.id
end
end
end
end
end
|
Add helpers for links to archived notes | module CalendarHelper
def archived_note_link(version_data)
note = YAML::load(version_data.object)
version_note_path(note.fetch("id"), version_data)
end
def current_note_link(version_data)
note = YAML::load(version_data.object)
note_path(note.fetch("id"))
end
end | |
Replace case statement with map | # frozen_string_literal: true
GObject.load_class :ParamSpec
module GObject
# Overrides for GParamSpec, GObject's base class for parameter specifications.
class ParamSpec
def ref
Lib.g_param_spec_ref self
self
end
def accessor_name
get_name.tr('-', '_')
end
def pointer_type?
case value_type
when TYPE_INT
false
when TYPE_STRING
true
end
end
def type_tag
case value_type
when TYPE_INT
:gint
when TYPE_STRING
:utf8
when TYPE_LONG
:glong
end
end
def ffi_type
GirFFI::TypeMap.map_basic_type(type_tag)
end
end
end
| # frozen_string_literal: true
GObject.load_class :ParamSpec
module GObject
# Overrides for GParamSpec, GObject's base class for parameter specifications.
class ParamSpec
def ref
Lib.g_param_spec_ref self
self
end
def accessor_name
get_name.tr('-', '_')
end
def pointer_type?
case value_type
when TYPE_INT
false
when TYPE_STRING
true
end
end
G_TYPE_MAP = {
TYPE_INT => :gint,
TYPE_STRING => :utf8,
TYPE_LONG => :glong
}.freeze
def type_tag
G_TYPE_MAP.fetch(value_type)
end
def ffi_type
GirFFI::TypeMap.map_basic_type(type_tag)
end
end
end
|
Resolve return from proc issue | require 'ocular/mixin/from_file'
require 'ocular/dsl/fog'
require 'ocular/dsl/ssh'
require 'ocular/dsl/logging'
class Ocular
module DSL
class EventBase
class Results
attr_accessor :response
attr_accessor :error
end
attr_accessor :proxy
def initialize(&block)
@callback = block
end
def exec(context, do_fork = self.proxy.do_fork)
context.proxy = self.proxy
if do_fork
return exec_fork(context)
else
return exec_nofork(context)
end
end
def exec_fork(context)
reader, writer = IO::pipe
child_pid = fork do
reader.close
r = Results.new
begin
r.response = context.instance_eval(&@callback)
rescue Exception => error
r.error = error
end
response_data = Marshal.dump(r)
writer.puts(response_data)
writer.close
end
writer.close
Process.wait(child_pid)
r = Marshal.load(reader.read)
reader.close
if r.error
raise r.error
end
return r.response
end
def exec_nofork(context)
return context.instance_eval(&@callback)
end
end
end
end
| require 'ocular/mixin/from_file'
require 'ocular/dsl/fog'
require 'ocular/dsl/ssh'
require 'ocular/dsl/logging'
class Ocular
module DSL
class EventBase
class Results
attr_accessor :response
attr_accessor :error
end
attr_accessor :proxy
def initialize(&block)
@callback = block
end
def exec(context, do_fork = self.proxy.do_fork)
context.proxy = self.proxy
if do_fork
return exec_fork(context)
else
return exec_nofork(context)
end
end
def exec_fork(context)
reader, writer = IO::pipe
child_pid = fork do
reader.close
r = Results.new
begin
r.response = __call(context, @callback)
#r.response = context.instance_eval(&@callback)
rescue Exception => error
r.error = error
end
response_data = Marshal.dump(r)
writer.puts(response_data)
writer.close
end
writer.close
Process.wait(child_pid)
r = Marshal.load(reader.read)
reader.close
if r.error
raise r.error
end
return r.response
end
def exec_nofork(context)
return __call(context, @callback)
#context.instance_eval(&@callback)
end
def __call(context, callback)
# we use this trickery to workaround the LocalJumpError so that we can use return
context.define_singleton_method(:_, &callback)
p = context.method(:_).to_proc
return p.call()
end
end
end
end
|
Update queue validity to 10 minutes to expire items in the airbursh queue | require 'starling'
require 'timeout'
module Airbrush
include Timeout
class Client
DEFAULT_INCOMING_QUEUE = 'airbrush_incoming_queue'
DEFAULT_RESPONSE_TIMEOUT = 2.minutes
DEFAULT_QUEUE_VALIDITY = 0 #This should be changed back to a valid timeout once we know the problem.
attr_reader :host, :incoming_queue, :response_timeout, :queue_validity
def initialize(host, incoming_queue = DEFAULT_INCOMING_QUEUE, response_timeout = DEFAULT_RESPONSE_TIMEOUT, queue_validity = DEFAULT_QUEUE_VALIDITY)
@host = host
@server = Starling.new(@host)
@incoming_queue = incoming_queue
@response_timeout = response_timeout
@queue_validity = queue_validity
end
def process(id, command, args = {})
raise 'No job id specified' unless id
raise 'No command specified' unless command
raise "Invalid arguments #{args}" unless args.is_a? Hash
send_and_receive(id, command, args)
end
private
def send_and_receive(id, command, args)
@server.set(@incoming_queue, { :id => id, :command => command, :args => args }, @queue_validity, false)
queue = unique_name(id)
Timeout::timeout(@response_timeout) do
return @server.get(queue)
end
end
# REVISIT: share implementation with server?
def unique_name(id)
id.to_s
end
end
end | require 'starling'
require 'timeout'
module Airbrush
include Timeout
class Client
DEFAULT_INCOMING_QUEUE = 'airbrush_incoming_queue'
DEFAULT_RESPONSE_TIMEOUT = 2.minutes
DEFAULT_QUEUE_VALIDITY = 10.minutes
attr_reader :host, :incoming_queue, :response_timeout, :queue_validity
def initialize(host, incoming_queue = DEFAULT_INCOMING_QUEUE, response_timeout = DEFAULT_RESPONSE_TIMEOUT, queue_validity = DEFAULT_QUEUE_VALIDITY)
@host = host
@server = Starling.new(@host)
@incoming_queue = incoming_queue
@response_timeout = response_timeout
@queue_validity = queue_validity
end
def process(id, command, args = {})
raise 'No job id specified' unless id
raise 'No command specified' unless command
raise "Invalid arguments #{args}" unless args.is_a? Hash
send_and_receive(id, command, args)
end
private
def send_and_receive(id, command, args)
@server.set(@incoming_queue, { :id => id, :command => command, :args => args }, @queue_validity, false)
queue = unique_name(id)
Timeout::timeout(@response_timeout) do
return @server.get(queue)
end
end
# REVISIT: share implementation with server?
def unique_name(id)
id.to_s
end
end
end
|
Add missing uuid gem dependency | # -*- encoding: utf-8 -*-
require File.expand_path('../lib/grooveshark/version', __FILE__)
Gem::Specification.new do |s|
s.name = "grooveshark"
s.version = Grooveshark::VERSION
s.description = "Unofficial ruby library for consuming the Grooveshark API."
s.summary = "Grooveshark API"
s.authors = ["Dan Sosedoff"]
s.email = "dan.sosedoff@gmail.com"
s.homepage = "http://github.com/sosedoff/grooveshark"
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{|f| File.basename(f)}
s.require_paths = ['lib']
s.add_development_dependency 'rspec', '~> 2.6'
s.add_runtime_dependency 'json', '>= 1.4.6'
s.add_runtime_dependency 'rest-client', '>= 1.5.1'
end | # -*- encoding: utf-8 -*-
require File.expand_path('../lib/grooveshark/version', __FILE__)
Gem::Specification.new do |s|
s.name = "grooveshark"
s.version = Grooveshark::VERSION
s.description = "Unofficial ruby library for consuming the Grooveshark API."
s.summary = "Grooveshark API"
s.authors = ["Dan Sosedoff"]
s.email = "dan.sosedoff@gmail.com"
s.homepage = "http://github.com/sosedoff/grooveshark"
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{|f| File.basename(f)}
s.require_paths = ['lib']
s.add_development_dependency 'rspec', '~> 2.6'
s.add_runtime_dependency 'json', '>= 1.4.6'
s.add_runtime_dependency 'rest-client', '>= 1.5.1'
s.add_runtime_dependency 'uuid', '~> 2.0'
end
|
Access control for javascript clients. | class ApplicationController < ActionController::Base
protect_from_forgery
private
def records_as_json_with_itemids(records, callback = nil)
json = (records.map {|record| "\"#{record.item_barcode}\": #{record.json}"}).join(",")
json = wrap_page("\"records\":{#{json}}", records)
wrap_json(json, callback)
end
def records_as_json(records, callback = nil)
json = "\"records\":[#{records.map(&:json).join(",")}]"
json = wrap_page(json, records)
wrap_json(json, callback)
end
def wrap_json(json, callback = nil)
if callback
"#{callback}({#{json}})"
else
"{" + json + "}"
end
end
def wrap_page(json, records = nil)
if records and records.respond_to? :current_page
json + ",\"current_page\":#{records.current_page},\"per_page\":#{records.per_page},\"total_entries\":#{records.total_entries}"
else
json
end
end
def record_as_json(record, callback = nil)
wrap_json(record.try(:json))
end
end
| class ApplicationController < ActionController::Base
protect_from_forgery
after_filter :set_access_control_headers
def set_access_control_headers
headers['Access-Control-Allow-Origin'] = 'http://localhost:8000'
headers['Access-Control-Request-Method'] = '*'
end
private
def records_as_json_with_itemids(records, callback = nil)
json = (records.map {|record| "\"#{record.item_barcode}\": #{record.json}"}).join(",")
json = wrap_page("\"records\":{#{json}}", records)
wrap_json(json, callback)
end
def records_as_json(records, callback = nil)
json = "\"records\":[#{records.map(&:json).join(",")}]"
json = wrap_page(json, records)
wrap_json(json, callback)
end
def wrap_json(json, callback = nil)
if callback
"#{callback}({#{json}})"
else
"{" + json + "}"
end
end
def wrap_page(json, records = nil)
if records and records.respond_to? :current_page
json + ",\"current_page\":#{records.current_page},\"per_page\":#{records.per_page},\"total_entries\":#{records.total_entries}"
else
json
end
end
def record_as_json(record, callback = nil)
wrap_json(record.try(:json))
end
end
|
Stop Rake from telling us every spec it's about to run. | require 'rake'
begin
require 'rspec/core/rake_task'
rescue LoadError
end
| require 'rake'
begin
require 'rspec/core/rake_task'
task(:spec).clear
RSpec::Core::RakeTask.new(:spec) do |t|
t.verbose = false
end
rescue LoadError
end
|
Raise error if mission is nil | # frozen_string_literal: true
# Email notifier, to send various emails
class Notifier < ApplicationMailer
def password_reset_instructions(user, mission: nil)
build_reset_url(user)
mail(to: user.email, reply_to: reply_to(mission),
subject: t("notifier.password_reset_instructions"))
end
def intro(user, mission: nil)
@user = user
build_reset_url(user)
mail(to: user.email, reply_to: reply_to(mission),
subject: t("notifier.welcome", site: Settings.site_name))
end
def sms_token_change_alert(mission)
@mission = mission
@site_name = Settings.site_name
mail(to: reply_to(mission), reply_to: reply_to(mission),
subject: t("notifier.sms_token_change.subject", mission_name: mission.name))
end
private
def reply_to(mission)
return [] if mission.nil?
User.with_roles(mission, :coordinator).pluck(:email).uniq[0, 10] # Max of 10 reply to, should be rare.
end
def build_reset_url(user)
@reset_url = edit_password_reset_url(user.perishable_token,
mode: nil, mission_name: nil, locale: user.pref_lang)
end
end
| # frozen_string_literal: true
# Email notifier, to send various emails
class Notifier < ApplicationMailer
def password_reset_instructions(user, mission: nil)
build_reset_url(user)
mail(to: user.email, reply_to: reply_to(mission),
subject: t("notifier.password_reset_instructions"))
end
def intro(user, mission: nil)
@user = user
build_reset_url(user)
mail(to: user.email, reply_to: reply_to(mission),
subject: t("notifier.welcome", site: Settings.site_name))
end
def sms_token_change_alert(mission)
raise ArgumentError, "Mission must not be nil" unless mission
@mission = mission
@site_name = Settings.site_name
mail(to: reply_to(mission), reply_to: reply_to(mission),
subject: t("notifier.sms_token_change.subject", mission_name: mission.name))
end
private
def reply_to(mission)
return [] if mission.nil?
User.with_roles(mission, :coordinator).pluck(:email).uniq[0, 10] # Max of 10 reply to, should be rare.
end
def build_reset_url(user)
@reset_url = edit_password_reset_url(user.perishable_token,
mode: nil, mission_name: nil, locale: user.pref_lang)
end
end
|
Add form to widget requires. | require 'cucumber/salad/widgets/widget'
require 'cucumber/salad/widgets/atom'
require 'cucumber/salad/widgets/list'
require 'cucumber/salad/widgets/base_table'
require 'cucumber/salad/widgets/auto_table'
require 'cucumber/salad/widgets/table'
| require 'cucumber/salad/widgets/widget'
require 'cucumber/salad/widgets/atom'
require 'cucumber/salad/widgets/list'
require 'cucumber/salad/widgets/base_table'
require 'cucumber/salad/widgets/auto_table'
require 'cucumber/salad/widgets/table'
require 'cucumber/salad/widgets/form'
|
Fix nil coercion error in Performance Competition Series | class PerformanceCompetitionSeries::Scoreboard::Standings::Row
attr_accessor :rank
attr_reader :competitor
def initialize(competitor, results, rounds)
@competitor = competitor
@results = results
@rounds = rounds
end
def total_points
points_in_disciplines.sum { |_discipline, points| points }.round(1)
end
def points_in_disciplines
@points_in_disciplines ||=
rounds_by_discipline.each_with_object({}) do |(discipline, rounds), memo|
active_rounds = rounds.select(&:completed)
next if active_rounds.count.zero?
sum_of_points =
active_rounds
.map { |round| result_in_round(round) }
.compact
.sum(&:points)
memo[discipline] = sum_of_points.to_f / active_rounds.count
end
end
def result_in_round(round)
results.find { |result| round.includes?(result.round) }
end
private
attr_reader :results, :rounds
def rounds_by_discipline = rounds.group_by(&:discipline)
end
| class PerformanceCompetitionSeries::Scoreboard::Standings::Row
attr_accessor :rank
attr_reader :competitor
def initialize(competitor, results, rounds)
@competitor = competitor
@results = results
@rounds = rounds
end
def total_points
points_in_disciplines.sum { |_discipline, points| points }.round(1)
end
def points_in_disciplines
@points_in_disciplines ||=
rounds_by_discipline.each_with_object({}) do |(discipline, rounds), memo|
active_rounds = rounds.select(&:completed)
next if active_rounds.count.zero?
sum_of_points =
active_rounds
.map { |round| result_in_round(round) }
.compact
.sum { |result| result.points.to_f }
memo[discipline] = sum_of_points.to_f / active_rounds.count
end
end
def result_in_round(round)
results.find { |result| round.includes?(result.round) }
end
private
attr_reader :results, :rounds
def rounds_by_discipline = rounds.group_by(&:discipline)
end
|
Update Zile from 2.3.17 to 2.3.23. | require 'formula'
class Zile < Formula
url 'http://ftp.gnu.org/gnu/zile/zile-2.3.17.tar.gz'
homepage 'http://www.gnu.org/software/zile/'
md5 'd4a4409fd457e0cb51c76dd8dc09d18b'
def install
system "./configure", "--disable-dependency-tracking", "--prefix=#{prefix}", "--mandir=#{man}"
system "make install"
end
end
| require 'formula'
class Zile < Formula
url 'http://ftp.gnu.org/gnu/zile/zile-2.3.23.tar.gz'
homepage 'http://www.gnu.org/software/zile/'
md5 '4a2fa0015403cdf0eb32a5e648169cae'
def install
system "./configure", "--disable-dependency-tracking", "--prefix=#{prefix}", "--mandir=#{man}"
system "make install"
end
end
|
Remove byebug require from installed file. | #! /usr/bin/env ruby
require 'erubis'
require 'erbtex/version'
require 'erbtex/command_line'
require 'erbtex/find_binary'
require 'erbtex/runner'
require 'byebug'
| #! /usr/bin/env ruby
require 'erubis'
require 'erbtex/version'
require 'erbtex/command_line'
require 'erbtex/find_binary'
require 'erbtex/runner'
|
Use the custom dashboard choices when available. | # Include all Methods that are needed when a controller contains the dashboard.
#
module ApplicationController::HasDashboard
extend ActiveSupport::Concern
included do
before_filter :load_constraints, :load_goals
end
module InstanceMethods
def load_constraints
@constraints = Current.view.constraints
end
def load_goals
@goals = Current.view.policy_goals
end
end
end | # Include all Methods that are needed when a controller contains the dashboard.
#
module ApplicationController::HasDashboard
extend ActiveSupport::Concern
included do
before_filter :load_constraints, :load_goals
end
module InstanceMethods
def load_constraints
@constraints = if session[:dashboard].any?
Constraint.for_dashboard(session[:dashboard])
else
Current.view.constraints
end
end
def load_goals
@goals = Current.view.policy_goals
end
end
end
|
Add unf gem to silence warnings from Fog | require File.expand_path("../lib/conjure", __FILE__)
Gem::Specification.new do |s|
s.name = "conjure"
s.version = Conjure::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ["Brian Auton"]
s.email = ["brianauton@gmail.com"]
s.homepage = "http://github.com/brianauton/conjure"
s.summary = "Magically powerful deployment for Rails applications"
s.license = "MIT"
s.required_rubygems_version = ">= 1.3.6"
s.files = Dir.glob("lib/**/*") + ["README.md", "History.md", "License.txt"]
s.require_path = "lib"
s.executables = ["conjure"]
s.add_dependency "fog", ">= 1.15.0"
s.add_dependency "thor"
s.add_development_dependency "minitest"
s.add_development_dependency "rake"
end
| require File.expand_path("../lib/conjure", __FILE__)
Gem::Specification.new do |s|
s.name = "conjure"
s.version = Conjure::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ["Brian Auton"]
s.email = ["brianauton@gmail.com"]
s.homepage = "http://github.com/brianauton/conjure"
s.summary = "Magically powerful deployment for Rails applications"
s.license = "MIT"
s.required_rubygems_version = ">= 1.3.6"
s.files = Dir.glob("lib/**/*") + ["README.md", "History.md", "License.txt"]
s.require_path = "lib"
s.executables = ["conjure"]
s.add_dependency "fog", ">= 1.15.0"
s.add_dependency "thor"
s.add_dependency "unf"
s.add_development_dependency "minitest"
s.add_development_dependency "rake"
end
|
Add back some old redirects | Rails.application.routes.draw do
get 'resume' => 'resume#resume', :as => :resume
get 'cv' => 'resume#cv', :as => :cv
get 'portfolio' => 'folios#portfolio', :as => :portfolio
root :to => "home#home"
# redirecting match
get 'fb' => redirect(Links::Facebook)
get 'tw' => redirect(Links::Twitter)
get 'li' => redirect(Links::Linkedin)
get 'gh' => redirect(Links::Github)
get 'fo' => redirect(Links::Foursquare)
# Nothing left, route to error page
get '*wild' => 'application#render_404', :as => :error_404
end
| Rails.application.routes.draw do
get 'resume' => 'resume#resume', :as => :resume
get 'cv' => 'resume#cv', :as => :cv
get 'portfolio' => 'folios#portfolio', :as => :portfolio
root :to => "home#home"
# redirecting match
get 'fb' => redirect(Links::Facebook)
get 'tw' => redirect(Links::Twitter)
get 'li' => redirect(Links::Linkedin)
get 'gh' => redirect(Links::Github)
get 'fo' => redirect(Links::Foursquare)
get 'zshrc' => redirect('https://raw.githubusercontent.com/pfhayes/dotfiles/master/.zshrc')
get 'vimrc' => redirect('https://raw.githubusercontent.com/pfhayes/dotfiles/master/.vimrc')
# Nothing left, route to error page
get '*wild' => 'application#render_404', :as => :error_404
end
|
Update json dependency to later version (CVE-2013-0269) | Gem::Specification.new do |s|
s.name = 'nexmo'
s.version = '1.2.0'
s.platform = Gem::Platform::RUBY
s.authors = ['Tim Craft']
s.email = ['mail@timcraft.com']
s.homepage = 'http://github.com/timcraft/nexmo'
s.description = 'A Ruby wrapper for the Nexmo API'
s.summary = 'See description'
s.files = Dir.glob('{lib,spec}/**/*') + %w(README.md nexmo.gemspec)
s.add_development_dependency('rake', '>= 0.9.3')
s.add_development_dependency('mocha', '~> 0.13.2')
s.add_development_dependency('oauth', '~> 0.4.7')
s.add_development_dependency('faux', '~> 1.1.0')
s.require_path = 'lib'
if RUBY_VERSION == '1.8.7'
s.add_development_dependency('minitest', '~> 4.2.0')
s.add_development_dependency('json', '~> 1.6')
end
end
| Gem::Specification.new do |s|
s.name = 'nexmo'
s.version = '1.2.0'
s.platform = Gem::Platform::RUBY
s.authors = ['Tim Craft']
s.email = ['mail@timcraft.com']
s.homepage = 'http://github.com/timcraft/nexmo'
s.description = 'A Ruby wrapper for the Nexmo API'
s.summary = 'See description'
s.files = Dir.glob('{lib,spec}/**/*') + %w(README.md nexmo.gemspec)
s.add_development_dependency('rake', '>= 0.9.3')
s.add_development_dependency('mocha', '~> 0.13.2')
s.add_development_dependency('oauth', '~> 0.4.7')
s.add_development_dependency('faux', '~> 1.1.0')
s.require_path = 'lib'
if RUBY_VERSION == '1.8.7'
s.add_development_dependency('minitest', '~> 4.2.0')
s.add_development_dependency('json', '~> 1.8.0')
end
end
|
Upgrade dependency gems, bootstrap3-datetimepicker-rails to 4.17.47 and momentjs-rails to 2.17.1. | # coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "datetime_picker_input/version"
Gem::Specification.new do |spec|
spec.name = "datetime_picker_input"
spec.version = DatetimePickerInput::VERSION
spec.authors = %w(Juanito Fatas)
spec.email = %w(katehuang0320@gmail.com)
spec.summary = %(Datetime picker wrapper of https://github.com/TrevorS/bootstrap3-datetimepicker-rails, for use with simple_form on Rails 4+.)
spec.description = spec.summary
spec.homepage = "https://github.com/jollygoodcode/datetime_picker_input"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = Dir["spec/**/*"]
spec.require_paths = %w(lib)
spec.add_runtime_dependency "bootstrap3-datetimepicker-rails", "~> 4.17.37"
spec.add_runtime_dependency "momentjs-rails", "~> 2.10.6"
end
| # coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "datetime_picker_input/version"
Gem::Specification.new do |spec|
spec.name = "datetime_picker_input"
spec.version = DatetimePickerInput::VERSION
spec.authors = %w(Juanito Fatas)
spec.email = %w(katehuang0320@gmail.com)
spec.summary = %(Datetime picker wrapper of https://github.com/TrevorS/bootstrap3-datetimepicker-rails, for use with simple_form on Rails 4+.)
spec.description = spec.summary
spec.homepage = "https://github.com/jollygoodcode/datetime_picker_input"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = Dir["spec/**/*"]
spec.require_paths = %w(lib)
spec.add_runtime_dependency "bootstrap3-datetimepicker-rails", "~> 4.17.47"
spec.add_runtime_dependency "momentjs-rails", "~> 2.17.1"
end
|
Add spree_notifications migrations to generator | module SpreeQueueLine
module Generators
class InstallGenerator < Rails::Generators::Base
class_option :auto_run_migrations, :type => :boolean, :default => false
def add_javascripts
append_file 'vendor/assets/javascripts/spree/frontend/all.js', "//= require spree/frontend/spree_queue_line\n"
append_file 'vendor/assets/javascripts/spree/backend/all.js', "//= require spree/backend/spree_queue_line\n"
end
def add_stylesheets
inject_into_file 'vendor/assets/stylesheets/spree/frontend/all.css', " *= require spree/frontend/spree_queue_line\n", :before => /\*\//, :verbose => true
inject_into_file 'vendor/assets/stylesheets/spree/backend/all.css', " *= require spree/backend/spree_queue_line\n", :before => /\*\//, :verbose => true
end
def add_migrations
run 'bundle exec rake railties:install:migrations FROM=spree_queue_line'
end
def run_migrations
run_migrations = options[:auto_run_migrations] || ['', 'y', 'Y'].include?(ask 'Would you like to run the migrations now? [Y/n]')
if run_migrations
run 'bundle exec rake db:migrate'
else
puts 'Skipping rake db:migrate, don\'t forget to run it!'
end
end
end
end
end
| module SpreeQueueLine
module Generators
class InstallGenerator < Rails::Generators::Base
class_option :auto_run_migrations, :type => :boolean, :default => false
def add_migrations
run 'bundle exec rake railties:install:migrations FROM=spree_notifications'
run 'bundle exec rake railties:install:migrations FROM=spree_queue_line'
end
def run_migrations
run_migrations = options[:auto_run_migrations] || ['', 'y', 'Y'].include?(ask 'Would you like to run the migrations now? [Y/n]')
if run_migrations
run 'bundle exec rake db:migrate'
else
puts 'Skipping rake db:migrate, don\'t forget to run it!'
end
end
end
end
end
|
Set :send_checked => true in order to have miq_grid_checks defined | class ApplicationHelper::Toolbar::GenericObjectsCenter < ApplicationHelper::Toolbar::Basic
button_group('generic_object_policy', [
select(
:generic_object_policy_choice,
'fa fa-shield fa-lg',
t = N_('Policy'),
t,
:enabled => false,
:onwhen => "1+",
:items => [
button(
:generic_object_tag,
'pficon pficon-edit fa-lg',
N_('Edit Tags for the selected Generic Object Instances'),
N_('Edit Tags'),
:url_parms => "main_div",
:enabled => false,
:onwhen => "1+"),
]
),
])
end
| class ApplicationHelper::Toolbar::GenericObjectsCenter < ApplicationHelper::Toolbar::Basic
button_group('generic_object_policy', [
select(
:generic_object_policy_choice,
'fa fa-shield fa-lg',
t = N_('Policy'),
t,
:enabled => false,
:onwhen => "1+",
:items => [
button(
:generic_object_tag,
'pficon pficon-edit fa-lg',
N_('Edit Tags for the selected Generic Object Instances'),
N_('Edit Tags'),
:url_parms => "main_div",
:send_checked => true,
:enabled => false,
:onwhen => "1+"
),
]
),
])
end
|
Add orders migration from neigborly balanced | # This migration comes from neighborly_balanced (originally 20140817195359)
class CreateNeighborlyBalancedOrders < ActiveRecord::Migration
def change
create_table :neighborly_balanced_orders do |t|
t.references :project, index: true, null: false
t.string :href, null: false
t.timestamps
end
end
end
| |
Add a test case for the scope enum adds | require 'cases/helper'
require 'models/book'
class StoreTest < ActiveRecord::TestCase
fixtures :books
setup do
@book = books(:awdr)
end
test "query state by predicate" do
assert @book.proposed?
assert_not @book.written?
assert_not @book.published?
end
test "query state with symbol" do
assert_equal :proposed, @book.status
end
test "update by declaration" do
@book.written!
assert @book.written?
end
test "update by setter" do
@book.update! status: :written
assert @book.written?
end
test "constant" do
assert_equal 0, Book::STATUS[:proposed]
assert_equal 1, Book::STATUS[:written]
assert_equal 2, Book::STATUS[:published]
end
end
| require 'cases/helper'
require 'models/book'
class StoreTest < ActiveRecord::TestCase
fixtures :books
setup do
@book = books(:awdr)
end
test "query state by predicate" do
assert @book.proposed?
assert_not @book.written?
assert_not @book.published?
end
test "query state with symbol" do
assert_equal :proposed, @book.status
end
test "find via scope" do
assert_equal @book, Book.proposed.first
end
test "update by declaration" do
@book.written!
assert @book.written?
end
test "update by setter" do
@book.update! status: :written
assert @book.written?
end
test "constant" do
assert_equal 0, Book::STATUS[:proposed]
assert_equal 1, Book::STATUS[:written]
assert_equal 2, Book::STATUS[:published]
end
end
|
Remove extra empty line detected at class body end | class ArticleRepository
class ArticleNotFound < StandardError; end
REPOSITORY = Rails.root.join('content')
def initialize(repository = REPOSITORY)
@repository = Pathname.new(repository)
end
def find(id)
file = path(id)
raise ArticleNotFound unless file.exist?
Article.new(id, File.read(file))
end
def self.find(id)
(new).find(id)
end
private
attr_reader :repository
def path(id)
repository.join("#{id.tr('-', '_')}.md")
end
end
| class ArticleRepository
class ArticleNotFound < StandardError; end
REPOSITORY = Rails.root.join('content')
def initialize(repository = REPOSITORY)
@repository = Pathname.new(repository)
end
def find(id)
file = path(id)
raise ArticleNotFound unless file.exist?
Article.new(id, File.read(file))
end
def self.find(id)
(new).find(id)
end
private
attr_reader :repository
def path(id)
repository.join("#{id.tr('-', '_')}.md")
end
end
|
Replace .es6.js extension to .js.es6 in JSHint check | # encoding: utf-8
module Phare
class Check
class JSHint < Check
attr_reader :config, :path
def initialize(directory, options = {})
@config = File.expand_path("#{directory}.jshintrc", __FILE__)
@path = File.expand_path("#{directory}app/assets/javascripts", __FILE__)
@glob = File.join(@path, '**/*')
@extensions = %w(.js .es6.js)
@options = options
super
end
def command
if @tree.changed?
"jshint --config #{@config} --extra-ext #{@extensions.join(',')} #{@tree.changes.join(' ')}"
else
"jshint --config #{@config} --extra-ext #{@extensions.join(',')} #{@glob}"
end
end
protected
def binary_exists?
!Phare.system_output('which jshint').empty?
end
def configuration_exists?
File.exists?(@config)
end
def arguments_exists?
@tree.changed? || Dir.exists?(@path)
end
def print_banner
Phare.puts '---------------------------------------------'
Phare.puts 'Running JSHint to check for JavaScript style…'
Phare.puts '---------------------------------------------'
end
end
end
end
| # encoding: utf-8
module Phare
class Check
class JSHint < Check
attr_reader :config, :path
def initialize(directory, options = {})
@config = File.expand_path("#{directory}.jshintrc", __FILE__)
@path = File.expand_path("#{directory}app/assets/javascripts", __FILE__)
@glob = File.join(@path, '**/*')
@extensions = %w(.js .js.es6)
@options = options
super
end
def command
if @tree.changed?
"jshint --config #{@config} --extra-ext #{@extensions.join(',')} #{@tree.changes.join(' ')}"
else
"jshint --config #{@config} --extra-ext #{@extensions.join(',')} #{@glob}"
end
end
protected
def binary_exists?
!Phare.system_output('which jshint').empty?
end
def configuration_exists?
File.exists?(@config)
end
def arguments_exists?
@tree.changed? || Dir.exists?(@path)
end
def print_banner
Phare.puts '---------------------------------------------'
Phare.puts 'Running JSHint to check for JavaScript style…'
Phare.puts '---------------------------------------------'
end
end
end
end
|
Enable use helper methods in rambulance view | module Rambulance
class Railtie < Rails::Railtie
initializer 'rambulance' do |app|
require "rambulance/exceptions_app"
app.config.exceptions_app =
begin
ActiveSupport::Dependencies.load_missing_constant(Object, :ExceptionsApp)
->(env){ ::ExceptionsApp.call(env) }
rescue NameError
::Rambulance::ExceptionsApp
end
ActiveSupport.on_load(:after_initialize) do
Rails.application.routes.append do
mount app.config.exceptions_app, at: '/rambulance'
end if Rails.env.development?
end
end
end
end
| module Rambulance
class Railtie < Rails::Railtie
initializer 'rambulance', after: :prepend_helpers_path do |app|
require "rambulance/exceptions_app"
app.config.exceptions_app =
begin
ActiveSupport::Dependencies.load_missing_constant(Object, :ExceptionsApp)
->(env){ ::ExceptionsApp.call(env) }
rescue NameError
::Rambulance::ExceptionsApp
end
ActiveSupport.on_load(:after_initialize) do
Rails.application.routes.append do
mount app.config.exceptions_app, at: '/rambulance'
end if Rails.env.development?
end
end
end
end
|
Use symbols instead of strings | require 'faraday_middleware'
require 'faraday/request/multipart_with_file'
require 'faraday/request/gateway'
require 'faraday/request/twitter_oauth'
require 'faraday/response/raise_http_4xx'
require 'faraday/response/raise_http_5xx'
module Twitter
# @private
module Connection
private
def connection(format=format)
options = {
:headers => {
'Accept' => "application/#{format}",
'User-Agent' => user_agent,
},
:proxy => proxy,
:ssl => {:verify => false},
:url => api_endpoint,
}
Faraday.new(options) do |builder|
builder.use Faraday::Request::MultipartWithFile
builder.use Faraday::Request::TwitterOAuth, authentication if authenticated?
builder.use Faraday::Request::Multipart
builder.use Faraday::Request::UrlEncoded
builder.use Faraday::Request::Gateway, gateway if gateway
builder.use Faraday::Response::RaiseHttp4xx
case format.to_s.downcase
when 'json'
builder.use Faraday::Response::Mashify
builder.use Faraday::Response::ParseJson
when 'xml'
builder.use Faraday::Response::Mashify
builder.use Faraday::Response::ParseXml
end
builder.use Faraday::Response::RaiseHttp5xx
builder.adapter(adapter)
end
end
end
end
| require 'faraday_middleware'
require 'faraday/request/multipart_with_file'
require 'faraday/request/gateway'
require 'faraday/request/twitter_oauth'
require 'faraday/response/raise_http_4xx'
require 'faraday/response/raise_http_5xx'
module Twitter
# @private
module Connection
private
def connection(format=format)
options = {
:headers => {
:accept => "application/#{format}",
:user_agent => user_agent,
},
:proxy => proxy,
:ssl => {:verify => false},
:url => api_endpoint,
}
Faraday.new(options) do |builder|
builder.use Faraday::Request::MultipartWithFile
builder.use Faraday::Request::TwitterOAuth, authentication if authenticated?
builder.use Faraday::Request::Multipart
builder.use Faraday::Request::UrlEncoded
builder.use Faraday::Request::Gateway, gateway if gateway
builder.use Faraday::Response::RaiseHttp4xx
case format.to_s.downcase
when 'json'
builder.use Faraday::Response::Mashify
builder.use Faraday::Response::ParseJson
when 'xml'
builder.use Faraday::Response::Mashify
builder.use Faraday::Response::ParseXml
end
builder.use Faraday::Response::RaiseHttp5xx
builder.adapter(adapter)
end
end
end
end
|
Move Client configs before VCR configs | require 'rubygems'
require 'bundler/setup'
require 'cgi'
require 'vcr'
$:.unshift(File.dirname(__FILE__) + '/lib')
require 'dnsimple'
VCR.configure do |c|
c.cassette_library_dir = 'fixtures/vcr_cassettes'
c.hook_into :fakeweb
c.filter_sensitive_data("<USERNAME>") { CGI::escape(DNSimple::Client.username) }
c.filter_sensitive_data("<PASSWORD>") { CGI::escape(DNSimple::Client.password) }
end
RSpec.configure do |c|
c.mock_framework = :mocha
c.extend VCR::RSpec::Macros
end
config = YAML.load_file(File.expand_path(ENV['DNSIMPLE_TEST_CONFIG'] || '~/.dnsimple.test'))
DNSimple::Client.base_uri = config['site'] if config['site'] # Example: https://test.dnsimple.com/
DNSimple::Client.host = config['host'] if config['host'] # Example: test.dnsimple.com
DNSimple::Client.username = config['username'] # Example: testusername
DNSimple::Client.password = config['password'] # Example: testpassword
| require 'rubygems'
require 'bundler/setup'
require 'cgi'
require 'vcr'
$:.unshift(File.dirname(__FILE__) + '/lib')
require 'dnsimple'
config = YAML.load_file(File.expand_path(ENV['DNSIMPLE_TEST_CONFIG'] || '~/.dnsimple.test'))
DNSimple::Client.base_uri = config['site'] if config['site'] # Example: https://test.dnsimple.com/
DNSimple::Client.host = config['host'] if config['host'] # Example: test.dnsimple.com
DNSimple::Client.username = config['username'] # Example: testusername@example.com
DNSimple::Client.password = config['password'] # Example: testpassword
DNSimple::Client.api_token = config['api_token'] # Example: 1234567890
VCR.configure do |c|
c.cassette_library_dir = 'fixtures/vcr_cassettes'
c.hook_into :fakeweb
c.filter_sensitive_data("<USERNAME>") { CGI::escape(DNSimple::Client.username) }
c.filter_sensitive_data("<PASSWORD>") { CGI::escape(DNSimple::Client.password) }
end
RSpec.configure do |c|
c.mock_framework = :mocha
c.extend VCR::RSpec::Macros
end
|
Remove the try, because we are converting to string now | class BelongsToTokenInput < Formtastic::Inputs::StringInput
def input_html_options
super.merge(
"data-token-limit" => 1,
"class" => "active-admin-tokeninput",
"data-field-name" => "#{object_name.to_s.try(:underscore)}[#{method.to_s.singularize}_id]",
"data-pre" => "[#{object.send("#{method}").to_json(:only => [:id, :name])}]",
"name" => "",
"value" => "",
)
end
end
| class BelongsToTokenInput < Formtastic::Inputs::StringInput
def input_html_options
super.merge(
"data-token-limit" => 1,
"class" => "active-admin-tokeninput",
"data-field-name" => "#{object_name.to_s.underscore}[#{method.to_s.singularize}_id]",
"data-pre" => "[#{object.send("#{method}").to_json(:only => [:id, :name])}]",
"name" => "",
"value" => "",
)
end
end
|
Use the generic Chef inc e-mail | # -*- encoding: utf-8 -*-
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "mixlib/versioning/version"
Gem::Specification.new do |spec|
spec.name = "mixlib-versioning"
spec.version = Mixlib::Versioning::VERSION
spec.authors = ["Seth Chisamore", "Christopher Maier"]
spec.email = ["schisamo@chef.io", "cm@chef.io"]
spec.description = "General purpose Ruby library that allows you to parse, compare and manipulate version strings in multiple formats."
spec.summary = spec.description
spec.homepage = "https://github.com/chef/mixlib-versioning"
spec.license = "Apache 2.0"
spec.files = `git ls-files`.split($INPUT_RECORD_SEPARATOR)
spec.executables = spec.files.grep(%r{^bin/}).map { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
end
| # -*- encoding: utf-8 -*-
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "mixlib/versioning/version"
Gem::Specification.new do |spec|
spec.name = "mixlib-versioning"
spec.version = Mixlib::Versioning::VERSION
spec.authors = ["Seth Chisamore", "Christopher Maier"]
spec.description = "General purpose Ruby library that allows you to parse, compare and manipulate version strings in multiple formats."
spec.summary = spec.description
s.email = "info@chef.io"
spec.homepage = "https://github.com/chef/mixlib-versioning"
spec.license = "Apache 2.0"
spec.files = `git ls-files`.split($INPUT_RECORD_SEPARATOR)
spec.executables = spec.files.grep(%r{^bin/}).map { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
end
|
Add text method and private methods for Texticle module | require "texticle/version"
module Texticle
# Your code goes here...
end
| lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'texticle/version'
require "texticle/providers"
require "pony"
module Texticle
def self.text(options)
options[:to] = recipients(options[:to])
Pony.mail(options)
end
private
def self.recipients(numbers)
recipients = []
numbers = numbers.split(",") unless numbers.class == Array
numbers.each do |number|
PROVIDERS.each { |provider| recipients << "#{number}#{provider}" }
end
recipients
end
end
|
Rewrite versioned asset paths coming in | # A monkey-patch to AssetTagHelper that prepends a directory with the rails asset it
# to asset paths as well as using an asset query string. CDNs may ignore the querystring
# so this is belt-and-braces cache busting. Requires a webserver-level rewrite rule
# to strip the /rel-[asset-id]/ directory
module ActionView
module Helpers #:nodoc:
module AssetTagHelper
private
if MySociety::Config.getbool("USE_VERSIONED_ASSET_PATHS", false)
def rewrite_asset_path(source)
asset_id = rails_asset_id(source)
if asset_id.blank?
source
else
"/rel-#{asset_id}" + source + "?#{asset_id}"
end
end
end
end
end
end
| # A monkey-patch to AssetTagHelper that prepends a directory with the rails asset it
# to asset paths as well as using an asset query string. CDNs may ignore the querystring
# so this is belt-and-braces cache busting. Requires a webserver-level rewrite rule
# to strip the /rel-[asset-id]/ directory
module ActionView
module Helpers #:nodoc:
module AssetTagHelper
private
if MySociety::Config.getbool("USE_VERSIONED_ASSET_PATHS", false)
def rewrite_asset_path(source)
asset_id = rails_asset_id(source)
if asset_id.blank?
source
else
"/rel-#{asset_id}" + source + "?#{asset_id}"
end
end
def asset_file_path(path)
asset_id = rails_asset_id(source)
if !asset_id.blank?
path = path.gsub(/^\/rel-\d+/, '')
end
file_path = File.join(ASSETS_DIR, path.split('?').first)
end
end
end
end
end
|
Use Truncate instead of delete while clearing sessions | namespace 'db:sessions' do
desc "Creates a sessions migration for use with ActiveRecord::SessionStore"
task :create => [:environment, 'db:load_config'] do
raise 'Task unavailable to this database (no migration support)' unless ActiveRecord::Base.connection.supports_migrations?
Rails.application.load_generators
require 'rails/generators/rails/session_migration/session_migration_generator'
Rails::Generators::SessionMigrationGenerator.start [ ENV['MIGRATION'] || 'add_sessions_table' ]
end
desc "Clear the sessions table"
task :clear => [:environment, 'db:load_config'] do
ActiveRecord::Base.connection.execute "DELETE FROM #{ActiveRecord::SessionStore::Session.table_name}"
end
desc "Trim old sessions from the table (default: > 30 days)"
task :trim => [:environment, 'db:load_config'] do
cutoff_period = (ENV['SESSION_DAYS_TRIM_THRESHOLD'] || 30).to_i.days.ago
ActiveRecord::SessionStore::Session.
where("updated_at < ?", cutoff_period).
delete_all
end
end
| namespace 'db:sessions' do
desc "Creates a sessions migration for use with ActiveRecord::SessionStore"
task :create => [:environment, 'db:load_config'] do
raise 'Task unavailable to this database (no migration support)' unless ActiveRecord::Base.connection.supports_migrations?
Rails.application.load_generators
require 'rails/generators/rails/session_migration/session_migration_generator'
Rails::Generators::SessionMigrationGenerator.start [ ENV['MIGRATION'] || 'add_sessions_table' ]
end
desc "Clear the sessions table"
task :clear => [:environment, 'db:load_config'] do
ActiveRecord::Base.connection.execute "TRUNCATE TABLE #{ActiveRecord::SessionStore::Session.table_name}"
end
desc "Trim old sessions from the table (default: > 30 days)"
task :trim => [:environment, 'db:load_config'] do
cutoff_period = (ENV['SESSION_DAYS_TRIM_THRESHOLD'] || 30).to_i.days.ago
ActiveRecord::SessionStore::Session.
where("updated_at < ?", cutoff_period).
delete_all
end
end
|
Add a little debugging output. | require "fog"
require "log4r"
module VagrantPlugins
module OpenStack
module Action
# This action connects to OpenStack, verifies credentials work, and
# puts the OpenStack connection object into the `:openstack_compute` key
# in the environment.
class ConnectOpenStack
def initialize(app, env)
@app = app
@logger = Log4r::Logger.new("vagrant_openstack::action::connect_openstack")
end
def call(env)
# Get the configs
config = env[:machine].provider_config
api_key = config.api_key
endpoint = config.endpoint
username = config.username
tenant = config.tenant
region = config.region
connection_options = { :proxy => config.proxy }
@logger.info("Connecting to OpenStack...")
env[:openstack_compute] = Fog::Compute.new({
:provider => :openstack,
:connection_options => connection_options,
:openstack_username => username,
:openstack_api_key => api_key,
:openstack_auth_url => endpoint,
:openstack_tenant => tenant,
:openstack_region => region
})
if config.network
env[:openstack_network] = Fog::Network.new({
:provider => :openstack,
:connection_options => connection_options,
:openstack_username => username,
:openstack_api_key => api_key,
:openstack_auth_url => endpoint,
:openstack_tenant => tenant
})
end
@app.call(env)
end
end
end
end
end
| require "fog"
require "log4r"
module VagrantPlugins
module OpenStack
module Action
# This action connects to OpenStack, verifies credentials work, and
# puts the OpenStack connection object into the `:openstack_compute` key
# in the environment.
class ConnectOpenStack
def initialize(app, env)
@app = app
@logger = Log4r::Logger.new("vagrant_openstack::action::connect_openstack")
end
def call(env)
# Get the configs
config = env[:machine].provider_config
api_key = config.api_key
endpoint = config.endpoint
username = config.username
tenant = config.tenant
region = config.region
# Pass proxy config down into the Fog::Connection object using
# the `connection_options` hash.
connection_options = { :proxy => config.proxy }
@logger.info("Connecting to OpenStack...")
@logger.debug("API connection params: #{connection_options.inspect}")
env[:openstack_compute] = Fog::Compute.new({
:provider => :openstack,
:connection_options => connection_options,
:openstack_username => username,
:openstack_api_key => api_key,
:openstack_auth_url => endpoint,
:openstack_tenant => tenant,
:openstack_region => region
})
if config.network
env[:openstack_network] = Fog::Network.new({
:provider => :openstack,
:connection_options => connection_options,
:openstack_username => username,
:openstack_api_key => api_key,
:openstack_auth_url => endpoint,
:openstack_tenant => tenant
})
end
@app.call(env)
end
end
end
end
end
|
Normalize strings used in metrics | require "metriks"
module Travis
class TravisYmlStats
def self.store_stats(request)
new(request).store_stats
end
def initialize(request)
@request = request
end
def store_stats
store_language
end
private
attr_reader :request
def store_language
Metriks.meter("travis_yml.language.#{config["language"]}").mark
Metriks.meter("travis_yml.github_language.#{payload["language"]}").mark
end
def config
request.config
end
def payload
request.payload
end
end
end
| require "metriks"
module Travis
class TravisYmlStats
def self.store_stats(request)
new(request).store_stats
end
def initialize(request)
@request = request
end
def store_stats
store_language
end
private
attr_reader :request
def store_language
Metriks.meter("travis_yml.language.#{normalize_string config["language"]}").mark
Metriks.meter("travis_yml.github_language.#{normalize_string payload["language"]}").mark
end
def config
request.config
end
def payload
request.payload
end
def normalize_string(str)
str.downcase.tr(" ", "-")
end
end
end
|
Revert "Unify ip addresses for now" | Brain.request(path: "/cluster-api/v1/cells/discovery",
method: :post,
payload: {
cell: {
uuid: Cell.uuid,
fqdn: Cell.fqdn,
volumes: Cell.storage_volumes,
ip_address: ENV["HOST_IP_ADDRESS"]
}
})
| Brain.request(path: "/cluster-api/v1/cells/discovery",
method: :post,
payload: {
cell: {
uuid: Cell.uuid,
fqdn: Cell.fqdn,
volumes: Cell.storage_volumes,
public_ip_address: ENV["HOST_IP_ADDRESS"]
}
})
|
Fix migration removing duplicate stages on MySQL again | class RemoveRedundantPipelineStages < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
redundant_stages_ids = <<~SQL
SELECT id FROM ci_stages WHERE (pipeline_id, name) IN (
SELECT pipeline_id, name FROM ci_stages
GROUP BY pipeline_id, name HAVING COUNT(*) > 1
)
SQL
execute <<~SQL
UPDATE ci_builds SET stage_id = NULL WHERE stage_id IN (#{redundant_stages_ids})
SQL
if Gitlab::Database.postgresql?
execute <<~SQL
DELETE FROM ci_stages WHERE id IN (#{redundant_stages_ids})
SQL
else # We can't modify a table we are selecting from on MySQL
execute <<~SQL
DELETE a FROM ci_stages AS a, ci_stages AS b
WHERE a.pipeline_id = b.pipeline_id AND a.name = b.name
SQL
end
end
def down
# noop
end
end
| class RemoveRedundantPipelineStages < ActiveRecord::Migration
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
redundant_stages_ids = <<~SQL
SELECT id FROM ci_stages WHERE (pipeline_id, name) IN (
SELECT pipeline_id, name FROM ci_stages
GROUP BY pipeline_id, name HAVING COUNT(*) > 1
)
SQL
execute <<~SQL
UPDATE ci_builds SET stage_id = NULL WHERE stage_id IN (#{redundant_stages_ids})
SQL
if Gitlab::Database.postgresql?
execute <<~SQL
DELETE FROM ci_stages WHERE id IN (#{redundant_stages_ids})
SQL
else # We can't modify a table we are selecting from on MySQL
execute <<~SQL
DELETE a FROM ci_stages AS a, ci_stages AS b
WHERE a.pipeline_id = b.pipeline_id AND a.name = b.name
AND a.id <> b.id
SQL
end
end
def down
# noop
end
end
|
Allow initialization argument to be used in parent | require File.expand_path("../version_5_0", __FILE__)
module VagrantPlugins
module ProviderVirtualBox
module Driver
# Driver for VirtualBox 5.1.x
class Version_5_1 < Version_5_0
def initialize(uuid)
super()
@logger = Log4r::Logger.new("vagrant::provider::virtualbox_5_1")
end
end
end
end
end
| require File.expand_path("../version_5_0", __FILE__)
module VagrantPlugins
module ProviderVirtualBox
module Driver
# Driver for VirtualBox 5.1.x
class Version_5_1 < Version_5_0
def initialize(uuid)
super
@logger = Log4r::Logger.new("vagrant::provider::virtualbox_5_1")
end
end
end
end
end
|
Rework spec to test mount point with and without base directory | require "spec_helper"
require 'util/mount/miq_generic_mount_session'
describe MiqGenericMountSession do
context "#connect" do
before do
MiqGenericMountSession.stub(:raw_disconnect)
@s1 = MiqGenericMountSession.new(:uri => '/tmp/abc', :mount_point => 'tmp')
@s2 = MiqGenericMountSession.new(:uri => '/tmp/abc', :mount_point => 'tmp')
@s1.logger = Logger.new("/dev/null")
@s2.logger = Logger.new("/dev/null")
end
after do
@s1.disconnect
@s2.disconnect
end
it "is unique" do
@s1.connect
@s2.connect
expect(@s1.mnt_point).to_not eq(@s2.mnt_point)
end
end
end
| require "spec_helper"
require 'util/mount/miq_generic_mount_session'
describe MiqGenericMountSession do
it "#connect returns a string pointing to the mount point" do
described_class.stub(:raw_disconnect)
s = described_class.new(:uri => '/tmp/abc')
s.logger = Logger.new("/dev/null")
expect(s.connect).to match(/\A\/tmp\/miq_\d{8}-\d{5}-\w+\z/)
s.disconnect
end
context "#mount_share" do
it "without :mount_point uses default temp directory as a base" do
expect(described_class.new(:uri => '/tmp/abc').mount_share).to match(/\A\/tmp\/miq_\d{8}-\d{5}-\w+\z/)
end
it "with :mount_point uses specified directory as a base" do
expect(described_class.new(:uri => '/tmp/abc', :mount_point => "abc").mount_share).to match(/\Aabc\/miq_\d{8}-\d{5}-\w+\z/)
end
it "is unique" do
expect(described_class.new(:uri => '/tmp/abc').mount_share).to_not eq(described_class.new(:uri => '/tmp/abc').mount_share)
end
end
end
|
Remove a unused method: last_update_time_str. | module FeideeUtils
class Record
module Accessors
def poid
@field[self.class.id_field_name]
end
def last_update_time
timestamp_to_time(@field["lastUpdateTime"])
end
def last_update_time_str
# Only date is in the timestamp. (on iOS)
last_update_time.strftime("%F")
end
module ClassMethods
def define_accessors field_mappings
field_mappings.each do |name, key|
raise "Accessor #{name} already exists in #{self.name}." if method_defined? name
define_method name do field[key] end
end
end
end
end
end
end
| module FeideeUtils
class Record
module Accessors
def poid
@field[self.class.id_field_name]
end
def last_update_time
timestamp_to_time(@field["lastUpdateTime"])
end
module ClassMethods
def define_accessors field_mappings
field_mappings.each do |name, key|
raise "Accessor #{name} already exists in #{self.name}." if method_defined? name
define_method name do field[key] end
end
end
end
end
end
end
|
Implement lesson_plan_item type for video | json.partial! 'course/lesson_plan/items/item.json.jbuilder', item: item
json.item_path course_videos_path(current_course)
json.description item.description
json.edit_path edit_course_video_path(current_course, item) if can?(:update, item)
json.delete_path course_video_path(current_course, item) if can?(:destroy, item)
type = current_component_host[:course_videos_component].try(:title) || t('components.video.name')
json.lesson_plan_item_type [type]
| |
Add unique index for lowe spree option value name and option type id | class AddUniqueIndexToSpreeOptionValueName < SpreeExtension::Migration[4.2]
def change
return if check_lower_index_exists?
attributes = Spree::OptionValue.unscoped.group(:name, :option_type_id).having('sum(1) > 1').pluck(:name,
:option_type_id)
instances = Spree::OptionValue.where(name: [nil, attributes[0]], option_type_id: attributes[1])
instances.find_each do |instance|
column_value = "#{instance.name} #{SecureRandom.urlsafe_base64(8).upcase.delete('/+=_-')[0, 8]}"
instance.update(name: column_value)
end
remove_index :spree_option_values, :name if index_exists?(:spree_option_values, %i[name option_type_id])
if supports_expression_index?
add_index :spree_option_values, 'lower(name), option_type_id', unique: true
else
add_index :spree_option_values, %i[name option_type_id], unique: true
end
end
private
def check_lower_index_exists?
if supports_expression_index?
ActiveRecord::Base.connection.indexes(:spree_option_values).any? do |struct|
struct.columns.eql?('lower((name)::text, option_type_id)')
end
else
index_exists?(:spree_option_values, %i[name option_type_id], unique: true)
end
end
end
| |
Create a factory for category repository items | FactoryGirl.define do
factory :category_hash, class: Hash do
sequence(:id) { |i| "category-#{i}" }
title { id.capitalize.sub(/-/, ' ') }
contents []
initialize_with do
Hash[attributes.map { |key, value| [key.to_s, value] }]
end
end
end
| |
Add reboot No need to process the response code, it's handled by Client | module Squall
class VirtualMachine < Client
URI_PREFIX = 'virtual_machines'
def list
servers = []
if get(URI_PREFIX)
@message.each { |res| servers.push(res['virtual_machine']) }
servers
else
false
end
end
def show(id)
get("#{URI_PREFIX}/#{id}") ? @message['virtual_machine'] : false
end
def edit(id, params = {})
valid = [ :primary_network_id,
:cpus,
:label,
:cpu_shares,
:template_id,
:swap_disk_size,
:memory,
:required_virtual_machine_build,
:hypervisor_id,
:required_ip_address_assignment,
:rate_limit,
:primary_disk_size,
:hostname,
:initial_root_password ]
valid_options!(valid, params)
put("#{URI_PREFIX}/#{id}", { :virtual_machine => params })
end
def create(params = {})
required = { :memory, :cpus, :label, :template_id, :hypervisor_id, :initial_root_password }
required_options!(required, params)
post(URI_PREFIX, { :virtual_machine => params })
@response.code == 201
end
def destroy(id)
delete("#{URI_PREFIX}/#{id}")
end
end
end
| module Squall
class VirtualMachine < Client
URI_PREFIX = 'virtual_machines'
def list
servers = []
if get(URI_PREFIX)
@message.each { |res| servers.push(res['virtual_machine']) }
servers
else
false
end
end
def show(id)
get("#{URI_PREFIX}/#{id}") ? @message['virtual_machine'] : false
end
def edit(id, params = {})
valid = [ :primary_network_id,
:cpus,
:label,
:cpu_shares,
:template_id,
:swap_disk_size,
:memory,
:required_virtual_machine_build,
:hypervisor_id,
:required_ip_address_assignment,
:rate_limit,
:primary_disk_size,
:hostname,
:initial_root_password ]
valid_options!(valid, params)
put("#{URI_PREFIX}/#{id}", { :virtual_machine => params })
end
def create(params = {})
required = { :memory, :cpus, :label, :template_id, :hypervisor_id, :initial_root_password }
required_options!(required, params)
post(URI_PREFIX, { :virtual_machine => params })
end
def destroy(id)
delete("#{URI_PREFIX}/#{id}")
end
def reboot(id)
post("#{URI_PREFIX}/#{id}/reboot")
end
end
end
|
Allow further options in Passage struct. | # - encoding: utf-8 -
require 'scripref/parser'
require 'scripref/processor'
require 'scripref/english'
require 'scripref/german'
module Scripref
VERSION = '0.2.0'
Passage = Struct.new(:text, :b1, :c1, :v1, :b2, :c2, :v2)
class Passage
alias to_s text
end
end
| # - encoding: utf-8 -
require 'scripref/parser'
require 'scripref/processor'
require 'scripref/english'
require 'scripref/german'
module Scripref
VERSION = '0.2.0'
Passage = Struct.new(:text, :b1, :c1, :v1, :b2, :c2, :v2, :a1, :a2) do
def initialize text, b1, c1, v1, b2, c2, v2, opts={}
super text, b1, c1, v1, b2, c2, v2, opts[:a1], opts[:a2]
end
alias to_s text
end
end
|
Establish a heartbeat sender using redis lock | redlock = Redlock::Client.new([$redis])
Thread.new do
loop do
redlock.lock('master', 45100) do |locked|
if locked
counter = 1
45.times do
$redis.publish 'heartbeat', counter.to_json unless $redis.nil?
counter += 1
sleep 1
end
else
sleep 10000
end
end
end
end
| |
Fix a replacement for an attachment | attachment_data_to_modify = AttachmentData.find(787481)
# 845347 isn't the replacement, but use it to find it. This avoids the
# situation where the replacement changes before this code runs
replacement = AttachmentData.find(845347).replaced_by
attachment_data_to_modify.replace_with!(replacement)
AssetManager::AttachmentUpdater.call(attachment_data_to_modify, replacement_id: true)
| |
Add option for other subcommands and ENV fallbacks | use structopt::StructOpt;
/// The CaSILE command line interface, publishing automation level wizard.
#[derive(StructOpt)]
struct Cli {
// #[structopt(short, long)]
// version: [ version = env!("VERGEN_SEMVER") ],
/// Activate debug mode
#[structopt(short, long)]
debug: bool,
/// Outputs verbose feedback where possible
#[structopt(short, long)]
verbose: bool,
#[structopt(subcommand)]
command: Command,
}
#[derive(StructOpt)]
enum Command {
/// Executes a make target
Make {
target: String
}
}
fn main() {
let args = Cli::from_args();
println!("Insert magic potion!");
println!("Build SHA: {}", env!("VERGEN_SHA_SHORT"));
}
| use structopt::StructOpt;
/// The CaSILE command line interface, publishing automation level wizard.
#[derive(StructOpt)]
struct Cli {
// #[structopt(short, long)]
// version: [ version = env!("VERGEN_SEMVER") ],
/// Activate debug mode
#[structopt(short, long, env = "DEBUG")]
debug: bool,
/// Outputs verbose feedback where possible
#[structopt(short, long)]
verbose: bool,
#[structopt(subcommand)]
command: Command,
}
#[derive(StructOpt)]
enum Command {
/// Executes a make target
Make {
target: String
},
/// Pass through other commands to shell
#[structopt(external_subcommand)]
Other(Vec<String>),
}
fn main() {
let args = Cli::from_args();
println!("Insert magic potion!");
println!("Build SHA: {}", env!("VERGEN_SHA_SHORT"));
}
|
Add MetricHolder::new() and use LinkedList | extern crate rand;
const N: usize = 3;
struct MetricHolder {
// TODO: make N a part of the metric holder and use a vector
// also improve by persisting the sum instead of doing avg every time
history: [u32; N],
counter: usize
}
impl MetricHolder {
fn moving_average(&mut self, new_val: u32) -> f64 {
self.history[self.counter] = new_val;
self.counter = (self.counter + 1) % N;
let mut sum: u32 = 0;
let mut count: u32 = 0;
for &el in self.history.iter() {
sum += el;
if el != 0 {
count += 1;
}
}
sum as f64 / count as f64
}
}
fn main() {
// TODO: implement default
let mut metric_holder = MetricHolder {
history: [0; N],
counter: 0
};
println!("{}", metric_holder.moving_average(1));
println!("{}", metric_holder.moving_average(1));
println!("{}", metric_holder.moving_average(1));
println!("{}", metric_holder.moving_average(2));
println!("{}", metric_holder.moving_average(3));
}
| use std::collections::LinkedList;
struct MetricHolder {
n: usize,
sum: u32,
history: LinkedList<u32>
}
impl MetricHolder {
fn moving_average(&mut self, new_val: u32) -> f64 {
self.history.push_back(new_val);
self.sum += new_val;
if self.history.len() > self.n {
let old_val = match self.history.pop_front() {
Some(val) => val,
None => 0,
};
self.sum -= old_val;
}
self.sum as f64 / self.history.len() as f64
}
fn new(n: usize) -> MetricHolder {
MetricHolder {
n: n,
sum: 0,
history: LinkedList::new()
}
}
}
fn main() {
let mut metric_holder = MetricHolder::new(2);
println!("{}", metric_holder.moving_average(1));
println!("{}", metric_holder.moving_average(1));
println!("{}", metric_holder.moving_average(1));
println!("{}", metric_holder.moving_average(2));
println!("{}", metric_holder.moving_average(3));
}
|
Fix Bug in PNG Encoding | use std::marker::PhantomData;
#[derive(Copy, Clone)]
pub enum ColorType {
RGBA(u8),
}
pub struct ImageBuffer<T, U = ()>(U, PhantomData<T>);
pub struct Rgba<T>(PhantomData<T>);
pub mod png {
use super::ColorType;
use super::super::png::Encoder;
use std::io::Write;
pub struct PNGEncoder<W>(W);
impl<W: Write> PNGEncoder<W> {
pub fn new(w: W) -> Self {
PNGEncoder(w)
}
pub fn encode(&mut self, d: &[u8], w: u32, h: u32, _: ColorType) -> Result<(), ()> {
Encoder::new(&mut self.0, w, h)
.write_header()
.map_err(|_| ())?
.write_image_data(d)
.map_err(|_| ())
}
}
}
impl<T, U> ImageBuffer<T, U> {
pub fn from_raw(_: u32, _: u32, d: U) -> Option<ImageBuffer<Rgba<u8>, U>> {
Some(ImageBuffer(d, PhantomData))
}
pub fn as_ref(&self) -> &[u8]
where
U: AsRef<[u8]>,
{
self.0.as_ref()
}
}
| use std::marker::PhantomData;
#[derive(Copy, Clone)]
pub enum ColorType {
RGBA(u8),
}
pub struct ImageBuffer<T, U = ()>(U, PhantomData<T>);
pub struct Rgba<T>(PhantomData<T>);
pub mod png {
use super::ColorType;
use super::super::png::{self as pnglib, HasParameters};
use std::io::Write;
pub struct PNGEncoder<W>(W);
impl<W: Write> PNGEncoder<W> {
pub fn new(w: W) -> Self {
PNGEncoder(w)
}
pub fn encode(&mut self, d: &[u8], w: u32, h: u32, _: ColorType) -> Result<(), ()> {
let mut encoder = pnglib::Encoder::new(&mut self.0, w, h);
encoder
.set(pnglib::ColorType::RGBA)
.set(pnglib::BitDepth::Eight);
encoder
.write_header()
.map_err(|_| ())?
.write_image_data(d)
.map_err(|_| ())
}
}
}
impl<T, U> ImageBuffer<T, U> {
pub fn from_raw(_: u32, _: u32, d: U) -> Option<ImageBuffer<Rgba<u8>, U>> {
Some(ImageBuffer(d, PhantomData))
}
pub fn as_ref(&self) -> &[u8]
where
U: AsRef<[u8]>,
{
self.0.as_ref()
}
}
|
Make possible to load config file | extern crate toml;
use config::Config;
pub struct Workflow {
config: Config,
}
pub fn new() -> Workflow {
let config = load_config();
Workflow {
config: config,
}
}
fn load_config() -> Result<Config, String> {
let mut config_file: String = env::home_dir().unwrap();
config_file.push(".jiraconfig");
match toml::from_str(&config_file) {
Ok(c) => c,
Err(_) => Err("Failed load config file")
}
} | extern crate toml;
use config::Config;
use std::env;
use std::fs::File;
use std::io::Read;
pub struct Workflow {
config: Config,
}
pub fn new() -> Workflow {
let config = load_config().unwrap();
Workflow {
config: config,
}
}
fn load_config() -> Result<Config, String> {
let mut config_file = env::home_dir().unwrap();
config_file.push(".jiraconfig");
let mut file = File::open(&config_file).expect("open config file");
let mut toml_string = String::new();
file.read_to_string(&mut toml_string).unwrap();
match toml::from_str(&toml_string) {
Ok(c) => c,
Err(_) => Err("Failed load config file".to_string())
}
} |
Fix comment referring to formerly-above code | use crate::rc::Rc;
use crate::sync::Arc;
use core::panic::{RefUnwindSafe, UnwindSafe};
// not covered via the Shared impl above b/c the inner contents use
// Cell/AtomicUsize, but the usage here is unwind safe so we can lift the
// impl up one level to Arc/Rc itself
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized> UnwindSafe for Rc<T> {}
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized> UnwindSafe for Arc<T> {}
| use crate::rc::Rc;
use crate::sync::Arc;
use core::panic::{RefUnwindSafe, UnwindSafe};
// not covered via the Shared impl in libcore b/c the inner contents use
// Cell/AtomicUsize, but the usage here is unwind safe so we can lift the
// impl up one level to Arc/Rc itself
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized> UnwindSafe for Rc<T> {}
#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized> UnwindSafe for Arc<T> {}
|
Rename the Scene constructor to Scene::new | // Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use color::Color;
use geom::size::Size2D;
use geom::matrix::Matrix4;
use layers::Layer;
use std::rc::Rc;
pub struct Scene<T> {
pub root: Option<Rc<Layer<T>>>,
pub size: Size2D<f32>,
pub transform: Matrix4<f32>,
pub background_color: Color
}
pub fn Scene<T>(size: Size2D<f32>, transform: Matrix4<f32>) -> Scene<T> {
Scene {
root: None,
size: size,
transform: transform,
background_color: Color {
r: 0.38f32,
g: 0.36f32,
b: 0.36f32,
a: 1.0f32
}
}
}
| // Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use color::Color;
use geom::size::Size2D;
use geom::matrix::Matrix4;
use layers::Layer;
use std::rc::Rc;
pub struct Scene<T> {
pub root: Option<Rc<Layer<T>>>,
pub size: Size2D<f32>,
pub transform: Matrix4<f32>,
pub background_color: Color
}
impl<T> Scene<T> {
pub fn new(size: Size2D<f32>, transform: Matrix4<f32>) -> Scene<T> {
Scene {
root: None,
size: size,
transform: transform,
background_color: Color {
r: 0.38f32,
g: 0.36f32,
b: 0.36f32,
a: 1.0f32
}
}
}
}
|
Add some more doc on fault handling issues and lack of work stealing | //! The distrib module provides methods for executing the rendering in a
//! distributed environment across multiple nodes. The worker module provides
//! the Worker which does the actual job of rendering a subsection of the image.
//! The master module provides the Master which instructs the Workers and collects
//! their results.
//!
//! **Note:** At this time I do nothing for distributed fault handling.
use bincode::rustc_serialize::encoded_size;
pub use self::worker::Worker;
pub use self::master::Master;
pub mod worker;
pub mod master;
#[derive(Debug, Clone, RustcEncodable, RustcDecodable)]
struct Instructions {
pub encoded_size: u64,
pub scene: String,
pub frames: (usize, usize),
pub block_start: usize,
pub block_count: usize,
}
impl Instructions {
pub fn new(scene: &String, frames: (usize, usize), block_start: usize,
block_count: usize) -> Instructions {
let mut instr = Instructions { encoded_size: 0, scene: scene.clone(), frames: frames,
block_start: block_start, block_count: block_count };
instr.encoded_size = encoded_size(&instr);
instr
}
}
| //! The distrib module provides methods for executing the rendering in a
//! distributed environment across multiple nodes. The worker module provides
//! the Worker which does the actual job of rendering a subsection of the image.
//! The master module provides the Master which instructs the Workers and collects
//! their results.
//!
//! **Note:** At this time I do nothing for distributed fault handling or work
//! stealing. If a node crashes during rendering it's results will be lost and the
//! master will hang forever waiting to hear back from the crashed node.
use bincode::rustc_serialize::encoded_size;
pub use self::worker::Worker;
pub use self::master::Master;
pub mod worker;
pub mod master;
#[derive(Debug, Clone, RustcEncodable, RustcDecodable)]
struct Instructions {
pub encoded_size: u64,
pub scene: String,
pub frames: (usize, usize),
pub block_start: usize,
pub block_count: usize,
}
impl Instructions {
pub fn new(scene: &String, frames: (usize, usize), block_start: usize,
block_count: usize) -> Instructions {
let mut instr = Instructions { encoded_size: 0, scene: scene.clone(), frames: frames,
block_start: block_start, block_count: block_count };
instr.encoded_size = encoded_size(&instr);
instr
}
}
|
Test function has been renamed | #[macro_use] extern crate match_any;
use std::any::Any;
fn make_any<T: Any>(value: T) -> Box<Any> {
Box::new(value)
}
#[test]
fn empty() {
match_any!(make_any(10) =>
x: i32 => {
assert_eq!(*x, 10);
}
);
}
// TODO: Add.
// #[bench]
| #[macro_use] extern crate match_any;
use std::any::Any;
fn make_any<T: Any>(value: T) -> Box<Any> {
Box::new(value)
}
#[test]
fn match_i32() {
match_any!(make_any(10) =>
x: i32 => {
assert_eq!(*x, 10);
}
);
}
// TODO: Add.
// #[bench]
|
Use the s-expression printing in the REPL | use cell_gc;
use parser::parse;
use std::io::{self, Write};
use vm;
pub fn repl() -> io::Result<()> {
cell_gc::with_heap(|hs| {
let env = vm::Value::default_env(hs);
loop {
{
let stdout = io::stdout();
let mut stdout = stdout.lock();
write!(&mut stdout, "lisp> ")?;
stdout.flush()?;
}
// Read
let mut source = String::new();
io::stdin().read_line(&mut source)?;
let expr = parse(hs, &source)
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
// Eval
let result = vm::eval(hs, expr, &env);
// Print
println!("{:?}", result);
// Loop...
}
// Unreachable...
})
}
| use cell_gc;
use parser::parse;
use print::print;
use std::io::{self, Write};
use vm;
pub fn repl() -> io::Result<()> {
cell_gc::with_heap(|hs| {
let env = vm::Value::default_env(hs);
loop {
{
let stdout = io::stdout();
let mut stdout = stdout.lock();
write!(&mut stdout, "lisp> ")?;
stdout.flush()?;
}
// Read
let mut source = String::new();
io::stdin().read_line(&mut source)?;
let expr = parse(hs, &source)
.map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
// Eval
let result = vm::eval(hs, expr, &env);
// Print
print(val);
println!();
// Loop...
}
// Unreachable...
})
}
|
Remove print on game exit | extern crate json;
extern crate tiled;
extern crate cgmath;
#[macro_use]
extern crate gfx;
extern crate specs;
extern crate genmesh;
extern crate gfx_device_gl;
extern crate gfx_window_glutin;
extern crate glutin;
extern crate image;
extern crate rodio;
mod gfx_app;
mod game;
mod data;
mod critter;
mod graphics;
mod terrain;
mod character;
mod shaders;
mod zombie;
fn main() {
let mut window = gfx_app::GlutinWindow::new();
loop {
match gfx_app::init::run(&mut window) {
gfx_app::GameStatus::Quit => {
println!("Game was quit");
break;
}
gfx_app::GameStatus::Render => (),
}
}
}
| extern crate json;
extern crate tiled;
extern crate cgmath;
#[macro_use]
extern crate gfx;
extern crate specs;
extern crate genmesh;
extern crate gfx_device_gl;
extern crate gfx_window_glutin;
extern crate glutin;
extern crate image;
extern crate rodio;
mod gfx_app;
mod game;
mod data;
mod critter;
mod graphics;
mod terrain;
mod character;
mod shaders;
mod zombie;
fn main() {
let mut window = gfx_app::GlutinWindow::new();
loop {
match gfx_app::init::run(&mut window) {
gfx_app::GameStatus::Quit => break,
gfx_app::GameStatus::Render => (),
}
}
}
|
Set Content-Type header to application/json | extern crate iron;
extern crate rustc_serialize;
use iron::prelude::*;
use iron::status;
use diesel::prelude::*;
use rustc_serialize::json;
use database;
pub fn index(request: &mut Request) -> IronResult<Response> {
println!("Started GET \"/api\" for {}", request.remote_addr);
Ok(Response::with((status::Ok, "Welcome to Lugh API!")))
}
pub fn translations(request: &mut Request) -> IronResult<Response> {
use schema::translations::dsl::*;
use models::*;
println!("Started GET \"/api/translations\" for {}", request.remote_addr);
let connection = database::establish_connection();
let results = translations.load::<Translation>(&connection)
.expect("Error loading translations");
println!("Returns {} translations", results.len());
let payload = json::encode(&results).unwrap();
Ok(Response::with((status::Ok, payload)))
}
| extern crate iron;
extern crate rustc_serialize;
use iron::headers::ContentType;
use iron::prelude::*;
use iron::status;
use diesel::prelude::*;
use rustc_serialize::json;
use database;
pub fn index(request: &mut Request) -> IronResult<Response> {
println!("Started GET \"/api\" for {}", request.remote_addr);
Ok(Response::with((status::Ok, "Welcome to Lugh API!")))
}
pub fn translations(request: &mut Request) -> IronResult<Response> {
use schema::translations::dsl::*;
use models::*;
println!("Started GET \"/api/translations\" for {}", request.remote_addr);
let connection = database::establish_connection();
let results = translations.load::<Translation>(&connection)
.expect("Error loading translations");
println!("Returns {} translations", results.len());
let payload = json::encode(&results).unwrap();
Ok(Response::with((ContentType::json().0, status::Ok, payload)))
}
|
Move down |pub use| under |mod|. | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*!
# Autogard
*/
#![crate_name = "autograd"]
#![crate_type = "rlib"]
#![feature(thread_local)]
pub use context::Context;
pub use context::ContextImpl;
pub use float::Float;
mod context;
mod float;
#[cfg(test)]
mod tests;
| /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*!
# Autogard
*/
#![crate_name = "autograd"]
#![crate_type = "rlib"]
#![feature(thread_local)]
mod context;
mod float;
#[cfg(test)]
mod tests;
pub use context::Context;
pub use context::ContextImpl;
pub use float::Float;
|
Convert rb_str_len() result to i64 | use std::slice;
use ruby_sys::string;
use types::Value;
use util;
pub fn new(string: &str) -> Value {
let str = util::str_to_cstring(string);
unsafe { string::rb_str_new_cstr(str.as_ptr()) }
}
pub fn from_value(value: Value) -> String {
let str = unsafe { string::rb_string_value_cstr(&value) };
util::cstr_as_string(str)
}
pub fn from_value_unchecked(value: Value) -> String {
unsafe {
let str = string::rb_string_value_ptr(&value) as *const u8;
let len = string::rb_str_len(value) as usize;
let vec = slice::from_raw_parts(str, len).to_vec();
String::from_utf8_unchecked(vec)
}
}
pub fn bytesize(value: Value) -> i64 {
unsafe { string::rb_str_len(value) }
}
| use std::slice;
use ruby_sys::string;
use types::Value;
use util;
pub fn new(string: &str) -> Value {
let str = util::str_to_cstring(string);
unsafe { string::rb_str_new_cstr(str.as_ptr()) }
}
pub fn from_value(value: Value) -> String {
let str = unsafe { string::rb_string_value_cstr(&value) };
util::cstr_as_string(str)
}
pub fn from_value_unchecked(value: Value) -> String {
unsafe {
let str = string::rb_string_value_ptr(&value) as *const u8;
let len = string::rb_str_len(value) as usize;
let vec = slice::from_raw_parts(str, len).to_vec();
String::from_utf8_unchecked(vec)
}
}
pub fn bytesize(value: Value) -> i64 {
unsafe { string::rb_str_len(value) as i64 }
}
|
Modify the K20 example to check the systick flag | #![feature(phase)]
#![crate_type="staticlib"]
#![no_std]
extern crate core;
extern crate zinc;
use core::option::Some;
use zinc::hal::k20::pin;
use zinc::hal::pin::GPIO;
use zinc::hal::cortex_m4::systick;
/// Wait the given number of SysTick ticks
pub fn wait(ticks: u32) {
let mut n = ticks;
let mut t = systick::get_current();
loop {
if systick::get_current() > t {
t = systick::get_current();
n -= 1;
if n == 0 {
break;
}
}
}
}
#[no_mangle]
#[no_split_stack]
#[allow(unused_variable)]
#[allow(dead_code)]
pub unsafe fn main() {
zinc::hal::mem_init::init_stack();
zinc::hal::mem_init::init_data();
// Pins for MC HCK (http://www.mchck.org/)
let led1 = pin::Pin::new(pin::PortB, 16, pin::GPIO, Some(zinc::hal::pin::Out));
systick::setup(480000, false);
systick::enable();
loop {
led1.set_high();
wait(10);
led1.set_low();
wait(10);
}
}
| #![feature(phase)]
#![crate_type="staticlib"]
#![no_std]
extern crate core;
extern crate zinc;
use core::option::Some;
use zinc::hal::k20::pin;
use zinc::hal::pin::GPIO;
use zinc::hal::cortex_m4::systick;
/// Wait the given number of SysTick ticks
pub fn wait(ticks: u32) {
let mut n = ticks;
// Reset the tick flag
systick::tick();
loop {
if systick::tick() {
n -= 1;
if n == 0 {
break;
}
}
}
}
#[no_mangle]
#[no_split_stack]
#[allow(unused_variable)]
#[allow(dead_code)]
pub unsafe fn main() {
zinc::hal::mem_init::init_stack();
zinc::hal::mem_init::init_data();
// Pins for MC HCK (http://www.mchck.org/)
let led1 = pin::Pin::new(pin::PortB, 16, pin::GPIO, Some(zinc::hal::pin::Out));
systick::setup(480000, false);
systick::enable();
loop {
led1.set_high();
wait(10);
led1.set_low();
wait(10);
}
}
|
Fix for the latest nightly | //! Intrinsics for panic handling
use interrupt;
#[lang = "eh_personality"]
pub extern "C" fn eh_personality() {}
/// Required to handle panics
#[lang = "panic_fmt"]
#[no_mangle]
pub extern "C" fn rust_begin_unwind(fmt: ::core::fmt::Arguments, file: &str, line: u32) -> ! {
println!("PANIC: {}", fmt);
println!("FILE: {}", file);
println!("LINE: {}", line);
unsafe { interrupt::stack_trace(); }
println!("HALT");
loop {
unsafe { interrupt::halt(); }
}
}
#[allow(non_snake_case)]
#[no_mangle]
/// Required to handle panics
pub extern "C" fn _Unwind_Resume() -> ! {
loop {
unsafe { interrupt::halt(); }
}
}
| //! Intrinsics for panic handling
use interrupt;
#[lang = "eh_personality"]
#[no_mangle]
pub extern "C" fn rust_eh_personality() {}
/// Required to handle panics
#[lang = "panic_fmt"]
#[no_mangle]
pub extern "C" fn rust_begin_unwind(fmt: ::core::fmt::Arguments, file: &str, line: u32) -> ! {
println!("PANIC: {}", fmt);
println!("FILE: {}", file);
println!("LINE: {}", line);
unsafe { interrupt::stack_trace(); }
println!("HALT");
loop {
unsafe { interrupt::halt(); }
}
}
#[allow(non_snake_case)]
#[no_mangle]
/// Required to handle panics
pub extern "C" fn _Unwind_Resume() -> ! {
loop {
unsafe { interrupt::halt(); }
}
}
|
Document !Send and !Sync for Context | use ::handle::Handle;
/// A libudev context.
pub struct Context {
udev: *mut ::ffi::udev,
}
impl Clone for Context {
/// Increments reference count of `libudev` context.
fn clone(&self) -> Self {
Context {
udev: unsafe { ::ffi::udev_ref(self.udev) },
}
}
}
impl Drop for Context {
/// Decrements reference count of `libudev` context.
fn drop(&mut self) {
unsafe {
::ffi::udev_unref(self.udev);
}
}
}
#[doc(hidden)]
impl Handle<::ffi::udev> for Context {
fn as_ptr(&self) -> *mut ::ffi::udev {
self.udev
}
}
impl Context {
/// Creates a new context.
pub fn new() -> ::Result<Self> {
Ok(Context {
udev: try_alloc!(unsafe { ::ffi::udev_new() }),
})
}
}
| use ::handle::Handle;
/// A libudev context. Contexts may not be sent or shared between threads. The `libudev(3)` manpage
/// says:
///
/// > All functions require a libudev context to operate. This context can be create via
/// > udev_new(3). It is used to track library state and link objects together. No global state is
/// > used by libudev, everything is always linked to a udev context. Furthermore, multiple
/// > different udev contexts can be used in parallel by multiple threads. However, a single
/// > context must not be accessed by multiple threads in parallel.
///
/// In Rust, that means that `Context` is `!Send` and `!Sync`. This means a `Context` must be
/// created in the thread where it will be used. Several contexts can exist in separate threads,
/// but they can not be sent between threads.
///
/// Other types in this library (`Device`, `Enumerator`, `Monitor`, etc.) share a reference to a
/// context, which means that these types must also be `!Send` and `!Sync`.
pub struct Context {
udev: *mut ::ffi::udev,
}
impl Clone for Context {
/// Increments reference count of `libudev` context.
fn clone(&self) -> Self {
Context {
udev: unsafe { ::ffi::udev_ref(self.udev) },
}
}
}
impl Drop for Context {
/// Decrements reference count of `libudev` context.
fn drop(&mut self) {
unsafe {
::ffi::udev_unref(self.udev);
}
}
}
#[doc(hidden)]
impl Handle<::ffi::udev> for Context {
fn as_ptr(&self) -> *mut ::ffi::udev {
self.udev
}
}
impl Context {
/// Creates a new context.
pub fn new() -> ::Result<Self> {
Ok(Context {
udev: try_alloc!(unsafe { ::ffi::udev_new() }),
})
}
}
|
Add test for optional backtrace | #![cfg(thiserror_nightly_testing)]
#![feature(backtrace)]
use std::backtrace::Backtrace;
use thiserror::Error;
#[derive(Error, Debug)]
#[error("...")]
pub struct OptSourceNoBacktraceStruct {
#[source]
source: Option<anyhow::Error>,
}
#[derive(Error, Debug)]
#[error("...")]
pub struct OptSourceAlwaysBacktraceStruct {
#[source]
source: Option<anyhow::Error>,
backtrace: Backtrace,
}
#[derive(Error, Debug)]
pub enum OptSourceNoBacktraceEnum {
#[error("...")]
Test {
#[source]
source: Option<anyhow::Error>,
},
}
#[derive(Error, Debug)]
pub enum OptSourceAlwaysBacktraceEnum {
#[error("...")]
Test {
#[source]
source: Option<anyhow::Error>,
backtrace: Backtrace,
},
}
| #![cfg(thiserror_nightly_testing)]
#![feature(backtrace)]
use std::backtrace::Backtrace;
use thiserror::Error;
pub mod structs {
use super::*;
#[derive(Error, Debug)]
#[error("...")]
pub struct OptSourceNoBacktrace {
#[source]
source: Option<anyhow::Error>,
}
#[derive(Error, Debug)]
#[error("...")]
pub struct OptSourceAlwaysBacktrace {
#[source]
source: Option<anyhow::Error>,
backtrace: Backtrace,
}
#[derive(Error, Debug)]
#[error("...")]
pub struct NoSourceOptBacktrace {
#[backtrace]
backtrace: Option<Backtrace>,
}
#[derive(Error, Debug)]
#[error("...")]
pub struct AlwaysSourceOptBacktrace {
source: anyhow::Error,
#[backtrace]
backtrace: Option<Backtrace>,
}
#[derive(Error, Debug)]
#[error("...")]
pub struct OptSourceOptBacktrace {
#[source]
source: Option<anyhow::Error>,
#[backtrace]
backtrace: Option<Backtrace>,
}
}
pub mod enums {
use super::*;
#[derive(Error, Debug)]
pub enum OptSourceNoBacktrace {
#[error("...")]
Test {
#[source]
source: Option<anyhow::Error>,
},
}
#[derive(Error, Debug)]
pub enum OptSourceAlwaysBacktrace {
#[error("...")]
Test {
#[source]
source: Option<anyhow::Error>,
backtrace: Backtrace,
},
}
#[derive(Error, Debug)]
pub enum NoSourceOptBacktrace {
#[error("...")]
Test {
#[backtrace]
backtrace: Option<Backtrace>,
},
}
#[derive(Error, Debug)]
pub enum AlwaysSourceOptBacktrace {
#[error("...")]
Test {
source: anyhow::Error,
#[backtrace]
backtrace: Option<Backtrace>,
},
}
#[derive(Error, Debug)]
pub enum OptSourceOptBacktrace {
#[error("...")]
Test {
#[source]
source: Option<anyhow::Error>,
#[backtrace]
backtrace: Option<Backtrace>,
},
}
}
|
Split line to fix tidy | #![feature(re_rebalance_coherence)]
// compile-flags:--crate-name=test
// aux-build:coherence_lib.rs
extern crate coherence_lib as lib;
use lib::*;
use std::rc::Rc;
struct Local;
impl Remote1<u32> for f64 {
//~^ ERROR only traits defined in the current crate can be implemented for arbitrary types [E0117]
}
fn main() {}
| #![feature(re_rebalance_coherence)]
// compile-flags:--crate-name=test
// aux-build:coherence_lib.rs
extern crate coherence_lib as lib;
use lib::*;
use std::rc::Rc;
struct Local;
impl Remote1<u32> for f64 {
//~^ ERROR only traits defined in the current crate
// | can be implemented for arbitrary types [E0117]
}
fn main() {}
|
Move tests into separate module | #![feature(unsafe_destructor, if_let, globs)]
extern crate libc;
pub use udev::udev::Udev;
mod udev;
pub mod hwdb {
pub use udev::hwdb::{
Hwdb,
Query,
HwdbIterator,
};
}
pub mod device {
pub use udev::device::{
Device,
Devnum,
Type,
TagIterator,
AttributeIterator,
DevlinkIterator,
PropertyIterator,
};
}
pub mod enumerator {
pub use udev::enumerator::{
Enumerator,
DeviceIterator,
DevicePathIterator,
};
}
pub mod monitor {
pub use udev::monitor::{
Monitor,
Event,
Action,
MonitorIterator,
};
}
#[test]
fn test_ttys() {
let udev = Udev::new();
let mut vec = Vec::with_capacity(64);
let mut found_tty = false;
for dev in udev.enumerator().match_subsystem("tty").scan_devices().iter() {
assert!(dev.subsystem().unwrap() == "tty");
if dev.sysname().starts_with("tty") {
match dev.sysnum() {
Some(num) => vec.push(num),
None => {
assert!(!found_tty);
found_tty = true;
}
}
}
}
vec.sort();
assert!(vec.into_iter().zip(range(0u64, 64u64)).all(|(i, j)| i == j));
}
| #![feature(unsafe_destructor, if_let, globs)]
extern crate libc;
pub use udev::udev::Udev;
mod udev;
pub mod hwdb {
pub use udev::hwdb::{
Hwdb,
Query,
HwdbIterator,
};
}
pub mod device {
pub use udev::device::{
Device,
Devnum,
Type,
TagIterator,
AttributeIterator,
DevlinkIterator,
PropertyIterator,
};
}
pub mod enumerator {
pub use udev::enumerator::{
Enumerator,
DeviceIterator,
DevicePathIterator,
};
}
pub mod monitor {
pub use udev::monitor::{
Monitor,
Event,
Action,
MonitorIterator,
};
}
#[cfg(test)]
mod test {
use Udev;
#[test]
fn test_ttys() {
let udev = Udev::new();
let mut vec = Vec::with_capacity(64);
let mut found_tty = false;
for dev in udev.enumerator().match_subsystem("tty").scan_devices().iter() {
assert!(dev.subsystem().unwrap() == "tty");
if dev.sysname().starts_with("tty") {
match dev.sysnum() {
Some(num) => vec.push(num),
None => {
assert!(!found_tty);
found_tty = true;
}
}
}
}
vec.sort();
assert!(vec.into_iter().zip(range(0u64, 64u64)).all(|(i, j)| i == j));
}
}
|
Use `compile_error!` instead of `use num_cpus` | #[cfg(doctest)]
use num_cpus as _;
#[cfg(test)]
compile_error!("Miri should not touch me");
| #[cfg(doctest)]
compile_error!("rustdoc should not touch me");
#[cfg(test)]
compile_error!("Miri should not touch me");
|
Fix typo ('commited', needs 2nd 't') | // Copyright 2015, Igor Shaula
// Licensed under the MIT License <LICENSE or
// http://opensource.org/licenses/MIT>. This file
// may not be copied, modified, or distributed
// except according to those terms.
extern crate winreg;
use std::io;
use winreg::RegKey;
use winreg::enums::*;
use winreg::transaction::Transaction;
fn main() {
let t = Transaction::new().unwrap();
let hkcu = RegKey::predef(HKEY_CURRENT_USER);
let key = hkcu.create_subkey_transacted("Software\\RustTransaction", &t).unwrap();
key.set_value("TestQWORD", &1234567891011121314u64).unwrap();
key.set_value("TestDWORD", &1234567890u32).unwrap();
println!("Commit transaction? [y/N]:");
let mut input = String::new();
io::stdin().read_line(&mut input).unwrap();
input = input.trim_right().to_owned();
if input == "y" || input == "Y" {
t.commit().unwrap();
println!("Transaction commited.");
}
else {
// this is optional, if transaction wasn't commited,
// it will be rolled back on disposal
t.rollback().unwrap();
println!("Transaction wasn't commited, it will be rolled back.");
}
}
| // Copyright 2015, Igor Shaula
// Licensed under the MIT License <LICENSE or
// http://opensource.org/licenses/MIT>. This file
// may not be copied, modified, or distributed
// except according to those terms.
extern crate winreg;
use std::io;
use winreg::RegKey;
use winreg::enums::*;
use winreg::transaction::Transaction;
fn main() {
let t = Transaction::new().unwrap();
let hkcu = RegKey::predef(HKEY_CURRENT_USER);
let key = hkcu.create_subkey_transacted("Software\\RustTransaction", &t).unwrap();
key.set_value("TestQWORD", &1234567891011121314u64).unwrap();
key.set_value("TestDWORD", &1234567890u32).unwrap();
println!("Commit transaction? [y/N]:");
let mut input = String::new();
io::stdin().read_line(&mut input).unwrap();
input = input.trim_right().to_owned();
if input == "y" || input == "Y" {
t.commit().unwrap();
println!("Transaction committed.");
}
else {
// this is optional, if transaction wasn't committed,
// it will be rolled back on disposal
t.rollback().unwrap();
println!("Transaction wasn't committed, it will be rolled back.");
}
}
|
Enable static linking support (similar to openssl-sys) | fn main () {
println!("cargo:rustc-flags=-l sodium");
}
| use std::env;
fn main () {
if let Some(lib_dir) = env::var("SODIUM_LIB_DIR").ok() {
println!("cargo:rustc-flags=-L native={}", lib_dir);
}
let mode = if env::var_os("SODIUM_STATIC").is_some() {
"static"
} else {
"dylib"
};
println!("cargo:rustc-flags=-l {0}=sodium", mode);
}
|
Use ast.generics.split_for_impl() to correctly generate impl with any generic parameters | #![feature(proc_macro)]
#![feature(proc_macro_lib)]
#![recursion_limit = "128"]
#[macro_use] extern crate quote;
extern crate proc_macro;
extern crate syn;
use proc_macro::TokenStream;
// Yield mock generated code for template
// Hello, {{name}} ({{age}})
#[proc_macro_derive(StacheDisplay)]
pub fn stache_display(input: TokenStream) -> TokenStream {
let s = input.to_string();
let ast = syn::parse_macro_input(&s).unwrap();
let type_name = &ast.ident;
let generics = &ast.generics;
let gen = quote! {
impl #generics std::fmt::Display for #type_name #generics {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
use display_html_safe::DisplayHtmlSafe;
f.write_str("Hello, ")?;
DisplayHtmlSafe::fmt(&self.name, f)?;
f.write_str(" (")?;
DisplayHtmlSafe::fmt(&self.age, f)?;
f.write_str(")\n")?;
Ok(())
}
}
};
gen.parse().unwrap()
}
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
}
}
| #![feature(proc_macro)]
#![feature(proc_macro_lib)]
#![recursion_limit = "128"]
#[macro_use] extern crate quote;
extern crate proc_macro;
extern crate syn;
use proc_macro::TokenStream;
// Yield mock generated code for template
// Hello, {{name}} ({{age}})
#[proc_macro_derive(StacheDisplay)]
pub fn stache_display(input: TokenStream) -> TokenStream {
let s = input.to_string();
let ast = syn::parse_macro_input(&s).unwrap();
let name = &ast.ident;
let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
let gen = quote! {
impl #impl_generics std::fmt::Display for #name #ty_generics #where_clause {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
use display_html_safe::DisplayHtmlSafe;
f.write_str("Hello, ")?;
DisplayHtmlSafe::fmt(&self.name, f)?;
f.write_str(" (")?;
DisplayHtmlSafe::fmt(&self.age, f)?;
f.write_str(")\n")?;
Ok(())
}
}
};
gen.parse().unwrap()
}
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
}
}
|
Add extra compile fail error | #[macro_use]
extern crate diesel;
use diesel::*;
table! {
int_primary_key {
id -> Serial,
}
}
table! {
string_primary_key {
id -> VarChar,
}
}
fn main() {
let connection = Connection::establish("").unwrap();
let one = connection.find(int_primary_key::table, "1".to_string()).unwrap();
//~^ ERROR E0277
//~| ERROR E0277
let string = connection.find(string_primary_key::table, 1).unwrap();
//~^ ERROR E0277
//~| ERROR E0277
}
| #[macro_use]
extern crate diesel;
use diesel::*;
table! {
int_primary_key {
id -> Serial,
}
}
table! {
string_primary_key {
id -> VarChar,
}
}
fn main() {
let connection = Connection::establish("").unwrap();
let one = connection.find(int_primary_key::table, "1".to_string()).unwrap();
//~^ ERROR E0277
//~| ERROR E0277
//~| ERROR E0277
//~| ERROR E0277
//~| ERROR E0277
let string = connection.find(string_primary_key::table, 1).unwrap();
//~^ ERROR E0277
//~| ERROR E0277
//~| ERROR E0277
//~| ERROR E0277
//~| ERROR E0277
}
|
Add regression test for macro expansion | // run-rustfix
#![warn(clippy::toplevel_ref_arg)]
#![allow(unused)]
fn main() {
// Closures should not warn
let y = |ref x| println!("{:?}", x);
y(1u8);
let ref x = 1;
let ref y: (&_, u8) = (&1, 2);
let ref z = 1 + 2;
let ref mut z = 1 + 2;
let (ref x, _) = (1, 2); // ok, not top level
println!("The answer is {}.", x);
// Make sure that allowing the lint works
#[allow(clippy::toplevel_ref_arg)]
let ref mut x = 1_234_543;
}
| // run-rustfix
#![warn(clippy::toplevel_ref_arg)]
#![allow(unused)]
fn main() {
// Closures should not warn
let y = |ref x| println!("{:?}", x);
y(1u8);
let ref x = 1;
let ref y: (&_, u8) = (&1, 2);
let ref z = 1 + 2;
let ref mut z = 1 + 2;
let (ref x, _) = (1, 2); // ok, not top level
println!("The answer is {}.", x);
let ref x = vec![1, 2, 3];
// Make sure that allowing the lint works
#[allow(clippy::toplevel_ref_arg)]
let ref mut x = 1_234_543;
}
|
Update IO references to use old_io module. | use std::io::{File, Open, ReadWrite};
use std::io::IoResult;
use super::GapBuffer;
use super::gap_buffer;
use super::Position;
use super::Range;
pub struct Buffer {
data: GapBuffer,
file: Option<File>,
}
pub fn from_file(path: &Path) -> IoResult<Buffer> {
// Try to open and read the file, returning any errors encountered.
let mut file = match File::open_mode(path, Open, ReadWrite) {
Ok(f) => f,
Err(error) => return Err(error),
};
let mut data = match file.read_to_string() {
Ok(d) => d,
Err(error) => return Err(error),
};
let data = gap_buffer::new(data);
// Create a new buffer using the loaded data, file, and other defaults.
Ok(Buffer{ data: data, file: Some(file) })
}
#[cfg(test)]
mod tests {
use super::from_file;
#[test]
fn from_file_loads_file_into_buffer() {
match from_file(&Path::new("tests/sample/file")) {
Ok(buffer) => assert_eq!(buffer.data.to_string(), "it works!\n"),
Err(error) => panic!(error),
}
}
}
| use std::old_io::{File, Open, ReadWrite};
use std::old_io::IoResult;
use super::GapBuffer;
use super::gap_buffer;
use super::Position;
use super::Range;
pub struct Buffer {
data: GapBuffer,
file: Option<File>,
}
pub fn from_file(path: &Path) -> IoResult<Buffer> {
// Try to open and read the file, returning any errors encountered.
let mut file = match File::open_mode(path, Open, ReadWrite) {
Ok(f) => f,
Err(error) => return Err(error),
};
let mut data = match file.read_to_string() {
Ok(d) => d,
Err(error) => return Err(error),
};
let data = gap_buffer::new(data);
// Create a new buffer using the loaded data, file, and other defaults.
Ok(Buffer{ data: data, file: Some(file) })
}
#[cfg(test)]
mod tests {
use super::from_file;
#[test]
fn from_file_loads_file_into_buffer() {
match from_file(&Path::new("tests/sample/file")) {
Ok(buffer) => assert_eq!(buffer.data.to_string(), "it works!\n"),
Err(error) => panic!(error),
}
}
}
|
Add basic loop to `Harness`. | use event::Reactor;
use render::{Context, GlutinContext, MetaContext};
pub struct Harness<A, C>
where A: Application,
C: MetaContext
{
application: A,
context: Context<C>,
}
impl<A> Harness<A, GlutinContext>
where A: Application
{
pub fn with_glutin_context(application: A) -> Self {
panic!()
}
}
pub trait Application {
fn reactors(&mut self) -> &mut [&mut Reactor];
}
| use glutin::Window;
use event::{Event, PollEvents, Reactor};
use render::{Context, GlutinContext, MetaContext};
pub struct Harness<A, C>
where A: Application<C>,
C: MetaContext
{
application: A,
context: Context<C>,
}
impl<A> Harness<A, GlutinContext>
where A: Application<GlutinContext>
{
pub fn from_glutin_window(application: A, window: Window) -> Self {
Harness {
application: application,
context: Context::from_glutin_window(window),
}
}
}
impl<A, C> Harness<A, C>
where A: Application<C>,
C: MetaContext
{
pub fn start(&mut self) {
'main: loop {
for event in self.context.window.poll_events() {
match event {
Event::Closed => {
break 'main;
}
_ => {}
}
self.application.react(&event);
}
self.context.clear();
self.application.render(&mut self.context);
self.context.flush().unwrap();
}
}
}
pub trait Application<C>: Reactor
where C: MetaContext
{
fn render(&mut self, context: &mut Context<C>);
}
|
Add (But not yet implement) the Minecraft protocol version declaration | //! Entry module for the Netherrack project
// We'll be using serialization a lot
extern crate rustc_serialize;
// Enable logging and use of logging macros throughout Netherrack
#[macro_use]
extern crate log;
// Allow use of Semantic Versioning througout Netherrack
extern crate semver;
// Also allow usage of Version through Netherrack
pub use semver::Version;
// Allow use of the core module
pub mod core;
// Allow access to I/O operations
pub mod io;
// Allow access to the universe
pub mod universe;
// Okay, this is the start of Netherrack's functions
/// The version of Netherrack as determined at compile time
pub const NETHERRACK_VERSION_STRING: &'static str = env!("CARGO_PKG_VERSION");
/// Gets the current version of Netherrack
pub fn get_version() -> Version {
Version::parse(NETHERRACK_VERSION_STRING).unwrap()
}
/// Starts a new Netherrack server instance
pub fn start_server() {
info!("Netherrack startup called");
trace!("Starting network in a new thread");
std::thread::spawn(move || {
io::network::start_network();
});
debug!("Networking should be set up");
loop {
//TODO: In the future, execute a tick. Use Duration's span function to get time taken, sleep the remainder, and go again
std::thread::sleep_ms(20);
}
} | //! Entry module for the Netherrack project
// We'll be using serialization a lot
extern crate rustc_serialize;
// Enable logging and use of logging macros throughout Netherrack
#[macro_use]
extern crate log;
// Allow use of Semantic Versioning througout Netherrack
extern crate semver;
// Also allow usage of Version through Netherrack
pub use semver::Version;
// Allow use of the core module
pub mod core;
// Allow access to I/O operations
pub mod io;
// Allow access to the universe
pub mod universe;
// Okay, this is the start of Netherrack's functions
/// The version of the Minecraft protocol accepted by Netherrack
pub const MINECRAFT_PROTOCOL_VERSION: u32 = 47;
/// The version of Netherrack as determined at compile time
pub const NETHERRACK_VERSION_STRING: &'static str = env!("CARGO_PKG_VERSION");
/// Gets the current version of Netherrack
pub fn get_version() -> Version {
Version::parse(NETHERRACK_VERSION_STRING).unwrap()
}
/// Starts a new Netherrack server instance
pub fn start_server() {
info!("Netherrack startup called");
trace!("Starting network in a new thread");
std::thread::spawn(move || {
io::network::start_network();
});
debug!("Networking should be set up");
loop {
//TODO: In the future, execute a tick. Use Duration's span function to get time taken, sleep the remainder, and go again
std::thread::sleep_ms(20);
}
} |
Make the error module public | //! A simple library for reading and writing Riegl .sdc files.
//!
//! .sdc files are simple binary tables of discrete-return LiDAR data.
extern crate byteorder;
mod error;
pub mod file;
pub mod point;
mod result;
pub use file::File;
| //! A simple library for reading and writing Riegl .sdc files.
//!
//! .sdc files are simple binary tables of discrete-return LiDAR data.
extern crate byteorder;
pub mod error;
pub mod file;
pub mod point;
mod result;
pub use file::File;
|
Add a TCP read timeout to network connection | use std::io::{BufReader, Write};
use std::net::{TcpListener, TcpStream};
use std::thread;
use birch::network::NetworkConnection;
use birch::socket::IRCReader;
fn main() {
// Hardcoding things for now just to test everything out.
let mut stream = TcpStream::connect("irc.freenode.net:6667").unwrap();
thread::spawn(move || {
let mut reader = BufReader::new(stream.try_clone().unwrap());
let mut net = NetworkConnection::new("ep`", &mut stream);
net.initialize().unwrap();
loop {
let msg = reader.read_message();
if let Ok(msg) = msg {
println!("[birch <- \u{1b}[37;1mnet\u{1b}[0m] {}", msg);
net.handle(&msg).expect("failed to handle message");
} else {
println!("read failed");
break;
}
}
});
// TODO: hook this up
let listener = TcpListener::bind("127.0.0.1:9123").unwrap();
println!("listening started, ready to accept");
for stream in listener.incoming() {
thread::spawn(|| {
let mut stream = stream.unwrap();
stream.write_all(b"Hello World\r\n").unwrap();
});
}
}
| use std::io::{BufReader, Write};
use std::net::{TcpListener, TcpStream};
use std::thread;
use std::time::Duration;
use birch::network::NetworkConnection;
use birch::socket::IRCReader;
fn main() {
// Hardcoding things for now just to test everything out.
let mut stream = TcpStream::connect("irc.freenode.net:6667").unwrap();
stream
.set_read_timeout(Some(Duration::from_secs(180)))
.unwrap();
thread::spawn(move || {
let mut reader = BufReader::new(stream.try_clone().unwrap());
let mut net = NetworkConnection::new("ep`", &mut stream);
net.initialize().unwrap();
loop {
let msg = reader.read_message();
if let Ok(msg) = msg {
println!("[birch <- \u{1b}[37;1mnet\u{1b}[0m] {}", msg);
net.handle(&msg).expect("failed to handle message");
} else {
println!("read failed");
break;
}
}
});
// TODO: hook this up
let listener = TcpListener::bind("127.0.0.1:9123").unwrap();
println!("listening started, ready to accept");
for stream in listener.incoming() {
thread::spawn(|| {
let mut stream = stream.unwrap();
stream.write_all(b"Hello World\r\n").unwrap();
});
}
}
|
Reimplement IsInDiary for StoreId with StoreId::is_in_collection() | use libimagstore::store::Entry;
use libimagstore::storeid::StoreId;
pub trait IsInDiary {
fn is_in_diary(&self, name: &str) -> bool;
}
impl IsInDiary for Entry {
fn is_in_diary(&self, name: &str) -> bool {
self.get_location().clone().is_in_diary(name)
}
}
impl IsInDiary for StoreId {
fn is_in_diary(&self, name: &str) -> bool {
self.to_str().map(|s| s.contains(name)).unwrap_or(false)
}
}
| use libimagstore::store::Entry;
use libimagstore::storeid::StoreId;
pub trait IsInDiary {
fn is_in_diary(&self, name: &str) -> bool;
}
impl IsInDiary for Entry {
fn is_in_diary(&self, name: &str) -> bool {
self.get_location().clone().is_in_diary(name)
}
}
impl IsInDiary for StoreId {
fn is_in_diary(&self, name: &str) -> bool {
self.is_in_collection(&["diary", name])
}
}
|
Add initialization of the console | use ::xen::StartInfo;
pub mod defs;
pub mod barrier;
pub mod memory;
extern {
// Start info is not present on all architecture, this is why this
// was made a global variable only for x86_*
pub static start_info: *const StartInfo;
}
pub fn init() {
unsafe {
memory::map_shared_info();
}
}
| use ::xen::StartInfo;
use ::xen::console::console;
use ::xen::console::ConsoleInterface;
pub mod defs;
pub mod barrier;
pub mod memory;
extern {
// Start info is not present on all architecture, this is why this
// was made a global variable only for x86_*
pub static start_info: *const StartInfo;
}
pub fn init() {
unsafe {
let console_vaddr: memory::Vaddr;
memory::map_shared_info();
console_vaddr = memory::mfn_to_vaddr((*start_info).domu_console.mfn);
console().set_port((*start_info).domu_console.evtchn);
console().set_interface(console_vaddr as *mut ConsoleInterface);
}
}
|
Fix format trait errors (breaking change) | fn _12_1(){
println!("guide 12-1");
let a = [1i, 2i, 3i];
let mut m = [2i, 3i, 4i];
let b = [0i, ..20]; // shorthand for array of 20 elements all initialized to 0
println!("{}", b);
m = [5i, 6i, 7i];
println!("{}", m);
for i in m.iter() {
println!("elem {}", i);
}
let names = ["Emilija", "Anzelika"];
println!("{} -> {}", names[1], names[0]);
}
fn _12_2(){
println!("guide 12-2");
let mut v = vec![1i, 2, 3];
v.push(4);
println!("{}, len is {}", v, v.len());
}
fn _12_3(){
println!("guide 12-3");
let mut a = vec![0i, 1, 2, 3, 4];
let middle = a.as_mut_slice();
middle[0] = 10i;
for e in middle.iter() {
println!("{}", e);
}
}
fn main(){
println!("guide 12");
_12_1();
_12_2();
_12_3();
}
| fn _12_1(){
println!("guide 12-1");
let a = [1i32, 2i32, 3i32];
let mut m = [2i32, 3i32, 4i32];
if false {
println!("{:?} {:?}", a, m);
}
let b = [0i32; 20]; // shorthand for array of 20 elements all initialized to 0
println!("{:?}", b);
m = [5i32, 6i32, 7i32];
println!("{:?}", m);
for i in m.iter() {
println!("elem {}", i);
}
let names = ["Emilija", "Anzelika"];
println!("{} -> {}", names[1], names[0]);
}
fn _12_2(){
println!("guide 12-2");
let mut v = vec![1i32, 2, 3];
v.push(4);
println!("{:?}, len is {}", v, v.len());
}
fn _12_3(){
println!("guide 12-3");
let mut a = vec![0i32, 1, 2, 3, 4];
let middle = a.as_mut_slice();
middle[0] = 10i32;
for e in middle.iter() {
println!("{}", e);
}
}
fn main(){
println!("guide 12");
_12_1();
_12_2();
_12_3();
}
|
Fix use of the new function signatures | #![feature(plugin)]
#![plugin(rustlex)]
#[allow(plugin_as_library)]
extern crate rustlex;
#[macro_use] extern crate log;
extern crate time;
extern crate term;
pub mod frontend;
pub mod backend;
pub use backend::zcode::zfile;
pub mod utils;
use frontend::codegen;
use std::io::{Read,Write};
pub fn compile<R: Read, W: Write>(input: &mut R, output: &mut W) {
// compile
//screen
let mut clean_input = frontend::screener::screen(input);
// tokenize
let tokens = frontend::lexer::lex(&mut clean_input);
debug!("");
for token in tokens.iter() {
debug!("{:?}", token);
}
// parse tokens and create ast
let ast = frontend::parser::parse_tokens(tokens);
ast.print();
// create code
codegen::generate_zcode(ast, output);
}
| #![feature(plugin)]
#![plugin(rustlex)]
#[allow(plugin_as_library)]
extern crate rustlex;
#[macro_use] extern crate log;
extern crate time;
extern crate term;
pub mod frontend;
pub mod backend;
pub use backend::zcode::zfile;
pub mod utils;
use frontend::codegen;
use std::io::{Read,Write};
pub fn compile<R: Read, W: Write>(input: &mut R, output: &mut W) {
// compile
//screen
let mut clean_input = frontend::screener::screen(input);
// tokenize
let tokens = frontend::lexer::lex(&mut clean_input);
debug!("");
for token in tokens {
debug!("{:?}", token);
}
// parse tokens and create ast
let ast = frontend::parser::parse_tokens(tokens.collect()); //use collect until we work on iterators directly
ast.print();
// create code
codegen::generate_zcode(ast, output);
}
|
Hide modules from the public API, only the re-exports should be used | pub mod liblz4;
pub mod decoder;
pub mod encoder;
pub use decoder::Decoder;
pub use encoder::Encoder;
pub use encoder::EncoderBuilder;
pub use liblz4::BlockSize;
pub use liblz4::BlockMode;
pub use liblz4::ContentChecksum;
pub use liblz4::version;
| mod liblz4;
mod decoder;
mod encoder;
pub use decoder::Decoder;
pub use encoder::Encoder;
pub use encoder::EncoderBuilder;
pub use liblz4::BlockSize;
pub use liblz4::BlockMode;
pub use liblz4::ContentChecksum;
pub use liblz4::version;
|
Check if best move in transposition table is valid | use game::{Position, Move, ZobristHash, NumPlies};
use negamax::Bound;
#[derive(Clone, Debug)]
pub struct Data {
pub hash: ZobristHash,
pub depth: NumPlies,
pub bound: Bound,
pub best_move_opt: Option<Move>,
}
pub struct TranspositionTable(Vec<Option<Data>>);
impl TranspositionTable {
pub fn with_capacity(capacity: usize) -> Self {
TranspositionTable(vec![None; capacity])
}
pub fn get<'a>(&'a self, pos: &Position) -> Option<&'a Data> {
let hash = pos.hash();
let idx = (hash.0 % (self.0.len() as u64)) as usize;
self.0[idx].as_ref().and_then(|x| if x.hash == hash { Some(x) } else { None })
}
// TODO implement a better replacement scheme
pub fn set(&mut self, pos: &Position, depth: NumPlies, best_move_opt: Option<Move>, bound: Bound) {
let hash = pos.hash();
let idx = (hash.0 % (self.0.len() as u64)) as usize;
self.0[idx] = Some(Data { hash: hash, depth: depth, best_move_opt: best_move_opt, bound: bound });
}
}
| use game::{Position, Move, ZobristHash, NumPlies};
use negamax::Bound;
#[derive(Clone, Debug)]
pub struct Data {
pub hash: ZobristHash,
pub depth: NumPlies,
pub bound: Bound,
pub best_move_opt: Option<Move>,
}
pub struct TranspositionTable(Vec<Option<Data>>);
impl TranspositionTable {
pub fn with_capacity(capacity: usize) -> Self {
TranspositionTable(vec![None; capacity])
}
pub fn get<'a>(&'a self, pos: &Position) -> Option<&'a Data> {
let hash = pos.hash();
let idx = (hash.0 % (self.0.len() as u64)) as usize;
self.0[idx].as_ref().and_then(|x| {
let is_correct_pos = x.hash == hash &&
x.best_move_opt.as_ref()
.map_or(true, |y| pos.legal_iter().any(|z| *y == z));
if is_correct_pos { Some(x) } else { None }
})
}
// TODO implement a better replacement scheme
pub fn set(&mut self, pos: &Position, depth: NumPlies, best_move_opt: Option<Move>, bound: Bound) {
let hash = pos.hash();
let idx = (hash.0 % (self.0.len() as u64)) as usize;
self.0[idx] = Some(Data { hash: hash, depth: depth, best_move_opt: best_move_opt, bound: bound });
}
}
|
Make imageflow_helpers run tests under system alloc | #[macro_use]
extern crate lazy_static;
extern crate reqwest;
extern crate hyper_native_tls;
extern crate regex;
extern crate hyper;
extern crate blake2_rfc;
extern crate twox_hash;
extern crate chrono;
extern crate zip;
extern crate serde;
extern crate serde_json;
extern crate libc;
extern crate backtrace;
#[cfg(not(any(target_os = "windows", target_os = "macos")))]
extern crate openssl;
#[cfg(not(any(target_os = "windows", target_os = "macos")))]
extern crate hyper_openssl;
pub mod identifier_styles;
pub mod preludes;
pub mod filesystem;
pub mod fetching;
pub mod caching;
pub mod hashing;
pub mod process_testing;
pub mod process_capture;
pub mod colors;
pub mod debug;
pub mod timeywimey{
pub fn time_bucket(seconds_per_bucket: u64, bucket_count: u64) -> u64{
::std::time::SystemTime::now().duration_since(::std::time::UNIX_EPOCH).unwrap().as_secs() / seconds_per_bucket % bucket_count
}
pub use chrono::UTC;
}
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
}
}
| #![feature(alloc_system)]
extern crate alloc_system;
#[macro_use]
extern crate lazy_static;
extern crate reqwest;
extern crate hyper_native_tls;
extern crate regex;
extern crate hyper;
extern crate blake2_rfc;
extern crate twox_hash;
extern crate chrono;
extern crate zip;
extern crate serde;
extern crate serde_json;
extern crate libc;
extern crate backtrace;
#[cfg(not(any(target_os = "windows", target_os = "macos")))]
extern crate openssl;
#[cfg(not(any(target_os = "windows", target_os = "macos")))]
extern crate hyper_openssl;
pub mod identifier_styles;
pub mod preludes;
pub mod filesystem;
pub mod fetching;
pub mod caching;
pub mod hashing;
pub mod process_testing;
pub mod process_capture;
pub mod colors;
pub mod debug;
pub mod timeywimey{
pub fn time_bucket(seconds_per_bucket: u64, bucket_count: u64) -> u64{
::std::time::SystemTime::now().duration_since(::std::time::UNIX_EPOCH).unwrap().as_secs() / seconds_per_bucket % bucket_count
}
pub use chrono::UTC;
}
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
}
}
|
Make it regression test of issue-77668 | #![crate_type = "lib"]
pub fn bar<P>( // Error won't happen if "bar" is not generic
_baz: P,
) {
hide_foo()();
}
fn hide_foo() -> impl Fn() { // Error won't happen if "iterate" hasn't impl Trait or has generics
foo
}
fn foo() { // Error won't happen if "foo" isn't used in "iterate" or has generics
}
| // revisions: default miropt
//[miropt]compile-flags: -Z mir-opt-level=2
// ~^ This flag is for #77668, it used to be ICE.
#![crate_type = "lib"]
pub fn bar<P>( // Error won't happen if "bar" is not generic
_baz: P,
) {
hide_foo()();
}
fn hide_foo() -> impl Fn() { // Error won't happen if "iterate" hasn't impl Trait or has generics
foo
}
fn foo() { // Error won't happen if "foo" isn't used in "iterate" or has generics
}
|
Make sure the submodule is brought in | use std::io::Command;
use std::os;
fn main() {
let curr_dir = os::getenv("CARGO_MANIFEST_DIR").unwrap();
let nvim_dir = Path::new(curr_dir).join("neovim");
Command::new("make").arg("libnvim").cwd(&nvim_dir).status().unwrap();
let nvim_lib_dir = nvim_dir.join_many(&["build", "src", "nvim"]);
let deps_lib_dir = nvim_dir.join_many(&[".deps", "usr", "lib"]);
println!("cargo:rustc-flags=-L {} -L {} -l nvim:static -l nvim -l uv -l msgpack -l curses",
nvim_lib_dir.as_str().unwrap(),
deps_lib_dir.as_str().unwrap());
}
| use std::io::Command;
use std::os;
fn main() {
let curr_dir = Path::new(os::getenv("CARGO_MANIFEST_DIR").unwrap());
Command::new("git").arg("submodule").arg("update").arg("--init").cwd(&curr_dir).status().unwrap();
let nvim_dir = Path::new(curr_dir).join("neovim");
Command::new("make").arg("libnvim").cwd(&nvim_dir).status().unwrap();
let nvim_lib_dir = nvim_dir.join_many(&["build", "src", "nvim"]);
let deps_lib_dir = nvim_dir.join_many(&[".deps", "usr", "lib"]);
println!("cargo:rustc-flags=-L {} -L {} -l nvim:static -l nvim -l uv -l msgpack -l curses",
nvim_lib_dir.as_str().unwrap(),
deps_lib_dir.as_str().unwrap());
}
|
Print message instead of disabling challenge | use super::hmac_client;
use super::hmac_server;
use rand;
use rand::Rng;
use errors::*;
pub fn run() -> Result<(), Error> {
let skip_exercise = true;
if skip_exercise {
return Err(ChallengeError::Skipped("Runs very long.").into());
}
let mut rng = rand::thread_rng();
let key: Vec<u8> = rng.gen_iter().take(20).collect();
let mut server = hmac_server::start(key)?;
let result = hmac_client::run();
server.close().context("failed to close connection")?;
result
}
| use super::hmac_client;
use super::hmac_server;
use rand;
use rand::Rng;
use errors::*;
pub fn run() -> Result<(), Error> {
println!("Challenge 32: takes about three minutes, pleases wait ...");
let mut rng = rand::thread_rng();
let key: Vec<u8> = rng.gen_iter().take(20).collect();
let mut server = hmac_server::start(key)?;
let result = hmac_client::run();
server.close().context("failed to close connection")?;
result
}
|
Enable an old test that works now! | use std::cell::Cell;
use std::rc::Rc;
use std::sync::Arc;
fn rc_cell() -> Rc<Cell<i32>> {
let r = Rc::new(Cell::new(42));
let x = r.get();
r.set(x + x);
r
}
// TODO(solson): also requires destructors to run for the second borrow to work
// TODO(solson): needs StructWrappedNullablePointer support
// fn rc_refcell() -> i32 {
// let r = Rc::new(RefCell::new(42));
// *r.borrow_mut() += 10;
// let x = *r.borrow();
// x
// }
fn arc() -> Arc<i32> {
let a = Arc::new(42);
a
}
fn true_assert() {
assert_eq!(1, 1);
}
fn main() {
assert_eq!(*arc(), 42);
assert_eq!(rc_cell().get(), 84);
true_assert();
}
| use std::cell::{Cell, RefCell};
use std::rc::Rc;
use std::sync::Arc;
fn rc_cell() -> Rc<Cell<i32>> {
let r = Rc::new(Cell::new(42));
let x = r.get();
r.set(x + x);
r
}
fn rc_refcell() -> i32 {
let r = Rc::new(RefCell::new(42));
*r.borrow_mut() += 10;
let x = *r.borrow();
x
}
fn arc() -> Arc<i32> {
let a = Arc::new(42);
a
}
fn true_assert() {
assert_eq!(1, 1);
}
fn main() {
assert_eq!(*arc(), 42);
assert_eq!(rc_cell().get(), 84);
assert_eq!(rc_refcell(), 52);
true_assert();
}
|
Break out of loop if can't send data | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A task that sniffs data
use std::comm::{channel, Receiver, Sender, Disconnected};
use std::task::TaskBuilder;
use resource_task::{LoadResponse};
pub type SnifferTask = Sender<LoadResponse>;
pub fn new_sniffer_task(next_rx: Sender<LoadResponse>) -> SnifferTask {
let (sen, rec) = channel();
let builder = TaskBuilder::new().named("SnifferManager");
builder.spawn(proc() {
SnifferManager::new(rec).start(next_rx);
});
sen
}
struct SnifferManager {
data_receiver: Receiver<LoadResponse>,
}
impl SnifferManager {
fn new(data_receiver: Receiver<LoadResponse>) -> SnifferManager {
SnifferManager {
data_receiver: data_receiver,
}
}
}
impl SnifferManager {
fn start(&self, next_rx: Sender<LoadResponse>) {
loop {
match self.data_receiver.try_recv() {
Ok(snif_data) => next_rx.send(snif_data),
Err(Disconnected) => break,
Err(_) => (),
}
}
}
}
| /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A task that sniffs data
use std::comm::{channel, Receiver, Sender, Disconnected};
use std::task::TaskBuilder;
use resource_task::{LoadResponse};
pub type SnifferTask = Sender<LoadResponse>;
pub fn new_sniffer_task(next_rx: Sender<LoadResponse>) -> SnifferTask {
let (sen, rec) = channel();
let builder = TaskBuilder::new().named("SnifferManager");
builder.spawn(proc() {
SnifferManager::new(rec).start(next_rx);
});
sen
}
struct SnifferManager {
data_receiver: Receiver<LoadResponse>,
}
impl SnifferManager {
fn new(data_receiver: Receiver<LoadResponse>) -> SnifferManager {
SnifferManager {
data_receiver: data_receiver,
}
}
}
impl SnifferManager {
fn start(self, next_rx: Sender<LoadResponse>) {
loop {
match self.data_receiver.try_recv() {
Ok(snif_data) => {
let result = next_rx.send_opt(snif_data);
if result.is_err() {
break;
}
}
Err(Disconnected) => break,
Err(_) => (),
}
}
}
}
|
Use ProcessAction in Windows handler for clippy | use anyhow::{bail, Result};
use std::process::{Child, Command};
use crate::task_handler::ProcessAction;
use log::info;
pub fn compile_shell_command(command_string: &str) -> Command {
// Chain two `powershell` commands, one that sets the output encoding to utf8 and then the user provided one.
let mut command = Command::new("powershell");
command.arg("-c").arg(format!(
"[Console]::OutputEncoding = [Text.UTF8Encoding]::UTF8; {}",
command_string
));
command
}
/// Send a signal to a windows process.
pub fn send_signal_to_child(
_child: &Child,
_action: &ProcessAction,
_children: bool,
) -> Result<bool> {
bail!("not supported on windows.")
}
/// Kill a child process
pub fn kill_child(task_id: usize, child: &mut Child, _kill_children: bool) -> bool {
match child.kill() {
Err(_) => {
info!("Task {} has already finished by itself", task_id);
false
}
Ok(_) => true,
}
}
| use anyhow::{bail, Result};
use std::process::{Child, Command};
use crate::task_handler::ProcessAction;
use log::info;
pub fn compile_shell_command(command_string: &str) -> Command {
// Chain two `powershell` commands, one that sets the output encoding to utf8 and then the user provided one.
let mut command = Command::new("powershell");
command.arg("-c").arg(format!(
"[Console]::OutputEncoding = [Text.UTF8Encoding]::UTF8; {}",
command_string
));
command
}
/// Send a signal to a windows process.
pub fn send_signal_to_child(
_child: &Child,
action: &ProcessAction,
_children: bool,
) -> Result<bool> {
match action {
ProcessAction::Pause => bail!("Pause is not yet supported on windows."),
ProcessAction::Resume => bail!("Resume is not yet supported on windows."),
ProcessAction::Kill => bail!("Kill is not yet supported on windows."),
}
}
/// Kill a child process
pub fn kill_child(task_id: usize, child: &mut Child, _kill_children: bool) -> bool {
match child.kill() {
Err(_) => {
info!("Task {} has already finished by itself", task_id);
false
}
Ok(_) => true,
}
}
|
Use intradoc-links for the whole test, add a @has check | // ignore-tidy-linelength
pub mod internal {
pub struct r#mod;
/// See [name], [other name]
///
/// [name]: mod
/// [other name]: ../internal/struct.mod.html
// @has 'raw_ident_eliminate_r_hashtag/internal/struct.B.html' '//a[@href="../raw_ident_eliminate_r_hashtag/internal/struct.mod.html"]' 'name'
// @has 'raw_ident_eliminate_r_hashtag/internal/struct.B.html' '//a[@href="../raw_ident_eliminate_r_hashtag/internal/struct.mod.html"]' 'other name'
pub struct B;
}
/// See [name].
///
/// [name]: internal::mod
// @has 'raw_ident_eliminate_r_hashtag/struct.A.html' '//a[@href="../raw_ident_eliminate_r_hashtag/internal/struct.mod.html"]' 'name'
struct A;
| // ignore-tidy-linelength
pub mod internal {
// @has 'raw_ident_eliminate_r_hashtag/internal/struct.mod.html'
pub struct r#mod;
/// See [name], [other name]
///
/// [name]: mod
/// [other name]: crate::internal::mod
// @has 'raw_ident_eliminate_r_hashtag/internal/struct.B.html' '//a[@href="../raw_ident_eliminate_r_hashtag/internal/struct.mod.html"]' 'name'
// @has 'raw_ident_eliminate_r_hashtag/internal/struct.B.html' '//a[@href="../raw_ident_eliminate_r_hashtag/internal/struct.mod.html"]' 'other name'
pub struct B;
}
/// See [name].
///
/// [name]: internal::mod
// @has 'raw_ident_eliminate_r_hashtag/struct.A.html' '//a[@href="../raw_ident_eliminate_r_hashtag/internal/struct.mod.html"]' 'name'
struct A;
|
Address LLVM assertion failure by prepopulating with *just* name-anon-globals. | // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// Test for issue #18804, #[linkage] does not propagate through generic
// functions. Failure results in a linker error.
// ignore-asmjs no weak symbol support
// ignore-emscripten no weak symbol support
// ignore-windows no extern_weak linkage
// ignore-macos no extern_weak linkage
// aux-build:lib.rs
// rust-lang/rust#56772: nikic says we need this to be proper test.
// compile-flags: -C no-prepopulate-passes
extern crate lib;
fn main() {
lib::foo::<i32>();
}
| // Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// Test for issue #18804, #[linkage] does not propagate through generic
// functions. Failure results in a linker error.
// ignore-asmjs no weak symbol support
// ignore-emscripten no weak symbol support
// ignore-windows no extern_weak linkage
// ignore-macos no extern_weak linkage
// aux-build:lib.rs
// rust-lang/rust#56772: nikic says we need this to be proper test.
// compile-flags: -C no-prepopulate-passes -C passes=name-anon-globals
extern crate lib;
fn main() {
// Monomorphize lib::foo with a concrete type so the #[linkage] symbol is
// actually codegenned; per the header comments, the failure mode for this
// test is a linker error.
lib::foo::<i32>();
}
|
Update the usage of random | use probability::prelude::*;
use test::{Bencher, black_box};
// Benchmark: evaluate the standard-normal CDF over 1000 pre-drawn samples.
#[bench]
fn cdf(bencher: &mut Bencher) {
    let gaussian = Gaussian::new(0.0, 1.0);
    let mut rng = random::default();
    // Draw the inputs once, outside the timed loop.
    let xs: Vec<_> = Independent(&gaussian, &mut rng).take(1000).collect();
    bencher.iter(|| {
        black_box(xs.iter().map(|&v| gaussian.cdf(v)).collect::<Vec<_>>())
    });
}
// Benchmark: evaluate the Gaussian quantile function (inverse CDF) over
// 1000 pre-drawn uniform(0, 1) probabilities.
#[bench]
fn inv_cdf(bencher: &mut Bencher) {
    let gaussian = Gaussian::new(0.0, 1.0);
    let uniform = Uniform::new(0.0, 1.0);
    let mut rng = random::default();
    let probs: Vec<_> = Independent(&uniform, &mut rng).take(1000).collect();
    bencher.iter(|| {
        black_box(probs.iter().map(|&q| gaussian.inv_cdf(q)).collect::<Vec<_>>())
    });
}
// Benchmark: draw one standard-normal sample per iteration from a fixed-seed
// xorshift source.
#[bench]
fn sample(bencher: &mut Bencher) {
    let gaussian = Gaussian::new(0.0, 1.0);
    let mut rng = random::XorshiftPlus::new([42, 42]);
    bencher.iter(|| black_box(gaussian.sample(&mut rng)));
}
| use probability::prelude::*;
use test::{Bencher, black_box};
// Benchmark: evaluate the standard-normal CDF over 1000 pre-drawn samples.
#[bench]
fn cdf(bencher: &mut Bencher) {
    let gaussian = Gaussian::new(0.0, 1.0);
    let mut rng = random::default();
    // Inputs are generated once, outside the timed closure.
    let xs: Vec<_> = Independent(&gaussian, &mut rng).take(1000).collect();
    bencher.iter(|| {
        black_box(xs.iter().map(|&v| gaussian.cdf(v)).collect::<Vec<_>>())
    });
}
// Benchmark: evaluate the Gaussian quantile function (inverse CDF) over
// 1000 pre-drawn uniform(0, 1) probabilities.
#[bench]
fn inv_cdf(bencher: &mut Bencher) {
    let gaussian = Gaussian::new(0.0, 1.0);
    let uniform = Uniform::new(0.0, 1.0);
    let mut rng = random::default();
    let probs: Vec<_> = Independent(&uniform, &mut rng).take(1000).collect();
    bencher.iter(|| {
        black_box(probs.iter().map(|&q| gaussian.inv_cdf(q)).collect::<Vec<_>>())
    });
}
// Benchmark: draw one standard-normal sample per iteration from a fixed-seed
// Xorshift128+ source.
#[bench]
fn sample(bencher: &mut Bencher) {
    let gaussian = Gaussian::new(0.0, 1.0);
    let mut rng = random::Xorshift128Plus::new([42, 69]);
    bencher.iter(|| black_box(gaussian.sample(&mut rng)));
}
|
Add example for R function call | extern crate libr;
use std::env;
use std::ffi::CString;
use libr::ffi::embedded;
use libr::ffi::internals::*;
// Minimal example of embedding the R interpreter: initialize, print, shut down.
fn main() {
    // Embedded R needs R_HOME to locate its runtime files; fail fast with a
    // clear message instead of an opaque crash inside libR.
    // (Fixes the original's garbled panic text "need R_HOME be setted".)
    if env::var("R_HOME").is_err() {
        panic!("Rembedded example requires R_HOME to be set");
    }
    // argv for the embedded interpreter. into_raw() deliberately leaks each
    // CString so the pointers remain valid for R's whole lifetime. The extra
    // Box::new from the original was redundant — Vec already heap-allocates.
    let mut args = vec![CString::new("R").unwrap().into_raw(),
                        CString::new("--quiet").unwrap().into_raw(),
                        CString::new("--no-save").unwrap().into_raw()];
    unsafe {
        embedded::Rf_initEmbeddedR(args.len() as i32, args.as_mut_ptr());
        Rprintf(CString::new("Hello world").unwrap().into_raw());
        embedded::Rf_endEmbeddedR(0);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.