repo_name
stringlengths 6
97
| path
stringlengths 3
341
| text
stringlengths 8
1.02M
|
|---|---|---|
hlfcoding/UAFInteractiveNavigationController
|
UAFInteractiveNavigationController.podspec
|
# CocoaPods specification for UAFInteractiveNavigationController.
Pod::Spec.new do |s|
s.name = "UAFInteractiveNavigationController"
s.version = "0.1.3"
s.summary = "UAFInteractiveNavigationController makes life easier."
s.description = <<-DESC
UAFInteractiveNavigationController mirrors
UINavigationController behavior, but combines it with the
scroll-and-snap transition behavior of
UIPageViewController. It is meant for apps not using the
custom view-controller transitions iOS7.
DESC
s.homepage = "http://useallfive.github.io/UAFInteractiveNavigationController"
s.license = "MIT"
s.authors = { "<NAME>" => "<EMAIL>" }
# FIX: derive the tag from s.version instead of repeating the literal
# "0.1.3" — the two values previously had to be kept in sync by hand
# and would silently drift on the next release bump.
s.source = { :git => "https://github.com/UseAllFive/UAFInteractiveNavigationController.git",
:tag => s.version.to_s }
s.platform = :ios, '5.0'
s.requires_arc = true
s.source_files = 'UAFInteractiveNavigationController'
s.dependency 'UAFToolkit/Utility'
s.dependency 'UAFToolkit/UIKit'
s.dependency 'UAFToolkit/Boilerplate'
s.dependency 'UAFToolkit/Navigation'
end
|
p/jira-proxy
|
config/deploy.rb
|
# Capistrano 2 deployment recipe for the jira-proxy ("issues") app,
# deploying a Subversion export to etal.bsdpower.com as user "jiraproxy".
load 'deploy'
set :application, 'issues'
role :web, 'etal.bsdpower.com'
set :user, 'jiraproxy'
# Paths are interpolated from the :user/:application settings above.
set :deploy_to, "/home/#{user}/#{application}"
set :cache_dir, "/var/cache/#{application}"
set :scm, :subversion
set :repository, "http://svn.bsdpower.com/webtools/jira-proxy/trunk"
# :export checks the code out without .svn metadata on the server.
set :deploy_via, :export
set :keep_releases, 5
# deploy:cleanup wants to use sudo
set :use_sudo, false
namespace :deploy do
# Create the releases/shared directory skeleton (lean override of the
# stock deploy:setup task).
task :setup do
run "mkdir -p #{releases_path} #{shared_path} #{shared_path}/config"
end
# Symlink per-host config files from shared/config into the new release.
task :finalize_update do
run <<-CMD
for file in main.ini production.ini; do
ln -s #{shared_path}/config/$file #{release_path}/config;
done
CMD
end
# Soft-restart the FastCGI process. NOTE(review): uses sudo even though
# :use_sudo is false for ordinary commands — confirm the host's sudoers
# allows "fcgictl" for the deploy user.
task :restart do
sudo "fcgictl issues softrestart"
end
# Wipe the reverse-proxy cache after a deploy.
task :clear_proxy_cache do
run "rm -rf #{cache_dir}/*"
end
end
|
nnhansg/xero-ruby
|
accounting/spec/models/organisation_spec.rb
|
=begin
#Accounting API
#No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
The version of the OpenAPI document: 2.0.0
Contact: <EMAIL>
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.0.3
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for XeroRuby::Organisation
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
# Unit tests for XeroRuby::Organisation, restyled by hand from the
# openapi-generator output. Example names/strings are unchanged.
describe 'Organisation' do
  before do
    # fresh model instance for every example
    @instance = XeroRuby::Organisation.new
  end

  after do
    # no per-example teardown needed
  end

  describe 'test an instance of Organisation' do
    it 'should create an instance of Organisation' do
      expect(@instance).to be_instance_of(XeroRuby::Organisation)
    end
  end

  # One placeholder example per model attribute, generated in a loop
  # instead of 32 copy-pasted describe blocks. Assertions are still TODO;
  # ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
  %w[
    organisation_id api_key name legal_name pays_tax version
    organisation_type base_currency country_code is_demo_company
    organisation_status registration_number tax_number
    financial_year_end_day financial_year_end_month sales_tax_basis
    sales_tax_period default_sales_tax default_purchases_tax
    period_lock_date end_of_year_lock_date created_date_utc timezone
    organisation_entity_type short_code _class edition line_of_business
    addresses phones external_links payment_terms
  ].each do |attr|
    describe %(test attribute "#{attr}") do
      it 'should work' do
        # assertion here (placeholder inherited from the generator)
      end
    end
  end
end
|
nnhansg/xero-ruby
|
accounting/lib/xero-ruby/version.rb
|
<gh_stars>1-10
=begin
#Accounting API
#No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
The version of the OpenAPI document: 2.0.0
Contact: <EMAIL>
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.0.3
=end
module XeroRuby
  # Gem version of the generated Accounting API client.
  # Frozen so the constant string cannot be mutated at runtime
  # (mutable string constants are a common Ruby footgun).
  VERSION = '0.1.2'.freeze
end
|
nnhansg/xero-ruby
|
accounting/spec/models/schedule_spec.rb
|
=begin
#Accounting API
#No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
The version of the OpenAPI document: 2.0.0
Contact: <EMAIL>
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.0.3
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for XeroRuby::Schedule
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
# Unit tests for XeroRuby::Schedule, restyled by hand from the
# openapi-generator output. Example names/strings are unchanged.
describe 'Schedule' do
  before do
    # fresh model instance for every example
    @instance = XeroRuby::Schedule.new
  end

  after do
    # no per-example teardown needed
  end

  describe 'test an instance of Schedule' do
    it 'should create an instance of Schedule' do
      expect(@instance).to be_instance_of(XeroRuby::Schedule)
    end
  end

  # One placeholder example per model attribute, generated in a loop.
  # Assertions are still TODO;
  # ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
  %w[
    period unit due_date due_date_type
    start_date next_scheduled_date end_date
  ].each do |attr|
    describe %(test attribute "#{attr}") do
      it 'should work' do
        # assertion here (placeholder inherited from the generator)
      end
    end
  end
end
|
nnhansg/xero-ruby
|
accounting/spec/models/expense_claim_spec.rb
|
=begin
#Accounting API
#No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
The version of the OpenAPI document: 2.0.0
Contact: <EMAIL>
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.0.3
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for XeroRuby::ExpenseClaim
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
# Unit tests for XeroRuby::ExpenseClaim, restyled by hand from the
# openapi-generator output. Example names/strings are unchanged.
describe 'ExpenseClaim' do
  before do
    # fresh model instance for every example
    @instance = XeroRuby::ExpenseClaim.new
  end

  after do
    # no per-example teardown needed
  end

  describe 'test an instance of ExpenseClaim' do
    it 'should create an instance of ExpenseClaim' do
      expect(@instance).to be_instance_of(XeroRuby::ExpenseClaim)
    end
  end

  # One placeholder example per model attribute, generated in a loop.
  # Assertions are still TODO;
  # ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
  %w[
    expense_claim_id status payments user receipts updated_date_utc
    total amount_due amount_paid payment_due_date reporting_date
    receipt_id
  ].each do |attr|
    describe %(test attribute "#{attr}") do
      it 'should work' do
        # assertion here (placeholder inherited from the generator)
      end
    end
  end
end
|
nnhansg/xero-ruby
|
accounting/spec/models/tax_rate_spec.rb
|
<gh_stars>1-10
=begin
#Accounting API
#No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
The version of the OpenAPI document: 2.0.0
Contact: <EMAIL>
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.0.3
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for XeroRuby::TaxRate
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
# Unit tests for XeroRuby::TaxRate, restyled by hand from the
# openapi-generator output. Example names/strings are unchanged.
describe 'TaxRate' do
  before do
    # fresh model instance for every example
    @instance = XeroRuby::TaxRate.new
  end

  after do
    # no per-example teardown needed
  end

  describe 'test an instance of TaxRate' do
    it 'should create an instance of TaxRate' do
      expect(@instance).to be_instance_of(XeroRuby::TaxRate)
    end
  end

  # One placeholder example per model attribute, generated in a loop.
  # Assertions are still TODO;
  # ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
  %w[
    name tax_type tax_components status report_tax_type
    can_apply_to_assets can_apply_to_equity can_apply_to_expenses
    can_apply_to_liabilities can_apply_to_revenue
    display_tax_rate effective_rate
  ].each do |attr|
    describe %(test attribute "#{attr}") do
      it 'should work' do
        # assertion here (placeholder inherited from the generator)
      end
    end
  end
end
|
gregdardis/grocery-list-server
|
db/migrate/20180403230813_create_grocery_lists.rb
|
# Migration: creates the grocery_lists table.
class CreateGroceryLists < ActiveRecord::Migration[5.1]
  def change
    create_table :grocery_lists do |t|
      # table has an ID by default
      t.string :title
      t.string :owner
      t.string :last_modified_by
      # FIX: the original also declared `t.timestamp :created_at`, but
      # t.timestamps already adds both created_at and updated_at — the
      # duplicate column definition made the migration fail.
      t.timestamps
    end
  end
end
|
gregdardis/grocery-list-server
|
app/models/grocery_item.rb
|
# A single line item on a grocery list.
class GroceryItem < ApplicationRecord
# Each item belongs to exactly one GroceryList (grocery_list_id FK).
belongs_to :grocery_list
end
|
gregdardis/grocery-list-server
|
app/models/grocery_list.rb
|
<reponame>gregdardis/grocery-list-server<filename>app/models/grocery_list.rb<gh_stars>1-10
# A named grocery list that owns many grocery items.
class GroceryList < ApplicationRecord
# Items are deleted from the database when their list is destroyed.
has_many :grocery_items, dependent: :destroy
end
|
gregdardis/grocery-list-server
|
app/controllers/grocery_items_controller.rb
|
# JSON CRUD endpoints for grocery items nested under a grocery list.
class GroceryItemsController < ApplicationController
  # (GET) get all grocery items for a list
  # PATH: /grocery_lists/:grocery_list_id/grocery_items
  def index
    @grocery_items = GroceryList.find(params[:grocery_list_id]).grocery_items
    # status :ok = 200
    render status: :ok, json: @grocery_items
  end

  # (POST) create new grocery item
  # PATH: /grocery_lists/:grocery_list_id/grocery_items
  def create
    # initializes the GroceryItem with attributes automatically
    # mapped to the respective database columns
    @grocery_item = GroceryItem.new(grocery_item_params)
    @grocery_list_id = params[:grocery_list_id]
    @grocery_item.grocery_list_id = @grocery_list_id
    # saves the model in the database
    if @grocery_item.save
      render json: @grocery_item, status: :created
    else
      render json: {
               errors: @grocery_item.errors.full_messages
             },
             status: 422 # unprocessable entity
    end
  end

  # (GET) get specific grocery item on a grocery list
  # PATH: /grocery_lists/:grocery_list_id/grocery_items/:id
  def show
    @grocery_item = find_grocery_item
    render json: @grocery_item
  end

  # (PATCH) update specific grocery item by id
  # PATH: /grocery_lists/:grocery_list_id/grocery_items/:id
  def update
    @grocery_item = find_grocery_item
    if @grocery_item.update(grocery_item_params)
      render json: @grocery_item
    else
      render json: {
               errors: @grocery_item.errors.full_messages
             },
             status: 404
    end
  end

  # (DELETE) delete specific grocery item by id
  # PATH: /grocery_lists/:grocery_list_id/grocery_items/:id
  def destroy
    @grocery_item = find_grocery_item
    if @grocery_item.destroy
      render status: :ok # 200
    else
      render json: {
               errors: @grocery_item.errors.full_messages
             },
             status: 404
    end
  end

  private

  # BUG FIX: the original used GroceryItem.where(...), which returns an
  # ActiveRecord::Relation rather than a record. Relation#destroy needs
  # an id argument and Relation has no #errors, so show/update/destroy
  # either crashed or serialised an array instead of one item. find_by!
  # returns the single record and raises ActiveRecord::RecordNotFound
  # (rendered by Rails as 404) when the item is not on this list.
  def find_grocery_item
    GroceryItem.find_by!(
      grocery_list_id: params[:grocery_list_id],
      id: params[:id]
    )
  end

  # Strong parameters: only these attributes are mass-assignable.
  def grocery_item_params
    params.require(:grocery_item)
          .permit(
            :name,
            :crossed_off
          )
  end
end
|
gregdardis/grocery-list-server
|
app/controllers/grocery_lists_controller.rb
|
# JSON CRUD endpoints for grocery lists.
class GroceryListsController < ApplicationController
  # Member actions all operate on the list addressed by :id.
  before_action :set_grocery_list, only: %i[show update destroy]

  # (GET) get all grocery lists
  # PATH: /grocery_lists
  def index
    @grocery_lists = GroceryList.all
    # status :ok = 200
    render status: :ok, json: @grocery_lists
  end

  # (POST) create new grocery list
  # PATH: /grocery_lists
  def create
    # attributes are mapped onto the matching database columns
    @grocery_list = GroceryList.new(grocery_list_params)
    unless @grocery_list.save
      render json: { errors: @grocery_list.errors.full_messages },
             status: 422 # unprocessable entity
      return
    end
    render json: @grocery_list, status: :created
  end

  # (GET) get specific grocery list by id
  # PATH: /grocery_lists/:id
  def show
    render json: @grocery_list
  end

  # (PATCH) update specific grocery list by id
  # PATH: /grocery_lists/:id
  def update
    if @grocery_list.update(grocery_list_params)
      render json: @grocery_list
    else
      render json: { errors: @grocery_list.errors.full_messages },
             status: 404
    end
  end

  # (DELETE) delete specific grocery list by id
  # PATH: /grocery_lists/:id
  def destroy
    if @grocery_list.destroy
      render status: :ok # 200
    else
      render json: { errors: @grocery_list.errors.full_messages },
             status: 404
    end
  end

  private

  # Loads the addressed list; GroceryList.find raises RecordNotFound
  # (rendered as 404) when the id is unknown, exactly as before.
  def set_grocery_list
    @grocery_list = GroceryList.find(params[:id])
  end

  # Strong parameters: only these attributes are mass-assignable.
  def grocery_list_params
    params.require(:grocery_list)
          .permit(:title, :owner, :last_modified_by)
  end
end
|
gregdardis/grocery-list-server
|
config/routes.rb
|
# RESTful routes: /grocery_lists plus items nested under their list,
# e.g. /grocery_lists/:grocery_list_id/grocery_items/:id.
Rails.application.routes.draw do
resources :grocery_lists do
# Nested so every item route is scoped to its parent list.
resources :grocery_items
end
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/types/update_user_type.rb
|
module Types
# GraphQL input object for updating a User. Every argument is optional
# so callers can patch any subset of attributes.
class UpdateUserType < Types::BaseInputObject
argument :email, String, required: false
argument :password, String, required: false
argument :name, String, required: false
argument :img_url, String, required: false
# Valid role names come from the EasySettings.user_roles config keys.
argument :roles, [String], required: false,
description: "Roles in #{EasySettings.user_roles.keys}"
end
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/mutations/delete_user.rb
|
module Mutations
# GraphQL mutation: deletes a User by ID, refusing to delete the seeded
# demo/default accounts.
class DeleteUser < BaseMutation
description "Delete User"
# return fields
field :user, Types::UserType, null: false
# define arguments
argument :id, ID, required: true, description: "Delete User by ID"
# define resolve method
def resolve(id:)
# Raises unless the request carries an authenticated user.
Util.auth_user_graphql(context[:current_user])
ActiveRecord::Base.transaction do
@user = User.find(id)
# The configured default and demo accounts are protected.
default_user_flg = @user.email == EasySettings.default_user.email
demo_user_flg = @user.email == EasySettings.demo_user.email
if default_user_flg || demo_user_flg
msg = "GraphQL: Can not delete demo or default user"
raise GraphQL::ExecutionError, msg
end
# destroy! raises on failure, rolling the transaction back.
@user.destroy!
end
# Returns the deleted (still in-memory) record to the client.
{ user: @user }
end
end
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/types/city_type.rb
|
<reponame>jerrywdlee/iuliana-challenges
module Types
# GraphQL object type for a City and its associated records.
class CityType < Types::BaseObject
field :id, ID, null: false
field :name, String, null: false
# Association fields; nullable when absent.
field :houses, [Types::HouseType], null: true
field :datasets, [Types::DatasetType], null: true
end
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/mutations/new_user.rb
|
module Mutations
# GraphQL mutation: creates a User from a NewUserType input object.
class NewUser < BaseMutation
description "Add New User"
# return fields
field :user, Types::UserType, null: false
# define arguments
argument :user, Types::NewUserType, required: true,
description: "User Info"
# define resolve method
def resolve(user:)
# Raises unless the request carries an authenticated user.
Util.auth_user_graphql(context[:current_user])
ActiveRecord::Base.transaction do
# create! raises on validation failure, rolling the transaction back.
@user = User.create!(user.to_h)
end
{ user: @user }
end
end
end
|
jerrywdlee/iuliana-challenges
|
app/lib/data_process.rb
|
<filename>app/lib/data_process.rb
class DataProcess
class << self
# calc average energy_production per house for cities
def house_energy_prod_time_series
DataProcess.cities_datasets_time_series(:energy_production)
end
# calc average energy_production per person for cities
def person_energy_prod_time_series
max = Arel.sql("MAX(per_person)").as("max")
min = Arel.sql("MIN(per_person)").as("min")
avg = Arel.sql("AVG(per_person)").as("avg")
City.all.each_with_object({}) do |city, results|
sql = <<~SQL
(SELECT year, month,
(CAST(energy_production AS FLOAT) / CAST(num_of_people AS FLOAT)) AS per_person
FROM datasets, houses
WHERE "datasets"."house_id" = "houses"."id" AND "houses"."city_id" = #{city.id})
SQL
arel = Arel::Table.new(nil).project(:year, :month, max, min, avg)
.from(Arel.sql(sql).as("datasets_per_person"))
.group(:year, :month).order(:year, :month)
con = ActiveRecord::Base.connection
results[city.name] = con.select_all(arel)
.to_hash.each_with_object({}) do |sql_res, eng_prod_list|
date_str = DataProcess.date_str(sql_res["year"], sql_res["month"])
eng_prod_list[date_str] = [
sql_res["min"],
sql_res["avg"],
sql_res["max"],
]
end
end
end
# key: [:daylight, :temperature]
def weather_time_series(key)
DataProcess.cities_datasets_time_series(key)
end
# key: [:daylight, :temperature, :energy_production]
# res: { "Oxford" => { "2011-07" => [min, avg, max] } }
def cities_datasets_time_series(key)
data_selector = Dataset.arel_table[key]
max = data_selector.maximum.as("max")
min = data_selector.minimum.as("min")
avg = data_selector.average.as("avg")
City.all.each_with_object({}) do |city, results|
results[city.name] = city.datasets
.select(:year, :month, max, min, avg)
.group(:year, :month).order(:year, :month)
.each_with_object({}) do |sql_res, dataset_list|
date_str = DataProcess.date_str(sql_res.year, sql_res.month)
dataset_list[date_str] = [
sql_res.min.to_f,
sql_res.avg.to_f,
sql_res.max.to_f,
]
end
end
end
def date_str(year, month)
"#{year}-#{month.to_s.rjust(2, '0')}"
end
end
end
|
jerrywdlee/iuliana-challenges
|
app/controllers/contents_controller.rb
|
<gh_stars>0
class ContentsController < ApplicationController
# GET /contents
# GET /contents.json
def index
# If has `public/index.html`, this action will be ignored
end
end
|
jerrywdlee/iuliana-challenges
|
app/models/user.rb
|
<reponame>jerrywdlee/iuliana-challenges
class User < ApplicationRecord
include Devise::JWT::RevocationStrategies::JTIMatcher
# Include default devise modules. Others available are:
# :confirmable, :lockable, :timeoutable, :trackable and :omniauthable
devise :database_authenticatable, :registerable,
:recoverable, :rememberable, :validatable,
:jwt_authenticatable, jwt_revocation_strategy: self
scope :roles_equal, ->(role_list) { where(roles_code: role_list_to_code(role_list)) }
scope :has_role, lambda { |role|
role_code = EasySettings.user_roles[role].to_i
where("roles_code & ? = ?", role_code, role_code)
}
validates :email, presence: true,
format: { with: /\A([\w+\-]\.?)+@[\w\d\-]+(\.[\w\d\-]+)*\.[a-z]+\z/i }
before_create :set_default_role, :set_default_img, :set_default_name
# attr_accessor :roles
@roles
def roles
@roles = []
EasySettings.user_roles.each do |role, mask|
if Util.auth_binary(roles_code.to_i, mask.to_i)
@roles << role
end
end
@roles
end
def roles=(role_list)
self.roles_code = User.role_list_to_code(role_list)
end
EasySettings.user_roles.keys.each do |role_name|
method_name = role_name + "?"
define_method(method_name.to_sym) {
self.roles.include?(role_name.to_s)
}
scope role_name.to_sym, lambda {
role_code = EasySettings.user_roles[role_name].to_i
where("roles_code & ? = ?", role_code, role_code)
}
end
def self.ransackable_scopes(_auth_object = nil)
%i(has_role)
end
private
def set_default_role
if roles.blank?
self.roles = [EasySettings.user_roles.keys.last]
end
end
def set_default_img
if img_url.blank?
self.img_url = EasySettings.default_user.img_url_tmp.gsub(/\<@email@\>/, email)
end
end
def set_default_name
if name.blank?
self.name = email.split(/@/).map do |str|
"#{str.first(3)}.".upcase_first
end.join(" ")
end
end
def self.role_list_to_code(role_list)
role_ids = role_list.map do |role|
EasySettings.user_roles[role].to_i
end
Util.set_binary_codes(role_ids)
end
end
|
jerrywdlee/iuliana-challenges
|
app/services/data_series_service.rb
|
# Thin facade over DataProcess used by the charting endpoints.
class DataSeriesService
# Sorted, de-duplicated "YYYY-MM" labels for the chart x-axis.
def date_labels
Dataset.order(:year, :month).distinct
.pluck(:year, :month).map do |year, month|
DataProcess.date_str(year, month)
end
end
# Average energy production per house, by city and month.
def house_energy_prod
DataProcess.house_energy_prod_time_series
end
# Average energy production per person, by city and month.
def person_energy_prod
DataProcess.person_energy_prod_time_series
end
# Monthly temperature series per city.
def temperature
DataProcess.weather_time_series(:temperature)
end
# Monthly daylight series per city.
def daylight
DataProcess.weather_time_series(:daylight)
end
end
|
jerrywdlee/iuliana-challenges
|
spec/graphql/dataset_type_spec.rb
|
<gh_stars>0
require "rails_helper"
# Integration specs for the GraphQL `datasets` query: plain listing,
# ransack filtering, sorting and pagination.
# NOTE(review): these examples assume seeded Dataset rows exist before
# the suite runs — confirm the test database is seeded.
RSpec.describe "GraphQL on DatasetType" do
it "Should return all datasets" do
query = <<~GRAPHQL
{
datasets { id }
}
GRAPHQL
data = Util.graphql_query(query)
expect(data["datasets"].size).to be > 0
end
it "Should find datasets by ransack" do
dataset = Dataset.last
query = <<~GRAPHQL
{
datasets(q: { idEq: #{dataset.id} }) { id, daylight }
}
GRAPHQL
data = Util.graphql_query(query)
expect(data["datasets"][0]["daylight"]).to eq dataset.daylight
end
it "Should find datasets sorted and with pagination" do
# page/per arguments: expects exactly one full page of 20 rows.
query = <<~GRAPHQL
{
datasets(q: { s: "energyProduction desc" }, page: 2, per: 20) {
id, energyProduction
}
}
GRAPHQL
data = Util.graphql_query(query)
expect(data["datasets"].size).to eq 20
end
it "Should find datasets sorted by multiple conditions" do
query = <<~GRAPHQL
{
datasets(q: { s: ["temperature desc", "daylight asc"] }, page: 1) {
id
}
}
GRAPHQL
data = Util.graphql_query(query)
expect(data["datasets"].size).to be > 0
end
end
|
jerrywdlee/iuliana-challenges
|
spec/models/user_spec.rb
|
<reponame>jerrywdlee/iuliana-challenges<gh_stars>0
require "rails_helper"
# Model specs for User role handling (bitmask roles, dynamic scopes).
# NOTE(review): the <EMAIL>/<PASSWORD> tokens are anonymization
# placeholders left by a dataset scrubber — this file is not valid Ruby
# until they are restored to real string literals.
RSpec.describe User, type: :model do
before do
@user = User.first
end
it "Should create a user" do
email = "<EMAIL>"
pass = "<PASSWORD>"
@user = User.create!({
email: email, password: <PASSWORD>,
password_confirmation: <PASSWORD>,
roles: ["admin"]
})
expect(@user).to be_truthy
end
it "Should has roles" do
expect(@user.roles).to include "admin"
end
it "Should update roles" do
@user.roles = ["admin", "editor"]
# Expected bitmask is the OR of the two configured role bits.
roles_code = EasySettings.user_roles.admin | EasySettings.user_roles.editor
@user.save!
expect(@user.roles).to include "editor"
expect(@user.roles_code).to eq roles_code
end
it "Should find by roles" do
@users = User.has_role("editor")
expect(@users.size).to be > 0
expect(@users.first.roles).to include "editor"
end
it "Should find by roles use scope" do
@users = User.admin
expect(@users.size).to be > 0
expect(@users.first.admin?).to be_truthy
end
it "Should set default roles" do
email = "<EMAIL>"
pass = "<PASSWORD>"
@user = User.create!({
email: email, password: <PASSWORD>,
password_confirmation: <PASSWORD>
})
expect(@user.roles).to eq [EasySettings.user_roles.keys.last]
end
end
|
jerrywdlee/iuliana-challenges
|
spec/graphql/app_config_type_spec.rb
|
<reponame>jerrywdlee/iuliana-challenges
require "rails_helper"
# Covers the GraphQL `appConfigs` query: per-section reads (challenge2,
# challenge3) and the full config payload including fieldKeys.
RSpec.describe "GraphQL on AppConfigType" do
it "Should Get total_watt_url from challenge2" do
query = <<~GRAPHQL
{
appConfigs { challenge2 { totalWattUrl } }
}
GRAPHQL
data = Util.graphql_query(query)["appConfigs"]
expect(data.dig("challenge2", "totalWattUrl")).to be_truthy
end
it "Should Get house_data_url and dataset_url from challenge3" do
query = <<~GRAPHQL
{
appConfigs { challenge3 { houseDataUrl, datasetUrl } }
}
GRAPHQL
data = Util.graphql_query(query)["appConfigs"]
expect(data.dig("challenge3", "houseDataUrl")).to be_truthy
expect(data.dig("challenge3", "datasetUrl")).to be_truthy
end
# Full payload must mirror the values stored on AppConfig.
it "Should return all app_configs" do
query = <<~GRAPHQL
{
appConfigs {
general { allowGraphiql }
challenge2 { totalWattUrl }
challenge3 { datasetUrl, houseDataUrl }
fieldKeys
}
}
GRAPHQL
data = Util.graphql_query(query).deep_symbolize_keys
data = data[:appConfigs]
expect(data[:general][:allowGraphiql]).to eq AppConfig.general[:allow_graphiql]
expect(data[:challenge2][:totalWattUrl]).to eq AppConfig.challenge2[:total_watt_url]
expect(data[:challenge3][:datasetUrl]).to eq AppConfig.challenge3[:dataset_url]
expect(data[:challenge3][:houseDataUrl]).to eq AppConfig.challenge3[:house_data_url]
expect(data[:fieldKeys].size).to be > 0
end
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/types/app_config_input_type.rb
|
module Types
# Input object for AppConfig's "general" section; all fields optional so
# partial updates are possible.
class GeneralInputType < Types::BaseInputObject
argument :allow_graphiql, Boolean, required: false
argument :show_demo_user, Boolean, required: false
end
# Input object for the challenge2 section.
class Challenge2InputType < Types::BaseInputObject
argument :total_watt_url, String, required: false
end
# Input object for the challenge3 section (CSV source URLs).
class Challenge3InputType < Types::BaseInputObject
argument :house_data_url, String, required: false
argument :dataset_url, String, required: false
end
# Top-level input for Mutations::UpdateAppConfig; each section may be
# omitted to leave it unchanged.
class AppConfigInputType < Types::BaseInputObject
argument :general, GeneralInputType, required: false
argument :challenge2, Challenge2InputType, required: false
argument :challenge3, Challenge3InputType, required: false
end
end
|
jerrywdlee/iuliana-challenges
|
spec/access/data_loader_spec.rb
|
require "rails_helper"
# Covers DataLoader's CSV import pipeline: houses -> cities -> city/house
# sync -> datasets.
# NOTE(review): these examples fetch live CSVs from raw.githubusercontent.com,
# so they require network access and are order-dependent.
RSpec.describe "DataLoader" do
it "Should load house_data.csv from URL" do
uri = "https://raw.githubusercontent.com/jerrywdlee/EnergyDataSimulationChallenge/master/challenge3/data/house_data.csv"
DataLoader.load_houses(uri)
house_num = House.all.size
expect(house_num).to be > 0
end
it "Should create cities from cities" do
DataLoader.load_cities
city_num = City.all.size
expect(city_num).to be > 0
end
# After syncing, every house must reference a city.
it "Should add city_id to houses" do
DataLoader.sync_cities_houses
house_num = House.where(city_id: nil).size
expect(house_num).to eq 0
end
it "Should load dataset_50.csv from URL" do
uri = "https://raw.githubusercontent.com/jerrywdlee/EnergyDataSimulationChallenge/master/challenge3/data/dataset_50.csv"
DataLoader.load_dataset(uri)
dataset_num = Dataset.all.size
expect(dataset_num).to be > 0
end
end
|
jerrywdlee/iuliana-challenges
|
spec/graphql/user_type_spec.rb
|
require "rails_helper"
# Covers the admin-only GraphQL `users` query: listing, ransack filters,
# the custom hasRole predicate, and the authorization failure path.
RSpec.describe "GraphQL on User" do
it "Should get all users" do
query = <<~GRAPHQL
{
users {
id, email, name, roles
}
}
GRAPHQL
context = { current_user: User.admin.first }
data = Util.graphql_query(query, context: context)["users"]
expect(data.size).to eq User.count
end
it "Should search user by id" do
@user = User.last
query = <<~GRAPHQL
{
users (q: { idEq: #{@user.id} }) {
id, email, name, roles
}
}
GRAPHQL
context = { current_user: User.admin.first }
data = Util.graphql_query(query, context: context)["users"][0]
expect(data["id"]).to eq @user.id.to_s
expect(data["email"]).to eq @user.email
@user.roles.each do |role|
expect(data["roles"]).to include role
end
end
# Ransack `cont` predicate plus sorting in the same query.
it "Should search user by email" do
query = <<~GRAPHQL
{
users (q: { emailCont: "@", s: "id desc"}) {
id, email, name, roles
}
}
GRAPHQL
@users = User.where(User.arel_table[:email].matches("%@%"))
context = { current_user: User.admin.first }
data = Util.graphql_query(query, context: context)["users"]
expect(data.size).to eq @users.size
end
# hasRole is a custom ransacker matching the roles_code bitmask.
it "Should search user by role: admin" do
role = "admin"
query = <<~GRAPHQL
{
users (q: { hasRole: "#{role}"}) {
id, roles
}
}
GRAPHQL
context = { current_user: User.admin.first }
data = Util.graphql_query(query, context: context)["users"]
expect(data.size).to eq User.admin.size
data.each do |user|
expect(user["roles"]).to include role
end
end
it "Should search user by role: observer" do
role = "observer"
query = <<~GRAPHQL
{
users (q: { hasRole: "#{role}"}) {
id, roles
}
}
GRAPHQL
context = { current_user: User.admin.first }
data = Util.graphql_query(query, context: context)["users"]
expect(data.size).to eq User.observer.size
data.each do |user|
expect(user["roles"]).to include role
end
end
# Non-admin callers are rejected by Util.auth_user_graphql.
it "Should throw error if user is not admin" do
query = <<~GRAPHQL
{
users (q: { idEq: 1 }) {
id, email, name, roles
}
}
GRAPHQL
context = { current_user: User.observer.first }
expect {
Util.graphql_query(query, context: context)
}.to raise_error "GraphQL: Need admin user"
end
end
|
jerrywdlee/iuliana-challenges
|
spec/services/data_series_service_spec.rb
|
require "rails_helper"
# Covers DataSeriesService: label generation and the four chart series it
# exposes (house/person energy production, temperature, daylight).
RSpec.describe "DataSeriesService" do
before do
@data_series = DataSeriesService.new
end
it "Should return date_labels" do
date_labels = @data_series.date_labels
expect(date_labels.size).to be > 0
expect(date_labels.first).to include "-"
end
# Series are nested: city name -> date label -> values.
it "Should return house_energy_prod" do
city = City.first
city_name = city.name
date_str = city.datasets.first.date_str
house_energy_prod = @data_series.house_energy_prod
expect(house_energy_prod).to be_truthy
expect(house_energy_prod[city_name]).to be_truthy
expect(house_energy_prod[city_name][date_str]).to be_truthy
expect(house_energy_prod[city_name][date_str].size).to eq 3
end
it "Should return person_energy_prod" do
city = City.first
city_name = city.name
date_str = city.datasets.first.date_str
person_energy_prod = @data_series.person_energy_prod
expect(person_energy_prod).to be_truthy
expect(person_energy_prod[city_name]).to be_truthy
expect(person_energy_prod[city_name][date_str]).to be_truthy
expect(person_energy_prod[city_name][date_str].size).to eq 3
end
it "Should return temperature" do
expect(@data_series.temperature).to be_truthy
end
it "Should return daylight" do
expect(@data_series.daylight).to be_truthy
end
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/types/house_type.rb
|
<reponame>jerrywdlee/iuliana-challenges<gh_stars>0
module Types
# GraphQL object for the House model. Exposes both the enum-mapped
# has_child string ("Yes"/"No") and the raw boolean (has_child_bool).
class HouseType < Types::BaseObject
field :id, ID, null: false
field :firstname, String, null: false
field :lastname, String, null: false
# "Firstname Lastname" (House#full_name).
field :full_name, String, null: false
# field :city_text, String, null: false
# field :city_id, Integer, null: false
field :city, Types::CityType, null: false
field :num_of_people, Integer, null: false
field :has_child, String, null: false
field :has_child_bool, Boolean, null: false
field :datasets, [Types::DatasetType], null: true
end
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/types/user_type.rb
|
<filename>app/graphql/types/user_type.rb
module Types
class UserType < Types::BaseObject
field :id, ID, null: false
field :email, String, null: false
field :name, String, null: false
field :img_url, String, null: false
field :roles, [String], null: false
field :roles_code, Integer, null: false
field :created_at, DateTimeType, null: false
field :updated_at, DateTimeType, null: false
end
end
|
jerrywdlee/iuliana-challenges
|
db/schema.rb
|
<reponame>jerrywdlee/iuliana-challenges
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2019_06_24_065530) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "cities", force: :cascade do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["name"], name: "index_cities_on_name"
end
# NOTE(review): house_id (below) and houses.city_id are plain indexed
# integer columns — no DB-level foreign keys are declared; referential
# integrity is enforced only at the application layer.
create_table "datasets", force: :cascade do |t|
t.integer "label"
t.integer "house_id"
t.integer "year"
t.integer "month"
t.float "temperature"
t.float "daylight"
t.integer "energy_production"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["daylight"], name: "index_datasets_on_daylight"
t.index ["energy_production"], name: "index_datasets_on_energy_production"
t.index ["house_id"], name: "index_datasets_on_house_id"
t.index ["label"], name: "index_datasets_on_label"
t.index ["month"], name: "index_datasets_on_month"
t.index ["temperature"], name: "index_datasets_on_temperature"
t.index ["year"], name: "index_datasets_on_year"
end
create_table "houses", force: :cascade do |t|
t.string "firstname"
t.string "lastname"
t.string "city_text"
t.integer "city_id"
t.integer "num_of_people"
t.boolean "has_child"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["city_id"], name: "index_houses_on_city_id"
t.index ["city_text"], name: "index_houses_on_city_text"
t.index ["firstname"], name: "index_houses_on_firstname"
t.index ["has_child"], name: "index_houses_on_has_child"
t.index ["lastname"], name: "index_houses_on_lastname"
t.index ["num_of_people"], name: "index_houses_on_num_of_people"
end
# Key/value store used by the rails-settings-style AppConfig backend.
create_table "settings", force: :cascade do |t|
t.string "var", null: false
t.text "value"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["var"], name: "index_settings_on_var", unique: true
end
# Devise users; jti supports JWT revocation, roles_code is a role bitmask.
create_table "users", force: :cascade do |t|
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "jti", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.string "name", default: "", null: false
t.string "img_url", default: "", null: false
t.integer "roles_code", default: 0, null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["email"], name: "index_users_on_email", unique: true
t.index ["jti"], name: "index_users_on_jti", unique: true
t.index ["name"], name: "index_users_on_name"
t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
t.index ["roles_code"], name: "index_users_on_roles_code"
end
end
|
jerrywdlee/iuliana-challenges
|
app/controllers/users/sessions_controller.rb
|
# frozen_string_literal: true
# Devise sessions controller extended with XHR (JSON) login support for
# the JavaScript front-end; non-XHR requests fall through to Devise.
class Users::SessionsController < Devise::SessionsController
skip_before_action :verify_authenticity_token
# before_action :configure_sign_in_params, only: [:create]
# GET /resource/sign_in
def new
super
end
# POST /resource/sign_in
# For XHR requests, authenticate via warden directly; on failure warden
# re-dispatches to this controller's #xhr_failure (the :recall option)
# instead of rendering the HTML login page.
def create
if request.xhr?
opts = auth_options
opts[:recall] = "#{controller_path}#xhr_failure"
self.resource = warden.authenticate!(opts)
sign_in(resource_name, resource)
xhr_success
else
super
end
end
# DELETE /resource/sign_out
def destroy
super
end
# JSON success payload; the numeric code is consumed by the front-end.
def xhr_success
render json: { result: true, code: 20000 }
end
# JSON failure payload rendered by warden's :recall on bad credentials.
def xhr_failure
render json: { result: false, code: 50008, errors: ["Login failed."] }
end
# protected
# If you have extra params to permit, append them to the sanitizer.
# def configure_sign_in_params
# devise_parameter_sanitizer.permit(:sign_in, keys: [:attribute])
# end
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/types/mutation_type.rb
|
module Types
# Root GraphQL mutation type: user CRUD plus app-config updates.
class MutationType < Types::BaseObject
field :new_user, mutation: Mutations::NewUser
field :update_user, mutation: Mutations::UpdateUser
field :delete_user, mutation: Mutations::DeleteUser
field :update_app_config, mutation: Mutations::UpdateAppConfig
end
end
|
jerrywdlee/iuliana-challenges
|
spec/graphql/city_type_spec.rb
|
require "rails_helper"
# Covers the GraphQL `city`/`cities` queries: lookup by name, association
# predicates, and grouped (m/g) ransack conditions.
RSpec.describe "GraphQL on CityType" do
it "Should find city by name" do
city = City.all.last
query = <<~GRAPHQL
{
city(name: "#{city.name}") { id, name }
}
GRAPHQL
data = Util.graphql_query(query)
expect(data.dig("city", "id")).to eq city.id.to_s
end
# Association predicate: cities whose houses include house id 1.
it "Should find city by ransack like query" do
city = House.find(1).city
query = <<~GRAPHQL
{
cities(q: {housesIdEq: 1}) { id, name }
}
GRAPHQL
data = Util.graphql_query(query)
expect(data["cities"][0]["name"]).to eq city.name
end
it "Should return all cities" do
city_num = City.all.size
query = <<~GRAPHQL
{
cities { id, name }
}
GRAPHQL
data = Util.graphql_query(query)
expect(data["cities"].size).to eq city_num
end
# Ransack combinator syntax: m ("or") with grouped conditions in g.
it "Should find cities with multiple conditions" do
query = <<~GRAPHQL
{
cities(q: {
m: "or",
g: [
{ name_cont: "Lon" },
{ name_cont: "Ox" },
]
}) { id, name }
}
GRAPHQL
data = Util.graphql_query(query)
expect(data["cities"].size).to eq 2
end
end
|
jerrywdlee/iuliana-challenges
|
app/models/city.rb
|
<reponame>jerrywdlee/iuliana-challenges
# A city groups houses; datasets are reached through those houses.
class City < ApplicationRecord
has_many :houses
has_many :datasets, through: :houses
# City names must be unique (they act as the natural key in the UI).
validates :name, uniqueness: true
end
|
jerrywdlee/iuliana-challenges
|
app/controllers/application_controller.rb
|
# Base controller. CSRF protection is left at the Rails default here;
# API-style controllers opt out individually (see ApiController and
# Users::SessionsController, which skip verify_authenticity_token).
class ApplicationController < ActionController::Base
# skip_before_action :verify_authenticity_token
# protect_from_forgery with: :null_session
end
|
jerrywdlee/iuliana-challenges
|
app/models/dataset.rb
|
# One monthly energy reading for a house.
class Dataset < ApplicationRecord
belongs_to :house
# City is reached through the owning house (datasets carry no city_id).
has_one :city, through: :house
validates_presence_of :label, :year, :month, :temperature, :daylight, :energy_production
# Chronological ordering used when building time series.
scope :order_by_date, -> { order(:year, :month) }
# "year-month" label for this row (delegates to DataProcess.date_str).
def date_str
DataProcess.date_str(year, month)
end
end
# Columns:
# label, house_id, year, month, temperature, daylight, energy_production
|
jerrywdlee/iuliana-challenges
|
app/controllers/api_controller.rb
|
# JSON endpoints used by the SPA front-end (see config/routes.rb).
# All actions except default_user require a signed-in user.
class ApiController < ApplicationController
skip_before_action :verify_authenticity_token
before_action :authenticate_user!, except: [:default_user]
# Returns login hints for the front-end: the seeded default user when
# enabled, else the demo user when AppConfig allows it, else nothing.
def default_user
if EasySettings.default_user.show
render json: EasySettings.default_user
elsif AppConfig.general[:show_demo_user]
render json: EasySettings.demo_user
else
render json: {}
end
end
# Profile payload for the currently signed-in user.
def user_info
user = current_user
render json: {
userId: user.id,
roles: user.roles,
avatar: user.img_url,
name: user.name,
}
end
# Re-imports the challenge-3 CSV data. Truncates cities/houses/datasets
# and reloads everything inside one transaction; the `&& return` after
# render halts the action to avoid a double render.
def load_csv
if params[:house_data_url].blank? || params[:dataset_url].blank?
render(json: { error: "url not found" }, status: 400) && return
end
case params[:key]
when "challenge-3"
begin
ActiveRecord::Base.transaction do
ActiveRecord::Base.connection
.execute("TRUNCATE TABLE cities, houses, datasets")
DataLoader.load_houses(params[:house_data_url])
DataLoader.load_cities
DataLoader.sync_cities_houses
DataLoader.load_dataset(params[:dataset_url])
render json: { result: "ok" }
end
rescue => e
render json: { error: e.message }, status: 500
end
else
render json: { error: "key not found" }, status: 400
end
end
# Admin-only user creation; password doubles as its own confirmation.
def create_user
unless current_user.roles.include?("admin")
render(json: { error: "Only Admin User Are Allowed." }, status: 400) && return
end
begin
ActiveRecord::Base.transaction do
user = User.new(
email: params[:email],
password: params[:password],
password_confirmation: params[:password],
roles: params[:roles],
)
user.save!
render json: {
id: user.id,
roles: user.roles,
avatar: user.img_url,
name: user.name,
}
end
rescue => e
render json: { error: e.message }, status: 500
end
end
end
|
jerrywdlee/iuliana-challenges
|
spec/models/dataset_spec.rb
|
<filename>spec/models/dataset_spec.rb
require "rails_helper"
# Model specs for Dataset: associations and the date_str label helper.
RSpec.describe Dataset, type: :model do
before do
@dataset = Dataset.first
end
it "Should find a Dataset" do
expect(@dataset).to be_truthy
end
it "Should have relations on city" do
expect(@dataset.city).to be_truthy
end
it "Should have relations on house" do
expect(@dataset.house).to be_truthy
end
# date_str must be parseable as the first day of that month.
it "Should have `date_str` year-month string" do
expect(@dataset.date_str).to be_truthy
date = Date.parse("#{@dataset.date_str}-1")
expect(date).to be_truthy
end
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/types/date_time_type.rb
|
<gh_stars>0
module Types
# Scalar wrapper for ActiveRecord datetime values. No coerce_input /
# coerce_result overrides are defined here, so values pass through with
# graphql-ruby's default scalar handling.
class DateTimeType < Types::BaseScalar
description "ActiveRecord::Type::DateTime"
end
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/mutations/update_app_config.rb
|
module Mutations
  # Admin-only partial update of the application's runtime configuration
  # (AppConfig). Authorization is enforced via Util.auth_user_graphql.
  class UpdateAppConfig < BaseMutation
    description "Update application configs"

    # return fields
    field :app_configs, Types::AppConfigType, null: false

    # arguments
    # (Typo fix: "availble" -> "available" in the schema description.)
    argument :app_configs, Types::AppConfigInputType, required: true,
             description: "App configs, Partial update available"

    # Merges the submitted values into the stored config, section by
    # section (general / challenge2 / challenge3). Unknown sections and
    # blank values are skipped; Hash sections are deep-merged so omitted
    # keys keep their current values.
    #
    # Returns { app_configs: AppConfig } for the mutation payload.
    def resolve(app_configs:)
      Util.auth_user_graphql(context[:current_user])

      app_configs.to_h.each do |section, new_value|
        current_value = AppConfig.try(section)
        next if current_value.nil? || new_value.blank?

        merged =
          if current_value.is_a?(Hash)
            current_value.deep_merge(new_value)
          else
            new_value
          end
        AppConfig.try("#{section}=", merged)
      end

      { app_configs: AppConfig }
    end
  end
end
|
jerrywdlee/iuliana-challenges
|
spec/graphql/data_series_type_spec.rb
|
<reponame>jerrywdlee/iuliana-challenges
require "rails_helper"
# Covers the GraphQL `dataSeries` query backed by DataSeriesService:
# date labels plus the four chart series, keyed by city name.
RSpec.describe "GraphQL on DataSeriesType" do
it "Should get date_labels" do
query = <<~GRAPHQL
{
dataSeries { dateLabels }
}
GRAPHQL
data = Util.graphql_query(query)["dataSeries"]
expect(data["dateLabels"].size).to be > 0
end
it "Should get house_energy_prod" do
query = <<~GRAPHQL
{
dataSeries { houseEnergyProd }
}
GRAPHQL
city_name = City.last.name
data = Util.graphql_query(query)["dataSeries"]
expect(data["houseEnergyProd"][city_name].size).to be > 0
end
it "Should get person_energy_prod" do
query = <<~GRAPHQL
{
dataSeries { personEnergyProd }
}
GRAPHQL
city_name = City.last.name
data = Util.graphql_query(query)["dataSeries"]
expect(data["personEnergyProd"][city_name].size).to be > 0
end
it "Should get daylight and temperature" do
query = <<~GRAPHQL
{
dataSeries { daylight, temperature }
}
GRAPHQL
city_name = City.last.name
data = Util.graphql_query(query)["dataSeries"]
expect(data["daylight"][city_name].size).to be > 0
expect(data["temperature"][city_name].size).to be > 0
end
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/types/dataset_type.rb
|
module Types
# GraphQL object for the Dataset model (one monthly reading per house).
class DatasetType < Types::BaseObject
field :id, ID, null: false
field :label, Integer, null: false
field :house_id, Integer, null: false
field :house, Types::HouseType, null: false
# City resolves through the owning house (Dataset has_one :city, through: :house).
field :city, Types::CityType, null: false
field :year, Integer, null: false
field :month, Integer, null: false
# "year-month" label (Dataset#date_str).
field :date_str, String, null: false
field :temperature, Float, null: false
field :daylight, Float, null: false
field :energy_production, Integer, null: false
end
end
|
jerrywdlee/iuliana-challenges
|
spec/models/city_spec.rb
|
require "rails_helper"
# Model specs for City: existence plus houses/datasets associations.
RSpec.describe City, type: :model do
before do
@city = City.first
end
it "Should find a city" do
expect(@city).to be_truthy
end
it "Should have relations on houses" do
expect(@city.houses).to be_truthy
end
# datasets are reached through houses (has_many :through).
it "Should have relations on datasets" do
expect(@city.datasets).to be_truthy
end
end
|
jerrywdlee/iuliana-challenges
|
spec/controllers/api_controller_spec.rb
|
require "rails_helper"
# Controller specs for ApiController: public default_user endpoint plus
# the authenticated user_info / load_csv / create_user endpoints.
# NOTE(review): "<EMAIL>"/"<PASSWORD>" look like redaction placeholders from
# a data-scrubbing pass, not valid Ruby — confirm against the original file.
RSpec.describe ApiController, type: :controller do
it "Create dummy users" do
email = "<EMAIL>"
pass = "<PASSWORD>"
@admin_user = User.create!({
email: email, password: <PASSWORD>,
password_confirmation: <PASSWORD>,
roles: ["admin"]
})
email = "<EMAIL>"
@normal_user = User.create!({
email: email, password: <PASSWORD>,
password_confirmation: <PASSWORD>
})
expect(@admin_user).to be_truthy
expect(@normal_user).to be_truthy
end
describe "Normal API" do
it "Should get default_user" do
get :default_user
res = JSON.parse(response.body)
expect(response).to have_http_status(:success)
expect(res["email"]).to eq EasySettings.default_user.email
end
# Demo user is served only when the default user is hidden and
# AppConfig enables it.
it "Should get demo_user" do
# Stub for EasySettings.default_user.show
allow(EasySettings.default_user).to receive(:show).and_return(false)
AppConfig.general[:show_demo_user] = true
get :default_user
res = JSON.parse(response.body)
expect(response).to have_http_status(:success)
expect(res["email"]).to eq EasySettings.demo_user.email
end
it "Should not get any user" do
allow(EasySettings.default_user).to receive(:show).and_return(false)
AppConfig.general[:show_demo_user] = false
get :default_user
res = JSON.parse(response.body)
expect(response).to have_http_status(:success)
expect(res["email"]).to be_falsey
end
end
describe "Secured API" do
before do
@user = User.admin.first
sign_in @user
end
context "api#user_info" do
it "Should return current_user" do
get :user_info
res = JSON.parse(response.body)
expect(response).to have_http_status(:success)
expect(res["roles"]).to eq ["admin"]
end
end
context "api#load_csv" do
it "Should raise error if no url" do
post :load_csv
res = JSON.parse(response.body)
expect(response).to have_http_status(400)
expect(res["error"]).to be_truthy
end
it "Should raise error if no key" do
post :load_csv, params: {
house_data_url: "https://raw.githubusercontent.com/jerrywdlee/EnergyDataSimulationChallenge/master/challenge3/data/house_data.csv",
dataset_url: "https://raw.githubusercontent.com/jerrywdlee/EnergyDataSimulationChallenge/master/challenge3/data/dataset_50.csv",
}
res = JSON.parse(response.body)
expect(response).to have_http_status(400)
expect(res["error"]).to be_truthy
end
# NOTE(review): fetches live CSVs from GitHub — requires network access.
it "Should load challenge-3 data csv from URL" do
post :load_csv, params: {
key: "challenge-3",
house_data_url: "https://raw.githubusercontent.com/jerrywdlee/EnergyDataSimulationChallenge/master/challenge3/data/house_data.csv",
dataset_url: "https://raw.githubusercontent.com/jerrywdlee/EnergyDataSimulationChallenge/master/challenge3/data/dataset_50.csv",
}
res = JSON.parse(response.body)
expect(response).to have_http_status(:success)
expect(res["result"]).to eq "ok"
end
end
context "api#create_user" do
it "Should raise error if not admin" do
@normal_user = User.where(email: "<EMAIL>").first
sign_in @normal_user
post :create_user
res = JSON.parse(response.body)
expect(response).to have_http_status(400)
expect(res["error"]).to be_truthy
end
it "Should create a user" do
expect {
post :create_user, params: {
email: "<EMAIL>",
password: "<PASSWORD>",
roles: ["observer"],
}
}.to change(User, :count).by(1)
expect(User.find_by(email: "<EMAIL>")).to be_truthy
end
end
end
end
|
jerrywdlee/iuliana-challenges
|
spec/access/graphiql_spec.rb
|
require "rails_helper"
require "devise/jwt/test_helpers"
# Shared request headers for the JWT helper below.
headers = {
"Accept" => "application/json",
"Content-Type" => "application/json",
}
# Request specs for the GraphiQLAuthenticate route constraint
# (config/initializers/graphiql.rb): page served only when
# AppConfig.general[:allow_graphiql] is true and a JWT `auth` param exists.
# NOTE(review): URI.encode was removed in Ruby 3.0 — on Ruby >= 3 these
# examples need URI::DEFAULT_PARSER.escape (or CGI.escape) instead.
RSpec.describe "Test GraphiQL Page Shown", type: :request do
it "Create dummy users" do
email = "<EMAIL>"
pass = "<PASSWORD>"
@admin_user = User.create({
email: email, password: <PASSWORD>,
password_confirmation: <PASSWORD>,
roles: ["admin"]
})
end
it "Should show GraphiQL" do
@user = User.admin.first
AppConfig.general = {
allow_graphiql: true,
}
auth_headers = Devise::JWT::TestHelpers.auth_headers(headers, @user)
get graphiql_rails_path(auth: URI.encode(auth_headers["Authorization"]))
expect(response).to be_successful
end
it "Should Reject GraphiQL if allow_graphiql: false" do
@user = User.admin.first
AppConfig.general = {
allow_graphiql: false,
}
auth_headers = Devise::JWT::TestHelpers.auth_headers(headers, @user)
expect {
get graphiql_rails_path(auth: URI.encode(auth_headers["Authorization"]))
}.to raise_error(ActionController::BadRequest)
end
it "Should Reject GraphiQL if no `auth` field" do
expect {
get graphiql_rails_path
}.to raise_error(ActionController::BadRequest)
end
end
|
jerrywdlee/iuliana-challenges
|
app/models/house.rb
|
<reponame>jerrywdlee/iuliana-challenges
# A household from the challenge CSV; owns monthly dataset readings.
class House < ApplicationRecord
has_many :datasets
# city_id is back-filled after import (DataLoader.sync_cities_houses),
# so the association must tolerate nil.
belongs_to :city, optional: true
validates_presence_of :firstname, :lastname, :city_text, :num_of_people, :has_child
# Boolean-backed enum: maps the CSV's "Yes"/"No" onto the has_child
# boolean column, so the getter reads as "Yes"/"No" (see HouseType,
# which exposes has_child as a String).
enum has_child: {
Yes: true,
No: false,
}
# Raw boolean value of has_child, bypassing the enum's string mapping.
def has_child_bool
has_child_before_type_cast
end
# "Firstname Lastname" convenience label.
def full_name
"#{firstname} #{lastname}"
end
end
# Columns:
# firstname, lastname, city_text, city_id, num_of_people, has_child
|
jerrywdlee/iuliana-challenges
|
config/routes.rb
|
Rails.application.routes.draw do
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
root "contents#index"
# Devise auth with a custom sessions controller (adds XHR/JSON login).
devise_for :users, controllers: {
sessions: "users/sessions",
}
# Collection-only JSON endpoints; see ApiController.
resources :api, only: [] do
collection do
get :default_user
get :user_info
post :load_csv
post :create_user
end
end
# GraphiQL
# See `config/initializers/graphiql.rb`
# The engine is reachable only when the GraphiQLAuthenticate constraint
# passes (allow_graphiql flag + decodable JWT in the `auth` param).
constraints GraphiQLAuthenticate.new do
mount GraphiQL::Rails::Engine, at: "/graphiql", graphql_path: "/graphql"
end
post "/graphql", to: "graphql#execute"
end
|
jerrywdlee/iuliana-challenges
|
config/initializers/graphiql.rb
|
<reponame>jerrywdlee/iuliana-challenges<filename>config/initializers/graphiql.rb
# See https://github.com/rmosolgo/graphiql-rails
# Injects the Authorization header into GraphiQL's requests, taken from
# the `auth` query parameter; also relaxes X-Frame-Options so the page
# can be embedded in an iframe.
GraphiQL::Rails.config.headers['Authorization'] = -> (context) {
# this `context` is a `view_context`
# allow iframe use
context.headers['X-Frame-Options'] = 'ALLOWALL'
# eg: localhost:18000/graphiql?auth=Bearer%20XXXXXXXXXXX
# JWT Token: "<KEY> <PASSWORD>"
context.params['auth']
}
# Route constraint guarding the mounted GraphiQL engine (see
# config/routes.rb). Requires AppConfig.general[:allow_graphiql] and a
# signature-valid JWT in the `auth` query parameter; development mode
# skips all checks.
class GraphiQLAuthenticate
  # Returns true when the request may reach GraphiQL; raises
  # ActionController::BadRequest (HTTP 400) otherwise.
  def matches?(request)
    return true if Rails.env.development?

    auth_code = request.params['auth']
    if !AppConfig.general[:allow_graphiql] || auth_code.blank?
      # will cause http status 400
      raise ActionController::BadRequest
    end

    # Fix: URI.decode was removed in Ruby 3.0; URI::DEFAULT_PARSER.unescape
    # is the drop-in percent-decoding equivalent for "Bearer%20<token>".
    _, token = URI::DEFAULT_PARSER.unescape(auth_code).split(' ')
    # JWT.decode verifies the signature and raises JWT::ExpiredSignature
    # once the token's `exp` claim has passed.
    jwt_info, _ = JWT.decode(token, Devise::JWT.config[:secret])
    # Active This for admin check, etc.
    # user = User.find_by(jwt_info.symbolize_keys.slice(:jti, :id))
    # raise ActiveRecord::RecordNotFound if user.blank? || !user.admin?
    true
  end
end
|
jerrywdlee/iuliana-challenges
|
spec/graphql/mutations/update_app_config_spec.rb
|
require "rails_helper"
# Covers Mutations::UpdateAppConfig: full and partial section updates plus
# the admin-only authorization guard.
# NOTE(review): "<EMAIL>"/"<PASSWORD>" look like redaction placeholders from
# a data-scrubbing pass, not valid Ruby — confirm against the original file.
RSpec.describe "GraphQL Mutation on AppConfigs" do
it "Create dummy users" do
email = "<EMAIL>"
pass = "<PASSWORD>"
@admin_user = User.create!({
email: email, password: <PASSWORD>,
password_confirmation: <PASSWORD>,
roles: ["admin"]
})
email = "<EMAIL>"
@normal_user = User.create!({
email: email, password: <PASSWORD>,
password_confirmation: <PASSWORD>,
roles: ["observer"]
})
expect(@admin_user).to be_truthy
expect(@normal_user).to be_truthy
end
it "Should update challenge2" do
context = { current_user: User.admin.first }
dummy_url = "https://example.org/total_watt_url.csv"
query = <<~GRAPHQL
mutation {
updateAppConfig(
input: {
appConfigs: { challenge2: { totalWattUrl: "#{dummy_url}" } }
}
) { appConfigs { challenge2 { totalWattUrl } } }
}
GRAPHQL
data = Util.graphql_query(query, context: context)
.dig("updateAppConfig", "appConfigs")
expect(data["challenge2"]["totalWattUrl"]).to eq dummy_url
end
# Partial update: untouched keys of a section must keep their values
# (sections are deep-merged by the mutation).
it "Should update challenge3 partial" do
context = { current_user: User.admin.first }
dummy_url = "https://example.com/house_data_url.csv"
query = <<~GRAPHQL
mutation {
updateAppConfig(
input: {
appConfigs: { challenge3: { houseDataUrl: "#{dummy_url}" } }
}
) { appConfigs { challenge3 { houseDataUrl, datasetUrl } } }
}
GRAPHQL
data = Util.graphql_query(query, context: context)
.dig("updateAppConfig", "appConfigs")
expect(data["challenge3"]["houseDataUrl"]).to eq dummy_url
expect(data["challenge3"]["datasetUrl"]).to eq AppConfig.challenge3[:dataset_url]
end
it "Should throw error if user is not admin" do
context = { current_user: User.observer.first }
query = <<~GRAPHQL
mutation {
updateAppConfig( input: { appConfigs: {} }) {
appConfigs { challenge2 { totalWattUrl } }
}
}
GRAPHQL
expect { Util.graphql_query(query) }.to raise_error "GraphQL: Need admin user"
end
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/mutations/update_user.rb
|
module Mutations
# Admin-only partial update of a user record, with guards protecting the
# seeded default and demo accounts.
class UpdateUser < BaseMutation
description "Update User Info"
# return fields
field :user, Types::UserType, null: false
# define arguments
argument :id, ID, required: true,
description: "Find User by ID for Update"
argument :user, Types::UpdateUserType, required: true,
description: "User Info"
# define resolve method
def resolve(id:, user:)
Util.auth_user_graphql(context[:current_user])
user_params = user.to_h
# Devise requires a matching confirmation when the password changes.
if user_params[:password].present?
user_params[:password_confirmation] = user_params[:password]
end
ActiveRecord::Base.transaction do
@user = User.find(id)
# Compute the guard flags BEFORE assign_attributes mutates @user:
# default user keeps email/password/admin role; demo user keeps
# email/password. Raising inside the transaction rolls back.
alt_login_flg = user_params[:email].present? || user_params[:password].present?
default_user_flg = @user.email == EasySettings.default_user.email
demo_user_flg = @user.email == EasySettings.demo_user.email
@user.assign_attributes(user_params)
if default_user_flg && (alt_login_flg || !@user.admin?)
msg = "GraphQL: Can not alt default user's email, password or roles"
raise GraphQL::ExecutionError, msg
end
if demo_user_flg && alt_login_flg
msg = "GraphQL: Can not alt demo user's email or password"
raise GraphQL::ExecutionError, msg
end
@user.save!
end
{ user: @user }
end
end
end
|
jerrywdlee/iuliana-challenges
|
app/lib/util.rb
|
require "json"

# Grab-bag of helpers shared by controllers, GraphQL resolvers and specs.
class Util
  class << self
    # Normalises a ransack-style query hash (or JSON string) coming from
    # the JS client: camelCase keys become snake_case, sort clauses are
    # squished/underscored, and nested condition groups (g/groupings) are
    # normalised recursively. Returns a new hash; the input is untouched.
    def form_ransack_params(params)
      source = params.deep_dup
      source = JSON.parse(source) if source.instance_of?(String)

      source.each_with_object({}) do |(key, val), normalized|
        key = key.to_s.underscore
        normalized[key] =
          if %w[s sorts].include?(key) && val.instance_of?(String)
            val.squish.underscore
          elsif %w[s sorts].include?(key) && val.instance_of?(Array)
            val.map { |clause| clause.squish.underscore }
          elsif %w[g groupings].include?(key) && val.instance_of?(Hash)
            val.values.map { |group| Util.form_ransack_params(group) }
          elsif %w[g groupings].include?(key) && val.instance_of?(Array)
            val.map { |group| Util.form_ransack_params(group) }
          else
            val
          end
      end
    end

    # Executes a GraphQL query against the application schema and returns
    # the "data" payload. When the response carries errors and +raise_err+
    # is true, the first error message is raised instead.
    def graphql_query(query_string, raise_err = true, context: {})
      result = CaesarisSchema.execute(query_string, context: context)
      raise result["errors"][0]["message"] if result["errors"].present? && raise_err

      result["data"]
    end

    # True when every bit of +mask+ is set in +code+. Both arguments accept
    # Integers or binary-digit Strings (e.g. "101").
    def auth_binary(code, mask)
      code_bits = to_bitmask(code)
      mask_bits = to_bitmask(mask)
      (code_bits & mask_bits) == mask_bits
    end

    # ORs a list of bitmasks (Integers or binary-digit Strings) into one
    # combined Integer code. An empty list yields 0.
    def set_binary_codes(mask_list)
      mask_list.map { |mask| to_bitmask(mask) }.reduce(0, :|)
    end

    # Raises unless +user+ has the given role; always passes in development
    # so GraphiQL can be used without logging in.
    def auth_user_graphql(user, role = "admin")
      return true if Rails.env.development?

      authorized = !user.blank? && user.try("#{role}?")
      raise GraphQL::ExecutionError.new("GraphQL: Need admin user") unless authorized
    end

    private

    # Coerces an Integer or binary-digit String into an Integer bitmask.
    def to_bitmask(value)
      value.is_a?(String) ? value.to_i(2) : value.to_i
    end
  end
end
|
jerrywdlee/iuliana-challenges
|
spec/graphql/mutations/update_user_spec.rb
|
<filename>spec/graphql/mutations/update_user_spec.rb
require "rails_helper"
# End-to-end specs for the user CRUD GraphQL mutations (newUser, updateUser,
# deleteUser), including admin-only authorization and the protected
# default/demo accounts.
# NOTE(review): examples appear order-dependent — "Create dummy users" seeds
# the admin/observer accounts the later examples look up; confirm the suite
# runs in defined order.
RSpec.describe "Create and update user by GraphQL Mutation" do
  it "Create dummy users" do
    email = "<EMAIL>"
    pass = "<PASSWORD>"
    @admin_user = User.create({
      email: email, password: <PASSWORD>,
      password_confirmation: <PASSWORD>,
      roles: ["admin"]
    })
    email = "<EMAIL>"
    @normal_user = User.create({
      email: email, password: <PASSWORD>,
      password_confirmation: <PASSWORD>,
      roles: ["observer"]
    })
  end

  context "Create user" do
    email = "<EMAIL>"
    role = "observer"
    query = <<~GRAPHQL
      mutation {
        newUser (
          input: {
            user: {
              email: "#{email}",
              password: "<PASSWORD>",
              roles: ["#{role}"]
            }
          }) {
          user {
            id, email, name, roles, imgUrl
          }
        }
      }
    GRAPHQL

    it "Should throw error if user is not admin" do
      context = { current_user: User.observer.first }
      expect {
        Util.graphql_query(query, context: context)
      }.to raise_error "GraphQL: Need admin user"
    end

    it "Should create new user" do
      context = { current_user: User.admin.first }
      data = nil
      expect {
        data = Util.graphql_query(query, context: context).dig("newUser", "user")
      }.to change { User.count }.by(+1)
      expect(data["email"]).to eq email
      expect(data["roles"]).to include role
      expect(User.find_by(email: email)).to be_truthy
    end

    # Re-running the same mutation must hit the uniqueness validation.
    it "Should throw error if user email doubled" do
      context = { current_user: User.admin.first }
      expect {
        Util.graphql_query(query, context: context)
      }.to raise_error "Validation failed: Email has already been taken"
    end
  end

  context "Update user" do
    it "Should throw error if user is not admin" do
      @user = User.observer.last
      roles = %w(editor observer)
      query = <<~GRAPHQL
        mutation {
          updateUser (
            input: {
              id: #{@user.id}
              user: { roles: #{roles} }
            }) {
            user {
              id, email, name, roles
            }
          }
        }
      GRAPHQL
      context = { current_user: User.observer.first }
      expect {
        Util.graphql_query(query, context: context)
      }.to raise_error "GraphQL: Need admin user"
    end

    it "Should update user" do
      @user = User.observer.last
      roles = %w(editor observer)
      query = <<~GRAPHQL
        mutation {
          updateUser (
            input: {
              id: #{@user.id}
              user: {
                password: "<PASSWORD>"
                roles: #{roles}
              }
            }) {
            user {
              id, email, name, roles
            }
          }
        }
      GRAPHQL
      context = { current_user: User.admin.first }
      data = Util.graphql_query(query, context: context)
        .dig("updateUser", "user")
      user_after = User.find(@user.id)
      expect(data["email"]).to eq user_after.email
      expect(data["name"]).to eq user_after.name
      user_after.roles.each do |role|
        expect(data["roles"]).to include role
      end
    end

    it "Should not update default_user" do
      # Ensure the protected default user exists before trying to alter it.
      @default_user = User.find_or_initialize_by(
        email: EasySettings.default_user.email,
      )
      if @default_user.id.blank?
        password = EasySettings.default_user.password
        @default_user.assign_attributes(
          password: password,
          password_confirmation: password,
          roles: [EasySettings.user_roles.keys.first],
        )
        @default_user.save
      end
      @default_user = User.find_by(
        email: EasySettings.default_user.email,
      )
      new_email = "<EMAIL>"
      query = <<~GRAPHQL
        mutation {
          updateUser (
            input: {
              id: #{@default_user.id}
              user: { email: "#{new_email}" }
            }) {
            user {
              id, email, name, roles
            }
          }
        }
      GRAPHQL
      context = { current_user: User.admin.first }
      expect {
        Util.graphql_query(query, context: context)
      }.to raise_error "GraphQL: Can not alt default user's email, password or roles"
    end

    it "Should not update demo_user" do
      # Same guard for the demo account (roles may still be changed there).
      @demo_user = User.find_or_initialize_by(
        email: EasySettings.demo_user.email,
      )
      if @demo_user.id.blank?
        password = <PASSWORD>Settings.demo_user.password
        @demo_user.assign_attributes(
          password: password,
          password_confirmation: password,
        )
        @demo_user.save
      end
      @demo_user = User.find_by(
        email: EasySettings.demo_user.email,
      )
      new_email = "<EMAIL>"
      query = <<~GRAPHQL
        mutation {
          updateUser (
            input: {
              id: #{@demo_user.id}
              user: { email: "#{new_email}" }
            }) {
            user {
              id, email, name, roles
            }
          }
        }
      GRAPHQL
      context = { current_user: User.admin.first }
      expect {
        Util.graphql_query(query, context: context)
      }.to raise_error "GraphQL: Can not alt demo user's email or password"
    end
  end

  context "Delete user" do
    it "Should throw error if user is not admin" do
      query = <<~GRAPHQL
        mutation {
          deleteUser(input: {id: 3}) {
            user { id email }
          }
        }
      GRAPHQL
      context = { current_user: User.observer.first }
      expect {
        Util.graphql_query(query, context: context)
      }.to raise_error "GraphQL: Need admin user"
    end

    it "Should not delete default_user" do
      @default_user = User.find_by(
        email: EasySettings.default_user.email,
      )
      query = <<~GRAPHQL
        mutation {
          deleteUser(input: {id: #{@default_user.id}}) {
            user { id email }
          }
        }
      GRAPHQL
      context = { current_user: User.admin.first }
      expect {
        Util.graphql_query(query, context: context)
      }.to raise_error "GraphQL: Can not delete demo or default user"
    end

    it "Should delete user" do
      @user = User.find_by(
        email: "<EMAIL>",
      )
      query = <<~GRAPHQL
        mutation {
          deleteUser(input: {id: #{@user.id}}) {
            user { id email }
          }
        }
      GRAPHQL
      context = { current_user: User.admin.first }
      data = nil
      expect {
        data = Util.graphql_query(query, context: context)
          .dig("deleteUser", "user")
      }.to change { User.count }.by(-1)
      expect(data["email"]).to eq @user.email
      user_after = User.find_by(
        email: "<EMAIL>",
      )
      expect(user_after).to be_falsey
    end
  end
end
|
jerrywdlee/iuliana-challenges
|
spec/graphql/house_type_spec.rb
|
<filename>spec/graphql/house_type_spec.rb
require "rails_helper"
# Read-side GraphQL specs for HouseType: lookup by id, association
# traversal, city filter, and ransack queries (hash and JSON-string forms).
# Relies on pre-seeded House/City rows with ids 1..3.
RSpec.describe "GraphQL on HouseType" do
  it "Should exec house query" do
    query = <<~GRAPHQL
      {
        house(id: 1) { firstname, lastname }
      }
    GRAPHQL
    data = Util.graphql_query(query)
    house = House.find(1)
    expect(data.dig("house", "firstname")).to eq house.firstname
    expect(data.dig("house", "lastname")).to eq house.lastname
  end

  it "Should find city from house query" do
    query = <<~GRAPHQL
      {
        house(id: 2) { city { name } }
      }
    GRAPHQL
    data = Util.graphql_query(query)
    city = House.find(2).city
    expect(data.dig("house", "city", "name")).to eq city.name
  end

  it "Should find houses by city" do
    city = City.all.first
    query = <<~GRAPHQL
      {
        houses(city: "#{city.name}") { id, firstname }
      }
    GRAPHQL
    data = Util.graphql_query(query)
    house_num = city.houses.size
    expect(data["houses"].size).to eq house_num
  end

  it "Should find houses by ransack like query" do
    query = <<~GRAPHQL
      {
        houses(q: { idEq: 3 }) { id, firstname }
      }
    GRAPHQL
    data = Util.graphql_query(query)
    house = House.find(3)
    expect(data["houses"][0]["firstname"]).to eq house.firstname
  end

  it "Should find houses by ransack in json" do
    city = City.first
    houses = city.houses
    # `q` can also be a JSON-encoded string; quotes are double-escaped
    # because they pass through the Ruby heredoc first.
    query = <<~GRAPHQL
      {
        houses(q: "{ \\"cityNameCont\\": \\"#{city.name}\\"}") {
          id, fullName, city { name }
        }
      }
    GRAPHQL
    data = Util.graphql_query(query)
    house = House.find(3)
    expect(data["houses"].size).to eq houses.size
    expect(data.dig("houses", 0, "city", "name")).to eq city.name
  end
end
|
jerrywdlee/iuliana-challenges
|
spec/models/house_spec.rb
|
require "rails_helper"
# Model-level sanity checks for House; relies on pre-seeded rows
# (House.first is expected to exist).
RSpec.describe House, type: :model do
  before do
    @house = House.first
  end

  it "Should find a house" do
    expect(@house).to be_truthy
  end

  it "Should have relations on city" do
    expect(@house.city).to be_truthy
  end

  it "Should have relations on datasets" do
    expect(@house.datasets).to be_truthy
  end

  it "Should have `has_child` as enum and boolean" do
    @house = House.where(has_child: true).first
    # The enum reader returns the "Yes"/"No" label; *_bool exposes the
    # underlying boolean column.
    expect(@house.has_child).to eq "Yes"
    expect(@house.has_child_bool).to eq true
  end

  it "Should return full name" do
    @house = House.last
    expect(@house.full_name).to include @house.firstname
    expect(@house.full_name).to include @house.lastname
  end
end
|
jerrywdlee/iuliana-challenges
|
db/migrate/20190612075751_create_datasets.rb
|
class CreateDatasets < ActiveRecord::Migration[5.2]
  # Monthly energy measurements, one row per house per month.
  def change
    create_table :datasets do |t|
      t.integer :label
      t.integer :house_id
      t.integer :year
      t.integer :month
      t.float :temperature
      t.float :daylight
      t.integer :energy_production

      t.timestamps
    end

    # Index every searchable column so filtered queries stay fast.
    %i[label house_id year month temperature daylight energy_production].each do |column|
      add_index :datasets, column
    end
  end
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/types/app_config_type.rb
|
<filename>app/graphql/types/app_config_type.rb
module Types
  # GraphQL projection of AppConfig's `general` settings hash.
  class GeneralType < Types::BaseObject
    field :allow_graphiql, Boolean, null: false
    field :show_demo_user, Boolean, null: false
  end

  # GraphQL projection of AppConfig's `challenge2` settings hash.
  class Challenge2Type < Types::BaseObject
    field :total_watt_url, String, null: false
  end

  # GraphQL projection of AppConfig's `challenge3` settings hash.
  class Challenge3Type < Types::BaseObject
    field :house_data_url, String, null: false
    field :dataset_url, String, null: false
  end

  # Top-level application-config object combining the sections above;
  # field_keys lists the setting names declared on AppConfig.
  class AppConfigType < Types::BaseObject
    field :general, GeneralType, null: false
    field :challenge2, Challenge2Type, null: false
    field :challenge3, Challenge3Type, null: false
    field :field_keys, [String], null: false
  end
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/types/data_series_type.rb
|
<filename>app/graphql/types/data_series_type.rb
module Types
  # Chart payload resolved from DataSeriesService (see QueryType#data_series).
  # The per-series values are free-form JSON rather than typed lists.
  class DataSeriesType < Types::BaseObject
    field :date_labels, [String], null: false
    field :house_energy_prod, GraphQL::Types::JSON, null: false
    field :person_energy_prod, GraphQL::Types::JSON, null: false
    field :temperature, GraphQL::Types::JSON, null: false
    field :daylight, GraphQL::Types::JSON, null: false
  end
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/types/new_user_type.rb
|
module Types
  # Input type for creating users: inherits UpdateUserType's optional
  # arguments, but email and password become mandatory on creation.
  class NewUserType < Types::UpdateUserType
    argument :email, String, required: true
    argument :password, String, required: true
  end
end
|
jerrywdlee/iuliana-challenges
|
lib/tasks/build_admin.rake
|
namespace :vue_admin do
  # Remove previously published Vue build artifacts from public/.
  task clean: :environment do
    puts 'Clean up before assets:precompile'
    public_path = Rails.root.join('public')
    cmd = []
    cmd << "rm -rf #{public_path.join('precache-manifest.*')}"
    cmd << "rm -rf #{public_path.join('css')}"
    cmd << "rm -rf #{public_path.join('js')}"
    cmd << "rm -rf #{public_path.join('img')}"
    cmd << "rm -rf #{public_path.join('fonts')}"
    # Fail the rake run if cleanup fails; the original ignored system()'s
    # return value, so a broken build step went unnoticed.
    system(cmd.join(" && ")) || abort('vue_admin:clean failed')
  end

  # Build the Vue admin SPA and publish its dist/ output into public/.
  task build: :environment do
    admin_path = Rails.root.join('vendor/admin')
    public_path = Rails.root.join('public')
    cmd = []
    cmd << "cd #{admin_path} && yarn build"
    cmd << "cp -Rf #{admin_path.join('dist/*')} #{public_path}"
    system(cmd.join(" && ")) || abort('vue_admin:build failed')
  end
end

# Hook the SPA build into the standard asset-pipeline deploy flow.
Rake::Task['assets:precompile'].enhance ['vue_admin:clean'] do
  Rake::Task['vue_admin:build'].invoke
end
|
jerrywdlee/iuliana-challenges
|
app/graphql/types/query_type.rb
|
<gh_stars>0
module Types
  # Root query type: entry points for all read-only GraphQL queries.
  class QueryType < Types::BaseObject
    # Add root-level fields here.
    # They will be entry points for queries on your schema.

    # TODO: Fix vulnerability of circular reference
    # Eg: `house(id: 1) { datasets { house { datasets { house { id } } } } }`
    field :house, Types::HouseType, null: true do
      description "Find a house by ID"
      argument :id, ID, required: true,
        description: "House ID"
    end

    def house(id:)
      House.find(id)
    end

    field :houses, [HouseType], null: true do
      description "Find houses by ransack"
      argument :city, String, required: false,
        description: "City name for house"
      argument :q, BaseScalar, required: false,
        description: "Ransack params"
    end

    # Precedence: explicit ransack query > city filter > all houses.
    def houses(q: nil, city: nil)
      if q.present?
        # when search on `has_child`, use `{hasChildEq: false}`
        @q = House.includes(:datasets, :city).joins(:city, :datasets)
          .ransack(Util.form_ransack_params(q))
        @q.sorts = "id asc" if @q.sorts.blank?
        @q.result.distinct
      elsif city.present?
        House.joins(:city).where(cities: { name: city.capitalize }).order(:id)
      else
        House.all.order(:id)
      end
    end

    field :city, CityType, null: true do
      description "Get city by city name"
      argument :name, String, required: true,
        description: "City name"
    end

    # City names are stored capitalized, so normalize the lookup key.
    def city(name:)
      City.find_by(name: name.capitalize)
    end

    field :cities, [CityType], null: true do
      description "Find cities by ransack"
      argument :q, BaseScalar, required: false,
        description: "Ransack params"
    end

    def cities(q: nil)
      if q.present?
        @q = City.includes(:houses, :datasets).joins(:houses, :datasets)
          .ransack(Util.form_ransack_params(q))
        @q.sorts = "id asc" if @q.sorts.blank?
        @q.result(distinct: true)
      else
        City.all.order(:id)
      end
    end

    field :datasets, [DatasetType], null: true do
      description "Find datasets by ransack"
      argument :q, BaseScalar, required: false,
        description: "Ransack params"
      argument :page, Integer, required: false,
        description: "Page number"
      argument :per, Integer, required: false,
        description: "Items per page"
    end

    # Paginates only when `page` or `per` is supplied alongside `q`.
    def datasets(q: nil, page: nil, per: nil)
      if q.present?
        # when search at `cities`, use `{house_city_name_cont: "London"}`
        @q = Dataset.includes(house: :city).joins(house: :city)
          .ransack(Util.form_ransack_params(q))
        @q.sorts = "id asc" if @q.sorts.blank?
        if per.nil? && page.nil?
          @q.result(distinct: true)
        else
          @q.result(distinct: true).page(page).per(per)
        end
      else
        Dataset.all.order(:id)
      end
    end

    field :data_series, DataSeriesType, null: true do
      description "Get Data Series"
    end

    def data_series
      DataSeriesService.new
    end

    field :app_configs, AppConfigType, null: true do
      description "Get Application Configs"
    end

    def app_configs
      AppConfig
    end

    field :users, [UserType], null: true do
      description "Find users by ransack"
      argument :q, BaseScalar, required: false,
        description: "Ransack params"
    end

    # Admin-only: Util.auth_user_graphql raises unless current_user is admin.
    def users(q: nil)
      Util.auth_user_graphql(context[:current_user])
      if q.present?
        @q = User.ransack(Util.form_ransack_params(q))
        @q.sorts = "id asc" if @q.sorts.blank?
        @q.result(distinct: true)
      else
        User.all.order(:id)
      end
    end
  end
end
|
jerrywdlee/iuliana-challenges
|
app/lib/data_loader.rb
|
require "csv"
require "open-uri"
class DataLoader
  class << self
    ### For Challenge 3 ###

    # Import houses from a CSV at +uri+ (local path or URL).
    # Expected CSV headers:
    #   :id, :firstname, :lastname, :city, :num_of_people, :has_child
    # The whole import is atomic: any bad row rolls everything back.
    def load_houses(uri)
      file = load_file_as_stream(uri)

      ActiveRecord::Base.transaction do
        CSV.new(file, **csv_options).each do |line|
          house_params = line.to_hash
          House.create!(
            **house_params.except(:id, :city),
            city_text: house_params[:city],
            # House ID is used by Datasets, must use csv's ID
            id: house_params[:id], # TODO: deal with IDs when adding new files
          )
        end
      end
    end

    # Create a City row for every distinct city name found on House.
    def load_cities
      city_texts = House.distinct.pluck(:city_text)
      ActiveRecord::Base.transaction do
        city_texts.each do |city_text|
          City.find_or_create_by!(name: city_text)
        end
      end
    end

    # Backfill houses.city_id by matching the raw city_text column.
    def sync_cities_houses
      cities = City.all
      ActiveRecord::Base.transaction do
        cities.each do |city|
          houses = House.where(city_text: city.name)
          houses.update_all(city_id: city.id)
        end
      end
    end

    # Import measurement rows from a CSV at +uri+.
    # Expected CSV headers:
    #   :id, :label, :house, :year, :month, :temperature, :daylight, :energy_production
    def load_dataset(uri)
      file = load_file_as_stream(uri)

      ActiveRecord::Base.transaction do
        CSV.new(file, **csv_options).each do |line|
          dataset_params = line.to_hash
          Dataset.create!(
            **dataset_params.except(:id, :house),
            house_id: dataset_params[:house],
            id: dataset_params[:id], # TODO: deal with IDs when adding new files
          )
        end
      end
    end

    # Open +uri+ for reading and return an IO-like stream.
    #
    # Fix 1: the original regex /^https?|^ftp\:\/\// only anchored "://" to
    # the ftp branch, so any local path merely *starting* with "http" was
    # treated as remote.
    # Fix 2: Kernel#open stopped handling URLs in Ruby 3.0 and is unsafe
    # with attacker-controlled strings (e.g. "|cmd"); use URI.open instead.
    def load_file_as_stream(uri)
      if uri.to_s.match?(%r{\A(?:https?|ftp)://})
        URI.open(uri)
      else
        File.open(uri, "r")
      end
    end

    # Shared CSV parsing options: first row is the header, header names
    # become snake_cased symbols, numeric-looking values are auto-converted,
    # blank lines are skipped.
    def csv_options
      {
        headers: :first_row,
        header_converters: ->(h) { h.underscore.to_sym },
        converters: :all,
        skip_blanks: true,
      }
    end
  end
end
|
jerrywdlee/iuliana-challenges
|
db/migrate/20190612070709_create_houses.rb
|
<filename>db/migrate/20190612070709_create_houses.rb<gh_stars>0
class CreateHouses < ActiveRecord::Migration[5.2]
  # Households from the challenge CSV; city_text keeps the raw CSV value
  # while city_id is backfilled later from the City table.
  def change
    create_table :houses do |t|
      t.string :firstname
      t.string :lastname
      t.string :city_text
      t.integer :city_id
      t.integer :num_of_people
      t.boolean :has_child

      t.timestamps
    end

    # Index all filterable columns.
    %i[firstname lastname city_text city_id num_of_people has_child].each do |column|
      add_index :houses, column
    end
  end
end
|
jerrywdlee/iuliana-challenges
|
app/models/app_config.rb
|
<reponame>jerrywdlee/iuliana-challenges<gh_stars>0
# RailsSettings Model
class AppConfig < RailsSettings::Base
  cache_prefix { "v1" }

  # Names of all fields declared below, captured by the `field` override
  # and exposed to GraphQL through AppConfigType's field_keys.
  @field_keys = []

  # Wrap rails-settings' `field` DSL so every declared key is recorded
  # before delegating to the original implementation.
  def self.field(key, **opts)
    @field_keys << key.to_sym
    super(key, **opts)
  end

  def self.field_keys
    @field_keys
  end

  # Feature toggles; both default on in development and can be forced on
  # elsewhere via environment variables.
  field :general, type: :hash, default: {
    # allow_graphiql: true
    allow_graphiql: Rails.env.development? || ENV["ALLOW_GRAPHIQL"].present?,
    show_demo_user: Rails.env.development? || ENV["SHOW_DEMO_USER"].present?,
  }

  # Source CSV URLs for the energy-data challenges.
  field :challenge2, type: :hash, default: {
    total_watt_url: "https://raw.githubusercontent.com/jerrywdlee/EnergyDataSimulationChallenge/master/challenge2/data/total_watt.csv",
  }

  field :challenge3, type: :hash, default: {
    house_data_url: "https://raw.githubusercontent.com/jerrywdlee/EnergyDataSimulationChallenge/master/challenge3/data/house_data.csv",
    dataset_url: "https://raw.githubusercontent.com/jerrywdlee/EnergyDataSimulationChallenge/master/challenge3/data/dataset_50.csv",
  }
end
|
ixti/redis-lockers
|
lib/redis/lockers/lock.rb
|
# frozen_string_literal: true
require "securerandom"
require "concurrent/utility/monotonic_time"
require "redis/prescription"
class Redis
  module Lockers
    # Single lock instance.
    #
    # Wraps one named lease: a random nonce is written under the key with a
    # TTL, and only the holder of that nonce can release it.
    class Lock
      LOCK_SCRIPT = Redis::Prescription.read("#{__dir__}/scripts/lock.lua")
      private_constant :LOCK_SCRIPT

      UNLOCK_SCRIPT = Redis::Prescription.read("#{__dir__}/scripts/unlock.lua")
      private_constant :UNLOCK_SCRIPT

      # Create a new Lock instance.
      # @param key [#to_s] Resource name
      # @param ttl [#to_i] TTL in milliseconds
      def initialize(key, ttl:)
        @key   = key.to_s
        @ttl   = ttl.to_i
        # Allowed clock drift in milliseconds: 1% of TTL plus 2ms, as in the
        # Redlock algorithm description.
        @drift = @ttl * 0.01 + 2.0
        @nonce = SecureRandom.uuid
      end

      # Attempts to acquire lease.
      #
      # The lease only counts as acquired when the round-trip finished with
      # enough of the TTL (TTL minus drift) still remaining.
      #
      # @param redis [Redis] Redis client
      # @return [Boolean] whenever lock was acquired or not.
      def acquire(redis)
        # FIX: @ttl/@drift are in milliseconds while Concurrent.monotonic_time
        # returns seconds, so convert before computing the validity deadline;
        # the original mixed units and made the deadline ~1000x too lenient.
        deadline = timestamp + (@ttl - @drift) / 1000.0
        success  = LOCK_SCRIPT.eval(redis, {
          :keys => [@key],
          :argv => [@nonce, @ttl]
        })

        success && timestamp <= deadline || false
      end

      # Release acquired lease.
      #
      # Has no effect when the lease is held by another lock (nonce mismatch).
      #
      # @param redis [Redis] Redis client
      # @return [void]
      def release(redis)
        UNLOCK_SCRIPT.eval(redis, :keys => [@key], :argv => [@nonce])
      end

      private

      # Returns monotonic timestamp in seconds.
      # @return [Float]
      def timestamp
        Concurrent.monotonic_time
      end
    end
  end
end
|
ixti/redis-lockers
|
spec/redis/lockers_spec.rb
|
# frozen_string_literal: true
require "redis/lockers"
# Behavior of the Lockers.acquire convenience wrapper; REDIS is the shared
# client set up by the suite (not defined in this file).
RSpec.describe Redis::Lockers do
  describe ".acquire" do
    it "yields control when lock was acquired" do
      expect { |b| described_class.acquire(REDIS, :xxx, :ttl => 7000, &b) }.
        to yield_control
    end

    it "releases lock lease even if block failed" do
      begin
        described_class.acquire(REDIS, :xxx, :ttl => 123_456) { raise "boom" }
      rescue
        nil # do nothing
      end

      # The lease must be free again despite the exception above.
      expect { |b| described_class.acquire(REDIS, :xxx, :ttl => 7000, &b) }.
        to yield_control
    end

    it "does not yields control if lock can't be acquired" do
      # Hold the lease with a different nonce so acquire must fail.
      Redis::Lockers::Lock.new(:xxx, :ttl => 123_456).acquire(REDIS)

      expect { |b| described_class.acquire(REDIS, :xxx, :ttl => 7000, &b) }.
        not_to yield_control
    end
  end
end
|
ixti/redis-lockers
|
redis-lockers.gemspec
|
# frozen_string_literal: true
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "redis/lockers/version"
Gem::Specification.new do |spec|
  spec.name    = "redis-lockers"
  spec.version = Redis::Lockers::VERSION
  spec.authors = ["<NAME>"]
  spec.email   = ["<EMAIL>"]

  spec.summary     = "Yet another Redis-based lock manager."
  spec.description = <<~DESCRIPTION
    Simplified implementation of Redlock (distributed lock manger by antirez).
    Right now it works with single Redis node only.
  DESCRIPTION
  spec.homepage    = "https://github.com/ixti/redis-lockers"

  # Package every git-tracked file except tests.
  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.require_paths = ["lib"]

  spec.add_runtime_dependency "concurrent-ruby", "~> 1.0"
  spec.add_runtime_dependency "redis", "~> 4.0"
  spec.add_runtime_dependency "redis-prescription", "~> 1.0"

  spec.add_development_dependency "bundler", "~> 1.16"

  spec.required_ruby_version = "~> 2.3"
end
|
ixti/redis-lockers
|
lib/redis/lockers.rb
|
<reponame>ixti/redis-lockers
# frozen_string_literal: true
require "redis/lockers/version"
require "redis/lockers/lock"
# @see https://github.com/redis/redis-rb
class Redis
  # Distributed locks with Redis.
  module Lockers
    # Builds a one-shot lock around the given resource and runs the block
    # only when the lease is granted; the lease is always released afterwards.
    #
    # @example
    #
    #     REDIS = Redis.new
    #     Redis::Lockers.acquire(REDIS, :resource_name, :ttl => 60_000) do
    #       # lock was successfully acquired - we're good to run our code
    #     end
    #
    # @yield [] Executes given block if lock was acquired.
    # @return [Object] result of the last expression in the block
    def self.acquire(redis, *args, **kwargs)
      lock = Lock.new(*args, **kwargs)
      begin
        yield if lock.acquire(redis)
      ensure
        lock.release(redis)
      end
    end
  end
end
|
ixti/redis-lockers
|
spec/redis/lockers/lock_spec.rb
|
<gh_stars>1-10
# frozen_string_literal: true
require "redis/lockers/lock"
RSpec.describe Redis::Lockers::Lock do
let(:alpha) { described_class.new(:xxx, :ttl => 123_456) }
let(:omega) { described_class.new(:xxx, :ttl => 123_456) }
describe "#acquire" do
it "returns true if lock was acquired" do
expect(alpha.acquire(REDIS)).to be true
end
it "returns false if lock is held by somebody else" do
omega.acquire(REDIS)
expect(alpha.acquire(REDIS)).to be false
end
it "returns false if lock lease acquire took longer than allowed ttl" do
expect(Concurrent).to receive(:monotonic_time).and_return(0, 123_456)
expect(alpha.acquire(REDIS)).to be false
end
end
describe "#release" do
before { alpha.acquire(REDIS) }
it "returns releases owned lease" do
alpha.release(REDIS)
expect(omega.acquire(REDIS)).to be true
end
it "returns does not releases lease owned by other lock" do
omega.release(REDIS)
expect(omega.acquire(REDIS)).to be false
end
end
end
|
ixti/redis-lockers
|
lib/redis/lockers/version.rb
|
# frozen_string_literal: true
class Redis
  module Lockers
    # Gem version string; referenced by the gemspec as
    # Redis::Lockers::VERSION.
    VERSION = "1.1.0"
  end
end
|
petertseng/adventofcode-rb-2019
|
07_amplification_circuit.rb
|
require_relative 'lib/intcode'
# `-v` anywhere in ARGV enables debug output (and is removed from ARGV).
VERBOSE = ARGV.delete('-v')

# Boot an amplifier with the given phase setting, then repeatedly feed it
# `input` until the program halts; returns the program's output list.
def const_input(mem, phase, input)
  ic = Intcode.new(mem, valid_ops: (1..8).to_a + [99]).continue(input: phase)
  ic.continue(input: input) until ic.halted?
  ic.output
end
# Assume each amplifier performs a linear mx+b transform.
# Determine m and b by running the amplifiers once, instead of once per permutation.
# Could take it even farther with dynamic programming:
# https://www.reddit.com/r/adventofcode/comments/e7q8fp/2019_day_7_part_2_c_feed_it_forward/
# But this is fast enough and I don't care.
# Score every phase permutation without re-running the Intcode program.
# Each amplifier's outputs are modelled as the linear transform
# output = m * input + b (see comment above).
def chain(mem, phases)
  # Measure each amplifier once: b = outputs for input 0,
  # m = (outputs for input 1) - b.
  amps = phases.to_h { |phase| [phase, {
    b: b = const_input(mem, phase, 0),
    m: const_input(mem, phase, 1).zip(b).map { |y, bb| y - bb },
  }.freeze] }.freeze
  puts amps if VERBOSE
  # The transpose below requires every amp to emit the same number of values.
  sizes = amps.values.flat_map { |a| [a[:b].size, a[:m].size] }.uniq
  raise "Incompatible sizes #{sizes}" if sizes.size != 1
  phases.permutation.map { |perm|
    # Interleave per-amp coefficients in execution order, then fold the
    # signal through the whole (possibly looped) chain.
    ms = perm.map { |phase| amps[phase][:m] }.transpose.flatten
    bs = perm.map { |phase| amps[phase][:b] }.transpose.flatten
    ms.zip(bs).reduce(0) { |signal, (m, b)| m * signal + b }
  }.max
end
# Program comes from a comma-separated ARGV[0] or from stdin/file via ARGF.
input = (ARGV[0]&.include?(?,) ? ARGV[0] : ARGF.read).split(?,).map(&method(:Integer)).freeze
# Part 1 uses phases 0..4; part 2 (feedback loop) uses phases 5..9.
[0...5, 5...10].each { |range| puts chain(input, range.to_a) }
|
petertseng/adventofcode-rb-2019
|
24_planet_of_discord.rb
|
<reponame>petertseng/adventofcode-rb-2019
SIDE_LEN = 5
# Part-2 iteration count; bitfield 1205552 (presumably the example input —
# TODO confirm) runs only 10 minutes, everything else 200.
NUM_ITERS = Hash.new(200)
NUM_ITERS[1205552] = 10

# Only 4 values are important: 0, 1, 2, 3+ (dead for sure)
# that's 2 bits
BITS_PER_NEIGHBOUR_COUNT = 2
NEIGHBOUR_COUNT_MASK = (1 << BITS_PER_NEIGHBOUR_COUNT) - 1

# For deciding whether a cell is alive at the next iteration,
# precompute groups at a time, keyed by concatenation of (neighbour counts, alive bits)
# (to disable cache, just lift the `until` and replace the `until` in `grow_bugs`)
# Through experimentation, 4 was a good group size? You'd think 5, but that was slower.
GROUP_SIZE = 4
BITS_PER_NEIGHBOUR_COUNT_GROUP = BITS_PER_NEIGHBOUR_COUNT * GROUP_SIZE
NEIGHBOUR_COUNT_GROUP_MASK = (1 << BITS_PER_NEIGHBOUR_COUNT_GROUP) - 1
ALIVE_GROUP_MASK = (1 << GROUP_SIZE) - 1

# GROUP_CACHE[(neighbour_counts << GROUP_SIZE) | alive_bits] = next-generation
# alive bits for a group of GROUP_SIZE cells.
GROUP_CACHE = (1 << (GROUP_SIZE * (BITS_PER_NEIGHBOUR_COUNT + 1))).times.map { |x|
  ncs = (x >> GROUP_SIZE) & NEIGHBOUR_COUNT_GROUP_MASK
  current_alive = x & ALIVE_GROUP_MASK
  pos = 0
  new_level = 0
  until ncs == 0
    nc = ncs & NEIGHBOUR_COUNT_MASK
    # A bug survives with exactly 1 neighbour; an empty cell spawns with 1 or 2.
    now_alive = nc == 1 || nc == 2 && current_alive & 1 == 0
    new_level |= 1 << pos if now_alive
    ncs >>= BITS_PER_NEIGHBOUR_COUNT
    current_alive >>= 1
    pos += 1
  end
  new_level
}.freeze
# Advance all levels one generation. `grids` is an array of bitfields (one
# per recursion level); returns the next generation, possibly grown by one
# level on either side.
def grow_bugs(grids, neigh)
  # Keyed by level, each value is the concatenated neighbour counts of all cells.
  # Takes advantage of the fact that it's only possible to spread one level above and below.
  # -1 will be at the end of the array, using negative indexing.
  # Slightly faster than using Hash.new(0) and lmin, lmax = neigh_count.keys.minmax
  neigh_count = Array.new(grids.size + 2, 0)

  grids.each_with_index { |grid, level|
    neigh.each { |v|
      masked = grid & v[:mask]
      v[:neigh][masked].each { |dlevel, neigh_contribs|
        existing = neigh_count[level + dlevel]
        if existing == 0
          neigh_count[level + dlevel] = neigh_contribs
        else
          # Saturating add on each group of two bits.
          # These formulae were determined by examining all 16 possibilities,
          # and determining formulae by hand.
          a = existing & 0xaaaaaaaaaaaaaaa
          b = existing & 0x555555555555555
          c = neigh_contribs & 0xaaaaaaaaaaaaaaa
          d = neigh_contribs & 0x555555555555555
          bd = b & d
          # upper_bits is pretty much exactly like an adder.
          upper_bits = a | c | (bd << 1)
          alow = a >> 1
          clow = c >> 1
          # lower_bits would normally be like an adder (just b ^ d),
          # but also adds the following:
          # alow & clow, so that 10+10 == 11
          # bd & (alow | clow), so that 11+01 == 01+11 == 11
          lower_bits = (b ^ d) | (alow & clow) | (bd & (alow | clow))
          # Alternative:
          # Only cases where lower bit is 0: 00+00, 00+10, 01+01, 10+00.
          # So lower bit is 1 if b | d, except if it's 01+01,
          # And also need to make 10+10 == 11, which alow & clow will do.
          #lower_bits = ((b | d) & (alow | clow | ~bd)) | (alow & clow)
          neigh_count[level + dlevel] = upper_bits | lower_bits
        end
      }
    }
  }

  # Trim empty levels off both ends; indices -1 and grids.size are the two
  # potential new levels sitting at the array edges.
  lmin = -1
  lmin += 1 until neigh_count[lmin] != 0
  lmax = grids.size
  lmax -= 1 until neigh_count[lmax] != 0

  # Note this doesn't preserve indices, but it doesn't matter.
  # Careful to preserve empty levels, however.
  (lmin..lmax).map { |level|
    ncs = neigh_count[level]
    current_alive = (0...grids.size).cover?(level) ? grids[level] : 0
    pos = 0
    new_level = 0
    # Step through the level GROUP_SIZE cells at a time via GROUP_CACHE.
    until ncs == 0
      nc = ncs & NEIGHBOUR_COUNT_GROUP_MASK
      now_alive = current_alive & ALIVE_GROUP_MASK
      new_level |= GROUP_CACHE[(nc << GROUP_SIZE) | now_alive] << pos
      ncs >>= BITS_PER_NEIGHBOUR_COUNT_GROUP
      current_alive >>= GROUP_SIZE
      pos += GROUP_SIZE
    end
    new_level
  }
end
# the neighbours of each individual position
# Hash[position] => Array[Tuple[delta_depth, position]]
def neigh_map(side_len, recursive: false)
  mid_coord = side_len / 2
  in_bounds = ->*ns { ns.all? { |n| (0...side_len).cover?(n) } }

  # [dy, dx, lambda giving the (delta_level, y, x) cells adjacent when the
  # step in that direction lands on the centre (i.e. the nested grid's edge)]
  directions = [
    [-1, 0, ->nx { [1, side_len - 1, nx] }],
    [1, 0, ->nx { [1, 0, nx] }],
    [0, -1, ->ny { [1, ny, side_len - 1] }],
    [0, 1, ->ny { [1, ny, 0] }],
  ].map(&:freeze).freeze

  (side_len * side_len).times.map { |pos|
    y, x = pos.divmod(side_len)

    unless recursive
      # Flat grid: ordinary 4-neighbourhood, clipped at the edges.
      next directions.filter_map { |dy, dx, _|
        ny = y + dy
        nx = x + dx
        [0, ny * side_len + nx] if in_bounds[ny, nx]
      }
    end

    directions.flat_map { |dy, dx, inner_neigh|
      ny = y + dy
      nx = x + dx
      if ny == mid_coord && nx == mid_coord
        # Stepping into the centre borders a whole edge row/column of the
        # grid one level in (dlevel +1).
        side_len.times.map(&inner_neigh)
      elsif in_bounds[ny, nx]
        [[0, ny, nx]]
      else
        # Stepping off the edge borders the cell next to the centre one
        # level out (dlevel -1).
        [[-1, mid_coord + dy, mid_coord + dx]]
      end
    }.map { |d, ny, nx| [d, ny * side_len + nx] }
  }.freeze
end
# Array[Group]
# Group = {
# mask: Int
# neigh: Hash[Int => Array[Tuple[delta_depth, neighbour_counts]]]
# }
# To compute the neighbour contributions of a group,
# mask the grid bitfield with the group's mask,
# then index into the neigh map.
# Multiple neighbour counts are to be combined with saturating add.
def grouped_neigh_map(side_len, recursive: false)
  neigh_map = neigh_map(side_len, recursive: recursive)
  mid_coord = side_len / 2

  # Partition cell positions into groups (format described in the header
  # comment above this function).
  groups = Hash.new { |h, k| h[k] = [] }
  (side_len * side_len).times { |pos|
    # This seems to be a good division,
    # balancing between not having any one group be too large
    # vs not having to do as many neighbour count saturating additions.
    # Current sizes are 4, 6, 6, 5, 4
    # It does perform slightly better than the obvious `group = pos / 5`
    y, x = pos.divmod(side_len)
    on_vert_edge = y == 0 || y == side_len - 1
    on_horiz_edge = x == 0 || x == side_len - 1
    group = if on_vert_edge && on_horiz_edge
      :corner
    elsif on_vert_edge
      :vert_edge
    elsif on_horiz_edge
      :horiz_edge
    elsif y == mid_coord || x == mid_coord
      :mid
    else
      :other
    end
    groups[group] << pos
  }

  groups.values.map { |group|
    # For every subset of the group's cells, precompute the neighbour-count
    # contributions that subset makes to each level.
    neigh = (1 << group.size).times.to_h { |n|
      n_bits = n.digits(2)
      # neigh_count[dlevel][npos] = 0..3
      # Could use one integer (all counts concatenated),
      # but this function is such a small portion of the runtime that it's not worth it.
      neigh_count = Hash.new { |h, k| h[k] = Hash.new(0) }
      shifted = group.zip(n_bits).sum { |pos, bit| (bit || 0) << pos }
      n_bits.zip(group) { |bit, pos|
        next if bit == 0
        neigh_map[pos].each { |dlevel, npos|
          neigh_count[dlevel][npos] += 1
        }
      }
      # Collapse per-position counts into one concatenated bitfield per level,
      # saturating each 2-bit count at NEIGHBOUR_COUNT_MASK (3).
      [shifted, neigh_count.transform_values { |count_for_level|
        count_for_level.sum { |npos, count_for_pos|
          [count_for_pos, NEIGHBOUR_COUNT_MASK].min << (npos * BITS_PER_NEIGHBOUR_COUNT)
        }
      }.freeze]
    }
    raise "Should be #{1 << group.size} in neighbours map, only have #{neigh.size}" if neigh.size != 1 << group.size
    {
      neigh: neigh.freeze,
      mask: group.sum { |b| 1 << b },
    }.freeze
  }.freeze
end
# Iterate `x = yield x` until some value shows up for the second time.
# Returns [the repeated value, number of distinct values seen before it].
def first_repeat(x)
  visited = {}
  until visited.key?(x)
    visited[x] = true
    x = yield x
  end
  [x, visited.size]
end
# Print each level's bitfield as a SIDE_LEN x SIDE_LEN grid of '#'/'.'.
# The level index is printed first when rendering multiple levels.
def show_grids(grids)
  size = SIDE_LEN * SIDE_LEN
  grids.each_with_index { |g, i|
    puts i if grids.size > 1
    bits = g.digits(2)
    # digits(2) drops leading zeroes; pad back out to a full grid.
    bits << 0 until bits.size == size
    bits.each_slice(SIDE_LEN) { |row| puts row.join.tr('01', '.#') }
    puts
  }
end
verbose = ARGV.delete('-v')
# Input is either a numeric grid bitfield given on the command line, or a
# 5x5 #/. grid read via ARGF and packed row-major into a bitfield
# (bit i set <=> cell i has a bug).
# Fixes: the original called Integer(ARGV) — a TypeError, since ARGV is an
# Array — and its /^[0-9]$/ accepted only single-digit bitfields even
# though NUM_ITERS special-cases the multi-digit value 1205552.
input = ARGV[0]&.match?(/\A[0-9]+\z/) ? Integer(ARGV[0]) : ARGF.each_line.map { |l|
  l.chomp.tap { |lc| raise "wrong size #{l}" if lc.size != SIDE_LEN }
}.join.each_char.with_index.sum { |c, i| (c == ?# ? 1 : 0) << i }
raise "too big #{input}" if input >= 1 << (SIDE_LEN * SIDE_LEN)
neigh = grouped_neigh_map(SIDE_LEN)

# Optionally show every minute of the part-1 (single level) simulation.
if verbose && NUM_ITERS[input] <= 10
  grids = [input]
  puts "----- 0 minutes -----"
  show_grids(grids)
  NUM_ITERS[input].times { |i|
    grids = grow_bugs(grids, neigh)
    puts "----- #{i + 1} minutes -----"
    show_grids(grids)
  }
end

# Part 1: iterate the flat grid until a layout repeats; the repeated
# bitfield itself is the answer printed below.
repeat, time = first_repeat(input) { |x|
  xs = grow_bugs([x], neigh)
  raise "expanded to another level in part 1??? #{xs}" if xs.size > 1
  xs[0] || 0
}
if verbose
  puts "----- repeat after #{time} minutes -----"
  show_grids([repeat])
end
p repeat

# Part 2: same automaton on the recursively-nested grids; answer is the
# total bug count across all levels.
neigh = grouped_neigh_map(SIDE_LEN, recursive: true)
grids = [input]
NUM_ITERS[input].times {
  grids = grow_bugs(grids, neigh)
}
if verbose
  puts "----- #{NUM_ITERS[input]} minutes, recursive -----"
  show_grids(grids)
end
p grids.sum { |g| g.digits(2).count(1) }
|
petertseng/adventofcode-rb-2019
|
mk17.rb
|
<reponame>petertseng/adventofcode-rb-2019
# Walk a comma-separated movement path (R/L turns and step counts) from the
# origin facing up, returning the sorted list of visited [y, x] cells.
def scaffold(path)
  robot_loc = [0, 0].freeze
  move = ->dir { robot_loc.zip(dir).map(&:sum).freeze }
  dir = [-1, 0]
  prev_inter = false
  scaffold = {robot_loc => true}
  path.split(?,) { |x|
    if x == ?R
      dir = right(dir)
      next
    elsif x == ?L
      dir = left(dir)
      next
    else
      Integer(x).times {
        robot_loc = move[dir]
        if scaffold[robot_loc]
          # Two already-visited cells in a row means the path runs along
          # itself rather than merely crossing an intersection.
          puts "WARNING: path doubles back, most solvers can't handle" if prev_inter
          prev_inter = true
        else
          prev_inter = false
        end
        scaffold[robot_loc] = true
      }
    end
  }
  scaffold.keys.sort
end
# Rotate a (dy, dx) direction vector by a left turn.
# (-1, 0) -> (0, -1) -> (1, 0) -> (0, 1) -> (-1, 0)
def left(direction)
  dy, dx = direction
  [-dx, dy]
end
# Rotate a (dy, dx) direction vector by a right turn.
# (-1, 0) -> (0, 1) -> (1, 0) -> (0, -1) -> (-1, 0)
def right(direction)
  dy, dx = direction
  [dx, -dy]
end
# ARGV: the main routine followed by the A/B/C function definitions.
f = {}
main, f[?A], f[?B], f[?C] = ARGV
expand_main = main.gsub(/[A-C]/, f)
puts expand_main

# Canonicalise the expanded path so equivalent paths compare equal:
# collapse R,L / L,R pairs (net zero turn) and merge adjacent move counts.
no_rl_main = expand_main.split(?,).map { |x| Integer(x) rescue x }
while (_, i = no_rl_main.each_cons(2).each_with_index.find { |x, i| i if [[?R, ?L], [?L, ?R]].include?(x) })
  before = no_rl_main[i - 1] || 0
  after = no_rl_main[i + 2] || 0
  raise 'Too many turns in a row' unless before.is_a?(Integer)
  raise 'Too many turns in a row' unless after.is_a?(Integer)
  if i == 0
    no_rl_main[0, 3] = after
  else
    no_rl_main[i - 1, 4] = before + after
  end
end
while (_, i = no_rl_main.each_cons(2).each_with_index.find { |x, i| i if x.all? { |y| y.is_a?(Integer) } })
  no_rl_main[i, 2] = no_rl_main[i] + no_rl_main[i + 1]
end

# Sanity check: the expanded and the canonicalised paths must cover exactly
# the same cells.
scaffold1 = scaffold(expand_main)
ys, xs = scaffold1.transpose
no_rl_main = no_rl_main.join(?,)
scaffold2 = scaffold(no_rl_main)
raise "WRONG #{scaffold1 - scaffold2} vs #{scaffold2 - scaffold1}" if scaffold1 != scaffold2
puts no_rl_main

# Render the scaffold; ^ marks the robot's start at the origin.
Range.new(*ys.minmax).each { |y|
  puts Range.new(*xs.minmax).map { |x|
    [y, x] == [0, 0] ? ?^ : scaffold1.include?([y, x]) ? ?# : ?.
  }.join
}

# Report routine sizes and whether any ordered pair of functions appears
# verbatim inside main.
puts "main: #{main.size}"
f.each { |k, v|
  puts "#{k}: #{v.size}"
}
f.keys.permutation(2) { |x|
  puts "#{x}: #{main.include?(x.join(?,))}"
}
|
petertseng/adventofcode-rb-2019
|
weightgame.rb
|
# Candidate answers for an n-item game where exactly half of the items
# are present, encoded as bitmasks.
# Information not known to the player:
# The eight items each weigh a power of two,
# and the answer is four items.
def half_bits(n)
  raise "sorry must be even" if n.odd?
  target = n / 2
  (0...(1 << n)).select { |mask| mask.digits(2).count(1) == target }
end
# Every subset of n items, encoded as bitmasks 0 through 2^n - 1.
def powerset(n)
  Array(0...(1 << n))
end
# Play one weighing game: ask `guesser` for up to `limit` guesses,
# feeding back :too_light / :too_heavy via guesser[]= after each miss.
# Returns {guesses:, num_guesses:}; num_guesses is nil if `answer`
# was never found within the limit.
# NOTE(review): numeric comparison of bitmasks stands in for weight
# comparison; that's sound when items weigh distinct powers of two
# (see half_bits) — verify if reused with other weightings.
def play_game(limit, answer, guesser)
  guesses = []
  limit.times { |x|
    guess = guesser.guess
    if guess == answer
      guesses << [guess, :ok].freeze
      return {
        guesses: guesses,
        num_guesses: x + 1,
      }
    end
    if guess < answer
      guesser[guess] = :too_light
      guesses << [guess, :too_light].freeze
    else
      guesser[guess] = :too_heavy
      guesses << [guess, :too_heavy].freeze
    end
  }
  {
    guesses: guesses,
    num_guesses: nil,
  }
end
# Evaluate a guesser strategy against every answer in answers[n],
# allowing up to 2^n guesses per game. `new_guesser` is a factory
# called as new_guesser[n, verbose:]. Returns failed/guessed counts,
# min/max/average guess counts, and wall-clock time.
def rate_guesser(n, answers, new_guesser, verbose: false)
  t = Time.now
  results = {
    failed: 0,
    guessed: 0,
    total: 0,
    minimum: Float::INFINITY,
    maximum: 0,
  }
  answers[n].each { |ans|
    result = play_game(1 << n, ans, new_guesser[n, verbose: verbose])
    # Flip the `if false` to get a per-game transcript while debugging.
    p result[:guesses].map { |guess, result|
      [guess.to_s(2).rjust(n, ?0), result]
    } if false #|| true
    if (guesses = result[:num_guesses])
      results[:guessed] += 1
      results[:total] += guesses
      results[:minimum] = [results[:minimum], guesses].min
      results[:maximum] = [results[:maximum], guesses].max
    else
      results[:failed] += 1
    end
  }
  results.merge(average: results[:total].fdiv(results[:guessed]), time: Time.now - t)
end
# Baseline strategy: guess 0, 1, 2, ... in order, ignoring all feedback.
class Seq
  attr_reader :guess

  def initialize(*)
    @guess = 0
  end

  # Feedback is discarded; just move on to the next candidate.
  def []=(_, _)
    @guess = @guess.succ
  end
end
# Guesser alternating between two sweeps over the undecided items:
# an :include sweep proves items that MUST be in the answer (guess
# everything except one item and the known-absent set; :too_light means
# the omitted item was needed), and an :exclude sweep proves items that
# MUST NOT be in it (guess the known-required set plus one item;
# :too_heavy means that item doesn't belong).
class IncludeExclude
  def initialize(n, initial_state, verbose: false)
    raise "bad state #{initial_state}" unless %i(include exclude).include?(initial_state)
    @n = n
    @state = initial_state
    @must_include = 0  # bitmask of proven-required items
    @must_exclude = 0  # bitmask of proven-absent items
    @unknown = n.times.map { |x| 1 << x }  # single-item masks still undecided
    @guesses = @unknown.dup  # items left to test in the current sweep
    @verbose = verbose
  end

  def guess
    # Everything decided: the answer is exactly the must-include set.
    return @must_include if @guesses.empty?
    if @state == :include
      # See if we must include an item - guess all items except this one and any must-not-haves,
      # and see if it's too light.
      ((1 << @n) - 1) ^ @must_exclude ^ @guesses[0]
    elsif @state == :exclude
      # See if we must exclude an item - guess must-have items plus this one,
      # and see if it's too heavy.
      @must_include | @guesses[0]
    else
      raise "Unknown state #{@state}"
    end
  end

  def []=(guess, feedback)
    b = ->x { x.to_s(2).rjust(@n, ?0) }
    puts "got #{feedback} for #{b[guess]} which was derived from #{b[@guesses[0]]}" if @verbose
    if @state == :include && feedback == :too_light
      @must_include |= @guesses[0]
      @unknown.delete(@guesses[0])
    elsif @state == :exclude && feedback == :too_heavy
      @must_exclude |= @guesses[0]
      @unknown.delete(@guesses[0])
    end
    @guesses.shift
    return unless @guesses.empty?
    # Sweep finished: switch direction and retest whatever's still unknown.
    @state = @state == :include ? :exclude : :include
    @guesses = @unknown.dup
    puts "include #{b[@must_include]}, exclude #{b[@must_exclude]}, unknown #{@unknown.map(&b).join(', ')}" if @verbose
  end
end
# IncludeExclude variant that starts with the :include sweep.
class IncludeThenExclude < IncludeExclude
  def initialize(n, **kwargs)
    super(n, :include, **kwargs)
  end
end
# IncludeExclude variant that starts with the :exclude sweep.
class ExcludeThenInclude < IncludeExclude
  def initialize(n, **kwargs)
    super(n, :exclude, **kwargs)
  end
end
# Shared guesser state: tracks every bitmask still consistent with the
# feedback received so far. Subclasses supply the `guess` strategy.
class Possibilities
  def initialize(n, verbose: false)
    @n = n
    @possible = (0...(1 << n)).to_a  # candidate answers
    @guesses = (0...(1 << n)).to_h { |x| [x, nil] }  # guesses not yet spent
    @verbose = verbose
  end

  def []=(guess, feedback)
    if feedback == :too_light
      # Too light: the answer outweighs the guess, so no subset of the
      # guess (which can only weigh less or equal) can be the answer.
      @possible.reject! { |candidate| candidate & guess == candidate }
    elsif feedback == :too_heavy
      # Symmetrically, no superset of a too-heavy guess can be the answer.
      @possible.reject! { |candidate| candidate & guess == guess }
    else
      raise "unknown feedback #{feedback}"
    end
    puts "got #{feedback} for #{guess}, now #{@possible.size} left" if @verbose
  end
end
# Guesser that picks the unspent guess minimizing the expected number of
# remaining candidates, estimating feedback likelihood from subset /
# superset relationships only (no knowledge of actual weights).
class MinEntropy < Possibilities
  def guess
    return @possible[0] if @possible.size == 1
    @guesses.keys.min_by { |guess|
      eq = 0
      subset_of_guess = 0
      superset_of_guess = 0
      neither = 0
      @possible.each { |candidate|
        if candidate == guess
          eq += 1
        elsif candidate & guess == candidate
          subset_of_guess += 1
        elsif candidate & guess == guess
          superset_of_guess += 1
        else
          neither += 1
        end
      }
      # Day 25 guesser doesn't know object weights, so using comparisons isn't fair.
      #n_too_light = @possible.count { |candidate| candidate > guess }
      #n_too_heavy = @possible.count { |candidate| candidate < guess }
      # careful here,
      # *result* will be "too heavy" if guess is superset of real answer (real answer is subset of guess),
      # "too light" if guess is subset of real answer (real answer is superset of guess).
      #
      # No real way to tell for the others so just call them equally likely.
      # I'd add Rational(neither, 2) to each, but I'll just multiply through by 2 to avoid Rationals (faster).
      n_too_light = 2 * superset_of_guess + neither
      n_too_heavy = 2 * subset_of_guess + neither
      # If this is too light, any subset is also too light.
      # If this is too heavy, any superset is also too heavy.
      # eq contributes 1 if guess is possible, else 0, but remember we're multiplying by 2 to avoid halves.
      2 * eq + n_too_light * (@possible.size - subset_of_guess) + n_too_heavy * (@possible.size - superset_of_guess)
    }.tap { |g| @guesses.delete(g) }
  end
end
# MinEntropy, minus the trivial no-items and all-items candidates.
class MinEntropyNoTrivial < MinEntropy
  def initialize(n, **kwargs)
    super
    @possible.delete(0)
    @possible.delete((1 << n) - 1)
  end
end
# MinEntropy primed with the hidden knowledge that the answer contains
# (about) half of the items (see half_bits).
class MinEntropyHalf < MinEntropy
  def initialize(n, **kwargs)
    super
    if n.even?
      @possible.select! { |x| x.digits(2).count(1) == n / 2 }
    else
      # Odd n: allow either rounding of n / 2.
      want = [n / 2, (n + 1) / 2]
      @possible.select! { |x| want.include?(x.digits(2).count(1)) }
    end
  end
end
# Restricting to only guess possible ones makes things worse;
# sometimes you have to guess one that you know is not possible.
class MinEntropyHalfGuessPossible < MinEntropyHalf
  def initialize(n, **kwargs)
    super
    # Narrow the guess pool to the still-possible candidates.
    @guesses = @possible.to_h { |x| [x, nil] }
  end
end
# Cheating baseline: weights feedback likelihood by numeric comparison
# of bitmasks — i.e. using the true weights, which a fair day 25
# guesser wouldn't know.
class MinEntropyCheater < Possibilities
  def guess
    return @possible[0] if @possible.size == 1
    (0...(1 << @n)).min_by { |guess|
      eq = 0
      subset_of_guess = 0
      superset_of_guess = 0
      greater_than_guess = 0
      less_than_guess = 0
      neither = 0
      @possible.each { |candidate|
        if candidate == guess
          eq += 1
        else
          if candidate & guess == candidate
            subset_of_guess += 1
          elsif candidate & guess == guess
            superset_of_guess += 1
          else
            neither += 1
          end
          if candidate > guess
            greater_than_guess += 1
          else
            less_than_guess += 1
          end
        end
      }
      eq + greater_than_guess * (@possible.size - subset_of_guess) + less_than_guess * (@possible.size - superset_of_guess)
    }
  end
end
# Goes to show that if you try to use a comparison but you don't know the real weights,
# you will assign incorrect probabilities and cost a lot of guesses.
# (Same counting as MinEntropyCheater, but with the likelihood weights
# deliberately swapped.)
class MinEntropyRevCheater < Possibilities
  def guess
    return @possible[0] if @possible.size == 1
    @guesses.keys.min_by { |guess|
      eq = 0
      subset_of_guess = 0
      superset_of_guess = 0
      greater_than_guess = 0
      less_than_guess = 0
      neither = 0
      @possible.each { |candidate|
        if candidate == guess
          eq += 1
        else
          if candidate & guess == candidate
            subset_of_guess += 1
          elsif candidate & guess == guess
            superset_of_guess += 1
          else
            neither += 1
          end
          if candidate > guess
            greater_than_guess += 1
          else
            less_than_guess += 1
          end
        end
      }
      eq + less_than_guess * (@possible.size - subset_of_guess) + greater_than_guess * (@possible.size - superset_of_guess)
    }.tap { |g| @guesses.delete(g) }
  end
end
# Minimax strategy: pick the guess minimizing the worst-case number of
# candidates remaining over the two possible feedbacks.
class MinMaxSize < Possibilities
  def guess
    return @possible[0] if @possible.size == 1
    (0...(1 << @n)).min_by { |guess|
      remain_if_too_light = remain_if_too_heavy = @possible.size
      @possible.each { |candidate|
        # If this is too light, any subset is also too light.
        remain_if_too_light -= 1 if candidate & guess == candidate
        # If this is too heavy, any superset is also too heavy.
        remain_if_too_heavy -= 1 if candidate & guess == guess
      }
      [remain_if_too_light, remain_if_too_heavy].max
    }
  end
end
# Single-game debugging entry point (normally disabled):
#play_game(256, 0b01010101, MinEntropyHalf.new(8, verbose: true))
#exit 0

# Rate every strategy on the 8-item half-the-items game (day 25 setup).
[
  Seq,
  IncludeThenExclude,
  ExcludeThenInclude,
  MinEntropy,
  MinEntropyNoTrivial,
  MinEntropyHalf,
  MinEntropyHalfGuessPossible,
  MinEntropyCheater,
  MinMaxSize,
  MinEntropyRevCheater,
].each { |c|
  #puts "#{c}: #{rate_guesser(8, method(:powerset), c.method(:new))}"
  puts "#{c}: #{rate_guesser(8, method(:half_bits), c.method(:new))}"
  #puts "#{c}: #{rate_guesser(3, method(:powerset), c.method(:new))}"
  #puts "#{c}: #{rate_guesser(4, method(:powerset), c.method(:new))}"
}
|
petertseng/adventofcode-rb-2019
|
05_intcode_ii.rb
|
require_relative 'lib/intcode'

# -d: print a disassembly while running.
DISAS = ARGV.delete('-d')

# Run the program with a single input value; returns all outputs.
def ic(mem, input)
  ops = (1..8).to_a + [99]
  Intcode.new(mem, valid_ops: ops).then { |ic| ic.continue(disas: DISAS, input: input) }.output
end

# Program comes from a comma list in ARGV[0], or a file / stdin.
input = (ARGV[0]&.include?(?,) ? ARGV[0] : ARGF.read).split(?,).map(&method(:Integer)).freeze

# Part 1: input 1. All outputs before the last are self-tests (must be 0);
# the last is the diagnostic code.
output = ic(input, 1)
all_but_last = output[0..-2]
raise "nonzero outputs #{all_but_last}" unless all_but_last.all?(&:zero?)
puts output[-1]

# Part 2: input 5 produces a single diagnostic code.
puts ic(input, 5)
|
petertseng/adventofcode-rb-2019
|
01_rocket_equation.rb
|
# Fuel needed to launch a module: floor(mass / 3) minus 2.
def fuel(mass)
  mass.div(3) - 2
end
# Total fuel including the fuel needed to carry the fuel itself:
# keep applying the fuel equation until the result goes non-positive.
# Another idea: https://blog.vero.site/post/advent-rocket
# d3 = (mass + 3).digits(3)
# (mass - d3.sum + 15) / 2 - d3[-1] - 3 * d3.size
def fuel_of_fuel(mass)
  total = 0
  remaining = mass / 3 - 2
  while remaining.positive?
    total += remaining
    remaining = remaining / 3 - 2
  end
  total
end
input = ARGF.each_line.map(&method(:Integer))
# Part 1: simple fuel; part 2: fuel accounting for its own weight.
puts input.sum(&method(:fuel))
puts input.sum(&method(:fuel_of_fuel))
|
petertseng/adventofcode-rb-2019
|
03_crossed_wires.rb
|
<filename>03_crossed_wires.rb
# Two possible approaches:
# 1. Store a set of all points touched by each wire,
# do a set intersection.
#
# 2. Store all segment endpoints and intersect them.
#
# Turns out, the second one is faster.

# Segment tuple layout:
# 0 = unchanging coordinate
# 1 = changing coordinate min
# 2 = changing coordinate max
# 3 = length at min
# 4 = delta length while moving toward max
wires = ARGF.each_line.map { |l|
  horiz = []
  vert = []
  y = 0
  x = 0
  total_length = 0
  l.split(?,).map { |seg|
    dir = seg[0]
    length = Integer(seg[1..-1])
    old_y = y
    old_x = x
    old_length = total_length
    total_length += length
    case dir
    when ?U
      y -= length
      vert << [x, y, old_y, total_length, -1]
    when ?D
      y += length
      vert << [x, old_y, y, old_length, 1]
    when ?L
      x -= length
      horiz << [y, x, old_x, total_length, -1]
    when ?R
      x += length
      horiz << [y, old_x, x, old_length, 1]
    end
  }
  {horiz: horiz, vert: vert}
}

raise "expected two wires not #{wires.size}" if wires.size != 2

from_origin = []  # Manhattan distances of crossings (part 1)
on_wire = []      # combined wire lengths to crossings (part 2)
# A crossing can only occur between a horizontal segment of one wire
# and a vertical segment of the other.
[
  [wires[0][:horiz], wires[1][:vert]],
  [wires[1][:horiz], wires[0][:vert]],
].each { |horizs, verts|
  horizs.each { |y1, x1min, x1max, l1min, l1d|
    verts.each { |x2, y2min, y2max, l2min, l2d|
      next unless (y2min..y2max).cover?(y1)
      next unless (x1min..x1max).cover?(x2)
      # The shared starting point at the origin doesn't count.
      next if y1 == 0 && x2 == 0
      from_origin << y1.abs + x2.abs
      l1 = l1min + l1d * (x2 - x1min)
      l2 = l2min + l2d * (y1 - y2min)
      on_wire << l1 + l2
    }
  }
}

puts from_origin.min
puts on_wire.min
|
petertseng/adventofcode-rb-2019
|
19_tractor_beam.rb
|
<filename>19_tractor_beam.rb
require_relative 'lib/intcode'
count_drones = ARGV.delete('-c')
slowscan = ARGV.delete('--slowscan')
slowpull = ARGV.delete('-s') || ARGV.delete('--slowpull')
input = (ARGV[0]&.include?(?,) ? ARGV[0] : ARGF.read).split(?,).map(&method(:Integer)).freeze
@drones_sent = 0
if slowpull
IC = Intcode.new(input)
def pull?(y, x)
@drones_sent += 1
IC.dup.continue(input: [x, y]).output[0] == 1
end
else
before_halt = input[0...input.index(99)]
immed_to_stack = before_halt.each_cons(4).with_index.filter_map { |(op, arg1, arg2, dest), i|
next if dest == 0
if op == 21101
[i, arg1 + arg2]
elsif op == 21102
[i, arg1 * arg2]
end
}.select { |_, arg| arg > 1 }
# The largest function is mult3 with some useless sorting beforehand.
mult3 = Intcode.functions(input).max_by(&:size).begin
calls_to_mult3 = before_halt.each_cons(3).with_index.filter_map { |(*jmparg, dest), i|
i if dest == mult3 && (jmparg == [1106, 0] || jmparg[0] == 1105 && jmparg[1] != 0)
}
# For each call to mult3, the immediate -> stack write most closely preceding it in program text.
x2coeff, y2coeff, xycoeff = calls_to_mult3.filter_map { |call|
immed_to_stack.select { |addr, _| addr < call }.map(&:last).last
}.freeze
define_method(:pull?) { |y, x|
@drones_sent += 1
xycoeff * x * y >= (x2coeff * x * x - y2coeff * y * y).abs
}
end
if slowscan
# This would work but queries all 2500 points. We can do better.
puts (0..49).sum { |y| (0..49).count { |x| pull?(y, x) } }
else
# Save this info for part 2.
current_scan = nil
# Follow the edges. For typical inputs, queries around 300 points or fewer.
# We could go from narrower to wider, but the beam disappears on a few rows,
# and scanning left-to-right means we scan almost an entire row.
# So we go from wider to narrower + right-to-left instead.
# If the beam disappears, we only need to scan the area to the left of the previous edge.
right_x = 49
left_x = nil
puts 49.downto(0).sum { |y|
if (new_right_x = right_x.downto(0).find { |x| pull?(y, x) })
# Found the right edge of this row.
# Next scan will start from this new right edge.
right_x = new_right_x
# Where is left edge? Start from previous-known left edge, or if we don't know, right edge.
left_x = (left_x || (new_right_x - 1)).downto(0).find { |x| !pull?(y, x) } || -1
# +1 here; this left_x is exclusive but current_scan's left_x is inclusive.
current_scan = {width: right_x - left_x, left_x: left_x + 1}.freeze if y == 49
right_x - left_x
else
current_scan = {width: 0, left_x: 0} if y == 49
# Nothing in this row!
# right_x stays the same, so next scan starts from last known right edge.
# left_x becomes nil because we need to find a new left once we have a beam again.
left_x = nil
0
end
}
end
STDERR.puts "#@drones_sent drones" if count_drones
@drones_sent = 0
if slowscan
# This works but queries too many points. Again, we can do better.
min_x = 0
puts 99.step { |y|
# Find min x that's on for this row.
min_x = min_x.step.find { |x| pull?(y, x) }
# Treat that as bottom-left. Is top-right on?
break min_x * 10000 + y - 99 if pull?(y - 99, min_x + 99)
}
STDERR.puts "#@drones_sent drones" if count_drones
exit 0
end
# Scan a row using this principle:
#
# If a previous row was known to be WIDTH wide starting from PREV_LEFT,
# then assume this row is also at least WIDTH wide starting from l > PREV_LEFT.
# That means if we scan every WIDTH squares starting from PREV_LEFT, we'll find it.
#
# Determines the left and width of this row.
# (The bsearch calls assume the beam is contiguous within a row.)
def scan_row(y, min_x, min_width)
  # Find some beam cell in this row, striding min_width at a time.
  near_left_x = min_x.step(by: min_width).find { |x| pull?(y, x) }
  # Binary search back to the true left edge.
  left_x = ((near_left_x - min_width + 1)...near_left_x).bsearch { |x| pull?(y, x) } || near_left_x
  # Gallop to an upper bound on the width, then binary search the exact width.
  max_width = 1.step { |power|
    width = min_width << power
    break width if !pull?(y, left_x + width)
  }
  {
    left_x: left_x,
    width: ((max_width / 2)..max_width).bsearch { |width| !pull?(y, left_x + width) },
  }.freeze
end
# For a 100x100 square, bottom row must be y >= 99, so we start there.
# Actually, it may be possible to start at y=149 if we know that y=49 is not a top row;
# for some inputs this is an improvement, for some others it's the opposite.
# So I'll save myself the bookkeeping required and just start at 99.
y = 99
prev_left = nil
current_scan = scan_row(y, current_scan[:left_x], current_scan[:width])
# Previous scanned row is NOT a candidate top if its width < 100,
# which means candidate bottom must be at least 100 more than that.
# If it is a candidate top, at this point we can actually stride Y by whatever we want.
# But 100 seems to work well anyway.
# Other values work better/worse for various inputs,
# so I'll pick something middle-of-the-road.
# I wonder if I can prove that some value or other is the best,
# based on expected number of scans...
Y_STRIDE = 100
# Find a valid bottom row, a row with both of the following characteristics:
# Has width >= 100
# Has a matching top row.
# If I always increase y by 99, I could reuse information and not call `pull?` to check matching top.
# But I don't want to risk any off-by-one errors, so I'll just call it.
until current_scan[:width] >= 100 && pull?(y - 99, current_scan[:left_x] + 99)
prev_left = current_scan[:left_x]
y += Y_STRIDE
current_scan = scan_row(y, current_scan[:left_x], current_scan[:width])
end
# At this point, y is a valid bottom, and y - Y_STRIDE is not.
# We'll just scan everything between them linearly,
# following the bottom edge.
# I don't feel like managing a binary search since if y varies,
# I'm not prepared to track the bounds for the x for a given y.
min_x = prev_left
puts (y - Y_STRIDE + 1).step { |y|
# Find min x that's on for this row.
min_x = min_x.step.find { |x| pull?(y, x) }
# Treat that as bottom-left. Is top-right on?
break min_x * 10000 + y - 99 if pull?(y - 99, min_x + 99)
}
STDERR.puts "#@drones_sent drones" if count_drones
|
petertseng/adventofcode-rb-2019
|
12_n_body_problem.rb
|
# Advance one axis of the n-body simulation by a single tick, in place.
# poses / vels are parallel arrays, one entry per moon.
def step(poses, vels)
  # Gravity: each moon's velocity shifts by +/-1 toward every other moon.
  # For pi = 2 and p = 5 we want +1, which is exactly (p <=> pi).
  poses.each_index { |i|
    here = poses[i]
    vels[i] += poses.sum { |other| other <=> here }
  }
  # Then every moon drifts by its (updated) velocity.
  poses.each_index { |i| poses[i] += vels[i] }
end
# Simulate 1000 ticks of one axis starting from rest.
# Returns [position, velocity] pairs after the final step.
def run1k(moons)
  pos = moons.dup
  vel = moons.map { 0 }
  1000.times { step(pos, vel) }
  pos.zip(vel)
end
# Prints the unrolled pairwise velocity-update statements used inside
# `period`. Run once by hand; the output was pasted into that method.
def codegen_vel_update
  i = (0...4).to_a
  i.combination(2) { |a, b|
    # among these options, this one seems to be fastest.
    puts "if p#{a} > p#{b}; v#{a} -= 1; v#{b} += 1; elsif p#{b} > p#{a}; v#{a} += 1; v#{b} -= 1; end"
    #puts "if p#{a} > p#{b}; v#{a} -= 1; v#{b} += 1; end"
    #puts "if p#{b} > p#{a}; v#{a} += 1; v#{b} -= 1; end"
    #puts "cmp#{a}#{b} = p#{a} <=> p#{b}"
    #puts "v#{a} -= cmp#{a}#{b}"
    #puts "v#{b} += cmp#{a}#{b}"
  }
end
# Number of steps until one axis of the four-moon system returns to its
# initial state (all simulations start with zero velocities).
#
# The update rule is reversible — each state has exactly one predecessor —
# so the first repeated state must be the initial one. Moreover the
# trajectory is symmetric around any instant where all velocities are
# zero: from such a state, positions retrace themselves in reverse with
# negated velocities. So when velocities next all hit zero at time t,
# the initial state recurs at exactly 2 * t and we can stop half-way.
#
# The pairwise comparisons are fully unrolled for speed (this loop
# otherwise takes > 1 second in Ruby); see codegen_vel_update.
def period(moons)
  raise "Can't handle anything other than four moons" if moons.size != 4
  pa, pb, pc, pd = moons
  va = vb = vc = vd = 0
  steps = 0
  loop do
    if pa > pb; va -= 1; vb += 1; elsif pb > pa; va += 1; vb -= 1; end
    if pa > pc; va -= 1; vc += 1; elsif pc > pa; va += 1; vc -= 1; end
    if pa > pd; va -= 1; vd += 1; elsif pd > pa; va += 1; vd -= 1; end
    if pb > pc; vb -= 1; vc += 1; elsif pc > pb; vb += 1; vc -= 1; end
    if pb > pd; vb -= 1; vd += 1; elsif pd > pb; vb += 1; vd -= 1; end
    if pc > pd; vc -= 1; vd += 1; elsif pd > pc; vc += 1; vd -= 1; end
    pa += va
    pb += vb
    pc += vc
    pd += vd
    steps += 1
    break steps * 2 if va == 0 && vb == 0 && vc == 0 && vd == 0
  end
end
verbose = ARGV.delete('-v')
# Each input line holds one moon's x, y, z; transpose to per-axis lists,
# since the three axes evolve independently.
input = ARGF.each_line.map { |l| l.scan(/-?\d+/).map(&method(:Integer)) }
coordinates = input.transpose.map(&:freeze).freeze
# Part 1: total energy (potential * kinetic per moon) after 1000 steps.
moons1k = coordinates.map(&method(:run1k)).transpose
puts moons1k.sum { |moon| moon.transpose.map { |c| c.sum(&:abs) }.reduce(:*) }
# Part 2: the whole system repeats at the lcm of the per-axis periods.
periods = coordinates.map(&method(:period))
p periods if verbose
puts periods.reduce(1) { |a, b| a.lcm(b) }
|
petertseng/adventofcode-rb-2019
|
golf9_spaced.rb
|
# Code-golfed Intcode interpreter (day 9 instruction set), with line
# breaks added back for readability. The program comes from the first
# command-line argument; opcode 3 reads input with `gets`.
# m = memory, b = relative base, z = instruction pointer.
m=$*.shift.split(?,).map &:to_i;
b=z=0;
while(c=m[z])!=99;
y=-2;
# For each of the three operand slots, compute both the write address
# (a; position or relative) and the read value (r, q for the first two).
a,(r,q)=m[z+1,3].map{|x|
d=c.to_s[y-=1];
x||=0;
[
x+=d==?2?b:0,
d==?1?x:m[x]||0
]
}.transpose;
# Since 0 is not a valid opcode,
# index from the back instead of the front,
# saving one array entry (2 bytes) but costing one - (1 byte).
s=%i[== < == != x x * +][-c%=100];
# n = instruction length; j = jump target, nil when no jump is taken.
n,j=
c==3?(m[a[0]]=gets.to_i;2):
c==4?(p r;2):
c==5||c==6?[3,r.send(s,0)&&q]:
c==9?(b+=r;2):
(x=r.send s,q;m[a[2]]=c<3?x:x ?1:0;4);
z=j||z+n;
end
|
petertseng/adventofcode-rb-2019
|
21_springdroid_adventure.rb
|
<reponame>petertseng/adventofcode-rb-2019<gh_stars>10-100
require_relative 'lib/intcode'
VERBOSE = ARGV.delete('-v')
# Run the springdroid program, feeding `script` as its ASCII input.
def run(mem, script, **args)
  Intcode.new(mem).continue(input: script, **args)
end
# Print the hull damage (the single output value > 127); with -v,
# also print the droid's ASCII transcript.
def show_damage(ic)
  puts ic.output.select { |x| x <= 127 }.pack('c*') if VERBOSE
  puts ic.output.select { |x| x > 127 }
end
# Find the single Intcode function whose body contains a 4-instruction
# window matching the given block; raises if zero or several match.
def exactly_one_function(mem, name)
  exactly_one(
    name,
    Intcode.functions(mem).select { |f|
      mem[f].each_cons(4).any? { |x| yield x }
    },
  )
end
# Assert that `things` holds exactly one element, and return it.
# `name` is used only in the failure message.
def exactly_one(name, things)
  unless things.size == 1
    raise "need exactly one #{name}, not #{things}"
  end
  things.first
end
# Parameter modes for the first two operands of an Intcode opcode:
# the hundreds digit and the thousands digit of op.
def modes(op)
  hundreds = (op / 100) % 10
  thousands = (op / 1000) % 10
  [hundreds, thousands]
end
# Approach 1:
# The slowest way.
# Actually runs the Springscript interpreter with a real script.
def run_springscript_scripts(mem)
# !A+!CD
# Also viable:
# !(A+B+C)D and equivalent (!A+!B+!C)D
show_damage(run(mem, <<~CODE))
NOT C J
AND D J
NOT A T
OR T J
WALK
CODE
# D!(AB(C+!H))
# equivalent D(!A+!B+(!CH))
# Alternatives:
# !A+!(BC)DH and equivalent !A+(!B+!C)DH
# (!A+!B+!C)D(E+H) and equivalent !(ABC)D(E+H)
# !A+!B!E+!CD(E+H)
show_damage(run(mem, <<~CODE))
NOT H J
OR C J
AND A J
AND B J
NOT J J
AND D J
RUN
CODE
end
# Approach 2:
# An okay way, showcasing the use of the CUSTOM_OPCODE.
# Overwrite the Springscript interpreter and use Ruby to decide when to jump.
def overwrite_springscript_interpreter(input)
# What address stores the number of instructions?
viable_addresses = nil
# 1 is used often as the identity for multiplication, so start at 2.
(2..15).each { |num_insts|
ic = run(input, ['NOT T T'] * num_insts << 'WALK')
# Assume it's not on the stack.
viable_for_num = ic.mem[0, ic.relative_base].each_with_index.filter_map { |val, addr|
addr if val == num_insts
}
viable_addresses ||= viable_for_num
viable_addresses &= viable_for_num
break if viable_addresses.size <= 1
}
num_insts_addr = exactly_one('number of instructions', viable_addresses)
mem = input.dup
# The Springscript runner needs to compare against the number of instructions,
# so that it knows whether it finished running the script.
# (Okay, it could do other things like just look for a sentinel value,
# but it so happens that it does do a comparison against this value)
springscript_runner = exactly_one_function(mem, 'springscript runner') { |op, *operands, _|
[7, 8].include?(op % 100) && operands.zip(modes(op)).include?([num_insts_addr, 0])
}
stack_frame_size = mem[springscript_runner.begin + 1]
hull_addr = exactly_one(
'hull array base address',
mem[springscript_runner].each_cons(12).with_index(springscript_runner.begin).flat_map { |insts, i|
# We're looking for three instructions of this pattern:
# write S11 S12 D1
# write S21 S22 D2
# write S31 S32 D3
op1, _, _, dst1, op2, _, _, dst2, op3 = insts
next [] unless [op1, op2, op3].all? { |op| [1, 2].include?(op % 100) }
# third instruction must be an an array read (D2 must point to S31 or S32)
next [] if op2 >= 20000
next [] unless [i + 9, i + 10].include?(dst2)
# second instruction must use result of the first (D1 must equal one of S21 or S22)
src2 = insts[5, 2].zip(modes(op2))
next [] unless src2.include?([dst1, 0])
# the argument to the runner must be an input (one of S11, S12, S21, S22 is $rb[-...])
srcs = src2 + insts[1, 2].zip(modes(op1))
next [] unless srcs.include?([-(stack_frame_size - 1), 2])
# Anything that looks like a base address offset
srcs.filter_map { |v, mode| v if v > 0 && mode == 1 }
},
)
mem[springscript_runner.begin] = Intcode::CUSTOM_OPCODE
run_ruby_jumper = ->(command, &should_jump) {
read_len = {WALK: 4, RUN: 9}.fetch(command)
show_damage(run(mem, command.to_s, custom: ->ic {
# Springdroid pos was passed as an argument to Springscript function:
springdroid_pos = ic.mem[ic.relative_base + 1]
# Read next values from hull (values of A, B, C... etc).
# Note that we do not read our current position, so add 1.
regs = ic.mem[hull_addr + springdroid_pos + 1, read_len].map { |x| x != 0 }
# return value of J register
ic.mem[ic.relative_base + 1] = should_jump[regs] ? 1 : 0
ic.mem[ic.relative_base]
}))
}
run_ruby_jumper[:WALK] { |a, _, c, d|
!a || !c && d
}
run_ruby_jumper[:RUN] { |a, b, c, d, _, _, _, h|
d && (!a || !b || !c && h)
}
end
# Approach 3:
# Just calculate the score without running the full Intcode or Springscript.
# Compute both damage answers straight from the program image without
# executing it: find where the damage is output, then replay the
# embedded hull bit patterns (7 entries for part 1, 153 for part 2).
# NOTE(review): the scoring formula below mirrors arithmetic
# reverse-engineered from known AoC inputs — verify against others.
def auto_score(input)
  mem = input.dup
  # The damage is printed out right before a halt.
  # Determine its location.
  damage = exactly_one('damage location', mem.each_cons(3).filter_map { |(a, b, c)|
    b if a == 4 && c == 99
  })
  # Bit patterns tested come soon after the damage.
  base = damage.step.find { |x| mem[x] > 0 }
  damage = 0  # reuse: now the accumulated damage total
  [7, 153].each { |len|
    # Each pattern run is zero-terminated.
    raise "bad #{mem[base, len + 1]}" if mem[base + len] != 0
    len.times { |i|
      addr = base + i
      bits_i = mem[addr]
      raise "bad #{bits_i} at #{i} of #{len} (#{mem[base, len + 1]})" unless (1..255).cover?(bits_i)
      bits_s = bits_i.to_s(2).rjust(9, ?0)
      damage += bits_s.each_char.with_index(10).sum { |c, i|
        c == ?0 ? addr * bits_i * i : 0
      }
    }
    base += len + 1
    puts damage
  }
end
slow = ARGV.delete('-s')
slower = ARGV.delete('-ss')
input = (ARGV[0]&.include?(?,) ? ARGV[0] : ARGF.read).split(?,).map(&method(:Integer)).freeze
if slower
run_springscript_scripts(input)
elsif slow
overwrite_springscript_interpreter(input)
else
auto_score(input)
end
|
petertseng/adventofcode-rb-2019
|
02_intcode.rb
|
require_relative 'lib/intcode'
# Run the gravity-assist program with mem[1] = noun, mem[2] = verb;
# returns the value left at address 0 when the program halts.
def run(mem, noun, verb)
  mem = mem.dup
  mem[1] = noun
  mem[2] = verb
  Intcode.new(mem, valid_ops: [1, 2, 99]).then(&:continue).memory[0]
end
input = (ARGV[0]&.include?(?,) ? ARGV[0] : ARGF.read).split(?,).map(&method(:Integer)).freeze

# Part 1.
puts run(input, 12, 2)

# Note that for known Advent of Code inputs,
# mem[0] = noun * N + verb * V + base
# And V = 1, but I'll allow for others.
# Probe the program at three points to recover base and the two deltas,
# then solve for (noun, verb) directly instead of brute-forcing 100x100.
base = run(input, 0, 0)
delta_noun = run(input, 1, 0) - base
delta_verb = run(input, 0, 1) - base
target = 19690720
# Solve along the larger-coefficient axis first, then the remainder.
if delta_noun > delta_verb
  noun = (target - base) / delta_noun
  verb = (target - base - delta_noun * noun) / delta_verb
else
  verb = (target - base) / delta_verb
  noun = (target - base - delta_verb * verb) / delta_noun
end
puts noun * 100 + verb
|
petertseng/adventofcode-rb-2019
|
13_breakout.rb
|
<gh_stars>10-100
require_relative 'lib/intcode'
# Compute part 1 (block count) and part 2 (final score) directly from
# the program image without playing the game.
# NOTE(review): relies on the memory layout of known AoC inputs —
# score function last, then grid, then score table — verify elsewhere.
def affine(mem)
  score_func = Intcode.functions(mem)[-1]
  # Immediate add/multiply writes inside the score function hold the
  # constants: a (slot 2), b (slot 3), m (slot 4).
  nums = mem[score_func].each_cons(4).filter_map { |op, a1, a2, d|
    next if op != 21101 && op != 21102
    [d, op == 21101 ? a1 + a2 : a1 * a2]
  }.to_h
  a = nums[2]
  b = nums[3]
  m = nums[4]
  # The game grid (m cells) sits shortly after the score function.
  game_grid = mem[score_func.end + 3, m]
  # The grid begins with a run of 1s; the first non-1 reveals the width.
  non_one = game_grid.index { |x| x != 1 }
  width = non_one - 1
  height = m / width
  # Tile 2 is a block.
  blocks = game_grid.each_with_index.filter_map { |x, i|
    next if x != 2
    i.divmod(width)
  }
  # The per-cell score table follows the grid.
  scores = mem[score_func.end + 3 + m, m]
  score = blocks.sum { |y, x|
    scores[((x * height + y) * a + b) % m]
  }
  [blocks.size, score]
end
# Part 2 shortcut: instead of playing the game, call the program's own
# block-broken function once per block and let the caller accumulate
# the score from its output.
def hijack(ic, blocks)
  mem = ic.memory
  # where is the function that is called when a block is broken?
  # Identified by the code that emits the (-1, 0, score) output triple.
  block_broken = Intcode.functions(mem).select { |f|
    mem[f].each_cons(5).include?([104, -1, 104, 0, 4])
  }
  raise "need exactly one block_broken not #{block_broken}" if block_broken.size != 1
  block_broken = block_broken[0].begin
  # Set return addr to current pos (where we're pausing for input)
  # so it pauses for input after having called the function
  mem[ic.relative_base] = ic.pos
  blocks.each { |y, x|
    # call block_broken(x, y)
    mem[ic.relative_base + 1] = x
    mem[ic.relative_base + 2] = y
    ic.continue(hijack: block_broken, input: [])
    yield ic.output
  }
end
slow = ARGV.delete('-s')
slower = ARGV.delete('-ss')
input = (ARGV[0]&.include?(?,) ? ARGV[0] : ARGF.read).split(?,).map(&method(:Integer))
input[0] = 2
input.freeze
ballx = nil
paddlex = nil
blocks = {}
score = 0
parse_output = ->output {
while output.size >= 3
x, y, tile = output.shift(3)
if x == -1 && y == 0
score = tile
next
end
# ordered by frequency, though I'm not sure this matters.
case tile
when 0; blocks.delete([y, x])
when 4; ballx = x
when 3; paddlex = x
when 2; blocks[[y, x].freeze] = true
when 1; # nothing
else raise "Unknown tile #{tile}"
end
end
}
init_game = -> {
Intcode.new(input).continue(input: []).tap { |ic|
parse_output[ic.output]
}
}
if slower
ic = init_game[]
puts blocks.size
until blocks.empty?
# ballx > paddlex: 1; ballx < paddlex: -1
ic.continue(input: ballx <=> paddlex)
parse_output[ic.output]
end
puts score
elsif slow
ic = init_game[]
puts blocks.size
hijack(ic, blocks.keys, &parse_output)
puts score
else
puts affine(input)
end
|
petertseng/adventofcode-rb-2019
|
22_slam_shuffle.rb
|
# Try to combine or transpose the pair of shuffle steps at indices
# i and i + 1, mutating `steps` in place. No-op when i is the last
# index or the pair matches no rewrite rule.
def simplify_at(steps, i, deck_size)
  return unless (op2, arg2 = steps[i + 1])
  op1, arg1 = steps[i]
  # Adjacent pairs of the same operation can be combined.
  # (dealwith by multiplication, cut by addition, reverse by elimination)
  #
  # Adjacent pairs of different operation can be transposed,
  # if applying an appropriate transformation to the pair
  # (see each pair for details on its appropriate transformation).
  #
  # Sift all dealwith to the front of the list,
  # and all reverse to the back of the list.
  #
  # Applied enough times, the list should end with just one of each operation.
  case [op1, op2]
  when [:dealwith, :dealwith]
    steps[i, 2] = [
      [:dealwith, (arg1 * arg2) % deck_size].freeze,
    ]
  when [:cut, :dealwith]
    # consider a 10-card deck:
    # cut 2:
    # 2 3 4 5 6 7 8 9 0 1
    # cut 2, deal incr 3
    # 2 9 6 3 0 7 4 1 8 5
    # deal incr 3:
    # 0 7 4 1 8 5 2 9 6 3
    # deal incr 3, cut 6:
    # 2 9 6 3 0 7 4 1 8 5
    # so cut N, deal incr M = deal incr M, cut (N * M)
    steps[i, 2] = [
      [:dealwith, arg2].freeze,
      [:cut, (arg1 * arg2) % deck_size].freeze,
    ]
  when [:cut, :cut]
    steps[i, 2] = [
      [:cut, (arg1 + arg2) % deck_size].freeze,
    ]
  when [:reverse, :dealwith]
    # consider a 10-card deck:
    # reverse, deal incr 3:
    # 9 2 5 8 1 4 7 0 3 6
    # deal incr 7:
    # 0 3 6 9 2 5 8 1 4 7
    # deal incr 7, cut 3:
    # 9 2 5 8 1 4 7 0 3 6
    # so reverse, deal incr N = deal incr (size - N), cut N
    steps[i, 2] = [
      [:dealwith, deck_size - arg2].freeze,
      [:cut, arg2].freeze,
    ]
  when [:reverse, :cut]
    # reverse then cut K = cut -K then reverse.
    steps[i, 2] = [
      [:cut, -arg2].freeze,
      [:reverse].freeze,
    ]
  when [:reverse, :reverse]
    # Two reversals cancel out.
    steps[i, 2] = []
  end
end
# Repeatedly apply the pairwise rewrite rules (see simplify_at) until
# no operation kind appears more than once in the step list.
def simplify(steps, deck_size)
  until steps.map(&:first).uniq.size == steps.size
    steps.each_index { |i| simplify_at(steps, i, deck_size) }
  end
end
# Multiplicative inverse of a modulo n, via the extended Euclidean
# algorithm. Returns nil when gcd(a, n) != 1 (no inverse exists).
def modular_inverse(a, n)
  t = 0
  newt = 1
  r = n
  newr = a
  while newr != 0
    q = r / newr
    t, newt = newt, t - q * newt
    r, newr = newr, r - q * newr
  end
  return nil if r > 1
  t % n
end
# Track where the single card starting at initial_pos ends up after
# performing each shuffle step in order on a deck of deck_size cards.
def apply(steps, initial_pos, deck_size)
  pos = initial_pos
  steps.each { |op, arg|
    case op
    when :dealwith then pos = (pos * arg) % deck_size
    when :reverse then pos = deck_size - 1 - pos
    when :cut then pos = (pos - arg) % deck_size
    else raise "unknown #{op} #{arg}"
    end
  }
  pos
end
verbose = ARGV.delete('-v')
input = ARGF.each_line.map(&:chomp)
test = input.size < 40
deck_size = test ? 10 : 10007
steps = input.map { |a|
words = a.split
if words[0, 2] == ['deal', 'with']
n = Integer(words[-1])
[:dealwith, n].freeze
elsif words[0, 2] == ['deal', 'into']
[:reverse].freeze
elsif words[0] == 'cut'
n = Integer(words[-1])
[:cut, n].freeze
else raise "unknown #{words}"
end
}
before_simp = steps.dup.freeze
simplify(steps, deck_size)
p steps if verbose
if test
decks = [before_simp, steps].map { |s|
deck = (0...deck_size).to_h { |pos| [apply(s, pos, deck_size), pos] }
((0...deck_size).map(&deck))
}
raise "different decks #{decks}" if decks.uniq.size != 1
puts decks[0].join(' ')
exit 0
end
puts apply(steps, 2019, deck_size)
bits = {}
deck_size = 119315717514047
steps = before_simp.dup
simplify(steps, deck_size)
# Apply the shuffle repeatedly via exponentiation.
power = 1
num_shuffles = 101741582076661
until power > num_shuffles
bits[power] = steps.dup.freeze
power <<= 1
steps.concat(steps)
simplify(steps, deck_size)
end
relevant_bits = bits.keys.select { |k| num_shuffles & k != 0 }
raise "WRONG BITS!!! #{relevant_bits.sum} vs #{num_shuffles}" if relevant_bits.sum != num_shuffles
final = relevant_bits.flat_map(&bits)
simplify(final, deck_size)
p final if verbose
pos = 2020
final.reverse_each { |op, arg|
case op
when :dealwith
pos = (pos * modular_inverse(arg, deck_size)) % deck_size
when :reverse
pos = deck_size - 1 - pos
when :cut
pos = (pos + arg) % deck_size
else raise "Unknown #{op} #{arg}"
end
}
p pos
|
petertseng/adventofcode-rb-2019
|
20_donut_maze.rb
|
require_relative 'lib/search'
# Parse the day 20 donut maze.
# flat_input: the maze flattened row-major into one string (rows padded to width).
# Returns [start_pos, goal_pos, nil, dists] where dists maps each portal
# entrance position to frozen [dest, dist, depth_change] triples
# (depth_change -1 for outer->inner warps, +1 for inner->outer, 0 for walking).
def parse_maze(flat_input, height, width)
  portal_pairs = Hash.new { |h, k| h[k] = {outer: nil, inner: nil} }
  portal_entrances = {}
  # Up, down, left, right as offsets into the flattened string.
  dirs = [-width, width, -1, 1].freeze
  flat_input.each_char.with_index { |cell, pos|
    # Skip the first row and anything past the last full row so the
    # +/- width lookups below stay in range.
    next if pos < width
    next unless flat_input[pos + width]
    # Find letters with dots next to them.
    next unless (?A..?Z).cover?(cell)
    dirs_with_dot = dirs.select { |dpos| flat_input[pos + dpos] == ?. }
    raise "more than one dot for #{pos} (#{pos.divmod(width)}): #{dirs_with_dot}" if dirs_with_dot.size > 1
    next unless (dir_with_dot = dirs_with_dot[0])
    # The other letter of the label sits opposite the dot.
    other_letter = flat_input[pos - dir_with_dot]
    # dot below (+width) or to right (+1) of letter means other letter comes first.
    # dot above (-width) or to left (-1) of letter means other letter comes second.
    id = (dir_with_dot > 0 ? other_letter + cell : cell + other_letter).freeze
    # Labels touching the grid border are outer portals; the rest are inner.
    y, x = (pos - dir_with_dot).divmod(width)
    type = y == 0 || y == height - 1 || x == 0 || x == width - 1 ? :outer : :inner
    raise "REPEAT #{id} #{type}" if portal_pairs[id][type]
    dot_pos = pos + dir_with_dot
    portal_entrances[dot_pos] = true
    portal_pairs[id][type] = dot_pos
  }
  portal_entrances.freeze
  # AA and ZZ are unpaired outer labels: extract (and remove) them.
  outer = ->k {
    raise "no #{k}" unless (pair = portal_pairs.delete(k))
    raise "inner #{k} #{pair[:inner]}" if pair[:inner]
    pair[:outer]
  }
  start = outer['AA']
  goal = outer['ZZ']
  portal_pairs.each { |k, v|
    raise "MISSING INNER FOR #{k}" unless v[:inner]
    raise "MISSING OUTER FOR #{k}" unless v[:outer]
  }
  portal_pairs.freeze
  # Warping through a portal costs 1 step and changes recursion depth.
  dists = portal_pairs.values.flat_map { |v|
    [
      [v[:outer], [v[:inner], 1, -1].freeze],
      [v[:inner], [v[:outer], 1, 1].freeze],
    ]
  }.to_h
  portal_to_portal = portal_to_portal(flat_input, dirs, portal_entrances)
  [
    start, goal,
    # not using this anymore (failed heuristic)
    nil && min_in_out(portal_pairs, portal_to_portal),
    # Merge walking distances with warp edges per entrance position.
    dists.merge!(portal_to_portal) { |_, v1, v2| (v2 << v1).freeze }.freeze,
  ]
end
# Shortest walking distance from any inner portal entrance directly to any
# outer portal entrance (depth change 0 along the way).
# Returns nil when no such connection exists.
def min_in_out(portal_pairs, portal_to_portal)
  outer_positions = {}
  portal_pairs.each_value { |pair| outer_positions[pair[:outer]] = true }
  best = nil
  portal_pairs.each_value do |pair|
    inner = pair[:inner]
    next unless inner
    portal_to_portal[inner].each do |dest, dist, ddepth|
      next unless ddepth == 0 && outer_positions[dest]
      best = dist if best.nil? || dist < best
    end
  end
  best
end
# BFS from every portal entrance to every other reachable portal entrance.
# Returns a frozen hash: src position => [[dest, dist, 0], ...].
# The trailing 0 is the depth change: plain walking never changes depth.
def portal_to_portal(flat_input, dirs, portal_entrances)
  result = portal_entrances.keys.to_h do |src|
    open_neighbours = ->pos {
      dirs.filter_map { |dpos|
        npos = pos + dpos
        npos if flat_input[npos] == ?.
      }
    }
    found = Search.bfs(
      src, num_goals: Float::INFINITY,
      neighbours: open_neighbours,
      goal: ->pos { portal_entrances.has_key?(pos) && pos != src },
    )
    triples = found[:goals].map { |pos, dist| [pos, dist, 0].freeze }
    [src, triples]
  end
  result.freeze
end
# --- Day 20: Donut Maze driver ---
input = ARGF.each_line.map(&:chomp).map(&:freeze).freeze
width = input.map(&:size).max
flat_input = input.map { |l| l.ljust(width, ' ') }.join.freeze
height = input.size
start, goal, _min_in_out, dists = parse_maze(flat_input, height, width)
maze_size = flat_input.size
# Attempts to prove a bound on depth have failed:
# https://www.reddit.com/r/adventofcode/comments/ed5ei2/2019_day_20_solutions/fbg6p0s/
# Maze with:
# long corridor whose distance means an optimal solution crosses it once
# left side allowing depths 4, 9, 14... to connect to long corridor
# right side allowing depths 6, 12, 18... to connect to long corridor
# Only contains 11 portal pairs, but best path would go down to depth 24.
#
# Only remaining hope is if there is a property of the inputs to exploit.
#
# However, I need to set some limit here, to allow example 2 to pass on part 2 test.
# Don't want to doom myself to wander the halls of Pluto for all eternity.
# The input described above had its depth determined by multiplying (n - k) * k
# So we'll just limit at the maximum this value could be,
# which is splitting the number of pairs in half as equally as possible.
# 11 -> 6 * 5, 12 -> 6 * 6, etc.
portal_pairs = (dists.size - 2) / 2
half_up = (portal_pairs + 1) / 2
max_depth = portal_pairs / 2 * half_up
# depth_mult 0 ignores depth changes (part 1); 1 makes portals recursive (part 2).
# Search state packs (depth, position) as depth * maze_size + pos.
search = ->depth_mult {
  cost, _junk = Search.astar(
    start,
    goal: {goal => true}.freeze,
    neighbours: ->depth_and_pos {
      depth, pos = depth_and_pos.divmod(maze_size)
      dists[pos].filter_map { |dest, dist, depth_change|
        new_depth = depth + depth_change * depth_mult
        [new_depth * maze_size + dest, dist] if (0..max_depth).cover?(new_depth)
      }
    },
    heuristic: ->depth_and_pos {
      0
      # I would like to use this, but it's non-monotonic (AKA inconsistent),
      # 1. that will cause MonotonePriorityQueue to raise.
      # 2. my A* implementation assumes I never need to revisit a node,
      # but with an inconsistent heuristic, you might.
      # I could use PriorityQueue along with this heuristic,
      # but PriorityQueue slowdown is more than the heuristic's speedup.
      #depth = depth_and_pos / maze_size
      #depth * (min_in_out + 1)
      # Why it's inconsistent:
      # Moving from an outer portal at depth 1 to an inner portal at depth 0
      # costs only 1, but decreases heuristic from (min_in_out + 1) to 0.
      # This violates the requirement: h(x) <= d(x, y) + h(y)
      # because min_in_out + 1 > 1 + 0
    },
  )
  cost || 'impossible'
}
puts search[0]
puts search[1]
|
petertseng/adventofcode-rb-2019
|
lib/search.rb
|
<filename>lib/search.rb
require_relative 'priority_queue'
# Generic graph-search helpers shared by the daily solutions.
module Search
  module_function

  # Rebuild the path ending at node n from the predecessor map prevs
  # (node => node it was first reached from; the root maps to nil/absent).
  # Returns the full path from the search root to n, inclusive.
  def path_of(prevs, n)
    path = [n]
    current = n
    while (current = prevs[current])
      path.unshift(current)
    end
    path
  end

  # A* search from start.
  # neighbours: callable pos -> [[neighbour, edge_cost], ...]
  # heuristic:  callable pos -> lower-bound estimate of remaining cost.
  #             Must be consistent (monotone): closed nodes are never
  #             reopened and the open set is a MonotonePriorityQueue.
  # goal:       callable or hash; truthy for goal nodes.
  # Returns [cost, predecessor-map] for the first goal popped, or nil
  # when no goal is reachable.
  def astar(start, neighbours:, heuristic:, goal:)
    # Float::INFINITY is the idiomatic spelling of the old 1.0 / 0.0.
    g_score = Hash.new(Float::INFINITY)
    g_score[start] = 0
    closed = {}
    open = MonotonePriorityQueue.new
    open[start] = heuristic[start]
    prev = {}
    while (current = open.pop)
      # Stale queue entries may remain after a node's score improved.
      next if closed[current]
      closed[current] = true
      return [g_score[current], prev] if goal[current]
      neighbours[current].each { |neighbour, cost|
        next if closed[neighbour]
        tentative_g_score = g_score[current] + cost
        next if tentative_g_score >= g_score[neighbour]
        prev[neighbour] = current
        g_score[neighbour] = tentative_g_score
        open[neighbour] = tentative_g_score + heuristic[neighbour]
      }
    end
    nil
  end

  # Breadth-first search by generations.
  # Stops as soon as num_goals goals have been found (default 1);
  # pass Float::INFINITY to exhaust the reachable space.
  # Returns {gen:, goals:, prev:} where gen is the last generation explored,
  # goals maps each found goal to its distance from start, and prev is a
  # predecessor map usable with path_of.
  def bfs(start, num_goals: 1, neighbours:, goal:)
    current_gen = [start]
    prev = {start => nil}
    goals = {}
    gen = -1
    until current_gen.empty?
      gen += 1
      next_gen = []
      while (cand = current_gen.shift)
        if goal[cand]
          goals[cand] = gen
          if goals.size >= num_goals
            next_gen.clear
            break
          end
        end
        neighbours[cand].each { |neigh|
          next if prev.has_key?(neigh)
          prev[neigh] = cand
          next_gen << neigh
        }
      end
      current_gen = next_gen
    end
    {
      gen: gen,
      goals: goals,
      prev: prev,
    }
  end
end
|
petertseng/adventofcode-rb-2019
|
06_universal_orbit_map.rb
|
<filename>06_universal_orbit_map.rb
require_relative 'lib/search'
# --- Day 6: Universal Orbit Map driver ---
input = ARGF.each_line.map(&:chomp)
# orbit: child body => the body it directly orbits.
# transfer: undirected adjacency for orbital-transfer BFS.
orbit = {}
transfer = Hash.new { |h, k| h[k] = [] }
input.each { |x|
  a, b = x.split(?))
  orbit[b] = a
  transfer[a] << b
  transfer[b] << a
}
orbit.freeze
transfer.freeze
# Part 1: total direct + indirect orbits = sum of each body's depth.
# Not convinced the cache makes that big of a difference, but sure.
cache = {}
depth = ->x { cache[x] ||= (orbiting = orbit[x]) ? 1 + depth[orbiting] : 0 }
puts orbit.keys.sum(&depth)
unless (youOrbit = orbit['YOU']) && (sanOrbit = orbit['SAN'])
  puts 'nonexistent'
  exit 0
end
# Part 2: transfers between the bodies YOU and SAN orbit (not YOU/SAN themselves).
result = Search.bfs(youOrbit, neighbours: transfer, goal: {sanOrbit => true})
puts result[:goals].empty? ? 'impossible' : result[:gen]
|
petertseng/adventofcode-rb-2019
|
18_many_worlds_interpretation.rb
|
require_relative 'lib/search'
# Pack the characters of chars that fall within range into a bitmask,
# bit 0 corresponding to the first character of the range.
# Characters outside the range are ignored.
def bitfield(chars, range)
  lowest = range.begin.ord
  mask = 0
  chars.each do |ch|
    mask |= 1 << (ch.ord - lowest) if range.cover?(ch)
  end
  mask
end
# BFS from the start position(s) and every key to each directly reachable key,
# recording distance plus key/door bitfields seen along the path.
# sources: flattened positions (starts first, then keys); positions are
# renumbered to their index in this list.
# Returns an array indexed by source index, each element a hash
# dest-index => {pos:, dist:, keys:, doors:} (frozen).
def key_to_key(flat_input, width, sources)
  # AoC-specific optimisation:
  # For all paths between key -> key that contain doors,
  # the doors block the ONLY path to the key.
  # Given this property is true, all key -> key paths are precomputed.
  # This property is false for these inputs, with sources:
  # https://www.reddit.com/r/adventofcode/comments/ecj4e7/2019_day_18_challenging_input/
  # ##########
  # #.a###.Ab#
  # #.B..@.###
  # #...######
  # ##########
  # https://www.reddit.com/r/adventofcode/comments/ecgyey/2019_day_18_part_1_im_not_seeing_how_to_optimize/fbc3iih/
  # #######
  # #....@#
  # #.###A#
  # #.###b#
  # #.aBCc#
  # #######
  idx = sources.each_with_index.to_h
  # BFS from {start, each key} to all other single keys.
  # Paths picking up multiple keys will be computed after, using this info.
  # Positions will be renumbered to be their index in the list.
  sources.map { |src|
    have_new_key = ->pos { pos != src && (?a..?z).cover?(flat_input[pos]) }
    keys = Search.bfs(
      src, num_goals: Float::INFINITY,
      neighbours: ->pos {
        # Stop expanding past a newly found key; all_pairs combines legs later.
        next [] if have_new_key[pos]
        [pos - width, pos + width, pos - 1, pos + 1].select { |npos|
          flat_input[npos] != ?#
        }
      },
      goal: have_new_key,
    )
    keys[:goals].to_h { |pos, dist|
      path = Search.path_of(keys[:prev], pos)
      things_on_path = path.map { |path_pos| flat_input[path_pos] }
      [idx[pos], {
        pos: idx[pos],
        dist: dist,
        # Represent keys and doors as bitfields so set intersections become cheap
        keys: bitfield([flat_input[pos], flat_input[src]], ?a..?z),
        doors: bitfield(things_on_path, ?A..?Z),
      }.freeze]
    }
  }
end
# All-pairs shortest key-to-key paths via Floyd-Warshall.
# https://en.wikipedia.org/wiki/Floyd%E2%80%93Warshall_algorithm
# Using this is much faster than traveling the entire map for each key.
# keys_from: array indexed by node; each element maps dest index =>
# {pos:, dist:, keys:, doors:}. Mutated in place, then returned as a
# frozen array of per-node entry lists sorted by descending distance.
def all_pairs(keys_from)
  size = keys_from.size
  (0...size).each do |mid|
    (0...size).each do |from|
      next if from == mid
      first_leg = keys_from.dig(from, mid)
      next unless first_leg
      (0...size).each do |to|
        next if to == from || to == mid
        second_leg = keys_from.dig(mid, to)
        next unless second_leg
        combined = first_leg[:dist] + second_leg[:dist]
        existing = keys_from.dig(from, to)
        next if existing && existing[:dist] <= combined
        keys_from[from][to] = {
          pos: second_leg[:pos],
          dist: combined,
          keys: first_leg[:keys] | second_leg[:keys],
          doors: first_leg[:doors] | second_leg[:doors],
        }.freeze
      end
    end
  end
  keys_from.map { |entries| entries.values.sort_by { |e| -e[:dist] }.freeze }.freeze
end
# Minimum total steps for the robots to collect every key, via A*.
# keys_from: output of all_pairs (per-node entries sorted by descending dist).
# num_keys: number of distinct keys. robots: starting node indices
# (assumed to be the first entries of keys_from's renumbering).
def all_keys_time(keys_from, num_keys, robots)
  all_keys = (1 << num_keys) - 1
  # Pack all robot positions into one int.
  # Now that positions are renumbered to max 31 (4 robots + 26 keys + 1 dummy start),
  # they fit in 5 bits.
  # With the key bitfield taking 26 bits, the entire state fits within 46 bits.
  bits_per_robot = keys_from.size.bit_length
  robot_mask = (1 << bits_per_robot) - 1
  robot_base = (0...robots.size).map { |i| bits_per_robot * i + num_keys }
  cost, _junk = Search.astar(
    # Assumption: Renumbering done by keys_from put the robots first.
    robots.zip(robot_base).map { |bot, base| bot << base }.reduce(0, :|),
    neighbours: ->(robots_and_keys) {
      keys = robots_and_keys & all_keys
      robot_base.flat_map { |base|
        robot = (robots_and_keys >> base) & robot_mask
        keys_from[robot].filter_map { |key|
          # Have these keys already.
          next if key[:keys] | keys == keys
          # Don't have all keys needed.
          next unless key[:doors] | keys == keys
          # Move this robot to the key and add the picked-up keys to the state.
          [(robots_and_keys & ~(robot_mask << base)) | (key[:pos] << base) | key[:keys], key[:dist]]
        }
      }
    },
    # heuristic - max dist to remaining keys is at most harmless,
    # but does help for certain inputs, it seems.
    # Also tried unsuccessfully:
    # * MST of remaining keys
    # * number remaining keys * minimum distance between two keys
    # * Dijkstra's:
    # heuristic: Hash.new(0),
    heuristic: ->(robots_and_keys) {
      keys = robots_and_keys & all_keys
      robot_base.sum { |base|
        robot = (robots_and_keys >> base) & robot_mask
        # since keys_from is sorted in descending order of dist:
        not_picked_up = keys_from[robot].find { |key|
          key[:keys] | keys != keys
        }
        not_picked_up&.[](:dist) || 0
      }
    },
    goal: ->(robots_and_keys) { robots_and_keys & all_keys == all_keys },
  )
  cost
end
# --- Day 18: Many-Worlds Interpretation driver ---
input = ARGF.each_line.map(&:chomp).map(&:freeze).freeze
# Represent position as y * width + x, indexing into flattened grid.
# The edge of the grid is all walls, so this is fine.
width = input.map(&:size).max
flat_input = input.map { |l| l.ljust(width, ' ') }.join.freeze
keys = []
robots = []
input.each_with_index { |row, y|
  row.chars.each_with_index { |cell, x|
    pos = y * width + x
    keys << pos if (?a..?z).cover?(cell)
    robots << pos if cell == ?@
  }
}
# If this is a part 1 that can be converted to a part 2, then do both.
# If not, it's fine, just do what's given, since tests use maps that only do part 1 or only do part 2.
can_part_2 = robots.size == 1 && begin
  bot = robots[0]
  # bot, diagonal, orthogonal stay in scope for the branch below.
  diagonal = [-width, width].product([-1, 1]).map(&:sum)
  orthogonal = [-width, width, -1, 1]
  surrounding = diagonal + orthogonal
  surrounding.all? { |s| flat_input[bot + s] == ?. }
end
if can_part_2
  # Calculate the key-to-key map for part 2,
  # then transform it into one for part 1.
  # (This is faster because of traveling the map fewer times)
  robots = diagonal.map { |diag| bot + diag }
  # Wall off the centre, leaving the four diagonal entrances.
  flat_input = flat_input.dup
  orthogonal.each { |orth| flat_input[bot + orth] = ?# }
  flat_input.freeze
  # Sources: dummy part-1 start (index 0), 4 robots (1..4), then keys.
  k2k = key_to_key(flat_input, width, [bot] + robots + keys)
  k2k1 = k2k.map(&:dup)
  add_pair = ->(i, j, dist) {
    k2k1[i][j] = {pos: j, dist: dist, keys: 0, doors: 0}.freeze
    k2k1[j][i] = {pos: i, dist: dist, keys: 0, doors: 0}.freeze
  }
  (1..4).each { |i|
    # Allow each key to go back to the corner (part 2 entrances).
    # Normally, they would not try to because the corner has no keys.
    k2k1[i].each { |k, v| k2k1[k][i] = v.merge(pos: i) }
    # Centre (part 1 entrance) is 2 away from each corner (part 2 entrances)
    add_pair[0, i, 2]
  }
  (1..4).to_a.combination(2) { |i, j|
    # Allow each corner (part 2 entrances) to reach each other.
    y1, x1 = robots[i - 1].divmod(width)
    y2, x2 = robots[j - 1].divmod(width)
    add_pair[i, j, (y1 - y2).abs + (x1 - x2).abs]
  }
  puts all_keys_time(all_pairs(k2k1), keys.size, [0])
  puts all_keys_time(all_pairs(k2k), keys.size, (1..robots.size).to_a)
else
  k2k = key_to_key(flat_input, width, robots + keys)
  puts all_keys_time(all_pairs(k2k), keys.size, (0...robots.size).to_a)
end
|
petertseng/adventofcode-rb-2019
|
16_flawed_frequency_transmission.rb
|
<reponame>petertseng/adventofcode-rb-2019<filename>16_flawed_frequency_transmission.rb
# One phase of the Flawed Frequency Transmission, applied to digits in place.
# Uses prefix sums so output digit i costs O(size / (i + 1)) range sums
# instead of a full O(size) dot product with the 0,1,0,-1 pattern.
def fft(digits)
  running = 0
  # prefix[k] = sum of the first k digits (prefix[0] == 0).
  prefix = [0]
  digits.each { |d| prefix << (running += d) }
  size = digits.size
  digits.each_index do |i|
    period = i + 1
    total = 0
    sign = 1
    start = i
    # The pattern for digit i is +1 over [i, i+period), 0 over the next
    # period, -1 over the one after, etc. Sum each signed run via prefix.
    while start < size
      stop = [start + period, size].min
      total += (prefix[stop] - prefix[start]) * sign
      sign = -sign
      start += period * 2
    end
    digits[i] = total.abs % 10
  end
end
# --- Day 16: Flawed Frequency Transmission driver ---
input = ARGF.read.chomp.chars.map(&method(:Integer)).freeze
# Part 1: 100 FFT phases over the raw signal.
digits = input.dup
100.times { fft(digits) }
puts digits.take(8).join
offset = Integer(input.take(7).join, 10)
# The closed-form approach below only works in the second half of the signal.
raise "Can't do it the fast way" unless offset * 2 >= input.size * 10000
# As long as we are in the latter half of the list,
# each value is just the sum of all values coming after it.
# Just going right-to-left with a running sum is sufficient to solve this relatively quickly.
# But let's do slightly better.
# Observe the contribution of a lone 1.
# It's binomial coefficients.
# Going left adds 1 to n and k, going down (one iteration) adds 1 to n.
# The coefficients for the 100th iteration can be calculated and reused.
# They would otherwise be very large, but all operations are modulo 10.
# So a few theorems can help make it easier.
#
# We're going to want binom(99 + i, i) % 10.
# Because binom(n, k) == binom(n, n - k), that's equal to:
# binom(99 + i, 99) % 10.
# By Chinese Remainder Theorem...
#
# x congruent to a_i mod n_i
# x = sum a_i y_i z_i
# y_i is the product of all other moduli
# z_i is the modular inverse of y_i mod n_i
#
# Modular inverse of 5 mod 2 is 1
# Modular inverse of 2 mod 5 is 3
#
# 1 * 5 = 5
# 2 * 3 = 6
#
# So that tells us we want:
# (binom(99 + i, 99) % 2) * 5 + (binom(99 + i, 99) % 5) * 6
big_size = input.size * 10000 - offset
rsize = 8
result = [0] * rsize
# Accumulate coeff * input digit for every index i congruent to one of
# little_strides mod big_stride, for each of the 8 answer positions.
stride = ->(big_stride, little_strides) {
  ((0...big_size) % big_stride).each { |big_stride_base|
    little_strides.each { |little_stride, coeff|
      i = big_stride_base + little_stride
      dist_from_end = big_size - i
      [dist_from_end, rsize].min.times { |j|
        result[j] += input[(offset + i + j) % input.size] * coeff
      }
    }
  }
}
# By Lucas's Theorem, binom(n, k) % m depends on the base-m expansion of n and k.
# It's the product of the binomials for each pair of digits of n and k paired
# So for each position, that digit of n must be >= that digit of k.
#
# For base 2:
# n & k == k ? 1 : 0
# So (binom(99 + i, 99) % 2) * 5 is rewritten as:
# ((99 + i) & 99 == 99 ? 1 : 0) * 5.
# Since 99 in base 2 is 1100011, we know that the lower seven bits of (99 + i) must be 0b11xxx11.
# So subtract 99 and the lower 7 bits of i must be 0b00xxx00.
# Notice the values increase by 4 each time and three bits means 8 values.
# Seven bits is 128.
# Thus:
# ((0...32) % 4).include?(i % 128) ? 5 : 0
stride[128, ((0...32) % 4).to_h { |x| [x, 5] }]
# For base 5:
# The base-5 expansion of 99 is 344_5.
# The only possibilities for last three base-5 digits of base-5 expansion for n must be 344_5 or 444_5.
# For 344_5, result is binom(3, 3) * binom(4, 4) * binom(4, 4) = 1 * 1 * 1 = 1.
# For 444_5, result is binom(4, 3) * binom(4, 4) * binom(4, 4) = 4 * 1 * 1 = 4.
# Multiply by 6 to get 6 and 24, mod by 10 to get 6 and 4.
# These cases happen when (99 + i) % 125 == 99 and (99 + i) % 125 == 124.
# Subtract 99 from both sides to get i % 125 == 0 and i % 125 == 25.
stride[125, {0 => 6, 25 => 4}]
puts result.map { |x| x % 10 }.join
|
petertseng/adventofcode-rb-2019
|
09_intcode_relative.rb
|
require_relative 'lib/intcode'
# --- Day 9: Sensor Boost driver ---
# Flags: --no-opt disables Intcode function optimisation, -sp uses sparse
# memory, -dd/-ds dynamic/static disassembly, -s prints execution stats.
OPT = !ARGV.delete('--no-opt')
SPARSE = ARGV.delete('-sp')
DISAS_DYNAMIC = ARGV.delete('-dd')
disas_static = ARGV.delete('-ds')
stats = ARGV.delete('-s')
# Run the BOOST program with a single input value, returning its output.
def run(mem, input)
  Intcode.new(mem, sparse: SPARSE, funopt: OPT).continue(disas: DISAS_DYNAMIC, input: input).output
end
# Program may be given as a comma-separated first argument or read from ARGF.
mem = (ARGV[0]&.include?(?,) ? ARGV[0] : ARGF.read).split(?,).map(&method(:Integer)).freeze
puts run(mem, 1)
puts run(mem, 2)
Intcode.disas(mem) if disas_static
if stats
  ic = Intcode.new(mem)
  ic.continue(stats: true, input: 2)
  p ic.times_run.sort_by(&:last)
  p ic.jumps_taken.sort_by(&:last)
end
|
petertseng/adventofcode-rb-2019
|
08_space_image_format.rb
|
# --- Day 8: Space Image Format ---
# The image is 25 wide by 6 tall; layers are consecutive slices of the stream.
verbose = ARGV.delete('-v')
img_width = 25
layer_size = img_width * 6
digit_stream = ARGF.read.chomp
layers = digit_stream.each_char.each_slice(layer_size).to_a
# Part 1: on the layer with the fewest zeroes, multiply count of 1s by count of 2s.
fewest_zeroes = layers.min_by { |l| l.count(?0) }
p fewest_zeroes.tally if verbose
puts fewest_zeroes.count(?1) * fewest_zeroes.count(?2)
# Part 2: per pixel, the first non-transparent (non-2) digit, front layer first.
visible = layers.transpose.map { |stack| stack.find { |d| d != ?2 } }
visible.each_slice(img_width) { |row| puts row.map { |d| d == ?1 ? ?# : ' ' }.join }
|
petertseng/adventofcode-rb-2019
|
plain_intcode.rb
|
<reponame>petertseng/adventofcode-rb-2019<gh_stars>10-100
require_relative 'lib/intcode'
# Run an arbitrary Intcode program from the command line.
# ARGV: program (comma-separated), then any input values.
# Flags: -dd/-ds dynamic/static disassembly, -m dump memory, -s stats.
DISAS_DYNAMIC = ARGV.delete('-dd')
disas_static = ARGV.delete('-ds')
MEM = ARGV.delete('-m')
STATS = ARGV.delete('-s')
# Run the program to completion, feeding inputs one at a time on demand.
def run(mem, inputs)
  mem = mem.dup
  inputs = inputs.dup
  Intcode.new(mem).continue(stats: STATS, disas: DISAS_DYNAMIC, mem_all: MEM, input: -> { inputs.shift })
end
mem = ARGV.shift.split(?,).map(&method(:Integer)).freeze
inputs = ARGV.map(&method(:Integer))
ic = run(mem, inputs)
Intcode.disas(ic.mem) if disas_static
if STATS
  p ic.times_run.sort_by(&:last)
  p ic.jumps_taken.sort_by(&:last)
end
# NOTE(review): other drivers in this repo read ic.mem; presumably Intcode
# exposes both #memory and #mem — confirm the accessor names match.
puts "%0 #{ic.memory[0]}"
puts "out #{ic.output}"
|
petertseng/adventofcode-rb-2019
|
11_intcode_langtons_ant.rb
|
require 'set'
require_relative 'lib/intcode'
# Unknown grid size
# we'll assume they won't exceed approx 1<<29 in each direction.
# Two coordinates; (1<<60).object_id indicates it is still Fixnum, not Bignum.
# A position is packed as y * Y + x, so direction deltas add directly to it.
COORD = 30
Y = 1 << COORD
ORIGIN = (Y / 2) << COORD | (Y / 2)
L = -1
R = 1
U = -Y
D = Y
# TURN[turn_output][current_direction] => new direction delta.
TURN = [
  # 0 = left
  {U => L, L => D, D => R, R => U}.freeze,
  # 1 = right
  {U => R, L => U, D => L, R => D}.freeze,
].freeze
# Reverse-engineer the day 11 Intcode "ant" so it can be simulated natively.
# Runs one cycle against the real program to observe its colour/turn pattern,
# then reads the cycle count straight out of the program's halting check.
# Returns {initial_pair:, n: remaining inputs to consume, state: turn history}.
def understand_ant(mem)
  ic = Intcode.new(mem).continue(input: 0)
  initial_pair = ic.output.dup
  ic.output.clear
  # Feed 1s until the program returns to the same instruction pointer,
  # i.e. one full cycle has elapsed.
  pos = ic.pos
  ic.continue(input: 1)
  ic.continue(input: 1) until ic.pos == pos
  # Expectations:
  # The ant runs N cycles where each cycle takes M inputs
  # Colours is always the opposite of what's given.
  # Turns checks whether the input is the same as it was M inputs ago (1 cycle ago).
  # One cycle isn't actually enough to tell that both patterns hold,
  # but I don't feel like being smarter.
  colours, turns = ic.output.each_slice(2).to_a.transpose
  raise "unexpected colours #{colours}" if colours.include?(1)
  # Determine N by examining the code:
  # the 7 words before the halt are expected to be a compare + conditional jump
  # back to the loop start, with N as the compare's immediate operand.
  halt = mem.index(99)
  insts = mem[halt - 7, 7]
  cmp, arg1, arg2, dst, jmp, jmparg, jmpdst = insts
  raise "compare isn't a compare: #{insts}" unless [7, 8].include?(cmp % 100)
  raise "compare doesn't compare pos to immed: #{insts}" unless [1, 10].include?(cmp / 100)
  n = cmp / 100 == 1 ? arg1 : arg2
  raise "jump isn't a jump: #{insts}" unless [5, 6].include?(jmp % 100)
  raise "jump doesn't test comparison result: #{insts}" if jmparg != dst
  raise "jump destination isn't initial position: #{insts}" if jmpdst != pos
  {
    initial_pair: initial_pair.freeze,
    # For easier accounting, I'll report how many inputs I need to take.
    # The code does a <, and the counter gets incremented before compared,
    # so it's 1 fewer cycle than the printed value of N.
    n: (n - 1) * turns.size,
    state: turns.freeze,
  }
end
# Drive the painting robot over the grid.
# keep_drawing: callable -> truthy while the robot should keep running.
# draw: callable(current panel colour 0/1) -> array of outputs, consumed as
#       [colour, turn] pairs (pairs may arrive in any ratio per input).
# origin_white: start on a white panel (part 2).
# Returns [set of white panels, set of visited panels].
def run(keep_drawing, draw, origin_white: false)
  visited = Set.new
  pos = ORIGIN
  white = Set.new
  white.add(pos) if origin_white
  # Number of initial steps to trace to STDERR; leftover debugging aid.
  # (Was `origin_white ? 0 : 0` — a no-op ternary with identical arms.)
  debug_steps = 0
  dir = U
  while keep_drawing[]
    # The programs for this day have been sure to give exactly two outputs per one input,
    # but this would handle other ratios.
    output = draw[white.include?(pos) ? 1 : 0]
    while output.size >= 2
      colour, turn = output.shift(2)
      visited << pos
      if debug_steps > 0
        STDERR.puts("#{colour} #{turn} from #{pos.divmod(Y).map { |x| x - Y / 2 }}")
        debug_steps -= 1
      end
      # Paint (1 adds to white, 0 removes), turn, then step forward.
      white.send(colour == 1 ? :<< : :delete, pos)
      dir = TURN[turn][dir]
      pos += dir
    end
  end
  [white, visited]
end
# --- Day 11: Space Police driver ---
# -s runs the real Intcode program; otherwise the ant is simulated natively.
slow = ARGV.delete('-s')
input = (ARGV[0]&.include?(?,) ? ARGV[0] : ARGF.read).split(?,).map(&method(:Integer)).freeze
# Part 1: count panels painted at least once, starting on black.
_, visit = if slow
  ic = Intcode.new(input)
  run(->{ !ic.halted? }, ->w { ic.continue(input: w).output })
else
  ant = understand_ant(input)
  initial_consumed = false
  ant_state = ant[:state].dup
  run(->{ ant[:n] > 0 }, ->w {
    # First response was captured by understand_ant before any input mattered.
    unless initial_consumed
      initial_consumed = true
      next ant[:initial_pair].dup
    end
    ant[:n] -= 1
    # Colour is the opposite of the input; turn compares against one cycle ago.
    prev = ant_state.shift
    ant_state << w
    [1 - w, w ^ prev ^ 1]
  })
end
puts visit.size
# Part 2: repaint starting on white, then render the registration identifier.
ic = Intcode.new(input)
white, _ = run(->{ !ic.halted? }, ->w { ic.continue(input: w).output }, origin_white: true)
ys, xs = white.to_a.map { |pos| pos.divmod(Y) }.transpose
Range.new(*ys.minmax).each { |y|
  puts Range.new(*xs.minmax).map { |x|
    white.include?(y * Y + x) ? ?# : ' '
  }.join
}
|
petertseng/adventofcode-rb-2019
|
15_intcode_search.rb
|
require_relative 'lib/intcode'
require_relative 'lib/search'
# Unknown grid size (well, I know it's 41x41, but without that knowledge)
# we'll assume they won't exceed approx 1<<29 in each direction.
# Two coordinates; (1<<60).object_id indicates it is still Fixnum, not Bignum.
# A position is packed as y * Y + x.
COORD = 30
Y = 1 << COORD
ORIGIN = (Y / 2) << COORD | (Y / 2)
# MOVE[command] for droid commands 1-4 (north, south, west, east); index 0 unused.
MOVE = [nil, -Y, Y, -1, 1]
# Cache of one cloned Intcode computer per explored position.
COMPUTERS = {}
# BFS over the droid's maze, cloning the Intcode computer at each newly
# reached cell so no position ever needs to be physically revisited.
# A cell is passable when its computer's last output is nonzero (0 = wall).
def search(start, goal:)
  Search.bfs(
    start,
    neighbours: ->(pos) {
      # Actually, we could be really naive,
      # and only track the last direction traveled,
      # and avoid reversing it (don't go south if you just went north)
      # This works because there are no 2x2 open areas in the maze.
      # But I'll actually track my position,
      # which allows me to draw the maze if I choose to (debugging only).
      (1..4).filter_map { |move|
        new_pos = pos + MOVE[move]
        COMPUTERS[new_pos] ||= COMPUTERS[pos].dup.continue(input: move)
        new_pos if COMPUTERS[new_pos].output.last != 0
      }
    },
    goal: goal,
  )
end
# --- Day 15: Oxygen System driver ---
disas = ARGV.delete('-d')
draw_map = ARGV.delete('-m')
input = (ARGV[0]&.include?(?,) ? ARGV[0] : ARGF.read).split(?,).map(&method(:Integer)).freeze
COMPUTERS[ORIGIN] = Intcode.new(input)
result = search(ORIGIN, goal: ->pos { COMPUTERS[pos].output.last == 2 })
raise 'oxygen not found' if result[:goals].empty?
# Part 1: fewest moves from the origin to the oxygen system.
puts result[:gen]
if disas
  # BUG FIX: Search.bfs returns a :prev predecessor map, not :path, so the
  # old `result[:path]` was always nil. Reconstruct the route to the oxygen
  # system with Search.path_of before converting it into move commands.
  oxygen = result[:goals].keys.first
  path = Search.path_of(result[:prev], oxygen).each_cons(2).map { |a, b| MOVE.index(b - a) }
  ic = Intcode.new(input).continue(input: path, stats: true)
  Intcode.disas(ic.mem, addrs_run: ic.times_run)
end
# Part 2: minutes for oxygen to fill the area = deepest BFS generation from
# the oxygen system (goal never matches, so BFS exhausts the maze).
puts search(result[:goals].keys.first, goal: ->_ { false })[:gen]
Kernel.exit(0) unless draw_map
# Optional map render: S = start, O = oxygen, # = wall, space = open.
ys, xs = COMPUTERS.keys.map { |pos| pos.divmod(Y) }.transpose
Range.new(*ys.minmax).each { |y|
  puts Range.new(*xs.minmax).map { |x|
    pos = y * Y + x
    next ?S if pos == ORIGIN
    next ?# unless (computer = COMPUTERS[pos])
    case computer.output.last
    when 0; ?#
    when 1; ' '
    when 2; ?O
    end
  }.join
}
|
petertseng/adventofcode-rb-2019
|
10_monitoring_station.rb
|
<gh_stars>10-100
TO_DESTROY = 200
# Walk from start in steps of (dy, dx) until hitting an asteroid or leaving
# the grid. dy/dx should already be reduced to the smallest step.
# Returns the flattened position (y * width + x) of the first asteroid hit,
# or nil if the ray exits the grid first.
def asteroid_in_direction(start, dy, dx, asteroids, height, width)
  # y and x are tracked separately rather than just adding dy * width + dx
  # to the flattened position: that blind addition could wrap across a row
  # edge without being noticed.
  y, x = start.divmod(width)
  loop do
    y += dy
    x += dx
    break unless (0...height).cover?(y) && (0...width).cover?(x)
    pos = y * width + x
    return pos if asteroids[pos]
  end
  nil
end
# True when asteroids a1 and a2 have direct line of sight
# (no asteroid sits on a grid point strictly between them).
def detect?(a1, a2, asteroids, height, width)
  y1, x1 = a1.divmod(width)
  y2, x2 = a2.divmod(width)
  dy = y2 - y1
  dx = x2 - x1
  step = dy.gcd(dx)
  # A reduced step of 1 means no grid point lies strictly between the two,
  # so nothing can block the view. This shortcut saves a little bit of time;
  # the general path below would give the same answer without it.
  return true if step == 1
  asteroid_in_direction(a1, dy / step, dx / step, asteroids, height, width) == a2
end
# --- Day 10: Monitoring Station driver ---
verbose = ARGV.delete('-v')
input = ARGF.each_line.map(&:chomp)
height = input.size
width = input.map(&:size).max
# Just like day 03, encode a coordinate as y * width + x,
# because creating [y, x] for all asteroids is bad perf.
# About 2.4x as fast with this.
asteroids = {}
input.each_with_index { |row, y|
  row.chars.each_with_index { |c, x|
    asteroids[y * width + x] = true if c == ?#
  }
}
detect = Hash.new(0)
asteroids.keys.combination(2) { |a1, a2|
  # For each pair, check whether they detect each other.
  # Another idea: add their reduced (dy, dx) to a set,
  # and find the set with the most elements.
  # That turns out to be about 1.5x slower.
  if detect?(a1, a2, asteroids, height, width)
    detect[a1] += 1
    detect[a2] += 1
  end
}
# Part 1: the station is the asteroid that detects the most others.
station, max = detect.max_by(&:last)
p max
p station if verbose
# <= instead of < is intentional: can't destroy itself!
if asteroids.size <= TO_DESTROY
  puts "bad #{asteroids.size}"
  exit 1
end
sy, sx = station.divmod(width)
# has_at_least[n][q]: how many directions in quadrant q hold >= n asteroids.
has_at_least = Hash.new { |h, k| h[k] = [0, 0, 0, 0] }
in_dir = Hash.new { |h, k| h[k] = [] }
# Two optimisations:
# - Using rationals is slightly faster than using atan2.
# - Skip quadrants to avoid having to sort them.
# But part 2 runs in about 1/30 the time of part 1
# (even without both of these), so this was mostly academic.
asteroids.keys.each { |pos|
  next if pos == station
  y, x = pos2d = pos.divmod(width)
  dy = y - sy
  dx = x - sx
  # Quadrants are numbered clockwise from straight up; the rational is the
  # angle's tangent within the quadrant, so sorting by it sorts by angle.
  quadrant, _ = key = if dy < 0 && dx >= 0
    [0, Rational(dx, -dy)]
  elsif dy >= 0 && dx > 0
    [1, Rational(dy, dx)]
  elsif dy > 0 && dx <= 0
    [2, Rational(-dx, dy)]
  elsif dy <= 0 && dx < 0
    [3, Rational(-dy, -dx)]
  else
    raise "no quadrant for #{dy} #{dx}"
  end
  new_size = (in_dir[key] << pos2d).size
  has_at_least[new_size][quadrant] += 1
}
# Walk the laser's sweeps (round = full rotations completed + 1) until the
# quadrant containing the TO_DESTROYth destruction is found.
remain = TO_DESTROY
round = 1
quadrant = 0
until has_at_least[round][quadrant] >= remain
  remain -= has_at_least[round][quadrant]
  quadrant += 1
  if quadrant == 4
    quadrant = 0
    round += 1
  end
end
# Among directions still populated in this round, pick the remain-th by angle,
# then the round-th closest asteroid along that direction.
candidates = in_dir.select { |(q, _), v| q == quadrant && v.size >= round }
_, at_angle = candidates.sort_by(&:first)[remain - 1]
y, x = at_angle.min_by(round) { |y, x| (y - sy).abs + (x - sx).abs }[-1]
puts x * 100 + y
|
petertseng/adventofcode-rb-2019
|
23_category_six.rb
|
<reponame>petertseng/adventofcode-rb-2019<filename>23_category_six.rb
require_relative 'lib/intcode'
# vars prefixed with an underscore are not used by this implementation,
# but may be used as temporaries by the Intcode implementation.
# Native re-implementation of one day 23 NIC, built from constants read out
# of the Intcode program's own memory (see the driver's mem[61..72] slice).
# NOTE(review): the struct field meanings are reverse-engineered from the
# author's input; field order must match the memory layout — confirm before
# reordering.
Computer = Struct.new(:sent, :tmp1, :_tmp2, :_tmp3, :_y, :slot_divisor, :rx_slots, :rx_base, :tx_f_addr, :_to_send, :num_txs, :tx_base) {
  attr_reader :id
  def initialize(mem, *args)
    super(*args)
    @id = tmp1
    # Receive slots: pairs of (present flag, value) read from program memory.
    @rxs = mem[rx_base, rx_slots * 2].each_slice(2).map { |present, val|
      present == 1 ? val : present == 0 ? nil : (raise "unknown present #{present} #{val}")
    }
    # Transmit list: pairs of (destination address, x value).
    @txs = mem[tx_base, num_txs * 2].each_slice(2).to_a
    self.sent = sent == 1 ? true : sent == 0 ? false : (raise "unknown sent #{sent}")
    # NB: None of these tx_f actually use their tx_arg
    # Each known tx_f address corresponds to one combining function over @rxs.
    case tx_f_addr
    when 253
      @tx_f_name = :add_rxs
      @tx_f = ->_ { @rxs.sum }
    when 302
      @tx_f_name = :multiply_rxs
      @tx_f = ->_ { @rxs.reduce(1, :*) }
    when 351
      @tx_f_name = :divide_rxs
      @tx_f = ->_ { @rxs[0] / @rxs[1] }
    when 556
      @tx_f_name = :first_rx
      @tx_f = ->_ { @rxs[0] }
    else raise "unknown tx_f #{tx_f}"
    end
  end
  def to_s
    "slot_divisor #{slot_divisor}, rxs #@rxs, tx_f #@tx_f_name, txs #@txs"
  end
  # Called on an empty input queue: send once if nothing has been sent yet.
  def no_packet
    sent ? [] : send_packets
  end
  # Store an incoming (x, y): x / slot_divisor - 1 selects the slot.
  # Ignores out-of-range slots and values that don't change anything.
  # Returns any packets triggered by the update.
  def receive_packet(x, y)
    rx_slot = x / slot_divisor - 1
    return [] unless (0...rx_slots).cover?(rx_slot)
    return [] if @rxs[rx_slot] == y
    @rxs[rx_slot] = y
    send_packets
  end
  # Emit [addr, x, y] for every tx entry once all rx slots are filled.
  def send_packets
    self.sent = true
    return [] if @rxs.include?(nil)
    # The actual computers do call it with 210 (same across inputs), but it's never used.
    y = @tx_f[210]
    @txs.map { |addr, x| [addr, x, y] }
  end
}
# Simulate the 50-NIC network until the NAT sends the same y twice in a row.
# nics: the computers; the block is called with (nic, packets) each tick and
# must return the [addr, x, y] triples that nic emits.
# Prints the first y sent to address 255 (part 1) and the first repeated
# NAT-delivered y (part 2, then exits).
def run_nics(nics, verbose: false)
  qs = nics.map { [] }.freeze
  nat = nil
  last_y_sent_by_nat = nil
  0.step { |t|
    nics.zip(qs).each_with_index { |(nic, q), i|
      rx = q.size
      tx = []
      (yield nic, q.shift(rx)).each { |addr, x, y|
        tx << [addr, x, y]
        if addr == 255
          # Part 1: the very first y sent to the NAT.
          puts y if nat.nil?
          nat = [x, y].freeze
        else
          qs[addr] << [x, y].freeze
        end
      }
      puts "time #{t}: #{i} receives #{rx} and sends #{tx.size} #{tx}" if verbose && (rx != 0 || tx.size != 0)
    }
    # Idle network: the NAT resends its last packet to address 0.
    # NOTE(review): if all queues drain before anything was sent to 255,
    # nat is nil here and nat[1] raises — presumably the network always
    # reaches the NAT first; confirm against the puzzle inputs.
    if qs.all?(&:empty?)
      if nat[1] == last_y_sent_by_nat
        puts nat[1]
        exit 0
      end
      last_y_sent_by_nat = nat[1]
      qs[0] << nat.dup.freeze
    end
  }
end
# --- Day 23: Category Six driver ---
# -s runs the 50 real Intcode NICs; otherwise each NIC is replaced by the
# native Computer simulation built from constants in its own memory.
verbose = ARGV.delete('-v')
slow = ARGV.delete('-s')
input = (ARGV[0]&.include?(?,) ? ARGV[0] : ARGF.read).split(?,).map(&method(:Integer)).freeze
# Boot each NIC with its network address (0..49) as the first input.
nics = (0..49).map { |x| Intcode.new(input).continue(input: x) }
if slow
  run_nics(nics, verbose: verbose) { |nic, q|
    # -1 signals an empty queue; outputs are consumed in [addr, x, y] triples.
    nic.continue(input: q.empty? ? -1 : q.flatten)
    nic.output.shift(nic.output.size / 3 * 3).each_slice(3)
  }
else
  # mem[61..72] holds the per-NIC state the Computer struct was built around.
  run_nics(nics.map { |nic| Computer.new(nic.mem, *nic.mem[61..72]) }, verbose: verbose) { |nic, q|
    q.empty? ? nic.no_packet : q.flat_map { |pkt| nic.receive_packet(*pkt) }
  }
end
|
petertseng/adventofcode-rb-2019
|
25_cryostasis.rb
|
require_relative 'lib/intcode'
# Locate candidate encodings of str inside Intcode memory.
# The program appears to store strings with each character offset by a
# per-string key derived from a preceding length cell, so consecutive
# character cells differ by (next_char - 1) - prev_char.
# NOTE(review): the decoding formula below is reverse-engineered from the
# author's input; confirm against other inputs before relying on it.
# Returns the addresses of length cells whose string could decode to str.
def find_string(mem, str)
  # Deltas between consecutive cells of an encoded copy of str.
  expected_delta = str.chars.each_cons(2).map { |a, b| (b.ord - 1) - a.ord }
  deltas = mem.each_cons(2).map { |a, b| b - a }
  # Positions where memory's delta pattern matches the string's.
  substr_starts = deltas.each_cons(str.size - 1).each_with_index.filter_map { |ds, i|
    i if ds == expected_delta
  }
  first_char = str[0].ord
  # For each match, find length cells that make the first cell decode to str[0].
  substr_starts.flat_map { |start|
    encoded_start = mem[start]
    (0...start).select { |len_addr|
      len = mem[len_addr]
      encoded_start + len + (start - (len_addr + 1)) == first_char
    }
  }
end
# Unwrap a single-element array.
# Raises RuntimeError (naming the thing sought) unless things has exactly
# one element.
def exactly_one(name, things)
  raise "need exactly one #{name}, not #{things}" if things.size != 1
  things.first
end
# Among the program's functions, find the single one containing a 4-word
# instruction window for which the block returns truthy.
# NOTE(review): f comes from Intcode.functions and is indexed as mem[f];
# presumably each f is a Range of addresses so mem[f] is a slice — confirm.
def exactly_one_function(functions, mem, name)
  exactly_one('function ' + name, functions.select { |f|
    mem[f].each_cons(4).any? { |x| yield x }
  })
end
# Decode the three parameter modes from an Intcode opcode word,
# lowest-order (first operand) mode first.
def modes(op)
  Array.new(3) { |i| (op / (100 * 10**i)) % 10 }
end
# Statically extract the airlock password from the day-25 program without
# playing the game: find the encoded "airlock" string, follow the data flow
# back to the cell that will hold the password, locate the item-weight array
# and the target value, then derive which weights clear the check.
# mem: the Intcode program. Returns the password as an Integer.
def infer_answer(mem)
  # The prompt tells us to look for an airlock password,
  # so it stands to reason that the password will be printed alongside the string "airlock".
  airlock_string = exactly_one('airlock string', find_string(mem, 'airlock'))
  functions = Intcode.functions(mem)
  airlock_string_printer = exactly_one_function(functions, mem, 'printing airlock string') { |a, b, c, d|
    # Writes base of airlock string to a location on the stack.
    next unless [1, 2].include?(a % 100)
    m = modes(a)
    m[2] == 2 && d > 0 && [b, c].zip(m).any? { |arg, mode| mode == 1 && arg == airlock_string }
  }
  # The address printed before the airlock string will eventually contain the answer.
  address_used = exactly_one(
    'address printed before airlock',
    mem[airlock_string_printer].each_cons(4).with_index.flat_map { |(a, b, c, d), i|
      next [] unless [1, 2].include?(a % 100)
      m = modes(a)
      next [] unless m[2] == 2 && d > 0
      [b, c].zip(m).filter_map { |arg, mode| arg if mode == 0 }
    },
  )
  # One function writes a constant to this address.
  # (The constant is not the answer, it's just 0,
  # but it signifies that the answer is about to be computed into that address)
  # (Another way to find this is to find the one room that has an on_entry function)
  const_write_to_address = exactly_one_function(functions, mem, 'writing const to password address') { |a, _, _, d|
    [1102, 1101].include?(a) && d == address_used
  }
  # That function multiplies two values to get a target value and stores the target value.
  target_loc, target = exactly_one(
    'target',
    mem[const_write_to_address].each_cons(4).filter_map { |a, b, c, d|
      [d, mem[b] * mem[c]] if a == 2 && [b, c, d].all? { |x| x > 0 && mem[x] }
    }
  )
  # Find a function that compares against the target value.
  comparer = exactly_one_function(functions, mem, 'comparing against target') { |op, *operands|
    [7, 8].include?(op % 100) && operands.zip(modes(op)).include?([target_loc, 0])
  }
  # That function uses elements of an array in the comparison.
  weight_base_addr = exactly_one(
    'weight array base address',
    mem[comparer].each_cons(8).with_index(comparer.begin).flat_map { |insts, i|
      # We're looking for two instructions of this pattern:
      # write S11 S12 D1
      # write S21 S22 D2
      op1, src1, src2, dst1, op2 = insts
      next [] unless [op1, op2].all? { |op| [1, 2].include?(op % 100) }
      # second instruction must be an an array read (D1 must point to S21 or S22)
      next [] if op1 >= 20000
      next [] unless [i + 5, i + 6].include?(dst1)
      # Anything that looks like a base address offset
      [src1, src2].zip(modes(op1)).filter_map { |v, mode| v if v > 0 && mode == 1 }
    },
  )
  # Find the length of the array, as follows:
  array_len = exactly_one('weight array length', mem.each_cons(8).flat_map { |insts|
    # Find an instruction that stores the address of the function.
    op1, src11, src12, _, op2, src21, src22 = insts
    next [] unless [op1, op2].all? { |op| [1, 2].include?(op % 100) }
    next [] unless [src21, src22].zip(modes(op2)).include?([comparer.begin, 1])
    # The array length is stored right before the address of the function.
    [src11, src12].zip(modes(op1)).filter_map { |v, mode| v if v > 1 && mode == 1 }
  })
  # Each weight-array entry contributes one binary digit (1 when >= target).
  mem[weight_base_addr, array_len].map { |x| x < target ? 0 : 1 }.join.to_i(2)
end
# Decode the length-prefixed string stored at mem[i]: the length comes
# first, then each char offset by (length + position).
def string_at(mem, i)
  n = mem[i]
  (0...n).map { |j| (mem[i + 1 + j] + n + j).chr }.join
end
# Scan all of memory for values that decode as plausible length-prefixed
# strings (printable ASCII or newline under the string_at encoding).
# Returns hashes with the prefix address (:start), length, and decoded text.
def strings(mem)
  mem.each_with_index.filter_map { |len, i|
    next unless len > 0
    # The whole candidate (prefix + len chars) must fit in memory.
    next if mem[i + 1 + len].nil?
    decoded = (0...len).map { |j| mem[i + 1 + j] + j + len }
    next unless decoded.all? { |c| c == 10 || (32..127).cover?(c) }
    {
      start: i,
      len: len,
      s: decoded.pack('c*').freeze,
    }.freeze
  }
end
# Decode the 13-entry item table stored at address 4601 (4 words each).
# loc_id -1 means the item is in the player's inventory; on_pickup is the
# address of a function run when the item is taken (nil when harmless).
# NOTE(review): the 4601 base and the (raw - 27 - index) weight offset are
# taken from this specific program image — confirm if the input changes.
def items(mem)
  mem[4601, 13 * 4].each_slice(4).map.with_index { |(loc, name_addr, weight_enc, hook), i|
    {
      loc_id: loc,
      loc_name: loc == -1 ? 'Inventory' : string_at(mem, loc + 7),
      name: string_at(mem, name_addr),
      weight: weight_enc - 27 - i,
      on_pickup: hook == 0 ? nil : hook,
    }.freeze
  }
end
# Render each item as a fixed-width line: name, weight, location, plus the
# on-pickup function address when one exists.
def fmt_items(mem)
  items(mem).map { |item|
    suffix = item[:on_pickup] ? " #{item[:on_pickup]} on pickup" : ''
    format("%-20<name>s %10<weight>d in %-24<loc_name>s#{suffix}", item)
  }
end
# Decode the room record at address i: name pointer, description pointer,
# optional on-entry function address, then four exit pointers (NESW order,
# 0 meaning no exit in that direction).
def room_at(input, i)
  name_addr = input[i]
  text_addr = input[i + 1]
  entry_hook = input[i + 2]
  exits = %i(north east south west).zip(input[i + 3, 4]).to_h.select { |_, v| v > 0 }
  {
    id: i,
    name_addr: name_addr,
    text_addr: text_addr,
    on_entry: entry_hook == 0 ? nil : entry_hook,
    neighbours: exits,
    name: string_at(input, name_addr).freeze,
    text: string_at(input, text_addr).freeze,
  }.freeze
end
# Depth-first search for a route between room ids in `rooms` (a hash of
# id => {neighbours: {direction => id}}). Returns the list of direction
# symbols leading from `from` to `to`, [] when already there, or nil when
# unreachable.
def path_to(rooms, to, from, seen = {})
  return [] if to == from
  deeper_seen = seen.merge(from => true)
  rooms[from][:neighbours].each { |dir, neighbour|
    next if seen[neighbour]
    sub = path_to(rooms, to, neighbour, deeper_seen)
    return [dir] + sub if sub
  }
  nil
end
# Breadth-first flood from the starting room address (the larger of the
# words at mem[3] and mem[4]) over neighbour pointers, decoding every
# reachable room. Returns a hash of address => room record.
def rooms(mem)
  pending = [[mem[3], mem[4]].max]
  found = {}
  until pending.empty?
    addr = pending.shift
    next if found[addr]
    room = room_at(mem, addr)
    found[addr] = room
    pending.concat(room[:neighbours].values)
  end
  found
end
# Render every known room as a one-line summary: name, a "You are here"
# marker, items present (red when picking them up runs code, blue when
# safe), exits, and — when `current` is given — the key route there from
# the current room. Pass text: true to append room descriptions.
def fmt_rooms(mem, current: nil, text: false)
  items = items(mem).group_by { |i| i[:loc_id] }
  rooms = rooms(mem)
  rooms.values.map { |r|
    parts = ["\e[1;32m#{r[:name]}\e[0m"]
    parts << "\e[1;35mYou are here\e[0m" if r[:id] == current&.[](:id)
    parts.concat((items[r[:id]] || []).map { |item|
      # Red (31) = has an on-pickup function; blue (34) = safe to take.
      "\e[1;#{item[:on_pickup] ? 31 : 34}m#{item[:name]}\e[0m"
    })
    parts.concat(r[:neighbours].filter_map { |ndir, nid|
      "\e[1;33m#{ndir.to_s[0].upcase} = #{rooms[nid][:name]}\e[0m"
    })
    parts << "route #{path_to(rooms, r[:id], current[:id]).map { |dir| dir.to_s[0].upcase }.join}" if current
    parts << r[:text] if text
    parts.join(' - ')
  }
end
# Determine the exact item set for the pressure plate by elimination.
# Pass A: drop one unknown item at a time and try the door — "too light"
# without it means that item is required. Pass B: drop every unknown, then
# carry one unknown at a time — "too heavy" with just that item means it can
# never be carried. Passing the check at any point settles all remaining
# unknowns at once; finally the robot walks through `dir`.
# ic: live machine; items: inventory item hashes; dir: direction command
# into the checkpoint; prefix: log-line prefix.
def brute_force(ic, items, dir, prefix)
  include_items = []
  exclude_items = []
  unknown_items = -> { items - include_items - exclude_items }
  status = -> {
    prefix + ' ' + [
      ['include', include_items],
      ['exclude', exclude_items],
      ['unknown', unknown_items[]],
    ].map { |name, items|
      "#{name}: \e[1m#{items.map { |item| item[:name] }.join(', ')}\e[0m (#{items.sum { |item| item[:weight] }})"
    }.join(' - ')
  }
  items.size.times {
    # Pass A: try the door while missing exactly one unknown item.
    unknown_items[].each { |item|
      case attempt_pressure(ic, ["drop #{item[:name]}", dir, "take #{item[:name]}"])
      when :ok
        exclude_items << item
        include_items.concat(unknown_items[])
        puts status[]
        return
      when :too_light
        puts "#{prefix} too light without #{item[:name]} (#{item[:weight]}) - include it."
        include_items << item
      end
    }
    ic.continue(input: unknown_items[].map { |item| "drop #{item[:name]}" })
    # Pass B: try the door carrying exactly one unknown item.
    unknown_items[].each { |item|
      case attempt_pressure(ic, ["take #{item[:name]}", dir, "drop #{item[:name]}"])
      when :ok
        include_items << item
        exclude_items.concat(unknown_items[])
        puts status[]
        return
      when :too_heavy
        puts "#{prefix} too heavy with #{item[:name]} (#{item[:weight]}) - exclude it."
        exclude_items << item
      end
    }
    ic.continue(input: unknown_items[].map { |item| "take #{item[:name]}" })
  }
  puts status[]
  # Our inventory should have just the items we need now, so let's just try to move in.
  ic.output.clear
  ic.continue(input: dir)
  # Let the main loop print out the output.
end
# Send `cmd` to the machine and classify the pressure-plate response:
# :too_heavy ("lighter" in the output), :too_light ("heavier"), or :ok
# ("proceed"). On failure the output buffer is cleared so the caller only
# ever sees the successful transcript; unknown responses raise.
def attempt_pressure(ic, cmd)
  ic.continue(input: cmd)
  said = ic.ascii_output
  status =
    if said.include?('lighter')
      :too_heavy
    elsif said.include?('heavier')
      :too_light
    elsif said.include?('proceed')
      :ok
    else
      raise "Unknown output #{said}"
    end
  ic.output.clear unless status == :ok
  status
end
# CLI flags: -s dump decoded strings, -i dump items, -r dump rooms,
# -m play interactively (otherwise the answer is inferred statically).
show_strings = ARGV.delete('-s')
show_items = ARGV.delete('-i')
show_rooms = ARGV.delete('-r')
manual = ARGV.delete('-m')
input = (ARGV[0]&.include?(?,) ? ARGV[0] : ARGF.read).split(?,).map(&method(:Integer)).freeze
strings(input).each { |s| puts "@#{s[:start]} (#{s[:len]}): #{s[:s]}" } if show_strings
puts fmt_rooms(input, text: true) if show_rooms
puts fmt_items(input) if show_items
unless manual
  p infer_answer(input)
  Kernel.exit(0)
end
# Manual play: a REPL wrapped around the game, with save/load, fast travel,
# take-all and brute-force helpers layered on top of raw game commands.
ic = Intcode.new(input).continue(input: [])
rooms = rooms(ic.mem).freeze
rooms_by_name = rooms.values.to_h { |r| [r[:name], r] }.freeze
prefix = '!!!'.freeze
saves = {}
current_loc = nil
prev_output = nil
loop {
  unless ic.output.empty?
    prev_output = ic.ascii_output
    puts prev_output
    ic.output.clear
    # Room banners look like "== Name =="; autosave on every room change.
    if prev_output =~ /== ([A-Za-z ]+) ==/ && current_loc&.[](:name) != $1
      current_loc = rooms_by_name[$1]
      saves['auto'] = {ic: ic.dup, output: prev_output, loc: current_loc}
    end
  end
  s = STDIN.gets
  break if s.nil?
  # Walk the precomputed route to `target`, suppressing intermediate output.
  fast_travel = ->(target, purpose = '') {
    moves = path_to(rooms, target[:id], current_loc[:id])
    puts "#{prefix} Using \e[1;33m#{moves}\e[0m to fast travel to \e[1;32m#{target[:name]}\e[0m #{purpose}"
    # Discard output from all but last
    last_move = moves.pop
    ic.continue(input: moves.map(&:to_s))
    ic.output.clear
    ic.continue(input: last_move.to_s)
    current_loc = target
  }
  if s.start_with?('sa')
    # sa [name]: snapshot the machine and location.
    name = s.split[1] || 'unnamed'
    saves[name] = {ic: ic.dup, output: prev_output, loc: current_loc}
    puts "#{prefix} Saved #{name}"
  elsif s.start_with?(?l)
    # l [name]: restore a snapshot.
    name = s.split[1] || 'unnamed'
    if (save = saves[name])
      puts "#{prefix} Loading #{name}"
      ic = save[:ic].dup
      current_loc = save[:loc]
      puts save[:output]
    else
      puts "#{prefix} There is no save named #{name}. Try one of: #{saves.keys}"
    end
  elsif s.start_with?(?i) && s.chomp != 'inv'
    # i: item overview ('inv' still goes to the game itself).
    puts fmt_items(ic.mem)
  elsif s.start_with?(?r)
    # r: room overview with routes from the current room.
    puts fmt_rooms(ic.mem, current: current_loc)
  elsif s.start_with?('ft')
    # ft <query>: fast travel by room-name substring, then subsequence.
    unless (query = s.split[1])
      puts "#{prefix} Need a target to fast travel to"
      next
    end
    # Hmm, first try substring matching.
    targets = rooms.values.select { |r| r[:name].downcase.include?(query.downcase) }
    # That didn't work, guess let's try regex... (subsequence matching)
    if targets.size != 1
      regex = query.downcase.chars.join('.*')
      targets = rooms.values.select { |r| r[:name].downcase.match?(regex) }
    end
    if targets.size == 0
      puts "#{prefix} No matching rooms for #{query}"
    elsif targets.size > 1
      puts "#{prefix} Too many matches: #{targets.map { |r| r[:name] }}, disambiguate"
    else
      fast_travel[targets[0]]
    end
  elsif s.chomp == 'ta' || s.start_with?('takea') || s.start_with?('take all')
    # ta: fast-travel to and pick up every item that is safe to take.
    # So, I could use BFS to find the shortest path that takes all items and ends at the checkpoint...
    # but I don't feel like it.
    pickup = 0
    already_in = 0
    skip = 0
    items(ic.mem).each { |item|
      if item[:loc_id] == -1
        puts "#{prefix} \e[1;32m#{item[:name]}\e[0m is already in the inventory"
        already_in += 1
        next
      end
      if item[:on_pickup]
        puts "#{prefix} \e[1;31m#{item[:name]}\e[0m executes a function on pick-up, skipping."
        skip += 1
        next
      end
      pickup += 1
      fast_travel[rooms[item[:loc_id]], "to pick up \e[1;34m#{item[:name]}\e[0m"]
      ic.continue(input: "take #{item[:name]}")
    }
    ic.output.clear
    puts "#{prefix} Picked up #{pickup} items, now have #{already_in + pickup} in inventory. Skipped #{skip} items that execute functions."
  elsif s.start_with?('b')
    # b: brute-force the pressure plate from the room before the final one.
    final_room = exactly_one('room executing function on entry', rooms.values.select { |r| r[:on_entry] })
    penultimate_room = exactly_one('room leading to final room', rooms.values.select { |r|
      r[:neighbours].has_value?(final_room[:id])
    })
    fast_travel[penultimate_room] unless current_loc == penultimate_room
    dir = exactly_one(
      'direction to final room',
      path_to(rooms, final_room[:id], penultimate_room[:id]),
    ).to_s
    items = items(ic.mem).select { |item| item[:loc_id] == -1 }
    brute_force(ic, items, dir, prefix)
  else
    # Anything else goes straight to the game; roll back if it halts us.
    autosave = ic.dup
    ic.continue(input: s)
    if ic.halted?
      puts ic.ascii_output
      puts "#{prefix} Halted. Rolling back."
      ic = autosave.dup
      puts prev_output
    end
  end
}
|
petertseng/adventofcode-rb-2019
|
unspace_golf.rb
|
# Strip indentation and comment lines from the readable golf source and
# emit the compacted one-liner golf9.rb.
code = File.readlines('golf9_spaced.rb').map(&:strip).grep_v(/^ *#/).join
File.open('golf9.rb', ?w) { |f| f.write(code) }
# Run a golfed interpreter `script` on day `day`'s program (taken from the
# last line of that day's solution file), feeding `input` on stdin, and
# return its stdout.
def run(script, day, input)
  %x(echo #{input} | ruby #{script} $(tail -1 #{day}*.rb))
end
# Run both interpreters on two inputs for `day`, check that the compacted
# version matches the spaced one (aborting if not), print the answers, and
# diff them against the expected output file. Leading "0" lines from the
# first input's run are diagnostic noise and are stripped before comparing.
def compare(in1, in2, day)
  spaced1 = run('golf9_spaced.rb', day, in1).lines.drop_while { |l| l == "0\n" }.join
  spaced2 = run('golf9_spaced.rb', day, in2)
  compact1 = run('golf9.rb', day, in1).lines.drop_while { |l| l == "0\n" }.join
  compact2 = run('golf9.rb', day, in2)
  puts "huh? outputs not the same for compacted version, #{spaced1} vs #{compact1}" if spaced1 != compact1
  puts "huh? outputs not the same for compacted version, #{spaced2} vs #{compact2}" if spaced2 != compact2
  raise 'bad' if spaced1 != compact1 || spaced2 != compact2
  puts compact1
  puts compact2
  answer = "#{compact1.chomp}\n#{compact2.chomp}"
  system("echo '#{answer}' | diff -su - expected_output/#{day}")
end
# Verify days 5 and 9 against expected outputs, then report the golf size.
compare(1, 5, '05')
compare(1, 2, '09')
system('wc -c golf9.rb')
|
petertseng/adventofcode-rb-2019
|
17_set_and_forget.rb
|
<filename>17_set_and_forget.rb
require_relative 'lib/intcode'
# Unwrap a single-element collection, raising (with `name` in the message)
# when there are zero or several candidates.
def exactly_one(name, things)
  count = things.size
  raise "need exactly one #{name}, not #{things}" if count != 1
  things.first
end
# Decode the first two Intcode parameter modes (hundreds and thousands
# digits of the opcode).
def modes(op)
  [100, 1000].map { |place| (op / place) % 10 }
end
# Decode the scaffold map directly from the program's data instead of
# running it: the map is run-length encoded (alternating runs of empty
# cells and scaffold), and the grid width is recovered from the immediate
# operand of the dust-update function's multiply instruction.
# Returns [robot_loc as [y, x], scaffold cells as [y, x], width,
# alignment_sum (part 1 answer)].
def read_intcode_map(mem)
  _, dust_update = find_dust(mem)
  width = mem[dust_update].each_cons(4) { |op, arg1, arg2, _|
    next if op % 100 != 2
    modes = modes(op)
    break arg1 if modes == [1, 2]
    break arg2 if modes == [2, 1]
  }
  dot = true
  pos = 0
  scaffold = {}
  inter = Hash.new(0)
  # NOTE(review): mem[7, 2] / mem[11, 2] appear to bound the RLE region in
  # this program image — confirm against lib/intcode if inputs change.
  range = mem[7, 2].max...mem[11, 2].max
  mem[range].each { |len|
    if dot
      pos += len
    else
      len.times {
        scaffold[pos] = true
        # Flag a cell whose left and up neighbours are scaffold; each cell
        # also gets +1 from its right and down neighbours as they appear.
        inter[pos] = 1 if scaffold[pos - 1] && scaffold[pos - width]
        inter[pos - 1] += 1
        inter[pos - width] += 1
        pos += 1
      }
    end
    dot = !dot
  }
  robot_loc = mem[robot_loc_addr(mem, dust_update), 2].reverse
  # A true intersection accumulates exactly 3 (flag + right + down).
  alignment_sum = inter.sum { |pos, v|
    raise "impossible #{pos} #{v}" unless (0..3).cover?(v)
    v == 3 ? pos.divmod(width).reduce(:*) : 0
  }
  [robot_loc, scaffold.keys.map { |x| x.divmod(width) }, width, alignment_sum]
end
# Parse an ASCII scaffold picture. '#' is scaffold, '.' is empty, '^' is
# the robot (at most one); anything else raises. A blank line terminates
# the map. An intersection is a '#' with scaffold on all four sides; the
# alignment sum adds y*x for each one.
# Returns [robot_loc as [y, x], scaffold cells as [y, x], width,
# alignment_sum].
def read_ascii_map(img)
  lines = img.lines.map(&:freeze).freeze
  alignment_sum = 0
  robot_loc = nil
  scaffold = []
  width = lines.map { |l| l.chomp.size }.max
  lines.each_with_index { |row, y|
    break if row == "\n"
    row.chomp.each_char.with_index { |cell, x|
      case cell
      when ?#
        scaffold << [y, x]
        crossing = x > 0 && y > 0 &&
                   row[x - 1, 3] == '###' &&
                   lines[y - 1][x] == ?# &&
                   lines[y + 1]&.[](x) == ?#
        alignment_sum += y * x if crossing
      when ?.
        # open space
      when ?^
        raise "multiple robots? #{robot_loc} vs #{y} #{x}" if robot_loc
        robot_loc = [y, x]
      else
        raise "bad char #{cell}"
      end
    }
  }
  [robot_loc, scaffold, width, alignment_sum]
end
# Find the memory address holding the robot's position: the positional
# operand of the first equality-compare (opcode 8) instruction inside the
# dust-update function.
def robot_loc_addr(mem, dust_update)
  cmp = mem[dust_update].each_cons(4).find { |op, *| op % 100 == 8 }
  candidates = cmp[1, 2].zip(modes(cmp[0])).filter_map { |v, mode| v if mode == 0 }
  exactly_one('robot loc', candidates)
end
# Locate the dust counter cell (the value output right before halt, i.e.
# the 4, addr, 99 pattern) and the single function that writes to it.
# Returns [dust_addr, dust_update_function_range].
def find_dust(mem)
  dust = exactly_one('dust location', mem.each_cons(3).filter_map { |a, b, c|
    b if a == 4 && c == 99
  })
  updaters = Intcode.functions(mem).select { |f|
    mem[f].each_cons(4).any? { |a, _, _, d|
      # Writes to the dust address
      a < 20000 && [1, 2].include?(a % 100) && d == dust
    }
  }
  [dust, exactly_one('dust_update', updaters)]
end
# Compute the total dust without walking the path: disable map printing,
# then hijack the machine into the dust-update function once per scaffold
# cell by teleporting the robot there, and finally read the accumulated
# dust cell. Mutates the machine's memory in place.
def teleport_robot(ic, scaffold)
  mem = ic.mem
  dust, dust_update = find_dust(mem)
  robot_loc_addr = robot_loc_addr(mem, dust_update)
  # Turn printing off
  mem[robot_loc_addr - 1] = 0
  # Set return addr to current pos (where we're pausing for input)
  # so it pauses for input after having called the function
  mem[ic.relative_base] = ic.pos
  scaffold.each { |y, x|
    # Teleport to this scaffold location and call dust update function.
    mem[robot_loc_addr, 2] = [x, y]
    ic.continue(hijack: dust_update.begin, input: [])
  }
  mem[dust]
end
# This is not actually that much faster,
# but might as well keep the code to have for reference.
# Statically find the base address of the scaffold weight array inside the
# dust-update function, then evaluate the dust total arithmetically for
# each scaffold cell without running the program at all.
# scaffold: [y, x] cells; width: grid width. Returns the dust total.
def auto_dust(mem, scaffold, width)
  _, dust_update = find_dust(mem)
  scaffold_base_addr = exactly_one(
    'scaffold base address',
    mem[dust_update].each_cons(8).with_index(dust_update.begin).flat_map { |insts, i|
      # We're looking for three instructions of this pattern:
      # write S11 S12 D1
      # write S21 S22 D2
      # anything S31 S32 ...
      op1, _, _, dst1, op2, _, _, dst2 = insts
      next [] unless [op1, op2].all? { |op| [1, 2].include?(op % 100) }
      # third instruction must be an array read (D2 must point to S31 or S32)
      next [] if op2 >= 20000
      next [] unless [i + 9, i + 10].include?(dst2)
      # second instruction must use result of the first (D1 must equal one of S21 or S22)
      next [] if dst1 == 0
      next [] unless insts[5, 2].include?(dst1)
      # Anything that looks like a base address offset
      insts[1, 2].zip(modes(op1)).filter_map { |v, mode| v if v > 0 && mode == 1 }
    },
  )
  # NOTE(review): the sum uses the address expression itself (no mem read) —
  # presumably the program defines each cell's dust value that way; confirm
  # before reusing on a different program image.
  scaffold.each_with_index.sum { |(y, x), i|
    scaffold_base_addr + x + y * width + x * y + i + 1
  }
end
# CLI: -ss runs the full game, -s runs it but teleports the robot,
# -v prints extra info. Input may be an Intcode program (contains commas)
# or an ASCII map (contains '#').
slower = ARGV.delete('-ss')
slow = ARGV.delete('-s')
verbose = ARGV.delete('-v')
input = (ARGV[0]&.include?(?,) ? ARGV[0] : ARGF.read)
if input.include?(?,)
  mem = input.split(?,).map(&method(:Integer)).freeze
  # Setting address 0 to 2 wakes the robot up.
  ic = Intcode.new([2] + mem.drop(1))
  if slow || slower
    ic.continue(input: [])
    map = read_ascii_map(ic.ascii_output)
  else
    map = read_intcode_map(mem)
  end
elsif input.include?(?#)
  map = read_ascii_map(input)
else
  raise 'Unknown kind of input'
end
robot_loc, scaffold, width, alignment_sum = map
p alignment_sum
# Part 2 shortcuts (only possible when we have the actual program):
# teleport the robot cell-by-cell, or compute the dust sum statically.
if ic && !slower
  if slow
    puts teleport_robot(ic, scaffold)
  else
    puts auto_dust(ic.mem, scaffold, width)
  end
  exit 0
end
# Rotate a [dy, dx] heading 90 degrees counterclockwise.
# (-1, 0) -> (0, -1) -> (1, 0) -> (0, 1) -> (-1, 0)
def left(dir)
  dy, dx = dir
  [-dx, dy]
end
# Rotate a [dy, dx] heading 90 degrees clockwise.
# (-1, 0) -> (0, 1) -> (1, 0) -> (0, -1) -> (-1, 0)
def right(dir)
  dy, dx = dir
  [dx, -dy]
end
# Walk the scaffold greedily: go straight as long as possible, then take
# the single available turn; record the path as turns and run lengths.
move = ->dir { robot_loc.zip(dir).map(&:sum) }
scaffold = scaffold.to_h { |x| [x, true] }
remain_scaffold = scaffold.dup
can_move = ->dir { scaffold[move[dir]] }
steps = []
robot_dir = [-1, 0]
until remain_scaffold.empty?
  moves = 0
  while can_move[robot_dir]
    robot_loc = move[robot_dir]
    moves += 1
    remain_scaffold.delete(robot_loc)
    next
  end
  steps << moves if moves > 0
  break if remain_scaffold.empty?
  can_turn = %i(left right).select { |dir| can_move[send(dir, robot_dir)] }
  if can_turn.size == 1
    steps << can_turn[0]
    robot_dir = send(can_turn[0], robot_dir)
    next
  end
  raise "need exactly one turn at #{robot_loc}, not #{can_turn}"
end
# Flatten to a string such as "RFFFL" (one F per forward step) for the
# substring-based compression below.
steps_str = steps.map { |x| {left: ?L, right: ?R}[x] || (?F * x) }.join
# Movement routines may be at most 20 characters when comma-joined.
MAX_LEN = 20
# Only meant for use when items will STRICTLY alternate between F and non-F.
# Not for use for displaying intermediate compression results, where we might have AA etc.,
# which would just show up as A.
# Collapse a strictly alternating move string (e.g. "RFFFL") into tokens:
# runs of F become their length, turns stay as letters.
def chunk(func_raw)
  func_raw.each_char.chunk(&:itself).map { |ch, run| ch == ?F ? run.length : ch }
end
# OK for use when showing intermediate compression results
# Like chunk, but safe on strings with adjacent non-F letters: runs of F
# become counts while every other character is kept individually.
def chunk2(func_raw)
  func_raw.each_char.chunk { |c| c == ?F }.flat_map { |is_f, run| is_f ? [run.size] : run }
end
# Recursively assign movement functions A/B/C so that the full move string
# `main` becomes a main routine of function calls, with every routine at
# most MAX_LEN characters when comma-joined.
# free_letters:     letters still unassigned.
# assigned_letters: function bodies fixed so far.
# split_moves/split_turns/split_turns2 enable progressively more aggressive
# substitutions only needed for pathological inputs.
# Returns all solutions found, each as [main_routine, A, B, C].
def compress(
  free_letters, main, assigned_letters = [],
  split_moves: false, split_turns: false, split_turns2: false,
  verbose: false
)
  # Base case: all letters assigned; succeed only if main is pure calls.
  if free_letters.empty?
    chars = main.chars
    return [] unless chars.all? { |c| (?A..?C).cover?(c) }
    main = chars.join(?,)
    return [] if main.size > MAX_LEN
    return [[main] + assigned_letters]
  end
  unless (start = main.chars.index { |c| !(?A..?C).cover?(c) })
    # Erm, I guess we don't need to assign anything more?
    return compress([], main, assigned_letters + free_letters.map { '' })
  end
  letter = free_letters.first
  # Enumerate every prefix of the uncompressed region that could become
  # this letter's body (bounded by MAX_LEN and existing calls).
  possible_lengths = []
  1.step { |len|
    break if (?A..?C).cover?(main[start + len - 1])
    break if start + len > main.size
    next if !split_moves && main[start + len - 1] == ?F && main[start + len] == ?F
    func_raw = main[start, len]
    func_chunks = chunk(func_raw)
    comma_joined_length = func_chunks.join(?,).size
    break if comma_joined_length > MAX_LEN
    possible_lengths << [func_raw, func_chunks, comma_joined_length]
  }
  # Longest candidates first.
  possible_lengths.reverse_each.flat_map { |func_raw, func_chunks, comma_joined_length|
    # If it ends on a number, consider adding a turn to this function and the opposite turn to the next.
    # Not necessary on askalski's input, but I remain convinced it's theoretically necessary.
    # See mk17.rb A,A,B,C,A,C,C,B,A R,10,L,6,L,10,R,6,L 6,L,4,L,10,R,10,L,8 R,8,L,10,L,6,R,6
    possible_funcs = if split_turns2 && func_chunks[-1].is_a?(Integer) && comma_joined_length + 2 <= MAX_LEN
      turn_pairs = [[nil, ''], [?L, ?R], [?R, ?L]]
      turn_pairs.map { |term_turn, add_turn|
        [
          func_raw,
          (func_chunks + (term_turn ? [term_turn] : [])).join(?,),
          letter + add_turn,
        ]
      }
    else
      [[func_raw, func_chunks.join(?,), letter]]
    end
    possible_funcs.flat_map { |func_raw, func_comma, replace_with|
      allowed_subs = [[func_raw, replace_with]]
      if split_turns && func_raw.size > 1
        # If it starts with a turn, also allow placing the opposite turn before.
        if func_raw.start_with?(?R)
          allowed_subs << [func_raw[1..-1], ?L + replace_with]
        elsif func_raw.start_with?(?L)
          allowed_subs << [func_raw[1..-1], ?R + replace_with]
        end
      end
      if split_turns2 && func_raw.size > 1 && replace_with.size > 1 && 'LR'.include?(func_comma[-1])
        # Function ends with a turn (such as A = 10,R), there are two choices:
        # Replace 10 with A,L (equivalent to 10,R,L = 10)
        # Replace 10,R with A
        allowed_subs << [func_raw + func_comma[-1], replace_with[0..-2]]
        allowed_subs << ['LR', '']
        allowed_subs << ['RL', '']
      end
      new_main = main.dup
      allowed_subs.each { |from, to|
        new_main.gsub!(from, to)
        # If it ends with a turn, allow replacing at the very end as well.
        if from.size > 1 && 'LR'.include?(from[-1])
          new_main.sub!(/#{from[0..-2]}$/, to)
        end
      }
      # Since function calls are irreducible,
      # prune search if we have too many of them.
      # (N function calls needs N-1 commas, so it's 2N-1)
      function_calls = new_main.chars.count { |c| (?A..?C).cover?(c) }
      next [] if function_calls * 2 - 1 > MAX_LEN
      # For debugging, put the expected letter assignments here.
      expected = [
        nil,
      ]
      right_track = (assigned_letters + [func_comma]).zip(expected).all? { |a, b| a == b }
      if verbose || right_track
        puts "#{chunk2(main).join(?,)}: assign #{letter} <- #{func_comma}, replace #{func_raw} w/ #{replace_with}, now #{chunk2(new_main).join(?,)}"
      end
      # NOTE(review): split_turns2 is not forwarded into the recursion —
      # looks deliberate (only needed at the first level?); confirm.
      compress(
        free_letters[1..-1], new_main, assigned_letters + [func_comma],
        verbose: verbose, split_moves: split_moves, split_turns: split_turns,
      )
    }
  }
end
# split_moves and split_turns only needed to solve some hard inputs:
# https://www.reddit.com/r/adventofcode/comments/ebz338/2019_day_17_part_2_pathological_pathfinding/
# Try it without them first, then try it if needed.
solns = compress(%w(A B C), steps_str)
solns = compress(%w(A B C), steps_str, split_moves: true, split_turns: true) if solns.empty?
if solns.empty?
  puts 'split_turns2 needed' if verbose
  solns = compress(%w(A B C), steps_str, split_moves: true, split_turns: true, split_turns2: true)
end
solns.each { |soln|
  # 'n' declines the continuous video feed before the routines run.
  soln << ?n
  if ic
    output = ic.dup.continue(input: soln).output
    # Values above 127 are not ASCII: that's the dust count.
    puts output.select { |x| x > 127 }
  end
}
if verbose
  puts solns
elsif !ic
  # Map-only input: we can't run anything, just report feasibility.
  puts solns.empty? ? 'impossible' : 'possible'
end
|
petertseng/adventofcode-rb-2019
|
14_space_stoichiometry.rb
|
# Expand the demand hash `things` (chemical => quantity) one reaction layer
# per recursion until only :ORE remains, then return the ore quantity.
# leftovers accumulates surplus from over-produced batches and is consumed
# before running more reactions (mutated across recursive calls).
# ceil: false keeps fractional (Rational) batch counts — useful for a
# lower bound on ore per fuel. verbose logs each expansion step.
def ore_to_make(things, leftovers = Hash.new(0), ceil: true, verbose: false)
  puts "make #{things} w/ leftovers #{leftovers.select { |_, v| v > 0 }}" if verbose
  return things[:ORE] if things.keys == [:ORE]
  demands = things.each_with_object(Hash.new(0)) { |(thing, amount_needed), acc|
    if thing == :ORE
      acc[:ORE] += amount_needed
      next
    end
    # Spend surplus from earlier batches before reacting anything new.
    if leftovers.has_key?(thing)
      reuse = [leftovers[thing], amount_needed].min
      amount_needed -= reuse
      leftovers[thing] -= reuse
    end
    recipe = RECIPES[thing]
    batches = Rational(amount_needed, recipe[:produced])
    batches = batches.ceil if ceil
    next if batches == 0
    leftovers[thing] += recipe[:produced] * batches - amount_needed
    recipe[:inputs].each { |input, qty| acc[input] += qty * batches }
  }
  ore_to_make(demands, leftovers, ceil: ceil, verbose: verbose)
end
# Parse a recipe component such as "10 ORE" into [:ORE, 10].
def name_and_amount(thing)
  qty, label = thing.split
  [label.to_sym, Integer(qty)]
end
RECIPES = {}
verbose = ARGV.delete('-v')
# Parse reaction lines like "7 A, 1 B => 2 C" into
# RECIPES[:C] = {produced: 2, inputs: {A: 7, B: 1}}.
ARGF.each_line.map { |l|
  inputs, output = l.split(' => ')
  output_name, output_amount = name_and_amount(output)
  # Each chemical must have exactly one producing reaction.
  if (existing = RECIPES[output_name])
    raise "#{output_name} already has #{existing}"
  end
  RECIPES[output_name] = {
    produced: output_amount,
    inputs: inputs.split(', ').to_h { |x| name_and_amount(x) }.freeze
  }.freeze
}
RECIPES.freeze
# Part 1: ore needed for a single fuel.
puts make_one = ore_to_make({FUEL: 1}, verbose: verbose)
trillion = 1_000_000_000_000
# The binary search still runs almost instantly even with bounds (1..trillion)
# so these tighter bounds aren't strictly necessary.
lower_bound = trillion / make_one
upper_bound = (trillion / ore_to_make({FUEL: 1}, ceil: false)).ceil
# Part 2: most fuel producible from a trillion ore — first amount whose ore
# cost exceeds the budget, minus one.
puts (lower_bound..upper_bound).bsearch { |x|
  ore_to_make({FUEL: x}) > trillion
} - 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.