CombinedText stringlengths 4 3.42M |
|---|
cask "scratch" do
version "3.12.0"
sha256 "7d811ca578a38850e518a0ab22318193201916e533b50b5ca065b97c87c959e1"
url "https://downloads.scratch.mit.edu/desktop/Scratch%20Desktop-#{version}.dmg"
name "Scratch"
desc "Programs interactive stories, games, and animations"
homepage "https://scratch.mit.edu/download"
depends_on macos: ">= :high_sierra"
app "Scratch Desktop.app"
end
Update scratch from 3.12.0 to 3.15.0 (#90063)
cask "scratch" do
version "3.15.0"
sha256 "fa3459ffdf01453e41aa8b9f0f43ad6da88e50006aa04d9b525370bc770b66f0"
url "https://downloads.scratch.mit.edu/desktop/Scratch%20Desktop-#{version}.dmg"
name "Scratch"
desc "Programs interactive stories, games, and animations"
homepage "https://scratch.mit.edu/download"
depends_on macos: ">= :high_sierra"
app "Scratch Desktop.app"
end
|
cask "session" do
version "1.6.4"
sha256 "0157d080aa404124920243601bb56f38a2567614f0af26df02371b23347cd69c"
url "https://github.com/loki-project/session-desktop/releases/download/v#{version}/session-desktop-mac-#{version}.dmg",
verified: "github.com/loki-project/session-desktop/"
name "Session"
desc "Onion routing based messenger"
homepage "https://getsession.org/"
livecheck do
url :url
strategy :github_latest
end
app "Session.app"
zap trash: [
"~/Library/Application Support/Session",
"~/Library/Caches/Session",
"~/Library/Preferences/com.loki-project.messenger-desktop.plist",
"~/Library/Saved Application State/com.loki-project.messenger-desktop.savedState",
]
end
Update Session from 1.6.4 to 1.6.5 (#106900)
cask "session" do
version "1.6.5"
sha256 "50eafa01642d18d0276cec324c0d01e3a8dc99fbb63148963e4dbd32bca05dfc"
url "https://github.com/loki-project/session-desktop/releases/download/v#{version}/session-desktop-mac-#{version}.dmg",
verified: "github.com/loki-project/session-desktop/"
name "Session"
desc "Onion routing based messenger"
homepage "https://getsession.org/"
livecheck do
url :url
strategy :github_latest
end
app "Session.app"
zap trash: [
"~/Library/Application Support/Session",
"~/Library/Caches/Session",
"~/Library/Preferences/com.loki-project.messenger-desktop.plist",
"~/Library/Saved Application State/com.loki-project.messenger-desktop.savedState",
]
end
|
cask "sparrow" do
version "1.5.0"
sha256 "a4deab8cb164f0375344462be80ab2a59a5177029efc50cea4f458e6de956bee"
url "https://github.com/sparrowwallet/sparrow/releases/download/#{version}/Sparrow-#{version}.dmg",
verified: "github.com/sparrowwallet/sparrow/"
name "Sparrow Bitcoin Wallet"
desc "Bitcoin wallet application"
homepage "https://sparrowwallet.com/"
app "Sparrow.app"
zap trash: "~/.sparrow"
end
Update sparrow from 1.5.0 to 1.5.1 (#112317)
cask "sparrow" do
version "1.5.1"
sha256 "f6e2279642d2bfbcee253fdc5d0c47957b742a2bbdb81817138457fa6a506a12"
url "https://github.com/sparrowwallet/sparrow/releases/download/#{version}/Sparrow-#{version}.dmg",
verified: "github.com/sparrowwallet/sparrow/"
name "Sparrow Bitcoin Wallet"
desc "Bitcoin wallet application"
homepage "https://sparrowwallet.com/"
app "Sparrow.app"
zap trash: "~/.sparrow"
end
|
cask "texmacs" do
arch = Hardware::CPU.intel? ? "" : "-arm"
version "2.1.2"
if Hardware::CPU.intel?
sha256 "db1f9a525554d76794e0339cc19fb4d45eee79bce5f7a176c8dc8b8667181b08"
else
sha256 "d0a5abf3dafba31073ffc3bbfbeff5300453954a54495affee8a16a2f9196587"
end
url "https://ftp.texmacs.org/TeXmacs/tmftp/macos/TeXmacs-#{version}#{arch}.dmg"
name "GNU TeXmacs"
desc "Scientific editing platform"
homepage "https://www.texmacs.org/"
livecheck do
url "http://ftp.texmacs.org/TeXmacs/appcast/macos.xml"
strategy :sparkle
end
app "TeXmacs.app"
zap trash: [
"~/.TeXmacs",
"~/Library/Preferences/org.texmacs.TeXmacs.plist",
"~/Library/Saved Application State/org.texmacs.TeXmacs.savedState",
]
end
texmacs: use `arch` and `on_arch_conditional` DSLs
cask "texmacs" do
arch arm: "-arm"
version "2.1.2"
if Hardware::CPU.intel?
sha256 "db1f9a525554d76794e0339cc19fb4d45eee79bce5f7a176c8dc8b8667181b08"
else
sha256 "d0a5abf3dafba31073ffc3bbfbeff5300453954a54495affee8a16a2f9196587"
end
url "https://ftp.texmacs.org/TeXmacs/tmftp/macos/TeXmacs-#{version}#{arch}.dmg"
name "GNU TeXmacs"
desc "Scientific editing platform"
homepage "https://www.texmacs.org/"
livecheck do
url "http://ftp.texmacs.org/TeXmacs/appcast/macos.xml"
strategy :sparkle
end
app "TeXmacs.app"
zap trash: [
"~/.TeXmacs",
"~/Library/Preferences/org.texmacs.TeXmacs.plist",
"~/Library/Saved Application State/org.texmacs.TeXmacs.savedState",
]
end
|
# Homebrew cask (legacy single-quote style): TeXShop 3.87.
# `version.no_dots` turns "3.87" into "387" for the zip filename; the
# `appcast` checkpoint pins the appcast content at the time of this update.
cask 'texshop' do
version '3.87'
sha256 'fe1164aa9c8a4898b390ea00568b74a42b29ab7e91a2c872198f40d66487619b'
url "http://pages.uoregon.edu/koch/texshop/texshop-64/texshop#{version.no_dots}.zip"
appcast 'http://pages.uoregon.edu/koch/texshop/texshop-64/texshopappcast.xml',
checkpoint: '792050c8090854eb117d2abded51517f626e78512d9e9cafe2a3d27aa44f5944'
name 'TeXShop'
homepage 'http://pages.uoregon.edu/koch/texshop/'
depends_on macos: '>= :mountain_lion'
app 'TeXShop.app'
end
Update texshop to 3.88 (#38726)
# Homebrew cask: TeXShop, bumped to 3.88 (version, sha256, appcast checkpoint).
cask 'texshop' do
version '3.88'
sha256 '96ee307a8062f4ca69b5b7fb517b0456a25957dbd02f19af6041284c5457e9b4'
url "http://pages.uoregon.edu/koch/texshop/texshop-64/texshop#{version.no_dots}.zip"
appcast 'http://pages.uoregon.edu/koch/texshop/texshop-64/texshopappcast.xml',
checkpoint: '37a73f8d3a4847a4ce21e3a14a9341811b317bea01fd4755bfe2152362a85d0f'
name 'TeXShop'
homepage 'http://pages.uoregon.edu/koch/texshop/'
depends_on macos: '>= :mountain_lion'
app 'TeXShop.app'
end
|
cask "thorium" do
version "1.5.0"
sha256 "6c206707eee3a3749ab7283ef4eeefa97f29c1563890e3eed800fd0a7358c0dc"
url "https://github.com/edrlab/thorium-reader/releases/download/v#{version}/Thorium-#{version}.dmg",
verified: "github.com/edrlab/thorium-reader/"
name "Thorium Reader"
desc "Epub reader"
homepage "https://www.edrlab.org/software/thorium-reader/"
livecheck do
url :url
strategy :github_latest
end
app "Thorium.app"
zap trash: [
"~/Library/Application Support/EDRLab.ThoriumReader",
"~/Library/Preferences/io.github.edrlab.thorium.plist",
]
end
Update thorium from 1.5.0 to 1.6.0 (#101524)
cask "thorium" do
version "1.6.0"
sha256 "c28a07d42675dcd373a2b830322eb9fa93c2eb2eef7658733393f502f8a9214f"
url "https://github.com/edrlab/thorium-reader/releases/download/v#{version}/Thorium-#{version}.dmg",
verified: "github.com/edrlab/thorium-reader/"
name "Thorium Reader"
desc "Epub reader"
homepage "https://www.edrlab.org/software/thorium-reader/"
livecheck do
url :url
strategy :github_latest
end
app "Thorium.app"
zap trash: [
"~/Library/Application Support/EDRLab.ThoriumReader",
"~/Library/Preferences/io.github.edrlab.thorium.plist",
]
end
|
cask "thunder" do
version "4.2.1.65254"
sha256 "7d9f5fc6248ce8f9b0b283705469a66ea462eb184e49af4cd697da67bd862b84"
url "https://down.sandai.net/mac/thunder_#{version}.dmg",
verified: "down.sandai.net/mac/"
name "Thunder"
name "迅雷"
desc "VPN and WiFi proxy"
homepage "https://www.xunlei.com/"
livecheck do
url "https://dl.xunlei.com"
regex(/thunder[._-](\d+(?:\.\d+)+)\.dmg/i)
end
auto_updates true
depends_on macos: ">= :yosemite"
app "Thunder.app"
zap trash: [
"~/Library/Application Support/Thunder",
"~/Library/Caches/com.xunlei.Thunder",
"~/Library/Caches/com.xunlei.XLPlayer",
"~/Library/Cookies/com.xunlei.Thunder.binarycookies",
"~/Library/Preferences/com.xunlei.Thunder.loginSDK.plist",
"~/Library/Preferences/com.xunlei.Thunder.plist",
"~/Library/Preferences/com.xunlei.XLPlayer.plist",
"~/Library/Saved Application State/com.xunlei.Thunder.savedState",
"~/Library/Saved Application State/com.xunlei.XLPlayer.savedState",
"~/Library/WebKit/com.xunlei.Thunder",
]
end
thunder: correct minimum macOS
cask "thunder" do
version "4.2.1.65254"
sha256 "7d9f5fc6248ce8f9b0b283705469a66ea462eb184e49af4cd697da67bd862b84"
url "https://down.sandai.net/mac/thunder_#{version}.dmg",
verified: "down.sandai.net/mac/"
name "Thunder"
name "迅雷"
desc "VPN and WiFi proxy"
homepage "https://www.xunlei.com/"
livecheck do
url "https://dl.xunlei.com"
regex(/thunder[._-](\d+(?:\.\d+)+)\.dmg/i)
end
auto_updates true
depends_on macos: ">= :high_sierra"
app "Thunder.app"
zap trash: [
"~/Library/Application Support/Thunder",
"~/Library/Caches/com.xunlei.Thunder",
"~/Library/Caches/com.xunlei.XLPlayer",
"~/Library/Cookies/com.xunlei.Thunder.binarycookies",
"~/Library/Preferences/com.xunlei.Thunder.loginSDK.plist",
"~/Library/Preferences/com.xunlei.Thunder.plist",
"~/Library/Preferences/com.xunlei.XLPlayer.plist",
"~/Library/Saved Application State/com.xunlei.Thunder.savedState",
"~/Library/Saved Application State/com.xunlei.XLPlayer.savedState",
"~/Library/WebKit/com.xunlei.Thunder",
]
end
|
# Homebrew cask (legacy style with `license` and `zap delete:`): Trailer 1.4.1.
cask 'trailer' do
version '1.4.1'
sha256 '218cac2f18847306292d1046cf416e697bf463d59c4e440e5cf412178a6857a2'
url "https://ptsochantaris.github.io/trailer/trailer#{version.no_dots}.zip"
appcast 'https://ptsochantaris.github.io/trailer/appcast.xml',
checkpoint: 'c0c76e12e4d1bc700e73cd37dea74e95103a483741ed43b2bdd9abafc9b3de49'
name 'Trailer'
homepage 'https://ptsochantaris.github.io/trailer/'
license :mit
app 'Trailer.app'
zap delete: [
'~/Library/Application Support/com.housetrip.Trailer',
'~/Library/Caches/com.housetrip.Trailer',
'~/Library/Preferences/com.housetrip.Trailer.plist',
]
end
updated trailer (1.4.2) (#22097)
# Homebrew cask: Trailer, bumped to 1.4.2 (version, sha256, appcast checkpoint).
cask 'trailer' do
version '1.4.2'
sha256 '5dcf11adda16d08e26616eb2df672c06d0b40ccd15cbc2c9454cd946a10ff3fa'
url "https://ptsochantaris.github.io/trailer/trailer#{version.no_dots}.zip"
appcast 'https://ptsochantaris.github.io/trailer/appcast.xml',
checkpoint: 'a1371c4826b3cbc6cdb8cb089332192d9186d029248de3da6ca48d0103bbcfeb'
name 'Trailer'
homepage 'https://ptsochantaris.github.io/trailer/'
license :mit
app 'Trailer.app'
zap delete: [
'~/Library/Application Support/com.housetrip.Trailer',
'~/Library/Caches/com.housetrip.Trailer',
'~/Library/Preferences/com.housetrip.Trailer.plist',
]
end
|
# Homebrew cask (legacy style): TruFont 0.3.0, tracked via GitHub releases.atom.
cask 'trufont' do
version '0.3.0'
sha256 'dd274baeef87cf43971af7d6e4c845e384c3dc0d15e1888e9483aa9959804f90'
url "https://github.com/trufont/trufont/releases/download/#{version}/TruFont.app.zip"
appcast 'https://github.com/trufont/trufont/releases.atom',
checkpoint: 'df73f40f726b307dd7746ce4bdff4cf3d8882395c3ad972e8cf1ec0be4bf2a39'
name 'TruFont'
homepage 'https://trufont.github.io/'
license :oss
app 'TruFont.app'
end
updated trufont (0.4.0) (#20742)
# Homebrew cask: TruFont, bumped to 0.4.0 (version, sha256, appcast checkpoint).
cask 'trufont' do
version '0.4.0'
sha256 '05a3b3b9b9188dfe7af9fc7c3d19dc301e7c877ec249dce7f01ac183e7a8af27'
url "https://github.com/trufont/trufont/releases/download/#{version}/TruFont.app.zip"
appcast 'https://github.com/trufont/trufont/releases.atom',
checkpoint: '0d89b0382b94ce03d1b09b053773413259850682263fe0f0fe06b1c3efd240fa'
name 'TruFont'
homepage 'https://trufont.github.io/'
license :oss
app 'TruFont.app'
end
|
cask "usenapp" do
version "1.23,361"
sha256 "dbec524dcc13128ea3104ae4daf1c1e76d79432a56dfc6ce232d6f08c6460a12"
url "https://www.usenapp.com/download/Usenapp-#{version.csv.first}.dmg"
name "Usenapp"
desc "Newsreader and Usenet client"
homepage "https://www.usenapp.com/"
livecheck do
url "https://www.usenapp.com/download/appcast_unp1.xml"
strategy :sparkle
end
depends_on macos: ">= :el_capitan"
app "Usenapp.app"
zap trash: [
"~/Library/Application Support/Usenapp",
"~/Library/Caches/com.xoroxsoft.usenapp",
"~/Library/Preferences/com.xoroxsoft.usenapp.plist",
"~/Library/Saved Application State/com.xoroxsoft.usenapp.savedState",
]
end
usenapp 1.24,362
* Update usenapp from 1.23 to 1.24
* Bump min OS
Closes #135286.
Signed-off-by: Sean Molenaar <2b250e3fea88cfef248b497ad5fc17f7dc435154@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
cask "usenapp" do
version "1.24,362"
sha256 "17fffaddfa3a75c0901af015fcc6ca162fd5a88ab5535e2a6ac976d8d9ccf299"
url "https://www.usenapp.com/download/Usenapp-#{version.csv.first}.dmg"
name "Usenapp"
desc "Newsreader and Usenet client"
homepage "https://www.usenapp.com/"
livecheck do
url "https://www.usenapp.com/download/appcast_unp1.xml"
strategy :sparkle
end
depends_on macos: ">= :high_sierra"
app "Usenapp.app"
zap trash: [
"~/Library/Application Support/Usenapp",
"~/Library/Caches/com.xoroxsoft.usenapp",
"~/Library/Preferences/com.xoroxsoft.usenapp.plist",
"~/Library/Saved Application State/com.xoroxsoft.usenapp.savedState",
]
end
|
# Very old class-based cask for Virtaal 0.7.1 Beta 2.
# NOTE(review): the same `version.sub(%r{^(\d+\.\d+\.\d+).*},'\1')` expression
# (strips the "b2" suffix, yielding "0.7.1") is duplicated inside the URL.
class Virtaal < Cask
version '0.7.1b2'
sha256 '41fec069ca06eb627c75b8d66460110b7970b0894e19df4f41510ac7b91bdbd0'
url "https://downloads.sourceforge.net/project/translate/Virtaal/#{version.sub(%r{^(\d+\.\d+\.\d+).*},'\1')}/Virtaal-#{version.sub(%r{^(\d+\.\d+\.\d+).*},'\1')}-Mac-Beta-2.dmg"
homepage 'http://virtaal.translatehouse.org/'
license :oss
app 'Virtaal.app'
end
new-style header in virtaal
# Same Virtaal cask migrated to the v1 block-style header; stanzas unchanged.
cask :v1 => 'virtaal' do
version '0.7.1b2'
sha256 '41fec069ca06eb627c75b8d66460110b7970b0894e19df4f41510ac7b91bdbd0'
url "https://downloads.sourceforge.net/project/translate/Virtaal/#{version.sub(%r{^(\d+\.\d+\.\d+).*},'\1')}/Virtaal-#{version.sub(%r{^(\d+\.\d+\.\d+).*},'\1')}-Mac-Beta-2.dmg"
homepage 'http://virtaal.translatehouse.org/'
license :oss
app 'Virtaal.app'
end
|
# Homebrew cask: Vivaldi browser 1.9.818.49; auto_updates means the app
# updates itself, so Homebrew only handles install/uninstall.
cask 'vivaldi' do
version '1.9.818.49'
sha256 '1a076f2937765d411455cdae7dfbb453693dee7dd220f7f53c3dae0f12d377f0'
url "https://downloads.vivaldi.com/stable/Vivaldi.#{version}.dmg"
appcast 'https://update.vivaldi.com/update/1.0/mac/appcast.xml',
checkpoint: '00bbbbcca01ecf1bf35819bb4fe78f7008c75e12061a4244d7c8a50f2cc298d3'
name 'Vivaldi'
homepage 'https://vivaldi.com/'
auto_updates true
app 'Vivaldi.app'
zap delete: [
'~/Library/Preferences/com.vivaldi.Vivaldi.plist',
'~/Library/Application Support/Vivaldi',
'~/Library/Caches/Vivaldi',
'~/Library/Caches/com.vivaldi.Vivaldi',
'~/Library/Saved Application State/com.vivaldi.Vivaldi.savedState',
]
end
Update vivaldi to 1.9.818.50 (#34294)
# Homebrew cask: Vivaldi, bumped to 1.9.818.50 (version, sha256, checkpoint).
cask 'vivaldi' do
version '1.9.818.50'
sha256 '600d3a50ab462552f67e8fb44f5e5e833c73cf0b7efb16da4c3e9bcba754b37f'
url "https://downloads.vivaldi.com/stable/Vivaldi.#{version}.dmg"
appcast 'https://update.vivaldi.com/update/1.0/mac/appcast.xml',
checkpoint: 'dc48175d5d62efbbec9fa297e50788343de90a6d80750f924bd91b2985f3c08e'
name 'Vivaldi'
homepage 'https://vivaldi.com/'
auto_updates true
app 'Vivaldi.app'
zap delete: [
'~/Library/Preferences/com.vivaldi.Vivaldi.plist',
'~/Library/Application Support/Vivaldi',
'~/Library/Caches/Vivaldi',
'~/Library/Caches/com.vivaldi.Vivaldi',
'~/Library/Saved Application State/com.vivaldi.Vivaldi.savedState',
]
end
|
cask "wavebox" do
arch arm: "macarm64", intel: "mac"
version "10.106.2.2"
sha256 arm: "c21a37521d6dbac5c1d55a853f2ef095c7c8bbeb8b1269b1abbe4c470f8ee7f7",
intel: "d2ca43ccdc5d9a3102449619835b5073391ab1f7c6a55003eacb20a0ff2bfa2e"
url "https://download.wavebox.app/stable/#{arch}/Install%20Wavebox%20#{version}.dmg",
verified: "download.wavebox.app/"
name "Wavebox"
desc "Web browser"
homepage "https://wavebox.io/"
livecheck do
url "https://download.wavebox.app/latest/stable/mac"
strategy :header_match
end
auto_updates true
depends_on macos: ">= :sierra"
app "Wavebox.app"
uninstall quit: "io.wavebox.wavebox"
zap trash: [
"~/Library/Application Support/WaveboxApp",
"~/Library/Caches/com.bookry.wavebox",
"~/Library/Caches/WaveboxApp",
"~/Library/Preferences/com.bookry.wavebox.plist",
"~/Library/Saved Application State/com.bookry.wavebox.savedState",
]
end
wavebox 10.106.5.2
Update wavebox from 10.106.2.2 to 10.106.5.2
Closes #132727.
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
cask "wavebox" do
arch arm: "macarm64", intel: "mac"
version "10.106.5.2"
sha256 arm: "ffd67b1a53e5712fe729871d614e82f9d85230749fecf96f1b040fcd46bd08a7",
intel: "a87bf6df078f43a88b979d2737be8121f09061c8d3839c6d82445caf37f5f2fd"
url "https://download.wavebox.app/stable/#{arch}/Install%20Wavebox%20#{version}.dmg",
verified: "download.wavebox.app/"
name "Wavebox"
desc "Web browser"
homepage "https://wavebox.io/"
livecheck do
url "https://download.wavebox.app/latest/stable/mac"
strategy :header_match
end
auto_updates true
depends_on macos: ">= :sierra"
app "Wavebox.app"
uninstall quit: "io.wavebox.wavebox"
zap trash: [
"~/Library/Application Support/WaveboxApp",
"~/Library/Caches/com.bookry.wavebox",
"~/Library/Caches/WaveboxApp",
"~/Library/Preferences/com.bookry.wavebox.plist",
"~/Library/Saved Application State/com.bookry.wavebox.savedState",
]
end
|
cask "wezterm" do
version "20220904-064125,9a6cee2b"
sha256 "843db7d81692dbed78d2a65c5a1eebd47b699ddeec69e54130d97a083dcc4d24"
url "https://github.com/wez/wezterm/releases/download/#{version.csv.first}-#{version.csv.second}/WezTerm-macos-#{version.csv.first}-#{version.csv.second}.zip",
verified: "github.com/wez/wezterm/"
name "WezTerm"
desc "GPU-accelerated cross-platform terminal emulator and multiplexer"
homepage "https://wezfurlong.org/wezterm/"
livecheck do
url :url
regex(%r{href=.*?/WezTerm-macos-(\d{8}-\d{6})-([0-9a-f]+)\.zip}i)
strategy :github_latest do |page, regex|
match = page.match(regex)
next if match.blank?
"#{match[1]},#{match[2]}"
end
end
app "WezTerm-macos-#{version.csv.first}-#{version.csv.second}/WezTerm.app"
%w[
wezterm
wezterm-gui
wezterm-mux-server
strip-ansi-escapes
].each do |tool|
binary "#{appdir}/WezTerm.app/Contents/MacOS/#{tool}"
end
zap trash: [
"~/.config/wezterm/",
"~/.wezterm.lua",
"~/Library/Saved Application State/com.github.wez.wezterm.savedState",
]
end
Update wezterm from 20220904-064125 to 20220905-102802 (#131132)
cask "wezterm" do
version "20220905-102802,7d4b8249"
sha256 "05642ce3e12f6a45789aa161670a09cb2ef344f453bb5ea0d8c57f2afa3bce2c"
url "https://github.com/wez/wezterm/releases/download/#{version.csv.first}-#{version.csv.second}/WezTerm-macos-#{version.csv.first}-#{version.csv.second}.zip",
verified: "github.com/wez/wezterm/"
name "WezTerm"
desc "GPU-accelerated cross-platform terminal emulator and multiplexer"
homepage "https://wezfurlong.org/wezterm/"
livecheck do
url :url
regex(%r{href=.*?/WezTerm-macos-(\d{8}-\d{6})-([0-9a-f]+)\.zip}i)
strategy :github_latest do |page, regex|
match = page.match(regex)
next if match.blank?
"#{match[1]},#{match[2]}"
end
end
app "WezTerm-macos-#{version.csv.first}-#{version.csv.second}/WezTerm.app"
%w[
wezterm
wezterm-gui
wezterm-mux-server
strip-ansi-escapes
].each do |tool|
binary "#{appdir}/WezTerm.app/Contents/MacOS/#{tool}"
end
zap trash: [
"~/.config/wezterm/",
"~/.wezterm.lua",
"~/Library/Saved Application State/com.github.wez.wezterm.savedState",
]
end
|
# Homebrew cask: XQuartz 2.7.11 (pkg installer, not a drag-and-drop app).
# postflight wires AppleScript's X11 mapping and starts the XServer launch
# agent so no logout/login is required after install.
cask 'xquartz' do
version '2.7.11'
sha256 '32e50e8f1e21542b847041711039fa78d44febfed466f834a9281c44d75cd6c3'
# bintray.com/xquartz was verified as official when first introduced to the cask
url "https://dl.bintray.com/xquartz/downloads/XQuartz-#{version}.dmg"
appcast 'https://www.xquartz.org/releases/sparkle/release.xml',
checkpoint: 'da07c258696e2593cbf3f6a451e7125db17a1d70f4f3135e617ba247cdb27a54'
name 'XQuartz'
homepage 'https://www.xquartz.org/'
auto_updates true
conflicts_with cask: 'xquartz-beta'
pkg 'XQuartz.pkg'
postflight do
Pathname.new(File.expand_path('~')).join('Library', 'Logs').mkpath
# Set default path to X11 to avoid the need of manual setup
system_command '/usr/bin/defaults', args: ['write', 'com.apple.applescript', 'ApplicationMap', '-dict-add', 'X11', 'file://localhost/Applications/Utilities/XQuartz.app/']
# Load & start XServer to avoid the need of relogin
system_command '/bin/launchctl', args: ['load', '/Library/LaunchAgents/org.macosforge.xquartz.startx.plist']
end
# Uninstall must quit X11, unload both launch agents, and forget the pkg
# receipt before deleting the installed trees.
uninstall quit: 'org.macosforge.xquartz.X11',
launchctl: [
'org.macosforge.xquartz.startx',
'org.macosforge.xquartz.privileged_startx',
],
pkgutil: 'org.macosforge.xquartz.pkg',
delete: [
'/opt/X11',
'/private/etc/manpaths.d/40-XQuartz',
'/private/etc/paths.d/40-XQuartz',
]
zap trash: [
'~/.Xauthority',
'~/Library/Application Support/XQuartz',
'~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/org.macosforge.xquartz.x11.sfl*',
'~/Library/Caches/org.macosforge.xquartz.X11',
'~/Library/Cookies/org.macosforge.xquartz.X11.binarycookies',
'~/Library/Logs/X11/org.macosforge.xquartz.log',
'~/Library/Logs/X11/org.macosforge.xquartz.log.old',
'~/Library/Preferences/org.macosforge.xquartz.X11.plist',
'~/Library/Saved Application State/org.macosforge.xquartz.X11.savedState',
],
rmdir: [
'~/.fonts',
'~/Library/Logs/X11',
]
end
Update xquartz: remove conflict (#43928)
# Homebrew cask: XQuartz 2.7.11 — identical to the previous revision except
# that the `conflicts_with cask: 'xquartz-beta'` stanza has been removed.
cask 'xquartz' do
version '2.7.11'
sha256 '32e50e8f1e21542b847041711039fa78d44febfed466f834a9281c44d75cd6c3'
# bintray.com/xquartz was verified as official when first introduced to the cask
url "https://dl.bintray.com/xquartz/downloads/XQuartz-#{version}.dmg"
appcast 'https://www.xquartz.org/releases/sparkle/release.xml',
checkpoint: 'da07c258696e2593cbf3f6a451e7125db17a1d70f4f3135e617ba247cdb27a54'
name 'XQuartz'
homepage 'https://www.xquartz.org/'
auto_updates true
pkg 'XQuartz.pkg'
postflight do
Pathname.new(File.expand_path('~')).join('Library', 'Logs').mkpath
# Set default path to X11 to avoid the need of manual setup
system_command '/usr/bin/defaults', args: ['write', 'com.apple.applescript', 'ApplicationMap', '-dict-add', 'X11', 'file://localhost/Applications/Utilities/XQuartz.app/']
# Load & start XServer to avoid the need of relogin
system_command '/bin/launchctl', args: ['load', '/Library/LaunchAgents/org.macosforge.xquartz.startx.plist']
end
uninstall quit: 'org.macosforge.xquartz.X11',
launchctl: [
'org.macosforge.xquartz.startx',
'org.macosforge.xquartz.privileged_startx',
],
pkgutil: 'org.macosforge.xquartz.pkg',
delete: [
'/opt/X11',
'/private/etc/manpaths.d/40-XQuartz',
'/private/etc/paths.d/40-XQuartz',
]
zap trash: [
'~/.Xauthority',
'~/Library/Application Support/XQuartz',
'~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/org.macosforge.xquartz.x11.sfl*',
'~/Library/Caches/org.macosforge.xquartz.X11',
'~/Library/Cookies/org.macosforge.xquartz.X11.binarycookies',
'~/Library/Logs/X11/org.macosforge.xquartz.log',
'~/Library/Logs/X11/org.macosforge.xquartz.log.old',
'~/Library/Preferences/org.macosforge.xquartz.X11.plist',
'~/Library/Saved Application State/org.macosforge.xquartz.X11.savedState',
],
rmdir: [
'~/.fonts',
'~/Library/Logs/X11',
]
end
|
# Homebrew cask: YouView 0.6.1. NOTE(review): the URL is unversioned, so the
# sha256 will break whenever upstream replaces YouView.zip.
cask 'youview' do
version '0.6.1'
sha256 '95538a324d9344ccd1eb46f4b4dbdf9cb2dc6d1259794b14e02683c35660d5dd'
url 'https://download.mrgeckosmedia.com/YouView.zip'
name 'YouView'
homepage 'https://mrgeckosmedia.com/applications/info/YouView'
license :oss
app 'YouView.app'
end
updated youview (0.6.1)
# Homebrew cask: YouView 0.6.1 — same payload; this revision adds an appcast
# (release-notes page + checkpoint) for update detection.
cask 'youview' do
version '0.6.1'
sha256 '95538a324d9344ccd1eb46f4b4dbdf9cb2dc6d1259794b14e02683c35660d5dd'
url 'https://download.mrgeckosmedia.com/YouView.zip'
appcast 'https://mrgeckosmedia.com/applications/releasenotes/YouView',
checkpoint: '37a9df42602c6f4c3a135691e75a30cf8063474df96ce5197b246138146eaa08'
name 'YouView'
homepage 'https://mrgeckosmedia.com/applications/info/YouView'
license :oss
app 'YouView.app'
end
|
Added cask for 0ad
Due to Ruby's limitations on class names, I had to refer to this game
as "Zero-AD". If anyone knows how to make this work with the command
`$ brew cask install 0ad`, please let me know!
# Earliest-style class-based cask for the 0 A.D. game (named ZeroAd because a
# Ruby class name cannot start with a digit). Uses the long-retired sha1 and
# `link` stanzas of the original Cask DSL.
class ZeroAd < Cask
url 'http://releases.wildfiregames.com/0ad-0.0.14-alpha-osx64.dmg'
homepage 'http://www.play0ad.com/'
version '0.0.14-alpha'
sha1 'c6c1b630a7117210d217da1076a18faf988c86a3'
link '0ad.app'
end
|
# CocoaPods podspec: Cenarius 2.0.3, an iOS hybrid-framework container.
# NOTE(review): the nested "Cenarius/**/**/..." globs are redundant — a single
# "Cenarius/**/*.{h,m}" already matches all depths; left as-is (generated/legacy).
Pod::Spec.new do |s|
s.name = "Cenarius"
s.version = "2.0.3"
s.summary = "Mobile Hybrid Framework Cenarius iOS Container."
s.homepage = "https://github.com/macula-projects/cenarius-ios"
s.license = "MIT"
s.author = { "M" => "myeveryheart@qq.com" }
s.platform = :ios, "8.0"
s.source = { :git => "https://github.com/macula-projects/cenarius-ios.git", :tag => "#{s.version}" }
s.source_files = "Cenarius.{h,m}","Cenarius/**/*.{h,m}","Cenarius/**/**/*.{h,m}","Cenarius/**/**/**/*.{h,m}","Cenarius/**/**/**/**/*.{h,m}","Cenarius/**/**/**/**/**/*.{h,m}"
s.frameworks = "Foundation","UIKit","CFNetwork"
s.dependency "AFNetworking"
s.requires_arc = true
end
2.0.4
# CocoaPods podspec: Cenarius, bumped to 2.0.4 (version/tag only).
Pod::Spec.new do |s|
s.name = "Cenarius"
s.version = "2.0.4"
s.summary = "Mobile Hybrid Framework Cenarius iOS Container."
s.homepage = "https://github.com/macula-projects/cenarius-ios"
s.license = "MIT"
s.author = { "M" => "myeveryheart@qq.com" }
s.platform = :ios, "8.0"
s.source = { :git => "https://github.com/macula-projects/cenarius-ios.git", :tag => "#{s.version}" }
s.source_files = "Cenarius.{h,m}","Cenarius/**/*.{h,m}","Cenarius/**/**/*.{h,m}","Cenarius/**/**/**/*.{h,m}","Cenarius/**/**/**/**/*.{h,m}","Cenarius/**/**/**/**/**/*.{h,m}"
s.frameworks = "Foundation","UIKit","CFNetwork"
s.dependency "AFNetworking"
s.requires_arc = true
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for fastq-factory 0.1.8. Machine-generated (see header); the
# three dependency branches below are jeweler boilerplate for old RubyGems.
Gem::Specification.new do |s|
s.name = "fastq-factory"
s.version = "0.1.8"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Anthony Underwood"]
s.date = "2012-09-07"
s.description = "This tool can process fastq files, using fastq_quality_trimmer and quake to correct fastq files and then provide a quality asssessment of the data"
s.email = "anthony.underwood@hpa.org.uk"
s.executables = ["fastq-factory"]
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
s.files = [
".document",
"Gemfile",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"VERSION",
"bin/fastq-factory",
"fastq-factory.gemspec",
"lib/fastq-factory.rb",
"lib/fastq-remove-orphans.pl",
"lib/fastq_assessment.rb",
"lib/generate_quality_metrics.rb",
"lib/maths.rb",
"lib/miseq_run_stats.rb",
"lib/trim_and_correct.rb",
"test/helper.rb",
"test/test_fastq-factory.rb"
]
s.homepage = "http://github.com/hpa-bioinformatics/fastq-factory"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.19"
s.summary = "A tool to process and QC fastq files from illumina machines"
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<trollop>, ["~> 2.0"])
s.add_runtime_dependency(%q<nokogiri>, [">= 0"])
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
s.add_development_dependency(%q<bundler>, ["~> 1.1.5"])
s.add_development_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_development_dependency(%q<simplecov>, [">= 0"])
else
s.add_dependency(%q<trollop>, ["~> 2.0"])
s.add_dependency(%q<nokogiri>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["~> 1.1.5"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<simplecov>, [">= 0"])
end
else
s.add_dependency(%q<trollop>, ["~> 2.0"])
s.add_dependency(%q<nokogiri>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["~> 1.1.5"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<simplecov>, [">= 0"])
end
end
Regenerate gemspec for version 0.1.9
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Regenerated gemspec for fastq-factory 0.1.9 (only s.version changed).
Gem::Specification.new do |s|
s.name = "fastq-factory"
s.version = "0.1.9"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Anthony Underwood"]
s.date = "2012-09-07"
s.description = "This tool can process fastq files, using fastq_quality_trimmer and quake to correct fastq files and then provide a quality asssessment of the data"
s.email = "anthony.underwood@hpa.org.uk"
s.executables = ["fastq-factory"]
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
s.files = [
".document",
"Gemfile",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"VERSION",
"bin/fastq-factory",
"fastq-factory.gemspec",
"lib/fastq-factory.rb",
"lib/fastq-remove-orphans.pl",
"lib/fastq_assessment.rb",
"lib/generate_quality_metrics.rb",
"lib/maths.rb",
"lib/miseq_run_stats.rb",
"lib/trim_and_correct.rb",
"test/helper.rb",
"test/test_fastq-factory.rb"
]
s.homepage = "http://github.com/hpa-bioinformatics/fastq-factory"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.19"
s.summary = "A tool to process and QC fastq files from illumina machines"
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<trollop>, ["~> 2.0"])
s.add_runtime_dependency(%q<nokogiri>, [">= 0"])
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
s.add_development_dependency(%q<bundler>, ["~> 1.1.5"])
s.add_development_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_development_dependency(%q<simplecov>, [">= 0"])
else
s.add_dependency(%q<trollop>, ["~> 2.0"])
s.add_dependency(%q<nokogiri>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["~> 1.1.5"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<simplecov>, [">= 0"])
end
else
s.add_dependency(%q<trollop>, ["~> 2.0"])
s.add_dependency(%q<nokogiri>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["~> 1.1.5"])
s.add_dependency(%q<jeweler>, ["~> 1.8.4"])
s.add_dependency(%q<simplecov>, [">= 0"])
end
end
|
# Homebrew formula: installs the ngx_counter_zone source tree into share/ so
# it can be compiled into NGINX; nothing is built here.
class CounterZoneNginxModule < Formula
desc "Gives realtime counters using NGINX config"
homepage "https://github.com/theromis/ngx_counter_zone"
url "https://github.com/theromis/ngx_counter_zone/archive/4be9e36.tar.gz"
sha256 "9b841ed09ec39cbe27dab16e017433418c0546fba58bc48d853bed0dcae9e322"
def install
(share+"counter-zone-nginx-module").install Dir["*"]
end
end
counter-zone-nginx-module: bottle is unneeded
# Homebrew formula: same as the previous revision plus `bottle :unneeded`,
# since copying sources needs no prebuilt bottle.
class CounterZoneNginxModule < Formula
desc "Gives realtime counters using NGINX config"
homepage "https://github.com/theromis/ngx_counter_zone"
url "https://github.com/theromis/ngx_counter_zone/archive/4be9e36.tar.gz"
sha256 "9b841ed09ec39cbe27dab16e017433418c0546fba58bc48d853bed0dcae9e322"
bottle :unneeded
def install
(share+"counter-zone-nginx-module").install Dir["*"]
end
end
|
# Homebrew formula: gsettings-desktop-schemas 3.24.0, a standard
# autotools build. Schema compilation is deferred to post_install so the
# shared glib-2.0/schemas directory is compiled once per prefix.
class GsettingsDesktopSchemas < Formula
desc "GSettings schemas for desktop components"
homepage "https://download.gnome.org/sources/gsettings-desktop-schemas/"
url "https://download.gnome.org/sources/gsettings-desktop-schemas/3.24/gsettings-desktop-schemas-3.24.0.tar.xz"
sha256 "f6573a3f661d22ff8a001cc2421d8647717f1c0e697e342d03c6102f29bbbb90"
bottle do
cellar :any_skip_relocation
sha256 "b8a7f888ffff66f3f5806659a1bd0e91517ab95a12c439d0ec9551067e41ae72" => :sierra
sha256 "b8a7f888ffff66f3f5806659a1bd0e91517ab95a12c439d0ec9551067e41ae72" => :el_capitan
sha256 "b8a7f888ffff66f3f5806659a1bd0e91517ab95a12c439d0ec9551067e41ae72" => :yosemite
end
depends_on "pkg-config" => :build
depends_on "intltool" => :build
depends_on "gobject-introspection" => :build
depends_on "glib"
depends_on "gettext"
depends_on "libffi"
depends_on "python" if MacOS.version <= :mavericks
def install
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}",
"--disable-schemas-compile",
"--enable-introspection=yes"
system "make", "install"
end
def post_install
# manual schema compile step
system "#{Formula["glib"].opt_bin}/glib-compile-schemas", "#{HOMEBREW_PREFIX}/share/glib-2.0/schemas"
end
# Smoke test: the installed headers compile and link into a trivial program.
test do
(testpath/"test.c").write <<-EOS.undent
#include <gdesktop-enums.h>
int main(int argc, char *argv[]) {
return 0;
}
EOS
system ENV.cc, "-I#{HOMEBREW_PREFIX}/include/gsettings-desktop-schemas", "test.c", "-o", "test"
system "./test"
end
end
gsettings-desktop-schemas: update 3.24.0 bottle for Linuxbrew.
Closes Linuxbrew/homebrew-core#2146.
Signed-off-by: Shaun Jackman <b580dab3251a9622aba3803114310c23fdb42900@gmail.com>
# Homebrew formula for gsettings-desktop-schemas: the shared GSettings
# schemas consumed by GNOME desktop components. This revision adds a
# Linuxbrew (x86_64_linux) bottle alongside the macOS ones.
class GsettingsDesktopSchemas < Formula
  desc "GSettings schemas for desktop components"
  homepage "https://download.gnome.org/sources/gsettings-desktop-schemas/"
  url "https://download.gnome.org/sources/gsettings-desktop-schemas/3.24/gsettings-desktop-schemas-3.24.0.tar.xz"
  sha256 "f6573a3f661d22ff8a001cc2421d8647717f1c0e697e342d03c6102f29bbbb90"
  bottle do
    cellar :any_skip_relocation
    # The same bottle checksum is intentionally reused for all three macOS
    # versions; the Linux bottle is a separate artifact.
    sha256 "b8a7f888ffff66f3f5806659a1bd0e91517ab95a12c439d0ec9551067e41ae72" => :sierra
    sha256 "b8a7f888ffff66f3f5806659a1bd0e91517ab95a12c439d0ec9551067e41ae72" => :el_capitan
    sha256 "b8a7f888ffff66f3f5806659a1bd0e91517ab95a12c439d0ec9551067e41ae72" => :yosemite
    sha256 "2cf7e79ab5f580ef4ec2bec7c3d5baf2aa71cf816c4a0a675142e3c0b19ea02e" => :x86_64_linux
  end
  depends_on "pkg-config" => :build
  depends_on "intltool" => :build
  depends_on "gobject-introspection" => :build
  depends_on "glib"
  depends_on "gettext"
  depends_on "libffi"
  depends_on "python" if MacOS.version <= :mavericks
  def install
    # Schema compilation is deferred to post_install so the compiled cache
    # lands in HOMEBREW_PREFIX (shared across kegs) rather than this keg.
    system "./configure", "--disable-dependency-tracking",
                          "--disable-silent-rules",
                          "--prefix=#{prefix}",
                          "--disable-schemas-compile",
                          "--enable-introspection=yes"
    system "make", "install"
  end
  def post_install
    # manual schema compile step
    system "#{Formula["glib"].opt_bin}/glib-compile-schemas", "#{HOMEBREW_PREFIX}/share/glib-2.0/schemas"
  end
  test do
    # Compiling against the installed header is enough to prove the
    # schemas/headers were laid out correctly.
    (testpath/"test.c").write <<-EOS.undent
      #include <gdesktop-enums.h>
      int main(int argc, char *argv[]) {
        return 0;
      }
    EOS
    system ENV.cc, "-I#{HOMEBREW_PREFIX}/include/gsettings-desktop-schemas", "test.c", "-o", "test"
    system "./test"
  end
end
|
Improving hashes in Ruby
# http://www.codewars.com/kata/52c974a26e511108a70001cf/train/ruby
#
# Lets hash values be read via method calls (e.g. `h.name`), trying the
# symbol key first and falling back to the string key.
class Hash
  # Returns the value stored under +m+ (Symbol) or +m.to_s+ (String).
  #
  # Uses Hash#key? instead of value truthiness so that stored nil/false
  # values are returned correctly rather than tripping the fallback and
  # raising (the original `self[m] || self[m.to_s] || raise` broke for
  # falsy values). Unknown keys raise NoMethodError via super, the
  # idiomatic behavior for method_missing.
  def method_missing(m, *args, &block)
    if key?(m)
      self[m]
    elsif key?(m.to_s)
      self[m.to_s]
    else
      super
    end
  end

  # Keep respond_to? consistent with the dynamic readers above.
  def respond_to_missing?(m, include_private = false)
    key?(m) || key?(m.to_s) || super
  end
end
|
require 'spidr/actions'
require 'spidr/agent'
require 'spec_helper'
# Specs for the Spidr::Actions control-flow helpers:
# pause!/continue!, skip_link! and skip_page!.
#
# NOTE(review): these examples crawl the live spidr.rubyforge.org host over
# the network, so they are slow and depend on that host being reachable.
describe Spidr::Actions do
  before(:all) do
    @url = URI('http://spidr.rubyforge.org/')
  end
  it "should be able to pause spidering" do
    count = 0
    # pause! on the second fetched page; the agent should stop right there.
    agent = Agent.host('spidr.rubyforge.org') do |spider|
      spider.every_page do |page|
        count += 1
        spider.pause! if count >= 2
      end
    end
    agent.should be_paused
    agent.history.length.should == 2
  end
  it "should be able to continue spidering after being paused" do
    # Pause on the very first page, then resume and confirm it was visited.
    agent = Agent.new do |spider|
      spider.every_page do |page|
        spider.pause!
      end
    end
    agent.enqueue(@url)
    agent.continue!
    agent.visited?(@url).should == true
  end
  it "should allow skipping of enqueued links" do
    # skip_link! from every_url should keep the queue empty.
    agent = Agent.new do |spider|
      spider.every_url do |url|
        spider.skip_link!
      end
    end
    agent.enqueue(@url)
    agent.queue.should be_empty
  end
  it "should allow skipping of visited pages" do
    # skip_page! records the page in history but enqueues none of its links.
    agent = Agent.new do |spider|
      spider.every_page do |url|
        spider.skip_page!
      end
    end
    agent.visit_page(@url)
    agent.history.should == Set[@url]
    agent.queue.should be_empty
  end
end
Shorten the namespace.
require 'spidr/actions'
require 'spidr/agent'
require 'spec_helper'
# Specs for the Actions control-flow helpers (namespace shortened; Actions
# and Agent are resolved relative to Spidr): pause!/continue!, skip_link!
# and skip_page!.
#
# NOTE(review): these examples crawl the live spidr.rubyforge.org host over
# the network, so they are slow and depend on that host being reachable.
describe Actions do
  before(:all) do
    @url = URI('http://spidr.rubyforge.org/')
  end
  it "should be able to pause spidering" do
    count = 0
    # pause! on the second fetched page; the agent should stop right there.
    agent = Agent.host('spidr.rubyforge.org') do |spider|
      spider.every_page do |page|
        count += 1
        spider.pause! if count >= 2
      end
    end
    agent.should be_paused
    agent.history.length.should == 2
  end
  it "should be able to continue spidering after being paused" do
    # Pause on the very first page, then resume and confirm it was visited.
    agent = Agent.new do |spider|
      spider.every_page do |page|
        spider.pause!
      end
    end
    agent.enqueue(@url)
    agent.continue!
    agent.visited?(@url).should == true
  end
  it "should allow skipping of enqueued links" do
    # skip_link! from every_url should keep the queue empty.
    agent = Agent.new do |spider|
      spider.every_url do |url|
        spider.skip_link!
      end
    end
    agent.enqueue(@url)
    agent.queue.should be_empty
  end
  it "should allow skipping of visited pages" do
    # skip_page! records the page in history but enqueues none of its links.
    agent = Agent.new do |spider|
      spider.every_page do |url|
        spider.skip_page!
      end
    end
    agent.visit_page(@url)
    agent.history.should == Set[@url]
    agent.queue.should be_empty
  end
end
|
require "spec_helper"
# Test model persisted through the PouchDB adapter; exercises named
# attributes in the specs below.
class Widget < Vienna::Model
  adapter Vienna::PouchDBAdapter
  attributes :name, :part_number
end
# Second test model; used to verify that fetches are scoped to a single
# model type.
class Violet < Vienna::Model
  adapter Vienna::PouchDBAdapter
  attributes :owner
end
# Async (Opal) specs for the PouchDB adapter: find, create, update, delete
# and collection fetches. Assertions run inside nested `async` blocks
# because every PouchDB call completes via promise/callback.
describe Vienna::PouchDBAdapter do
  before do
    # Fresh randomly-named database per example to isolate state.
    Vienna::PouchDBAdapter.configure do |c|
      c.database_name = "test-database-#{rand(1337)}-#{rand(3771)}"
    end
  end
  after do
    # Clear class-level event handlers registered during the example so
    # they do not leak into the next one, then drop the database.
    ev = Widget.instance_variable_get("@eventable")
    ev[:refresh] = []
    ev[:pouchdb_error] = []
    db.destroy()
  end
  let(:db) { Vienna::PouchDBAdapter.database }
  let(:raw_doc) {
    { name: "Golden Cog", part_number: 1337 }
  }
  describe "#find" do
    it "has this test because it needs to exist for async to run don't know why"
    async "fills in a Model's data" do
      # Seed the raw document directly, then load it through the Model.
      db.put(raw_doc.merge(rbtype: "Widget", _id: "widget-1")).then do
        Widget.find("widget-1") do |w|
          async do
            expect(w.name).to eq("Golden Cog")
            expect(w.id).to eq("widget-1")
            expect(w.part_number).to eq(1337)
          end
        end
      end
    end
    async "triggers error if stored type doesn't match the Model's" do
      db.put(raw_doc.merge(_id: "widget-1", rbtype: "OtherType")).then do
        Widget.on :pouchdb_error do |error|
          async do
            expect(error).to match(/wrong type/i)
          end
        end
        Widget.find("widget-1")
      end
    end
  end
  describe "creating Records" do
    async "generates an id if one is not provided" do
      w = Widget.new(name: "New Shiny", part_number: 3771)
      expect(w.new_record?).to be(true)
      w.save do |cw|
        async do
          expect(cw.id).not_to be_nil
          expect(cw).to be(w)
          expect(w.new_record?).to be(false)
        end
      end
    end
    async "saves correctly if an id is provided" do
      w = Widget.new(raw_doc.merge(id: "widget-1"))
      expect(w.new_record?).to be(true)
      w.save do
        async do
          expect(w.id).to eq("widget-1")
          expect(w.new_record?).to be(false)
        end
      end
    end
    async "triggers the update event when created" do
      w = Widget.new(raw_doc)
      w.on :update do
        async do
          expect(true).to be(true)
        end
      end
      w.save
    end
    async "triggers pouchdb_error if something goes wrong on the pouch size" do
      # Saving two records with the same id forces a PouchDB conflict.
      w0 = Widget.new(raw_doc.merge(id: "widget-1"))
      w1 = Widget.new(raw_doc.merge(id: "widget-1"))
      w1.on :pouchdb_error do |error|
        async do
          expect(error).to match(/conflict/)
        end
      end
      w0.save do
        w1.save
      end
    end
  end
  describe "updating records" do
    async "changes data and the internal rev" do
      w = Widget.new(raw_doc)
      w.save do
        w.name = "Magic Cog"
        # _rev is PouchDB's revision token; it must change on every save.
        rev0 = w[:_vienna_pouchdb][:_rev]
        w.save do |uw|
          async do
            rev1 = w[:_vienna_pouchdb][:_rev]
            expect(uw.name).to eq("Magic Cog")
            expect(uw).to be(w)
            expect(rev1).not_to eq(rev0)
          end
        end
      end
    end
  end
  describe "deleting records" do
    async "really removes them from the database" do
      w = Widget.new(raw_doc.merge(_id: "widget-1"))
      w.save do
        w.destroy do
          # Fetching the deleted id should fail with a "missing" error.
          db.get("widget-1").fail do |e|
            async do
              expect(e.message).to match(/missing/)
            end
          end
        end
      end
    end
  end
  describe "fetching collections" do
    async "only includes documents of the Model's type" do
      Violet.new(owner: "Jessica").save do
        Widget.new(raw_doc).save do
          Widget.fetch do |ws|
            async do
              expect(ws.size).to eq(1)
              expect(ws.all? { |w| w.class == Widget }).to be(true)
            end
          end
        end
      end
    end
    async "triggers refresh event" do
      Widget.new(raw_doc).save do
        Widget.on :refresh do |d|
          async do
            expect(true).to be(true)
          end
        end
        Widget.fetch
      end
    end
  end
end
Adds change event test.
require "spec_helper"
# Test model persisted through the PouchDB adapter; exercises named
# attributes in the specs below.
class Widget < Vienna::Model
  adapter Vienna::PouchDBAdapter
  attributes :name, :part_number
end
# Second test model; used to verify that fetches are scoped to a single
# model type.
class Violet < Vienna::Model
  adapter Vienna::PouchDBAdapter
  attributes :owner
end
# Async (Opal) specs for the PouchDB adapter: find, create, update, delete
# and collection fetches. Assertions run inside nested `async` blocks
# because every PouchDB call completes via promise/callback.
describe Vienna::PouchDBAdapter do
  before do
    # Fresh randomly-named database per example to isolate state.
    Vienna::PouchDBAdapter.configure do |c|
      c.database_name = "test-database-#{rand(1337)}-#{rand(3771)}"
    end
  end
  after do
    # Clear class-level event handlers registered during the example so
    # they do not leak into the next one, then drop the database.
    ev = Widget.instance_variable_get("@eventable")
    # FIX: the list previously read `%i(refresh pouchdb_error :update)`;
    # inside a %i literal the extra colon produces the symbol :":update",
    # so :update handlers were never cleared between examples.
    %i(refresh pouchdb_error update).each do |e|
      ev[e] = []
    end
    db.destroy()
  end
  let(:db) { Vienna::PouchDBAdapter.database }
  let(:raw_doc) {
    { name: "Golden Cog", part_number: 1337 }
  }
  describe "#find" do
    it "has this test because it needs to exist for async to run don't know why"
    async "fills in a Model's data" do
      # Seed the raw document directly, then load it through the Model.
      db.put(raw_doc.merge(rbtype: "Widget", _id: "widget-1")).then do
        Widget.find("widget-1") do |w|
          async do
            expect(w.name).to eq("Golden Cog")
            expect(w.id).to eq("widget-1")
            expect(w.part_number).to eq(1337)
          end
        end
      end
    end
    async "triggers error if stored type doesn't match the Model's" do
      db.put(raw_doc.merge(_id: "widget-1", rbtype: "OtherType")).then do
        Widget.on :pouchdb_error do |error|
          async do
            expect(error).to match(/wrong type/i)
          end
        end
        Widget.find("widget-1")
      end
    end
  end
  describe "creating Records" do
    async "generates an id if one is not provided" do
      w = Widget.new(name: "New Shiny", part_number: 3771)
      expect(w.new_record?).to be(true)
      w.save do |cw|
        async do
          expect(cw.id).not_to be_nil
          expect(cw).to be(w)
          expect(w.new_record?).to be(false)
        end
      end
    end
    async "saves correctly if an id is provided" do
      w = Widget.new(raw_doc.merge(id: "widget-1"))
      expect(w.new_record?).to be(true)
      w.save do
        async do
          expect(w.id).to eq("widget-1")
          expect(w.new_record?).to be(false)
        end
      end
    end
    async "triggers the update event when created" do
      w = Widget.new(raw_doc)
      w.on :update do
        async do
          expect(true).to be(true)
        end
      end
      w.save
    end
    async "triggers pouchdb_error if something goes wrong on the pouch size" do
      # Saving two records with the same id forces a PouchDB conflict.
      w0 = Widget.new(raw_doc.merge(id: "widget-1"))
      w1 = Widget.new(raw_doc.merge(id: "widget-1"))
      w1.on :pouchdb_error do |error|
        async do
          expect(error).to match(/conflict/)
        end
      end
      w0.save do
        w1.save
      end
    end
  end
  describe "updating records" do
    async "changes data and the internal rev" do
      w = Widget.new(raw_doc)
      w.save do
        w.name = "Magic Cog"
        # _rev is PouchDB's revision token; it must change on every save.
        rev0 = w[:_vienna_pouchdb][:_rev]
        w.save do |uw|
          async do
            rev1 = w[:_vienna_pouchdb][:_rev]
            expect(uw.name).to eq("Magic Cog")
            expect(uw).to be(w)
            expect(rev1).not_to eq(rev0)
          end
        end
      end
    end
    # FIX: the body below was wrapped in a second, copy-pasted
    # `async "changes data and the internal rev" do ... end` declaration,
    # which defined a nested example instead of running the assertions.
    # The stray wrapper is removed so the change-event test actually runs.
    async "triggers the 'change' event" do
      w = Widget.new(raw_doc)
      w.on :change do
        async do
          expect(true).to be(true)
        end
      end
      w.save do
        w.name = "Magic Cog"
        w.save
      end
    end
  end
  describe "deleting records" do
    async "really removes them from the database" do
      w = Widget.new(raw_doc.merge(_id: "widget-1"))
      w.save do
        w.destroy do
          # Fetching the deleted id should fail with a "missing" error.
          db.get("widget-1").fail do |e|
            async do
              expect(e.message).to match(/missing/)
            end
          end
        end
      end
    end
  end
  describe "fetching collections" do
    async "only includes documents of the Model's type" do
      Violet.new(owner: "Jessica").save do
        Widget.new(raw_doc).save do
          Widget.fetch do |ws|
            async do
              expect(ws.size).to eq(1)
              expect(ws.all? { |w| w.class == Widget }).to be(true)
            end
          end
        end
      end
    end
    async "triggers refresh event" do
      Widget.new(raw_doc).save do
        Widget.on :refresh do |d|
          async do
            expect(true).to be(true)
          end
        end
        Widget.fetch
      end
    end
  end
end
|
require 'spec_helper'
require 'dm-core/spec/shared/adapter_spec'
require 'dm-do-adapter/spec/shared_spec'
require 'dm-migrations'
require 'dm-sqlite-adapter/spec/setup'
ENV['ADAPTER'] = 'sqlite'
ENV['ADAPTER_SUPPORTS'] = 'all'
# Specs for the DataMapper SQLite adapter: shared adapter behavior plus
# normalization of connection options (adapter name aliasing and the
# various ways a database path can be supplied).
describe 'DataMapper::Adapters::SqliteAdapter' do
  before :all do
    # NOTE(review): @adapter/@repository appear unused by the visible
    # examples; presumably the shared example groups below rely on them.
    @adapter = DataMapper::Spec.adapter
    @repository = DataMapper.repository(@adapter.name)
  end
  it_should_behave_like "An Adapter"
  it_should_behave_like "A DataObjects Adapter"
  describe "with 'sqlite' as adapter name" do
    # 'sqlite' is normalized to the canonical 'sqlite3' driver name.
    subject { DataMapper::Adapters::SqliteAdapter.new(:default, { :adapter => 'sqlite' }) }
    it { subject.options[:adapter].should == 'sqlite3' }
  end
  describe "with 'sqlite3' as adapter name" do
    subject { DataMapper::Adapters::SqliteAdapter.new(:default, { :adapter => 'sqlite3' }) }
    it { subject.options[:adapter].should == 'sqlite3' }
  end
  describe "with 'database' given as Symbol" do
    # :database / :path, Symbol or String key — all normalize to options[:path].
    subject { DataMapper::Adapters::SqliteAdapter.new(:default, { :adapter => 'sqlite', :database => :name }) }
    it { subject.options[:path].should == 'name' }
  end
  describe "with 'path' given as Symbol" do
    subject { DataMapper::Adapters::SqliteAdapter.new(:default, { :adapter => 'sqlite', :path => :name }) }
    it { subject.options[:path].should == 'name' }
  end
  describe "with 'database' given as String" do
    subject { DataMapper::Adapters::SqliteAdapter.new(:default, { :adapter => 'sqlite', 'database' => :name }) }
    it { subject.options[:path].should == 'name' }
  end
  describe "with 'path' given as String" do
    subject { DataMapper::Adapters::SqliteAdapter.new(:default, { :adapter => 'sqlite', 'path' => :name }) }
    it { subject.options[:path].should == 'name' }
  end
  describe "with blank 'path' and 'database' given as Symbol" do
    # A blank path must not shadow an explicit :database option.
    subject { DataMapper::Adapters::SqliteAdapter.new(:default, { :adapter => 'sqlite', 'path' => '', :database => :name }) }
    it { subject.options[:path].should == 'name' }
  end
end
Compatibility with https://github.com/datamapper/dm-core/pull/119
require 'spec_helper'
require 'dm-core/spec/shared/adapter_spec'
require 'dm-do-adapter/spec/shared_spec'
require 'dm-migrations'
require 'dm-sqlite-adapter/spec/setup'
ENV['ADAPTER'] = 'sqlite'
ENV['ADAPTER_SUPPORTS'] = 'all'
# Specs for the DataMapper SQLite adapter: shared adapter behavior plus
# normalization of connection options (adapter name aliasing and the
# various ways a database path can be supplied). This revision switches
# from before(:all) ivars to lazy `let` bindings.
describe 'DataMapper::Adapters::SqliteAdapter' do
  # NOTE(review): adapter/repository appear unused by the visible examples;
  # presumably the shared example groups below rely on them.
  let(:adapter) { DataMapper::Spec.adapter }
  let(:repository) { DataMapper.repository(adapter.name) }
  it_should_behave_like 'An Adapter'
  it_should_behave_like 'A DataObjects Adapter'
  describe 'with "sqlite" as adapter name' do
    # 'sqlite' is normalized to the canonical 'sqlite3' driver name.
    subject { DataMapper::Adapters::SqliteAdapter.new(:default, { :adapter => 'sqlite' }) }
    it { subject.options[:adapter].should == 'sqlite3' }
  end
  describe 'with "sqlite3" as adapter name' do
    subject { DataMapper::Adapters::SqliteAdapter.new(:default, { :adapter => 'sqlite3' }) }
    it { subject.options[:adapter].should == 'sqlite3' }
  end
  describe 'with "database" given as Symbol' do
    # :database / :path, Symbol or String key — all normalize to options[:path].
    subject { DataMapper::Adapters::SqliteAdapter.new(:default, { :adapter => 'sqlite', :database => :name }) }
    it { subject.options[:path].should == 'name' }
  end
  describe 'with "path" given as Symbol' do
    subject { DataMapper::Adapters::SqliteAdapter.new(:default, { :adapter => 'sqlite', :path => :name }) }
    it { subject.options[:path].should == 'name' }
  end
  describe 'with "database" given as String' do
    subject { DataMapper::Adapters::SqliteAdapter.new(:default, { :adapter => 'sqlite', 'database' => :name }) }
    it { subject.options[:path].should == 'name' }
  end
  describe 'with "path" given as String' do
    subject { DataMapper::Adapters::SqliteAdapter.new(:default, { :adapter => 'sqlite', 'path' => :name }) }
    it { subject.options[:path].should == 'name' }
  end
  describe 'with blank "path" and "database" given as Symbol' do
    # A blank path must not shadow an explicit :database option.
    subject { DataMapper::Adapters::SqliteAdapter.new(:default, { :adapter => 'sqlite', 'path' => '', :database => :name }) }
    it { subject.options[:path].should == 'name' }
  end
end
|
# -*- encoding: utf-8 -*-
# Auto-generated gemspec for ropenlaszlo 0.6.2. Regenerate rather than
# editing by hand.
Gem::Specification.new do |s|
  s.name = %q{ropenlaszlo}
  s.version = "0.6.2"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Oliver Steele"]
  s.date = %q{2009-03-23}
  s.description = %q{ROpenLaszlo is an interface to the OpenLaszlo compiler.}
  s.email = %q{steele@osteele.com}
  s.extra_rdoc_files = ["README.rdoc", "MIT-LICENSE", "CHANGES.rdoc", "TODO.rdoc"]
  s.files = ["lib/openlaszlo", "lib/openlaszlo/applet.rb", "lib/openlaszlo/compiler.rb", "lib/openlaszlo.rb", "lib/ropenlaszlo.rb", "lib/tasks", "lib/tasks/openlaszlo.rake", "test/compilation-error.lzx", "test/compilation-warning.lzx", "test/compiler_test.rb", "test/tasks", "test/tasks/Rakefile", "test/tasks/test.lzx", "test/tasks/test.swf", "test/test.lzx", "test/test_utils.rb", "README.rdoc", "MIT-LICENSE", "CHANGES.rdoc", "TODO.rdoc"]
  # NOTE(review): has_rdoc is deprecated/ignored by modern RubyGems.
  s.has_rdoc = true
  s.homepage = %q{http://github.com/osteele/ropenlaszlo}
  s.rdoc_options = ["--title", "ROpenLaszlo: Ruby interface to the OpenLaszlo compiler", "--exclude", "test/.*", "--inline-source", "--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubyforge_project = %q{ropenlaszlo}
  s.rubygems_version = %q{1.3.1}
  s.summary = %q{Ruby interface to the OpenLaszlo compiler.}
  # Generator scaffolding: dependency declarations would go in the empty
  # branches below; this gem declares none.
  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 2
    if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
    else
    end
  else
  end
end
Regenerated gemspec for version 0.6.3
# -*- encoding: utf-8 -*-
# Auto-generated gemspec for ropenlaszlo 0.6.3. Regenerate rather than
# editing by hand.
Gem::Specification.new do |s|
  s.name = %q{ropenlaszlo}
  s.version = "0.6.3"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Oliver Steele"]
  # NOTE(review): date was not refreshed when 0.6.3 was regenerated.
  s.date = %q{2009-03-23}
  s.description = %q{ROpenLaszlo is an interface to the OpenLaszlo compiler.}
  s.email = %q{steele@osteele.com}
  s.extra_rdoc_files = ["README.rdoc", "MIT-LICENSE", "CHANGES.rdoc", "TODO.rdoc"]
  s.files = ["lib/openlaszlo", "lib/openlaszlo/applet.rb", "lib/openlaszlo/compiler.rb", "lib/openlaszlo.rb", "lib/ropenlaszlo.rb", "lib/tasks", "lib/tasks/openlaszlo.rake", "test/compilation-error.lzx", "test/compilation-warning.lzx", "test/compiler_test.rb", "test/tasks", "test/tasks/Rakefile", "test/tasks/test.lzx", "test/tasks/test.swf", "test/test.lzx", "test/test_utils.rb", "README.rdoc", "MIT-LICENSE", "CHANGES.rdoc", "TODO.rdoc"]
  # NOTE(review): has_rdoc is deprecated/ignored by modern RubyGems.
  s.has_rdoc = true
  s.homepage = %q{http://github.com/osteele/ropenlaszlo}
  s.rdoc_options = ["--title", "ROpenLaszlo: Ruby interface to the OpenLaszlo compiler", "--exclude", "test/.*", "--inline-source", "--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubyforge_project = %q{ropenlaszlo}
  s.rubygems_version = %q{1.3.1}
  s.summary = %q{Ruby interface to the OpenLaszlo compiler.}
  # Generator scaffolding: dependency declarations would go in the empty
  # branches below; this gem declares none.
  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 2
    if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
    else
    end
  else
  end
end
|
# frozen_string_literal: true
module ActiveRecord
  module ConnectionAdapters
    module SQLServer
      # SQL Server implementations of the AbstractAdapter statement API plus
      # adapter-specific machinery: identity-insert handling, sp_executesql
      # bind-parameter execution, TinyTDS (:dblib) result handling and
      # Azure-aware session option queries.
      module DatabaseStatements
        READ_QUERY = ActiveRecord::ConnectionAdapters::AbstractAdapter.build_read_query_regexp(:begin, :commit, :dbcc, :explain, :save, :select, :set, :rollback, :waitfor) # :nodoc:
        private_constant :READ_QUERY
        def write_query?(sql) # :nodoc:
          !READ_QUERY.match?(sql)
        end
        # Executes +sql+ directly, wrapping it with SET IDENTITY_INSERT
        # ON/OFF when the statement inserts explicit identity-column values.
        def execute(sql, name = nil)
          if preventing_writes? && write_query?(sql)
            raise ActiveRecord::ReadOnlyError, "Write query attempted while in readonly mode: #{sql}"
          end
          materialize_transactions
          mark_transaction_written_if_write(sql)
          if id_insert_table_name = query_requires_identity_insert?(sql)
            with_identity_insert_enabled(id_insert_table_name) { do_execute(sql, name) }
          else
            do_execute(sql, name)
          end
        end
        # Runs +sql+ through sp_executesql with typed bind parameters and
        # returns the result (an ActiveRecord::Result by default).
        def exec_query(sql, name = "SQL", binds = [], prepare: false)
          if preventing_writes? && write_query?(sql)
            raise ActiveRecord::ReadOnlyError, "Write query attempted while in readonly mode: #{sql}"
          end
          materialize_transactions
          mark_transaction_written_if_write(sql)
          sp_executesql(sql, name, binds, prepare: prepare)
        end
        # Wraps the insert with IDENTITY_INSERT when the target table's
        # identity column is being set explicitly.
        def exec_insert(sql, name = nil, binds = [], pk = nil, _sequence_name = nil)
          if id_insert_table_name = exec_insert_requires_identity?(sql, pk, binds)
            with_identity_insert_enabled(id_insert_table_name) { super(sql, name, binds, pk) }
          else
            super(sql, name, binds, pk)
          end
        end
        # Appends @@ROWCOUNT so the affected-row count comes back as the
        # first value of the result set.
        def exec_delete(sql, name, binds)
          sql = sql.dup << "; SELECT @@ROWCOUNT AS AffectedRows"
          super(sql, name, binds).rows.first.first
        end
        def exec_update(sql, name, binds)
          sql = sql.dup << "; SELECT @@ROWCOUNT AS AffectedRows"
          super(sql, name, binds).rows.first.first
        end
        def begin_db_transaction
          do_execute "BEGIN TRANSACTION", "TRANSACTION"
        end
        # Adds SQL Server's SNAPSHOT level to the standard isolation levels.
        def transaction_isolation_levels
          super.merge snapshot: "SNAPSHOT"
        end
        def begin_isolated_db_transaction(isolation)
          set_transaction_isolation_level transaction_isolation_levels.fetch(isolation)
          begin_db_transaction
        end
        def set_transaction_isolation_level(isolation_level)
          do_execute "SET TRANSACTION ISOLATION LEVEL #{isolation_level}", "TRANSACTION"
        end
        def commit_db_transaction
          do_execute "COMMIT TRANSACTION", "TRANSACTION"
        end
        # Guards with @@TRANCOUNT so rolling back outside a transaction is
        # a no-op instead of an error.
        def exec_rollback_db_transaction
          do_execute "IF @@TRANCOUNT > 0 ROLLBACK TRANSACTION", "TRANSACTION"
        end
        include Savepoints
        def create_savepoint(name = current_savepoint_name)
          do_execute "SAVE TRANSACTION #{name}", "TRANSACTION"
        end
        def exec_rollback_to_savepoint(name = current_savepoint_name)
          do_execute "ROLLBACK TRANSACTION #{name}", "TRANSACTION"
        end
        # SQL Server has no RELEASE SAVEPOINT; savepoints simply expire on
        # commit, so this is intentionally a no-op.
        def release_savepoint(name = current_savepoint_name)
        end
        # Forces a binary (case-sensitive) comparison only when the column's
        # collation is case-insensitive.
        def case_sensitive_comparison(attribute, value)
          column = column_for_attribute(attribute)
          if column.collation && !column.case_sensitive?
            attribute.eq(Arel::Nodes::Bin.new(value))
          else
            super
          end
        end
        # We should propose this change to Rails team
        # Loads fixtures in batches (sized by insert_rows_length) inside one
        # transaction with referential integrity disabled.
        def insert_fixtures_set(fixture_set, tables_to_delete = [])
          fixture_inserts = []
          fixture_set.each do |table_name, fixtures|
            fixtures.each_slice(insert_rows_length) do |batch|
              fixture_inserts << build_fixture_sql(batch, table_name)
            end
          end
          table_deletes = tables_to_delete.map { |table| "DELETE FROM #{quote_table_name table}" }
          total_sqls = Array.wrap(table_deletes + fixture_inserts)
          disable_referential_integrity do
            transaction(requires_new: true) do
              total_sqls.each do |sql|
                execute sql, "Fixtures Load"
                yield if block_given?
              end
            end
          end
        end
        def can_perform_case_insensitive_comparison_for?(column)
          column.type == :string && (!column.collation || column.case_sensitive?)
        end
        private :can_perform_case_insensitive_comparison_for?
        # For identity columns, compute the next identity value server-side
        # instead of sending DEFAULT.
        def default_insert_value(column)
          if column.is_identity?
            table_name = quote(quote_table_name(column.table_name))
            Arel.sql("IDENT_CURRENT(#{table_name}) + IDENT_INCR(#{table_name})")
          else
            super
          end
        end
        private :default_insert_value
        # Builds INSERT ... OUTPUT INSERTED.col ... for insert_all/upsert_all
        # returning support.
        def build_insert_sql(insert) # :nodoc:
          sql = +"INSERT #{insert.into}"
          if returning = insert.send(:insert_all).returning
            sql << " OUTPUT " << returning.map { |column| "INSERTED.#{quote_column_name(column)}" }.join(", ")
          end
          sql << " #{insert.values_list}"
          sql
        end
        # === SQLServer Specific ======================================== #
        # Executes a stored procedure. Variables may be a single Hash of
        # named parameters (@k = v) or positional values. Yields each result
        # row (as HashWithIndifferentAccess) when a block is given.
        def execute_procedure(proc_name, *variables)
          materialize_transactions
          vars = if variables.any? && variables.first.is_a?(Hash)
            variables.first.map { |k, v| "@#{k} = #{quote(v)}" }
          else
            variables.map { |v| quote(v) }
          end.join(", ")
          sql = "EXEC #{proc_name} #{vars}".strip
          name = "Execute Procedure"
          log(sql, name) do
            case @connection_options[:mode]
            when :dblib
              result = ensure_established_connection! { dblib_execute(sql) }
              options = { as: :hash, cache_rows: true, timezone: ActiveRecord.default_timezone || :utc }
              result.each(options) do |row|
                r = row.with_indifferent_access
                yield(r) if block_given?
              end
              result.each.map { |row| row.is_a?(Hash) ? row.with_indifferent_access : row }
            end
          end
        end
        # Runs the block with IDENTITY_INSERT enabled for +table_name+,
        # always switching it back off afterwards.
        def with_identity_insert_enabled(table_name)
          table_name = quote_table_name(table_name)
          set_identity_insert(table_name, true)
          yield
        ensure
          set_identity_insert(table_name, false)
        end
        # Switches the session's current database (no-op on Azure, which
        # does not support USE).
        def use_database(database = nil)
          return if sqlserver_azure?
          name = SQLServer::Utils.extract_identifiers(database || @connection_options[:database]).quoted
          do_execute "USE #{name}" unless name.blank?
        end
        # Parses DBCC USEROPTIONS into a hash of session options
        # (underscored option name => value). Empty on Azure.
        def user_options
          return {} if sqlserver_azure?
          rows = select_rows("DBCC USEROPTIONS WITH NO_INFOMSGS", "SCHEMA")
          rows = rows.first if rows.size == 2 && rows.last.empty?
          rows.reduce(HashWithIndifferentAccess.new) do |values, row|
            # Driver may yield rows as Hashes or as [option, value] Arrays.
            if row.instance_of? Hash
              set_option = row.values[0].gsub(/\s+/, "_")
              user_value = row.values[1]
            elsif row.instance_of? Array
              set_option = row[0].gsub(/\s+/, "_")
              user_value = row[1]
            end
            values[set_option] = user_value
            values
          end
        end
        # Azure lacks DBCC USEROPTIONS, so each option below falls back to a
        # direct system-view / builtin query there.
        def user_options_dateformat
          if sqlserver_azure?
            select_value "SELECT [dateformat] FROM [sys].[syslanguages] WHERE [langid] = @@LANGID", "SCHEMA"
          else
            user_options["dateformat"]
          end
        end
        def user_options_isolation_level
          if sqlserver_azure?
            sql = %(SELECT CASE [transaction_isolation_level]
                    WHEN 0 THEN NULL
                    WHEN 1 THEN 'READ UNCOMMITTED'
                    WHEN 2 THEN 'READ COMMITTED'
                    WHEN 3 THEN 'REPEATABLE READ'
                    WHEN 4 THEN 'SERIALIZABLE'
                    WHEN 5 THEN 'SNAPSHOT' END AS [isolation_level]
                    FROM [sys].[dm_exec_sessions]
                    WHERE [session_id] = @@SPID).squish
            select_value sql, "SCHEMA"
          else
            user_options["isolation_level"]
          end
        end
        def user_options_language
          if sqlserver_azure?
            select_value "SELECT @@LANGUAGE AS [language]", "SCHEMA"
          else
            user_options["language"]
          end
        end
        def newid_function
          select_value "SELECT NEWID()"
        end
        def newsequentialid_function
          select_value "SELECT NEWSEQUENTIALID()"
        end
        protected
        # Rewrites an INSERT so the generated primary key is returned:
        # via OUTPUT INSERTED (optionally through a table variable for
        # tables with triggers) or via SCOPE_IDENTITY() as a fallback.
        def sql_for_insert(sql, pk, binds)
          if pk.nil?
            table_name = query_requires_identity_insert?(sql)
            pk = primary_key(table_name)
          end
          sql = if pk && use_output_inserted? && !database_prefix_remote_server?
            quoted_pk = SQLServer::Utils.extract_identifiers(pk).quoted
            table_name ||= get_table_name(sql)
            exclude_output_inserted = exclude_output_inserted_table_name?(table_name, sql)
            if exclude_output_inserted
              # Tables with triggers cannot use plain OUTPUT INSERTED;
              # route the id through a table variable instead.
              id_sql_type = exclude_output_inserted.is_a?(TrueClass) ? "bigint" : exclude_output_inserted
              <<~SQL.squish
                DECLARE @ssaIdInsertTable table (#{quoted_pk} #{id_sql_type});
                #{sql.dup.insert sql.index(/ (DEFAULT )?VALUES/), " OUTPUT INSERTED.#{quoted_pk} INTO @ssaIdInsertTable"}
                SELECT CAST(#{quoted_pk} AS #{id_sql_type}) FROM @ssaIdInsertTable
              SQL
            else
              sql.dup.insert sql.index(/ (DEFAULT )?VALUES/), " OUTPUT INSERTED.#{quoted_pk}"
            end
          else
            "#{sql}; SELECT CAST(SCOPE_IDENTITY() AS bigint) AS Ident"
          end
          super
        end
        # === SQLServer Specific ======================================== #
        def set_identity_insert(table_name, enable = true)
          do_execute "SET IDENTITY_INSERT #{table_name} #{enable ? 'ON' : 'OFF'}"
        rescue Exception
          # NOTE(review): rescuing Exception also swallows SignalException /
          # SystemExit before re-raising as ActiveRecordError; consider
          # narrowing to StandardError.
          raise ActiveRecordError, "IDENTITY_INSERT could not be turned #{enable ? 'ON' : 'OFF'} for table #{table_name}"
        end
        # === SQLServer Specific (Executing) ============================ #
        # Executes +sql+ without building a result set (TinyTDS `do`).
        def do_execute(sql, name = "SQL")
          materialize_transactions
          mark_transaction_written_if_write(sql)
          log(sql, name) { raw_connection_do(sql) }
        end
        # Wraps +sql+ in EXEC sp_executesql with typed parameters unless a
        # prepared statement is not wanted for these binds.
        def sp_executesql(sql, name, binds, options = {})
          options[:ar_result] = true if options[:fetch] != :rows
          unless without_prepared_statement?(binds)
            types, params = sp_executesql_types_and_parameters(binds)
            sql = sp_executesql_sql(sql, types, params, name)
          end
          raw_select sql, name, binds, options
        end
        # Builds the parallel @n type-declaration and parameter-value lists
        # for sp_executesql.
        def sp_executesql_types_and_parameters(binds)
          types, params = [], []
          binds.each_with_index do |attr, index|
            attr = attr.value if attr.is_a?(Arel::Nodes::BindParam)
            types << "@#{index} #{sp_executesql_sql_type(attr)}"
            params << sp_executesql_sql_param(attr)
          end
          [types, params]
        end
        # Maps a bind attribute to its SQL Server parameter type, defaulting
        # to nvarchar(max) for non-numeric values.
        def sp_executesql_sql_type(attr)
          return attr.type.sqlserver_type if attr.type.respond_to?(:sqlserver_type)
          case value = attr.value_for_database
          when Numeric
            value > 2_147_483_647 ? "bigint".freeze : "int".freeze
          else
            "nvarchar(max)".freeze
          end
        end
        def sp_executesql_sql_param(attr)
          case value = attr.value_for_database
          when Type::Binary::Data,
               ActiveRecord::Type::SQLServer::Data
            quote(value)
          else
            quote(type_cast(value))
          end
        end
        # Assembles the final EXEC sp_executesql statement. EXPLAIN instead
        # inlines the parameter values so the plan text is readable.
        def sp_executesql_sql(sql, types, params, name)
          if name == "EXPLAIN"
            params.each.with_index do |param, index|
              substitute_at_finder = /(@#{index})(?=(?:[^']|'[^']*')*$)/ # Finds unquoted @n values.
              sql = sql.sub substitute_at_finder, param.to_s
            end
          else
            types = quote(types.join(", "))
            params = params.map.with_index { |p, i| "@#{i} = #{p}" }.join(", ") # Only p is needed, but with @i helps explain regexp.
            sql = "EXEC sp_executesql #{quote(sql)}"
            sql += ", #{types}, #{params}" unless params.empty?
          end
          sql
        end
        def raw_connection_do(sql)
          case @connection_options[:mode]
          when :dblib
            result = ensure_established_connection! { dblib_execute(sql) }
            result.do
          end
        ensure
          @update_sql = false
        end
        # === SQLServer Specific (Identity Inserts) ===================== #
        def use_output_inserted?
          self.class.use_output_inserted
        end
        def exclude_output_inserted_table_names?
          !self.class.exclude_output_inserted_table_names.empty?
        end
        # Returns the configured id type (or true) when +table_name+ is
        # excluded from OUTPUT INSERTED; false otherwise.
        def exclude_output_inserted_table_name?(table_name, sql)
          return false unless exclude_output_inserted_table_names?
          table_name ||= get_table_name(sql)
          return false unless table_name
          self.class.exclude_output_inserted_table_names[table_name]
        end
        def exec_insert_requires_identity?(sql, pk, binds)
          query_requires_identity_insert?(sql)
        end
        # Returns the table name when +sql+ explicitly inserts into that
        # table's identity column; false otherwise.
        def query_requires_identity_insert?(sql)
          if insert_sql?(sql)
            table_name = get_table_name(sql)
            id_column = identity_columns(table_name).first
            # id_column && sql =~ /^\s*(INSERT|EXEC sp_executesql N'INSERT)[^(]+\([^)]*\b(#{id_column.name})\b,?[^)]*\)/i ? quote_table_name(table_name) : false
            id_column && sql =~ /^\s*(INSERT|EXEC sp_executesql N'INSERT)[^(]+\([^)]*\b(#{id_column.name})\b,?[^)]*\)/i ? table_name : false
          else
            false
          end
        end
        def insert_sql?(sql)
          !(sql =~ /^\s*(INSERT|EXEC sp_executesql N'INSERT)/i).nil?
        end
        def identity_columns(table_name)
          schema_cache.columns(table_name).select(&:is_identity?)
        end
        # === SQLServer Specific (Selecting) ============================ #
        def raw_select(sql, name = "SQL", binds = [], options = {})
          log(sql, name, binds) { _raw_select(sql, options) }
        end
        # Runs the statement and converts the driver handle into rows,
        # always cancelling the handle afterwards.
        def _raw_select(sql, options = {})
          handle = raw_connection_run(sql)
          handle_to_names_and_values(handle, options)
        ensure
          finish_statement_handle(handle)
        end
        def raw_connection_run(sql)
          case @connection_options[:mode]
          when :dblib
            ensure_established_connection! { dblib_execute(sql) }
          end
        end
        # NOTE(review): intentionally returns nil for :dblib — TinyTDS has
        # no multi-result probe here.
        def handle_more_results?(handle)
          case @connection_options[:mode]
          when :dblib
          end
        end
        def handle_to_names_and_values(handle, options = {})
          case @connection_options[:mode]
          when :dblib
            handle_to_names_and_values_dblib(handle, options)
          end
        end
        # Materializes a TinyTDS result: arrays for AR results/row fetches,
        # hashes otherwise; downcases column names when configured.
        def handle_to_names_and_values_dblib(handle, options = {})
          query_options = {}.tap do |qo|
            qo[:timezone] = ActiveRecord.default_timezone || :utc
            qo[:as] = (options[:ar_result] || options[:fetch] == :rows) ? :array : :hash
          end
          results = handle.each(query_options)
          columns = lowercase_schema_reflection ? handle.fields.map { |c| c.downcase } : handle.fields
          options[:ar_result] ? ActiveRecord::Result.new(columns, results) : results
        end
        def finish_statement_handle(handle)
          case @connection_options[:mode]
          when :dblib
            handle.cancel if handle
          end
          handle
        end
        def dblib_execute(sql)
          @connection.execute(sql).tap do |result|
            # TinyTDS returns false instead of raising an exception if connection fails.
            # Getting around this by raising an exception ourselves while this PR
            # https://github.com/rails-sqlserver/tiny_tds/pull/469 is not released.
            raise TinyTds::Error, "failed to execute statement" if result.is_a?(FalseClass)
          end
        end
        def ensure_established_connection!
          raise TinyTds::Error, 'SQL Server client is not connected' unless @connection
          yield
        end
      end
    end
  end
end
Add async keyword to exec_query
Following
https://github.com/rails/rails/commit/7fc174aadaefc2c0a8a6b7c8a7599dd9ca04811f
changes
# frozen_string_literal: true

module ActiveRecord
  module ConnectionAdapters
    module SQLServer
      # SQL Server-specific implementations of ActiveRecord's database
      # statement interface: statement execution, transactions/savepoints,
      # identity-insert handling, and the raw dblib (TinyTDS) plumbing.
      module DatabaseStatements
        READ_QUERY = ActiveRecord::ConnectionAdapters::AbstractAdapter.build_read_query_regexp(:begin, :commit, :dbcc, :explain, :save, :select, :set, :rollback, :waitfor) # :nodoc:
        private_constant :READ_QUERY

        # True when +sql+ does not match the read-only statement pattern.
        def write_query?(sql) # :nodoc:
          !READ_QUERY.match?(sql)
        end

        # Executes +sql+ directly. Writes are rejected while the connection is
        # preventing writes; inserts that supply explicit identity-column
        # values are wrapped in SET IDENTITY_INSERT ON/OFF.
        def execute(sql, name = nil)
          if preventing_writes? && write_query?(sql)
            raise ActiveRecord::ReadOnlyError, "Write query attempted while in readonly mode: #{sql}"
          end

          materialize_transactions
          mark_transaction_written_if_write(sql)

          if id_insert_table_name = query_requires_identity_insert?(sql)
            with_identity_insert_enabled(id_insert_table_name) { do_execute(sql, name) }
          else
            do_execute(sql, name)
          end
        end

        # Executes +sql+ through sp_executesql with typed bind parameters.
        def exec_query(sql, name = "SQL", binds = [], prepare: false, async: false)
          if preventing_writes? && write_query?(sql)
            raise ActiveRecord::ReadOnlyError, "Write query attempted while in readonly mode: #{sql}"
          end

          materialize_transactions
          mark_transaction_written_if_write(sql)

          sp_executesql(sql, name, binds, prepare: prepare, async: async)
        end

        # Like the generic exec_insert, but enables IDENTITY_INSERT when the
        # statement writes an explicit value into an identity column.
        def exec_insert(sql, name = nil, binds = [], pk = nil, _sequence_name = nil)
          if id_insert_table_name = exec_insert_requires_identity?(sql, pk, binds)
            with_identity_insert_enabled(id_insert_table_name) { super(sql, name, binds, pk) }
          else
            super(sql, name, binds, pk)
          end
        end

        # Appends a @@ROWCOUNT select so the affected-row count can be
        # returned from the single round trip.
        def exec_delete(sql, name, binds)
          sql = sql.dup << "; SELECT @@ROWCOUNT AS AffectedRows"
          super(sql, name, binds).rows.first.first
        end

        # Same @@ROWCOUNT trick as exec_delete, for UPDATE statements.
        def exec_update(sql, name, binds)
          sql = sql.dup << "; SELECT @@ROWCOUNT AS AffectedRows"
          super(sql, name, binds).rows.first.first
        end

        def begin_db_transaction
          do_execute "BEGIN TRANSACTION", "TRANSACTION"
        end

        # Adds SQL Server's SNAPSHOT isolation level to the standard set.
        def transaction_isolation_levels
          super.merge snapshot: "SNAPSHOT"
        end

        # Sets the isolation level first: SQL Server applies it per session,
        # before the transaction is opened.
        def begin_isolated_db_transaction(isolation)
          set_transaction_isolation_level transaction_isolation_levels.fetch(isolation)
          begin_db_transaction
        end

        def set_transaction_isolation_level(isolation_level)
          do_execute "SET TRANSACTION ISOLATION LEVEL #{isolation_level}", "TRANSACTION"
        end

        def commit_db_transaction
          do_execute "COMMIT TRANSACTION", "TRANSACTION"
        end

        # Guarded by @@TRANCOUNT so a rollback after an implicit abort
        # (e.g. deadlock victim) does not raise.
        def exec_rollback_db_transaction
          do_execute "IF @@TRANCOUNT > 0 ROLLBACK TRANSACTION", "TRANSACTION"
        end

        include Savepoints

        def create_savepoint(name = current_savepoint_name)
          do_execute "SAVE TRANSACTION #{name}", "TRANSACTION"
        end

        def exec_rollback_to_savepoint(name = current_savepoint_name)
          do_execute "ROLLBACK TRANSACTION #{name}", "TRANSACTION"
        end

        # Intentional no-op — SQL Server has no RELEASE SAVEPOINT statement
        # (savepoints are discarded when the transaction ends).
        def release_savepoint(name = current_savepoint_name)
        end

        # Forces a binary (case-sensitive) comparison for columns whose
        # collation is case-insensitive.
        def case_sensitive_comparison(attribute, value)
          column = column_for_attribute(attribute)

          if column.collation && !column.case_sensitive?
            attribute.eq(Arel::Nodes::Bin.new(value))
          else
            super
          end
        end

        # We should propose this change to Rails team
        # Loads fixtures in insert_rows_length-sized batches inside a single
        # transaction, with referential integrity disabled.
        def insert_fixtures_set(fixture_set, tables_to_delete = [])
          fixture_inserts = []

          fixture_set.each do |table_name, fixtures|
            fixtures.each_slice(insert_rows_length) do |batch|
              fixture_inserts << build_fixture_sql(batch, table_name)
            end
          end

          table_deletes = tables_to_delete.map { |table| "DELETE FROM #{quote_table_name table}" }
          total_sqls = Array.wrap(table_deletes + fixture_inserts)

          disable_referential_integrity do
            transaction(requires_new: true) do
              total_sqls.each do |sql|
                execute sql, "Fixtures Load"
                yield if block_given?
              end
            end
          end
        end

        # Case-insensitive comparison only works for string columns that are
        # not already under a case-insensitive collation.
        def can_perform_case_insensitive_comparison_for?(column)
          column.type == :string && (!column.collation || column.case_sensitive?)
        end
        private :can_perform_case_insensitive_comparison_for?

        # For identity columns, predicts the next identity value server-side
        # instead of relying on a DEFAULT expression.
        def default_insert_value(column)
          if column.is_identity?
            table_name = quote(quote_table_name(column.table_name))
            Arel.sql("IDENT_CURRENT(#{table_name}) + IDENT_INCR(#{table_name})")
          else
            super
          end
        end
        private :default_insert_value

        # Builds an INSERT for insert_all, emitting an OUTPUT INSERTED.*
        # clause when a RETURNING-style result was requested.
        def build_insert_sql(insert) # :nodoc:
          sql = +"INSERT #{insert.into}"

          if returning = insert.send(:insert_all).returning
            sql << " OUTPUT " << returning.map { |column| "INSERTED.#{quote_column_name(column)}" }.join(", ")
          end

          sql << " #{insert.values_list}"
          sql
        end

        # === SQLServer Specific ======================================== #

        # Executes a stored procedure. +variables+ may be positional values
        # or a single Hash of named parameters (@name = value). Rows are
        # yielded (with indifferent access) when a block is given; the full
        # result set is returned either way. Only the :dblib mode is handled.
        def execute_procedure(proc_name, *variables)
          materialize_transactions

          vars = if variables.any? && variables.first.is_a?(Hash)
                   variables.first.map { |k, v| "@#{k} = #{quote(v)}" }
                 else
                   variables.map { |v| quote(v) }
                 end.join(", ")
          sql = "EXEC #{proc_name} #{vars}".strip
          name = "Execute Procedure"

          log(sql, name) do
            case @connection_options[:mode]
            when :dblib
              result = ensure_established_connection! { dblib_execute(sql) }
              options = { as: :hash, cache_rows: true, timezone: ActiveRecord.default_timezone || :utc }
              result.each(options) do |row|
                r = row.with_indifferent_access
                yield(r) if block_given?
              end
              result.each.map { |row| row.is_a?(Hash) ? row.with_indifferent_access : row }
            end
          end
        end

        # Runs the block with SET IDENTITY_INSERT ON for +table_name+,
        # always switching it back OFF afterwards.
        def with_identity_insert_enabled(table_name)
          table_name = quote_table_name(table_name)
          set_identity_insert(table_name, true)
          yield
        ensure
          set_identity_insert(table_name, false)
        end

        # Switches the session to +database+ (or the configured database).
        # Azure SQL does not support USE, so this is a no-op there.
        def use_database(database = nil)
          return if sqlserver_azure?

          name = SQLServer::Utils.extract_identifiers(database || @connection_options[:database]).quoted
          do_execute "USE #{name}" unless name.blank?
        end

        # Returns the session's DBCC USEROPTIONS as a hash with indifferent
        # access, e.g. { "dateformat" => "mdy", ... }. Empty on Azure.
        def user_options
          return {} if sqlserver_azure?

          rows = select_rows("DBCC USEROPTIONS WITH NO_INFOMSGS", "SCHEMA")
          rows = rows.first if rows.size == 2 && rows.last.empty?
          rows.reduce(HashWithIndifferentAccess.new) do |values, row|
            if row.instance_of? Hash
              set_option = row.values[0].gsub(/\s+/, "_")
              user_value = row.values[1]
            elsif row.instance_of? Array
              set_option = row[0].gsub(/\s+/, "_")
              user_value = row[1]
            end
            values[set_option] = user_value
            values
          end
        end

        # Session date format; Azure requires a catalog query instead of
        # DBCC USEROPTIONS.
        def user_options_dateformat
          if sqlserver_azure?
            select_value "SELECT [dateformat] FROM [sys].[syslanguages] WHERE [langid] = @@LANGID", "SCHEMA"
          else
            user_options["dateformat"]
          end
        end

        # Session isolation level as a human-readable string.
        def user_options_isolation_level
          if sqlserver_azure?
            sql = %(SELECT CASE [transaction_isolation_level]
                    WHEN 0 THEN NULL
                    WHEN 1 THEN 'READ UNCOMMITTED'
                    WHEN 2 THEN 'READ COMMITTED'
                    WHEN 3 THEN 'REPEATABLE READ'
                    WHEN 4 THEN 'SERIALIZABLE'
                    WHEN 5 THEN 'SNAPSHOT' END AS [isolation_level]
                    FROM [sys].[dm_exec_sessions]
                    WHERE [session_id] = @@SPID).squish
            select_value sql, "SCHEMA"
          else
            user_options["isolation_level"]
          end
        end

        # Session language name.
        def user_options_language
          if sqlserver_azure?
            select_value "SELECT @@LANGUAGE AS [language]", "SCHEMA"
          else
            user_options["language"]
          end
        end

        def newid_function
          select_value "SELECT NEWID()"
        end

        def newsequentialid_function
          select_value "SELECT NEWSEQUENTIALID()"
        end

        protected

        # Rewrites an INSERT so the generated primary key comes back with the
        # result: either via OUTPUT INSERTED.<pk> (optionally buffered through
        # a table variable for excluded tables — commonly needed when the
        # table has triggers; TODO confirm) or via SCOPE_IDENTITY() fallback.
        def sql_for_insert(sql, pk, binds)
          if pk.nil?
            table_name = query_requires_identity_insert?(sql)
            pk = primary_key(table_name)
          end

          sql = if pk && use_output_inserted? && !database_prefix_remote_server?
                  quoted_pk = SQLServer::Utils.extract_identifiers(pk).quoted
                  table_name ||= get_table_name(sql)
                  exclude_output_inserted = exclude_output_inserted_table_name?(table_name, sql)

                  if exclude_output_inserted
                    id_sql_type = exclude_output_inserted.is_a?(TrueClass) ? "bigint" : exclude_output_inserted
                    <<~SQL.squish
                      DECLARE @ssaIdInsertTable table (#{quoted_pk} #{id_sql_type});
                      #{sql.dup.insert sql.index(/ (DEFAULT )?VALUES/), " OUTPUT INSERTED.#{quoted_pk} INTO @ssaIdInsertTable"}
                      SELECT CAST(#{quoted_pk} AS #{id_sql_type}) FROM @ssaIdInsertTable
                    SQL
                  else
                    sql.dup.insert sql.index(/ (DEFAULT )?VALUES/), " OUTPUT INSERTED.#{quoted_pk}"
                  end
                else
                  "#{sql}; SELECT CAST(SCOPE_IDENTITY() AS bigint) AS Ident"
                end

          super
        end

        # === SQLServer Specific ======================================== #

        # Toggles IDENTITY_INSERT; wraps any failure in ActiveRecordError so
        # callers get a consistent exception type.
        def set_identity_insert(table_name, enable = true)
          do_execute "SET IDENTITY_INSERT #{table_name} #{enable ? 'ON' : 'OFF'}"
        rescue Exception
          raise ActiveRecordError, "IDENTITY_INSERT could not be turned #{enable ? 'ON' : 'OFF'} for table #{table_name}"
        end

        # === SQLServer Specific (Executing) ============================ #

        # Low-level execute with logging, bypassing sp_executesql.
        def do_execute(sql, name = "SQL")
          materialize_transactions
          mark_transaction_written_if_write(sql)
          log(sql, name) { raw_connection_do(sql) }
        end

        # Wraps +sql+ in EXEC sp_executesql with typed parameters unless the
        # statement is being sent unprepared.
        def sp_executesql(sql, name, binds, options = {})
          options[:ar_result] = true if options[:fetch] != :rows

          unless without_prepared_statement?(binds)
            types, params = sp_executesql_types_and_parameters(binds)
            sql = sp_executesql_sql(sql, types, params, name)
          end

          raw_select sql, name, binds, options
        end

        # Builds the parallel "@n <type>" declarations and quoted values for
        # sp_executesql from the bind attributes.
        def sp_executesql_types_and_parameters(binds)
          types, params = [], []
          binds.each_with_index do |attr, index|
            attr = attr.value if attr.is_a?(Arel::Nodes::BindParam)
            types << "@#{index} #{sp_executesql_sql_type(attr)}"
            params << sp_executesql_sql_param(attr)
          end
          [types, params]
        end

        # SQL type name for a bind: the attribute's own sqlserver_type if it
        # declares one, int/bigint for numerics, nvarchar(max) otherwise.
        def sp_executesql_sql_type(attr)
          return attr.type.sqlserver_type if attr.type.respond_to?(:sqlserver_type)

          case value = attr.value_for_database
          when Numeric
            value > 2_147_483_647 ? "bigint".freeze : "int".freeze
          else
            "nvarchar(max)".freeze
          end
        end

        # Quoted SQL literal for a bind value. Binary/SQLServer data types
        # are quoted without the adapter's type_cast.
        def sp_executesql_sql_param(attr)
          case value = attr.value_for_database
          when Type::Binary::Data,
               ActiveRecord::Type::SQLServer::Data
            quote(value)
          else
            quote(type_cast(value))
          end
        end

        # Assembles the final EXEC sp_executesql statement. For EXPLAIN the
        # parameters are substituted inline instead, since sp_executesql
        # output would not show the plan for the original statement.
        def sp_executesql_sql(sql, types, params, name)
          if name == "EXPLAIN"
            params.each.with_index do |param, index|
              substitute_at_finder = /(@#{index})(?=(?:[^']|'[^']*')*$)/ # Finds unquoted @n values.
              sql = sql.sub substitute_at_finder, param.to_s
            end
          else
            types = quote(types.join(", "))
            params = params.map.with_index { |p, i| "@#{i} = #{p}" }.join(", ") # Only p is needed, but with @i helps explain regexp.
            sql = "EXEC sp_executesql #{quote(sql)}"
            sql += ", #{types}, #{params}" unless params.empty?
          end
          sql
        end

        # Executes +sql+ and consumes the result without building rows.
        # NOTE(review): @update_sql is unconditionally reset here — its
        # purpose is not visible in this file.
        def raw_connection_do(sql)
          case @connection_options[:mode]
          when :dblib
            result = ensure_established_connection! { dblib_execute(sql) }
            result.do
          end
        ensure
          @update_sql = false
        end

        # === SQLServer Specific (Identity Inserts) ===================== #

        def use_output_inserted?
          self.class.use_output_inserted
        end

        def exclude_output_inserted_table_names?
          !self.class.exclude_output_inserted_table_names.empty?
        end

        # Returns the configured exclusion entry (true or an id SQL type) for
        # +table_name+, or false when the table is not excluded.
        def exclude_output_inserted_table_name?(table_name, sql)
          return false unless exclude_output_inserted_table_names?

          table_name ||= get_table_name(sql)
          return false unless table_name

          self.class.exclude_output_inserted_table_names[table_name]
        end

        def exec_insert_requires_identity?(sql, pk, binds)
          query_requires_identity_insert?(sql)
        end

        # Returns the table name when +sql+ is an INSERT that explicitly
        # lists an identity column, false otherwise.
        def query_requires_identity_insert?(sql)
          if insert_sql?(sql)
            table_name = get_table_name(sql)
            id_column = identity_columns(table_name).first
            # id_column && sql =~ /^\s*(INSERT|EXEC sp_executesql N'INSERT)[^(]+\([^)]*\b(#{id_column.name})\b,?[^)]*\)/i ? quote_table_name(table_name) : false
            id_column && sql =~ /^\s*(INSERT|EXEC sp_executesql N'INSERT)[^(]+\([^)]*\b(#{id_column.name})\b,?[^)]*\)/i ? table_name : false
          else
            false
          end
        end

        # Matches plain INSERTs and INSERTs already wrapped in sp_executesql.
        def insert_sql?(sql)
          !(sql =~ /^\s*(INSERT|EXEC sp_executesql N'INSERT)/i).nil?
        end

        def identity_columns(table_name)
          schema_cache.columns(table_name).select(&:is_identity?)
        end

        # === SQLServer Specific (Selecting) ============================ #

        def raw_select(sql, name = "SQL", binds = [], options = {})
          log(sql, name, binds) { _raw_select(sql, options) }
        end

        # Runs the statement and converts the handle, always cancelling the
        # handle afterwards so the connection is left clean.
        def _raw_select(sql, options = {})
          handle = raw_connection_run(sql)
          handle_to_names_and_values(handle, options)
        ensure
          finish_statement_handle(handle)
        end

        def raw_connection_run(sql)
          case @connection_options[:mode]
          when :dblib
            ensure_established_connection! { dblib_execute(sql) }
          end
        end

        # NOTE(review): empty :dblib branch — effectively returns nil for
        # every mode; looks like a stub kept for interface symmetry.
        def handle_more_results?(handle)
          case @connection_options[:mode]
          when :dblib
          end
        end

        def handle_to_names_and_values(handle, options = {})
          case @connection_options[:mode]
          when :dblib
            handle_to_names_and_values_dblib(handle, options)
          end
        end

        # Converts a TinyTDS result handle into either an
        # ActiveRecord::Result (when options[:ar_result]) or raw rows.
        def handle_to_names_and_values_dblib(handle, options = {})
          query_options = {}.tap do |qo|
            qo[:timezone] = ActiveRecord.default_timezone || :utc
            qo[:as] = (options[:ar_result] || options[:fetch] == :rows) ? :array : :hash
          end
          results = handle.each(query_options)
          columns = lowercase_schema_reflection ? handle.fields.map { |c| c.downcase } : handle.fields
          options[:ar_result] ? ActiveRecord::Result.new(columns, results) : results
        end

        def finish_statement_handle(handle)
          case @connection_options[:mode]
          when :dblib
            handle.cancel if handle
          end
          handle
        end

        def dblib_execute(sql)
          @connection.execute(sql).tap do |result|
            # TinyTDS returns false instead of raising an exception if connection fails.
            # Getting around this by raising an exception ourselves while this PR
            # https://github.com/rails-sqlserver/tiny_tds/pull/469 is not released.
            raise TinyTds::Error, "failed to execute statement" if result.is_a?(FalseClass)
          end
        end

        # Guards against calling into a nil @connection after a disconnect.
        def ensure_established_connection!
          raise TinyTds::Error, 'SQL Server client is not connected' unless @connection

          yield
        end
      end
    end
  end
end
|
missing file
# Value object describing a SQL Server synonym (name plus the owner and name
# of the table it points at).
# NOTE(review): `class X < Struct.new(...)` creates an anonymous intermediate
# class; the idiomatic form is `SynonymDefinition = Struct.new(...)`. Left
# unchanged because the compact namespace requires the enclosing constants.
class Extensions::ActiveRecord::ConnectionAdapters::SynonymDefinition < Struct.new(:name, :table_owner, :table_name) #:nodoc:
end
|
# Creates the crono_jobs table used to persist scheduled-job state and logs.
class CreateCronoJobs < ActiveRecord::Migration
  def self.up
    create_table :crono_jobs do |t|
      t.string :job_id, null: false
      # LONGTEXT on MySQL: the default TEXT column (64KB) silently truncates
      # long job logs. Other adapters ignore the limit.
      t.text :log, limit: 4294967295
      t.datetime :last_performed_at
      t.boolean :healthy
      t.timestamps null: false
    end
    # One row per job; job_id is the natural key.
    add_index :crono_jobs, [:job_id], unique: true
  end

  def self.down
    drop_table :crono_jobs
  end
end
Fix MySQL default text size limit
# Creates the crono_jobs table used to persist scheduled-job state and logs.
class CreateCronoJobs < ActiveRecord::Migration
  def self.up
    create_table :crono_jobs do |t|
      t.string :job_id, null: false
      t.text :log, limit: 4294967295 # LONGTEXT for MySQL
      t.datetime :last_performed_at
      t.boolean :healthy
      t.timestamps null: false
    end
    # One row per job; job_id is the natural key.
    add_index :crono_jobs, [:job_id], unique: true
  end

  def self.down
    drop_table :crono_jobs
  end
end
|
Regenerate gemspec for version 0.1.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

# Gem specification for event-crawler (generated; regenerate via rake, do not
# hand-edit). The branching below is jeweler boilerplate for old RubyGems.
Gem::Specification.new do |s|
  s.name = "event-crawler"
  s.version = "0.1.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Felipe Lima"]
  s.date = "2011-11-14"
  s.description = "Generic Web crawler with a DSL that parses event-related data from web pages"
  s.email = "felipe.lima@gmail.com"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "lib/event-crawler.rb",
    "test/helper.rb",
    "test/test_event-crawler.rb"
  ]
  s.homepage = "http://github.com/felipecsl/event-crawler"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.11"
  s.summary = "Generic web crawler for event-related data"

  # Dependency declarations, duplicated per RubyGems capability level.
  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<shoulda>, [">= 0"])
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_development_dependency(%q<rcov>, [">= 0"])
    else
      s.add_dependency(%q<shoulda>, [">= 0"])
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_dependency(%q<rcov>, [">= 0"])
    end
  else
    s.add_dependency(%q<shoulda>, [">= 0"])
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
    s.add_dependency(%q<rcov>, [">= 0"])
  end
end
|
$KCODE = 'u'
require 'jcode' # We should get rid of this — copy jsize out of jcode into our own namespace.

# Main container module for RuTils (Russian text utilities).
module RuTils
  # Directory RuTils is installed into. Needed to automatically copy RuTils
  # into other applications.
  INSTALLATION_DIRECTORY = File.expand_path(File.dirname(__FILE__) + '/../')

  MAJOR = 0
  MINOR = 1
  TINY  = 3

  # RuTils version string, e.g. "0.1.3".
  VERSION = [MAJOR, MINOR ,TINY].join('.')

  # Requires a component from lib/<name>/<name>.rb (loaded once).
  def self.load_component(name) #:nodoc:
    require RuTils::INSTALLATION_DIRECTORY + "/lib/#{name}/#{name}"
  end

  # Re-`load`s a component, bypassing require's cache (useful in development).
  def self.reload_component(name) #:nodoc:
    load RuTils::INSTALLATION_DIRECTORY + "/lib/#{name}/#{name}.rb"
  end
end

RuTils::load_component :pluralizer # Numeral agreement and amounts in words
require File.dirname(__FILE__) + '/gilenson/gilenson_port'
# RuTils::load_component :gilenson_port
RuTils::load_component :datetime # Locale-free date and time
RuTils::load_component :transliteration # Transliteration
RuTils::load_component :integration # Integration with rails, textile, etc.
RuTils::load_component :countries # Country data in Russian and English
Add RuTils::load_component call for the new gilenson
git-svn-id: 7df67273c11b06143beb25902101eb0a64e19583@96 a3ef6f13-15a0-41b7-8655-729d286dc6c2
$KCODE = 'u'
require 'jcode' # We should get rid of this — copy jsize out of jcode into our own namespace.

# Main container module for RuTils (Russian text utilities).
module RuTils
  # Directory RuTils is installed into. Needed to automatically copy RuTils
  # into other applications.
  INSTALLATION_DIRECTORY = File.expand_path(File.dirname(__FILE__) + '/../')

  MAJOR = 0
  MINOR = 1
  TINY  = 3

  # RuTils version string, e.g. "0.1.3".
  VERSION = [MAJOR, MINOR ,TINY].join('.')

  # Requires a component from lib/<name>/<name>.rb (loaded once).
  def self.load_component(name) #:nodoc:
    require RuTils::INSTALLATION_DIRECTORY + "/lib/#{name}/#{name}"
  end

  # Re-`load`s a component, bypassing require's cache (useful in development).
  def self.reload_component(name) #:nodoc:
    load RuTils::INSTALLATION_DIRECTORY + "/lib/#{name}/#{name}.rb"
  end
end

RuTils::load_component :pluralizer # Numeral agreement and amounts in words
require File.dirname(__FILE__) + '/gilenson/gilenson_port'
RuTils::load_component :gilenson # The new Gilenson typographer
RuTils::load_component :datetime # Locale-free date and time
RuTils::load_component :transliteration # Transliteration
RuTils::load_component :integration # Integration with rails, textile, etc.
RuTils::load_component :countries # Country data in Russian and English
# coding: utf-8

# Gem specification for s3-authorize: generates the signature and policy
# document needed to upload files directly to Amazon S3.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 's3/authorize/version'

Gem::Specification.new do |spec|
  spec.name          = "s3-authorize"
  spec.version       = S3::Authorize::VERSION
  spec.authors       = ["Vinh Nguyen"]
  spec.email         = ["vinh.nglx@gmail.com"]

  spec.summary       = %q{Generate Signature and Policy for upload any files to S3.}
  spec.description   = %q{Gem generates a signature and policy from AWS Secret key.}
  spec.homepage      = "https://github.com/vinhnglx/s3-authorize"
  spec.license       = "MIT"

  # Package everything tracked by git except test/spec/feature files.
  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.11"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency 'rspec', '~> 3.0'
end
Remove version of development dependency "bundler"
# coding: utf-8

# Gem specification for s3-authorize: generates the signature and policy
# document needed to upload files directly to Amazon S3.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 's3/authorize/version'

Gem::Specification.new do |spec|
  spec.name    = 's3-authorize'
  spec.version = S3::Authorize::VERSION
  spec.authors = ['Vinh Nguyen']
  spec.email   = ['vinh.nglx@gmail.com']

  spec.summary     = 'Generate Signature and Policy for upload any files to S3.'
  spec.description = 'Gem generates a signature and policy from AWS Secret key.'
  spec.homepage    = 'https://github.com/vinhnglx/s3-authorize'
  spec.license     = 'MIT'

  # Package everything tracked by git except test/spec/feature files.
  tracked = `git ls-files -z`.split("\x0")
  spec.files         = tracked.reject { |path| path.match(%r{^(test|spec|features)/}) }
  spec.executables   = spec.files.grep(%r{^bin/}) { |path| File.basename(path) }
  spec.require_paths = ['lib']

  spec.add_development_dependency 'bundler'
  spec.add_development_dependency 'rake', '~> 10.0'
  spec.add_development_dependency 'rspec', '~> 3.0'
end
|
class Document < ActiveRecord::Base
# belongs_to :current_revision
def folder
self.folder_id = 42
self
end
def current_revision
self.uuid= 42
self.user_id=42
self.state = 42
self
end
def self.folder
Document
end
def self.current_revision
Document
end
searchable do
integer :id
integer :folder_id
integer :room_id do folder.room_id end
text :name # fulltext
string :name # order_by
time :updated_at
integer :uuid do current_revision.uuid end
integer :user_id do current_revision.user_id end
integer :file_size do current_revision.file_size end
string :file_content_type do current_revision.file_content_type end
string :state do current_revision.state end
end
end
better dummy
# Dummy revision object returning fixed canned values, used by the
# searchable block in Document below.
class CurrentRevision
  # Generate one reader per canned attribute; behavior is identical to
  # writing each method out by hand.
  {
    uuid: 42,
    user_id: 42,
    file_size: 42,
    file_content_type: "42/42",
    state: "42"
  }.each do |attribute, canned_value|
    define_method(attribute) { canned_value }
  end
end
# Dummy folder object returning a fixed room id, used by the searchable
# block in Document below.
class Folder
  def room_id
    # Canned value matching the other test dummies.
    42
  end
end
# Document model wired to the CurrentRevision/Folder dummies above so the
# searchable block can resolve association-like calls without real records.
class Document < ActiveRecord::Base
  # belongs_to :current_revision

  # Class-level stub used by the searchable DSL below.
  def self.folder
    Folder.new
  end

  # Class-level stub used by the searchable DSL below.
  def self.current_revision
    CurrentRevision.new
  end

  # Search index configuration (Sunspot-style DSL — TODO confirm engine).
  searchable do
    integer :id
    integer :folder_id
    integer :room_id do folder.room_id end
    text :name # fulltext
    string :name # order_by
    time :updated_at
    integer :uuid do current_revision.uuid end
    integer :user_id do current_revision.user_id end
    integer :file_size do current_revision.file_size end
    string :file_content_type do current_revision.file_content_type end
    string :state do current_revision.state end
  end
end
|
require 'spec_helper'

# Spec suite for Arugula, a pure-Ruby regular expression engine. Each
# pattern/string pair below is run through both Arugula and ::Regexp and the
# results are compared, so Arugula's behavior is pinned to Ruby's own engine.
describe Arugula do
  it 'has a version number' do
    expect(Arugula::VERSION).to match(Gem::Version::ANCHORED_VERSION_PATTERN)
  end

  # pattern => subject string. Covers literals, character classes, anchors,
  # quantifiers (greedy and lazy), groups, alternation, and empty patterns.
  # Commented-out pairs are known-unsupported cases kept for reference.
  {
    'a' => 'a',
    'foo' => 'e-food',
    '[eat]' => 'hfkdshgfjds',
    '\\A\\de' => '5eat',
    '^line$' => "before\nline\nafter",
    'a*bc*' => 'caaaaaaaab',
    'a+bc+' => 'caaaaaaaabcc',
    '[a-z]' => 'AfG',
    '[A-Z].+' => 'my name is Samuel Giddins',
    '[e-gE-G]' => 'cow is GREAT',
    'Hello ([a-z]+)!' => 'Hello world!',
    'a(b(b*))c' => '1ab2abbbc',
    '(\d+),(\d+),(\d+)' => '1,20,3',
    'foo|bar|baz' => 'foo',
    '(foo|bar|baz)' => 'fubar-ed',
    'this is (\d+|not)' => 'this is 10pm',
    '.' => '',
    '' => 'I like pizza.',
    '[()\\[\\].-]\\.' => 'hi',
    'foo[a-z]?' => 'food?',
    'a(b)?c' => 'factual',
    'a(b)?c(d)?' => 'ab acd',
    'a{2}b{,4}c{3,}d{6,8}' => 'a' * 2 + 'b' * 3 + 'c' * 4 + 'd' * 7,
    'fo{1,3}?d' => 'I like eating food',
    '.([0-9]){3,5}?foo' => '9876543210foo',
    # '<.*>' => '<a>foo</a>',
    # '<.+>' => '<a>foo</a>',
    # '<.{1,}>' => '<a>foo</a>',
    # 'foo(A{,1}+)Abar' => 'fooAAAbar',
  }.each do |pattern, string|
    ruby_pattern = "/#{pattern}/"
    context "#{string.dump} =~ #{ruby_pattern}" do
      subject { described_class.new(pattern) }
      let(:regexp) { Regexp.new(pattern) }

      describe '#to_s' do
        it 'returns the original pattern' do
          expect(subject.to_s).to eq(ruby_pattern)
        end
      end

      context 'when matching a string' do
        # match? mirrors Regexp#=~ (index or nil), not a boolean.
        it 'does the same thing as ::Regexp' do
          expect(subject.match?(string)).to eq(regexp =~ string)
        end

        it 'returns the correct match data' do
          match = subject.match(string)
          expected = regexp.match(string)
          expect(match.to_a).to eq(expected.to_a)
          expect(match.to_s).to eq(expected.to_s)
          expect(match.inspect).to eq(expected.inspect)
        end
      end
    end
  end

  context 'when matching from a starting offset' do
    let(:pattern) { 'ab' }
    subject { described_class.new(pattern) }

    it "doesn't match when the match is before the passed in position" do
      expect(subject.match?('abcd', 2)).to be_nil
    end

    it 'returns the match index' do
      expect(subject.match?('ababababab', 3)).to eq(4)
    end
  end

  it 'supports . (any character except line terminator)' do
    # Basic matching
    expect(Arugula.new('.').match('foo').to_a).to eq ['f']
    # Basic non-matching
    expect(Arugula.new('.').match('')).to be_nil
    expect(Arugula.new('.').match("\n")).to be_nil
    expect(Arugula.new('.').match("\0").to_a).to eq ["\0"]
  end
end
Add specs for escapes
require 'spec_helper'

# Spec suite for Arugula, a pure-Ruby regular expression engine. Each
# pattern/string pair below is run through both Arugula and ::Regexp and the
# results are compared, so Arugula's behavior is pinned to Ruby's own engine.
describe Arugula do
  it 'has a version number' do
    expect(Arugula::VERSION).to match(Gem::Version::ANCHORED_VERSION_PATTERN)
  end

  # pattern => subject string. Covers literals, character classes, anchors,
  # quantifiers (greedy and lazy), groups, alternation, and empty patterns.
  # Commented-out pairs are known-unsupported cases kept for reference.
  {
    'a' => 'a',
    'foo' => 'e-food',
    '[eat]' => 'hfkdshgfjds',
    '\\A\\de' => '5eat',
    '^line$' => "before\nline\nafter",
    'a*bc*' => 'caaaaaaaab',
    'a+bc+' => 'caaaaaaaabcc',
    '[a-z]' => 'AfG',
    '[A-Z].+' => 'my name is Samuel Giddins',
    '[e-gE-G]' => 'cow is GREAT',
    'Hello ([a-z]+)!' => 'Hello world!',
    'a(b(b*))c' => '1ab2abbbc',
    '(\d+),(\d+),(\d+)' => '1,20,3',
    'foo|bar|baz' => 'foo',
    '(foo|bar|baz)' => 'fubar-ed',
    'this is (\d+|not)' => 'this is 10pm',
    '.' => '',
    '' => 'I like pizza.',
    '[()\\[\\].-]\\.' => 'hi',
    'foo[a-z]?' => 'food?',
    'a(b)?c' => 'factual',
    'a(b)?c(d)?' => 'ab acd',
    'a{2}b{,4}c{3,}d{6,8}' => 'a' * 2 + 'b' * 3 + 'c' * 4 + 'd' * 7,
    'fo{1,3}?d' => 'I like eating food',
    '.([0-9]){3,5}?foo' => '9876543210foo',
    # '<.*>' => '<a>foo</a>',
    # '<.+>' => '<a>foo</a>',
    # '<.{1,}>' => '<a>foo</a>',
    # 'foo(A{,1}+)Abar' => 'fooAAAbar',
  }.each do |pattern, string|
    ruby_pattern = "/#{pattern}/"
    context "#{string.dump} =~ #{ruby_pattern}" do
      subject { described_class.new(pattern) }
      let(:regexp) { Regexp.new(pattern) }

      describe '#to_s' do
        it 'returns the original pattern' do
          expect(subject.to_s).to eq(ruby_pattern)
        end
      end

      context 'when matching a string' do
        # match? mirrors Regexp#=~ (index or nil), not a boolean.
        it 'does the same thing as ::Regexp' do
          expect(subject.match?(string)).to eq(regexp =~ string)
        end

        it 'returns the correct match data' do
          match = subject.match(string)
          expected = regexp.match(string)
          expect(match.to_a).to eq(expected.to_a)
          expect(match.to_s).to eq(expected.to_s)
          expect(match.inspect).to eq(expected.inspect)
        end
      end
    end
  end

  context 'when matching from a starting offset' do
    let(:pattern) { 'ab' }
    subject { described_class.new(pattern) }

    it "doesn't match when the match is before the passed in position" do
      expect(subject.match?('abcd', 2)).to be_nil
    end

    it 'returns the match index' do
      expect(subject.match?('ababababab', 3)).to eq(4)
    end
  end

  it 'supports . (any character except line terminator)' do
    # Basic matching
    expect(Arugula.new('.').match('foo').to_a).to eq ['f']
    # Basic non-matching
    expect(Arugula.new('.').match('')).to be_nil
    expect(Arugula.new('.').match("\n")).to be_nil
    expect(Arugula.new('.').match("\0").to_a).to eq ["\0"]
  end

  it 'support quoting meta-characters via escape sequence' do
    expect(Arugula.new('\\\\').match('\\').to_a).to eq ['\\']
    expect(Arugula.new('\/').match('/').to_a).to eq ['/']
    # parenthesis, etc
    expect(Arugula.new('\(').match('(').to_a).to eq ['(']
    expect(Arugula.new('\)').match(')').to_a).to eq [')']
    expect(Arugula.new('\[').match('[').to_a).to eq ['[']
    expect(Arugula.new('\]').match(']').to_a).to eq [']']
    expect(Arugula.new('\{').match('{').to_a).to eq ['{']
    expect(Arugula.new('\}').match('}').to_a).to eq ['}']
    # alternation separator
    expect(Arugula.new('\|').match('|').to_a).to eq ['|']
    # quantifiers
    expect(Arugula.new('\?').match('?').to_a).to eq ['?']
    expect(Arugula.new('\.').match('.').to_a).to eq ['.']
    expect(Arugula.new('\*').match('*').to_a).to eq ['*']
    expect(Arugula.new('\+').match('+').to_a).to eq ['+']
    # line anchors
    expect(Arugula.new('\^').match('^').to_a).to eq ['^']
    expect(Arugula.new('\$').match('$').to_a).to eq ['$']
  end
end
|
# (c) Copyright 2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

require_relative '../../api200/server_profile_provider'

module OneviewCookbook
  module API300
    module Synergy
      # ServerProfile API300 Synergy provider
      # Extends the API200 provider with OS deployment plan support.
      class ServerProfileProvider < API200::ServerProfileProvider
        # Resolves the os_deployment_plan property into the profile's
        # osDeploymentSettings before delegating to the API200 logic.
        def create_or_update
          load_os_deployment_plan
          super
        end

        protected

        # Populates @item['osDeploymentSettings'] from the resource's
        # os_deployment_plan property, merging user-specified custom
        # attributes over the plan's defaults. No-op when the property is
        # absent; warns and keeps the explicit URI when one is already set
        # in `data`.
        def load_os_deployment_plan
          # Return if the property is not defined
          return unless @context.os_deployment_plan
          @item['osDeploymentSettings'] ||= {}
          # Return if the value is already defined in data
          if @item['osDeploymentSettings']['osDeploymentPlanUri']
            return Chef::Log.warn('The OS deployment plan is already defined in `data`. ' \
              "The `os_deployment_plan` property will be ignored in favor of '#{@item['osDeploymentSettings']['osDeploymentPlanUri']}'")
          end
          # Get the user specified custom attributes (either key is accepted)
          custom = @item['osDeploymentSettings']['customAttributes'] || @item['osDeploymentSettings']['osCustomAttributes']
          # Loads the OS deployment plan and gets the default custom attributes
          plan = load_resource(:OSDeploymentPlan, @context.os_deployment_plan)
          plan_defaults = plan['additionalParameters']
          # Merge both user defined and default custom attributes
          custom = custom_merge(plan_defaults, custom)
          @item.set_os_deployment_settings(plan, custom)
        end

        # Overlays +customs+ onto +defaults+ (matching by 'name') and returns
        # only entries that have a 'value', reduced to name/value pairs.
        def custom_merge(defaults, customs)
          defaults ||= []
          customs ||= []
          customs.each do |ca|
            custom_replace!(defaults, ca)
          end
          defaults.select { |da| da['value'] }.collect { |ca| { 'name' => ca['name'], 'value' => ca['value'] } }
        end

        # Replaces the attribute with the same 'name' in +target+ in place,
        # appending +custom_attribute+ when no match exists.
        def custom_replace!(target, custom_attribute)
          was_replaced = false
          target.collect! do |da|
            if da['name'] == custom_attribute['name']
              was_replaced = true
              custom_attribute
            else
              da
            end
          end
          target << custom_attribute unless was_replaced
        end
      end
    end
  end
end
removed unnecessary comments from server profile api300 provider
# (c) Copyright 2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

require_relative '../../api200/server_profile_provider'

module OneviewCookbook
  module API300
    module Synergy
      # ServerProfile API300 Synergy provider
      # Extends the API200 provider with OS deployment plan support.
      class ServerProfileProvider < API200::ServerProfileProvider
        # Resolves the os_deployment_plan property into the profile's
        # osDeploymentSettings before delegating to the API200 logic.
        def create_or_update
          load_os_deployment_plan
          super
        end

        protected

        # Populates @item['osDeploymentSettings'] from the resource's
        # os_deployment_plan property, merging user-specified custom
        # attributes over the plan's defaults. No-op when the property is
        # absent; warns and keeps the explicit URI when one is already set
        # in `data`.
        def load_os_deployment_plan
          # Return if the property is not defined
          return unless @context.os_deployment_plan
          @item['osDeploymentSettings'] ||= {}
          # Return if the value is already defined in data
          if @item['osDeploymentSettings']['osDeploymentPlanUri']
            return Chef::Log.warn('The OS deployment plan is already defined in `data`. ' \
              "The `os_deployment_plan` property will be ignored in favor of '#{@item['osDeploymentSettings']['osDeploymentPlanUri']}'")
          end
          # Get the user specified custom attributes (either key is accepted)
          custom = @item['osDeploymentSettings']['customAttributes'] || @item['osDeploymentSettings']['osCustomAttributes']
          # Loads the OS deployment plan and gets the default custom attributes
          plan = load_resource(:OSDeploymentPlan, @context.os_deployment_plan)
          plan_defaults = plan['additionalParameters']
          # Merge both user defined and default custom attributes
          custom = custom_merge(plan_defaults, custom)
          @item.set_os_deployment_settings(plan, custom)
        end

        # Overlays +customs+ onto +defaults+ (matching by 'name') and returns
        # only entries that have a 'value', reduced to name/value pairs.
        def custom_merge(defaults, customs)
          defaults ||= []
          customs ||= []
          customs.each do |ca|
            custom_replace!(defaults, ca)
          end
          defaults.select { |da| da['value'] }.collect { |ca| { 'name' => ca['name'], 'value' => ca['value'] } }
        end

        # Replaces the attribute with the same 'name' in +target+ in place,
        # appending +custom_attribute+ when no match exists.
        def custom_replace!(target, custom_attribute)
          was_replaced = false
          target.collect! do |da|
            if da['name'] == custom_attribute['name']
              was_replaced = true
              custom_attribute
            else
              da
            end
          end
          target << custom_attribute unless was_replaced
        end
      end
    end
  end
end
|
# CocoaPods specification for Concorde, a progressive-JPEG download/decode
# library. Core decoding, UIKit view, and Contentful integration are split
# into subspecs; only Core and UI are installed by default.
Pod::Spec.new do |s|
  s.name             = "Concorde"
  s.version          = "0.1.0"
  s.summary          = "Download and decode progressive JPEGs easily."
  s.homepage         = "https://github.com/contentful-labs/Concorde/"
  s.social_media_url = 'https://twitter.com/contentful'
  s.license          = {
    :type => 'MIT',
    :file => 'LICENSE'
  }
  s.authors          = { "Boris Bügling" => "boris@buegling.com" }
  s.source           = { :git => "https://github.com/contentful-labs/Concorde.git",
                         :tag => s.version.to_s }
  s.requires_arc     = true

  s.ios.deployment_target = '8.0'
  s.ios.frameworks        = 'UIKit'
  s.osx.deployment_target = '10.9'

  s.default_subspecs = 'Core', 'UI'

  # Decoder plus the bundled libjpeg-turbo static library.
  s.subspec 'Core' do |core_spec|
    core_spec.source_files = 'Code/*.{h,m}', 'vendor/libjpeg-turbo/include/*'
    core_spec.public_header_files = 'Code/CCBufferedImageDecoder.h'
    core_spec.vendored_libraries = 'vendor/libjpeg-turbo/lib/libturbojpeg.a'
  end

  # UIKit image view that renders progressively (iOS only).
  s.subspec 'UI' do |ui|
    ui.ios.source_files = 'Code/CCBufferedImageView.swift'
    ui.dependency 'Concorde/Core'
  end

  # Optional Contentful Delivery API integration.
  s.subspec 'Contentful' do |contentful_spec|
    contentful_spec.ios.source_files = 'Code/CCBufferedImageView+Contentful.swift'
    contentful_spec.dependency 'Concorde/Core'
    contentful_spec.dependency 'Concorde/UI'
    contentful_spec.dependency 'ContentfulDeliveryAPI', '>= 1.6.0'
  end
end
Updating podspec
# CocoaPods specification for Concorde, a progressive-JPEG download/decode
# library. Core decoding, UIKit view, and Contentful integration are split
# into subspecs; only Core and UI are installed by default.
Pod::Spec.new do |s|
  s.name             = "Concorde"
  s.version          = "0.1.1"
  s.summary          = "Download and decode progressive JPEGs easily."
  s.homepage         = "https://github.com/contentful-labs/Concorde/"
  s.social_media_url = 'https://twitter.com/contentful'
  s.license          = {
    :type => 'MIT',
    :file => 'LICENSE'
  }
  s.authors          = { "Boris Bügling" => "boris@buegling.com" }
  s.source           = { :git => "https://github.com/contentful-labs/Concorde.git",
                         :tag => s.version.to_s }
  s.requires_arc     = true

  s.ios.deployment_target = '10.0'
  s.ios.frameworks        = 'UIKit'
  s.osx.deployment_target = '10.9'

  s.default_subspecs = 'Core', 'UI'

  # Decoder plus the bundled libjpeg-turbo static library.
  s.subspec 'Core' do |core_spec|
    core_spec.source_files = 'Code/*.{h,m}', 'vendor/libjpeg-turbo/include/*'
    core_spec.public_header_files = 'Code/CCBufferedImageDecoder.h'
    core_spec.vendored_libraries = 'vendor/libjpeg-turbo/lib/libturbojpeg.a'
  end

  # UIKit image view that renders progressively (iOS only).
  s.subspec 'UI' do |ui|
    ui.ios.source_files = 'Code/CCBufferedImageView.swift'
    ui.dependency 'Concorde/Core'
  end

  # Optional Contentful Delivery API integration.
  s.subspec 'Contentful' do |contentful_spec|
    contentful_spec.ios.source_files = 'Code/CCBufferedImageView+Contentful.swift'
    contentful_spec.dependency 'Concorde/Core'
    contentful_spec.dependency 'Concorde/UI'
    contentful_spec.dependency 'ContentfulDeliveryAPI', '>= 1.6.0'
  end
end
|
require "spec_helper"

# ChefSpec coverage for the percona::cluster recipe: verifies the dependent
# recipes are included and the platform-specific cluster package handling.
describe "percona::cluster" do
  let(:cluster_package) do
    "percona-xtradb-cluster-55"
  end

  let(:chef_run) do
    ChefSpec::SoloRunner.new.converge(described_recipe)
  end

  before do
    # Pretend MySQL is already bootstrapped so the recipe's guards pass.
    stub_command("test -f /var/lib/mysql/mysql/user.frm").and_return(true)
    stub_command("test -f /etc/mysql/grants.sql").and_return(true)
  end

  it { expect(chef_run).to include_recipe("percona::package_repo") }
  it { expect(chef_run).to include_recipe("percona::configure_server") }
  it { expect(chef_run).to include_recipe("percona::access_grants") }

  describe "Ubuntu" do
    it { expect(chef_run).to install_package(cluster_package) }

    # The package install must stop mysql immediately so the cluster can
    # take over the data directory.
    it "stops the `mysql` service" do
      resource = chef_run.package(cluster_package)
      expect(resource).to notify("service[mysql]").to(:stop).immediately
    end
  end

  describe "CentOS" do
    let(:chef_run) do
      env_options = { platform: "centos", version: "6.5" }
      ChefSpec::SoloRunner.new(env_options).converge(described_recipe)
    end

    # mysql-libs conflicts with the Percona packages on CentOS.
    it { expect(chef_run).to remove_package("mysql-libs") }
    it { expect(chef_run).to install_package(cluster_package) }
  end
end
update `cluster_spec` to match centos package name
require "spec_helper"

# ChefSpec coverage for the percona::cluster recipe: verifies the dependent
# recipes are included and the platform-specific cluster package handling
# (note the package name is capitalized on CentOS).
describe "percona::cluster" do
  let(:centos_cluster_package) do
    "Percona-XtraDB-Cluster-55"
  end

  let(:ubuntu_cluster_package) do
    "percona-xtradb-cluster-55"
  end

  let(:chef_run) do
    ChefSpec::SoloRunner.new.converge(described_recipe)
  end

  before do
    # Pretend MySQL is already bootstrapped so the recipe's guards pass.
    stub_command("test -f /var/lib/mysql/mysql/user.frm").and_return(true)
    stub_command("test -f /etc/mysql/grants.sql").and_return(true)
  end

  specify do
    expect(chef_run).to include_recipe("percona::package_repo")
    expect(chef_run).to include_recipe("percona::configure_server")
    expect(chef_run).to include_recipe("percona::access_grants")
  end

  describe "Ubuntu" do
    # The package install must stop mysql immediately so the cluster can
    # take over the data directory.
    specify do
      expect(chef_run).to install_package(ubuntu_cluster_package)
      expect(chef_run.package(ubuntu_cluster_package)).to(
        notify("service[mysql]").to(:stop).immediately
      )
    end
  end

  describe "CentOS" do
    let(:chef_run) do
      env_options = { platform: "centos", version: "6.5" }
      ChefSpec::SoloRunner.new(env_options).converge(described_recipe)
    end

    # mysql-libs conflicts with the Percona packages on CentOS.
    specify do
      expect(chef_run).to remove_package("mysql-libs")
      expect(chef_run).to install_package(centos_cluster_package)
    end
  end
end
|
# encoding: utf-8

require File.expand_path('../spec_helper', __FILE__)

module CLAide
  describe Command do
    describe 'in general' do
      it 'registers the subcommand classes' do
        Fixture::Command.subcommands.map(&:command).should ==
          %w(spec-file)
        Fixture::Command::SpecFile.subcommands.map(&:command).should ==
          %w(common-invisible-command)
        Fixture::Command::SpecFile::Create.subcommands.map(&:command).should ==
          []
        Fixture::Command::SpecFile::Lint.subcommands.map(&:command).should ==
          %w(repo)
      end

      it 'returns subcommands for look up' do
        subcommands = Fixture::Command::SpecFile.subcommands_for_command_lookup
        subcommands.map(&:command).should == %w(lint create)
      end

      it 'returns whether it is the root command' do
        Fixture::Command.should.be.root_command?
        Fixture::Command::SpecFile.should.not.be.root_command?
      end

      it 'tries to match a subclass for each of the subcommands' do
        parsed = Fixture::Command.parse(%w(spec-file --verbose lint))
        parsed.should.be.instance_of Fixture::Command::SpecFile::Lint
      end
    end

    # FIX: this group was accidentally nested inside 'in general'; it is a
    # sibling section of its own.
    describe 'plugins' do
      describe 'when the plugin is at <command-prefix>_plugin.rb' do
        PLUGIN_FIXTURE = ROOT + 'spec/fixture/command/plugin_fixture.rb'
        PLUGIN = ROOT + 'spec/fixture_plugin.rb'

        before do
          FileUtils.copy PLUGIN_FIXTURE, PLUGIN
        end

        after do
          FileUtils.remove_file PLUGIN
        end

        it 'loads the plugin' do
          Fixture::CommandPluginable.subcommands.find do |cmd|
            cmd.command == 'demo-plugin'
          end.should.be.nil
          Fixture::CommandPluginable.load_plugins
          plugin_class = Fixture::CommandPluginable.subcommands.find do |cmd|
            cmd.command == 'demo-plugin'
          end
          plugin_class.ancestors.should.include Fixture::CommandPluginable
          plugin_class.description.should =~ /plugins/
        end

        it 'is available for help' do
          Fixture::CommandPluginable.load_plugins
          banner = CLAide::Command::Banner.new(Fixture::CommandPluginable)
          banner.formatted_banner.should =~ /demo-plugin/
        end
      end

      describe 'failing plugins' do
        LOAD_ERROR_PLUGIN_FIXTURE = ROOT +
          'spec/fixture/command/load_error_plugin_fixture.rb'
        LOAD_ERROR_PLUGIN = ROOT + 'spec/fixture_failing_plugin.rb'

        before do
          FileUtils.copy LOAD_ERROR_PLUGIN_FIXTURE, LOAD_ERROR_PLUGIN
        end

        after do
          FileUtils.remove_file LOAD_ERROR_PLUGIN
        end

        it 'rescues exceptions raised during the load of the plugin' do
          command = Fixture::Command
          command.plugin_prefix = 'fixture_failing'
          # Capture the output of the singleton `puts` used by load_plugins.
          def command.puts(text)
            (@fixture_output ||= '') << text
          end
          should.not.raise do
            Fixture::Command.load_plugins
          end
          output = command.instance_variable_get(:@fixture_output)
          output.should.include('Error loading the plugin')
          output.should.include('LoadError')
        end
      end

      it 'fails normally if there is no plugin' do
        Fixture::Command.load_plugins
        Fixture::Command.subcommands.find do |cmd|
          cmd.name == 'demo-plugin'
        end.should.be.nil
      end
    end

    #-------------------------------------------------------------------------#

    describe 'validation' do
      it 'does not raise if one of the subcommands consumes arguments' do
        subcommand = Fixture::Command.parse(%w(spec-file create AFNetworking))
        subcommand.spec.should == 'AFNetworking'
      end

      it 'raises a Help exception when created with an invalid subcommand' do
        message = "Unknown command: `unknown`\nDid you mean: spec-file"
        should_raise_help message do
          Fixture::Command.parse(%w(unknown)).validate!
        end

        should_raise_help "Unknown command: `unknown`\nDid you mean: lint" do
          Fixture::Command.parse(%w(spec-file unknown)).validate!
        end
      end

      it 'raises an empty Help exception when called on an abstract command' do
        should_raise_help nil do
          Fixture::Command.parse(%w(spec-file)).validate!
        end
      end
    end

    #-------------------------------------------------------------------------#

    describe 'default options' do
      it 'raises a Help exception, without error message' do
        should_raise_help nil do
          Fixture::Command.parse(%w(--help)).validate!
        end
      end

      it 'sets the verbose flag' do
        command = Fixture::Command.parse([])
        command.should.not.be.verbose
        command = Fixture::Command.parse(%w(--verbose))
        command.should.be.verbose
      end

      it 'handles the version flag' do
        command = Fixture::Command
        command.version = '1.0'
        command.instance_variable_set(:@fixture_output, '')
        def command.puts(text)
          @fixture_output << text
        end
        command.run(%w(--version))
        output = command.instance_variable_get(:@fixture_output)
        output.should == '1.0'
      end

      it 'handles the version flag in conjunction with the verbose flag' do
        path = 'path/to/gems/cocoapods-plugins/lib/cocoapods_plugin.rb'
        Command::PluginsHelper.expects(:plugin_load_paths).returns([path])
        Command::PluginsHelper.expects(:plugin_info).
          returns('cocoapods_plugin: 1.0')
        command = Fixture::Command
        command.stubs(:load_plugins)
        command.version = '1.0'
        command.instance_variable_set(:@fixture_output, '')
        def command.puts(text)
          @fixture_output << "#{text}\n"
        end
        command.run(%w(--version --verbose))
        output = command.instance_variable_get(:@fixture_output)
        output.should == "1.0\ncocoapods_plugin: 1.0\n"
      end

      it "doesn't include the version flag for non root commands" do
        command = Fixture::Command::SpecFile
        should.raise CLAide::Help do
          command.parse(%w(--version)).validate!
        end.message.should.include?('Unknown option: `--version`')
      end

      it 'handles the completion-script flag' do
        command = Fixture::Command
        command.instance_variable_set(:@fixture_output, '')
        def command.puts(text)
          @fixture_output << text
        end
        Command::ShellCompletionHelper.
          expects(:completion_template).returns('script')
        command.run(%w(--completion-script))
        output = command.instance_variable_get(:@fixture_output)
        output.should == 'script'
      end

      it "doesn't include the completion-script flag for non root commands" do
        command = Fixture::Command::SpecFile
        should.raise CLAide::Help do
          command.parse(%w(--completion-script)).validate!
        end.message.should.include?('Unknown option: `--completion-script`')
      end
    end

    #-------------------------------------------------------------------------#

    describe 'when running' do
      before do
        Fixture::Command.stubs(:puts)
        Fixture::Command.stubs(:exit)
      end

      it 'does not print the backtrace of an InformativeError by default' do
        ::CLAide::ANSI.disabled = true
        expected = Help.new(Fixture::Command.banner).message
        Fixture::Command.expects(:puts).with(expected)
        Fixture::Command.run(%w(--help))
      end

      it 'prints the backtrace of an InformativeError, if set to verbose' do
        error = Fixture::Error.new
        Fixture::Command.any_instance.stubs(:validate!).raises(error)
        error.stubs(:message).returns('the message')
        error.stubs(:backtrace).returns(%w(the backtrace))

        # Message must come out before the backtrace.
        printed = states('printed').starts_as(:nothing)
        Fixture::Command.expects(:puts).with('the message').
          when(printed.is(:nothing)).then(printed.is(:message))
        Fixture::Command.expects(:puts).with('the', 'backtrace').
          when(printed.is(:message)).then(printed.is(:done))

        Fixture::Command.run(%w(--verbose))
      end

      it 'exits with a failure status when an InformativeError occurs' do
        Fixture::Command.expects(:exit).with(1)
        Fixture::Command.any_instance.stubs(:validate!).
          raises(Fixture::Error.new)
        Fixture::Command.run([])
      end

      it 'exits with a failure status when a Help exception occurs' do
        Fixture::Command.expects(:exit).with(1)
        Fixture::Command.run(%w(unknown))
      end

      it 'exits with a success status when an empty Help exception occurs' do
        Fixture::Command.expects(:exit).with(0)
        Fixture::Command.run(%w(--help))
      end
    end

    #-------------------------------------------------------------------------#

    describe 'default_subcommand' do
      before do
        @command_class = Fixture::Command::SpecFile.dup
        @command_class.default_subcommand = 'lint'
      end

      it 'returns the default subcommand if specified' do
        cmd = @command_class.parse([])
        cmd.class.should == Fixture::Command::SpecFile::Lint
      end

      it "doesn't return a default subcommand if a command is given" do
        cmd = @command_class.parse(%w(create))
        cmd.class.should == Fixture::Command::SpecFile::Create
      end

      it "doesn't invoke a default subcommand by default" do
        @command_class.default_subcommand = nil
        cmd = @command_class.parse([])
        cmd.class.should == @command_class
      end

      it 'invokes the default subcommand only if abstract' do
        @command_class.abstract_command = false
        cmd = @command_class.parse([])
        cmd.class.should == @command_class
      end

      it 'raises if unable to find the default subcommand' do
        command_class = Fixture::Command::SpecFile.dup
        command_class.default_subcommand = 'find-me'
        should.raise do
          command_class.parse([])
        end.message.should.match /Unable to find the default subcommand/
      end

      it 'shows the help of the parent if a command was invoked by default' do
        cmd = @command_class.parse([])
        cmd.class.superclass.expects(:help!)
        cmd.send(:help!)
      end

      it "doesn't show the help of the parent by default" do
        cmd = @command_class.parse(%w(create))
        cmd.class.expects(:help!)
        cmd.send(:help!)
      end
    end
  end
end
[Command] Fix specs alignment
# encoding: utf-8

# Specs for CLAide::Command: subcommand registration/lookup, plugin
# loading, argument validation, default flags and default subcommands.
require File.expand_path('../spec_helper', __FILE__)

module CLAide
  describe Command do
    describe 'in general' do
      it 'registers the subcommand classes' do
        Fixture::Command.subcommands.map(&:command).should ==
          %w(spec-file)
        Fixture::Command::SpecFile.subcommands.map(&:command).should ==
          %w(common-invisible-command)
        Fixture::Command::SpecFile::Create.subcommands.map(&:command).should ==
          []
        Fixture::Command::SpecFile::Lint.subcommands.map(&:command).should ==
          %w(repo)
      end

      it 'returns subcommands for look up' do
        subcommands = Fixture::Command::SpecFile.subcommands_for_command_lookup
        subcommands.map(&:command).should == %w(lint create)
      end

      it 'returns whether it is the root command' do
        Fixture::Command.should.be.root_command?
        Fixture::Command::SpecFile.should.not.be.root_command?
      end

      it 'tries to match a subclass for each of the subcommands' do
        parsed = Fixture::Command.parse(%w(spec-file --verbose lint))
        parsed.should.be.instance_of Fixture::Command::SpecFile::Lint
      end
    end

    describe 'plugins' do
      describe 'when the plugin is at <command-prefix>_plugin.rb' do
        # Copy the fixture plugin into the location load_plugins scans.
        PLUGIN_FIXTURE = ROOT + 'spec/fixture/command/plugin_fixture.rb'
        PLUGIN = ROOT + 'spec/fixture_plugin.rb'

        before do
          FileUtils.copy PLUGIN_FIXTURE, PLUGIN
        end

        after do
          FileUtils.remove_file PLUGIN
        end

        it 'loads the plugin' do
          Fixture::CommandPluginable.subcommands.find do |cmd|
            cmd.command == 'demo-plugin'
          end.should.be.nil
          Fixture::CommandPluginable.load_plugins
          plugin_class = Fixture::CommandPluginable.subcommands.find do |cmd|
            cmd.command == 'demo-plugin'
          end
          plugin_class.ancestors.should.include Fixture::CommandPluginable
          plugin_class.description.should =~ /plugins/
        end

        it 'is available for help' do
          Fixture::CommandPluginable.load_plugins
          banner = CLAide::Command::Banner.new(Fixture::CommandPluginable)
          banner.formatted_banner.should =~ /demo-plugin/
        end
      end

      describe 'failing plugins' do
        LOAD_ERROR_PLUGIN_FIXTURE = ROOT +
          'spec/fixture/command/load_error_plugin_fixture.rb'
        LOAD_ERROR_PLUGIN = ROOT + 'spec/fixture_failing_plugin.rb'

        before do
          FileUtils.copy LOAD_ERROR_PLUGIN_FIXTURE, LOAD_ERROR_PLUGIN
        end

        after do
          FileUtils.remove_file LOAD_ERROR_PLUGIN
        end

        it 'rescues exceptions raised during the load of the plugin' do
          command = Fixture::Command
          command.plugin_prefix = 'fixture_failing'
          # Capture the output of the singleton `puts` used by load_plugins.
          def command.puts(text)
            (@fixture_output ||= '') << text
          end
          should.not.raise do
            Fixture::Command.load_plugins
          end
          output = command.instance_variable_get(:@fixture_output)
          output.should.include('Error loading the plugin')
          output.should.include('LoadError')
        end
      end

      it 'fails normally if there is no plugin' do
        Fixture::Command.load_plugins
        Fixture::Command.subcommands.find do |cmd|
          cmd.name == 'demo-plugin'
        end.should.be.nil
      end
    end

    #-------------------------------------------------------------------------#

    describe 'validation' do
      it 'does not raise if one of the subcommands consumes arguments' do
        subcommand = Fixture::Command.parse(%w(spec-file create AFNetworking))
        subcommand.spec.should == 'AFNetworking'
      end

      it 'raises a Help exception when created with an invalid subcommand' do
        message = "Unknown command: `unknown`\nDid you mean: spec-file"
        should_raise_help message do
          Fixture::Command.parse(%w(unknown)).validate!
        end

        should_raise_help "Unknown command: `unknown`\nDid you mean: lint" do
          Fixture::Command.parse(%w(spec-file unknown)).validate!
        end
      end

      it 'raises an empty Help exception when called on an abstract command' do
        should_raise_help nil do
          Fixture::Command.parse(%w(spec-file)).validate!
        end
      end
    end

    #-------------------------------------------------------------------------#

    describe 'default options' do
      it 'raises a Help exception, without error message' do
        should_raise_help nil do
          Fixture::Command.parse(%w(--help)).validate!
        end
      end

      it 'sets the verbose flag' do
        command = Fixture::Command.parse([])
        command.should.not.be.verbose
        command = Fixture::Command.parse(%w(--verbose))
        command.should.be.verbose
      end

      it 'handles the version flag' do
        command = Fixture::Command
        command.version = '1.0'
        command.instance_variable_set(:@fixture_output, '')
        def command.puts(text)
          @fixture_output << text
        end
        command.run(%w(--version))
        output = command.instance_variable_get(:@fixture_output)
        output.should == '1.0'
      end

      it 'handles the version flag in conjunction with the verbose flag' do
        path = 'path/to/gems/cocoapods-plugins/lib/cocoapods_plugin.rb'
        Command::PluginsHelper.expects(:plugin_load_paths).returns([path])
        Command::PluginsHelper.expects(:plugin_info).
          returns('cocoapods_plugin: 1.0')
        command = Fixture::Command
        command.stubs(:load_plugins)
        command.version = '1.0'
        command.instance_variable_set(:@fixture_output, '')
        def command.puts(text)
          @fixture_output << "#{text}\n"
        end
        command.run(%w(--version --verbose))
        output = command.instance_variable_get(:@fixture_output)
        output.should == "1.0\ncocoapods_plugin: 1.0\n"
      end

      it "doesn't include the version flag for non root commands" do
        command = Fixture::Command::SpecFile
        should.raise CLAide::Help do
          command.parse(%w(--version)).validate!
        end.message.should.include?('Unknown option: `--version`')
      end

      it 'handles the completion-script flag' do
        command = Fixture::Command
        command.instance_variable_set(:@fixture_output, '')
        def command.puts(text)
          @fixture_output << text
        end
        Command::ShellCompletionHelper.
          expects(:completion_template).returns('script')
        command.run(%w(--completion-script))
        output = command.instance_variable_get(:@fixture_output)
        output.should == 'script'
      end

      it "doesn't include the completion-script flag for non root commands" do
        command = Fixture::Command::SpecFile
        should.raise CLAide::Help do
          command.parse(%w(--completion-script)).validate!
        end.message.should.include?('Unknown option: `--completion-script`')
      end
    end

    #-------------------------------------------------------------------------#

    describe 'when running' do
      before do
        Fixture::Command.stubs(:puts)
        Fixture::Command.stubs(:exit)
      end

      it 'does not print the backtrace of an InformativeError by default' do
        ::CLAide::ANSI.disabled = true
        expected = Help.new(Fixture::Command.banner).message
        Fixture::Command.expects(:puts).with(expected)
        Fixture::Command.run(%w(--help))
      end

      it 'prints the backtrace of an InformativeError, if set to verbose' do
        error = Fixture::Error.new
        Fixture::Command.any_instance.stubs(:validate!).raises(error)
        error.stubs(:message).returns('the message')
        error.stubs(:backtrace).returns(%w(the backtrace))

        # Message must come out before the backtrace.
        printed = states('printed').starts_as(:nothing)
        Fixture::Command.expects(:puts).with('the message').
          when(printed.is(:nothing)).then(printed.is(:message))
        Fixture::Command.expects(:puts).with('the', 'backtrace').
          when(printed.is(:message)).then(printed.is(:done))

        Fixture::Command.run(%w(--verbose))
      end

      it 'exits with a failure status when an InformativeError occurs' do
        Fixture::Command.expects(:exit).with(1)
        Fixture::Command.any_instance.stubs(:validate!).
          raises(Fixture::Error.new)
        Fixture::Command.run([])
      end

      it 'exits with a failure status when a Help exception occurs' do
        Fixture::Command.expects(:exit).with(1)
        Fixture::Command.run(%w(unknown))
      end

      it 'exits with a success status when an empty Help exception occurs' do
        Fixture::Command.expects(:exit).with(0)
        Fixture::Command.run(%w(--help))
      end
    end

    #-------------------------------------------------------------------------#

    describe 'default_subcommand' do
      before do
        # Dup to avoid mutating the shared fixture class.
        @command_class = Fixture::Command::SpecFile.dup
        @command_class.default_subcommand = 'lint'
      end

      it 'returns the default subcommand if specified' do
        cmd = @command_class.parse([])
        cmd.class.should == Fixture::Command::SpecFile::Lint
      end

      it "doesn't return a default subcommand if a command is given" do
        cmd = @command_class.parse(%w(create))
        cmd.class.should == Fixture::Command::SpecFile::Create
      end

      it "doesn't invoke a default subcommand by default" do
        @command_class.default_subcommand = nil
        cmd = @command_class.parse([])
        cmd.class.should == @command_class
      end

      it 'invokes the default subcommand only if abstract' do
        @command_class.abstract_command = false
        cmd = @command_class.parse([])
        cmd.class.should == @command_class
      end

      it 'raises if unable to find the default subcommand' do
        command_class = Fixture::Command::SpecFile.dup
        command_class.default_subcommand = 'find-me'
        should.raise do
          command_class.parse([])
        end.message.should.match /Unable to find the default subcommand/
      end

      it 'shows the help of the parent if a command was invoked by default' do
        cmd = @command_class.parse([])
        cmd.class.superclass.expects(:help!)
        cmd.send(:help!)
      end

      it "doesn't show the help of the parent by default" do
        cmd = @command_class.parse(%w(create))
        cmd.class.expects(:help!)
        cmd.send(:help!)
      end
    end
  end
end
|
require 'spec_helper'

# Round-trip and basic safety specs for the Conceal encryption helpers.
describe Conceal do
  # Fresh random key for every example.
  let(:key) { SecureRandom.hex(128) }

  it 'has a version number' do
    expect(Conceal::VERSION).not_to be nil
  end

  it 'encrypt then decrypt returns the same original plaintext' do
    expect(Conceal.decrypt(Conceal.encrypt('hello', key: key), key: key)).to eq('hello')
  end

  describe '#encrypt' do
    it 'does not return the plaintext' do
      expect(Conceal.encrypt('hello', key: key)).not_to eq('hello')
    end

    # Identical plaintext/key must still yield distinct ciphertexts,
    # which implies a random IV/salt is generated per call.
    it 'outputs different values each time (different iv/salt)' do
      first = Conceal.encrypt('hello', key: key)
      second = Conceal.encrypt('hello', key: key)
      expect(first).not_to eq(second)
    end
  end
end
Add a test to ensure a different IV/salt is used every time
require 'spec_helper'

# Round-trip and basic safety specs for the Conceal encryption helpers.
describe Conceal do
  # Fresh random key for every example.
  let(:key) { SecureRandom.hex(128) }

  it 'has a version number' do
    expect(Conceal::VERSION).not_to be nil
  end

  it 'encrypt then decrypt returns the same original plaintext' do
    expect(Conceal.decrypt(Conceal.encrypt('hello', key: key), key: key)).to eq('hello')
  end

  describe '#encrypt' do
    it 'does not return the plaintext' do
      expect(Conceal.encrypt('hello', key: key)).not_to eq('hello')
    end

    # Identical plaintext/key must still yield distinct ciphertexts,
    # which implies a random IV/salt is generated per call.
    it 'outputs different values each time (different iv/salt)' do
      first = Conceal.encrypt('hello', key: key)
      second = Conceal.encrypt('hello', key: key)
      expect(first).not_to eq(second)
    end
  end
end
|
require 'ethos/entity'

# Coercion: values are cast to the declared attribute type.
scope do
  setup do
    class Entity
      prepend Ethos::Entity

      attribute :value, Integer
    end
  end

  spec do
    entity = Entity.new
    asserts(entity.value) == nil
  end

  spec do
    entity = Entity.new value: 1
    asserts(entity.value) == 1
  end

  spec do
    entity = Entity.new value: '1'
    asserts(entity.value) == 1
  end
end

# Defaults: applied when no value is given, and overridable by the caller.
scope do
  setup do
    class Entity
      prepend Ethos::Entity

      attribute :value, Integer, default: 1
    end
  end

  spec do
    entity = Entity.new
    asserts(entity.value) == 1
  end

  # An explicit value must win over the declared default.
  spec do
    entity = Entity.new value: 2
    asserts(entity.value) == 2
  end
end

# Defaults are coerced to the attribute type as well.
scope do
  setup do
    class Entity
      prepend Ethos::Entity

      attribute :value, Integer, default: '1'
    end
  end

  spec do
    entity = Entity.new
    asserts(entity.value) == 1
  end
end

# Nested entities: a hash value is cast to the declared entity type.
scope do
  setup do
    class Entity
      prepend Ethos::Entity

      attribute :name, String
      attribute :parent, Entity
    end
  end

  scope do
    define entity: -> { Entity.new }

    spec do
      asserts(entity.parent) == nil
    end
  end

  scope do
    define entity: -> { Entity.new name: 'Child', parent: {name: 'Parent'} }

    spec do
      asserts(entity.parent).is_a? Entity
    end

    spec do
      asserts(entity.parent.name) == 'Parent'
    end
  end
end
Allow default attributes to be overridden
require 'ethos/entity'

# Coercion: values are cast to the declared attribute type.
scope do
  setup do
    class Entity
      prepend Ethos::Entity

      attribute :value, Integer
    end
  end

  spec do
    entity = Entity.new
    asserts(entity.value) == nil
  end

  spec do
    entity = Entity.new value: 1
    asserts(entity.value) == 1
  end

  spec do
    entity = Entity.new value: '1'
    asserts(entity.value) == 1
  end
end

# Defaults: applied when no value is given, and overridable by the caller.
scope do
  setup do
    class Entity
      prepend Ethos::Entity

      attribute :value, Integer, default: 1
    end
  end

  spec do
    entity = Entity.new
    asserts(entity.value) == 1
  end

  # An explicit value must win over the declared default.
  spec do
    entity = Entity.new value: 2
    asserts(entity.value) == 2
  end
end

# Defaults are coerced to the attribute type as well.
scope do
  setup do
    class Entity
      prepend Ethos::Entity

      attribute :value, Integer, default: '1'
    end
  end

  spec do
    entity = Entity.new
    asserts(entity.value) == 1
  end
end

# Nested entities: a hash value is cast to the declared entity type.
scope do
  setup do
    class Entity
      prepend Ethos::Entity

      attribute :name, String
      attribute :parent, Entity
    end
  end

  scope do
    define entity: -> { Entity.new }

    spec do
      asserts(entity.parent) == nil
    end
  end

  scope do
    define entity: -> { Entity.new name: 'Child', parent: {name: 'Parent'} }

    spec do
      asserts(entity.parent).is_a? Entity
    end

    spec do
      asserts(entity.parent.name) == 'Parent'
    end
  end
end
|
# Minimal example group; the example body is a bare literal, so it only
# verifies that the group runs without raising.
# NOTE(review): looks like leftover scaffolding — candidate for removal.
RSpec.describe "calling a deprecated method" do
  example { 123 }
end
remove unneeded spec file
|
# Homebrew formula for the libev port of shadowsocks (client tools only).
class ShadowsocksLibev < Formula
  desc "Libev port of shadowsocks"
  homepage "https://github.com/shadowsocks/shadowsocks-libev"
  url "https://github.com/shadowsocks/shadowsocks-libev/archive/v2.2.3.tar.gz"
  sha256 "41a4746190698fc78b8e06c0f0e3638ca0bb1180a8cb77246736d88553b277f7"

  bottle do
    cellar :any
    sha256 "04218b8b59658022859e9f0cbad23a3239bc220af33ce129a80b710217c3b827" => :yosemite
    sha256 "e957a1465f01c7e87b84043921c46ca7017fb092e6df3b054897b9d88f4db47f" => :mavericks
    sha256 "e6e49efa0d30888891cb79513a0873953df24c899f5461f7369cb31ed81f666b" => :mountain_lion
  end

  head "https://github.com/shadowsocks/shadowsocks-libev.git"

  option "with-polarssl", "Use PolarSSL instead of OpenSSL"

  depends_on "polarssl" => :optional
  # OpenSSL is the crypto backend unless PolarSSL was requested.
  depends_on "openssl" if build.without? "polarssl"

  def install
    args = ["--prefix=#{prefix}"]

    if build.with? "polarssl"
      polarssl = Formula["polarssl"]
      args << "--with-crypto-library=polarssl"
      args << "--with-polarssl=#{polarssl.opt_prefix}"
    end

    system "./configure", *args
    system "make"

    # Only the client-side binaries are installed.
    bin.install "src/ss-local"
    bin.install "src/ss-tunnel"

    # Ship a sample config into etc (loaded by the launchd plist below).
    (buildpath/"shadowsocks-libev.json").write <<-EOS.undent
      {
          "server":"localhost",
          "server_port":8388,
          "local_port":1080,
          "password":"barfoo!",
          "timeout":600,
          "method":null
      }
    EOS
    etc.install "shadowsocks-libev.json"

    # Point the man page at the Homebrew config location.
    inreplace "shadowsocks-libev.8", "/etc/shadowsocks-libev/config.json", "#{etc}/shadowsocks-libev.json"
    man8.install "shadowsocks-libev.8"
  end

  plist_options :manual => "#{HOMEBREW_PREFIX}/opt/shadowsocks-libev/bin/ss-local -c #{HOMEBREW_PREFIX}/etc/shadowsocks-libev.json"

  # launchd job: keep ss-local running against the installed config.
  def plist; <<-EOS.undent
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
      <key>Label</key>
      <string>#{plist_name}</string>
      <key>ProgramArguments</key>
      <array>
        <string>#{opt_bin}/ss-local</string>
        <string>-c</string>
        <string>#{etc}/shadowsocks-libev.json</string>
      </array>
      <key>RunAtLoad</key>
      <true/>
      <key>KeepAlive</key>
      <dict>
        <key>SuccessfulExit</key>
        <false/>
      </dict>
    </dict>
    </plist>
    EOS
  end
end
shadowsocks-libev: update 2.2.3 bottle.
# Homebrew formula for the libev port of shadowsocks (client tools only).
class ShadowsocksLibev < Formula
  desc "Libev port of shadowsocks"
  homepage "https://github.com/shadowsocks/shadowsocks-libev"
  url "https://github.com/shadowsocks/shadowsocks-libev/archive/v2.2.3.tar.gz"
  sha256 "41a4746190698fc78b8e06c0f0e3638ca0bb1180a8cb77246736d88553b277f7"

  bottle do
    cellar :any
    sha256 "0b3ea442d878f88e6efcfd9ab20394f5d1fca4c459b103c441c1bdd0d5421a14" => :yosemite
    sha256 "a7071683859c42ceee535ae273a04a5c1494780784f47970e680987df7d7d8c7" => :mavericks
    sha256 "80619c40934b9dcc1e799f4240d3e05a93b4128f27c3cf052355c7bccec93f58" => :mountain_lion
  end

  head "https://github.com/shadowsocks/shadowsocks-libev.git"

  option "with-polarssl", "Use PolarSSL instead of OpenSSL"

  depends_on "polarssl" => :optional
  # OpenSSL is the crypto backend unless PolarSSL was requested.
  depends_on "openssl" if build.without? "polarssl"

  def install
    args = ["--prefix=#{prefix}"]

    if build.with? "polarssl"
      polarssl = Formula["polarssl"]
      args << "--with-crypto-library=polarssl"
      args << "--with-polarssl=#{polarssl.opt_prefix}"
    end

    system "./configure", *args
    system "make"

    # Only the client-side binaries are installed.
    bin.install "src/ss-local"
    bin.install "src/ss-tunnel"

    # Ship a sample config into etc (loaded by the launchd plist below).
    (buildpath/"shadowsocks-libev.json").write <<-EOS.undent
      {
          "server":"localhost",
          "server_port":8388,
          "local_port":1080,
          "password":"barfoo!",
          "timeout":600,
          "method":null
      }
    EOS
    etc.install "shadowsocks-libev.json"

    # Point the man page at the Homebrew config location.
    inreplace "shadowsocks-libev.8", "/etc/shadowsocks-libev/config.json", "#{etc}/shadowsocks-libev.json"
    man8.install "shadowsocks-libev.8"
  end

  plist_options :manual => "#{HOMEBREW_PREFIX}/opt/shadowsocks-libev/bin/ss-local -c #{HOMEBREW_PREFIX}/etc/shadowsocks-libev.json"

  # launchd job: keep ss-local running against the installed config.
  def plist; <<-EOS.undent
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
      <key>Label</key>
      <string>#{plist_name}</string>
      <key>ProgramArguments</key>
      <array>
        <string>#{opt_bin}/ss-local</string>
        <string>-c</string>
        <string>#{etc}/shadowsocks-libev.json</string>
      </array>
      <key>RunAtLoad</key>
      <true/>
      <key>KeepAlive</key>
      <dict>
        <key>SuccessfulExit</key>
        <false/>
      </dict>
    </dict>
    </plist>
    EOS
  end
end
|
# frozen_string_literal: true

require "cask/cask"

require "uri"

module Cask
  # Resolves a cask reference (instance, source string, URL, tap name or
  # path) to a loader object that can produce the Cask.
  module CaskLoader
    # Loads a cask from a string containing its Ruby source.
    class FromContentLoader
      attr_reader :content

      # Matches strings that look like a complete cask definition:
      # `cask("token") { ... }` or `cask "token" do ... end`.
      def self.can_load?(ref)
        return false unless ref.respond_to?(:to_str)

        content = ref.to_str

        token  = /(?:"[^"]*"|'[^']*')/
        curly  = /\(\s*#{token}\s*\)\s*\{.*\}/
        do_end = /\s+#{token}\s+do(?:\s*;\s*|\s+).*end/
        regex  = /\A\s*cask(?:#{curly.source}|#{do_end.source})\s*\Z/m

        content.match?(regex)
      end

      def initialize(content)
        @content = content.force_encoding("UTF-8")
      end

      def load
        instance_eval(content, __FILE__, __LINE__)
      end

      private

      # DSL entry point invoked while evaluating the source.
      def cask(header_token, **options, &block)
        Cask.new(header_token, **options, &block)
      end
    end

    # Loads a cask from a `.rb` file on disk.
    class FromPathLoader < FromContentLoader
      def self.can_load?(ref)
        path = Pathname(ref)
        path.extname == ".rb" && path.expand_path.exist?
      end

      attr_reader :token, :path

      def initialize(path)
        path = Pathname(path).expand_path

        @token = path.basename(".rb").to_s
        @path = path
      end

      def load
        raise CaskUnavailableError.new(token, "'#{path}' does not exist.") unless path.exist?
        raise CaskUnavailableError.new(token, "'#{path}' is not readable.") unless path.readable?
        raise CaskUnavailableError.new(token, "'#{path}' is not a file.") unless path.file?

        @content = IO.read(path)

        begin
          instance_eval(content, path).tap do |cask|
            raise CaskUnreadableError.new(token, "'#{path}' does not contain a cask.") unless cask.is_a?(Cask)
          end
        rescue NameError, ArgumentError, ScriptError => e
          # Surface eval/syntax problems as a readable cask error.
          raise CaskUnreadableError.new(token, e.message)
        end
      end

      private

      def cask(header_token, **options, &block)
        raise CaskTokenMismatchError.new(token, header_token) if token != header_token

        super(header_token, **options, sourcefile_path: path, &block)
      end
    end

    # Downloads a cask file from a URL and loads it from the cache path.
    class FromURILoader < FromPathLoader
      def self.can_load?(ref)
        uri_regex = ::URI::DEFAULT_PARSER.make_regexp
        return false unless ref.to_s.match?(Regexp.new('\A' + uri_regex.source + '\Z', uri_regex.options))

        # FIX: reject URI-shaped refs without a usable path component,
        # since `initialize` derives the cache filename from `url.path`.
        uri = URI(ref)
        return false unless uri
        return false unless uri.path

        true
      end

      attr_reader :url

      def initialize(url)
        @url = URI(url)
        super Cache.path/File.basename(@url.path)
      end

      def load
        path.dirname.mkpath

        begin
          ohai "Downloading #{url}."
          curl_download url, to: path
        rescue ErrorDuringExecution
          raise CaskUnavailableError.new(token, "Failed to download #{Formatter.url(url)}.")
        end

        super
      end
    end

    # Loads a cask from a path that belongs to a tap.
    class FromTapPathLoader < FromPathLoader
      def self.can_load?(ref)
        super && !Tap.from_path(ref).nil?
      end

      attr_reader :tap

      def initialize(path)
        @tap = Tap.from_path(path)
        super(path)
      end

      private

      def cask(*args, &block)
        super(*args, tap: tap, &block)
      end
    end

    # Loads a cask referenced as `user/repo/token`, installing the tap first.
    class FromTapLoader < FromTapPathLoader
      def self.can_load?(ref)
        ref.to_s.match?(HOMEBREW_TAP_CASK_REGEX)
      end

      def initialize(tapped_name)
        user, repo, token = tapped_name.split("/", 3)
        super Tap.fetch(user, repo).cask_dir/"#{token}.rb"
      end

      def load
        tap.install unless tap.installed?

        super
      end
    end

    # Wraps an already-instantiated Cask.
    class FromInstanceLoader
      attr_reader :cask

      def self.can_load?(ref)
        ref.is_a?(Cask)
      end

      def initialize(cask)
        @cask = cask
      end

      def load
        cask
      end
    end

    # Last-resort loader: accepts anything but raises on `load`.
    class NullLoader < FromPathLoader
      def self.can_load?(*)
        true
      end

      def initialize(ref)
        token = File.basename(ref, ".rb")
        super CaskLoader.default_path(token)
      end

      def load
        raise CaskUnavailableError.new(token, "No Cask with this name exists.")
      end
    end

    def self.path(ref)
      self.for(ref).path
    end

    def self.load(ref)
      self.for(ref).load
    end

    # Picks the first loader able to handle `ref`; falls back to the default
    # tap, then other taps, then the installed caskfile, then NullLoader.
    def self.for(ref)
      [
        FromInstanceLoader,
        FromContentLoader,
        FromURILoader,
        FromTapLoader,
        FromTapPathLoader,
        FromPathLoader,
      ].each do |loader_class|
        return loader_class.new(ref) if loader_class.can_load?(ref)
      end

      return FromTapPathLoader.new(default_path(ref)) if FromTapPathLoader.can_load?(default_path(ref))

      case (possible_tap_casks = tap_paths(ref)).count
      when 1
        return FromTapPathLoader.new(possible_tap_casks.first)
      when 2..Float::INFINITY
        loaders = possible_tap_casks.map(&FromTapPathLoader.method(:new))

        raise CaskError, <<~EOS
          Cask #{ref} exists in multiple taps:
          #{loaders.map { |loader| "  #{loader.tap}/#{loader.token}" }.join("\n")}
        EOS
      end

      possible_installed_cask = Cask.new(ref)
      return FromPathLoader.new(possible_installed_cask.installed_caskfile) if possible_installed_cask.installed?

      NullLoader.new(ref)
    end

    def self.default_path(token)
      Tap.default_cask_tap.cask_dir/"#{token.to_s.downcase}.rb"
    end

    def self.tap_paths(token)
      Tap.map { |t| t.cask_dir/"#{token.to_s.downcase}.rb" }
         .select(&:exist?)
    end
  end
end
cask_loader: fix URI detection.
Fixes #6678
# frozen_string_literal: true
require "cask/cask"
require "uri"
module Cask
module CaskLoader
class FromContentLoader
attr_reader :content
def self.can_load?(ref)
return false unless ref.respond_to?(:to_str)
content = ref.to_str
token = /(?:"[^"]*"|'[^']*')/
curly = /\(\s*#{token}\s*\)\s*\{.*\}/
do_end = /\s+#{token}\s+do(?:\s*;\s*|\s+).*end/
regex = /\A\s*cask(?:#{curly.source}|#{do_end.source})\s*\Z/m
content.match?(regex)
end
def initialize(content)
@content = content.force_encoding("UTF-8")
end
def load
instance_eval(content, __FILE__, __LINE__)
end
private
def cask(header_token, **options, &block)
Cask.new(header_token, **options, &block)
end
end
class FromPathLoader < FromContentLoader
def self.can_load?(ref)
path = Pathname(ref)
path.extname == ".rb" && path.expand_path.exist?
end
attr_reader :token, :path
def initialize(path)
path = Pathname(path).expand_path
@token = path.basename(".rb").to_s
@path = path
end
def load
raise CaskUnavailableError.new(token, "'#{path}' does not exist.") unless path.exist?
raise CaskUnavailableError.new(token, "'#{path}' is not readable.") unless path.readable?
raise CaskUnavailableError.new(token, "'#{path}' is not a file.") unless path.file?
@content = IO.read(path)
begin
instance_eval(content, path).tap do |cask|
raise CaskUnreadableError.new(token, "'#{path}' does not contain a cask.") unless cask.is_a?(Cask)
end
rescue NameError, ArgumentError, ScriptError => e
raise CaskUnreadableError.new(token, e.message)
end
end
private
def cask(header_token, **options, &block)
raise CaskTokenMismatchError.new(token, header_token) if token != header_token
super(header_token, **options, sourcefile_path: path, &block)
end
end
class FromURILoader < FromPathLoader
def self.can_load?(ref)
uri_regex = ::URI::DEFAULT_PARSER.make_regexp
return false unless ref.to_s.match?(Regexp.new('\A' + uri_regex.source + '\Z', uri_regex.options))
uri = URI(ref)
return false unless uri
return false unless uri.path
true
end
attr_reader :url
def initialize(url)
@url = URI(url)
super Cache.path/File.basename(@url.path)
end
def load
path.dirname.mkpath
begin
ohai "Downloading #{url}."
curl_download url, to: path
rescue ErrorDuringExecution
raise CaskUnavailableError.new(token, "Failed to download #{Formatter.url(url)}.")
end
super
end
end
class FromTapPathLoader < FromPathLoader
def self.can_load?(ref)
super && !Tap.from_path(ref).nil?
end
attr_reader :tap
def initialize(path)
@tap = Tap.from_path(path)
super(path)
end
private
def cask(*args, &block)
super(*args, tap: tap, &block)
end
end
class FromTapLoader < FromTapPathLoader
def self.can_load?(ref)
ref.to_s.match?(HOMEBREW_TAP_CASK_REGEX)
end
def initialize(tapped_name)
user, repo, token = tapped_name.split("/", 3)
super Tap.fetch(user, repo).cask_dir/"#{token}.rb"
end
def load
tap.install unless tap.installed?
super
end
end
# Wraps an already-instantiated Cask so callers can treat it through the
# same loader interface as every other source.
class FromInstanceLoader
  attr_reader :cask

  def self.can_load?(ref)
    ref.is_a?(Cask)
  end

  def initialize(cask)
    @cask = cask
  end

  # Nothing to read or evaluate; just hand back the instance.
  def load
    cask
  end
end
# Last-resort loader: accepts any reference but always raises on #load, so
# unknown casks fail with a consistent CaskUnavailableError.
class NullLoader < FromPathLoader
  def self.can_load?(*)
    true
  end

  def initialize(ref)
    token = File.basename(ref, ".rb")
    super CaskLoader.default_path(token)
  end

  def load
    raise CaskUnavailableError.new(token, "No Cask with this name exists.")
  end
end
# Resolves +ref+ to the on-disk path of its cask file.
def self.path(ref)
  self.for(ref).path
end

# Resolves +ref+ to a loaded Cask instance.
def self.load(ref)
  self.for(ref).load
end
# Picks the loader responsible for +ref+.
#
# Resolution order: an existing Cask instance, literal cask source, a URI,
# a fully-qualified tapped name, a tap path, then a plain path. Failing
# those, fall back to the default tap, then any single tap containing the
# token (ambiguity raises), then an installed caskfile, and finally the
# NullLoader (which raises on #load).
def self.for(ref)
  candidates = [
    FromInstanceLoader,
    FromContentLoader,
    FromURILoader,
    FromTapLoader,
    FromTapPathLoader,
    FromPathLoader,
  ]
  loader_class = candidates.find { |candidate| candidate.can_load?(ref) }
  return loader_class.new(ref) if loader_class

  return FromTapPathLoader.new(default_path(ref)) if FromTapPathLoader.can_load?(default_path(ref))

  possible_tap_casks = tap_paths(ref)
  if possible_tap_casks.count == 1
    return FromTapPathLoader.new(possible_tap_casks.first)
  elsif possible_tap_casks.count > 1
    # The same token exists in several taps; refuse to guess.
    tap_loaders = possible_tap_casks.map(&FromTapPathLoader.method(:new))
    raise CaskError, <<~EOS
      Cask #{ref} exists in multiple taps:
      #{tap_loaders.map { |loader| " #{loader.tap}/#{loader.token}" }.join("\n")}
    EOS
  end

  possible_installed_cask = Cask.new(ref)
  return FromPathLoader.new(possible_installed_cask.installed_caskfile) if possible_installed_cask.installed?

  NullLoader.new(ref)
end
# Path of the cask file for +token+ in the default cask tap.
def self.default_path(token)
  Tap.default_cask_tap.cask_dir/"#{token.to_s.downcase}.rb"
end

# Every existing cask file named +token+ across all installed taps.
def self.tap_paths(token)
  filename = "#{token.to_s.downcase}.rb"
  Tap.map { |t| t.cask_dir/filename }
     .select(&:exist?)
end
end
end
|
require 'spec_helper'
# Specs for the X-Ray Faraday middleware: every HTTP call through the
# client should be wrapped in a subsegment, propagate the X-Amzn-Trace-Id
# header downstream, and record request/response metadata on the segment.
RSpec.describe Aws::Xray::Faraday do
  let(:stubs) do
    Faraday::Adapter::Test::Stubs.new do |stub|
      # Echoes the propagated trace header back in the response body so the
      # specs can assert on exactly what was sent downstream.
      stub.get('/foo') { |env| [200, {}, env.request_headers['X-Amzn-Trace-Id']] }
    end
  end
  let(:client) do
    Faraday.new(headers: headers) do |builder|
      builder.use Aws::Xray::Faraday
      builder.adapter :test, stubs
    end
  end
  let(:headers) { { 'Host' => 'target-app' } }
  # Segment JSON is written to this in-memory socket for inspection.
  let(:xray_client) { Aws::Xray::Client.new(sock: io) }
  let(:io) { Aws::Xray::TestSocket.new }
  let(:trace) { Aws::Xray::Trace.new(root: '1-67891233-abcdef012345678912345678') }

  context 'without name option' do
    it 'uses host header value' do
      res = Aws::Xray::Context.with_new_context('test-app', xray_client, trace) do
        Aws::Xray::Context.current.base_trace do
          client.get('/foo')
        end
      end
      expect(res.status).to eq(200)
      expect(res.headers).to eq({})
      io.rewind
      # Four lines arrive on the socket: header + body for the subsegment,
      # then header + body for the parent segment.
      sent_jsons = io.read.split("\n")
      expect(sent_jsons.size).to eq(4)
      header_json, body_json = sent_jsons[0..1]
      _, parent_body_json = sent_jsons[2..3]
      expect(JSON.parse(header_json)).to eq("format" => "json", "version" => 1)
      body = JSON.parse(body_json)
      parent_body = JSON.parse(parent_body_json)
      expect(body['name']).to eq('target-app')
      expect(body['id']).to match(/\A[0-9a-fA-F]{16}\z/)
      expect(body['parent_id']).to eq(parent_body['id'])
      expect(body['type']).to eq('subsegment')
      expect(body['trace_id']).to eq('1-67891233-abcdef012345678912345678')
      expect(Float(body['start_time'])).not_to eq(0)
      expect(Float(body['end_time'])).not_to eq(0)
      request_part = body['http']['request']
      expect(request_part['method']).to eq('GET')
      expect(request_part['url']).to eq('http:/foo')
      expect(request_part['user_agent']).to match(/Faraday/)
      expect(request_part['client_ip']).to be_nil
      expect(request_part).not_to have_key('x_forwarded_for')
      expect(request_part['traced']).to eq(false)
      expect(body['http']['response']['status']).to eq(200)
      expect(body['http']['response']['content_length']).to be_nil
      # The stub echoed the header back, so the body shows what the
      # middleware propagated downstream.
      expect(res.body).to eq("Root=1-67891233-abcdef012345678912345678;Sampled=1;Parent=#{body['id']}")
    end
  end

  context 'when name option is given via builder' do
    it 'sets given name to trace name' do
      client = Faraday.new do |builder|
        builder.use Aws::Xray::Faraday, 'another-name'
        builder.adapter :test, stubs
      end
      res = Aws::Xray::Context.with_new_context('test-app', xray_client, trace) do
        Aws::Xray::Context.current.base_trace do
          client.get('/foo')
        end
      end
      expect(res.status).to eq(200)
      expect(res.headers).to eq({})
      io.rewind
      sent_jsons = io.read.split("\n")
      expect(sent_jsons.size).to eq(4)
      _, body_json = sent_jsons[0..1]
      body = JSON.parse(body_json)
      expect(body['name']).to eq('another-name')
    end
  end

  context 'when down-stream returns error' do
    context '5xx' do
      let(:stubs) do
        Faraday::Adapter::Test::Stubs.new do |stub|
          stub.get('/foo') { |env| [500, {}, 'fault'] }
        end
      end

      it 'traces remote fault' do
        res = Aws::Xray::Context.with_new_context('test-app', xray_client, trace) do
          Aws::Xray::Context.current.base_trace do
            client.get('/foo')
          end
        end
        expect(res.status).to eq(500)
        io.rewind
        sent_jsons = io.read.split("\n")
        _, body_json = sent_jsons[0..1]
        body = JSON.parse(body_json)
        expect(body['name']).to eq('target-app')
        expect(body['id']).to match(/\A[0-9a-fA-F]{16}\z/)
        expect(body['type']).to eq('subsegment')
        expect(body['trace_id']).to eq('1-67891233-abcdef012345678912345678')
        # A downstream 5xx is recorded as a remote fault, not an error.
        expect(body['error']).to eq(false)
        expect(body['throttle']).to eq(false)
        expect(body['fault']).to eq(true)
        e = body['cause']['exceptions'].first
        expect(e['id']).to match(/\A[0-9a-fA-F]{16}\z/)
        expect(e['message']).to eq('Got 5xx')
        expect(e['remote']).to eq(true)
        expect(e['stack'].size).to eq(10)
        expect(e['stack'].first['path']).to end_with('.rb')
      end
    end

    context '499' do
      # TODO
    end

    context '4xx' do
      # TODO
    end
  end

  context 'when API call raises an error' do
    let(:stubs) do
      Faraday::Adapter::Test::Stubs.new do |stub|
        stub.get('/foo') { |env| raise('test_error') }
      end
    end

    it 'traces remote fault' do
      expect {
        Aws::Xray::Context.with_new_context('test-app', xray_client, trace) do
          Aws::Xray::Context.current.base_trace do
            client.get('/foo')
          end
        end
      }.to raise_error('test_error')
      io.rewind
      sent_jsons = io.read.split("\n")
      expect(sent_jsons.size).to eq(4)
      # The subsegment records the raised exception as a local fault.
      sub_body = JSON.parse(sent_jsons[1])
      expect(sub_body['name']).to eq('target-app')
      expect(sub_body['id']).to match(/\A[0-9a-fA-F]{16}\z/)
      expect(sub_body['type']).to eq('subsegment')
      expect(sub_body['trace_id']).to eq('1-67891233-abcdef012345678912345678')
      expect(sub_body['error']).to eq(false)
      expect(sub_body['throttle']).to eq(false)
      expect(sub_body['fault']).to eq(true)
      e = sub_body['cause']['exceptions'].first
      expect(e['id']).to match(/\A[0-9a-fA-F]{16}\z/)
      expect(e['message']).to eq('test_error')
      expect(e['type']).to eq('RuntimeError')
      expect(e['remote']).to eq(false)
      expect(e['stack'].size).to eq(10)
      expect(e['stack'].first['path']).to end_with('.rb')
      # The parent segment carries the same exception (and no 'type' key,
      # because it is a top-level segment rather than a subsegment).
      body = JSON.parse(sent_jsons[3])
      expect(body['name']).to eq('test-app')
      expect(body['id']).to match(/\A[0-9a-fA-F]{16}\z/)
      expect(body).not_to have_key('type')
      expect(body['trace_id']).to eq('1-67891233-abcdef012345678912345678')
      expect(body['error']).to eq(false)
      expect(body['throttle']).to eq(false)
      expect(body['fault']).to eq(true)
      e = body['cause']['exceptions'].first
      expect(e['id']).to match(/\A[0-9a-fA-F]{16}\z/)
      expect(e['message']).to eq('test_error')
      expect(e['type']).to eq('RuntimeError')
      expect(e['remote']).to eq(false)
      expect(e['stack'].size).to eq(10)
      expect(e['stack'].first['path']).to end_with('.rb')
    end
  end

  context 'when tracing has not been started' do
    it 'does not raise any errors' do
      response = nil
      expect { response = client.get('/foo') }.not_to raise_error
      expect(response.status).to eq(200)
    end
  end
end
Add test to check name parameter of Faraday class
require 'spec_helper'
# Specs for the X-Ray Faraday middleware: every HTTP call through the
# client should be wrapped in a subsegment, propagate the X-Amzn-Trace-Id
# header downstream, and record request/response metadata on the segment.
RSpec.describe Aws::Xray::Faraday do
  let(:stubs) do
    Faraday::Adapter::Test::Stubs.new do |stub|
      # Echoes the propagated trace header back in the response body so the
      # specs can assert on exactly what was sent downstream.
      stub.get('/foo') { |env| [200, {}, env.request_headers['X-Amzn-Trace-Id']] }
    end
  end
  let(:client) do
    Faraday.new(headers: headers) do |builder|
      builder.use Aws::Xray::Faraday
      builder.adapter :test, stubs
    end
  end
  let(:headers) { { 'Host' => 'target-app' } }
  # Segment JSON is written to this in-memory socket for inspection.
  let(:xray_client) { Aws::Xray::Client.new(sock: io) }
  let(:io) { Aws::Xray::TestSocket.new }
  let(:trace) { Aws::Xray::Trace.new(root: '1-67891233-abcdef012345678912345678') }

  context 'without name option' do
    it 'uses host header value' do
      res = Aws::Xray::Context.with_new_context('test-app', xray_client, trace) do
        Aws::Xray::Context.current.base_trace do
          client.get('/foo')
        end
      end
      expect(res.status).to eq(200)
      expect(res.headers).to eq({})
      io.rewind
      # Four lines arrive on the socket: header + body for the subsegment,
      # then header + body for the parent segment.
      sent_jsons = io.read.split("\n")
      expect(sent_jsons.size).to eq(4)
      header_json, body_json = sent_jsons[0..1]
      _, parent_body_json = sent_jsons[2..3]
      expect(JSON.parse(header_json)).to eq("format" => "json", "version" => 1)
      body = JSON.parse(body_json)
      parent_body = JSON.parse(parent_body_json)
      expect(body['name']).to eq('target-app')
      expect(body['id']).to match(/\A[0-9a-fA-F]{16}\z/)
      expect(body['parent_id']).to eq(parent_body['id'])
      expect(body['type']).to eq('subsegment')
      expect(body['trace_id']).to eq('1-67891233-abcdef012345678912345678')
      expect(Float(body['start_time'])).not_to eq(0)
      expect(Float(body['end_time'])).not_to eq(0)
      request_part = body['http']['request']
      expect(request_part['method']).to eq('GET')
      expect(request_part['url']).to eq('http:/foo')
      expect(request_part['user_agent']).to match(/Faraday/)
      expect(request_part['client_ip']).to be_nil
      expect(request_part).not_to have_key('x_forwarded_for')
      expect(request_part['traced']).to eq(false)
      expect(body['http']['response']['status']).to eq(200)
      expect(body['http']['response']['content_length']).to be_nil
      # The stub echoed the header back, so the body shows what the
      # middleware propagated downstream.
      expect(res.body).to eq("Root=1-67891233-abcdef012345678912345678;Sampled=1;Parent=#{body['id']}")
    end
  end

  context 'when name option is given via builder' do
    it 'sets given name to trace name' do
      client = Faraday.new do |builder|
        builder.use Aws::Xray::Faraday, 'another-name'
        builder.adapter :test, stubs
      end
      res = Aws::Xray::Context.with_new_context('test-app', xray_client, trace) do
        Aws::Xray::Context.current.base_trace do
          client.get('/foo')
        end
      end
      expect(res.status).to eq(200)
      expect(res.headers).to eq({})
      io.rewind
      sent_jsons = io.read.split("\n")
      expect(sent_jsons.size).to eq(4)
      _, body_json = sent_jsons[0..1]
      body = JSON.parse(body_json)
      expect(body['name']).to eq('another-name')
    end
  end

  context 'when down-stream returns error' do
    context '5xx' do
      let(:stubs) do
        Faraday::Adapter::Test::Stubs.new do |stub|
          stub.get('/foo') { |env| [500, {}, 'fault'] }
        end
      end

      it 'traces remote fault' do
        res = Aws::Xray::Context.with_new_context('test-app', xray_client, trace) do
          Aws::Xray::Context.current.base_trace do
            client.get('/foo')
          end
        end
        expect(res.status).to eq(500)
        io.rewind
        sent_jsons = io.read.split("\n")
        _, body_json = sent_jsons[0..1]
        body = JSON.parse(body_json)
        expect(body['name']).to eq('target-app')
        expect(body['id']).to match(/\A[0-9a-fA-F]{16}\z/)
        expect(body['type']).to eq('subsegment')
        expect(body['trace_id']).to eq('1-67891233-abcdef012345678912345678')
        # A downstream 5xx is recorded as a remote fault, not an error.
        expect(body['error']).to eq(false)
        expect(body['throttle']).to eq(false)
        expect(body['fault']).to eq(true)
        e = body['cause']['exceptions'].first
        expect(e['id']).to match(/\A[0-9a-fA-F]{16}\z/)
        expect(e['message']).to eq('Got 5xx')
        expect(e['remote']).to eq(true)
        expect(e['stack'].size).to eq(10)
        expect(e['stack'].first['path']).to end_with('.rb')
      end
    end

    context '499' do
      # TODO
    end

    context '4xx' do
      # TODO
    end
  end

  context 'when API call raises an error' do
    let(:stubs) do
      Faraday::Adapter::Test::Stubs.new do |stub|
        stub.get('/foo') { |env| raise('test_error') }
      end
    end

    it 'traces remote fault' do
      expect {
        Aws::Xray::Context.with_new_context('test-app', xray_client, trace) do
          Aws::Xray::Context.current.base_trace do
            client.get('/foo')
          end
        end
      }.to raise_error('test_error')
      io.rewind
      sent_jsons = io.read.split("\n")
      expect(sent_jsons.size).to eq(4)
      # The subsegment records the raised exception as a local fault.
      sub_body = JSON.parse(sent_jsons[1])
      expect(sub_body['name']).to eq('target-app')
      expect(sub_body['id']).to match(/\A[0-9a-fA-F]{16}\z/)
      expect(sub_body['type']).to eq('subsegment')
      expect(sub_body['trace_id']).to eq('1-67891233-abcdef012345678912345678')
      expect(sub_body['error']).to eq(false)
      expect(sub_body['throttle']).to eq(false)
      expect(sub_body['fault']).to eq(true)
      e = sub_body['cause']['exceptions'].first
      expect(e['id']).to match(/\A[0-9a-fA-F]{16}\z/)
      expect(e['message']).to eq('test_error')
      expect(e['type']).to eq('RuntimeError')
      expect(e['remote']).to eq(false)
      expect(e['stack'].size).to eq(10)
      expect(e['stack'].first['path']).to end_with('.rb')
      # The parent segment carries the same exception (and no 'type' key,
      # because it is a top-level segment rather than a subsegment).
      body = JSON.parse(sent_jsons[3])
      expect(body['name']).to eq('test-app')
      expect(body['id']).to match(/\A[0-9a-fA-F]{16}\z/)
      expect(body).not_to have_key('type')
      expect(body['trace_id']).to eq('1-67891233-abcdef012345678912345678')
      expect(body['error']).to eq(false)
      expect(body['throttle']).to eq(false)
      expect(body['fault']).to eq(true)
      e = body['cause']['exceptions'].first
      expect(e['id']).to match(/\A[0-9a-fA-F]{16}\z/)
      expect(e['message']).to eq('test_error')
      expect(e['type']).to eq('RuntimeError')
      expect(e['remote']).to eq(false)
      expect(e['stack'].size).to eq(10)
      expect(e['stack'].first['path']).to end_with('.rb')
    end
  end

  context 'without Host header' do
    let(:client) do
      Faraday.new do |builder|
        builder.use Aws::Xray::Faraday, 'another-app'
        builder.adapter :test, stubs
      end
    end

    it 'accepts name parameter' do
      res = Aws::Xray::Context.with_new_context('test-app', xray_client, trace) do
        Aws::Xray::Context.current.base_trace do
          client.get('/foo')
        end
      end
      expect(res.status).to eq(200)
      io.rewind
      sent_jsons = io.read.split("\n")
      expect(sent_jsons.size).to eq(4)
      body = JSON.parse(sent_jsons[1])
      expect(body['name']).to eq('another-app')
    end
  end

  context 'when tracing has not been started' do
    it 'does not raise any errors' do
      response = nil
      expect { response = client.get('/foo') }.not_to raise_error
      expect(response.status).to eq(200)
    end
  end
end
|
require 'spec_helper'
# Specs for Gemlock: lockfile parsing, latest-version lookup, outdated-gem
# detection (bulk endpoint plus per-gem fallback), config/email handling,
# version-difference classification, and the background check thread.
#
# Fixes in this revision: typos in several spec descriptions
# ("specificed", "sets an flag", "unil", "looks up and return",
# "the files does not exist"). No executable code changed.
describe Gemlock do
  describe ".locked_gemfile_specs" do
    it "outputs the list of gems & version requirements" do
      Gemlock.stubs(:lockfile).returns((File.join(File.dirname(__FILE__), 'fixtures', 'Gemfile.lock')))
      specs = Gemlock.locked_gemfile_specs
      expected = [["coffee-rails", "3.1.0"], ["jquery-rails", "1.0.16"],
                  ["json", "1.5.0"], ["rails", "3.1.0"],
                  ["ruby-debug", "0.10.4"], ["sass-rails", "3.1.0"],
                  ["sqlite3", "1.3.4"], ["uglifier", "1.0.4"],
                  ["unicorn", "3.1.0"]]
      specs.should match_name_and_versions_of expected
    end
  end

  describe ".lockfile" do
    it "loads Gemfile.lock from the Rails root if Rails is defined" do
      # Define a throwaway Rails module so Gemlock sees a Rails root.
      module Rails
        def self.root
          Pathname.new(File.expand_path('fixtures', File.dirname(__FILE__)))
        end
      end

      expected_path = Pathname.new(File.expand_path(File.join('fixtures', 'Gemfile.lock'),
                                                    File.dirname(__FILE__)))
      Gemlock.lockfile.should eql expected_path

      # Undefine Rails module
      Object.send(:remove_const, :Rails)
    end

    it "loads Gemfile.lock from the default Bundler location if Rails is not defined" do
      expected_path = Pathname.new(File.expand_path(File.join('spec', 'fixtures', 'Gemfile.lock')))
      Gemlock.lockfile.should eql expected_path
    end
  end

  describe ".lookup_version" do
    use_vcr_cassette

    it "looks up and returns the latest version of a given gem" do
      version = Gemlock.lookup_version("rails")
      version.should eql "3.1.1"
    end
  end

  describe ".check_gems_individually" do
    use_vcr_cassette

    it "returns a hash of outdated gems & versions specified in config" do
      Gemlock.stubs(:config).returns((File.join(File.dirname(__FILE__), 'fixtures', 'gemlock.yml')))
      # Build a name => version hash from the fixture lockfile.
      gems = Gemlock.locked_gemfile_specs.inject({}) do |hash, spec|
        hash[spec.name] = spec.version.to_s
        hash
      end
      expected = {'coffee-rails' => { :current => '3.1.0',
                                      :latest => '3.1.1' },
                  'sass-rails' => { :current => '3.1.0',
                                    :latest => '3.1.4' },
                  'unicorn' => { :current => '3.1.0',
                                 :latest => '4.1.1' },
                  'rails' => { :current => '3.1.0',
                               :latest => '3.1.1'} }
      Gemlock.check_gems_individually(gems).should eql expected
    end
  end

  describe ".outdated" do
    use_vcr_cassette

    before do
      Gemlock.stubs(:lockfile).returns((File.join(File.dirname(__FILE__), 'fixtures', 'Gemfile.lock')))
    end

    # Mirrors the fixture lockfile's name => version pairs as sent to the
    # update server.
    let(:in_spec) { {"coffee-rails" => "3.1.0",
                     "jquery-rails" => "1.0.16",
                     "json" => "1.5.0",
                     "rails" => "3.1.0",
                     "ruby-debug" => "0.10.4",
                     "sass-rails" => "3.1.0",
                     "sqlite3" => "1.3.4",
                     "uglifier" => "1.0.4",
                     "unicorn" => "3.1.0"} }

    it "returns a hash of all outdated gems" do
      expected = {'coffee-rails' => { :current => '3.1.0',
                                      :latest => '3.1.1' },
                  'sass-rails' => { :current => '3.1.0',
                                    :latest => '3.1.4' },
                  'unicorn' => { :current => '3.1.0',
                                 :latest => '4.1.1' },
                  'json' => { :current => '1.5.0',
                              :latest => '1.6.1' },
                  'rails' => { :current => '3.1.0',
                               :latest => '3.1.1'} }
      Gemlock.outdated.should eql expected
    end

    it "checks for each gem individually if the bulk check fails" do
      # A gateway timeout from the bulk endpoint triggers per-gem fallback.
      RestClient.expects(:get).with("http://gemlock.herokuapp.com/ruby_gems/updates.json",
                                    {:params => {:gems => in_spec.to_json}}).raises(RestClient::GatewayTimeout)
      Gemlock.expects(:check_gems_individually).with(in_spec)
      Gemlock.outdated
    end

    it "sets a flag if it is an automatic check" do
      RestClient.expects(:get).with("http://gemlock.herokuapp.com/ruby_gems/updates.json",
                                    {:params => {:gems => in_spec.to_json,
                                                 :automatic => true}}).returns('{}')
      Gemlock.outdated(true)
    end
  end

  describe ".config" do
    it "loads gemlock.yml from the config directory if Rails is defined" do
      module Rails
        def self.root
          Pathname.new(File.dirname(__FILE__))
        end
      end

      expected_path = Pathname.new(File.dirname(__FILE__)).join('config', 'gemlock.yml')
      File.stubs(:exists?).with(expected_path).returns(true)
      Gemlock.config.should eql expected_path

      # Undefine Rails module
      Object.send(:remove_const, :Rails)
    end

    it "is nil if Rails is defined and the file does not exist" do
      module Rails
        def self.root
          Pathname.new(File.dirname(__FILE__))
        end
      end

      Gemlock.parsed_config.should be_nil
      Object.send(:remove_const, :Rails)
    end

    it "is nil if Rails is not defined and the file exists" do
      Gemlock.config.should be_nil
    end
  end

  describe ".parsed_config" do
    it "returns nil if the config file is not present" do
      Gemlock.parsed_config.should be_nil
    end

    it "returns a hash containing the user's email if config file is present" do
      Gemlock.stubs(:config).returns((File.join(File.dirname(__FILE__), 'fixtures', 'gemlock.yml')))
      Gemlock.parsed_config["email"].should eql "tester@example.com"
    end
  end

  describe ".email" do
    it "returns the email in the config if present and valid" do
      Gemlock.stubs(:parsed_config).returns({'email' => 'hi@mikeskalnik.com'})
      Gemlock.email.should eql 'hi@mikeskalnik.com'
    end

    it "returns nil if the email in the config is invalid" do
      Gemlock.stubs(:parsed_config).returns({'email' => 'd@er@p.com'})
      Gemlock.email.should be_nil
    end

    it "returns nil if there is no config" do
      Gemlock.stubs(:parsed_config).returns(nil)
      Gemlock.email.should be_nil
    end
  end

  describe ".difference" do
    it "returns 'major' if there is a major version difference between the two gem versions" do
      Gemlock.difference("2.0.0", "3.0.0").should eql "major"
      Gemlock.difference("2.5.10", "3.1.0").should eql "major"
      Gemlock.difference("3.1.10", "2.5.8").should eql "major"
      Gemlock.difference("3.0", "2.0" ).should eql "major"
    end

    it "returns 'minor' if there is a minor version difference between the two gem versions" do
      Gemlock.difference("3.0.0", "3.1.0").should eql "minor"
      Gemlock.difference("3.1.0", "3.2.1").should eql "minor"
      Gemlock.difference("3.1.0", "3.0.0").should eql "minor"
    end

    it "returns 'patch' if there is a patch version difference between the two gem versions" do
      Gemlock.difference("3.1.0", "3.1.1").should eql "patch"
      Gemlock.difference("0.0.2", "0.0.1").should eql "patch"
    end

    it "returns 'none' if there is no difference" do
      Gemlock.difference("0.0.0", "0.0.0").should eql "none"
      Gemlock.difference("0.0.1", "0.0.1").should eql "none"
      Gemlock.difference("0.1.0", "0.1.0").should eql "none"
      Gemlock.difference("1.0.0", "1.0.0").should eql "none"
    end
  end

  describe '.initializer' do
    it "makes a thread" do
      Gemlock.stubs(:outdated).returns([])
      capture_stdout do
        @thread = Gemlock.initializer
        @thread.class.should eql Thread
        @thread.kill
      end
    end

    it "checks for updates" do
      Gemlock.expects(:outdated).returns([])
      capture_stdout do
        @thread = Gemlock.initializer
        # Wait for the checker thread to finish its first pass and sleep.
        while @thread.status != 'sleep' do
          sleep 0.5
        end
        @thread.kill
      end
    end
  end

  describe ".process_version" do
    it "splits a version string into an array" do
      Gemlock.send(:process_version, "3.0.0").class.should eql Array
    end

    it "appends missing zeros to the end of a version if not given" do
      Gemlock.send(:process_version, "3").should eql [3, 0, 0]
      Gemlock.send(:process_version, "3.0").should eql [3, 0, 0]
    end
  end

  describe ".update_interval" do
    it "returns the number of seconds in a week if config_file is not present, or interval is not specified" do
      Gemlock.update_interval.should eql 60*60*24*7
      Gemlock.stubs(:parsed_config).returns({"email"=>"tester@example.com"})
      Gemlock.update_interval.should eql 60*60*24*7
    end

    it "returns the number of seconds until the next number of hours as given" do
      Gemlock.stubs(:parsed_config).returns({"interval" => ["8 hours"]})
      Gemlock.update_interval.should eql 60*60*8
    end

    it "returns the number of seconds until the next number of days as given" do
      Gemlock.stubs(:parsed_config).returns({"interval" => ["4 days"]})
      Gemlock.update_interval.should eql 60*60*24*4
    end

    it "returns the number of seconds until the next number of weeks as given" do
      Gemlock.stubs(:parsed_config).returns({"interval" => ["2 weeks"]})
      Gemlock.update_interval.should eql 60*60*24*7*2
    end

    it "returns the number of seconds until the next number of months as given" do
      Gemlock.stubs(:parsed_config).returns({"interval" => ["3 months"]})
      # A month is approximated as 30 days.
      Gemlock.update_interval.should eql 60*60*24*30*3
    end
  end

  # Captures everything written to $stdout during the block and returns
  # the StringIO it was collected into; always restores $stdout.
  def capture_stdout
    io = StringIO.new
    $stdout = io
    yield
    return io
  ensure
    $stdout = STDOUT
  end
end
[#18078889] Add test for email param being sent in Gemlock.outdated
require 'spec_helper'
# Specs for Gemlock: lockfile parsing, latest-version lookup, outdated-gem
# detection (bulk endpoint, email reporting, per-gem fallback), config and
# email handling, version-difference classification, and the background
# check thread.
#
# Fixes in this revision: typos in several spec descriptions
# ("specificed", "sets an flag", "unil", "looks up and return",
# "the files does not exist"). No executable code changed.
describe Gemlock do
  describe ".locked_gemfile_specs" do
    it "outputs the list of gems & version requirements" do
      Gemlock.stubs(:lockfile).returns((File.join(File.dirname(__FILE__), 'fixtures', 'Gemfile.lock')))
      specs = Gemlock.locked_gemfile_specs
      expected = [["coffee-rails", "3.1.0"], ["jquery-rails", "1.0.16"],
                  ["json", "1.5.0"], ["rails", "3.1.0"],
                  ["ruby-debug", "0.10.4"], ["sass-rails", "3.1.0"],
                  ["sqlite3", "1.3.4"], ["uglifier", "1.0.4"],
                  ["unicorn", "3.1.0"]]
      specs.should match_name_and_versions_of expected
    end
  end

  describe ".lockfile" do
    it "loads Gemfile.lock from the Rails root if Rails is defined" do
      # Define a throwaway Rails module so Gemlock sees a Rails root.
      module Rails
        def self.root
          Pathname.new(File.expand_path('fixtures', File.dirname(__FILE__)))
        end
      end

      expected_path = Pathname.new(File.expand_path(File.join('fixtures', 'Gemfile.lock'),
                                                    File.dirname(__FILE__)))
      Gemlock.lockfile.should eql expected_path

      # Undefine Rails module
      Object.send(:remove_const, :Rails)
    end

    it "loads Gemfile.lock from the default Bundler location if Rails is not defined" do
      expected_path = Pathname.new(File.expand_path(File.join('spec', 'fixtures', 'Gemfile.lock')))
      Gemlock.lockfile.should eql expected_path
    end
  end

  describe ".lookup_version" do
    use_vcr_cassette

    it "looks up and returns the latest version of a given gem" do
      version = Gemlock.lookup_version("rails")
      version.should eql "3.1.1"
    end
  end

  describe ".check_gems_individually" do
    use_vcr_cassette

    it "returns a hash of outdated gems & versions specified in config" do
      Gemlock.stubs(:config).returns((File.join(File.dirname(__FILE__), 'fixtures', 'gemlock.yml')))
      # Build a name => version hash from the fixture lockfile.
      gems = Gemlock.locked_gemfile_specs.inject({}) do |hash, spec|
        hash[spec.name] = spec.version.to_s
        hash
      end
      expected = {'coffee-rails' => { :current => '3.1.0',
                                      :latest => '3.1.1' },
                  'sass-rails' => { :current => '3.1.0',
                                    :latest => '3.1.4' },
                  'unicorn' => { :current => '3.1.0',
                                 :latest => '4.1.1' },
                  'rails' => { :current => '3.1.0',
                               :latest => '3.1.1'} }
      Gemlock.check_gems_individually(gems).should eql expected
    end
  end

  describe ".outdated" do
    use_vcr_cassette

    before do
      Gemlock.stubs(:lockfile).returns((File.join(File.dirname(__FILE__), 'fixtures', 'Gemfile.lock')))
    end

    # Mirrors the fixture lockfile's name => version pairs as sent to the
    # update server.
    let(:in_spec) { {"coffee-rails" => "3.1.0",
                     "jquery-rails" => "1.0.16",
                     "json" => "1.5.0",
                     "rails" => "3.1.0",
                     "ruby-debug" => "0.10.4",
                     "sass-rails" => "3.1.0",
                     "sqlite3" => "1.3.4",
                     "uglifier" => "1.0.4",
                     "unicorn" => "3.1.0"} }

    it "returns a hash of all outdated gems" do
      expected = {'coffee-rails' => { :current => '3.1.0',
                                      :latest => '3.1.1' },
                  'sass-rails' => { :current => '3.1.0',
                                    :latest => '3.1.4' },
                  'unicorn' => { :current => '3.1.0',
                                 :latest => '4.1.1' },
                  'json' => { :current => '1.5.0',
                              :latest => '1.6.1' },
                  'rails' => { :current => '3.1.0',
                               :latest => '3.1.1'} }
      Gemlock.outdated.should eql expected
    end

    it "checks for each gem individually if the bulk check fails" do
      # A gateway timeout from the bulk endpoint triggers per-gem fallback.
      RestClient.expects(:get).with("http://gemlock.herokuapp.com/ruby_gems/updates.json",
                                    {:params => {:gems => in_spec.to_json}}).raises(RestClient::GatewayTimeout)
      Gemlock.expects(:check_gems_individually).with(in_spec)
      Gemlock.outdated
    end

    it "sets a flag if it is an automatic check" do
      RestClient.expects(:get).with("http://gemlock.herokuapp.com/ruby_gems/updates.json",
                                    {:params => {:gems => in_spec.to_json,
                                                 :automatic => true}}).returns('{}')
      Gemlock.outdated(true)
    end

    it "sends the email address in config to the server if present" do
      Gemlock.stubs(:parsed_config).returns({'email' => 'hi@mikeskalnik.com'})
      RestClient.expects(:get).with("http://gemlock.herokuapp.com/ruby_gems/updates.json",
                                    {:params => {:gems => in_spec.to_json,
                                                 :email => 'hi@mikeskalnik.com'}}).returns('{}')
      Gemlock.outdated
    end
  end

  describe ".config" do
    it "loads gemlock.yml from the config directory if Rails is defined" do
      module Rails
        def self.root
          Pathname.new(File.dirname(__FILE__))
        end
      end

      expected_path = Pathname.new(File.dirname(__FILE__)).join('config', 'gemlock.yml')
      File.stubs(:exists?).with(expected_path).returns(true)
      Gemlock.config.should eql expected_path

      # Undefine Rails module
      Object.send(:remove_const, :Rails)
    end

    it "is nil if Rails is defined and the file does not exist" do
      module Rails
        def self.root
          Pathname.new(File.dirname(__FILE__))
        end
      end

      Gemlock.parsed_config.should be_nil
      Object.send(:remove_const, :Rails)
    end

    it "is nil if Rails is not defined and the file exists" do
      Gemlock.config.should be_nil
    end
  end

  describe ".parsed_config" do
    it "returns nil if the config file is not present" do
      Gemlock.parsed_config.should be_nil
    end

    it "returns a hash containing the user's email if config file is present" do
      Gemlock.stubs(:config).returns((File.join(File.dirname(__FILE__), 'fixtures', 'gemlock.yml')))
      Gemlock.parsed_config["email"].should eql "tester@example.com"
    end
  end

  describe ".email" do
    it "returns the email in the config if present and valid" do
      Gemlock.stubs(:parsed_config).returns({'email' => 'hi@mikeskalnik.com'})
      Gemlock.email.should eql 'hi@mikeskalnik.com'
    end

    it "returns nil if the email in the config is invalid" do
      Gemlock.stubs(:parsed_config).returns({'email' => 'd@er@p.com'})
      Gemlock.email.should be_nil
    end

    it "returns nil if there is no config" do
      Gemlock.stubs(:parsed_config).returns(nil)
      Gemlock.email.should be_nil
    end
  end

  describe ".difference" do
    it "returns 'major' if there is a major version difference between the two gem versions" do
      Gemlock.difference("2.0.0", "3.0.0").should eql "major"
      Gemlock.difference("2.5.10", "3.1.0").should eql "major"
      Gemlock.difference("3.1.10", "2.5.8").should eql "major"
      Gemlock.difference("3.0", "2.0" ).should eql "major"
    end

    it "returns 'minor' if there is a minor version difference between the two gem versions" do
      Gemlock.difference("3.0.0", "3.1.0").should eql "minor"
      Gemlock.difference("3.1.0", "3.2.1").should eql "minor"
      Gemlock.difference("3.1.0", "3.0.0").should eql "minor"
    end

    it "returns 'patch' if there is a patch version difference between the two gem versions" do
      Gemlock.difference("3.1.0", "3.1.1").should eql "patch"
      Gemlock.difference("0.0.2", "0.0.1").should eql "patch"
    end

    it "returns 'none' if there is no difference" do
      Gemlock.difference("0.0.0", "0.0.0").should eql "none"
      Gemlock.difference("0.0.1", "0.0.1").should eql "none"
      Gemlock.difference("0.1.0", "0.1.0").should eql "none"
      Gemlock.difference("1.0.0", "1.0.0").should eql "none"
    end
  end

  describe '.initializer' do
    it "makes a thread" do
      Gemlock.stubs(:outdated).returns([])
      capture_stdout do
        @thread = Gemlock.initializer
        @thread.class.should eql Thread
        @thread.kill
      end
    end

    it "checks for updates" do
      Gemlock.expects(:outdated).returns([])
      capture_stdout do
        @thread = Gemlock.initializer
        # Wait for the checker thread to finish its first pass and sleep.
        while @thread.status != 'sleep' do
          sleep 0.5
        end
        @thread.kill
      end
    end
  end

  describe ".process_version" do
    it "splits a version string into an array" do
      Gemlock.send(:process_version, "3.0.0").class.should eql Array
    end

    it "appends missing zeros to the end of a version if not given" do
      Gemlock.send(:process_version, "3").should eql [3, 0, 0]
      Gemlock.send(:process_version, "3.0").should eql [3, 0, 0]
    end
  end

  describe ".update_interval" do
    it "returns the number of seconds in a week if config_file is not present, or interval is not specified" do
      Gemlock.update_interval.should eql 60*60*24*7
      Gemlock.stubs(:parsed_config).returns({"email"=>"tester@example.com"})
      Gemlock.update_interval.should eql 60*60*24*7
    end

    it "returns the number of seconds until the next number of hours as given" do
      Gemlock.stubs(:parsed_config).returns({"interval" => ["8 hours"]})
      Gemlock.update_interval.should eql 60*60*8
    end

    it "returns the number of seconds until the next number of days as given" do
      Gemlock.stubs(:parsed_config).returns({"interval" => ["4 days"]})
      Gemlock.update_interval.should eql 60*60*24*4
    end

    it "returns the number of seconds until the next number of weeks as given" do
      Gemlock.stubs(:parsed_config).returns({"interval" => ["2 weeks"]})
      Gemlock.update_interval.should eql 60*60*24*7*2
    end

    it "returns the number of seconds until the next number of months as given" do
      Gemlock.stubs(:parsed_config).returns({"interval" => ["3 months"]})
      # A month is approximated as 30 days.
      Gemlock.update_interval.should eql 60*60*24*30*3
    end
  end

  # Captures everything written to $stdout during the block and returns
  # the StringIO it was collected into; always restores $stdout.
  def capture_stdout
    io = StringIO.new
    $stdout = io
    yield
    return io
  ensure
    $stdout = STDOUT
  end
end
|
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
# Helper mixin used below to verify that Mandy::Job#mixin makes module
# methods callable inside map/reduce blocks.
module TestModule
  # Fixed marker value the specs emit and then assert on.
  def dragon
    "trogdor"
  end
end
# Specs for the Mandy::Job DSL: store registration, module mixins, and
# custom (JSON) serialisation of keys/values between map and reduce.
describe Mandy::Job do
  describe "store" do
    it "allows configuring a store" do
      Mandy.stores.clear
      job = Mandy::Job.new("test1") { store(:hbase, :test_store, :url => 'http://abc.com/test') }
      # A store declared inside the job block is registered globally.
      Mandy.stores.should == { :test_store => Mandy::Stores::HBase.new(:url => 'http://abc.com/test') }
    end
  end

  describe "mixins" do
    it "should mixin module to mapper" do
      input, output = StringIO.new("something"), StringIO.new("")
      job = Mandy::Job.new("test1") { mixin TestModule; map do |k,v| emit(dragon) end; }
      job.run_map(input, output)
      output.rewind
      output.read.chomp.should == "trogdor"
    end

    it "should mixin module to reducer" do
      input, output = StringIO.new("something"), StringIO.new("")
      job = Mandy::Job.new("test1") { mixin TestModule; map do |k,v| end; reduce do |k,v| emit(dragon) end; }
      job.run_map(input, output)
      job.run_reduce(input, output)
      output.rewind
      output.read.chomp.should == "trogdor"
    end
  end

  describe "custom serialisation" do
    it "should allow for standard input format" do
      input = "manilow\t1978,lola"
      map_output, reduce_output = StringIO.new(''), StringIO.new('')
      job = Mandy::Job.new("lola") do
        serialize Mandy::Serializers::Json
        input_format :plain
        output_format :plain
        map do|k,v|
          emit(k, {"year" => "1978", "name" => "lola"})
        end
        reduce do |k, values|
          v = values.first
          emit(k, v["year"] + "," + v["name"])
        end
      end
      # Round-trip: plain text in, JSON between phases, plain text out.
      job.run_map(input, map_output)
      map_output.rewind
      job.run_reduce(map_output, reduce_output)
      reduce_output.rewind
      reduce_output.read.chomp.should == input
    end

    it "should use standard output when job has no reducers" do
      input = "manilow\t1978,lola"
      output = StringIO.new('')
      job = Mandy::Job.new("lola") do
        serialize Mandy::Serializers::Json
        input_format :plain
        output_format :plain
        map do|k,v|
          emit(k, v)
        end
      end
      job.run_map(input, output)
      output.rewind
      output.read.chomp.should == input
    end

    it "should allow output to be converted to json from plaintext input" do
      input = "manilow\t1978,lola"
      output = StringIO.new('')
      job = Mandy::Job.new("lola") do
        serialize Mandy::Serializers::Json
        input_format :plain
        # NOTE(review): :pants is not a real format name — presumably any
        # non-:plain value exercises the serialized-output path; confirm
        # against Mandy::Job#output_format.
        output_format :pants
        map do|k,v|
          emit(k, v.split(","))
        end
      end
      job.run_map(input, output)
      output.rewind
      output.read.chomp.should == "manilow\t[\"1978\",\"lola\"]"
    end

    it "should allow serialisation module to be mixed in" do
      input = to_input_line("manilow", {:dates => [1, 9, 7, 8], :name => "lola"})
      output = StringIO.new('')
      job = Mandy::Job.new("lola") do
        serialize Mandy::Serializers::Json
        map do|k,v|
          # Symbol keys round-trip through JSON as strings.
          k.should == "manilow"
          v.should == {"dates" => [1, 9, 7, 8], "name" => "lola"}
          emit(k, v)
        end
      end
      job.run_map(input, output)
      output.rewind
      output.read.chomp.should == "manilow\t{\"name\":\"lola\",\"dates\":[1,9,7,8]}"
    end

    # Builds a tab-separated "key\tjson" line, matching the JSON
    # serializer's on-the-wire format.
    def to_input_line(k,v)
      [k, v.to_json].join("\t")
    end
  end
end
Don't need to specify output format
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
# Helper mixin used by the specs below to verify that Job#mixin makes
# module methods callable from inside map and reduce blocks.
module TestModule
  # Canary helper; the specs assert that its return value gets emitted.
  def dragon
    'trogdor'
  end
end
describe Mandy::Job do
describe "store" do
it "allows configuring a store" do
Mandy.stores.clear
job = Mandy::Job.new("test1") { store(:hbase, :test_store, :url => 'http://abc.com/test') }
Mandy.stores.should == { :test_store => Mandy::Stores::HBase.new(:url => 'http://abc.com/test') }
end
end
describe "mixins" do
it "should mixin module to mapper" do
input, output = StringIO.new("something"), StringIO.new("")
job = Mandy::Job.new("test1") { mixin TestModule; map do |k,v| emit(dragon) end; }
job.run_map(input, output)
output.rewind
output.read.chomp.should == "trogdor"
end
it "should mixin module to reducer" do
input, output = StringIO.new("something"), StringIO.new("")
job = Mandy::Job.new("test1") { mixin TestModule; map do |k,v| end; reduce do |k,v| emit(dragon) end; }
job.run_map(input, output)
job.run_reduce(input, output)
output.rewind
output.read.chomp.should == "trogdor"
end
end
describe "custom serialisation" do
it "should allow for standard input format" do
input = "manilow\t1978,lola"
map_output, reduce_output = StringIO.new(''), StringIO.new('')
job = Mandy::Job.new("lola") do
serialize Mandy::Serializers::Json
input_format :plain
output_format :plain
map do|k,v|
emit(k, {"year" => "1978", "name" => "lola"})
end
reduce do |k, values|
v = values.first
emit(k, v["year"] + "," + v["name"])
end
end
job.run_map(input, map_output)
map_output.rewind
job.run_reduce(map_output, reduce_output)
reduce_output.rewind
reduce_output.read.chomp.should == input
end
it "should use standard output when job has no reducers" do
input = "manilow\t1978,lola"
output = StringIO.new('')
job = Mandy::Job.new("lola") do
serialize Mandy::Serializers::Json
input_format :plain
output_format :plain
map do|k,v|
emit(k, v)
end
end
job.run_map(input, output)
output.rewind
output.read.chomp.should == input
end
it "should allow output to be converted to json from plaintext input" do
input = "manilow\t1978,lola"
output = StringIO.new('')
job = Mandy::Job.new("lola") do
serialize Mandy::Serializers::Json
input_format :plain
map do|k,v|
emit(k, v.split(","))
end
end
job.run_map(input, output)
output.rewind
output.read.chomp.should == "manilow\t[\"1978\",\"lola\"]"
end
it "should allow serialisation module to be mixed in" do
input = to_input_line("manilow", {:dates => [1, 9, 7, 8], :name => "lola"})
output = StringIO.new('')
job = Mandy::Job.new("lola") do
serialize Mandy::Serializers::Json
map do|k,v|
k.should == "manilow"
v.should == {"dates" => [1, 9, 7, 8], "name" => "lola"}
emit(k, v)
end
end
job.run_map(input, output)
output.rewind
output.read.chomp.should == "manilow\t{\"name\":\"lola\",\"dates\":[1,9,7,8]}"
end
def to_input_line(k,v)
[k, v.to_json].join("\t")
end
end
end |
require 'minitest_helper'
describe 'Locking' do
let(:repository) { Repository.new :test }
# Simulates another process holding the lock: writes a foreign process
# id into the locker adapter under the repository's name, so subsequent
# repository operations should refuse to run.
def lock(repo)
Eternity.locker_adapter[repo.name] = :test_process
end
# Asserts that the given block raises Locky::Error with the standard
# "already locked" message for the :test repository locked above.
def assert_locked
error = proc { yield }.must_raise Locky::Error
error.message.must_equal 'test already locked by test_process'
end
it 'Commit' do
repository[:countries].insert 'AR', name: 'Argentina'
lock repository
assert_locked { repository.commit author: 'User', message: 'Commit Message' }
repository.current_commit.must_be_nil
repository.changes_count.must_equal 1
end
it 'Checkout' do
repository[:countries].insert 'AR', name: 'Argentina'
commit_1 = repository.commit author: 'User', message: 'Commit 1'
repository[:countries].insert 'UY', name: 'Uruguay'
commit_2 = repository.commit author: 'User', message: 'Commit 2'
lock repository
assert_locked { repository.checkout commit: commit_1.id }
repository.current_commit.must_equal commit_2
end
it 'Merge' do
repository[:countries].insert 'AR', name: 'Argentina'
commit_1 = repository.commit author: 'User', message: 'Commit 1'
repository[:countries].insert 'UY', name: 'Uruguay'
commit_2 = repository.commit author: 'User', message: 'Commit 2'
repository.checkout commit: commit_1.id
lock repository
assert_locked { repository.merge commit: commit_2.id }
repository.current_commit.must_equal commit_1
end
it 'Revert all' do
repository[:countries].insert 'AR', name: 'Argentina'
repository[:countries].insert 'UY', name: 'Uruguay'
repository[:cities].insert 'CABA', name: 'Ciudad Autonoma de Buenos Aires'
lock repository
assert_locked { repository.revert }
repository.changes_count.must_equal 3
repository[:countries].count.must_equal 2
repository[:cities].count.must_equal 1
end
it 'Revert collection' do
repository[:countries].insert 'AR', name: 'Argentina'
repository[:countries].insert 'UY', name: 'Uruguay'
repository[:cities].insert 'CABA', name: 'Ciudad Autonoma de Buenos Aires'
lock repository
assert_locked { repository[:countries].revert_all }
repository.changes_count.must_equal 3
repository[:countries].count.must_equal 2
repository[:cities].count.must_equal 1
end
it 'Insert' do
lock repository
assert_locked { repository[:countries].insert 'AR', name: 'Argentina' }
repository.changes_count.must_equal 0
end
it 'Update' do
repository[:countries].insert 'AR', name: 'Argentina'
lock repository
assert_locked { repository[:countries].update 'AR', name: 'Republica Argentina' }
repository.delta.to_h.must_equal 'countries' => {'AR' => {'action' => 'insert', 'data' => {'name' => 'Argentina'}}}
end
it 'Delete' do
repository[:countries].insert 'AR', name: 'Argentina'
lock repository
assert_locked { repository[:countries].delete 'AR' }
repository.delta.to_h.must_equal 'countries' => {'AR' => {'action' => 'insert', 'data' => {'name' => 'Argentina'}}}
end
it 'Revert' do
repository[:countries].insert 'AR', name: 'Argentina'
lock repository
assert_locked { repository[:countries].revert 'AR' }
repository.delta.to_h.must_equal 'countries' => {'AR' => {'action' => 'insert', 'data' => {'name' => 'Argentina'}}}
end
end
Fix tests for Ruby 1.9.3
require 'minitest_helper'
describe 'Locking' do
let(:repository) { Repository.new :test }
def lock(repo)
Eternity.locker_adapter[repo.name] = :test_process
end
def assert_locked
error = proc { yield }.must_raise Locky::Error
error.message.must_equal 'test already locked by test_process'
end
it 'Commit' do
repository[:countries].insert 'AR', name: 'Argentina'
lock repository
assert_locked { repository.commit author: 'User', message: 'Commit Message' }
repository.current_commit.must_be_nil
repository.changes_count.must_equal 1
end
it 'Checkout' do
repository[:countries].insert 'AR', name: 'Argentina'
commit_1 = repository.commit author: 'User', message: 'Commit 1'
repository[:countries].insert 'UY', name: 'Uruguay'
commit_2 = repository.commit author: 'User', message: 'Commit 2'
lock repository
assert_locked { repository.checkout commit: commit_1.id }
repository.current_commit.must_equal commit_2
end
it 'Merge' do
repository[:countries].insert 'AR', name: 'Argentina'
commit_1 = repository.commit author: 'User', message: 'Commit 1'
repository[:countries].insert 'UY', name: 'Uruguay'
commit_2 = repository.commit author: 'User', message: 'Commit 2'
repository.checkout commit: commit_1.id
lock repository
assert_locked { repository.merge commit: commit_2.id }
repository.current_commit.must_equal commit_1
end
it 'Revert all' do
repository[:countries].insert 'AR', name: 'Argentina'
repository[:countries].insert 'UY', name: 'Uruguay'
repository[:cities].insert 'CABA', name: 'Ciudad Autonoma de Buenos Aires'
lock repository
assert_locked { repository.revert }
repository.changes_count.must_equal 3
repository[:countries].count.must_equal 2
repository[:cities].count.must_equal 1
end
it 'Revert collection' do
repository[:countries].insert 'AR', name: 'Argentina'
repository[:countries].insert 'UY', name: 'Uruguay'
repository[:cities].insert 'CABA', name: 'Ciudad Autonoma de Buenos Aires'
lock repository
assert_locked { repository[:countries].revert_all }
repository.changes_count.must_equal 3
repository[:countries].count.must_equal 2
repository[:cities].count.must_equal 1
end
it 'Insert' do
lock repository
assert_locked { repository[:countries].insert 'AR', name: 'Argentina' }
repository.changes_count.must_equal 0
end
it 'Update' do
repository[:countries].insert 'AR', name: 'Argentina'
lock repository
assert_locked { repository[:countries].update 'AR', name: 'Republica Argentina' }
repository.delta.must_equal 'countries' => {'AR' => {'action' => 'insert', 'data' => {'name' => 'Argentina'}}}
end
it 'Delete' do
repository[:countries].insert 'AR', name: 'Argentina'
lock repository
assert_locked { repository[:countries].delete 'AR' }
repository.delta.must_equal 'countries' => {'AR' => {'action' => 'insert', 'data' => {'name' => 'Argentina'}}}
end
it 'Revert' do
repository[:countries].insert 'AR', name: 'Argentina'
lock repository
assert_locked { repository[:countries].revert 'AR' }
repository.delta.must_equal 'countries' => {'AR' => {'action' => 'insert', 'data' => {'name' => 'Argentina'}}}
end
end |
Test for minimax method: create tests for winning, losing, a draw, and being a step away from winning or losing
# `.rb` extension is redundant with require_relative; leftover debug
# `require 'pry'` and the commented-out `let` have been removed.
require_relative '../lib/tic_tac_toe'

# Specs for TicTacToe#minimax scoring:
#   +1000 / -1000 : board already won by X / O
#        0        : draw
#    +990 / -990  : one move away from an X / O win
describe TicTacToe do
  describe '#minimax' do
    context 'a player wins' do
      it "returns 1000 if it is a winning board is X" do
        board = ["X", "X", "X", "O", "X", "X", "O", "X", "O"]
        expect(TicTacToe.new(board, "X").minimax).to eq(1000)
      end
      it "returns -1000 if it is a winning board is O" do
        board = ["X", "X", "O", "O", "X", "X", "O", "O", "O"]
        # Negative literals are parenthesised to avoid Ruby's
        # "ambiguous first argument" warning on `eq -1000`.
        expect(TicTacToe.new(board, "O").minimax).to eq(-1000)
      end
    end
    context 'draw' do
      it "returns 0 if it is a draw" do
        board = ["X", "O", "X", "X", "O", "O", "O", "X", "O"]
        expect(TicTacToe.new(board, "O").minimax).to eq(0)
      end
    end
    context 'one move away from win' do
      it "returns 990 if it is a winning board is X" do
        board = ["X", "X", " ", " ", " ", " ", " ", " ", " "]
        expect(TicTacToe.new(board, "X").minimax).to eq(990)
      end
      it "returns -990 if it is a winning board is O" do
        board = [" ", " ", " ", " ", " ", " ", " ", "O", "O"]
        expect(TicTacToe.new(board, "O").minimax).to eq(-990)
      end
    end
  end
end
ENV['RAILS_ENV'] ||= 'test'
require 'spec_helper'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
require 'capybara/rails'
require 'capybara/rspec'
require_relative 'support/webmock'
require_relative 'support/capybara'
require_relative 'support/database_cleaner'
require_relative 'support/factory_girl'
require_relative 'support/omniauth'
require_relative 'support/helpers/omniauth_helpers'
require_relative 'support/helpers'
# Checks for pending migrations before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.maintain_test_schema!
RSpec.configure do |config|
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = false
# RSpec Rails can automatically mix in different behaviours to your tests
# based on their file location, for example enabling you to call `get` and
# `post` in specs under `spec/controllers`.
#
# You can disable this behaviour by removing the line below, and instead
# explicitly tag your specs with their type, e.g.:
#
# RSpec.describe UsersController, :type => :controller do
# # ...
# end
#
# The different available types are documented in the features, such as in
# https://relishapp.com/rspec/rspec-rails/docs
config.infer_spec_type_from_file_location!
config.before(:each) do
stub_request(:get, %r{http://localhost:4000/lesson-repos/}).
to_return(status: 200, body: '{"lessons":{}}')
end
end
Fix spec/rails_helper to mock the new async_fetcher route
ENV['RAILS_ENV'] ||= 'test'
require 'spec_helper'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
require 'capybara/rails'
require 'capybara/rspec'
require_relative 'support/webmock'
require_relative 'support/capybara'
require_relative 'support/database_cleaner'
require_relative 'support/factory_girl'
require_relative 'support/omniauth'
require_relative 'support/helpers/omniauth_helpers'
require_relative 'support/helpers'
# Checks for pending migrations before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.maintain_test_schema!
RSpec.configure do |config|
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = false
# RSpec Rails can automatically mix in different behaviours to your tests
# based on their file location, for example enabling you to call `get` and
# `post` in specs under `spec/controllers`.
#
# You can disable this behaviour by removing the line below, and instead
# explicitly tag your specs with their type, e.g.:
#
# RSpec.describe UsersController, :type => :controller do
# # ...
# end
#
# The different available types are documented in the features, such as in
# https://relishapp.com/rspec/rspec-rails/docs
config.infer_spec_type_from_file_location!
config.before(:each) do
stub_request(:get, %r{http://localhost:4000/projects/}).
to_return(status: 200, body: '{"lessons":{}}')
end
end
|
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
abort('The Rails environment is running in production mode!') if Rails.env.production?
require 'spec_helper'
require 'rspec/rails'
require 'capybara/rspec'
require 'capybara/poltergeist'
Capybara.javascript_driver = :poltergeist
Capybara.default_max_wait_time = 3
Capybara.asset_host = 'http://localhost:3000'
ActiveRecord::Migration.maintain_test_schema!
RSpec.configure do |config|
config.use_transactional_fixtures = false
config.include FactoryGirl::Syntax::Methods
config.infer_spec_type_from_file_location!
config.before(:suite) do
DatabaseCleaner.clean_with(:truncation)
end
config.before(:each) do
DatabaseCleaner.strategy = :transaction
end
config.before(:each, js: true) do
DatabaseCleaner.strategy = :truncation
end
config.before(:each) do
DatabaseCleaner.start
end
config.after(:each) do
DatabaseCleaner.clean
end
end
Configure shoulda-matchers Gem
We added and configured the shoulda-matchers gem because it provides
RSpec-compatible one-liners that test common Rails functionality.
These tests would otherwise be much longer, more complex, and
error-prone.
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
abort('The Rails environment is running in production mode!') if Rails.env.production?
require 'spec_helper'
require 'rspec/rails'
require 'capybara/rspec'
require 'capybara/poltergeist'
Capybara.javascript_driver = :poltergeist
Capybara.default_max_wait_time = 3
Capybara.asset_host = 'http://localhost:3000'
ActiveRecord::Migration.maintain_test_schema!
RSpec.configure do |config|
config.use_transactional_fixtures = false
config.include FactoryGirl::Syntax::Methods
config.infer_spec_type_from_file_location!
config.before(:suite) do
DatabaseCleaner.clean_with(:truncation)
end
config.before(:each) do
DatabaseCleaner.strategy = :transaction
end
config.before(:each, js: true) do
DatabaseCleaner.strategy = :truncation
end
config.before(:each) do
DatabaseCleaner.start
end
config.after(:each) do
DatabaseCleaner.clean
end
end
# Hook shoulda-matchers' one-liner matchers into RSpec and load its
# Rails-specific matcher libraries (model, controller, etc.).
Shoulda::Matchers.configure do |config|
config.integrate do |with|
with.test_framework :rspec
with.library :rails
end
end
|
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV["RAILS_ENV"] ||= "test"
require "spec_helper"
require File.expand_path("../../config/environment", __FILE__)
require "rspec/rails"
# Add additional requires below this line. Rails is not loaded until this point!
require "devise"
Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
require "vcr"
require "webmock/rspec"
WebMock.disable_net_connect!(allow_localhost: true)
# VCR.config was removed in VCR 2.0; VCR.configure is the supported API
# (the same change already applied in the later copy of this helper).
VCR.configure do |config|
  config.cassette_library_dir = "spec/fixtures/vcr_cassettes"
  config.hook_into :webmock
  # Scrub Twitter credentials out of recorded cassettes.
  config.filter_sensitive_data("<TWITTER_CONSUMER_KEY>") { ENV["TWITTER_CONSUMER_KEY"] }
  config.filter_sensitive_data("<TWITTER_CONSUMER_SECRET>") { ENV["TWITTER_CONSUMER_SECRET"] }
  config.filter_sensitive_data("<TWITTER_ACCESS_TOKEN>") { ENV["TWITTER_ACCESS_TOKEN"] }
  config.filter_sensitive_data("<TWITTER_ACCESS_SECRET>") { ENV["TWITTER_ACCESS_SECRET"] }
end
# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
#
# The following line is provided for convenience purposes. It has the downside
# of increasing the boot-up time by auto-requiring all files in the support
# directory. Alternatively, in the individual `*_spec.rb` files, manually
# require only the support files necessary.
#
# Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
# Checks for pending migrations before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.maintain_test_schema!
RSpec.configure do |config|
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
# RSpec Rails can automatically mix in different behaviours to your tests
# based on their file location, for example enabling you to call `get` and
# `post` in specs under `spec/controllers`.
#
# You can disable this behaviour by removing the line below, and instead
# explicitly tag your specs with their type, e.g.:
#
# RSpec.describe UsersController, :type => :controller do
# # ...
# end
#
# The different available types are documented in the features, such as in
# https://relishapp.com/rspec/rspec-rails/docs
config.infer_spec_type_from_file_location!
config.include Devise::TestHelpers, type: :controller
config.include ControllerMacros, type: :controller
# Add VCR to all tests
config.around(:each) do |example|
options = example.metadata[:vcr] || {}
if options[:record] == :skip
VCR.turned_off(&example)
else
name = example.metadata[:full_description].
split(/\s+/, 2).
join("/").
underscore.
tr(".", "/").
gsub(%r([^\w/]+), "_").
gsub(%r(/$), "")
VCR.use_cassette(name, options, &example)
end
end
end
Fix VCR deprecation warning (VCR.config was replaced by VCR.configure)
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV["RAILS_ENV"] ||= "test"
require "spec_helper"
require File.expand_path("../../config/environment", __FILE__)
require "rspec/rails"
# Add additional requires below this line. Rails is not loaded until this point!
require "devise"
Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
require "vcr"
require "webmock/rspec"
WebMock.disable_net_connect!(allow_localhost: true)
VCR.configure do |config|
config.cassette_library_dir = "spec/fixtures/vcr_cassettes"
config.hook_into :webmock
config.filter_sensitive_data("<TWITTER_CONSUMER_KEY>") { ENV["TWITTER_CONSUMER_KEY"] }
config.filter_sensitive_data("<TWITTER_CONSUMER_SECRET>") { ENV["TWITTER_CONSUMER_SECRET"] }
config.filter_sensitive_data("<TWITTER_ACCESS_TOKEN>") { ENV["TWITTER_ACCESS_TOKEN"] }
config.filter_sensitive_data("<TWITTER_ACCESS_SECRET>") { ENV["TWITTER_ACCESS_SECRET"] }
end
# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
#
# The following line is provided for convenience purposes. It has the downside
# of increasing the boot-up time by auto-requiring all files in the support
# directory. Alternatively, in the individual `*_spec.rb` files, manually
# require only the support files necessary.
#
# Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
# Checks for pending migrations before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.maintain_test_schema!
RSpec.configure do |config|
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
# RSpec Rails can automatically mix in different behaviours to your tests
# based on their file location, for example enabling you to call `get` and
# `post` in specs under `spec/controllers`.
#
# You can disable this behaviour by removing the line below, and instead
# explicitly tag your specs with their type, e.g.:
#
# RSpec.describe UsersController, :type => :controller do
# # ...
# end
#
# The different available types are documented in the features, such as in
# https://relishapp.com/rspec/rspec-rails/docs
config.infer_spec_type_from_file_location!
config.include Devise::TestHelpers, type: :controller
config.include ControllerMacros, type: :controller
# Add VCR to all tests
config.around(:each) do |example|
options = example.metadata[:vcr] || {}
if options[:record] == :skip
VCR.turned_off(&example)
else
name = example.metadata[:full_description].
split(/\s+/, 2).
join("/").
underscore.
tr(".", "/").
gsub(%r([^\w/]+), "_").
gsub(%r(/$), "")
VCR.use_cassette(name, options, &example)
end
end
end
|
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
abort('The Rails environment is running in production mode!') if Rails.env.production?
require 'spec_helper'
require 'rspec/rails'
require 'capybara/rspec'
require 'capybara/poltergeist'
Capybara.default_driver = :poltergeist
# default_wait_time was deprecated in Capybara 2.5 in favour of
# default_max_wait_time (as the fixed copy of this file below uses).
Capybara.default_max_wait_time = 3
Capybara.asset_host = 'http://localhost:3000'
RSpec.configure do |config|
config.use_transactional_fixtures = true
config.infer_spec_type_from_file_location!
end
Fix Capybara deprecation warning
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
abort('The Rails environment is running in production mode!') if Rails.env.production?
require 'spec_helper'
require 'rspec/rails'
require 'capybara/rspec'
require 'capybara/poltergeist'
Capybara.default_driver = :poltergeist
Capybara.default_max_wait_time = 3
Capybara.asset_host = 'http://localhost:3000'
RSpec.configure do |config|
config.use_transactional_fixtures = true
config.infer_spec_type_from_file_location!
end
|
require 'spec_helper'
require 'strscan'
# Builds an anonymous module that, when included into an example group,
# provides #scan and #scanner_after shortcuts bound to the named regex
# looked up on Wptemplates::Regexes.
def ScanShortcutFor(regex_name)
  Module.new do
    # Result of scanning +text+ from the start: the matched string, or
    # nil when the regex does not match at the current position.
    define_method :scan do |text|
      StringScanner.new(text).scan(Wptemplates::Regexes.send(regex_name))
    end
    # The StringScanner itself after attempting the scan, so specs can
    # inspect capture groups and the scanner position.
    define_method :scanner_after do |text|
      scanner = StringScanner.new(text)
      scanner.scan(Wptemplates::Regexes.send(regex_name))
      scanner
    end
  end
end
describe Wptemplates::Regexes do
describe '.till_doublebrace_doubleopenbrackets_or_pipe' do
include ScanShortcutFor(:till_doublebrace_doubleopenbrackets_or_pipe)
it 'consumes a string with no doublebraces or pipes at all' do
expect(scan "abc").to eq("abc")
end
it 'consumes until doublebraces or pipe' do
expect(scan "abc{{d").to eq("abc")
expect(scan "abc|d").to eq("abc")
expect(scan "abc}}d").to eq("abc")
end
it 'does not accept an empty string (epsilon transition)' do
expect(scan "{{d").to be_false
expect(scan "|d").to be_false
expect(scan "}}d").to be_false
end
it 'consumes until doublebraces or pipe even if other braces and pipes show up (not greedy)' do
expect(scan "ab|c{{d}}e").to eq("ab")
expect(scan "ab|c|d|e").to eq("ab")
expect(scan "ab{{c|d}}e").to eq("ab")
expect(scan "ab}}c|d{{e").to eq("ab")
end
it 'ignores lone braces' do
expect(scan "ab{c|d}}e").to eq("ab{c")
expect(scan "ab}c|d{{e").to eq("ab}c")
end
end
describe '.till_doubleopenbrace_or_doubleopenbrackets' do
include ScanShortcutFor(:till_doubleopenbrace_or_doubleopenbrackets)
it 'consumes a string with no doubleopenbraces at all' do
expect(scan "abc").to eq("abc")
expect(scan "ab}}c").to eq("ab}}c")
expect(scan "ab|c").to eq("ab|c")
end
it 'consumes until doubleopenbraces' do
expect(scan "abc{{d").to eq("abc")
expect(scan "abc|d{{").to eq("abc|d")
expect(scan "abc}}d{{").to eq("abc}}d")
end
it 'does not accept an empty string (epsilon transition)' do
expect(scan "{{d").to be_false
end
it 'consumes until doubleopenbraces even if other doubleopenbraces show up (not greedy)' do
expect(scan "ab{{d{{e").to eq("ab")
end
it 'ignores lone braces' do
expect(scan "ab{c{{e").to eq("ab{c")
end
end
describe '.till_doubleclosebrace_or_pipe' do
include ScanShortcutFor(:till_doubleclosebrace_or_pipe)
it 'consumes a string with no doubleclosebraces or pipes at all' do
expect(scan "abc").to eq("abc")
expect(scan "ab{{c").to eq("ab{{c")
end
it 'consumes until doubleclosebraces' do
expect(scan "abc}}d").to eq("abc")
expect(scan "a{{bc}}d").to eq("a{{bc")
end
it 'consumes until a pipe' do
expect(scan "abc|d").to eq("abc")
expect(scan "a{{bc|d").to eq("a{{bc")
end
it 'does not accept an empty string (epsilon transition)' do
expect(scan "}}d").to be_false
expect(scan "|d").to be_false
end
it 'consumes until doubleclosebracees even if other doubleclosebrace show up (not greedy)' do
expect(scan "ab}}d}}e").to eq("ab")
end
it 'consumes until a pipe even if other pipes show up (not greedy)' do
expect(scan "ab|d|e").to eq("ab")
end
it 'ignores lone braces' do
expect(scan "ab}c}}e").to eq("ab}c")
end
end
describe '.from_pipe_till_equals_no_doubleclosebrace_or_pipe' do
include ScanShortcutFor(:from_pipe_till_equals_no_doubleclosebrace_or_pipe)
context 'when there is an equals sign and a pipe' do
it 'consumes a string including equals with no doubleclosebraces or pipes at all' do
expect(scan "|abc=").to eq("|abc=")
expect(scan "|abc=d").to eq("|abc=")
expect(scan "|ab{{c=d").to eq("|ab{{c=")
end
it 'fails when doubleclosebraces occur before equals' do
expect(scan "|abc}}d=e").to be_false
expect(scan "|a{{bc}}d=e").to be_false
end
it 'ignores single closebraces' do
expect(scan "|abc}d=e").to eq("|abc}d=")
expect(scan "|a{{bc}d=e").to eq("|a{{bc}d=")
end
it 'fails when a pipe occurs before equals' do
expect(scan "|abc|d=e").to be_false
expect(scan "|a{{bc|d=e").to be_false
end
it 'does actually accept an empty string (epsilon transition)' do
expect(scan "|=d").to eq("|=")
expect(scan "|=").to eq("|=")
end
it 'consumes until equals even if other equals show up (not greedy)' do
expect(scan "|ab=d=e").to eq("|ab=")
end
it 'provides us with the stuff between pipe and equals in the first index' do
expect(scanner_after("|ab=c")[1]).to eq("ab")
expect(scanner_after("|=c")[1]).to eq("")
end
end
context 'when there is no equals sign' do
it 'fails on plain string' do
expect(scan "|abc").to be_false
end
it 'fails when there is a pipe' do
expect(scan "abc|d").to be_false
expect(scan "abcd|").to be_false
expect(scan "|abcd").to be_false
end
it 'fails when there are doubleclosebraces' do
expect(scan "abc}}d").to be_false
expect(scan "abcd}}").to be_false
expect(scan "}}abcd").to be_false
end
end
context 'when the pipe is not a the beginning or there is no pipe' do
it 'fails' do
expect(scan "abc").to be_false
expect(scan "abc=").to be_false
expect(scan "a|bc=d").to be_false
expect(scan " |bc=d").to be_false
end
end
end
describe '.a_pipe' do
include ScanShortcutFor(:a_pipe)
it 'consumes a pipe' do
expect(scan "|").to eq("|")
expect(scan "|a").to eq("|")
end
it 'consumes only one pipe even if there are others around (not greedy)' do
expect(scan "|||").to eq("|")
expect(scan "|a|").to eq("|")
expect(scan "|a|a").to eq("|")
end
it 'fails when there is stuff before the pipe' do
expect(scan "a|").to be_false
expect(scan "a|b").to be_false
end
end
describe '.a_doubleopenbrace' do
include ScanShortcutFor(:a_doubleopenbrace)
it 'consumes a doubleopenbrace' do
expect(scan "{{").to eq("{{")
expect(scan "{{a").to eq("{{")
end
it 'consumes only one doubleopenbrace even if there are others around (not greedy)' do
expect(scan "{{a{{").to eq("{{")
expect(scan "{{a{{a").to eq("{{")
end
it 'fails when there is stuff before the doubleopenbrace' do
expect(scan "a{{").to be_false
expect(scan "a{{b").to be_false
end
it 'ignores singleopenbrace' do
expect(scan "a{").to be_false
expect(scan "a{b").to be_false
end
it 'deals with extra braces' do
expect(scan "{{{").to eq("{{")
expect(scan "{{{{").to eq("{{")
expect(scan "{{{{{").to eq("{{")
expect(scan "{{{{{{").to eq("{{")
end
it 'ignores pipes and doubleclosingbrace' do
expect(scan "|").to be_false
expect(scan "}}").to be_false
end
end
describe '.a_doubleclosingbrace' do
include ScanShortcutFor(:a_doubleclosingbrace)
it 'consumes a doubleclosingbrace' do
expect(scan "}}").to eq("}}")
expect(scan "}}a").to eq("}}")
end
it 'consumes only one doubleclosingbrace even if there are others around (not greedy)' do
expect(scan "}}a}}").to eq("}}")
expect(scan "}}a}}a").to eq("}}")
end
it 'fails when there is stuff before the doubleclosingbrace' do
expect(scan "a}}").to be_false
expect(scan "a}}b").to be_false
end
it 'ignores singleclosebrace' do
expect(scan "a}").to be_false
expect(scan "a}b").to be_false
end
it 'deals with extra braces' do
expect(scan "}}}").to eq("}}")
expect(scan "}}}}").to eq("}}")
expect(scan "}}}}}").to eq("}}")
expect(scan "}}}}}}").to eq("}}")
end
it 'ignores pipes and doubleopenbrace' do
expect(scan "|").to be_false
expect(scan "{{").to be_false
end
end
describe '.a_link' do
include ScanShortcutFor(:a_link)
it 'consumes a normal link' do
s = scanner_after("[[foo]]")
expect(s.matched).to eq("[[foo]]")
expect(s[1]).to eq("foo")
expect(s[2]).to be_nil
expect(s[3]).to be_nil
end
it 'consumes only the normal link' do
s = scanner_after("[[foo]].")
expect(s.matched).to eq("[[foo]]")
expect(s[1]).to eq("foo")
expect(s[2]).to be_nil
expect(s[3]).to be_nil
end
it 'consumes some extra letters after closing brackets' do
s = scanner_after("[[foo]]nx.")
expect(s.matched).to eq("[[foo]]nx")
expect(s[1]).to eq("foo")
expect(s[2]).to be_nil
expect(s[3]).to eq("nx")
end
it 'consumes a link label' do
s = scanner_after("[[foo|bar]].")
expect(s.matched).to eq("[[foo|bar]]")
expect(s[1]).to eq("foo")
expect(s[2]).to eq("bar")
expect(s[3]).to be_nil
end
it 'consumes a link label and extra letters' do
s = scanner_after("[[foo|bar]]ny.")
expect(s.matched).to eq("[[foo|bar]]ny")
expect(s[1]).to eq("foo")
expect(s[2]).to eq("bar")
expect(s[3]).to eq("ny")
end
it 'consumes an empty link label' do
s = scanner_after("[[foo|]].")
expect(s.matched).to eq("[[foo|]]")
expect(s[1]).to eq("foo")
expect(s[2]).to eq("")
expect(s[3]).to be_nil
end
it 'consumes a link with an anchor' do
s = scanner_after("[[foo#ro|bar]]ny.")
expect(s.matched).to eq("[[foo#ro|bar]]ny")
expect(s[1]).to eq("foo#ro")
expect(s[2]).to eq("bar")
expect(s[3]).to eq("ny")
end
it 'does not consume unclosed links' do
expect(scan "[[a").to be_false
end
it 'does not consume unclosed links with newlines' do
expect(scan "[[a\nb]]").to be_false
end
it 'consume only to the first pair of brackets even if there are others around' do
s = scanner_after("[[a]]b]]c,x")
expect(s.matched).to eq("[[a]]b")
expect(s[1]).to eq("a")
expect(s[2]).to be_nil
expect(s[3]).to eq("b")
end
it 'consumes pipes in the label' do
s = scanner_after("[[a|b|c]]d,x")
expect(s.matched).to eq("[[a|b|c]]d")
expect(s[1]).to eq("a")
expect(s[2]).to eq("b|c")
expect(s[3]).to eq("d")
end
it 'consumes single brackets in the label' do
s = scanner_after("[[a|b]c]]d,x")
expect(s.matched).to eq("[[a|b]c]]d")
expect(s[1]).to eq("a")
expect(s[2]).to eq("b]c")
expect(s[3]).to eq("d")
end
it 'consumes parens in urls' do
s = scanner_after("[[a(b)|c]]d.")
expect(s.matched).to eq("[[a(b)|c]]d")
expect(s[1]).to eq("a(b)")
expect(s[2]).to eq("c")
expect(s[3]).to eq("d")
end
it 'consumes commas in urls' do
s = scanner_after("[[a,b|c]]d.")
expect(s.matched).to eq("[[a,b|c]]d")
expect(s[1]).to eq("a,b")
expect(s[2]).to eq("c")
expect(s[3]).to eq("d")
end
it 'consumes spaces in urls' do
s = scanner_after("[[ ]]")
expect(s.matched).to eq("[[ ]]")
expect(s[1]).to eq(" ")
expect(s[2]).to be_nil
expect(s[3]).to be_nil
end
end
end
Update the regex specs to match new regex purposes
require 'spec_helper'
require 'strscan'
# Builds an anonymous mixin exposing two spec helpers bound to the named
# pattern from Wptemplates::Regexes:
#   scan(text)          -> whatever StringScanner#scan returns for that pattern
#   scanner_after(text) -> the StringScanner itself, after the scan has run
def ScanShortcutFor(regex)
  Module.new do
    define_method(:scan) do |input|
      StringScanner.new(input).scan(Wptemplates::Regexes.send(regex))
    end
    define_method(:scanner_after) do |input|
      StringScanner.new(input).tap do |scanner|
        scanner.scan(Wptemplates::Regexes.send(regex))
      end
    end
  end
end
describe Wptemplates::Regexes do
describe '.till_doublebrace_doubleopenbrackets_or_pipe' do
include ScanShortcutFor(:till_doublebrace_doubleopenbrackets_or_pipe)
it 'consumes a string with no doublebraces or doubleopenbrackets or pipes at all' do
expect(scan "abc").to eq("abc")
end
it 'consumes until doublebraces or doubleopenbrackets or pipe' do
expect(scan "abc{{d").to eq("abc")
expect(scan "abc|d").to eq("abc")
expect(scan "abc}}d").to eq("abc")
expect(scan "abc[[d").to eq("abc")
end
it 'does not accept an empty string (epsilon transition)' do
expect(scan "{{d").to be_false
expect(scan "|d").to be_false
expect(scan "}}d").to be_false
expect(scan "[[d").to be_false
end
it 'consumes until doublebraces or doubleopenbrackets or pipe even if other braces and pipes show up (not greedy)' do
expect(scan "ab|c{{d}}e").to eq("ab")
expect(scan "ab|c|d|e").to eq("ab")
expect(scan "ab{{c|d}}e").to eq("ab")
expect(scan "ab}}c|d{{e").to eq("ab")
expect(scan "ab[[c|d}}e").to eq("ab")
expect(scan "ab[[c|d{{e").to eq("ab")
end
it 'ignores lone braces' do
expect(scan "ab{c|d}}e").to eq("ab{c")
expect(scan "ab}c|d{{e").to eq("ab}c")
end
it 'ignores lone openbrackets' do
expect(scan "ab[c|d}}e").to eq("ab[c")
end
it 'ignores closebrackets' do
expect(scan "ab]]c|d}}e").to eq("ab]]c")
end
end
describe '.till_doubleopenbrace_or_doubleopenbrackets' do
include ScanShortcutFor(:till_doubleopenbrace_or_doubleopenbrackets)
it 'consumes a string with no doubleopenbraces or doubleopenbrackets at all' do
expect(scan "abc").to eq("abc")
expect(scan "ab}}c").to eq("ab}}c")
expect(scan "ab|c").to eq("ab|c")
expect(scan "ab]]c").to eq("ab]]c")
end
it 'consumes until doubleopenbraces' do
expect(scan "abc{{d").to eq("abc")
expect(scan "abc|d{{").to eq("abc|d")
expect(scan "abc}}d{{").to eq("abc}}d")
end
it 'consumes until doubleopenbrackets' do
expect(scan "abc[[d").to eq("abc")
expect(scan "abc|d[[").to eq("abc|d")
expect(scan "abc]]d[[").to eq("abc]]d")
end
it 'does not accept an empty string (epsilon transition)' do
expect(scan "{{d").to be_false
expect(scan "[[d").to be_false
end
it 'consumes until doubleopenbraces/brackets if other doubleopenbraces/brackets show up (not greedy)' do
expect(scan "ab{{d{{e").to eq("ab")
expect(scan "ab[[d{{e").to eq("ab")
expect(scan "ab[[d[[e").to eq("ab")
expect(scan "ab{{d[[e").to eq("ab")
end
it 'ignores lone braces and brackets' do
expect(scan "ab[{c{{e").to eq("ab[{c")
expect(scan "ab[{c[[e").to eq("ab[{c")
end
end
describe '.till_doubleclosebrace_or_pipe' do
include ScanShortcutFor(:till_doubleclosebrace_or_pipe)
it 'consumes a string with no doubleclosebraces or pipes at all' do
expect(scan "abc").to eq("abc")
expect(scan "ab{{c").to eq("ab{{c")
end
it 'consumes until doubleclosebraces' do
expect(scan "abc}}d").to eq("abc")
expect(scan "a{{bc}}d").to eq("a{{bc")
end
it 'consumes until a pipe' do
expect(scan "abc|d").to eq("abc")
expect(scan "a{{bc|d").to eq("a{{bc")
end
it 'does not accept an empty string (epsilon transition)' do
expect(scan "}}d").to be_false
expect(scan "|d").to be_false
end
it 'consumes until doubleclosebracees even if other doubleclosebrace show up (not greedy)' do
expect(scan "ab}}d}}e").to eq("ab")
end
it 'consumes until a pipe even if other pipes show up (not greedy)' do
expect(scan "ab|d|e").to eq("ab")
end
it 'ignores lone braces' do
expect(scan "ab}c}}e").to eq("ab}c")
end
end
describe '.from_pipe_till_equals_no_doubleclosebrace_or_pipe' do
include ScanShortcutFor(:from_pipe_till_equals_no_doubleclosebrace_or_pipe)
context 'when there is an equals sign and a pipe' do
it 'consumes a string including equals with no doubleclosebraces or pipes at all' do
expect(scan "|abc=").to eq("|abc=")
expect(scan "|abc=d").to eq("|abc=")
expect(scan "|ab{{c=d").to eq("|ab{{c=")
end
it 'fails when doubleclosebraces occur before equals' do
expect(scan "|abc}}d=e").to be_false
expect(scan "|a{{bc}}d=e").to be_false
end
it 'ignores single closebraces' do
expect(scan "|abc}d=e").to eq("|abc}d=")
expect(scan "|a{{bc}d=e").to eq("|a{{bc}d=")
end
it 'fails when a pipe occurs before equals' do
expect(scan "|abc|d=e").to be_false
expect(scan "|a{{bc|d=e").to be_false
end
it 'does actually accept an empty string (epsilon transition)' do
expect(scan "|=d").to eq("|=")
expect(scan "|=").to eq("|=")
end
it 'consumes until equals even if other equals show up (not greedy)' do
expect(scan "|ab=d=e").to eq("|ab=")
end
it 'provides us with the stuff between pipe and equals in the first index' do
expect(scanner_after("|ab=c")[1]).to eq("ab")
expect(scanner_after("|=c")[1]).to eq("")
end
end
context 'when there is no equals sign' do
it 'fails on plain string' do
expect(scan "|abc").to be_false
end
it 'fails when there is a pipe' do
expect(scan "abc|d").to be_false
expect(scan "abcd|").to be_false
expect(scan "|abcd").to be_false
end
it 'fails when there are doubleclosebraces' do
expect(scan "abc}}d").to be_false
expect(scan "abcd}}").to be_false
expect(scan "}}abcd").to be_false
end
end
context 'when the pipe is not a the beginning or there is no pipe' do
it 'fails' do
expect(scan "abc").to be_false
expect(scan "abc=").to be_false
expect(scan "a|bc=d").to be_false
expect(scan " |bc=d").to be_false
end
end
end
describe '.a_pipe' do
include ScanShortcutFor(:a_pipe)
it 'consumes a pipe' do
expect(scan "|").to eq("|")
expect(scan "|a").to eq("|")
end
it 'consumes only one pipe even if there are others around (not greedy)' do
expect(scan "|||").to eq("|")
expect(scan "|a|").to eq("|")
expect(scan "|a|a").to eq("|")
end
it 'fails when there is stuff before the pipe' do
expect(scan "a|").to be_false
expect(scan "a|b").to be_false
end
end
describe '.a_doubleopenbrace' do
include ScanShortcutFor(:a_doubleopenbrace)
it 'consumes a doubleopenbrace' do
expect(scan "{{").to eq("{{")
expect(scan "{{a").to eq("{{")
end
it 'consumes only one doubleopenbrace even if there are others around (not greedy)' do
expect(scan "{{a{{").to eq("{{")
expect(scan "{{a{{a").to eq("{{")
end
it 'fails when there is stuff before the doubleopenbrace' do
expect(scan "a{{").to be_false
expect(scan "a{{b").to be_false
end
it 'ignores singleopenbrace' do
expect(scan "a{").to be_false
expect(scan "a{b").to be_false
end
it 'deals with extra braces' do
expect(scan "{{{").to eq("{{")
expect(scan "{{{{").to eq("{{")
expect(scan "{{{{{").to eq("{{")
expect(scan "{{{{{{").to eq("{{")
end
it 'ignores pipes and doubleclosingbrace' do
expect(scan "|").to be_false
expect(scan "}}").to be_false
end
end
describe '.a_doubleclosingbrace' do
include ScanShortcutFor(:a_doubleclosingbrace)
it 'consumes a doubleclosingbrace' do
expect(scan "}}").to eq("}}")
expect(scan "}}a").to eq("}}")
end
it 'consumes only one doubleclosingbrace even if there are others around (not greedy)' do
expect(scan "}}a}}").to eq("}}")
expect(scan "}}a}}a").to eq("}}")
end
it 'fails when there is stuff before the doubleclosingbrace' do
expect(scan "a}}").to be_false
expect(scan "a}}b").to be_false
end
it 'ignores singleclosebrace' do
expect(scan "a}").to be_false
expect(scan "a}b").to be_false
end
it 'deals with extra braces' do
expect(scan "}}}").to eq("}}")
expect(scan "}}}}").to eq("}}")
expect(scan "}}}}}").to eq("}}")
expect(scan "}}}}}}").to eq("}}")
end
it 'ignores pipes and doubleopenbrace' do
expect(scan "|").to be_false
expect(scan "{{").to be_false
end
end
describe '.a_link' do
include ScanShortcutFor(:a_link)
it 'consumes a normal link' do
s = scanner_after("[[foo]]")
expect(s.matched).to eq("[[foo]]")
expect(s[1]).to eq("foo")
expect(s[2]).to be_nil
expect(s[3]).to be_nil
end
it 'consumes only the normal link' do
s = scanner_after("[[foo]].")
expect(s.matched).to eq("[[foo]]")
expect(s[1]).to eq("foo")
expect(s[2]).to be_nil
expect(s[3]).to be_nil
end
it 'consumes some extra letters after closing brackets' do
s = scanner_after("[[foo]]nx.")
expect(s.matched).to eq("[[foo]]nx")
expect(s[1]).to eq("foo")
expect(s[2]).to be_nil
expect(s[3]).to eq("nx")
end
it 'consumes a link label' do
s = scanner_after("[[foo|bar]].")
expect(s.matched).to eq("[[foo|bar]]")
expect(s[1]).to eq("foo")
expect(s[2]).to eq("bar")
expect(s[3]).to be_nil
end
it 'consumes a link label and extra letters' do
s = scanner_after("[[foo|bar]]ny.")
expect(s.matched).to eq("[[foo|bar]]ny")
expect(s[1]).to eq("foo")
expect(s[2]).to eq("bar")
expect(s[3]).to eq("ny")
end
it 'consumes an empty link label' do
s = scanner_after("[[foo|]].")
expect(s.matched).to eq("[[foo|]]")
expect(s[1]).to eq("foo")
expect(s[2]).to eq("")
expect(s[3]).to be_nil
end
it 'consumes a link with an anchor' do
s = scanner_after("[[foo#ro|bar]]ny.")
expect(s.matched).to eq("[[foo#ro|bar]]ny")
expect(s[1]).to eq("foo#ro")
expect(s[2]).to eq("bar")
expect(s[3]).to eq("ny")
end
it 'does not consume unclosed links' do
expect(scan "[[a").to be_false
end
it 'does not consume unclosed links with newlines' do
expect(scan "[[a\nb]]").to be_false
end
it 'consume only to the first pair of brackets even if there are others around' do
s = scanner_after("[[a]]b]]c,x")
expect(s.matched).to eq("[[a]]b")
expect(s[1]).to eq("a")
expect(s[2]).to be_nil
expect(s[3]).to eq("b")
end
it 'consumes pipes in the label' do
s = scanner_after("[[a|b|c]]d,x")
expect(s.matched).to eq("[[a|b|c]]d")
expect(s[1]).to eq("a")
expect(s[2]).to eq("b|c")
expect(s[3]).to eq("d")
end
it 'consumes single brackets in the label' do
s = scanner_after("[[a|b]c]]d,x")
expect(s.matched).to eq("[[a|b]c]]d")
expect(s[1]).to eq("a")
expect(s[2]).to eq("b]c")
expect(s[3]).to eq("d")
end
it 'consumes parens in urls' do
s = scanner_after("[[a(b)|c]]d.")
expect(s.matched).to eq("[[a(b)|c]]d")
expect(s[1]).to eq("a(b)")
expect(s[2]).to eq("c")
expect(s[3]).to eq("d")
end
it 'consumes commas in urls' do
s = scanner_after("[[a,b|c]]d.")
expect(s.matched).to eq("[[a,b|c]]d")
expect(s[1]).to eq("a,b")
expect(s[2]).to eq("c")
expect(s[3]).to eq("d")
end
it 'consumes spaces in urls' do
s = scanner_after("[[ ]]")
expect(s.matched).to eq("[[ ]]")
expect(s[1]).to eq(" ")
expect(s[2]).to be_nil
expect(s[3]).to be_nil
end
end
end |
require 'spec_helper'

# Smoke test for the Ripcord gem.
describe Ripcord do
  it 'has a version number' do
    expect(Ripcord::VERSION).not_to be nil
  end

  # The generator's placeholder example ("does something useful" asserting
  # `expect(false).to eq(true)`) was removed: it always fails and verifies
  # nothing about the gem.
end
Remove generated spec
require 'spec_helper'
# Smoke test: verifies the gem loads and exposes its VERSION constant.
describe Ripcord do
  it 'has a version number' do
    expect(Ripcord::VERSION).not_to be nil
  end
end
|
ENV["RAILS_ENV"], ENV["RACK_ENV"] = "test", "test"

$:.unshift(File.expand_path("../../", __FILE__))

# BUG FIX: ENV values are always Strings (or nil), so the previous
# `ENV["COVERAGE"] == true` comparison could never be satisfied and
# coverage was silently never enabled. Any non-nil value now opts in.
if ENV["COVERAGE"]
  require "simplecov"
  SimpleCov.start
end

require "mongoid/finders/find_by"

Mongoid.load!(File.expand_path("../../config/mongoid.yml", __FILE__), :test)

# Helpers for generating random identifiers inside specs.
module Mongoid::SpecHelpers
  class << self
    # Random capitalized symbol usable as a class name.
    # (`random` already capitalizes, so the old extra `.capitalize` was a no-op.)
    def random_class
      random(10).to_sym
    end

    # Random capitalized string of exactly +len+ lowercase letters.
    # (The previous `(0..len)` range produced len + 1 characters.)
    def random(len)
      (1..len).map { ('a'..'z').to_a[rand(26)] }.join.capitalize
    end
  end
end

# Use a per-process database on CI so parallel builds don't collide.
if ENV["CI"]
  Mongoid.configure do |config|
    config.connect_to("travis-#{Process.pid}")
  end
end

module Rails
  class Application
    # 0
  end
end

module FindBy
  class Application < Rails::Application
    # 1
  end
end

RSpec.configure do |config|
  # Wipe all collections before every example for isolation.
  config.before(:each) do
    Mongoid.purge!
  end
end
Clean up rspec/helper.
ENV["RAILS_ENV"], ENV["RACK_ENV"] = "test", "test"

$:.unshift(File.expand_path("../../", __FILE__))

require "simplecov"
SimpleCov.start

require "mongoid/finders/find_by"

Mongoid.load!(File.expand_path("../../config/mongoid.yml", __FILE__), :test)

# Helpers for generating random identifiers inside specs.
module Mongoid::SpecHelpers
  class << self
    # Random capitalized symbol usable as a class name.
    # (`random` already capitalizes, so the old extra `.capitalize` was a no-op.)
    def random_class
      random(10).to_sym
    end

    # Random capitalized string of exactly +len+ lowercase letters.
    # (The previous `(0..len)` range produced len + 1 characters.)
    def random(len)
      (1..len).map { ('a'..'z').to_a[rand(26)] }.join.capitalize
    end
  end
end

# Use a per-process database on CI so parallel builds don't collide.
if ENV["CI"]
  Mongoid.configure do |config|
    config.connect_to("travis-#{Process.pid}")
  end
end

module Rails
  class Application
    # 0
  end
end

module FindBy
  class Application < Rails::Application
    # 1
  end
end

RSpec.configure do |config|
  # Wipe all collections before every example for isolation.
  config.before(:each) do
    Mongoid.purge!
  end
end
|
require 'spec_helper'
require 'pry'
RSpec.describe Service do
let(:service) { Service.new({:name => "Affordable Housing Commission", :phone => "(314) 657-3880", :fax => "(314) 613-7015", :address => "1520 Market Street", :url => "https://www.stlouis-mo.gov/government/departments/affordable-housing/index.cfm"}) }
let(:url) { './fixtures/target_site.html'}
describe "#initialize" do
it "creates an instance of a service with all contact info" do
expect(service.instance_variable_get(:@name)).to eq("Affordable Housing Commission")
expect(service.instance_variable_get(:@url)).to eq("https://www.stlouis-mo.gov/government/departments/affordable-housing/index.cfm")
expect(service.instance_variable_get(:@phone)).to eq("(314) 657-3880")
expect(service.instance_variable_get(:@fax)).to eq("(314) 613-7015")
expect(service.instance_variable_get(:@address)).to eq("1520 Market Street")
service.instance_variable_get(:@name)
service.instance_variable_get(:@url)
service.instance_variable_get(:@phone)
service.instance_variable_get(:@fax)
service.instance_variable_get(:@address)
end
end
describe ".all" do
it "returns the value of @@all" do
expect(service.class.all).to be_kind_of(Array)
service.class.all
end
end
describe "#save" do
it "adds this instance to the @@all array" do
service.save
expect(service.class.all.size).not_to eq(0)
service.class.all.size
end
end
describe ".create_services" do
it "takes input from Scraper class and creates services with it" do
service.class.create_services(url)
expect(service.class.all.size).to eq(95)
end
end
describe ".list_all_services" do
it "lists the name of each service" do
expect($stdout).to receive(:puts).exactly(95).times.with any_args
service.class.list_all_services
end
end
end
Add test for .list_all_services_by_letter
require 'spec_helper'
require 'pry'

RSpec.describe Service do
  let(:service) { Service.new({:name => "Affordable Housing Commission", :phone => "(314) 657-3880", :fax => "(314) 613-7015", :address => "1520 Market Street", :url => "https://www.stlouis-mo.gov/government/departments/affordable-housing/index.cfm"}) }
  let(:url) { './fixtures/target_site.html'}

  describe "#initialize" do
    it "creates an instance of a service with all contact info" do
      # Dead trailing `instance_variable_get` calls (whose results were
      # discarded) were removed; only the expectations remain.
      expect(service.instance_variable_get(:@name)).to eq("Affordable Housing Commission")
      expect(service.instance_variable_get(:@url)).to eq("https://www.stlouis-mo.gov/government/departments/affordable-housing/index.cfm")
      expect(service.instance_variable_get(:@phone)).to eq("(314) 657-3880")
      expect(service.instance_variable_get(:@fax)).to eq("(314) 613-7015")
      expect(service.instance_variable_get(:@address)).to eq("1520 Market Street")
    end
  end

  describe ".all" do
    it "returns the value of @@all" do
      expect(service.class.all).to be_kind_of(Array)
    end
  end

  describe "#save" do
    it "adds this instance to the @@all array" do
      service.save
      expect(service.class.all.size).not_to eq(0)
    end
  end

  describe ".create_services" do
    it "takes input from Scraper class and creates services with it" do
      service.class.create_services(url)
      expect(service.class.all.size).to eq(95)
    end
  end

  describe ".list_all_services" do
    it "lists the name of each service" do
      expect($stdout).to receive(:puts).exactly(95).times.with any_args
      service.class.list_all_services
    end
  end

  describe ".list_all_services_by_letter" do
    it "lists all the services starting with input letter" do
      # NOTE(review): the description mentions an "input letter" but no letter
      # is passed here -- confirm the method's signature before relying on this.
      expect($stdout).to receive(:puts).exactly(3).times.with any_args
      service.class.list_all_services_by_letter
    end
  end
end
RSpec.configure do |config|
# Check that the factories are all valid.
config.before(:suite) do
# The Stubbed I18n backend will return the translation key, while having the same behaviour
# as the standard backend. This allows tests to be written without assuming the translation.
class StubbedI18nBackend < I18n::Backend::Simple
protected
def lookup(_, key, _, _)
super
key.to_s
end
end
I18n.backend = StubbedI18nBackend.new
end
end
Do not hide the translations for Rails keys.
RSpec.configure do |config|
  # Check that the factories are all valid.
  config.before(:suite) do
    # The Stubbed I18n backend will return the translation key, while having the same behaviour
    # as the standard backend. This allows tests to be written without assuming the translation.
    class StubbedI18nBackend < I18n::Backend::Simple
      protected
      # Returns the lookup key itself instead of the translated text, except
      # for framework-owned namespaces (activerecord/attributes/errors/support),
      # where the real translation is needed so Rails keeps behaving normally.
      #
      # NOTE(review): `key` is reassigned before the bare `super`, so the
      # parent lookup receives the String form of the key rather than the
      # original (possibly Symbol) value -- presumably intentional, but
      # confirm I18n::Backend::Simple#lookup accepts String keys here.
      def lookup(_, key, _, _)
        key = key.to_s
        result = super
        if key.start_with?('activerecord', 'attributes', 'errors', 'support')
          result
        else
          key
        end
      end
    end
    I18n.backend = StubbedI18nBackend.new
  end
end
|
require "docker"
if ENV.key?("DOCKER_HOST")
Docker.url = ENV.fetch("DOCKER_HOST")
end
class TestCluster
DOCKER_HOST = ENV.fetch("DOCKER_HOST") {
if `ifconfig -a | grep '^[a-zA-Z]' | cut -d':' -f1`.split.include?('docker0')
# Use the IP address of the docker0 network interface
ip = `/sbin/ifconfig docker0 | grep "inet addr" | cut -d ':' -f 2 | cut -d ' ' -f 1`.strip
else
# Use the IP address of the current machine. The loopback address won't resolve
# properly within the container.
ip = `/sbin/ifconfig | grep -v '127.0.0.1' | awk '$1=="inet" {print $2}' | cut -f1 -d'/' | head -n 1`.strip
end
"kafka://#{ip}"
}
DOCKER_HOSTNAME = URI(DOCKER_HOST).host
KAFKA_IMAGE = "ches/kafka:0.10.2.1"
ZOOKEEPER_IMAGE = "jplock/zookeeper:3.4.6"
KAFKA_CLUSTER_SIZE = 3
def start
[KAFKA_IMAGE, ZOOKEEPER_IMAGE].each do |image|
print "Fetching image #{image}... "
unless Docker::Image.exist?(image)
Docker::Image.create("fromImage" => image)
end
puts "OK"
end
puts "Starting cluster..."
@zookeeper = create(
"Image" => ZOOKEEPER_IMAGE,
"Hostname" => "localhost",
"ExposedPorts" => {
"2181/tcp" => {}
},
)
@kafka_brokers = KAFKA_CLUSTER_SIZE.times.map {|broker_id|
port = 9093 + broker_id
create(
"Image" => KAFKA_IMAGE,
"Hostname" => "localhost",
"Links" => ["#{@zookeeper.id}:zookeeper"],
"ExposedPorts" => {
"9092/tcp" => {}
},
"Env" => [
"KAFKA_BROKER_ID=#{broker_id}",
"KAFKA_ADVERTISED_HOST_NAME=#{DOCKER_HOSTNAME}",
"KAFKA_ADVERTISED_PORT=#{port}",
]
)
}
@kafka = @kafka_brokers.first
start_zookeeper_container
start_kafka_containers
end
def start_zookeeper_container
@zookeeper.start(
"PortBindings" => {
"2181/tcp" => [{ "HostPort" => "" }]
}
)
config = @zookeeper.json.fetch("NetworkSettings").fetch("Ports")
port = config.fetch("2181/tcp").first.fetch("HostPort")
wait_for_port(port)
Thread.new do
File.open("zookeeper.log", "a") do |log|
@zookeeper.attach do |stream, chunk|
log.puts(chunk)
end
end
end
end
def start_kafka_containers
@kafka_brokers.each_with_index do |kafka, index|
port = 9093 + index
kafka.start(
"PortBindings" => {
"9092/tcp" => [{ "HostPort" => "#{port}/tcp" }]
},
)
Thread.new do
File.open("kafka#{index}.log", "a") do |log|
kafka.attach do |stream, chunk|
log.puts(chunk)
end
end
end
end
ensure_kafka_is_ready
end
def kafka_hosts
@kafka_brokers.map {|kafka|
config = kafka.json.fetch("NetworkSettings").fetch("Ports")
port = config.fetch("9092/tcp").first.fetch("HostPort")
host = DOCKER_HOSTNAME
"#{host}:#{port}"
}
end
def kill_kafka_broker(number)
broker = @kafka_brokers[number]
puts "Killing broker #{number}"
broker.kill
broker.remove rescue nil
end
def start_kafka_broker(number)
broker = @kafka_brokers[number]
puts "Starting broker #{number}"
broker.start
end
def create_topic(topic, num_partitions: 1, num_replicas: 1)
print "Creating topic #{topic}... "
kafka_command [
"/kafka/bin/kafka-topics.sh",
"--create",
"--topic=#{topic}",
"--replication-factor=#{num_replicas}",
"--partitions=#{num_partitions}",
"--zookeeper=zookeeper",
]
puts "OK"
out = kafka_command [
"/kafka/bin/kafka-topics.sh",
"--describe",
"--topic=#{topic}",
"--zookeeper=zookeeper",
]
puts out
end
def kafka_command(command, attempt: 1)
container = create(
"Image" => KAFKA_IMAGE,
"Links" => ["#{@zookeeper.id}:zookeeper"],
"Cmd" => command,
)
begin
container.start
status = container.wait.fetch('StatusCode')
if status != 0
puts container.logs(stdout: true, stderr: true)
raise "Command failed with status #{status}"
end
container.logs(stdout: true, stderr: true)
ensure
container.delete(force: true) rescue nil
end
rescue => _
if attempt < 3
kafka_command(command, attempt: attempt + 1)
end
end
def stop
puts "Stopping cluster..."
@kafka_brokers.each {|kafka| kafka.delete(force: true) rescue nil }
@zookeeper.delete(force: true) rescue nil
end
private
def ensure_kafka_is_ready
kafka_hosts.each do |host_and_port|
host, port = host_and_port.split(":", 2)
wait_for_port(port, host: host)
end
end
def wait_for_port(port, host: DOCKER_HOSTNAME)
print "Waiting for #{host}:#{port}..."
loop do
begin
socket = TCPSocket.open(host, port)
socket.close
puts " OK"
break
rescue
print "."
sleep 1
end
end
end
def create(options)
Docker::Container.create(options)
end
end
Add a sleep for good measure
require "docker"
if ENV.key?("DOCKER_HOST")
Docker.url = ENV.fetch("DOCKER_HOST")
end
class TestCluster
  # Kafka URL advertised to clients. When DOCKER_HOST is not set, sniff a
  # usable IP address from the local network interfaces.
  DOCKER_HOST = ENV.fetch("DOCKER_HOST") {
    if `ifconfig -a | grep '^[a-zA-Z]' | cut -d':' -f1`.split.include?('docker0')
      # Use the IP address of the docker0 network interface
      ip = `/sbin/ifconfig docker0 | grep "inet addr" | cut -d ':' -f 2 | cut -d ' ' -f 1`.strip
    else
      # Use the IP address of the current machine. The loopback address won't resolve
      # properly within the container.
      ip = `/sbin/ifconfig | grep -v '127.0.0.1' | awk '$1=="inet" {print $2}' | cut -f1 -d'/' | head -n 1`.strip
    end

    "kafka://#{ip}"
  }

  DOCKER_HOSTNAME = URI(DOCKER_HOST).host
  KAFKA_IMAGE = "ches/kafka:0.10.2.1"
  ZOOKEEPER_IMAGE = "jplock/zookeeper:3.4.6"
  KAFKA_CLUSTER_SIZE = 3

  # Pulls the required images (if missing), creates one ZooKeeper container
  # plus KAFKA_CLUSTER_SIZE broker containers, and boots them all.
  def start
    [KAFKA_IMAGE, ZOOKEEPER_IMAGE].each do |image|
      print "Fetching image #{image}... "

      unless Docker::Image.exist?(image)
        Docker::Image.create("fromImage" => image)
      end

      puts "OK"
    end

    puts "Starting cluster..."

    @zookeeper = create(
      "Image" => ZOOKEEPER_IMAGE,
      "Hostname" => "localhost",
      "ExposedPorts" => {
        "2181/tcp" => {}
      },
    )

    @kafka_brokers = KAFKA_CLUSTER_SIZE.times.map {|broker_id|
      # Each broker advertises a distinct host port starting at 9093.
      port = 9093 + broker_id

      create(
        "Image" => KAFKA_IMAGE,
        "Hostname" => "localhost",
        "Links" => ["#{@zookeeper.id}:zookeeper"],
        "ExposedPorts" => {
          "9092/tcp" => {}
        },
        "Env" => [
          "KAFKA_BROKER_ID=#{broker_id}",
          "KAFKA_ADVERTISED_HOST_NAME=#{DOCKER_HOSTNAME}",
          "KAFKA_ADVERTISED_PORT=#{port}",
        ]
      )
    }

    @kafka = @kafka_brokers.first

    start_zookeeper_container
    start_kafka_containers
  end

  # Boots ZooKeeper, waits for its client port, and streams its output to
  # zookeeper.log in a background thread.
  def start_zookeeper_container
    @zookeeper.start(
      "PortBindings" => {
        "2181/tcp" => [{ "HostPort" => "" }]
      }
    )

    config = @zookeeper.json.fetch("NetworkSettings").fetch("Ports")
    port = config.fetch("2181/tcp").first.fetch("HostPort")

    wait_for_port(port)

    Thread.new do
      File.open("zookeeper.log", "a") do |log|
        @zookeeper.attach do |_stream, chunk|
          log.puts(chunk)
        end
      end
    end
  end

  # Boots every broker, streams each one's output to kafka<N>.log, then
  # blocks until the whole cluster accepts connections.
  def start_kafka_containers
    @kafka_brokers.each_with_index do |kafka, index|
      port = 9093 + index

      kafka.start(
        "PortBindings" => {
          "9092/tcp" => [{ "HostPort" => "#{port}/tcp" }]
        },
      )

      Thread.new do
        File.open("kafka#{index}.log", "a") do |log|
          kafka.attach do |_stream, chunk|
            log.puts(chunk)
          end
        end
      end
    end

    ensure_kafka_is_ready
  end

  # Returns the list of "host:port" strings clients should connect to.
  def kafka_hosts
    @kafka_brokers.map {|kafka|
      config = kafka.json.fetch("NetworkSettings").fetch("Ports")
      port = config.fetch("9092/tcp").first.fetch("HostPort")
      host = DOCKER_HOSTNAME

      "#{host}:#{port}"
    }
  end

  # Force-kills and removes one broker (used to simulate broker failure).
  def kill_kafka_broker(number)
    broker = @kafka_brokers[number]
    puts "Killing broker #{number}"
    broker.kill
    broker.remove rescue nil
  end

  # Restarts a previously killed broker.
  def start_kafka_broker(number)
    broker = @kafka_brokers[number]
    puts "Starting broker #{number}"
    broker.start
  end

  # Creates a topic via kafka-topics.sh and prints its description.
  def create_topic(topic, num_partitions: 1, num_replicas: 1)
    print "Creating topic #{topic}... "

    kafka_command [
      "/kafka/bin/kafka-topics.sh",
      "--create",
      "--topic=#{topic}",
      "--replication-factor=#{num_replicas}",
      "--partitions=#{num_partitions}",
      "--zookeeper=zookeeper",
    ]

    puts "OK"

    out = kafka_command [
      "/kafka/bin/kafka-topics.sh",
      "--describe",
      "--topic=#{topic}",
      "--zookeeper=zookeeper",
    ]

    puts out
  end

  # Runs +command+ in a throwaway Kafka container linked to ZooKeeper and
  # returns the container's combined stdout/stderr logs.
  #
  # Retries up to three times on failure. BUG FIX: previously the rescue
  # swallowed the error once attempts were exhausted and silently returned
  # nil; the last error is now re-raised so callers see the failure.
  def kafka_command(command, attempt: 1)
    container = create(
      "Image" => KAFKA_IMAGE,
      "Links" => ["#{@zookeeper.id}:zookeeper"],
      "Cmd" => command,
    )

    begin
      container.start

      status = container.wait.fetch('StatusCode')

      if status != 0
        puts container.logs(stdout: true, stderr: true)
        raise "Command failed with status #{status}"
      end

      container.logs(stdout: true, stderr: true)
    ensure
      container.delete(force: true) rescue nil
    end
  rescue => e
    raise e if attempt >= 3

    # Transient Docker/Kafka hiccups are common while the cluster settles.
    kafka_command(command, attempt: attempt + 1)
  end

  # Force-deletes every container; failures are ignored on teardown.
  def stop
    puts "Stopping cluster..."

    @kafka_brokers.each {|kafka| kafka.delete(force: true) rescue nil }
    @zookeeper.delete(force: true) rescue nil
  end

  private

  # Blocks until every broker port accepts TCP connections.
  def ensure_kafka_is_ready
    kafka_hosts.each do |host_and_port|
      host, port = host_and_port.split(":", 2)
      wait_for_port(port, host: host)
    end

    # Extra settling time: the port being open does not mean the broker has
    # finished joining the cluster.
    sleep 10
  end

  # Polls host:port once per second, printing progress, until it accepts a
  # TCP connection.
  def wait_for_port(port, host: DOCKER_HOSTNAME)
    print "Waiting for #{host}:#{port}..."

    loop do
      begin
        socket = TCPSocket.open(host, port)
        socket.close
        puts " OK"

        break
      rescue
        print "."
        sleep 1
      end
    end
  end

  def create(options)
    Docker::Container.create(options)
  end
end
|
require_relative 'spec_helper'
require 'json'
describe "Doc App" do
it "should re-direct from /start to home" do
get '/start/'
last_response.should be_redirect
last_response.location.should include '/doc'
end
it "should re-direct from Randor to Wistia Basics" do
get '/randor-basics/'
last_response.should be_redirect
last_response.location.should include '/wistia-basics-getting-started'
end
it "should respond to search" do
get '/search/media'
parsed_body = JSON.parse(last_response.body)
parsed_body["results"].length.should > 0
parsed_body["results"][0]["title"].should_not be ""
parsed_body["results"][0]["title"].should == "Guide to Using Media in Wistia"
end
end
Left myself a note for when that test breaks
require_relative 'spec_helper'
require 'json'

describe "Doc App" do
  it "should re-direct from /start to home" do
    get '/start/'
    last_response.should be_redirect
    last_response.location.should include '/doc'
  end

  it "should re-direct from Randor to Wistia Basics" do
    get '/randor-basics/'
    last_response.should be_redirect
    last_response.location.should include '/wistia-basics-getting-started'
  end

  it "should respond to search" do
    get '/search/media'
    parsed_body = JSON.parse(last_response.body)
    parsed_body["results"].length.should > 0 # if this fails, try `rake build`
    # BUG FIX: `should_not be ""` compared object identity, which a freshly
    # parsed string can never share with the literal -- the assertion could
    # never fail. `eq` compares values, which is what was intended.
    parsed_body["results"][0]["title"].should_not eq ""
    parsed_body["results"][0]["title"].should == "Guide to Using Media in Wistia"
  end
end
|
################################################################################
#
# Author: Zachary Patten <zachary AT jovelabs DOT com>
# Copyright: Copyright (c) Zachary Patten
# License: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
require "spec_helper"
describe ZTK::SSH do
let(:ui) { ZTK::UI.new(:stdout => StringIO.new, :stderr => StringIO.new, :stdin => StringIO.new) }
subject { ZTK::SSH.new }
describe "class" do
it "should be an instance of ZTK::SSH" do
expect(subject).to be_an_instance_of ZTK::SSH
end
end
[ :direct, :proxy ].each do |connection_type|
before(:each) do
subject.config do |config|
config.ui = ui
config.user = ENV["USER"]
config.host_name = "127.0.0.1"
if connection_type == :proxy
config.proxy_user = ENV["USER"]
config.proxy_host_name = "127.0.0.1"
end
end
end
describe "#execute (#{connection_type})" do
it "should be able to connect to 127.0.0.1 as the current user and execute a command (your key must be in ssh-agent)" do
data = %x(hostname).chomp
status = subject.exec("hostname")
expect(status.exit_code).to equal 0
ui.stdout.rewind
expect(ui.stdout.read.chomp).to match data
expect(subject.close).to be true
end
it "should timeout after the period specified" do
subject.config.timeout = WAIT_SMALL
expect { subject.exec("sleep 10") }.to raise_error ZTK::SSHError
expect(subject.close).to be true
end
it "should throw an exception if the exit status is not as expected" do
expect { subject.exec("exit 42") }.to raise_error ZTK::SSHError
expect(subject.close).to be true
end
it "should return a instance of an OpenStruct object" do
result = subject.exec(%{echo "Hello World"})
expect(result).to be_an_instance_of OpenStruct
expect(subject.close).to be true
end
it "should return the exit code" do
data = 64
result = subject.exec(%{exit #{data}}, :exit_code => data)
expect(result.exit_code).to equal data
expect(subject.close).to be true
end
it "should return the output" do
data = "Hello World @ #{Time.now.utc}"
result = subject.exec(%Q{echo "#{data}"})
expect(result.output).to match data
expect(subject.close).to be true
end
it "should allow us to change the expected exit code" do
data = 32
result = subject.exec(%{exit #{data}}, :exit_code => data)
expect(result.exit_code).to equal data
expect(subject.close).to be true
end
it "should allow us to execute a bootstrap script" do
data = "Hello World @ #{Time.now.utc}"
result = subject.bootstrap(<<-EOBOOTSTRAP)
echo "#{data}" >&1
EOBOOTSTRAP
expect(result.output.chomp).to match data
expect(subject.close).to be true
end
it "should allow us to write a file" do
data = "Hello World @ #{Time.now.utc}"
test_filename = File.join("", "tmp", "test_file.txt")
subject.file(:target => test_filename) do |f|
f.write(data)
end
result = subject.exec(%{cat #{test_filename}})
expect(result.output.chomp).to match data
expect(subject.close).to be true
end
end #execute
describe "#ui (#{connection_type})" do
describe "#stdout (#{connection_type})" do
[true, false].each do |request_pty|
it "should capture STDOUT #{request_pty ? "with" : "without"} PTY and send it to the STDOUT pipe" do
subject.config.request_pty = request_pty
data = "Hello World @ #{Time.now.utc}"
subject.exec(%{echo "#{data}" >&1})
ui.stdout.rewind
expect(ui.stdout.read).to match data
ui.stderr.rewind
expect(ui.stderr.read).to be_empty
ui.stdin.rewind
expect(ui.stdin.read).to be_empty
expect(subject.close).to be true
end
end
end #stdout
describe "#stderr (#{connection_type})" do
[true, false].each do |request_pty|
it "should capture STDERR #{request_pty ? "with" : "without"} PTY and send it to the #{request_pty ? "STDOUT" : "STDERR"} pipe" do
subject.config.request_pty = request_pty
data = "Hello World @ #{Time.now.utc}"
subject.exec(%{echo "#{data}" >&2})
ui.stdout.rewind
expect(ui.stdout.read).to (request_pty ? match(data) : be_empty)
ui.stderr.rewind
expect(ui.stderr.read).to (request_pty ? be_empty : match(data))
ui.stdin.rewind
expect(ui.stdin.read).to be_empty
expect(subject.close).to be true
end
end
end #stderr
end #ui
# Exercises ZTK::SSH#upload over both transfer back-ends (SCP and SFTP).
# Requires sshd on 127.0.0.1 and the current user's key in ssh-agent.
describe "#upload (#{connection_type})" do
  [true, false].each do |use_scp|
    it "should be able to upload a file to 127.0.0.1 as the current user using #{use_scp ? 'scp' : 'sftp'} (your key must be in ssh-agent)" do
      data = "Hello World @ #{Time.now.utc}"
      # Reserve a unique basename under the project tmp/ dir for the remote
      # target, then make sure no stale copy is present before uploading.
      # File.exist? replaces File.exists?, which was removed in Ruby 3.2.
      remote_temp = Tempfile.new('remote')
      remote_file = File.join(ZTK::Locator.root, "tmp", File.basename(remote_temp.path.dup))
      remote_temp.close
      File.exist?(remote_file) && File.delete(remote_file)
      local_temp = Tempfile.new('local')
      local_file = File.join(ZTK::Locator.root, "tmp", File.basename(local_temp.path.dup))
      local_temp.close
      # IO.write only exists on Ruby >= 1.9.3.
      if RUBY_VERSION < "1.9.3"
        File.open(local_file, 'w') do |file|
          file.puts(data)
        end
      else
        IO.write(local_file, data)
      end
      expect(File.exist?(remote_file)).to be false
      subject.upload(local_file, remote_file, :use_scp => use_scp)
      expect(File.exist?(remote_file)).to be true
      File.exist?(remote_file) && File.delete(remote_file)
      File.exist?(local_file) && File.delete(local_file)
      expect(subject.close).to be true
    end
  end
end #upload
# Exercises ZTK::SSH#download over both transfer back-ends (SCP and SFTP).
# Requires sshd on 127.0.0.1 and the current user's key in ssh-agent.
describe "#download (#{connection_type})" do
  [true, false].each do |use_scp|
    it "should be able to download a file from 127.0.0.1 as the current user using #{use_scp ? 'scp' : 'sftp'} (your key must be in ssh-agent)" do
      data = "Hello World @ #{Time.now.utc}"
      # Reserve a unique local target name and remove any stale copy.
      # File.exist? replaces File.exists?, which was removed in Ruby 3.2.
      local_temp = Tempfile.new('local')
      local_file = File.join(ZTK::Locator.root, "tmp", File.basename(local_temp.path.dup))
      local_temp.close
      File.exist?(local_file) && File.delete(local_file)
      remote_temp = Tempfile.new('remote')
      remote_file = File.join(ZTK::Locator.root, "tmp", File.basename(remote_temp.path.dup))
      remote_temp.close
      # IO.write only exists on Ruby >= 1.9.3.
      if RUBY_VERSION < "1.9.3"
        File.open(remote_file, 'w') do |file|
          file.puts(data)
        end
      else
        IO.write(remote_file, data)
      end
      expect(File.exist?(local_file)).to be false
      subject.download(remote_file, local_file, :use_scp => use_scp)
      expect(File.exist?(local_file)).to be true
      File.exist?(local_file) && File.delete(local_file)
      File.exist?(remote_file) && File.delete(remote_file)
      expect(subject.close).to be true
    end
  end
end #download
end
end
Add spec for the SSH console method
################################################################################
#
# Author: Zachary Patten <zachary AT jovelabs DOT com>
# Copyright: Copyright (c) Zachary Patten
# License: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
require "spec_helper"
# Integration spec for ZTK::SSH. Every example connects to sshd on 127.0.0.1
# as the current user (the key must be loaded in ssh-agent), once directly and
# once through an SSH proxy.
#
# Fix: File.exists? was removed in Ruby 3.2 -- replaced with File.exist?.
describe ZTK::SSH do

  # In-memory UI pipes so captured output can be inspected after exec.
  let(:ui) { ZTK::UI.new(:stdout => StringIO.new, :stderr => StringIO.new, :stdin => StringIO.new) }

  subject { ZTK::SSH.new }

  describe "class" do
    it "should be an instance of ZTK::SSH" do
      expect(subject).to be_an_instance_of ZTK::SSH
    end
  end

  [ :direct, :proxy ].each do |connection_type|
    before(:each) do
      subject.config do |config|
        config.ui = ui
        config.user = ENV["USER"]
        config.host_name = "127.0.0.1"
        if connection_type == :proxy
          config.proxy_user = ENV["USER"]
          config.proxy_host_name = "127.0.0.1"
        end
      end
    end

    describe "#console (#{connection_type})" do
      it "should execute a console" do
        # Kernel.exec would replace the test process, so it is stubbed.
        expect(Kernel).to receive(:exec)
        subject.console
      end
    end

    describe "#execute (#{connection_type})" do
      it "should be able to connect to 127.0.0.1 as the current user and execute a command (your key must be in ssh-agent)" do
        data = %x(hostname).chomp
        status = subject.exec("hostname")
        expect(status.exit_code).to equal 0
        ui.stdout.rewind
        expect(ui.stdout.read.chomp).to match data
        expect(subject.close).to be true
      end

      it "should timeout after the period specified" do
        subject.config.timeout = WAIT_SMALL
        expect { subject.exec("sleep 10") }.to raise_error ZTK::SSHError
        expect(subject.close).to be true
      end

      it "should throw an exception if the exit status is not as expected" do
        expect { subject.exec("exit 42") }.to raise_error ZTK::SSHError
        expect(subject.close).to be true
      end

      it "should return a instance of an OpenStruct object" do
        result = subject.exec(%{echo "Hello World"})
        expect(result).to be_an_instance_of OpenStruct
        expect(subject.close).to be true
      end

      it "should return the exit code" do
        data = 64
        result = subject.exec(%{exit #{data}}, :exit_code => data)
        expect(result.exit_code).to equal data
        expect(subject.close).to be true
      end

      it "should return the output" do
        data = "Hello World @ #{Time.now.utc}"
        result = subject.exec(%Q{echo "#{data}"})
        expect(result.output).to match data
        expect(subject.close).to be true
      end

      it "should allow us to change the expected exit code" do
        data = 32
        result = subject.exec(%{exit #{data}}, :exit_code => data)
        expect(result.exit_code).to equal data
        expect(subject.close).to be true
      end

      it "should allow us to execute a bootstrap script" do
        data = "Hello World @ #{Time.now.utc}"
        result = subject.bootstrap(<<-EOBOOTSTRAP)
echo "#{data}" >&1
EOBOOTSTRAP
        expect(result.output.chomp).to match data
        expect(subject.close).to be true
      end

      it "should allow us to write a file" do
        data = "Hello World @ #{Time.now.utc}"
        test_filename = File.join("", "tmp", "test_file.txt")
        subject.file(:target => test_filename) do |f|
          f.write(data)
        end
        result = subject.exec(%{cat #{test_filename}})
        expect(result.output.chomp).to match data
        expect(subject.close).to be true
      end
    end #execute

    describe "#ui (#{connection_type})" do
      describe "#stdout (#{connection_type})" do
        [true, false].each do |request_pty|
          it "should capture STDOUT #{request_pty ? "with" : "without"} PTY and send it to the STDOUT pipe" do
            subject.config.request_pty = request_pty
            data = "Hello World @ #{Time.now.utc}"
            subject.exec(%{echo "#{data}" >&1})
            ui.stdout.rewind
            expect(ui.stdout.read).to match data
            ui.stderr.rewind
            expect(ui.stderr.read).to be_empty
            ui.stdin.rewind
            expect(ui.stdin.read).to be_empty
            expect(subject.close).to be true
          end
        end
      end #stdout

      describe "#stderr (#{connection_type})" do
        [true, false].each do |request_pty|
          it "should capture STDERR #{request_pty ? "with" : "without"} PTY and send it to the #{request_pty ? "STDOUT" : "STDERR"} pipe" do
            subject.config.request_pty = request_pty
            data = "Hello World @ #{Time.now.utc}"
            subject.exec(%{echo "#{data}" >&2})
            ui.stdout.rewind
            # A PTY merges the remote STDERR stream into STDOUT.
            expect(ui.stdout.read).to (request_pty ? match(data) : be_empty)
            ui.stderr.rewind
            expect(ui.stderr.read).to (request_pty ? be_empty : match(data))
            ui.stdin.rewind
            expect(ui.stdin.read).to be_empty
            expect(subject.close).to be true
          end
        end
      end #stderr
    end #ui

    describe "#upload (#{connection_type})" do
      [true, false].each do |use_scp|
        it "should be able to upload a file to 127.0.0.1 as the current user using #{use_scp ? 'scp' : 'sftp'} (your key must be in ssh-agent)" do
          data = "Hello World @ #{Time.now.utc}"
          remote_temp = Tempfile.new('remote')
          remote_file = File.join(ZTK::Locator.root, "tmp", File.basename(remote_temp.path.dup))
          remote_temp.close
          File.exist?(remote_file) && File.delete(remote_file)
          local_temp = Tempfile.new('local')
          local_file = File.join(ZTK::Locator.root, "tmp", File.basename(local_temp.path.dup))
          local_temp.close
          # IO.write only exists on Ruby >= 1.9.3.
          if RUBY_VERSION < "1.9.3"
            File.open(local_file, 'w') do |file|
              file.puts(data)
            end
          else
            IO.write(local_file, data)
          end
          expect(File.exist?(remote_file)).to be false
          subject.upload(local_file, remote_file, :use_scp => use_scp)
          expect(File.exist?(remote_file)).to be true
          File.exist?(remote_file) && File.delete(remote_file)
          File.exist?(local_file) && File.delete(local_file)
          expect(subject.close).to be true
        end
      end
    end #upload

    describe "#download (#{connection_type})" do
      [true, false].each do |use_scp|
        it "should be able to download a file from 127.0.0.1 as the current user using #{use_scp ? 'scp' : 'sftp'} (your key must be in ssh-agent)" do
          data = "Hello World @ #{Time.now.utc}"
          local_temp = Tempfile.new('local')
          local_file = File.join(ZTK::Locator.root, "tmp", File.basename(local_temp.path.dup))
          local_temp.close
          File.exist?(local_file) && File.delete(local_file)
          remote_temp = Tempfile.new('remote')
          remote_file = File.join(ZTK::Locator.root, "tmp", File.basename(remote_temp.path.dup))
          remote_temp.close
          # IO.write only exists on Ruby >= 1.9.3.
          if RUBY_VERSION < "1.9.3"
            File.open(remote_file, 'w') do |file|
              file.puts(data)
            end
          else
            IO.write(remote_file, data)
          end
          expect(File.exist?(local_file)).to be false
          subject.download(remote_file, local_file, :use_scp => use_scp)
          expect(File.exist?(local_file)).to be true
          File.exist?(local_file) && File.delete(local_file)
          File.exist?(remote_file) && File.delete(remote_file)
          expect(subject.close).to be true
        end
      end
    end #download
  end
end
|
# Base controller for the v2 JSON API: token authentication, CORS headers,
# and JSON error payloads for common failures.
class Api::V2::BaseController < ApplicationController
  rescue_from ActiveResource::BadRequest, with: :bad_request
  rescue_from Mongoid::Errors::DocumentNotFound, with: :not_found

  # CORS preflight (OPTIONS) requests must stay unauthenticated.
  before_filter :restrict_access, except: [:options]

  respond_to :json

  # Answers CORS preflight requests with the CORS headers and an empty body.
  def options
    set_headers
    render nothing: true, status: 200
  end

  private

  # Authenticates "Authorization: Token token=..." and memoizes the user;
  # the block's falsy result (no matching user) triggers a 401 challenge.
  def restrict_access
    authenticate_or_request_with_http_token do |token, options|
      @current_user = User.where(access_token: token).first
      @current_user
    end
  end

  # User resolved by restrict_access; nil until authentication has run.
  def current_user
    @current_user
  end

  # Parses the comma-separated X-Document-Ids request header.
  # NOTE(review): raises NoMethodError when the header is absent -- confirm
  # callers only use this on requests that send it.
  def document_ids
    request.headers['X-Document-Ids'].split(',')
  end

  protected

  # Sets permissive CORS response headers shared by options and API actions.
  def set_headers
    headers['Access-Control-Allow-Origin'] = '*'
    headers['Access-Control-Expose-Headers'] = 'ETag'
    headers['Access-Control-Allow-Methods'] = 'GET, POST, PATCH, PUT, DELETE, OPTIONS, HEAD'
    headers['Access-Control-Allow-Headers'] = '*,x-requested-with,Content-Type,If-Modified-Since,If-None-Match'
    headers['Access-Control-Max-Age'] = '86400'
  end

  def bad_request(exception)
    render json: {respond_type: 'BAD REQUEST'}, status: :bad_request
  end

  def not_found(exception)
    render json: {respond_type: 'NOT FOUND'}, status: :not_found
  end

  # Fixed: this handler used the key :render_type while every sibling handler
  # renders :respond_type; clients now get one consistent error-payload key.
  def unauthorized(exception)
    render json: {respond_type: 'UNAUTHORIZED'}, status: :unauthorized
  end
end
[aphrodite] Add Authorization to headers for CORS
# Base controller for the v2 JSON API: token authentication, CORS headers
# (including Authorization so browsers can send the token cross-origin),
# and JSON error payloads for common failures.
class Api::V2::BaseController < ApplicationController
  rescue_from ActiveResource::BadRequest, with: :bad_request
  rescue_from Mongoid::Errors::DocumentNotFound, with: :not_found

  # CORS preflight (OPTIONS) requests must stay unauthenticated.
  before_filter :restrict_access, except: [:options]

  respond_to :json

  # Answers CORS preflight requests with the CORS headers and an empty body.
  def options
    set_headers
    render nothing: true, status: 200
  end

  private

  # Authenticates "Authorization: Token token=..." and memoizes the user;
  # the block's falsy result (no matching user) triggers a 401 challenge.
  def restrict_access
    authenticate_or_request_with_http_token do |token, options|
      @current_user = User.where(access_token: token).first
      @current_user
    end
  end

  # User resolved by restrict_access; nil until authentication has run.
  def current_user
    @current_user
  end

  # Parses the comma-separated X-Document-Ids request header.
  # NOTE(review): raises NoMethodError when the header is absent -- confirm
  # callers only use this on requests that send it.
  def document_ids
    request.headers['X-Document-Ids'].split(',')
  end

  protected

  # Sets permissive CORS response headers shared by options and API actions.
  def set_headers
    headers['Access-Control-Allow-Origin'] = '*'
    headers['Access-Control-Expose-Headers'] = 'ETag'
    headers['Access-Control-Allow-Methods'] = 'GET, POST, PATCH, PUT, DELETE, OPTIONS, HEAD'
    headers['Access-Control-Allow-Headers'] = '*,x-requested-with,Content-Type,If-Modified-Since,If-None-Match,Authorization'
    headers['Access-Control-Max-Age'] = '86400'
  end

  def bad_request(exception)
    render json: {respond_type: 'BAD REQUEST'}, status: :bad_request
  end

  def not_found(exception)
    render json: {respond_type: 'NOT FOUND'}, status: :not_found
  end

  # Fixed: this handler used the key :render_type while every sibling handler
  # renders :respond_type; clients now get one consistent error-payload key.
  def unauthorized(exception)
    render json: {respond_type: 'UNAUTHORIZED'}, status: :unauthorized
  end
end
|
# encoding: UTF-8

# Gem specification for spree_chimpy: MailChimp integration for Spree.
Gem::Specification.new do |spec|
  spec.platform              = Gem::Platform::RUBY
  spec.name                  = 'spree_chimpy'
  spec.version               = '2.0.0.alpha'
  spec.summary               = 'MailChimp/Spree integration using the mailchimp gem'
  spec.description           = spec.summary
  spec.required_ruby_version = '>= 1.9.3'

  spec.author   = 'Joshua Nussbaum'
  spec.email    = 'josh@godynamo.com'
  spec.homepage = 'http://www.godynamo.com'
  spec.license  = 'BSD-3'

  # Package everything tracked by git; specs double as shipped test files.
  spec.files        = `git ls-files`.split("\n")
  spec.test_files   = `git ls-files -- spec/*`.split("\n")
  spec.require_path = 'lib'
  spec.requirements << 'none'

  # Runtime dependencies.
  spec.add_dependency 'spree_core', '~> 2.3.0.beta'
  spec.add_dependency 'mailchimp', '>= 0.0.9'

  # Development / test dependencies.
  spec.add_development_dependency 'rspec-rails', '~> 2.14'
  spec.add_development_dependency 'capybara', '~> 2.2.1'
  spec.add_development_dependency 'selenium-webdriver', '~> 2.40'
  spec.add_development_dependency 'factory_girl', '~> 4.4'
  spec.add_development_dependency 'shoulda-matchers', '~> 2.5'
  spec.add_development_dependency 'sqlite3', '~> 1.3.9'
  spec.add_development_dependency 'simplecov', '0.7.1'
  spec.add_development_dependency 'database_cleaner', '1.2.0'
  spec.add_development_dependency 'coffee-rails', '~> 4.0.1'
  spec.add_development_dependency 'sass-rails', '~> 4.0.2'
  spec.add_development_dependency 'ffaker'
  spec.add_development_dependency 'guard-rspec'
  spec.add_development_dependency 'launchy'
  spec.add_development_dependency 'pry-rails'
  spec.add_development_dependency 'pry-debugger'
  spec.add_development_dependency 'pry-rescue'
end
Loosened the spree_core version requirement to reduce dependency conflicts
# encoding: UTF-8

# Gem specification for spree_chimpy: MailChimp integration for Spree.
Gem::Specification.new do |spec|
  spec.platform              = Gem::Platform::RUBY
  spec.name                  = 'spree_chimpy'
  spec.version               = '2.0.0.alpha'
  spec.summary               = 'MailChimp/Spree integration using the mailchimp gem'
  spec.description           = spec.summary
  spec.required_ruby_version = '>= 1.9.3'

  spec.author   = 'Joshua Nussbaum'
  spec.email    = 'josh@godynamo.com'
  spec.homepage = 'http://www.godynamo.com'
  spec.license  = 'BSD-3'

  # Package everything tracked by git; specs double as shipped test files.
  spec.files        = `git ls-files`.split("\n")
  spec.test_files   = `git ls-files -- spec/*`.split("\n")
  spec.require_path = 'lib'
  spec.requirements << 'none'

  # Runtime dependencies (loose spree_core bound to minimize conflicts).
  spec.add_dependency 'spree_core', '~> 2.1'
  spec.add_dependency 'mailchimp', '>= 0.0.9'

  # Development / test dependencies.
  spec.add_development_dependency 'rspec-rails', '~> 2.14'
  spec.add_development_dependency 'capybara', '~> 2.2.1'
  spec.add_development_dependency 'selenium-webdriver', '~> 2.40'
  spec.add_development_dependency 'factory_girl', '~> 4.4'
  spec.add_development_dependency 'shoulda-matchers', '~> 2.5'
  spec.add_development_dependency 'sqlite3', '~> 1.3.9'
  spec.add_development_dependency 'simplecov', '0.7.1'
  spec.add_development_dependency 'database_cleaner', '1.2.0'
  spec.add_development_dependency 'coffee-rails', '~> 4.0.1'
  spec.add_development_dependency 'sass-rails', '~> 4.0.2'
  spec.add_development_dependency 'ffaker'
  spec.add_development_dependency 'guard-rspec'
  spec.add_development_dependency 'launchy'
  spec.add_development_dependency 'pry-rails'
  spec.add_development_dependency 'pry-debugger'
  spec.add_development_dependency 'pry-rescue'
end
|
#!/usr/bin/env ruby
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
#
# this sample shows how to compile an executable file from source
# use --exe PE to compile a PE/ELF/MachO etc
# use --cpu MIPS/--16/--be to change the CPU
# the arg is a source file (c or asm) (some arch may not yet support C compiling)
# defaults to encoding a shellcode, use --exe to override (or the scripts samples/{elf,pe}encode)
# to compile a shellcode to a cstring, use --cstring
#

require 'metasm'
require 'optparse'

$execlass ||= Metasm::Shellcode
$cpu ||= Metasm::Ia32.new

outfilename = 'a.out'
type = nil
etype = :bin
macros = {}

OptionParser.new { |opt|
  opt.on('-o file', 'output filename') { |f| outfilename = f }
  opt.on('--c', 'parse source as a C file') { type = 'c' }
  opt.on('--asm', 'parse asm as an ASM file') { type = 'asm' }
  opt.on('--stdin', 'parse source on stdin') { ARGV << '-' }
  opt.on('-v', '-W', 'verbose') { $VERBOSE=true }
  opt.on('-d', 'debug') { $DEBUG=$VERBOSE=true }
  opt.on('-D var=val', 'define a preprocessor macro') { |v| v0, v1 = v.split('=', 2) ; macros[v0] = v1 }
  opt.on('--cstring', 'encode output as a C string to stdout') { $to_cstring = true }
  opt.on('--string', 'encode output as a string to stdout') { $to_string = true }
  opt.on('-e class', '--exe class', 'use a specific ExeFormat class') { |c| $execlass = Metasm.const_get(c) }
  opt.on('--cpu cpu', 'use a specific CPU class') { |c| $cpu = Metasm.const_get(c).new }
  # must come after --cpu in commandline
  opt.on('--16', 'set cpu in 16bit mode') { $cpu.size = 16 }
  opt.on('--le', 'set cpu in little-endian mode') { $cpu.endianness = :little }
  opt.on('--be', 'set cpu in big-endian mode') { $cpu.endianness = :big }
  # fixed: a single-dash '-fno-pic' is parsed by optparse as the short option
  # '-f' taking an argument, so the long form is required here
  opt.on('--fno-pic', 'generate position-dependant code') { $cpu.generate_PIC = false }
  opt.on('--shared', 'generate shared library') { etype = :lib }
}.parse!

if file = ARGV.shift
  type ||= 'c' if file =~ /\.c$/
  src = macros.map { |k, v| "#define #{k} #{v}\n" }.join
  if file == '-'
    src << $stdin.read
  else
    src << File.read(file)
  end
else
  src = DATA.read # the text after __END__
end

if type == 'c'
  exe = $execlass.compile_c($cpu, src, file)
else
  exe = $execlass.assemble($cpu, src, file)
end

if $to_string
  p exe.encode_string
elsif $to_cstring
  str = exe.encode_string
  # fixed: file is nil when the embedded DATA source is used; file.to_s keeps
  # the fallback 'sc' varname instead of raising in File.basename
  var = File.basename(file.to_s)[/^\w+/] || 'sc' # derive varname from filename
  puts "unsigned char #{var}[#{str.length}] = ", str.scan(/.{1,19}/m).map { |l|
    '"' + l.unpack('C*').map { |c| '\\x%02x' % c }.join + '"'
  }.join("\n") + ';'
else
  exe.encode_file(outfilename, etype)
end

__END__
#include <asm/unistd.h>
jmp getip
gotip:
mov eax, __NR_write
mov ebx, 1
pop ecx
mov edx, strend-str
int 80h
mov eax, __NR_exit
mov ebx, 1
int 80h
getip:
call gotip
str db "Hello, world!", 0xa
strend:
samples/exeencode: save cfile etc to outfile, prevent overwriting files by default
#!/usr/bin/env ruby
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
#
# this sample shows how to compile an executable file from source
# use --exe PE to compile a PE/ELF/MachO etc
# use --cpu MIPS/--16/--be to change the CPU
# the arg is a source file (c or asm) (some arch may not yet support C compiling)
# defaults to encoding a shellcode, use --exe to override (or the scripts samples/{elf,pe}encode)
# to compile a shellcode to a cstring, use --cstring
#

require 'metasm'
require 'optparse'

$execlass ||= Metasm::Shellcode
$cpu ||= Metasm::Ia32.new

# nil until -o: string output goes to stdout, binary output to 'a.out'
outfilename = nil
type = nil
etype = :bin
overwrite_outfile = false
macros = {}

OptionParser.new { |opt|
  opt.on('-o file', 'output filename') { |f| outfilename = f }
  opt.on('-f') { overwrite_outfile = true }
  opt.on('--c', 'parse source as a C file') { type = 'c' }
  opt.on('--asm', 'parse asm as an ASM file') { type = 'asm' }
  opt.on('--stdin', 'parse source on stdin') { ARGV << '-' }
  opt.on('-v', '-W', 'verbose') { $VERBOSE=true }
  opt.on('-d', 'debug') { $DEBUG=$VERBOSE=true }
  opt.on('-D var=val', 'define a preprocessor macro') { |v| v0, v1 = v.split('=', 2) ; macros[v0] = v1 }
  opt.on('--cstring', 'encode output as a C string to stdout') { $to_cstring = true }
  opt.on('--string', 'encode output as a string to stdout') { $to_string = true }
  opt.on('-e class', '--exe class', 'use a specific ExeFormat class') { |c| $execlass = Metasm.const_get(c) }
  opt.on('--cpu cpu', 'use a specific CPU class') { |c| $cpu = Metasm.const_get(c).new }
  # must come after --cpu in commandline
  opt.on('--16', 'set cpu in 16bit mode') { $cpu.size = 16 }
  opt.on('--le', 'set cpu in little-endian mode') { $cpu.endianness = :little }
  opt.on('--be', 'set cpu in big-endian mode') { $cpu.endianness = :big }
  opt.on('--fno-pic', 'generate position-dependant code') { $cpu.generate_PIC = false }
  opt.on('--shared', 'generate shared library') { etype = :lib }
}.parse!

if file = ARGV.shift
  type ||= 'c' if file =~ /\.c$/
  src = macros.map { |k, v| "#define #{k} #{v}\n" }.join
  if file == '-'
    src << $stdin.read
  else
    src << File.read(file)
  end
else
  src = DATA.read # the text after __END__
end

if type == 'c'
  exe = $execlass.compile_c($cpu, src, file)
else
  exe = $execlass.assemble($cpu, src, file)
end

if $to_string or $to_cstring
  str = exe.encode_string
  if $to_string
    str = str.inspect
  elsif $to_cstring
    var = File.basename(file)[/^\w+/] || 'sc' rescue 'sc' # derive varname from filename
    str = ["unsigned char #{var}[#{str.length}] = "] + str.scan(/.{1,19}/m).map { |l|
      '"' + l.unpack('C*').map { |c| '\\x%02x' % c }.join + '"'
    }
    str.last << ?;
  end
  if outfilename
    # File.exist? replaces File.exists?, which was removed in Ruby 3.2
    abort "Error: target file #{outfilename.inspect} exists !" if File.exist? outfilename and not overwrite_outfile
    File.open(outfilename, 'w') { |fd| fd.puts str }
    puts "saved to file #{outfilename.inspect}"
  else
    puts str
  end
else
  outfilename ||= 'a.out'
  abort "Error: target file #{outfilename.inspect} exists !" if File.exist? outfilename and not overwrite_outfile
  exe.encode_file(outfilename, etype)
  puts "saved to file #{outfilename.inspect}"
end

__END__
#include <asm/unistd.h>
jmp getip
gotip:
mov eax, __NR_write
mov ebx, 1
pop ecx
mov edx, strend-str
int 80h
mov eax, __NR_exit
mov ebx, 1
int 80h
getip:
call gotip
str db "Hello, world!", 0xa
strend:
|
#!/usr/bin/env ruby
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
#
# this sample shows how to compile an executable file from source
# use --exe PE to compile a PE/ELF/MachO etc
# use --cpu MIPS/--16/--be to change the CPU
# the arg is a source file (c or asm) (some arch may not yet support C compiling)
# defaults to encoding a shellcode, use --exe to override (or the scripts samples/{elf,pe}encode)
# to compile a shellcode to a cstring, use --cstring
#

require 'metasm'
require 'optparse'

# $opts may be pre-seeded by a wrapper script before loading this file.
$opts ||= {}
$opts = {
  :execlass => Metasm::Shellcode,
  :cpu => Metasm::Ia32.new,
  :exetype => :bin,
  :macros => {}
}.merge($opts)

OptionParser.new { |opt|
  opt.on('-o file', 'output filename') { |f| $opts[:outfilename] = f }
  opt.on('-i', 'dont overwrite existing outfile') { $opts[:nooverwrite_outfile] = true }
  opt.on('--c', 'parse source as a C file') { $opts[:srctype] = 'c' }
  opt.on('--asm', 'parse asm as an ASM file') { $opts[:srctype] = 'asm' }
  opt.on('--stdin', 'parse source on stdin') { ARGV << '-' }
  opt.on('-v', '-W', 'verbose') { $VERBOSE=true }
  opt.on('-d', 'debug') { $DEBUG=$VERBOSE=true }
  opt.on('-D var=val', 'define a preprocessor macro') { |v| v0, v1 = v.split('=', 2) ; $opts[:macros][v0] = v1 }
  opt.on('--cstring', 'encode output as a C string') { $opts[:to_string] = :c }
  opt.on('--jsstring', 'encode output as a js string') { $opts[:to_string] = :js }
  opt.on('--string', 'encode output as a string to stdout') { $opts[:to_string] = :inspect }
  opt.on('--varname name', 'the variable name for string output') { |v| $opts[:varname] = v }
  opt.on('-e class', '--exe class', 'use a specific ExeFormat class') { |c| $opts[:execlass] = Metasm.const_get(c) }
  opt.on('--cpu cpu', 'use a specific CPU class') { |c| $opts[:cpu] = Metasm.const_get(c).new }
  # must come after --cpu in commandline
  opt.on('--16', 'set cpu in 16bit mode') { $opts[:cpu].size = 16 }
  opt.on('--le', 'set cpu in little-endian mode') { $opts[:cpu].endianness = :little }
  opt.on('--be', 'set cpu in big-endian mode') { $opts[:cpu].endianness = :big }
  opt.on('--fno-pic', 'generate position-dependant code') { $opts[:cpu].generate_PIC = false }
  opt.on('--shared', '--lib', '--dll', 'generate shared library') { $opts[:exetype] = :lib }
  opt.on('--ruby-module-hack', 'use the dynldr module hack to use any ruby lib available for ruby symbols') { $opts[:dldrhack] = true }
}.parse!

src = $opts[:macros].map { |k, v| "#define #{k} #{v}\n" }.join
if file = ARGV.shift
  $opts[:srctype] ||= 'c' if file =~ /\.c$/
  if file == '-'
    src << $stdin.read
  else
    src << File.read(file)
  end
else
  $opts[:srctype] ||= $opts[:srctype_data]
  src << DATA.read # the text after __END__ in this file
end

# fail fast before spending time compiling
if $opts[:outfilename] and $opts[:nooverwrite_outfile] and File.exist?($opts[:outfilename])
  abort "Error: target file exists !"
end

if $opts[:srctype] == 'c'
  exe = $opts[:execlass].compile_c($opts[:cpu], src, file)
else
  exe = $opts[:execlass].assemble($opts[:cpu], src, file)
end

if $opts[:to_string]
  str = exe.encode_string
  $opts[:varname] ||= File.basename(file.to_s)[/^\w+/] || 'sc' # derive varname from filename
  case $opts[:to_string]
  when :inspect
    str = "#{$opts[:varname]} = #{str.inspect}"
  when :c
    str = ["unsigned char #{$opts[:varname]}[#{str.length}] = "] + str.scan(/.{1,19}/m).map { |l|
      '"' + l.unpack('C*').map { |c| '\\x%02x' % c }.join + '"'
    }
    str.last << ?;
  when :js
    # %u escapes encode 16-bit units, so pad to an even byte count
    str << 0 if str.length & 1 != 0
    str = ["#{$opts[:varname]} = "] + str.scan(/.{2,20}/m).map { |l|
      '"' + l.unpack($opts[:cpu].endianness == :little ? 'v*' : 'n*').map { |c| '%%u%04x' % c }.join + '"+'
    }
    str.last[-1] = ?;
  end
  if of = $opts[:outfilename]
    # File.exist? replaces File.exists? (removed in Ruby 3.2); the check at
    # the top of the script already used the modern spelling
    abort "Error: target file #{of.inspect} exists !" if File.exist?(of) and $opts[:nooverwrite_outfile]
    File.open(of, 'w') { |fd| fd.puts str }
    puts "saved to file #{of.inspect}"
  else
    puts str
  end
else
  of = $opts[:outfilename] ||= 'a.out'
  abort "Error: target file #{of.inspect} exists !" if File.exist?(of) and $opts[:nooverwrite_outfile]
  Metasm::DynLdr.compile_binary_module_hack(exe) if $opts[:dldrhack]
  exe.encode_file(of, $opts[:exetype])
  puts "saved to file #{of.inspect}"
end

__END__
#include <asm/unistd.h>
jmp getip
gotip:
mov eax, __NR_write
mov ebx, 1
pop ecx
mov edx, strend-str
int 80h
mov eax, __NR_exit
mov ebx, 1
int 80h
getip:
call gotip
str db "Hello, world!", 0xa
strend:
samples/exeencode: re-add dummy -f option to prevent autocompletion to --fno-pic (optparse.rb, come see me after the class)
#!/usr/bin/env ruby
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
#
# this sample shows how to compile an executable file from source
# use --exe PE to compile a PE/ELF/MachO etc
# use --cpu MIPS/--16/--be to change the CPU
# the arg is a source file (c or asm) (some arch may not yet support C compiling)
# defaults to encoding a shellcode, use --exe to override (or the scripts samples/{elf,pe}encode)
# to compile a shellcode to a cstring, use --cstring
#

require 'metasm'
require 'optparse'

# $opts may be pre-seeded by a wrapper script before loading this file.
$opts ||= {}
$opts = {
  :execlass => Metasm::Shellcode,
  :cpu => Metasm::Ia32.new,
  :exetype => :bin,
  :macros => {}
}.merge($opts)

OptionParser.new { |opt|
  opt.on('-o file', 'output filename') { |f| $opts[:outfilename] = f }
  opt.on('-i', 'dont overwrite existing outfile') { $opts[:nooverwrite_outfile] = true }
  opt.on('-f', 'overwrite existing outfile (default)') { $opts.delete :nooverwrite_outfile } # without this, optparse autocomplete to --fno-pic and break older scripts...
  opt.on('--c', 'parse source as a C file') { $opts[:srctype] = 'c' }
  opt.on('--asm', 'parse asm as an ASM file') { $opts[:srctype] = 'asm' }
  opt.on('--stdin', 'parse source on stdin') { ARGV << '-' }
  opt.on('-v', '-W', 'verbose') { $VERBOSE=true }
  opt.on('-d', 'debug') { $DEBUG=$VERBOSE=true }
  opt.on('-D var=val', 'define a preprocessor macro') { |v| v0, v1 = v.split('=', 2) ; $opts[:macros][v0] = v1 }
  opt.on('--cstring', 'encode output as a C string') { $opts[:to_string] = :c }
  opt.on('--jsstring', 'encode output as a js string') { $opts[:to_string] = :js }
  opt.on('--string', 'encode output as a string to stdout') { $opts[:to_string] = :inspect }
  opt.on('--varname name', 'the variable name for string output') { |v| $opts[:varname] = v }
  opt.on('-e class', '--exe class', 'use a specific ExeFormat class') { |c| $opts[:execlass] = Metasm.const_get(c) }
  opt.on('--cpu cpu', 'use a specific CPU class') { |c| $opts[:cpu] = Metasm.const_get(c).new }
  # must come after --cpu in commandline
  opt.on('--16', 'set cpu in 16bit mode') { $opts[:cpu].size = 16 }
  opt.on('--le', 'set cpu in little-endian mode') { $opts[:cpu].endianness = :little }
  opt.on('--be', 'set cpu in big-endian mode') { $opts[:cpu].endianness = :big }
  opt.on('--fno-pic', 'generate position-dependant code') { $opts[:cpu].generate_PIC = false }
  opt.on('--shared', '--lib', '--dll', 'generate shared library') { $opts[:exetype] = :lib }
  opt.on('--ruby-module-hack', 'use the dynldr module hack to use any ruby lib available for ruby symbols') { $opts[:dldrhack] = true }
}.parse!

src = $opts[:macros].map { |k, v| "#define #{k} #{v}\n" }.join
if file = ARGV.shift
  $opts[:srctype] ||= 'c' if file =~ /\.c$/
  if file == '-'
    src << $stdin.read
  else
    src << File.read(file)
  end
else
  $opts[:srctype] ||= $opts[:srctype_data]
  src << DATA.read # the text after __END__ in this file
end

# fail fast before spending time compiling
if $opts[:outfilename] and $opts[:nooverwrite_outfile] and File.exist?($opts[:outfilename])
  abort "Error: target file exists !"
end

if $opts[:srctype] == 'c'
  exe = $opts[:execlass].compile_c($opts[:cpu], src, file)
else
  exe = $opts[:execlass].assemble($opts[:cpu], src, file)
end

if $opts[:to_string]
  str = exe.encode_string
  $opts[:varname] ||= File.basename(file.to_s)[/^\w+/] || 'sc' # derive varname from filename
  case $opts[:to_string]
  when :inspect
    str = "#{$opts[:varname]} = #{str.inspect}"
  when :c
    str = ["unsigned char #{$opts[:varname]}[#{str.length}] = "] + str.scan(/.{1,19}/m).map { |l|
      '"' + l.unpack('C*').map { |c| '\\x%02x' % c }.join + '"'
    }
    str.last << ?;
  when :js
    # %u escapes encode 16-bit units, so pad to an even byte count
    str << 0 if str.length & 1 != 0
    str = ["#{$opts[:varname]} = "] + str.scan(/.{2,20}/m).map { |l|
      '"' + l.unpack($opts[:cpu].endianness == :little ? 'v*' : 'n*').map { |c| '%%u%04x' % c }.join + '"+'
    }
    str.last[-1] = ?;
  end
  if of = $opts[:outfilename]
    # File.exist? replaces File.exists? (removed in Ruby 3.2); the check at
    # the top of the script already used the modern spelling
    abort "Error: target file #{of.inspect} exists !" if File.exist?(of) and $opts[:nooverwrite_outfile]
    File.open(of, 'w') { |fd| fd.puts str }
    puts "saved to file #{of.inspect}"
  else
    puts str
  end
else
  of = $opts[:outfilename] ||= 'a.out'
  abort "Error: target file #{of.inspect} exists !" if File.exist?(of) and $opts[:nooverwrite_outfile]
  Metasm::DynLdr.compile_binary_module_hack(exe) if $opts[:dldrhack]
  exe.encode_file(of, $opts[:exetype])
  puts "saved to file #{of.inspect}"
end

__END__
#include <asm/unistd.h>
jmp getip
gotip:
mov eax, __NR_write
mov ebx, 1
pop ecx
mov edx, strend-str
int 80h
mov eax, __NR_exit
mov ebx, 1
int 80h
getip:
call gotip
str db "Hello, world!", 0xa
strend:
|
require 'countries'
require 'base64'
require 'socket'
require 'easypost'
require 'net/http'
require 'net/scp'
require 'uri'
class Admin::Store::ShipmentsController < Admin::BaseController
skip_before_filter :verify_authenticity_token, only: :label_print
# Lists shipments, newest first, with optional filters from query params:
# q (free-text over recipient name/company/city), user_id, affiliate_id,
# carrier, ship_date, status, manifest_id. Renders paginated HTML or CSV.
def index
  q = params[:q]
  s = Shipment.includes(:order, :items, :inventory_transaction, [items: :order_item]).order('store_shipments.created_at DESC')
  # Bind the search term as a named parameter. The previous version
  # interpolated params[:q] directly into the SQL string, which was an
  # SQL-injection vector.
  s = s.where("recipient_name LIKE :q OR recipient_company LIKE :q OR recipient_city LIKE :q", q: "%#{q}%")
  s = s.where("store_orders.user_id = ?", params[:user_id]) unless params[:user_id].blank?
  s = s.where("store_orders.affiliate_id = ?", params[:affiliate_id]) unless params[:affiliate_id].blank?
  s = s.where(carrier: params[:carrier]) unless params[:carrier].blank?
  s = s.where(ship_date: params[:ship_date]) unless params[:ship_date].blank?
  s = s.where(status: params[:status]) unless params[:status].blank?
  s = s.where(manifest_id: params[:manifest_id]) unless params[:manifest_id].blank?
  respond_to do |format|
    format.html { @shipments = s.paginate(page: params[:page], per_page: @per_page) }
    format.csv { send_data Shipment.to_csv(s, skip_cols: ['label_data']) }
  end
end
# Starts a new (unsaved) shipment for an existing order; without an order_id
# param the user is redirected to the order chooser.
def new
# check if order id was passed in?
return redirect_to action: 'choose_order' if params[:order_id].nil?
@order = Order.find(params[:order_id])
# NOTE(review): if Order.find raises on an unknown id (handled by a
# rescue_from elsewhere), this nil branch may be unreachable -- confirm.
if @order.nil?
flash[:notice] = "Order #{params[:order_id]} was not found."
return redirect_to action: 'choose_order'
end
# Build the next shipment for the order; the first shipment defaults its
# invoice amount to the order total.
@shipment = @order.create_shipment(session[:user_id], false)
@shipment.invoice_amount = @order.total if @shipment.sequence == 1
render 'edit'
end
# Persists a new shipment (stamping the current user as fulfiller), records
# an order-history entry, and redirects to the shipment; re-renders the form
# on validation failure.
def create
  @shipment = Shipment.new(shipment_params)
  @shipment.fulfilled_by_id = current_user.id
  # (Removed a leftover ">>>>>>>>>>>" debug puts of skip_inventory.)
  if @shipment.save
    # create order history item
    OrderHistory.create order_id: @shipment.order_id, user_id: current_user.id, event_type: :shipment_created,
                        system_name: 'Rhombus', identifier: @shipment.id, comment: "shipment created: #{@shipment}"
    flash[:notice] = "Shipment #{@shipment.order_id}-#{@shipment.sequence} was successfully created."
    redirect_to action: 'show', id: @shipment.id
  else
    render 'edit'
  end
end
# Loads the shipment with its items plus their products and affiliates.
def show
  scope = Shipment.includes(:items, [items: :product], [items: :affiliate])
  @shipment = scope.find(params[:id])
end
# Renders the printable packing slip for a shipment outside the admin layout.
def packing_slip
  scope = Shipment.includes(:items, [items: :product], [items: :affiliate], [items: :order_item])
  @shipment = scope.find(params[:id])
  render 'packing_slip', layout: false
end
# Renders the printable invoice for a shipment outside the admin layout.
def invoice
  @shipment = Shipment.find params[:id]
  render 'invoice', layout: false
end
# Queues a background job that emails the shipment invoice, then returns to
# the previous page with a confirmation flash.
def email_invoice
  @shipment = Shipment.find(params[:id])
  SendInvoiceJob.perform_later @shipment.id, session[:user_id]
  recipient = @shipment.order.notify_email
  flash[:success] = "Invoice was emailed to #{recipient}"
  redirect_to :back
end
# Posts the shipment's invoice; on success also records the event in the
# order history. Either way, returns to the previous page with a flash.
def create_payment
  @shipment = Shipment.find(params[:id])
  posted = @shipment.post_invoice
  if posted
    OrderHistory.create(order_id: @shipment.order.id, user_id: session[:user_id],
                        event_type: :invoice, system_name: 'Rhombus', identifier: @shipment.to_s,
                        comment: "Invoiced $#{@shipment.invoice_amount}" )
    flash[:success] = 'Invoice posted'
  else
    flash[:error] = 'Invoice was not posted'
  end
  redirect_to :back
end
# Loads the shipment and the associations needed by the edit form.
def edit
  scope = Shipment.includes(:items, [items: :product], [items: :affiliate], [items: :order_item])
  @shipment = scope.find(params[:id])
end
# PATCH /update — apply form params; re-renders the form on validation failure.
def update
  @shipment = Shipment.find(params[:id])
  @shipment.fulfilled_by_id = current_user.id
  unless @shipment.update(shipment_params)
    return render 'edit'
  end
  flash[:notice] = "Shipment #{@shipment.order_id}-#{@shipment.sequence} was updated."
  redirect_to action: 'show', id: @shipment.id
end
# DELETE /destroy — remove the shipment, then return to the referrer.
def destroy
  shipment = Shipment.find(params[:id])
  shipment.destroy
  redirect_to :back
end
# GET /choose_order — renders the order-picker form; no controller logic needed.
def choose_order
end
# Streams the stored shipping-label image in its recorded content type.
def label_image
  found = Shipment.find(params[:id])
  send_data found.label_data, type: found.label_format
end
# Download or print a shipping label for a shipment.
# NOTE(review): the unconditional `return render text: :ok` below short-circuits
# the whole action — everything after it is currently dead code. It looks
# intentionally disabled; confirm before re-enabling.
def label
return render text: :ok
# used background processing for printing to thermal printer as it can take a few seconds
if ['epl2','zpl'].include?(params[:format])
ShippingLabelJob.perform_later(session[:user_id], params[:id], params[:format])
flash[:info] = "Shipping label dispatched to printer"
return redirect_to :back
end
# requested a PNG probably
shipment = Shipment.find(params[:id])
courier_data = JSON.parse(shipment.courier_data)
begin
# EasyPost credentials are configured per-domain.
EasyPost.api_key = Cache.setting(shipment.order.domain_id, 'Shipping', 'EasyPost API Key')
ep_shipment = EasyPost::Shipment.retrieve(courier_data['id'])
response = ep_shipment.label({'file_format' => params[:format]})
# download label
label_url = response[:postage_label]["label_#{params[:format]}_url"]
label_data = Net::HTTP.get(URI.parse(label_url))
rescue => e
flash[:error] = "Error downloading shipping label: " + e.message
return redirect_to :back
end
send_data label_data, filename: shipment.to_s + "." + params[:format]
end
# Requests a postage refund from EasyPost for this shipment's label.
# The shipment is marked 'void' only when EasyPost reports the refund as
# 'submitted'; any API failure is surfaced via flash[:error].
def void_label
shipment = Shipment.find(params[:id])
# Per-domain EasyPost credentials.
EasyPost.api_key = Cache.setting(shipment.order.domain_id, 'Shipping', 'EasyPost API Key')
courier_data = JSON.parse(shipment.courier_data)
begin
ep_shipment = EasyPost::Shipment.retrieve(courier_data['id'])
response = ep_shipment.refund
flash[:info] = "Refund status: #{response[:refund_status]} - / - Tracking: #{response[:tracking_code]} - / - Confirmation: #{response[:confirmation_number] || "n/a"}"
shipment.update_attribute(:status, 'void') if response[:refund_status] == 'submitted'
rescue => e
flash[:error] = e.message
end
redirect_to :back
end
# Collects product-label print data for every pending shipment and renders the
# shared 'product_labels' view.
def product_labels_pending
  @shipments = Shipment.where(status: :pending)
  @items = []
  # Guard: an empty id list would previously generate invalid SQL ("in ()").
  return render 'product_labels' if @shipments.empty?
  # Ids here are integers straight from the DB, so interpolation is safe.
  sql = <<-EOF
    select si.shipment_id, si.id as shipment_item_id, sheet.name as label, oi.item_number, p.name, p.option_title,
    sum(si.quantity) as quantity, uploaded_file, upload_file_preview, rendered_file
    from store_shipment_items si
    join store_order_items oi on oi.id = si.order_item_id
    join store_products p on p.id = si.product_id
    join store_label_sheets sheet on sheet.id = p.label_sheet_id
    where shipment_id in (#{@shipments.map(&:id).join(",")})
    and si.quantity > 0
    group by shipment_id, p.sku, si.quantity
    order by sheet.name;
  EOF
  ActiveRecord::Base.connection.execute(sql).each(as: :hash) { |row| @items << row.with_indifferent_access }
  render 'product_labels'
end
# Collects product-label print data for the explicitly selected shipments.
def product_labels
  # Coerce ids to integers before interpolating into SQL — the raw
  # params[:shipment_id].join was an SQL injection vector.
  ids = Array(params[:shipment_id]).map(&:to_i)
  @shipments = Shipment.where(id: ids)
  @items = []
  # Guard: an empty id list would generate invalid SQL ("in ()").
  return if ids.empty?
  sql = <<-EOF
    select si.shipment_id, si.id as shipment_item_id, sheet.name as label, oi.item_number, p.name, p.option_title,
    sum(si.quantity) as quantity, uploaded_file, upload_file_preview, rendered_file
    from store_shipment_items si
    join store_order_items oi on oi.id = si.order_item_id
    join store_products p on p.id = si.product_id
    join store_label_sheets sheet on sheet.id = p.label_sheet_id
    where shipment_id in (#{ids.join(",")})
    and si.quantity > 0
    group by shipment_id, p.sku, si.quantity
    order by sheet.name;
  EOF
  ActiveRecord::Base.connection.execute(sql).each(as: :hash) { |row| @items << row.with_indifferent_access }
end
# Print one size of labels via the QuickCommand/Kiaro workflow: build an .acf
# job file describing every label, SCP it to the print server's watch folder,
# and log each printed item. Debug `puts` statements removed.
def label_print
  p = Printer.find_by(id: params[:printer_id])
  if p.nil?
    flash[:error] = "Printer not found"
    return redirect_to :back
  end
  label_prefix = Setting.get(:kiaro, "Label Prefix")
  # params[:label] looks like "<id> <name>"; the name maps to an .alf layout file.
  label = params[:label].split(" ", 2)[1] + ".alf"
  label_count = 0
  str = ""
  logs = []
  params[:shipment_items].each do |h|
    next if h['quantity'] == "0"
    si = ShipmentItem.find(h['id'])
    # Personalized items cannot print until their artwork has been rendered.
    next if (h['personalized'] == 'true' && si.order_item.rendered_file.blank?)
    item_number = si.order_item.item_number
    qty = h['quantity'].to_i
    label_count += qty
    ### QUICKCOMMAND LABEL SPECS #########
    str << "LABELNAME=#{label}\r\n"
    if h['personalized'] == 'true'
      img = si.order_item.rendered_file.split('/').last
      str << "FIELD 001=#{label_prefix}\\personalized_labels\\#{img}\r\n"
    else
      # item_number is "<sku>-<breed>-<variant>"; only the breed segment is used.
      _sku, breed, _variant = item_number.split("-")
      str << "FIELD 001=#{label_prefix}\\hb_labels\\#{breed}\\#{item_number}.pdf\r\n"
    end
    str << "LABELQUANTITY=#{qty}\r\n"
    str << "PRINTER=#{p.url}\r\n\r\n"
    ######################################
    # Buffered; saved only after the job file uploads successfully.
    logs << Log.new(timestamp: DateTime.now,
                    loggable_type: 'Shipment',
                    loggable_id: si.shipment.id,
                    event: :label_printed,
                    data1: item_number,
                    data2: qty,
                    data3: p.name,
                    ip_address: request.remote_ip,
                    user_id: session[:user_id])
  end
  # handle nothing to print
  if label_count == 0
    flash[:error] = "No labels specified for printing."
    return redirect_to :back
  end
  # SCP file over to server
  tmp_file = "/tmp/" + Time.now.strftime("%Y-%m-%d-%H%M%S") + ".acf"
  File.write(tmp_file, str)
  # example scp://user:pass@server1.mydomain.com:/home/kiaro/monitor/
  uri = URI(Setting.get(:kiaro, "Print Job URI"))
  begin
    Net::SCP.upload!(uri.host, uri.user, tmp_file, uri.path, :ssh => { :password => uri.password, :port => uri.port || 22 })
    flash[:success] = "#{label_count} labels submitted for printing"
    logs.each(&:save)
    Log.create(timestamp: DateTime.now, loggable_type: 'Printer', loggable_id: p.id, event: :job_submitted,
               data1: label, data2: label_count, ip_address: request.remote_ip, user_id: session[:user_id])
  rescue => e
    flash[:error] = e.message
  end
  File.delete(tmp_file)
  redirect_to :back
end
# Builds a single PDF containing packing slips for all selected shipments via
# wkhtmltopdf, then either downloads it or sends it to a CUPS printer.
def packing_slip_batch
  urls = []
  token = Cache.setting(Rails.configuration.domain_id, :system, 'Security Token')
  website_url = Cache.setting(Rails.configuration.domain_id, :system, 'Website URL')
  Shipment.where(id: params[:shipment_id]).each do |s|
    # Each slip URL carries an MD5 digest so it can be fetched unauthenticated.
    digest = Digest::MD5.hexdigest(s.id.to_s + token)
    urls << website_url + packing_slip_admin_store_shipment_path(s, digest: digest)
    OrderHistory.create(order_id: s.order_id, user_id: session[:user_id], event_type: :packing_slip_print, system_name: 'Rhombus', comment: "Packing slip printed")
  end
  output_file = "/tmp/#{SecureRandom.hex(6)}.pdf"
  # Argument-list form of system() avoids shell interpolation of the URLs
  # (the previous single-string form was shell-injectable via settings/paths).
  system("wkhtmltopdf", "-q", *urls, output_file)
  unless File.exist?(output_file) # File.exists? is deprecated
    flash[:error] = "Unable to generate PDF [Debug: #{$?}]"
    return redirect_to :back
  end
  if params[:printer_id].blank?
    send_file output_file
  else
    printer = Printer.find(params[:printer_id])
    job = printer.print_file(output_file)
    flash[:info] = "Print job submitted to '#{printer.name} [#{printer.location}]'. CUPS JobID: #{job.id}"
    redirect_to :back
  end
end
# Fetches EasyPost labels for the selected shipments: with a printer selected
# every label is printed; without one the first label is streamed as a download.
def shipping_label_batch
  EasyPost.api_key = Cache.setting(Rails.configuration.domain_id, 'Shipping', 'EasyPost API Key')
  if params[:printer_id].blank?
    p = nil # direct-download mode
    file_format = 'pdf'
  else
    p = Printer.find(params[:printer_id])
    file_format = p.preferred_format
    mime_type = (file_format == 'pdf' ? 'application/pdf' : 'text/plain')
  end
  count = 0
  begin
    Shipment.where(id: params[:shipment_id]).each do |s|
      courier_data = JSON.parse(s.courier_data)
      ep_shipment = EasyPost::Shipment.retrieve(courier_data['id'])
      response = ep_shipment.label({'file_format' => file_format})
      # download label
      label_url = response[:postage_label]["label_#{file_format}_url"]
      label_data = Net::HTTP.get(URI.parse(label_url))
      if p.nil?
        # Only one file can be streamed to the browser per request.
        return send_data label_data, filename: s.to_s + "." + file_format
      else
        p.print_data(label_data, mime_type)
        count += 1
      end
    end
  rescue => e
    flash[:error] = e.message
    return redirect_to :back
  end
  if p
    flash[:info] = "#{count} label(s) sent to #{p.name} [#{p.location}]"
  else
    # Download mode with no matching shipments: previously crashed on p.name.
    flash[:info] = "0 label(s) processed"
  end
  redirect_to :back
end
# Bulk status update; stamps ship_date the first time a shipment becomes 'shipped'.
def update_status
  shipments = Shipment.where(id: params[:shipment_id]).where.not(status: params[:status])
  shipments.each do |shipment|
    shipment.update_attribute(:status, params[:status])
    needs_ship_date = shipment.status == 'shipped' && shipment.ship_date.nil?
    shipment.update_attribute(:ship_date, Date.today) if needs_ship_date
  end
  flash[:info] = "Status of #{shipments.length} shipment(s) updated to '#{params[:status]}'"
  redirect_to :back
end
# Sends the "order shipped" email synchronously and reports the outcome.
def email_confirmation
  shipment = Shipment.find(params[:id])
  begin
    OrderMailer.order_shipped(shipment.id, session[:user_id]).deliver_now
    flash[:info] = "Shipment email sent to '#{shipment.order.notify_email}'"
  rescue => e
    # Report failures on the error key (was flash[:info], which styled mail
    # failures as informational — inconsistent with the rest of this controller).
    flash[:error] = e.message
  end
  redirect_to :back
end
# Prepares a batch-edit form for the selected shipments, pre-filling fields from
# the most recent shipped shipment with identical contents when one exists.
def batch
  @shipments = Shipment.where(id: params[:shipment_id])
  if @shipments.length == 0
    flash[:error] = "No shipments selected."
    return redirect_to :back
  end
  @batch = Shipment.new(ship_date: Date.today)
  # Look at the ten most recently shipped shipments for a matching template.
  recent = Shipment.includes(:items).where(status: :shipped).order(ship_date: :desc).limit(10)
  template = recent.detect { |candidate| candidate.same_content?(@shipments[0]) }
  if template
    @batch = template.dup
    @batch.ship_date = Date.today
  end
end
# GET /scan — loads the shipment and its items for the barcode-scanning view.
def scan
  eager = [:items, { items: :product }, { items: :affiliate }]
  @shipment = Shipment.includes(*eager).find(params[:id])
end
# Tallies the scanned UPC codes so the view can compare them against the
# shipment's contents.
def verify_scan
  @shipment = Shipment.includes(:items, [items: :product], [items: :affiliate]).find(params[:id])
  # Use chomp (not chomp!): chomp! returns nil when a line has no trailing "\r",
  # which previously injected a bogus nil key into the tally for the last line.
  scan_list = params["upc_list"].split("\n").map { |line| line.chomp }
  @scans = {}
  scan_list.each do |scan|
    @scans[scan] = 0 if @scans[scan].nil?
    @scans[scan] += 1
  end
  render 'scan'
end
# Creates and saves the inventory transaction backing this shipment.
# Any failure (validation or otherwise) is surfaced via flash[:error].
def create_inventory_transaction
@shipment = Shipment.includes(:items, [items: :product]).find(params[:id])
begin
tran = @shipment.new_inventory_transaction
tran.shipment_id = @shipment.id
tran.save!
rescue => e
flash[:error] = e.message
end
redirect_to :back
end
private
# Strong parameters for shipment create/update forms.
# NOTE(review): permit! whitelists nothing — every submitted attribute is
# mass-assignable. Admin-only today, but an explicit permit list would be safer.
def shipment_params
params.require(:shipment).permit!
end
end
label printing for personalization
require 'countries'
require 'base64'
require 'socket'
require 'easypost'
require 'net/http'
require 'net/scp'
require 'uri'
# Admin controller for order shipments: CRUD, EasyPost shipping labels,
# QuickCommand/Kiaro product-label printing, packing slips and batch tools.
class Admin::Store::ShipmentsController < Admin::BaseController
  # The label-print station posts jobs without a CSRF token.
  skip_before_filter :verify_authenticity_token, only: :label_print

  # Filterable, paginated shipment list; CSV export skips the bulky label blob.
  def index
    q = params[:q]
    s = Shipment.includes(:order, :items, :inventory_transaction, [items: :order_item]).order('store_shipments.created_at DESC')
    # Parameterized LIKE — the previous string interpolation of params[:q]
    # was an SQL injection vector. A nil q still yields '%%' (match all).
    s = s.where("recipient_name LIKE :q OR recipient_company LIKE :q OR recipient_city LIKE :q", q: "%#{q}%")
    s = s.where("store_orders.user_id = ?", params[:user_id]) unless params[:user_id].blank?
    s = s.where("store_orders.affiliate_id = ?", params[:affiliate_id]) unless params[:affiliate_id].blank?
    s = s.where(carrier: params[:carrier]) unless params[:carrier].blank?
    s = s.where(ship_date: params[:ship_date]) unless params[:ship_date].blank?
    s = s.where(status: params[:status]) unless params[:status].blank?
    s = s.where(manifest_id: params[:manifest_id]) unless params[:manifest_id].blank?
    respond_to do |format|
      format.html { @shipments = s.paginate(page: params[:page], per_page: @per_page) }
      format.csv { send_data Shipment.to_csv(s, skip_cols: ['label_data']) }
    end
  end

  # Start a new (unsaved) shipment for an order; redirects to the picker when missing.
  def new
    return redirect_to action: 'choose_order' if params[:order_id].nil?
    # find_by returns nil instead of raising, so the not-found branch is reachable.
    @order = Order.find_by(id: params[:order_id])
    if @order.nil?
      flash[:notice] = "Order #{params[:order_id]} was not found."
      return redirect_to action: 'choose_order'
    end
    @shipment = @order.create_shipment(session[:user_id], false)
    @shipment.invoice_amount = @order.total if @shipment.sequence == 1
    render 'edit'
  end

  # Persist a new shipment and record it in the order history (debug puts removed).
  def create
    @shipment = Shipment.new(shipment_params)
    @shipment.fulfilled_by_id = current_user.id
    if @shipment.save
      OrderHistory.create order_id: @shipment.order_id, user_id: current_user.id, event_type: :shipment_created,
                          system_name: 'Rhombus', identifier: @shipment.id, comment: "shipment created: #{@shipment}"
      flash[:notice] = "Shipment #{@shipment.order_id}-#{@shipment.sequence} was successfully created."
      redirect_to action: 'show', id: @shipment.id
    else
      render 'edit'
    end
  end

  def show
    @shipment = Shipment.includes(:items, [items: :product], [items: :affiliate]).find(params[:id])
  end

  # Printable packing slip (no admin layout).
  def packing_slip
    @shipment = Shipment.includes(:items, [items: :product], [items: :affiliate], [items: :order_item]).find(params[:id])
    render 'packing_slip', layout: false
  end

  # Printable invoice (no admin layout).
  def invoice
    @shipment = Shipment.find(params[:id])
    render 'invoice', layout: false
  end

  # Queue a background job that emails the invoice.
  def email_invoice
    @shipment = Shipment.find(params[:id])
    SendInvoiceJob.perform_later(@shipment.id, session[:user_id])
    flash[:success] = "Invoice was emailed to #{@shipment.order.notify_email}"
    redirect_to :back
  end

  # Post the shipment's invoice and record the event in the order history.
  def create_payment
    @shipment = Shipment.find(params[:id])
    if @shipment.post_invoice
      OrderHistory.create(order_id: @shipment.order.id, user_id: session[:user_id],
                          event_type: :invoice, system_name: 'Rhombus', identifier: @shipment.to_s,
                          comment: "Invoiced $#{@shipment.invoice_amount}")
      flash[:success] = 'Invoice posted'
    else
      flash[:error] = 'Invoice was not posted'
    end
    redirect_to :back
  end

  def edit
    @shipment = Shipment.includes(:items, [items: :product], [items: :affiliate], [items: :order_item]).find(params[:id])
  end

  def update
    @shipment = Shipment.find(params[:id])
    @shipment.fulfilled_by_id = current_user.id
    if @shipment.update(shipment_params)
      flash[:notice] = "Shipment #{@shipment.order_id}-#{@shipment.sequence} was updated."
      redirect_to action: 'show', id: @shipment.id
    else
      render 'edit'
    end
  end

  def destroy
    Shipment.find(params[:id]).destroy
    redirect_to :back
  end

  # Renders the order-picker form; no controller logic required.
  def choose_order
  end

  # Stream the stored shipping-label image in its recorded content type.
  def label_image
    shipment = Shipment.find(params[:id])
    send_data shipment.label_data, type: shipment.label_format
  end

  # Download or print a shipping label.
  # NOTE(review): the unconditional return below disables the whole action
  # (appears intentional); everything after it is dead code.
  def label
    return render text: :ok
    # used background processing for printing to thermal printer as it can take a few seconds
    if ['epl2','zpl'].include?(params[:format])
      ShippingLabelJob.perform_later(session[:user_id], params[:id], params[:format])
      flash[:info] = "Shipping label dispatched to printer"
      return redirect_to :back
    end
    # requested a PNG probably
    shipment = Shipment.find(params[:id])
    courier_data = JSON.parse(shipment.courier_data)
    begin
      EasyPost.api_key = Cache.setting(shipment.order.domain_id, 'Shipping', 'EasyPost API Key')
      ep_shipment = EasyPost::Shipment.retrieve(courier_data['id'])
      response = ep_shipment.label({'file_format' => params[:format]})
      # download label
      label_url = response[:postage_label]["label_#{params[:format]}_url"]
      label_data = Net::HTTP.get(URI.parse(label_url))
    rescue => e
      flash[:error] = "Error downloading shipping label: " + e.message
      return redirect_to :back
    end
    send_data label_data, filename: shipment.to_s + "." + params[:format]
  end

  # Request a postage refund from EasyPost; void the label when it is accepted.
  def void_label
    shipment = Shipment.find(params[:id])
    EasyPost.api_key = Cache.setting(shipment.order.domain_id, 'Shipping', 'EasyPost API Key')
    courier_data = JSON.parse(shipment.courier_data)
    begin
      ep_shipment = EasyPost::Shipment.retrieve(courier_data['id'])
      response = ep_shipment.refund
      flash[:info] = "Refund status: #{response[:refund_status]} - / - Tracking: #{response[:tracking_code]} - / - Confirmation: #{response[:confirmation_number] || "n/a"}"
      shipment.update_attribute(:status, 'void') if response[:refund_status] == 'submitted'
    rescue => e
      flash[:error] = e.message
    end
    redirect_to :back
  end

  # Product-label print data for every pending shipment.
  def product_labels_pending
    @shipments = Shipment.where(status: :pending)
    @items = []
    # An empty id list would previously generate invalid SQL ("in ()").
    return render 'product_labels' if @shipments.empty?
    # Ids come straight from the DB, so interpolation is safe here.
    sql = <<-EOF
      select si.shipment_id, si.id as shipment_item_id, sheet.name as label, oi.item_number, p.name, p.option_title,
      sum(si.quantity) as quantity, uploaded_file, upload_file_preview, rendered_file
      from store_shipment_items si
      join store_order_items oi on oi.id = si.order_item_id
      join store_products p on p.id = si.product_id
      join store_label_sheets sheet on sheet.id = p.label_sheet_id
      where shipment_id in (#{@shipments.map(&:id).join(",")})
      and si.quantity > 0
      group by shipment_id, p.sku, si.quantity
      order by sheet.name;
    EOF
    ActiveRecord::Base.connection.execute(sql).each(as: :hash) { |row| @items << row.with_indifferent_access }
    render 'product_labels'
  end

  # Product-label print data for explicitly selected shipments.
  def product_labels
    # Coerce ids to integers before SQL interpolation — the raw join of
    # params[:shipment_id] was an SQL injection vector.
    ids = Array(params[:shipment_id]).map(&:to_i)
    @shipments = Shipment.where(id: ids)
    @items = []
    return if ids.empty? # avoid invalid "in ()" SQL
    sql = <<-EOF
      select si.shipment_id, si.id as shipment_item_id, sheet.name as label, oi.item_number, p.name, p.option_title,
      sum(si.quantity) as quantity, uploaded_file, upload_file_preview, rendered_file
      from store_shipment_items si
      join store_order_items oi on oi.id = si.order_item_id
      join store_products p on p.id = si.product_id
      join store_label_sheets sheet on sheet.id = p.label_sheet_id
      where shipment_id in (#{ids.join(",")})
      and si.quantity > 0
      group by shipment_id, p.sku, si.quantity
      order by sheet.name;
    EOF
    ActiveRecord::Base.connection.execute(sql).each(as: :hash) { |row| @items << row.with_indifferent_access }
  end

  # Print one size of labels: build a QuickCommand .acf job file, SCP it to the
  # print server's watch folder, and log each printed item (debug puts removed).
  def label_print
    p = Printer.find_by(id: params[:printer_id])
    if p.nil?
      flash[:error] = "Printer not found"
      return redirect_to :back
    end
    label_prefix = Setting.get(:kiaro, "Label Prefix")
    # params[:label] is "<id> <name>"; the name maps to an .alf layout file.
    label = params[:label].split(" ", 2)[1] + ".alf"
    label_count = 0
    str = ""
    logs = []
    params[:shipment_items].each do |h|
      next if h['quantity'] == "0"
      si = ShipmentItem.find(h['id'])
      # Personalized items cannot print until their artwork is rendered.
      next if (h['personalized'] == 'true' && si.order_item.rendered_file.blank?)
      item_number = si.order_item.item_number
      qty = h['quantity'].to_i
      label_count += qty
      ### QUICKCOMMAND LABEL SPECS #########
      str << "LABELNAME=#{label}\r\n"
      if h['personalized'] == 'true'
        img = si.order_item.rendered_file.split('/').last
        str << "FIELD 001=#{label_prefix}\\personalized_labels\\#{img}\r\n"
      else
        # item_number is "<sku>-<breed>-<variant>"; only the breed segment is used.
        _sku, breed, _variant = item_number.split("-")
        str << "FIELD 001=#{label_prefix}\\hb_labels\\#{breed}\\#{item_number}.pdf\r\n"
      end
      str << "LABELQUANTITY=#{qty}\r\n"
      str << "PRINTER=#{p.url}\r\n\r\n"
      ######################################
      # Buffered; saved only after a successful upload.
      logs << Log.new(timestamp: DateTime.now,
                      loggable_type: 'Shipment',
                      loggable_id: si.shipment.id,
                      event: :label_printed,
                      data1: item_number,
                      data2: qty,
                      data3: p.name,
                      ip_address: request.remote_ip,
                      user_id: session[:user_id])
    end
    # handle nothing to print
    if label_count == 0
      flash[:error] = "No labels specified for printing."
      return redirect_to :back
    end
    # SCP file over to server
    tmp_file = "/tmp/" + Time.now.strftime("%Y-%m-%d-%H%M%S") + ".acf"
    File.write(tmp_file, str)
    # example scp://user:pass@server1.mydomain.com:/home/kiaro/monitor/
    uri = URI(Setting.get(:kiaro, "Print Job URI"))
    begin
      Net::SCP.upload!(uri.host, uri.user, tmp_file, uri.path, :ssh => { :password => uri.password, :port => uri.port || 22 })
      flash[:success] = "#{label_count} labels submitted for printing"
      logs.each(&:save)
      Log.create(timestamp: DateTime.now, loggable_type: 'Printer', loggable_id: p.id, event: :job_submitted,
                 data1: label, data2: label_count, ip_address: request.remote_ip, user_id: session[:user_id])
    rescue => e
      flash[:error] = e.message
    end
    File.delete(tmp_file)
    redirect_to :back
  end

  # One PDF of packing slips for the selected shipments; download or print it.
  def packing_slip_batch
    urls = []
    token = Cache.setting(Rails.configuration.domain_id, :system, 'Security Token')
    website_url = Cache.setting(Rails.configuration.domain_id, :system, 'Website URL')
    Shipment.where(id: params[:shipment_id]).each do |s|
      # Each slip URL carries an MD5 digest so it can be fetched unauthenticated.
      digest = Digest::MD5.hexdigest(s.id.to_s + token)
      urls << website_url + packing_slip_admin_store_shipment_path(s, digest: digest)
      OrderHistory.create(order_id: s.order_id, user_id: session[:user_id], event_type: :packing_slip_print, system_name: 'Rhombus', comment: "Packing slip printed")
    end
    output_file = "/tmp/#{SecureRandom.hex(6)}.pdf"
    # Argument-list form of system() avoids shell interpolation of the URLs.
    system("wkhtmltopdf", "-q", *urls, output_file)
    unless File.exist?(output_file) # File.exists? is deprecated
      flash[:error] = "Unable to generate PDF [Debug: #{$?}]"
      return redirect_to :back
    end
    if params[:printer_id].blank?
      send_file output_file
    else
      printer = Printer.find(params[:printer_id])
      job = printer.print_file(output_file)
      flash[:info] = "Print job submitted to '#{printer.name} [#{printer.location}]'. CUPS JobID: #{job.id}"
      redirect_to :back
    end
  end

  # Fetch EasyPost labels for the selection: print them all, or stream the
  # first one as a download when no printer is selected.
  def shipping_label_batch
    EasyPost.api_key = Cache.setting(Rails.configuration.domain_id, 'Shipping', 'EasyPost API Key')
    if params[:printer_id].blank?
      p = nil # direct-download mode
      file_format = 'pdf'
    else
      p = Printer.find(params[:printer_id])
      file_format = p.preferred_format
      mime_type = (file_format == 'pdf' ? 'application/pdf' : 'text/plain')
    end
    count = 0
    begin
      Shipment.where(id: params[:shipment_id]).each do |s|
        courier_data = JSON.parse(s.courier_data)
        ep_shipment = EasyPost::Shipment.retrieve(courier_data['id'])
        response = ep_shipment.label({'file_format' => file_format})
        # download label
        label_url = response[:postage_label]["label_#{file_format}_url"]
        label_data = Net::HTTP.get(URI.parse(label_url))
        if p.nil?
          # Only one file can be streamed to the browser per request.
          return send_data label_data, filename: s.to_s + "." + file_format
        else
          p.print_data(label_data, mime_type)
          count += 1
        end
      end
    rescue => e
      flash[:error] = e.message
      return redirect_to :back
    end
    if p
      flash[:info] = "#{count} label(s) sent to #{p.name} [#{p.location}]"
    else
      # Download mode with no matching shipments: previously crashed on p.name.
      flash[:info] = "0 label(s) processed"
    end
    redirect_to :back
  end

  # Bulk status change; stamps ship_date the first time a shipment ships.
  def update_status
    shipments = Shipment.where(id: params[:shipment_id]).where.not(status: params[:status])
    shipments.each do |s|
      s.update_attribute(:status, params[:status])
      if s.status == 'shipped' && s.ship_date.nil?
        s.update_attribute(:ship_date, Date.today)
      end
    end
    flash[:info] = "Status of #{shipments.length} shipment(s) updated to '#{params[:status]}'"
    redirect_to :back
  end

  # Send the "order shipped" email synchronously.
  def email_confirmation
    shipment = Shipment.find(params[:id])
    begin
      OrderMailer.order_shipped(shipment.id, session[:user_id]).deliver_now
      flash[:info] = "Shipment email sent to '#{shipment.order.notify_email}'"
    rescue => e
      # Report failures on the error key (was flash[:info]).
      flash[:error] = e.message
    end
    redirect_to :back
  end

  # Batch-edit form, pre-filled from a recently shipped shipment with the same contents.
  def batch
    @shipments = Shipment.where(id: params[:shipment_id])
    if @shipments.length == 0
      flash[:error] = "No shipments selected."
      return redirect_to :back
    end
    @batch = Shipment.new(ship_date: Date.today)
    # try to autopopulate fields from the ten most recently shipped shipments
    shipments = Shipment.includes(:items)
                        .where(status: :shipped)
                        .order(ship_date: :desc)
                        .limit(10)
    shipments.each do |s|
      if s.same_content?(@shipments[0])
        @batch = s.dup
        @batch.ship_date = Date.today
        break
      end
    end
  end

  def scan
    @shipment = Shipment.includes(:items, [items: :product], [items: :affiliate]).find(params[:id])
  end

  # Tally scanned UPC codes against the shipment's contents.
  def verify_scan
    @shipment = Shipment.includes(:items, [items: :product], [items: :affiliate]).find(params[:id])
    # chomp (not chomp!): chomp! returns nil for lines without a trailing "\r",
    # which previously injected a nil key into the tally.
    scan_list = params["upc_list"].split("\n").map { |x| x.chomp }
    @scans = {}
    scan_list.each do |scan|
      @scans[scan] = 0 if @scans[scan].nil?
      @scans[scan] += 1
    end
    render 'scan'
  end

  # Create and save the inventory transaction backing this shipment.
  def create_inventory_transaction
    @shipment = Shipment.includes(:items, [items: :product]).find(params[:id])
    begin
      tran = @shipment.new_inventory_transaction
      tran.shipment_id = @shipment.id
      tran.save!
    rescue => e
      flash[:error] = e.message
    end
    redirect_to :back
  end

  private

  # Strong parameters for shipment forms.
  # NOTE(review): permit! allows mass-assignment of every submitted attribute;
  # consider an explicit permit list.
  def shipment_params
    params.require(:shipment).permit!
  end
end
|
class Api::V1::TaxonConceptsController < Api::V1::BaseController
after_action only: [:index] { set_pagination_headers(:taxon_concepts) }
before_action :validate_params, only: [:index]
resource_description do
formats ['JSON', 'XML']
api_base_url 'api/v1/taxon_concepts'
name 'Taxon Concepts'
end
api :GET, '/', 'Lists taxon concepts'
description <<-EOS
The following taxon concept fields are returned:
[id] unique identifier of a taxon concept
[full_name] scientific name [max 255 characters]
[author_year] author and year (parentheses where applicable) [max 255 characters]
[rank] one of +KINGDOM+, +PHYLUM+, +CLASS+, +ORDER+, +FAMILY+, +SUBFAMILY+, +GENUS+, +SPECIES+, +SUBSPECIES+, +VARIETY+ [max 255 characters]
[name_status] +A+ for accepted names, +S+ for synonyms (both types of names are taxon concepts in Species+) [max 255 characters]
[updated_at] timestamp of last update to the taxon concept in Species+
[active] if false, taxon concept has been deleted
[synonyms] list of synonyms (only for accepted names, i.e. name_status == A) [full_name, author_year and rank follow the same length constraints as respective properties of the main taxon concept]
[higher_taxa] object that gives scientific names of ancestors in the taxonomic tree (only for accepted names) [higher taxa names follow the same length constraint as full_name of the main taxon concept]
[common_names] list of common names (with language given by ISO 639-1 code; only for accepted names) [name, language max 255 characters]
[cites_listing] value of current CITES listing (as per CITES Checklist). When taxon concept is removed from appendices this becomes +NC+. When taxon is split listed it becomes a concatenation of appendix symbols, e.g. +I/II/NC+ (only for accepted names) [max 255 characters]
[cites_listings] list of current CITES listings with annotations (there will be more than one element in this list in case of split listings; only for accepted names) [appendix max 255 characters; annotation, hash_annotation unlimited length]
[accepted_names] list of accepted names (only for synonyms, i.e. name_status == S) [full_name, author_year and rank follow the same length constraints as respective properties of the main taxon concept]
==== Note on deleted taxon concepts
In the event of removal from CITES appendices, a taxon is not deleted from the Species+ DB. As a historically listed taxon it remains in the database, and in most cases the value of current CITES listing becomes 'NC'. In some cases the value is affected by listed subspecies, e.g. <i>Pseudomys fieldi</i> has been removed from Appendix I, but since there is a listed subspecies, the current listing is 'I/NC'.
Taxon concepts are deleted from the Species+ database only on rare occasions when they have been entered by mistake. API consumers can detect this event by checking the value of the 'active' flag, which is set to false in case of deleted taxa.
==== Note on +updated_since+
The +updated_since+ parameter is intended to be used in order to allow API clients to only fetch taxon concepts updated since the last run of the synchronisation job. Those taxa will include:
- Taxa that have been newly added. This can be verified by checking against the list of existing identifiers.
- Taxa that have been deleted. This can be inferred by the value of the active flag.
- Taxa that have been updated, and that includes changes to the taxon concept record as well as linked records (names, distribution, references, CITES or EU legislation). There is no way to infer which piece of information was changed and in such cases a full re-synchronisation of all information on that taxon concept is recommended.
==== Note on pagination
Where more than 500 taxon concepts are returned, the request is paginated, showing 500 objects (or less by passing in an optional 'per_page' parameter) at a time. To fetch the remaining objects, you will need to make a new request and pass the optional ‘page’ parameter as below:
http://api.speciesplus.net/api/v1/taxon_concepts?page=2&per_page=25
Information about the remaining pages is provided in the Link header of the API response. For example, making the above request for page two, with a limit of 25 objects per page would return the following in the link header along with a total-count header:
Link: <http://api.speciesplus.net/api/v1/taxon_concepts?page=3&per_page=25>; rel="next", <http://api.speciesplus.net/api/v1/taxon_concepts?page=2570&per_page=25>; rel="last"
Total-Count: 64230
If there are additional pages, the link header will contain the URL for the next page of results, followed by the URL for the last page of results. The Total-Count header shows the total number of objects returned for this call, regardless of pagination.
For convenience, a 'pagination' meta object is also included in the body of the response.
EOS
param :page, String, desc: 'Page number for paginated responses', required: false
param :per_page, String, desc: 'Limit for how many objects returned per page for paginated responses. If not specificed it will default to the maximum value of 500', required: false
param :updated_since, String, desc: 'Pull only objects updated after (and including) the specified timestamp in ISO8601 format (UTC time).', required: false
param :name, String, desc: 'Filter taxon concepts by name', required: false
param :with_descendants, String, desc: 'Broadens the above search by name to include higher taxa. Value must be true or false', required: false
param :taxonomy, String, desc: 'Filter taxon concepts by taxonomy, accepts either CITES or CMS as its value. Defaults to CITES if no value is specified', required: false
param :language, String, desc: 'Filter languages returned for common names. Value should be a single country code or a comma separated string of country codes (e.g. language=EN,PL,IT). Defaults to showing all available languages if no language parameter is specified', required: false
example <<-EOS
{
"pagination":{
"current_page":1,
"per_page":500,
"total_entries":1
},
"taxon_concepts":[
{
"id":4521,
"full_name":"Loxodonta africana",
"author_year":"(Blumenbach, 1797)",
"rank":"SPECIES",
"name_status":"A",
"updated_at":"2014-12-11T15:39:51.620Z",
"active":true,
"cites_listing":"I/II",
"higher_taxa":{
"kingdom":"Animalia",
"phylum":"Chordata",
"class":"Mammalia",
"order":"Proboscidea",
"family":"Elephantidae"
},
"synonyms":[
{
"id":37069,
"full_name":"Loxodonta cyclotis",
"author_year":"(Matschie, 1900)",
"rank":"SPECIES"
}
],
"common_names":[
{
"name":"African Elephant",
"language":"EN"
},
{
"name":"African Savannah Elephant",
"language":"EN"
},
{
"name":"Eléphant d'Afrique",
"language":"FR"
},
{
"name":"Eléphant africain",
"language":"FR"
}
],
"cites_listings":[
{
"appendix":"II",
"annotation":"The populations of Botswana, Namibia, South Africa and Zimbabwe are listed in Appendix II for the exclusive purpose of allowing: [...]",
"hash_annotation":null
},
{
"appendix":"I",
"annotation":"Included in Appendix I, except the populations of Botswana, Namibia, South Africa and Zimbabwe, which are included in Appendix II.",
"hash_annotation":null
}
]
}
]
}
EOS
example <<-EOS
<?xml version="1.0" encoding="UTF-8"?>
<hash>
<pagination>
<current-page type="integer">1</current-page>
<per-page type="integer">500</per-page>
<total-entries type="integer">1</total-entries>
</pagination>
<taxon-concepts type="array">
<taxon-concept>
<id type="integer">4521</id>
<full-name>Loxodonta africana</full-name>
<author-year>(Blumenbach, 1797)</author-year>
<rank>SPECIES</rank>
<name-status>A</name-status>
<updated-at type="dateTime">2014-12-11T15:39:51Z</updated-at>
<active type="boolean">true</active>
<cites-listing>I/II</cites-listing>
<higher-taxa>
<kingdom>Animalia</kingdom>
<phylum>Chordata</phylum>
<class>Mammalia</class>
<order>Proboscidea</order>
<family>Elephantidae</family>
</higher-taxa>
<synonyms type="array">
<synonym>
<id type="integer">37069</id>
<full-name>Loxodonta cyclotis</full-name>
<author-year>(Matschie, 1900)</author-year>
<rank>SPECIES</rank>
</synonym>
</synonyms>
<common-names type="array">
<common-name>
<name>African Elephant</name>
<language>EN</language>
</common-name>
<common-name>
<name>African Savannah Elephant</name>
<language>EN</language>
</common-name>
<common-name>
<name>Eléphant d'Afrique</name>
<language>FR</language>
</common-name>
<common-name>
<name>Eléphant africain</name>
<language>FR</language>
</common-name>
</common-names>
<cites-listings type="array">
<cites-listing>
<appendix>II</appendix>
<annotation>The populations of Botswana, Namibia, South Africa and Zimbabwe are listed in Appendix II for the exclusive purpose of allowing: [...]</annotation>
<hash-annotation nil="true"/>
</cites-listing>
<cites-listing>
<appendix>I</appendix>
<annotation>Included in Appendix I, except the populations of Botswana, Namibia, South Africa and Zimbabwe, which are included in Appendix II.</annotation>
<hash-annotation nil="true"/>
</cites-listing>
</cites-listings>
</taxon-concept>
</taxon-concepts>
</hash>
EOS
error code: 400, desc: "Bad Request"
error code: 401, desc: "Unauthorized"
error code: 404, desc: "Not Found"
error code: 422, desc: "Unprocessable Entity"
error code: 500, desc: "Internal Server Error"
# GET /api/v1/taxon_concepts
# Builds the paginated taxon-concept listing, optionally filtered by
# name (with or without descendants), update timestamp and taxonomy.
def index
  limit = TaxonConcept.per_page
  requested = params[:per_page]
  # A client may shrink the page size but never exceed the model default.
  per_page = requested && requested.to_i < limit ? requested : limit

  @taxon_concepts = TaxonConcept.
    select([
      :id, :full_name, :author_year, :name_status, :rank, :cites_listing,
      :higher_taxa, :synonyms, :accepted_names, :updated_at, :active
    ]).
    paginate(page: params[:page], per_page: per_page).
    order(:taxonomic_position)

  if params[:name]
    name = params[:name].downcase
    @taxon_concepts =
      if params[:with_descendants] == "true"
        # Match the name at any rank so descendants are included.
        ancestor_match = "lower(full_name) = :name
          OR lower(genus_name) = :name
          OR lower(family_name) = :name
          OR lower(order_name) = :name
          OR lower(class_name) = :name
          OR lower(phylum_name) = :name
          OR lower(kingdom_name) = :name
        "
        @taxon_concepts.where(ancestor_match, name: name)
      else
        @taxon_concepts.where("lower(full_name) = ?", name)
      end
  end

  if params[:updated_since]
    @taxon_concepts = @taxon_concepts.where("updated_at >= ?", params[:updated_since])
  end

  # Anything other than an explicit "cms" taxonomy means CITES/EU.
  cms_requested = params[:taxonomy] && params[:taxonomy].downcase == 'cms'
  @taxon_concepts = @taxon_concepts.where(taxonomy_is_cites_eu: !cms_requested)
  render 'api/v1/taxon_concepts/index'
end
private
# Overrides method from parent controller.
# Sets I18n.locale from the optional +language+ param (a single code or a
# comma-separated list; the first entry wins), falling back to the default
# locale for unsupported codes, and exposes @languages (upcased codes)
# for filtering common names.
def set_language
  # Normalise BEFORE splitting. The original did
  # split(',').first.delete(' '), which raised NoMethodError for
  # ?language= or ?language=, (split yields [], .first is nil) before the
  # || 'en' fallback could apply.
  language = params[:language].to_s.downcase.delete(' ').split(',').first || 'en'
  I18n.locale = if ['en', 'es', 'fr'].include?(language)
    language
  else
    I18n.default_locale
  end
  @languages = params[:language].delete(' ').split(',').map! { |lang| lang.upcase } unless params[:language].nil?
end
# Whitelist of query-string parameters this endpoint accepts.
def permitted_params
  %i[
    page per_page updated_since name
    with_descendants taxonomy language format
  ]
end
# Runs the per-parameter format validators and the cross-parameter rules,
# recording a 400/422 API error (which halts the action) on failure.
def validate_params
  super()
  [
    :updated_since,
    :page,
    :per_page,
    :with_descendants
  ].each do |param|
    unless send(:"validate_#{param}_format")
      track_api_error("Invalid parameter format: #{param}", 400) and return
    end
  end
  # \A...\z anchors the WHOLE string; the original /^(cms|cites)$/ used
  # per-line anchors, so a multi-line value such as "evil\ncites" passed.
  if params[:taxonomy].present? && !(/\A(cms|cites)\z/.match(params[:taxonomy].downcase))
    track_api_error("Unknown taxonomy: #{params[:taxonomy]}", 422) and return
  end
  # with_descendants only makes sense together with a name filter.
  if params[:with_descendants] == 'true' && params[:name].blank?
    track_api_error("Invalid use of with_descendants", 422) and return
  end
end
# True when updated_since is absent or its leading YYYY-MM-DD part is a
# real calendar date (missing components coerce to 0 and fail the check).
def validate_updated_since_format
  return true unless params[:updated_since]
  year, month, day = params[:updated_since].split('-')
  Date.valid_date?(year.to_i, month.to_i, day.to_i)
end
# True when the optional page param is absent or consists of digits only.
# \z (not \Z) is the strict end-of-string anchor: \Z would also accept a
# value with a trailing newline, e.g. "12\n".
def validate_page_format
  return true unless params[:page]
  /\A\d+\z/.match(params[:page])
end
# True when the optional per_page param is absent or digits only.
# \z (not \Z) rejects a trailing newline, which \Z would accept.
def validate_per_page_format
  return true unless params[:per_page]
  /\A\d+\z/.match(params[:per_page])
end
# True when with_descendants is absent or exactly "true"/"false".
# \A...\z anchors the whole string; the original ^...$ anchors match per
# line, so "true\nanything" slipped through.
def validate_with_descendants_format
  return true unless params[:with_descendants]
  /\A(true|false)\z/.match(params[:with_descendants])
end
end
updated documentation of higher taxa
# Read-only API endpoint listing Species+ taxon concepts, documented with
# the apipie DSL.
class Api::V1::TaxonConceptsController < Api::V1::BaseController
  # Parentheses are required here: a brace block cannot attach to a
  # parenthesis-free call whose last argument is a hash, so the original
  # `after_action only: [:index] { ... }` did not parse.
  after_action(only: [:index]) { set_pagination_headers(:taxon_concepts) }
  before_action :validate_params, only: [:index]

  resource_description do
    formats ['JSON', 'XML']
    api_base_url 'api/v1/taxon_concepts'
    name 'Taxon Concepts'
  end

  api :GET, '/', 'Lists taxon concepts'
  description <<-EOS
The following taxon concept fields are returned:
[id] unique identifier of a taxon concept
[full_name] scientific name [max 255 characters]
[author_year] author and year (parentheses where applicable) [max 255 characters]
[rank] one of +KINGDOM+, +PHYLUM+, +CLASS+, +ORDER+, +FAMILY+, +SUBFAMILY+, +GENUS+, +SPECIES+, +SUBSPECIES+, +VARIETY+ [max 255 characters]
[name_status] +A+ for accepted names, +S+ for synonyms (both types of names are taxon concepts in Species+) [max 255 characters]
[updated_at] timestamp of last update to the taxon concept in Species+
[active] if false, taxon concept has been deleted
[synonyms] list of synonyms (only for accepted names, i.e. name_status == A) [full_name, author_year and rank follow the same length constraints as respective properties of the main taxon concept]
[higher_taxa] object that gives scientific names of ancestors in the taxonomic tree (only for active accepted names) [higher taxa names follow the same length constraint as full_name of the main taxon concept]
[common_names] list of common names (with language given by ISO 639-1 code; only for accepted names) [name, language max 255 characters]
[cites_listing] value of current CITES listing (as per CITES Checklist). When taxon concept is removed from appendices this becomes +NC+. When taxon is split listed it becomes a concatenation of appendix symbols, e.g. +I/II/NC+ (only for accepted names) [max 255 characters]
[cites_listings] list of current CITES listings with annotations (there will be more than one element in this list in case of split listings; only for accepted names) [appendix max 255 characters; annotation, hash_annotation unlimited length]
[accepted_names] list of accepted names (only for synonyms, i.e. name_status == S) [full_name, author_year and rank follow the same length constraints as respective properties of the main taxon concept]
==== Note on deleted taxon concepts
In the event of removal from CITES appendices, a taxon is not deleted from the Species+ DB. As a historically listed taxon it remains in the database, and in most cases the value of current CITES listing becomes 'NC'. In some cases the value is affected by listed subspecies, e.g. <i>Pseudomys fieldi</i> has been removed from Appendix I, but since there is a listed subspecies, the current listing is 'I/NC'.
Taxon concepts are deleted from the Species+ database only on rare occasions when they have been entered by mistake. API consumers can detect this event by checking the value of the 'active' flag, which is set to false in case of deleted taxa.
==== Note on +updated_since+
The +updated_since+ parameter is intended to be used in order to allow API clients to only fetch taxon concepts updated since the last run of the synchronisation job. Those taxa will include:
- Taxa that have been newly added. This can be verified by checking against the list of existing identifiers.
- Taxa that have been deleted. This can be inferred by the value of the active flag.
- Taxa that have been updated, and that includes changes to the taxon concept record as well as linked records (names, distribution, references, CITES or EU legislation). There is no way to infer which piece of information was changed and in such cases a full re-synchronisation of all information on that taxon concept is recommended.
==== Note on pagination
Where more than 500 taxon concepts are returned, the request is paginated, showing 500 objects (or less by passing in an optional 'per_page' parameter) at a time. To fetch the remaining objects, you will need to make a new request and pass the optional ‘page’ parameter as below:
http://api.speciesplus.net/api/v1/taxon_concepts?page=2&per_page=25
Information about the remaining pages is provided in the Link header of the API response. For example, making the above request for page two, with a limit of 25 objects per page would return the following in the link header along with a total-count header:
Link: <http://api.speciesplus.net/api/v1/taxon_concepts?page=3&per_page=25>; rel="next", <http://api.speciesplus.net/api/v1/taxon_concepts?page=2570&per_page=25>; rel="last"
Total-Count: 64230
If there are additional pages, the link header will contain the URL for the next page of results, followed by the URL for the last page of results. The Total-Count header shows the total number of objects returned for this call, regardless of pagination.
For convenience, a 'pagination' meta object is also included in the body of the response.
  EOS
  param :page, String, desc: 'Page number for paginated responses', required: false
  param :per_page, String, desc: 'Limit for how many objects returned per page for paginated responses. If not specified it will default to the maximum value of 500', required: false
  param :updated_since, String, desc: 'Pull only objects updated after (and including) the specified timestamp in ISO8601 format (UTC time).', required: false
  param :name, String, desc: 'Filter taxon concepts by name', required: false
  param :with_descendants, String, desc: 'Broadens the above search by name to include higher taxa. Value must be true or false', required: false
  param :taxonomy, String, desc: 'Filter taxon concepts by taxonomy, accepts either CITES or CMS as its value. Defaults to CITES if no value is specified', required: false
  param :language, String, desc: 'Filter languages returned for common names. Value should be a single country code or a comma separated string of country codes (e.g. language=EN,PL,IT). Defaults to showing all available languages if no language parameter is specified', required: false
  example <<-EOS
{
"pagination":{
"current_page":1,
"per_page":500,
"total_entries":1
},
"taxon_concepts":[
{
"id":4521,
"full_name":"Loxodonta africana",
"author_year":"(Blumenbach, 1797)",
"rank":"SPECIES",
"name_status":"A",
"updated_at":"2014-12-11T15:39:51.620Z",
"active":true,
"cites_listing":"I/II",
"higher_taxa":{
"kingdom":"Animalia",
"phylum":"Chordata",
"class":"Mammalia",
"order":"Proboscidea",
"family":"Elephantidae"
},
"synonyms":[
{
"id":37069,
"full_name":"Loxodonta cyclotis",
"author_year":"(Matschie, 1900)",
"rank":"SPECIES"
}
],
"common_names":[
{
"name":"African Elephant",
"language":"EN"
},
{
"name":"African Savannah Elephant",
"language":"EN"
},
{
"name":"Eléphant d'Afrique",
"language":"FR"
},
{
"name":"Eléphant africain",
"language":"FR"
}
],
"cites_listings":[
{
"appendix":"II",
"annotation":"The populations of Botswana, Namibia, South Africa and Zimbabwe are listed in Appendix II for the exclusive purpose of allowing: [...]",
"hash_annotation":null
},
{
"appendix":"I",
"annotation":"Included in Appendix I, except the populations of Botswana, Namibia, South Africa and Zimbabwe, which are included in Appendix II.",
"hash_annotation":null
}
]
}
]
}
  EOS
  example <<-EOS
<?xml version="1.0" encoding="UTF-8"?>
<hash>
<pagination>
<current-page type="integer">1</current-page>
<per-page type="integer">500</per-page>
<total-entries type="integer">1</total-entries>
</pagination>
<taxon-concepts type="array">
<taxon-concept>
<id type="integer">4521</id>
<full-name>Loxodonta africana</full-name>
<author-year>(Blumenbach, 1797)</author-year>
<rank>SPECIES</rank>
<name-status>A</name-status>
<updated-at type="dateTime">2014-12-11T15:39:51Z</updated-at>
<active type="boolean">true</active>
<cites-listing>I/II</cites-listing>
<higher-taxa>
<kingdom>Animalia</kingdom>
<phylum>Chordata</phylum>
<class>Mammalia</class>
<order>Proboscidea</order>
<family>Elephantidae</family>
</higher-taxa>
<synonyms type="array">
<synonym>
<id type="integer">37069</id>
<full-name>Loxodonta cyclotis</full-name>
<author-year>(Matschie, 1900)</author-year>
<rank>SPECIES</rank>
</synonym>
</synonyms>
<common-names type="array">
<common-name>
<name>African Elephant</name>
<language>EN</language>
</common-name>
<common-name>
<name>African Savannah Elephant</name>
<language>EN</language>
</common-name>
<common-name>
<name>Eléphant d'Afrique</name>
<language>FR</language>
</common-name>
<common-name>
<name>Eléphant africain</name>
<language>FR</language>
</common-name>
</common-names>
<cites-listings type="array">
<cites-listing>
<appendix>II</appendix>
<annotation>The populations of Botswana, Namibia, South Africa and Zimbabwe are listed in Appendix II for the exclusive purpose of allowing: [...]</annotation>
<hash-annotation nil="true"/>
</cites-listing>
<cites-listing>
<appendix>I</appendix>
<annotation>Included in Appendix I, except the populations of Botswana, Namibia, South Africa and Zimbabwe, which are included in Appendix II.</annotation>
<hash-annotation nil="true"/>
</cites-listing>
</cites-listings>
</taxon-concept>
</taxon-concepts>
</hash>
  EOS
  error code: 400, desc: "Bad Request"
  error code: 401, desc: "Unauthorized"
  error code: 404, desc: "Not Found"
  error code: 422, desc: "Unprocessable Entity"
  error code: 500, desc: "Internal Server Error"

  # GET /api/v1/taxon_concepts
  # Paginated listing, optionally filtered by name (with or without
  # descendants), update timestamp and taxonomy (CITES/EU vs CMS).
  def index
    taxon_per_page = TaxonConcept.per_page
    # A client may shrink the page size but never exceed the default cap.
    new_per_page = params[:per_page] && params[:per_page].to_i < taxon_per_page ? params[:per_page] : taxon_per_page
    @taxon_concepts = TaxonConcept.
      select([
        :id, :full_name, :author_year, :name_status, :rank, :cites_listing,
        :higher_taxa, :synonyms, :accepted_names, :updated_at, :active
      ]).
      paginate(
        page: params[:page],
        per_page: new_per_page
      ).order(:taxonomic_position)
    if params[:with_descendants] == "true" && params[:name]
      # Match the name at any rank so descendants are included.
      @taxon_concepts = @taxon_concepts.where("lower(full_name) = :name
        OR lower(genus_name) = :name
        OR lower(family_name) = :name
        OR lower(order_name) = :name
        OR lower(class_name) = :name
        OR lower(phylum_name) = :name
        OR lower(kingdom_name) = :name
      ", name: params[:name].downcase)
    elsif params[:name]
      @taxon_concepts = @taxon_concepts.where("lower(full_name) = ?", params[:name].downcase)
    end
    if params[:updated_since]
      @taxon_concepts = @taxon_concepts.where("updated_at >= ?", params[:updated_since])
    end
    # Anything other than an explicit "cms" taxonomy means CITES/EU.
    taxonomy_is_cites_eu = if params[:taxonomy] && params[:taxonomy].downcase == 'cms'
      false
    else
      true
    end
    @taxon_concepts = @taxon_concepts.where(taxonomy_is_cites_eu: taxonomy_is_cites_eu)
    render 'api/v1/taxon_concepts/index'
  end

  private

  # Overrides method from parent controller.
  # Sets I18n.locale from the optional +language+ param (first entry of a
  # comma-separated list wins) and exposes @languages for common names.
  def set_language
    # Normalise BEFORE splitting: split(',').first is nil for ?language=
    # or ?language=, and the original .first.delete(' ') then raised
    # NoMethodError before the || 'en' fallback could apply.
    language = params[:language].to_s.downcase.delete(' ').split(',').first || 'en'
    I18n.locale = if ['en', 'es', 'fr'].include?(language)
      language
    else
      I18n.default_locale
    end
    @languages = params[:language].delete(' ').split(',').map! { |lang| lang.upcase } unless params[:language].nil?
  end

  # Whitelist of query-string parameters this endpoint accepts.
  def permitted_params
    [
      :page, :per_page, :updated_since, :name,
      :with_descendants, :taxonomy, :language, :format
    ]
  end

  # Runs the per-parameter validators and cross-parameter rules, recording
  # a 400/422 API error (which halts the action) on failure.
  def validate_params
    super()
    [
      :updated_since,
      :page,
      :per_page,
      :with_descendants
    ].each do |param|
      unless send(:"validate_#{param}_format")
        track_api_error("Invalid parameter format: #{param}", 400) and return
      end
    end
    # \A...\z anchors the WHOLE string; ^...$ match per line, so a
    # multi-line value such as "evil\ncites" would have passed.
    if params[:taxonomy].present? && !(/\A(cms|cites)\z/.match(params[:taxonomy].downcase))
      track_api_error("Unknown taxonomy: #{params[:taxonomy]}", 422) and return
    end
    if params[:with_descendants] == 'true' && params[:name].blank?
      track_api_error("Invalid use of with_descendants", 422) and return
    end
  end

  # True when updated_since is absent or its YYYY-MM-DD part is a real
  # calendar date (missing components coerce to 0 and fail the check).
  def validate_updated_since_format
    return true unless params[:updated_since]
    y, m, d = params[:updated_since].split('-')
    Date.valid_date? y.to_i, m.to_i, d.to_i
  end

  # Digits only; \z (not \Z) also rejects a trailing newline.
  def validate_page_format
    return true unless params[:page]
    /\A\d+\z/.match(params[:page])
  end

  # Digits only; \z (not \Z) also rejects a trailing newline.
  def validate_per_page_format
    return true unless params[:per_page]
    /\A\d+\z/.match(params[:per_page])
  end

  # Exactly "true" or "false" over the whole string.
  def validate_with_descendants_format
    return true unless params[:with_descendants]
    /\A(true|false)\z/.match(params[:with_descendants])
  end
end
|
class Carto::Api::PermissionsController < ::Api::ApplicationController
  ssl_required :update

  # Replaces the ACL of a permission owned by the current user and renders
  # the updated permission. 404 when unknown, 401 when not the owner,
  # 400 when the ACL payload is rejected.
  def update
    permission = Carto::Permission.where(id: params[:id]).first
    return head(404) if permission.nil?
    return head(401) unless permission.is_owner?(current_user)

    begin
      acl = params[:acl]
      acl ||= []
      permission.acl = acl.map(&:deep_symbolize_keys)
    rescue CartoDB::PermissionError => e
      CartoDB::Logger.error(exception: e)
      return head(400)
    end
    # save! instead of save: the original discarded the return value, so a
    # failed save still rendered the stale presenter with HTTP 200.
    permission.save!
    render json: Carto::Api::PermissionPresenter.new(permission,
      current_viewer: current_viewer, fetch_user_groups: true).to_poro
  end
end
Better saving of permissions
class Carto::Api::PermissionsController < ::Api::ApplicationController
  include Carto::ControllerHelper
  extend Carto::DefaultRescueFroms

  ssl_required :update

  # Replaces the ACL of a permission owned by the current user and renders
  # the updated permission. 404 when unknown, 401 when not the owner,
  # 400 when the ACL payload is rejected.
  def update
    permission = Carto::Permission.where(id: params[:id]).first
    return head(404) if permission.nil?
    return head(401) unless permission.is_owner?(current_user)

    begin
      entries = params[:acl] || []
      permission.acl = entries.map(&:deep_symbolize_keys)
    rescue CartoDB::PermissionError => e
      CartoDB::Logger.error(exception: e)
      return head(400)
    end
    permission.save!

    presenter = Carto::Api::PermissionPresenter.new(
      permission, current_viewer: current_viewer, fetch_user_groups: true
    )
    render json: presenter.to_poro
  end
end
|
# This controller mostly will be used in development mode.
# In the production mode, serving files from GridFS should
# be done through Nginx GridFS module.
require "mongo"
module ControlCenter
  # Streams files out of MongoDB GridFS; any lookup/read failure is
  # collapsed into an empty 404 response.
  class GridfsController < ActionController::Metal
    def serve
      # sub, not gsub: only the leading "/gridfs/" route prefix should be
      # stripped; gsub also removed any later occurrence of "/gridfs/"
      # inside the stored file path, mangling such lookups.
      gridfs_path = env["PATH_INFO"].sub("/gridfs/", "")
      begin
        gridfs_file = Mongo::GridFileSystem.new(Mongoid.database).open(gridfs_path, 'r')
        self.response_body = gridfs_file.read
        self.content_type = gridfs_file.content_type
      rescue
        # NOTE(review): ActionController::Metal may expect an integer
        # status (404) rather than a symbol — confirm on this Rails
        # version.
        self.status = :not_found
        self.content_type = 'text/plain'
        self.response_body = ''
      end
    end
  end
end
gridfs_controller no longer needed (replaced by rack-gridfs).
|
class Legislation::ProcessesController < Legislation::BaseController
  has_filters %w{open next past}, only: :index

  load_and_authorize_resource :process

  # Paginated listing of published processes for the selected filter.
  def index
    @current_filter ||= 'open'
    @processes = ::Legislation::Process.send(@current_filter).published.page(params[:page])
  end

  # Sends the visitor to the most relevant enabled phase of the process.
  def show
    draft_version = @process.draft_versions.published.last
    if @process.allegations_phase.enabled? && @process.allegations_phase.started? && draft_version.present?
      redirect_to legislation_process_draft_version_path(@process, draft_version)
    elsif @process.debate_phase.enabled?
      redirect_to debate_legislation_process_path(@process)
    elsif @process.proposals_phase.enabled?
      redirect_to proposals_legislation_process_path(@process)
    else
      redirect_to allegations_legislation_process_path(@process)
    end
  end

  def debate
    set_process
    @phase = :debate_phase
    if @process.debate_phase.started?
      render :debate
    else
      render :phase_not_open
    end
  end

  def draft_publication
    set_process
    @phase = :draft_publication
    if @process.draft_publication.started?
      redirect_to_version_or_empty(@process.draft_versions.published.last)
    else
      render :phase_not_open
    end
  end

  def allegations
    set_process
    @phase = :allegations_phase
    if @process.allegations_phase.started?
      redirect_to_version_or_empty(@process.draft_versions.published.last)
    else
      render :phase_not_open
    end
  end

  def result_publication
    set_process
    @phase = :result_publication
    if @process.result_publication.started?
      redirect_to_version_or_empty(@process.final_draft_version)
    else
      render :phase_not_open
    end
  end

  def proposals
    set_process
    @phase = :proposals_phase
    if @process.proposals_phase.started?
      set_legislation_proposal_votes(@process.proposals)
      render :proposals
    else
      render :phase_not_open
    end
  end

  private

  def member_method?
    params[:id].present?
  end

  def set_process
    return if member_method?
    @process = ::Legislation::Process.find(params[:process_id])
  end

  # Shared tail of the draft-based phases (extracted from the three copies
  # in draft_publication/allegations/result_publication): redirect to the
  # given draft version, or render the "phase has no content yet" view.
  def redirect_to_version_or_empty(version)
    if version.present?
      redirect_to legislation_process_draft_version_path(@process, version)
    else
      render :phase_empty
    end
  end
end
Allow admins to see legislation debates and proposals index
class Legislation::ProcessesController < Legislation::BaseController
  has_filters %w{open next past}, only: :index

  load_and_authorize_resource :process

  # Paginated listing of published processes for the selected filter.
  def index
    @current_filter ||= 'open'
    @processes = ::Legislation::Process.send(@current_filter).published.page(params[:page])
  end

  # Sends the visitor to the most relevant enabled phase of the process.
  def show
    draft_version = @process.draft_versions.published.last
    if @process.allegations_phase.enabled? && @process.allegations_phase.started? && draft_version.present?
      redirect_to legislation_process_draft_version_path(@process, draft_version)
    elsif @process.debate_phase.enabled?
      redirect_to debate_legislation_process_path(@process)
    elsif @process.proposals_phase.enabled?
      redirect_to proposals_legislation_process_path(@process)
    else
      redirect_to allegations_legislation_process_path(@process)
    end
  end

  def debate
    set_process
    @phase = :debate_phase
    # try(:administrator?): current_user is nil for anonymous visitors, so
    # the plain current_user.administrator? raised NoMethodError whenever
    # the phase had not started yet.
    if @process.debate_phase.started? || current_user.try(:administrator?)
      render :debate
    else
      render :phase_not_open
    end
  end

  def draft_publication
    set_process
    @phase = :draft_publication
    if @process.draft_publication.started?
      redirect_to_version_or_empty(@process.draft_versions.published.last)
    else
      render :phase_not_open
    end
  end

  def allegations
    set_process
    @phase = :allegations_phase
    if @process.allegations_phase.started?
      redirect_to_version_or_empty(@process.draft_versions.published.last)
    else
      render :phase_not_open
    end
  end

  def result_publication
    set_process
    @phase = :result_publication
    if @process.result_publication.started?
      redirect_to_version_or_empty(@process.final_draft_version)
    else
      render :phase_not_open
    end
  end

  def proposals
    set_process
    @phase = :proposals_phase
    # try(:administrator?): guards against a nil current_user (see debate).
    if @process.proposals_phase.started? || current_user.try(:administrator?)
      set_legislation_proposal_votes(@process.proposals)
      render :proposals
    else
      render :phase_not_open
    end
  end

  private

  def member_method?
    params[:id].present?
  end

  def set_process
    return if member_method?
    @process = ::Legislation::Process.find(params[:process_id])
  end

  # Shared tail of the draft-based phases (extracted from the three copies
  # in draft_publication/allegations/result_publication): redirect to the
  # given draft version, or render the "phase has no content yet" view.
  def redirect_to_version_or_empty(version)
    if version.present?
      redirect_to legislation_process_draft_version_path(@process, version)
    else
      render :phase_empty
    end
  end
end
|
class MetricConfigurationsController < BaseMetricConfigurationsController
  # Loads the data needed for the "pick a metric" screen.
  def choose_metric
    @kalibro_configuration = KalibroConfiguration.find(params[:kalibro_configuration_id].to_i)
    @metric_configuration_id = params[:metric_configuration_id].to_i
    @metric_collectors_names = KalibroClient::Entities::Processor::MetricCollectorDetails.all_names
  end

  def new
    super
    # FIXME: find_by_name throws an exception instead of returning nil, unlike ActiveRecord's API
    metric_configuration.metric = KalibroClient::Entities::Processor::MetricCollectorDetails.find_by_name(params[:metric_collector_name]).find_metric_by_code params[:metric_code]
    @reading_groups = ReadingGroup.public_or_owned_by_user(current_user).map { |reading_group|
      [reading_group.name, reading_group.id]
    }
  end

  def create
    super
    @metric_configuration.metric = KalibroClient::Entities::Processor::MetricCollectorDetails.find_by_name(params[:metric_collector_name]).find_metric_by_name params[:metric_name]
    respond_to do |format|
      # Cache invalidation now happens inside create_and_redir, only after
      # a successful save — the original invalidated even when the save
      # failed, unlike the update action.
      create_and_redir(format)
    end
  end

  def edit
    # FIXME: set the configuration id just once!
    @kalibro_configuration_id = params[:kalibro_configuration_id]
    @metric_configuration.kalibro_configuration_id = @kalibro_configuration_id
    @reading_groups = ReadingGroup.public_or_owned_by_user(current_user).map { |reading_group|
      [reading_group.name, reading_group.id]
    }
  end

  def update
    respond_to do |format|
      @metric_configuration.kalibro_configuration_id = params[:kalibro_configuration_id]
      if @metric_configuration.update(metric_configuration_params)
        format.html { redirect_to(kalibro_configuration_path(@metric_configuration.kalibro_configuration_id), notice: t('successfully_updated', :record => t(metric_configuration.class))) }
        format.json { head :no_content }
        update_caches
      else
        failed_action(format, 'edit')
      end
    end
  end

  def destroy
    @metric_configuration.destroy
    respond_to do |format|
      format.html { redirect_to kalibro_configuration_path(params[:kalibro_configuration_id]) }
      format.json { head :no_content }
    end
    update_caches
  end

  protected

  def metric_configuration
    @metric_configuration
  end

  def update_metric_configuration(new_metric_configuration)
    @kalibro_configuration_id = params[:kalibro_configuration_id]
    @metric_configuration = new_metric_configuration
  end

  private

  # Drops the cached metric-configuration trees for this configuration.
  def update_caches
    Rails.cache.delete("#{params[:kalibro_configuration_id]}_tree_metric_configurations")
    Rails.cache.delete("#{params[:kalibro_configuration_id]}_hotspot_metric_configurations")
  end

  # FIXME: Duplicated code on create and update actions extracted here
  def failed_action(format, destiny_action)
    @kalibro_configuration_id = params[:kalibro_configuration_id]
    format.html { render action: destiny_action }
    format.json { render json: @metric_configuration.kalibro_errors, status: :unprocessable_entity }
  end

  # Code extracted from create action
  def create_and_redir(format)
    if @metric_configuration.save
      # Invalidate only once the record is actually persisted.
      update_caches
      format.html { redirect_to kalibro_configuration_path(@metric_configuration.kalibro_configuration_id), notice: t('successfully_created', :record => t(metric_configuration.class)) }
    else
      failed_action(format, 'new')
    end
  end
end
Rename update_caches to clear_caches for better semantics
Signed off by: Eduardo Araújo <duduktamg@hotmail.com>
class MetricConfigurationsController < BaseMetricConfigurationsController
  # Loads the data needed for the "pick a metric" screen.
  def choose_metric
    @kalibro_configuration = KalibroConfiguration.find(params[:kalibro_configuration_id].to_i)
    @metric_configuration_id = params[:metric_configuration_id].to_i
    @metric_collectors_names = KalibroClient::Entities::Processor::MetricCollectorDetails.all_names
  end

  def new
    super
    # FIXME: find_by_name throws an exception instead of returning nil, unlike ActiveRecord's API
    metric_configuration.metric = KalibroClient::Entities::Processor::MetricCollectorDetails.find_by_name(params[:metric_collector_name]).find_metric_by_code params[:metric_code]
    @reading_groups = ReadingGroup.public_or_owned_by_user(current_user).map { |reading_group|
      [reading_group.name, reading_group.id]
    }
  end

  def create
    super
    @metric_configuration.metric = KalibroClient::Entities::Processor::MetricCollectorDetails.find_by_name(params[:metric_collector_name]).find_metric_by_name params[:metric_name]
    respond_to do |format|
      # Cache invalidation now happens inside create_and_redir, only after
      # a successful save — the original invalidated even when the save
      # failed, unlike the update action.
      create_and_redir(format)
    end
  end

  def edit
    # FIXME: set the configuration id just once!
    @kalibro_configuration_id = params[:kalibro_configuration_id]
    @metric_configuration.kalibro_configuration_id = @kalibro_configuration_id
    @reading_groups = ReadingGroup.public_or_owned_by_user(current_user).map { |reading_group|
      [reading_group.name, reading_group.id]
    }
  end

  def update
    respond_to do |format|
      @metric_configuration.kalibro_configuration_id = params[:kalibro_configuration_id]
      if @metric_configuration.update(metric_configuration_params)
        format.html { redirect_to(kalibro_configuration_path(@metric_configuration.kalibro_configuration_id), notice: t('successfully_updated', :record => t(metric_configuration.class))) }
        format.json { head :no_content }
        clear_caches
      else
        failed_action(format, 'edit')
      end
    end
  end

  def destroy
    @metric_configuration.destroy
    respond_to do |format|
      format.html { redirect_to kalibro_configuration_path(params[:kalibro_configuration_id]) }
      format.json { head :no_content }
    end
    clear_caches
  end

  protected

  def metric_configuration
    @metric_configuration
  end

  def update_metric_configuration(new_metric_configuration)
    @kalibro_configuration_id = params[:kalibro_configuration_id]
    @metric_configuration = new_metric_configuration
  end

  private

  # Drops the cached metric-configuration trees for this configuration.
  def clear_caches
    Rails.cache.delete("#{params[:kalibro_configuration_id]}_tree_metric_configurations")
    Rails.cache.delete("#{params[:kalibro_configuration_id]}_hotspot_metric_configurations")
  end

  # FIXME: Duplicated code on create and update actions extracted here
  def failed_action(format, destiny_action)
    @kalibro_configuration_id = params[:kalibro_configuration_id]
    format.html { render action: destiny_action }
    format.json { render json: @metric_configuration.kalibro_errors, status: :unprocessable_entity }
  end

  # Code extracted from create action
  def create_and_redir(format)
    if @metric_configuration.save
      # Invalidate only once the record is actually persisted.
      clear_caches
      format.html { redirect_to kalibro_configuration_path(@metric_configuration.kalibro_configuration_id), notice: t('successfully_created', :record => t(metric_configuration.class)) }
    else
      failed_action(format, 'new')
    end
  end
end
|
class ResourceImportFilesController < ApplicationController
  before_action :set_resource_import_file, only: [:show, :edit, :update, :destroy]
  before_action :check_policy, only: [:index, :new, :create]
  before_action :prepare_options, only: [:new, :edit]

  # GET /resource_import_files
  # GET /resource_import_files.json
  def index
    @resource_import_files = ResourceImportFile.order(created_at: :desc).page(params[:page])
    respond_to do |format|
      format.html # index.html.erb
      format.json { render json: @resource_import_files }
    end
  end

  # GET /resource_import_files/1
  # GET /resource_import_files/1.json
  def show
    respond_to do |format|
      format.html # show.html.erb
      format.json { render json: @resource_import_file }
      format.download {
        # filename:, not fileename: — the misspelt key was silently
        # ignored by send_data, so downloads lost their original name.
        send_data @resource_import_file.resource_import.download, filename: @resource_import_file.resource_import.filename.to_s, type: 'application/octet-stream'
      }
    end
  end

  # GET /resource_import_files/new
  # GET /resource_import_files/new.json
  def new
    @resource_import_file = ResourceImportFile.new
    @resource_import_file.library_id = current_user.profile.library_id
    respond_to do |format|
      format.html # new.html.erb
      format.json { render json: @resource_import_file }
    end
  end

  # GET /resource_import_files/1/edit
  def edit
  end

  # POST /resource_import_files
  # POST /resource_import_files.json
  def create
    @resource_import_file = ResourceImportFile.new(resource_import_file_params)
    @resource_import_file.user = current_user
    respond_to do |format|
      if @resource_import_file.save
        # Kick off the background import immediately for "import" mode.
        if @resource_import_file.mode == 'import'
          ResourceImportFileJob.perform_later(@resource_import_file)
        end
        format.html { redirect_to @resource_import_file, notice: t('import.successfully_created', model: t('activerecord.models.resource_import_file')) }
        format.json { render json: @resource_import_file, status: :created, location: @resource_import_file }
      else
        prepare_options
        format.html { render action: "new" }
        format.json { render json: @resource_import_file.errors, status: :unprocessable_entity }
      end
    end
  end

  # PUT /resource_import_files/1
  # PUT /resource_import_files/1.json
  def update
    respond_to do |format|
      if @resource_import_file.update(resource_import_file_params)
        if @resource_import_file.mode == 'import'
          ResourceImportFileJob.perform_later(@resource_import_file)
        end
        format.html { redirect_to @resource_import_file, notice: t('controller.successfully_updated', model: t('activerecord.models.resource_import_file')) }
        format.json { head :no_content }
      else
        prepare_options
        format.html { render action: "edit" }
        format.json { render json: @resource_import_file.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /resource_import_files/1
  # DELETE /resource_import_files/1.json
  def destroy
    @resource_import_file.destroy
    respond_to do |format|
      format.html { redirect_to resource_import_files_url, notice: t('controller.successfully_deleted', model: t('activerecord.models.resource_import_file')) }
      format.json { head :no_content }
    end
  end

  private

  def set_resource_import_file
    @resource_import_file = ResourceImportFile.find(params[:id])
    authorize @resource_import_file
  end

  def check_policy
    authorize ResourceImportFile
  end

  def resource_import_file_params
    params.require(:resource_import_file).permit(
      :resource_import, :edit_mode, :user_encoding, :mode,
      :default_shelf_id, :library_id
    )
  end

  # Builds the library/shelf select options, preferring the file's own
  # library and falling back to the current user's.
  def prepare_options
    @libraries = Library.all
    library = Library.where(id: @resource_import_file.try(:library_id)).first
    if library
      @shelves = library.shelves
    else
      @shelves = current_user.profile.library.try(:shelves)
    end
  end
end
retrieve import results
class ResourceImportFilesController < ApplicationController
before_action :set_resource_import_file, only: [:show, :edit, :update, :destroy]
before_action :check_policy, only: [:index, :new, :create]
before_action :prepare_options, only: [:new, :edit]
# GET /resource_import_files
# GET /resource_import_files.json
# GET /resource_import_files(.json) — newest uploads first, paginated.
def index
  @resource_import_files = ResourceImportFile.order(created_at: :desc).page(params[:page])
  respond_to do |format|
    format.html
    format.json { render json: @resource_import_files }
  end
end
# GET /resource_import_files/1
# GET /resource_import_files/1.json
# GET /resource_import_files/1(.json)
# Also exposes the paginated per-row import results for the detail view.
def show
  @resource_import_results = @resource_import_file.resource_import_results.page(params[:page])
  respond_to do |format|
    format.html # show.html.erb
    format.json { render json: @resource_import_file }
    format.download {
      # filename:, not fileename: — the misspelt key was silently ignored
      # by send_data, so downloads lost their original file name.
      send_data @resource_import_file.resource_import.download, filename: @resource_import_file.resource_import.filename.to_s, type: 'application/octet-stream'
    }
  end
end
# GET /resource_import_files/new
# GET /resource_import_files/new.json
def new
@resource_import_file = ResourceImportFile.new
@resource_import_file.library_id = current_user.profile.library_id
respond_to do |format|
format.html # new.html.erb
format.json { render json: @resource_import_file }
end
end
# GET /resource_import_files/1/edit
def edit
end
# POST /resource_import_files
# POST /resource_import_files.json
def create
@resource_import_file = ResourceImportFile.new(resource_import_file_params)
@resource_import_file.user = current_user
respond_to do |format|
if @resource_import_file.save
if @resource_import_file.mode == 'import'
ResourceImportFileJob.perform_later(@resource_import_file)
end
format.html { redirect_to @resource_import_file, notice: t('import.successfully_created', model: t('activerecord.models.resource_import_file')) }
format.json { render json: @resource_import_file, status: :created, location: @resource_import_file }
else
prepare_options
format.html { render action: "new" }
format.json { render json: @resource_import_file.errors, status: :unprocessable_entity }
end
end
end
# PUT /resource_import_files/1
# PUT /resource_import_files/1.json
def update
respond_to do |format|
if @resource_import_file.update(resource_import_file_params)
if @resource_import_file.mode == 'import'
ResourceImportFileJob.perform_later(@resource_import_file)
end
format.html { redirect_to @resource_import_file, notice: t('controller.successfully_updated', model: t('activerecord.models.resource_import_file')) }
format.json { head :no_content }
else
prepare_options
format.html { render action: "edit" }
format.json { render json: @resource_import_file.errors, status: :unprocessable_entity }
end
end
end
# DELETE /resource_import_files/1
# DELETE /resource_import_files/1.json
def destroy
@resource_import_file.destroy
respond_to do |format|
format.html { redirect_to resource_import_files_url, notice: t('controller.successfully_deleted', model: t('activerecord.models.resource_import_file')) }
format.json { head :no_content }
end
end
private
def set_resource_import_file
@resource_import_file = ResourceImportFile.find(params[:id])
authorize @resource_import_file
end
def check_policy
authorize ResourceImportFile
end
def resource_import_file_params
params.require(:resource_import_file).permit(
:resource_import, :edit_mode, :user_encoding, :mode,
:default_shelf_id, :library_id
)
end
def prepare_options
@libraries = Library.all
library = Library.where(id: @resource_import_file.try(:library_id)).first
if library
@shelves = library.shelves
else
@shelves = current_user.profile.library.try(:shelves)
end
end
end
|
# Base controller for the Sprangular JSON store API. Centralizes the error
# responses (invalid resource, unauthorized, not found) and the shared
# authorization check.
class Sprangular::Store::BaseController < Spree::StoreController
  rescue_from ActiveRecord::RecordNotFound, with: :not_found

  respond_to :json

  # Renders the 422 "invalid resource" error payload, exposing the failed
  # resource to the template as @resource.
  def invalid_resource!(resource)
    @resource = resource
    render_error "invalid", 422
  end

  # 401 response used when no store user is signed in.
  def unauthorized
    render_error "unauthorized", 401
  end

  # 404 response; also the rescue handler for ActiveRecord::RecordNotFound.
  def not_found
    render_error "not_found", 404
  end

  protected

  # before_action hook for subclasses: requires a signed-in store user.
  def check_authorization
    @user = current_spree_user
    unauthorized unless @user
  end

  private

  # Shared renderer for the store error templates.
  def render_error(template, status)
    render "store/errors/#{template}", status: status
  end
end
Added ControllerHelpers::Order to BaseController
# Base controller for the Sprangular JSON store API. Inherits from the
# engine's ApplicationController and mixes in Spree's order helpers so
# subclasses can access the current order.
class Sprangular::Store::BaseController < Sprangular::ApplicationController
  include Spree::Core::ControllerHelpers::Order

  rescue_from ActiveRecord::RecordNotFound, with: :not_found

  respond_to :json

  # Renders the 422 "invalid resource" payload, exposing the failed
  # resource to the template as @resource.
  def invalid_resource!(resource)
    @resource = resource
    render "store/errors/invalid", status: 422
  end

  # 401 response used when no store user is signed in.
  def unauthorized
    render "store/errors/unauthorized", status: 401
  end

  # 404 response; also the rescue handler for ActiveRecord::RecordNotFound.
  def not_found
    render "store/errors/not_found", status: 404
  end

  protected

  # before_action hook for subclasses: requires a signed-in store user.
  def check_authorization
    @user = current_spree_user
    unauthorized unless @user
  end
end
|
User.class_eval do
  attr_accessible :tag_list, :occupation, :description, :organization, :city, :country, :birthday, :website

  # Profile-backed attributes, delegated so they can be read/assigned on
  # the user directly.
  delegate :description, :description=,
           :organization, :organization=,
           :city, :city=,
           :country, :country=,
           :website, :website=,
           to: :profile

  delegate_attributes :birthday, :birthday=,
                      :to => :profile

  # The integer `occupation` column indexes into this array.
  Occupation = [:select, :teacher, :scientist, :other]

  # Users who signed up directly or accepted their invitation.
  scope :registered, lambda {
    User.where("invited_by_id IS NULL or invitation_accepted_at is NOT NULL")
  }

  before_validation :fill_user_locale

  # Password validation only applies when CAS is configured.
  if Vish::Application.config.cas
    validates :password, :presence =>true, :confirmation =>true, length: { minimum: Devise.password_length.min, maximum: Devise.password_length.max }, :on=>:create
  end

  devise :omniauthable, omniauth_providers: %i[idm]

  validate :user_locale

  # Validation: language must be present and one of the available locales.
  def user_locale
    if !self.language.blank? and I18n.available_locales.include?(self.language.to_sym)
      true
    else
      errors[:base] << "User without language"
    end
  end

  belongs_to :private_student_group
  has_one :private_teacher, class_name: "Actor", through: :private_student_group
  has_and_belongs_to_many :courses

  before_destroy :destroy_user_resources

  # Occupation as a symbol; :select when the column is unset.
  def occupation_sym
    if occupation
      Occupation[occupation]
    else
      :select
    end
  end

  # Localized occupation label.
  def occupation_t
    I18n.t "profile.occupation.options.#{occupation_sym}"
  end

  def description
    profile.description
  end

  # True when this actor owns a ServicePermission with the given key.
  def has_permission(perm_key)
    ServicePermission.where(:owner_id => actor_id, :key => perm_key).count > 0
  end

  # Finds or creates a user from an OmniAuth (IdM / eIDAS) auth hash.
  # Returns the existing user matched by email, or a freshly created one
  # populated from the eIDAS profile when present.
  def self.from_omniauth(auth)
    # get user email — the eIDAS profile takes precedence over raw_info/info
    if auth["extra"] && auth["extra"]["raw_info"] && auth["extra"]["raw_info"]["eidas_profile"] && auth["extra"]["raw_info"]["eidas_profile"]["Email"]
      email = auth["extra"]["raw_info"]["eidas_profile"]["Email"].downcase
    elsif auth["extra"] && auth["extra"]["raw_info"] && auth["extra"]["raw_info"]["email"]
      email = auth["extra"]["raw_info"]["email"].downcase
    else
      email = auth["info"]["email"].downcase
    end
    user = find_by_email(email)
    if user
      return user
    else
      # EIDAS Case
      # does not exist in BBDD, create it
      u = User.new(provider: auth.provider, uid: auth.uid)
      u.email = email
      # Random password: the account authenticates via the external provider.
      u.password = Devise.friendly_token[0,20]
      u.provider = "idm"
      if auth["info"] && auth["info"]["name"]
        u.name = auth["info"]["name"]
      elsif auth["extra"] && auth["extra"]["raw_info"] && auth["extra"]["raw_info"]["eidas_profile"] && auth["extra"]["raw_info"]["eidas_profile"]["FirstName"]
        u.name = auth["extra"]["raw_info"]["eidas_profile"]["FirstName"] + " " + auth["extra"]["raw_info"]["eidas_profile"]["FamilyName"]
      elsif auth["extra"] && auth["extra"]["raw_info"] && auth["extra"]["raw_info"]["username"]
        u.name = auth["extra"]["raw_info"]["username"]
      else
        u.name = auth["info"]["name"]
      end
      # First save creates the user (and its profile) so the delegated
      # profile attributes below have a record to land on.
      u.save!
      if auth["extra"] && auth["extra"]["raw_info"] && auth["extra"]["raw_info"]["eidas_profile"]
        # birthday
        if auth["extra"]["raw_info"]["eidas_profile"]["DateOfBirth"]
          u.birthday = Date.parse(auth["extra"]["raw_info"]["eidas_profile"]["DateOfBirth"])
        end
        # city
        if auth["extra"]["raw_info"]["eidas_profile"]["PlaceOfBirth"]
          u.city = auth["extra"]["raw_info"]["eidas_profile"]["PlaceOfBirth"]
        end
        # country
        if auth["extra"]["raw_info"]["eidas_profile"]["CountryOfBirth"]
          u.country = Eid4u.getCountry(auth["extra"]["raw_info"]["eidas_profile"]["CountryOfBirth"])
        end
        # language — Spanish only for Spanish-born users, English otherwise
        if auth["extra"]["raw_info"]["eidas_profile"]["CountryOfBirth"] && auth["extra"]["raw_info"]["eidas_profile"]["CountryOfBirth"] == "ES"
          u.language = "es"
        else
          u.language = "en"
        end
        # organization
        if auth["extra"]["raw_info"]["eidas_profile"]["HomeInstitutionName"]
          u.organization = auth["extra"]["raw_info"]["eidas_profile"]["HomeInstitutionName"]
        elsif auth["extra"] && auth["extra"]["raw_info"] && auth["extra"]["raw_info"]["organizations"]
          u.organization = auth["extra"]["raw_info"]["organizations"].join(" ")
        end
        # tags — NOTE(review): `split(" ")` suggests getTagsFromIscedCode
        # returns a space-separated string; confirm against Eid4u.
        if auth["extra"]["raw_info"]["eidas_profile"]["FieldOfStudy"]
          u.tag_list = Eid4u.getTagsFromIscedCode(auth["extra"]["raw_info"]["eidas_profile"]["FieldOfStudy"])
          if u.tag_list.length > 0
            u.tag_list = u.tag_list.split(" ")
          else
            u.tag_list = [ "ETSIT" ]
          end
        end
      end
      # Second save persists the profile attributes set above.
      u.save!
      return u
    end
  end

  private

  # Defaults the language to the app's default locale when missing/invalid.
  def fill_user_locale
    self.language = I18n.default_locale.to_s unless (!self.language.blank? and I18n.available_locales.include?(self.language.to_sym))
  end

  # before_destroy: removes every resource the user authored or owns.
  def destroy_user_resources
    ActivityObject.authored_by(self).each do |ao|
      object = ao.object
      object.destroy unless object.nil?
    end
    ActivityObject.owned_by(self).each do |ao|
      object = ao.object
      object.destroy unless object.nil?
    end
  end
end
new tag if no tags
User.class_eval do
  attr_accessible :tag_list, :occupation, :description, :organization, :city, :country, :birthday, :website

  # Profile-backed attributes, delegated so they can be read/assigned on
  # the user directly.
  delegate :description, :description=,
           :organization, :organization=,
           :city, :city=,
           :country, :country=,
           :website, :website=,
           to: :profile

  delegate_attributes :birthday, :birthday=,
                      :to => :profile

  # The integer `occupation` column indexes into this array.
  Occupation = [:select, :teacher, :scientist, :other]

  # Users who signed up directly or accepted their invitation.
  scope :registered, lambda {
    User.where("invited_by_id IS NULL or invitation_accepted_at is NOT NULL")
  }

  before_validation :fill_user_locale

  # Password validation only applies when CAS is configured.
  if Vish::Application.config.cas
    validates :password, :presence =>true, :confirmation =>true, length: { minimum: Devise.password_length.min, maximum: Devise.password_length.max }, :on=>:create
  end

  devise :omniauthable, omniauth_providers: %i[idm]

  validate :user_locale

  # Validation: language must be present and one of the available locales.
  def user_locale
    if !self.language.blank? and I18n.available_locales.include?(self.language.to_sym)
      true
    else
      errors[:base] << "User without language"
    end
  end

  belongs_to :private_student_group
  has_one :private_teacher, class_name: "Actor", through: :private_student_group
  has_and_belongs_to_many :courses

  before_destroy :destroy_user_resources

  # Occupation as a symbol; :select when the column is unset.
  def occupation_sym
    if occupation
      Occupation[occupation]
    else
      :select
    end
  end

  # Localized occupation label.
  def occupation_t
    I18n.t "profile.occupation.options.#{occupation_sym}"
  end

  def description
    profile.description
  end

  # True when this actor owns a ServicePermission with the given key.
  def has_permission(perm_key)
    ServicePermission.where(:owner_id => actor_id, :key => perm_key).count > 0
  end

  # Finds or creates a user from an OmniAuth (IdM / eIDAS) auth hash.
  # Returns the existing user matched by email, or a freshly created one
  # populated from the eIDAS profile when present.
  def self.from_omniauth(auth)
    # get user email — the eIDAS profile takes precedence over raw_info/info
    if auth["extra"] && auth["extra"]["raw_info"] && auth["extra"]["raw_info"]["eidas_profile"] && auth["extra"]["raw_info"]["eidas_profile"]["Email"]
      email = auth["extra"]["raw_info"]["eidas_profile"]["Email"].downcase
    elsif auth["extra"] && auth["extra"]["raw_info"] && auth["extra"]["raw_info"]["email"]
      email = auth["extra"]["raw_info"]["email"].downcase
    else
      email = auth["info"]["email"].downcase
    end
    user = find_by_email(email)
    if user
      return user
    else
      # EIDAS Case
      # does not exist in BBDD, create it
      u = User.new(provider: auth.provider, uid: auth.uid)
      u.email = email
      # Random password: the account authenticates via the external provider.
      u.password = Devise.friendly_token[0,20]
      u.provider = "idm"
      if auth["info"] && auth["info"]["name"]
        u.name = auth["info"]["name"]
      elsif auth["extra"] && auth["extra"]["raw_info"] && auth["extra"]["raw_info"]["eidas_profile"] && auth["extra"]["raw_info"]["eidas_profile"]["FirstName"]
        u.name = auth["extra"]["raw_info"]["eidas_profile"]["FirstName"] + " " + auth["extra"]["raw_info"]["eidas_profile"]["FamilyName"]
      elsif auth["extra"] && auth["extra"]["raw_info"] && auth["extra"]["raw_info"]["username"]
        u.name = auth["extra"]["raw_info"]["username"]
      else
        u.name = auth["info"]["name"]
      end
      # First save creates the user (and its profile) so the delegated
      # profile attributes below have a record to land on.
      u.save!
      if auth["extra"] && auth["extra"]["raw_info"] && auth["extra"]["raw_info"]["eidas_profile"]
        # birthday
        if auth["extra"]["raw_info"]["eidas_profile"]["DateOfBirth"]
          u.birthday = Date.parse(auth["extra"]["raw_info"]["eidas_profile"]["DateOfBirth"])
        end
        # city
        if auth["extra"]["raw_info"]["eidas_profile"]["PlaceOfBirth"]
          u.city = auth["extra"]["raw_info"]["eidas_profile"]["PlaceOfBirth"]
        end
        # country
        if auth["extra"]["raw_info"]["eidas_profile"]["CountryOfBirth"]
          u.country = Eid4u.getCountry(auth["extra"]["raw_info"]["eidas_profile"]["CountryOfBirth"])
        end
        # language — Spanish only for Spanish-born users, English otherwise
        if auth["extra"]["raw_info"]["eidas_profile"]["CountryOfBirth"] && auth["extra"]["raw_info"]["eidas_profile"]["CountryOfBirth"] == "ES"
          u.language = "es"
        else
          u.language = "en"
        end
        # organization
        if auth["extra"]["raw_info"]["eidas_profile"]["HomeInstitutionName"]
          u.organization = auth["extra"]["raw_info"]["eidas_profile"]["HomeInstitutionName"]
        elsif auth["extra"] && auth["extra"]["raw_info"] && auth["extra"]["raw_info"]["organizations"]
          u.organization = auth["extra"]["raw_info"]["organizations"].join(" ")
        end
        # tags — fall back to the "Erasmus" tag whenever no ISCED-derived
        # tags are available. NOTE(review): `split(" ")` suggests
        # getTagsFromIscedCode returns a space-separated string; confirm
        # against Eid4u. Non-eIDAS users get no default tag at all —
        # confirm that is intended.
        if auth["extra"]["raw_info"]["eidas_profile"]["FieldOfStudy"]
          u.tag_list = Eid4u.getTagsFromIscedCode(auth["extra"]["raw_info"]["eidas_profile"]["FieldOfStudy"])
          if u.tag_list.length > 0
            u.tag_list = u.tag_list.split(" ")
          else
            u.tag_list = [ "Erasmus" ]
          end
        else
          u.tag_list = [ "Erasmus" ]
        end
      end
      # Second save persists the profile attributes set above.
      u.save!
      return u
    end
  end

  private

  # Defaults the language to the app's default locale when missing/invalid.
  def fill_user_locale
    self.language = I18n.default_locale.to_s unless (!self.language.blank? and I18n.available_locales.include?(self.language.to_sym))
  end

  # before_destroy: removes every resource the user authored or owns.
  def destroy_user_resources
    ActivityObject.authored_by(self).each do |ao|
      object = ao.object
      object.destroy unless object.nil?
    end
    ActivityObject.owned_by(self).each do |ao|
      object = ao.object
      object.destroy unless object.nil?
    end
  end
end
|
# A staged payload Metasploit Module that combines a stager payload Metasploit Module that downloads a staged payload
# Metasploit Module.
#
# The stager and stage payload must be compatible. A stager and stage are compatible if they share some subset of
# architectures and platforms.
class Metasploit::Cache::Payload::Staged::Class < ActiveRecord::Base
  #
  # Associations
  #

  # Stage payload Metasploit Module downloaded by {#payload_stager_instance}.
  belongs_to :payload_stage_instance,
             class_name: 'Metasploit::Cache::Payload::Stage::Instance',
             inverse_of: :payload_staged_classes

  # Stager payload Metasploit Module that exploit Metasploit Module runs on target system and which then downloads
  # {#payload_stage_instance stage payload Metasploit Module} to complete this staged payload Metasploit Module on the
  # target system.
  belongs_to :payload_stager_instance,
             class_name: 'Metasploit::Cache::Payload::Stager::Instance',
             inverse_of: :payload_staged_classes

  #
  # Attributes
  #

  # @!attribute payload_stage_instance_id
  #   Foreign key for {#payload_stage_instance}.
  #
  #   @return [Integer]

  # @!attribute payload_stager_instance_id
  #   Foreign key for {#payload_stager_instance}.
  #
  #   @return [Integer]

  #
  # Validations
  #

  #
  # Method Validations
  #

  validate :compatible_architectures
  validate :compatible_platforms

  #
  # Attribute Validations
  #

  validates :payload_stage_instance,
            presence: true
  validates :payload_stage_instance_id,
            uniqueness: {
                scope: :payload_stager_instance_id
            }
  validates :payload_stager_instance,
            presence: true

  #
  # Class Methods
  #

  # Binds combined bind values from subqueries onto the combined query's relation.
  #
  # @param relation [ActiveRecord::Relation] Relation that does not have bind_values for `Arel::Nodes::BindParam`s.
  # @param bind_values [Array<Array(ActiveRecord::ConnectionAdapters::Column, Object)>] Array of bind values
  #   (pairs of columns and values) for the combined query.  Must be in order of final query.
  # @return [ActiveRecord::Relation] a new relation with values bound.
  def self.bind_renumbered_bind_params(relation, bind_values)
    bind_values.reduce(relation) { |bound_relation, bind_value|
      bound_relation.bind(bind_value)
    }
  end

  # Renumbers the `Arel::Nodes::BindParam`s when combining subquery
  #
  # @param node [Arel::Nodes::Node, #grep] a Arel node that has `Arel::Nodes::BindParam` findable with `#grep`.
  # @param bind_values [Array<Array(ActiveRecord::ConnectionAdapters::Column, Object)>] Array of bind values
  #   (pairs of columns and values) for the combined query.  Must be in order of final query.
  # @param start [Integer] The starting index to look up in `bind_values`.
  # @return [Integer] the `start` for the next call to `renumber_bind_params`
  def self.renumber_bind_params(node, bind_values, start=0)
    index = start

    node.grep(Arel::Nodes::BindParam) do |bind_param|
      column = bind_values[index].first
      bind_param.replace connection.substitute_at(column, index)
      index += 1
    end

    index
  end

  #
  # Instance Methods
  #

  # @!method payload_stage_instance_id=(payload_stage_instance_id)
  #   Sets {#payload_stage_instance_id} and invalidates cached {#payload_stage_instance} so it is reloaded on next
  #   access.
  #
  #   @param payload_stage_instance_id [Integer]
  #   @return [void]

  # @!method payload_stager_instance_id=(payload_stager_instance_id)
  #   Sets {#payload_stager_instance_id} and invalidates cached {#payload_stager_instance} so it is reloaded on next
  #   access.
  #
  #   @param payload_stager_instance_id [Integer]
  #   @return [void]

  private

  # The intersection of {#payload_stage_instance} {Metasploit::Cache::Payload::Stage::Instance#architectures} and
  # {#payload_stager_instance} {Metasploit::Cache::Payload::Stager::Instance#architectures}.
  #
  # @return [ActiveRecord::Relation<Metasploit::Cache::Architecture>]
  # @return [nil] unless {#payload_stage_instance} and {#payload_stager_instance} are present
  def architectures
    # TODO replace with ActiveRecord::QueryMethods.none
    if payload_stage_instance && payload_stager_instance
      payload_stage_architectures = payload_stage_instance.architectures
      payload_stager_architectures = payload_stager_instance.architectures

      # @see https://github.com/rails/rails/commit/2e6625fb775783cdbc721391be18a073a5b9a9c8
      bind_values = payload_stage_architectures.bind_values + payload_stager_architectures.bind_values
      intersection = payload_stage_instance.architectures.intersect(payload_stager_instance.architectures)

      # Renumber the INTERSECT's left and right operands so bind indices run
      # sequentially across the combined query.
      [:left, :right].reduce(0) { |start, side|
        operand = intersection.send(side)
        self.class.renumber_bind_params(operand, bind_values, start)
      }

      architecture_table = Metasploit::Cache::Architecture.arel_table
      relation = Metasploit::Cache::Architecture.from(
          architecture_table.create_table_alias(intersection, architecture_table.name)
      )

      self.class.bind_renumbered_bind_params(relation, bind_values)
    end
  end

  # Validates that {#payload_stage_instance} and {#payload_stager_instance} have at least one
  # {Metasploit::Cache::Architecture} in common.
  #
  # @return [void]
  def compatible_architectures
    scope = architectures

    unless scope.nil?
      unless scope.exists?
        errors.add(:base, :incompatible_architectures)
      end
    end
  end

  # Validates that {#payload_stage_instance} and {#payload_stager_instance} have at least one
  # {Metasploit::Cache::Platform} in common.
  #
  # @return [void]
  def compatible_platforms
    arel = platforms_arel

    unless arel.nil?
      if Metasploit::Cache::Platform.find_by_sql(arel.take(1)).empty?
        errors.add(:base, :incompatible_platforms)
      end
    end
  end

  # @note Cannot return an `ActiveRecord::Relation<Metasploit::Cache::Platform>` because
  #   `Metasploit::Cache::Platform.from` can't take an AREL query containing a Common Table Expression (CTE) `WITH`
  #   clause.
  #
  # The nested set intersection of {#payload_stage_instance} {Metasploit::Cache::Payload::Stage::Instance#platforms} and
  # {#payload_stager_instance} {Metasploit::Cache::Payload::Stager::Instance#platforms}.
  #
  # @return [Arel::SelectManager] An AREL select that will return the platforms supported by this staged payload
  #   Metasploit Module.
  # @return [nil] unless {#payload_stage_instance} and {#payload_stager_instance} are present
  def platforms_arel
    # TODO replace with ActiveRecord::QueryMethods.none
    if payload_stage_instance && payload_stager_instance
      payload_stage_platforms_table = Arel::Table.new(:payload_stage_platforms)
      payload_stager_platforms_table = Arel::Table.new(:payload_stager_platforms)

      payload_stage_platforms_cte = Arel::Nodes::As.new(
          payload_stage_platforms_table,
          # @see https://github.com/rails/arel/issues/309
          Arel.sql("(#{payload_stage_instance.platforms.to_sql})")
      )
      payload_stager_platforms_cte = Arel::Nodes::As.new(
          payload_stager_platforms_table,
          # @see https://github.com/rails/arel/issues/309
          Arel.sql("(#{payload_stager_instance.platforms.to_sql})")
      )

      # A platform is shared when one side's platform is an (improper) nested-set
      # ancestor of the other side's, in either direction.
      union = subset_query(payload_stage_platforms_table, payload_stager_platforms_table).union(
          subset_query(payload_stager_platforms_table, payload_stage_platforms_table)
      )

      # union isn't a Arel::SelectManager, so it doesn't respond to `with` so can't use CTE.
      platforms_table = Metasploit::Cache::Platform.arel_table
      union_alias = platforms_table.create_table_alias(
          union,
          platforms_table.name
      )

      platforms_table.from(union_alias).project(
          platforms_table[Arel.star]
      ).with(
          payload_stage_platforms_cte,
          payload_stager_platforms_cte
      )
    end
  end

  # Returns AREL query for the element of the subset table that are (improper) subset of superset table when
  # `superset_table` and `subset_table` are aliases of the same nested set table.
  #
  # @param superset_table [Arel::Table] table that is the superset in the nested set
  # @param subset_table [Arel::Table] table that is the subset in the nested set
  # @return [Arel::SelectManager]
  def subset_query(superset_table, subset_table)
    subset_table.join(
        superset_table
    ).on(
        superset_table[:left].lteq(subset_table[:left]).and(
            superset_table[:right].gteq(subset_table[:right])
        )
    ).project(subset_table[Arel.star])
  end

  Metasploit::Concern.run(self)
end
Fix bind params and values for Payload::Staged::Class#compatible_platforms
MSP-12855
# A staged payload Metasploit Module that combines a stager payload Metasploit Module that downloads a staged payload
# Metasploit Module.
#
# The stager and stage payload must be compatible. A stager and stage are compatible if they share some subset of
# architectures and platforms.
class Metasploit::Cache::Payload::Staged::Class < ActiveRecord::Base
  #
  # Associations
  #

  # Stage payload Metasploit Module downloaded by {#payload_stager_instance}.
  belongs_to :payload_stage_instance,
             class_name: 'Metasploit::Cache::Payload::Stage::Instance',
             inverse_of: :payload_staged_classes

  # Stager payload Metasploit Module that exploit Metasploit Module runs on target system and which then downloads
  # {#payload_stage_instance stage payload Metasploit Module} to complete this staged payload Metasploit Module on the
  # target system.
  belongs_to :payload_stager_instance,
             class_name: 'Metasploit::Cache::Payload::Stager::Instance',
             inverse_of: :payload_staged_classes

  #
  # Attributes
  #

  # @!attribute payload_stage_instance_id
  #   Foreign key for {#payload_stage_instance}.
  #
  #   @return [Integer]

  # @!attribute payload_stager_instance_id
  #   Foreign key for {#payload_stager_instance}.
  #
  #   @return [Integer]

  #
  # Validations
  #

  #
  # Method Validations
  #

  validate :compatible_architectures
  validate :compatible_platforms

  #
  # Attribute Validations
  #

  validates :payload_stage_instance,
            presence: true
  validates :payload_stage_instance_id,
            uniqueness: {
                scope: :payload_stager_instance_id
            }
  validates :payload_stager_instance,
            presence: true

  #
  # Class Methods
  #

  # Binds combined bind values from subqueries onto the combined query's relation.
  #
  # @param relation [ActiveRecord::Relation] Relation that does not have bind_values for `Arel::Nodes::BindParam`s.
  # @param bind_values [Array<Array(ActiveRecord::ConnectionAdapters::Column, Object)>] Array of bind values
  #   (pairs of columns and values) for the combined query.  Must be in order of final query.
  # @return [ActiveRecord::Relation] a new relation with values bound.
  def self.bind_renumbered_bind_params(relation, bind_values)
    bind_values.reduce(relation) { |bound_relation, bind_value|
      bound_relation.bind(bind_value)
    }
  end

  # Renumbers the `Arel::Nodes::BindParam`s when combining subquery
  #
  # @param node [Arel::Nodes::Node, #grep] a Arel node that has `Arel::Nodes::BindParam` findable with `#grep`.
  # @param bind_values [Array<Array(ActiveRecord::ConnectionAdapters::Column, Object)>] Array of bind values
  #   (pairs of columns and values) for the combined query.  Must be in order of final query.
  # @param start [Integer] The starting index to look up in `bind_values`.
  # @return [Integer] the `start` for the next call to `renumber_bind_params`
  def self.renumber_bind_params(node, bind_values, start=0)
    index = start

    node.grep(Arel::Nodes::BindParam) do |bind_param|
      column = bind_values[index].first
      bind_param.replace connection.substitute_at(column, index)
      index += 1
    end

    index
  end

  #
  # Instance Methods
  #

  # @!method payload_stage_instance_id=(payload_stage_instance_id)
  #   Sets {#payload_stage_instance_id} and invalidates cached {#payload_stage_instance} so it is reloaded on next
  #   access.
  #
  #   @param payload_stage_instance_id [Integer]
  #   @return [void]

  # @!method payload_stager_instance_id=(payload_stager_instance_id)
  #   Sets {#payload_stager_instance_id} and invalidates cached {#payload_stager_instance} so it is reloaded on next
  #   access.
  #
  #   @param payload_stager_instance_id [Integer]
  #   @return [void]

  private

  # The intersection of {#payload_stage_instance} {Metasploit::Cache::Payload::Stage::Instance#architectures} and
  # {#payload_stager_instance} {Metasploit::Cache::Payload::Stager::Instance#architectures}.
  #
  # @return [ActiveRecord::Relation<Metasploit::Cache::Architecture>]
  # @return [nil] unless {#payload_stage_instance} and {#payload_stager_instance} are present
  def architectures
    # TODO replace with ActiveRecord::QueryMethods.none
    if payload_stage_instance && payload_stager_instance
      payload_stage_architectures = payload_stage_instance.architectures
      payload_stager_architectures = payload_stager_instance.architectures

      # @see https://github.com/rails/rails/commit/2e6625fb775783cdbc721391be18a073a5b9a9c8
      bind_values = payload_stage_architectures.bind_values + payload_stager_architectures.bind_values
      intersection = payload_stage_instance.architectures.intersect(payload_stager_instance.architectures)

      # Renumber the INTERSECT's left and right operands so bind indices run
      # sequentially across the combined query.
      [:left, :right].reduce(0) { |start, side|
        operand = intersection.send(side)
        self.class.renumber_bind_params(operand, bind_values, start)
      }

      architecture_table = Metasploit::Cache::Architecture.arel_table
      relation = Metasploit::Cache::Architecture.from(
          architecture_table.create_table_alias(intersection, architecture_table.name)
      )

      self.class.bind_renumbered_bind_params(relation, bind_values)
    end
  end

  # Validates that {#payload_stage_instance} and {#payload_stager_instance} have at least one
  # {Metasploit::Cache::Architecture} in common.
  #
  # @return [void]
  def compatible_architectures
    scope = architectures

    unless scope.nil?
      unless scope.exists?
        errors.add(:base, :incompatible_architectures)
      end
    end
  end

  # Validates that {#payload_stage_instance} and {#payload_stager_instance} have at least one
  # {Metasploit::Cache::Platform} in common.
  #
  # @return [void]
  def compatible_platforms
    arel_and_bind_values = platforms_arel_and_bind_values

    unless arel_and_bind_values.nil?
      arel, bind_values = arel_and_bind_values

      # find_by_sql is passed the bind values explicitly because the AREL
      # select was built outside ActiveRecord's relation machinery.
      if Metasploit::Cache::Platform.find_by_sql(arel.take(1), bind_values).empty?
        errors.add(:base, :incompatible_platforms)
      end
    end
  end

  # @note Cannot return an `ActiveRecord::Relation<Metasploit::Cache::Platform>` because
  #   `Metasploit::Cache::Platform.from` can't take an AREL query containing a Common Table Expression (CTE) `WITH`
  #   clause.
  #
  # The nested set intersection of {#payload_stage_instance} {Metasploit::Cache::Payload::Stage::Instance#platforms} and
  # {#payload_stager_instance} {Metasploit::Cache::Payload::Stager::Instance#platforms}.
  #
  # @return [Array(Arel::SelectManager, Array<Array(ActiveRecord::ConnectionAdapters::Column, Object)>)] An AREL select
  #   that will return the platforms supported by this staged payload Metasploit Module along with the bind values for
  #   any `Arel::Nodes::BindParam`s in the `Arel::SelectManager`.
  # @return [nil] unless {#payload_stage_instance} and {#payload_stager_instance} are present
  def platforms_arel_and_bind_values
    # TODO replace with ActiveRecord::QueryMethods.none
    if payload_stage_instance && payload_stager_instance
      payload_stage_platforms_table = Arel::Table.new(:payload_stage_platforms)
      payload_stager_platforms_table = Arel::Table.new(:payload_stager_platforms)

      payload_stage_platforms_relation = payload_stage_instance.platforms
      payload_stager_platforms_relation = payload_stager_instance.platforms

      # Bind values must be collected in the same order the CTEs appear in the
      # final query (stage first, then stager).
      bind_values = payload_stage_platforms_relation.bind_values + payload_stager_platforms_relation.bind_values

      payload_stage_platforms_cte = Arel::Nodes::As.new(
          payload_stage_platforms_table,
          payload_stage_platforms_relation.arel
      )
      start = self.class.renumber_bind_params(payload_stage_platforms_cte.right.ast, bind_values)
      payload_stager_platforms_cte = Arel::Nodes::As.new(
          payload_stager_platforms_table,
          payload_stager_platforms_relation.arel
      )
      self.class.renumber_bind_params(payload_stager_platforms_cte.right.ast, bind_values, start)

      # A platform is shared when one side's platform is an (improper) nested-set
      # ancestor of the other side's, in either direction.
      union = subset_query(payload_stage_platforms_table, payload_stager_platforms_table).union(
          subset_query(payload_stager_platforms_table, payload_stage_platforms_table)
      )

      # union isn't a Arel::SelectManager, so it doesn't respond to `with` so can't use CTE.
      platforms_table = Metasploit::Cache::Platform.arel_table
      union_alias = platforms_table.create_table_alias(
          union,
          platforms_table.name
      )

      arel = platforms_table.from(union_alias).project(
          platforms_table[Arel.star]
      ).with(
          payload_stage_platforms_cte,
          payload_stager_platforms_cte
      )

      [arel, bind_values]
    end
  end

  # Returns AREL query for the element of the subset table that are (improper) subset of superset table when
  # `superset_table` and `subset_table` are aliases of the same nested set table.
  #
  # @param superset_table [Arel::Table] table that is the superset in the nested set
  # @param subset_table [Arel::Table] table that is the subset in the nested set
  # @return [Arel::SelectManager]
  def subset_query(superset_table, subset_table)
    subset_table.join(
        superset_table
    ).on(
        superset_table[:left].lteq(subset_table[:left]).and(
            superset_table[:right].gteq(subset_table[:right])
        )
    ).project(subset_table[Arel.star])
  end

  Metasploit::Concern.run(self)
end
|
##
# Cookbook Name:: my_cookbook
# Recipe:: default
#
# Copyright 2014, YOUR_COMPANY_NAME
#
# All rights reserved - Do Not Redistribute
#
# Per-application deploy setup: storage dirs, mcrypt, and composer vendors.
node[:deploy].each do |application, deploy|
  app_root = "#{deploy[:deploy_to]}/current/storage"

  directory app_root do
    owner 'deploy'
    group 'www-data'
    mode '0775'
    recursive true
  end

  # Laravel needs the framework cache and log directories writable.
  execute "chmo-775" do
    command "chmod -R 775 #{deploy[:deploy_to]}/current/storage/framework; chmod 775 #{deploy[:deploy_to]}/current/storage/logs"
    action :run
  end

  # Add php5-mcrypt to cli/conf.d.
  # Fixed: the original body was a bare shell line (`sudo ln -s ...`), which
  # is not valid Ruby inside an execute block — the shell command must be
  # passed via the `command` attribute. sudo is unnecessary: Chef already
  # runs as root. The not_if guard keeps the resource idempotent (ln -s
  # fails when the link already exists).
  execute "add mcrypt symlink" do
    command "ln -s /etc/php5/mods-available/mcrypt.ini /etc/php5/cli/conf.d/20-mcrypt.ini"
    action :run
    not_if { ::File.exist?("/etc/php5/cli/conf.d/20-mcrypt.ini") }
  end

  # install composer.
  include_recipe "composer::default"

  directory "/root/.composer" do
    mode '775'
    action :create
  end

  # Credentials for private composer repositories.
  template "/root/.composer/auth.json" do
    source "composer_auth_json.erb"
  end

  # install project vendors
  composer_project "#{deploy[:deploy_to]}/current" do
    # dev "node[:composer][:dev]"
    dev true
    quiet true
    prefer_dist false
    action :update
  end
end
add mcrypt symlink
##
# Cookbook Name:: my_cookbook
# Recipe:: default
#
# Copyright 2014, YOUR_COMPANY_NAME
#
# All rights reserved - Do Not Redistribute
#
# Per-application deploy setup: storage dirs, mcrypt, and composer vendors.
node[:deploy].each do |application, deploy|
  app_root = "#{deploy[:deploy_to]}/current/storage"

  directory app_root do
    owner 'deploy'
    group 'www-data'
    mode '0775'
    recursive true
  end

  # Laravel needs the framework cache and log directories writable.
  execute "chmo-775" do
    command "chmod -R 775 #{deploy[:deploy_to]}/current/storage/framework; chmod 775 #{deploy[:deploy_to]}/current/storage/logs"
    action :run
  end

  # Add php5-mcrypt to cli/conf.d; guarded so the resource is idempotent.
  execute "add mcrypt symlink" do
    command "ln -s /etc/php5/mods-available/mcrypt.ini /etc/php5/cli/conf.d/20-mcrypt.ini"
    action :run
    # Fixed: File.exists? is a deprecated alias (removed in Ruby 3.2);
    # File.exist? is the supported spelling.
    not_if { ::File.exist?("/etc/php5/cli/conf.d/20-mcrypt.ini") }
  end

  # install composer.
  include_recipe "composer::default"

  directory "/root/.composer" do
    mode '775'
    action :create
  end

  # Credentials for private composer repositories.
  template "/root/.composer/auth.json" do
    source "composer_auth_json.erb"
  end

  # install project vendors
  composer_project "#{deploy[:deploy_to]}/current" do
    # dev "node[:composer][:dev]"
    dev true
    quiet true
    prefer_dist false
    action :update
  end
end
|
set :application, project

# Cleanup — keep only the three most recent releases on the server.
set :keep_releases, 3

# Composer configuration
set :use_composer, true
set :copy_vendors, true

# Git configuration
set :scm, :git

# Maintenance page
set :maintenance_template_path, "deployment/maintenance.erb"

# Logging
#logger.level = Logger::DEBUG

# Shared files — symlinked into each release from the shared directory.
set :shared_files, ["app/config/parameters.yml"]
# NOTE(review): sharing "vendor" across releases means composer updates
# mutate the live release's dependencies in place — confirm intended.
set :shared_children, [app_path + "/logs", web_path + "/uploads", "vendor", app_path + "/sessions"]
set :writable_dirs, ["app/cache", "app/logs", "app/sessions"]

# Other configurations
set :use_sudo, false
default_run_options[:pty] = true
set :model_manager, "doctrine"

# SSH options — forward the local SSH agent so the server can pull from git.
ssh_options[:forward_agent] = true
Don't share vendors
set :application, project

# Cleanup — keep only the three most recent releases on the server.
set :keep_releases, 3

# Composer configuration — vendors are copied per-release (not shared).
set :use_composer, true
set :copy_vendors, true

# Git configuration
set :scm, :git

# Maintenance page
set :maintenance_template_path, "deployment/maintenance.erb"

# Logging
#logger.level = Logger::DEBUG

# Shared files — symlinked into each release from the shared directory.
set :shared_files, ["app/config/parameters.yml"]
set :shared_children, [app_path + "/logs", web_path + "/uploads", app_path + "/sessions"]
set :writable_dirs, ["app/cache", "app/logs", "app/sessions"]

# Other configurations
set :use_sudo, false
default_run_options[:pty] = true
set :model_manager, "doctrine"

# SSH options — forward the local SSH agent so the server can pull from git.
ssh_options[:forward_agent] = true
|
#this code boughts pieces from activeldap and net-ldap
require 'rack'
require 'net/ldap'
require 'net/ntlm'
require 'uri'
module OmniAuth
module Strategies
class LDAP
class Adaptor
class LdapError < StandardError; end
class ConfigurationError < StandardError; end
class AuthenticationError < StandardError; end
class ConnectionError < StandardError; end
VALID_ADAPTER_CONFIGURATION_KEYS = [:host, :port, :method, :bind_dn, :password,
:try_sasl, :sasl_mechanisms, :uid, :base, :allow_anonymous]
MUST_HAVE_KEYS = [:host, :port, :method, :uid, :base]
METHOD = {
:ssl => :simple_tls,
:tls => :start_tls,
:plain => nil,
}
attr_accessor :bind_dn, :password
attr_reader :connection, :uid, :base
def initialize(configuration={})
@connection = nil
@disconnected = false
@bound = false
@configuration = configuration.dup
@configuration[:allow_anonymous] ||= false
@logger = @configuration.delete(:logger)
message = []
MUST_HAVE_KEYS.each do |name|
message << name if configuration[name].nil?
end
raise ArgumentError.new(message.join(",") +" MUST be provided") unless message.empty?
VALID_ADAPTER_CONFIGURATION_KEYS.each do |name|
instance_variable_set("@#{name}", configuration[name])
end
end
def connect(options={})
host = options[:host] || @host
method = ensure_method(options[:method] || @method || :plain)
port = options[:port] || @port || ensure_port(method)
@disconnected = false
@bound = false
@bind_tried = false
config = {
:host => host,
:eport => port,
}
config[:encryption] = {:method => method} if method
@connection, @uri, @with_start_tls = begin
uri = construct_uri(host, port, method == :simple_tls)
with_start_tls = method == :start_tls
puts ({:uri => uri, :with_start_tls => with_start_tls}).inspect
[Net::LDAP::Connection.new(config), uri, with_start_tls]
rescue Net::LDAP::LdapError
raise ConnectionError, $!.message
end
end
def unbind(options={})
@connection.close # Net::LDAP doesn't implement unbind.
end
def bind(options={})
connect(options) unless connecting?
begin
@bind_tried = true
bind_dn = (options[:bind_dn] || @bind_dn).to_s
try_sasl = options.has_key?(:try_sasl) ? options[:try_sasl] : @try_sasl
if options.has_key?(:allow_anonymous)
allow_anonymous = options[:allow_anonymous]
else
allow_anonymous = @allow_anonymous
end
# Rough bind loop:
# Attempt 1: SASL if available
# Attempt 2: SIMPLE with credentials if password block
# Attempt 3: SIMPLE ANONYMOUS if 1 and 2 fail and allow anonymous is set to true
if try_sasl and sasl_bind(bind_dn, options)
puts "bound with sasl"
elsif simple_bind(bind_dn, options)
puts "bound with simple"
elsif allow_anonymous and bind_as_anonymous(options)
puts "bound as anonymous"
else
message = yield if block_given?
message ||= ('All authentication methods for %s exhausted.') % target
raise AuthenticationError, message
end
@bound = true
rescue Net::LDAP::LdapError
raise AuthenticationError, $!.message
end
end
def disconnect!(options={})
unbind(options)
@connection = @uri = @with_start_tls = nil
@disconnected = true
end
def rebind(options={})
unbind(options) if bound?
connect(options)
end
def connecting?
!@connection.nil? and !@disconnected
end
def bound?
connecting? and @bound
end
def search(options={}, &block)
base = options[:base]
filter = options[:filter]
limit = options[:limit]
args = {
:base => @base,
:filter => filter,
:size => limit
}
attributes = {}
execute(:search, args) do |entry|
entry.attribute_names.each do |name|
attributes[name] = entry[name]
end
end
attributes
end
private
def execute(method, *args, &block)
result = @connection.send(method, *args, &block)
message = nil
if result.is_a?(Hash)
message = result[:errorMessage]
result = result[:resultCode]
end
unless result.zero?
message = [Net::LDAP.result2string(result), message].compact.join(": ")
raise LdapError, message
end
end
def ensure_port(method)
if method == :ssl
URI::LDAPS::DEFAULT_PORT
else
URI::LDAP::DEFAULT_PORT
end
end
def prepare_connection(options)
end
def ensure_method(method)
method ||= "plain"
normalized_method = method.to_s.downcase.to_sym
return METHOD[normalized_method] if METHOD.has_key?(normalized_method)
available_methods = METHOD.keys.collect {|m| m.inspect}.join(", ")
format = "%s is not one of the available connect methods: %s"
raise ConfigurationError, format % [method.inspect, available_methods]
end
def sasl_bind(bind_dn, options={})
sasl_mechanisms = options[:sasl_mechanisms] || @sasl_mechanisms
sasl_mechanisms.each do |mechanism|
begin
normalized_mechanism = mechanism.downcase.gsub(/-/, '_')
sasl_bind_setup = "sasl_bind_setup_#{normalized_mechanism}"
next unless respond_to?(sasl_bind_setup, true)
initial_credential, challenge_response = send(sasl_bind_setup, bind_dn, options)
args = {
:method => :sasl,
:initial_credential => initial_credential,
:mechanism => mechanism,
:challenge_response => challenge_response,
}
info = {
:name => "bind: SASL", :dn => bind_dn, :mechanism => mechanism,
}
execute(:bind, args)
return true
rescue Exception => e
puts e.message
end
end
false
end
def sasl_bind_setup_digest_md5(bind_dn, options)
initial_credential = ""
challenge_response = Proc.new do |cred|
pref = SASL::Preferences.new :digest_uri => "ldap/#{@host}", :username => bind_dn, :has_password? => true, :password => options[:password]||@password
sasl = SASL.new("DIGEST-MD5", pref)
response = sasl.receive("challenge", cred)
response[1]
end
[initial_credential, challenge_response]
end
def sasl_bind_setup_gss_spnego(bind_dn, options)
puts options.inspect
user,psw = [bind_dn, options[:password]||@password]
raise LdapError.new( "invalid binding information" ) unless (user && psw)
nego = proc {|challenge|
t2_msg = Net::NTLM::Message.parse( challenge )
user, domain = user.split('\\').reverse
t2_msg.target_name = Net::NTLM::encode_utf16le(domain) if domain
t3_msg = t2_msg.response( {:user => user, :password => psw}, {:ntlmv2 => true} )
t3_msg.serialize
}
[Net::NTLM::Message::Type1.new.serialize, nego]
end
def simple_bind(bind_dn, options={})
args = {
:method => :simple,
:username => bind_dn,
:password => (options[:password]||@password).to_s,
}
begin
execute(:bind, args)
true
rescue Exception
false
end
end
def bind_as_anonymous(options={})
execute(:bind, {:method => :anonymous})
true
end
def construct_uri(host, port, ssl)
protocol = ssl ? "ldaps" : "ldap"
URI.parse("#{protocol}://#{host}:#{port}").to_s
end
def target
return nil if @uri.nil?
if @with_start_tls
"#{@uri}(StartTLS)"
else
@uri
end
end
end
end
end
end
fix typo which broke LDAP authentication
# This code borrows pieces from activeldap and net-ldap.
require 'rack'
require 'net/ldap'
require 'net/ntlm'
require 'uri'
module OmniAuth
module Strategies
class LDAP
class Adaptor
class LdapError < StandardError; end
class ConfigurationError < StandardError; end
class AuthenticationError < StandardError; end
class ConnectionError < StandardError; end
VALID_ADAPTER_CONFIGURATION_KEYS = [:host, :port, :method, :bind_dn, :password,
:try_sasl, :sasl_mechanisms, :uid, :base, :allow_anonymous]
MUST_HAVE_KEYS = [:host, :port, :method, :uid, :base]
METHOD = {
:ssl => :simple_tls,
:tls => :start_tls,
:plain => nil,
}
attr_accessor :bind_dn, :password
attr_reader :connection, :uid, :base
def initialize(configuration={})
@connection = nil
@disconnected = false
@bound = false
@configuration = configuration.dup
@configuration[:allow_anonymous] ||= false
@logger = @configuration.delete(:logger)
message = []
MUST_HAVE_KEYS.each do |name|
message << name if configuration[name].nil?
end
raise ArgumentError.new(message.join(",") +" MUST be provided") unless message.empty?
VALID_ADAPTER_CONFIGURATION_KEYS.each do |name|
instance_variable_set("@#{name}", configuration[name])
end
end
def connect(options={})
host = options[:host] || @host
method = ensure_method(options[:method] || @method || :plain)
port = options[:port] || @port || ensure_port(method)
@disconnected = false
@bound = false
@bind_tried = false
config = {
:host => host,
:port => port,
}
config[:encryption] = {:method => method} if method
@connection, @uri, @with_start_tls = begin
uri = construct_uri(host, port, method == :simple_tls)
with_start_tls = method == :start_tls
puts ({:uri => uri, :with_start_tls => with_start_tls}).inspect
[Net::LDAP::Connection.new(config), uri, with_start_tls]
rescue Net::LDAP::LdapError
raise ConnectionError, $!.message
end
end
def unbind(options={})
@connection.close # Net::LDAP doesn't implement unbind.
end
def bind(options={})
connect(options) unless connecting?
begin
@bind_tried = true
bind_dn = (options[:bind_dn] || @bind_dn).to_s
try_sasl = options.has_key?(:try_sasl) ? options[:try_sasl] : @try_sasl
if options.has_key?(:allow_anonymous)
allow_anonymous = options[:allow_anonymous]
else
allow_anonymous = @allow_anonymous
end
# Rough bind loop:
# Attempt 1: SASL if available
# Attempt 2: SIMPLE with credentials if password block
# Attempt 3: SIMPLE ANONYMOUS if 1 and 2 fail and allow anonymous is set to true
if try_sasl and sasl_bind(bind_dn, options)
puts "bound with sasl"
elsif simple_bind(bind_dn, options)
puts "bound with simple"
elsif allow_anonymous and bind_as_anonymous(options)
puts "bound as anonymous"
else
message = yield if block_given?
message ||= ('All authentication methods for %s exhausted.') % target
raise AuthenticationError, message
end
@bound = true
rescue Net::LDAP::LdapError
raise AuthenticationError, $!.message
end
end
def disconnect!(options={})
unbind(options)
@connection = @uri = @with_start_tls = nil
@disconnected = true
end
def rebind(options={})
unbind(options) if bound?
connect(options)
end
def connecting?
!@connection.nil? and !@disconnected
end
def bound?
connecting? and @bound
end
def search(options={}, &block)
base = options[:base]
filter = options[:filter]
limit = options[:limit]
args = {
:base => @base,
:filter => filter,
:size => limit
}
attributes = {}
execute(:search, args) do |entry|
entry.attribute_names.each do |name|
attributes[name] = entry[name]
end
end
attributes
end
private
def execute(method, *args, &block)
result = @connection.send(method, *args, &block)
message = nil
if result.is_a?(Hash)
message = result[:errorMessage]
result = result[:resultCode]
end
unless result.zero?
message = [Net::LDAP.result2string(result), message].compact.join(": ")
raise LdapError, message
end
end
def ensure_port(method)
if method == :ssl
URI::LDAPS::DEFAULT_PORT
else
URI::LDAP::DEFAULT_PORT
end
end
def prepare_connection(options)
end
def ensure_method(method)
method ||= "plain"
normalized_method = method.to_s.downcase.to_sym
return METHOD[normalized_method] if METHOD.has_key?(normalized_method)
available_methods = METHOD.keys.collect {|m| m.inspect}.join(", ")
format = "%s is not one of the available connect methods: %s"
raise ConfigurationError, format % [method.inspect, available_methods]
end
def sasl_bind(bind_dn, options={})
sasl_mechanisms = options[:sasl_mechanisms] || @sasl_mechanisms
sasl_mechanisms.each do |mechanism|
begin
normalized_mechanism = mechanism.downcase.gsub(/-/, '_')
sasl_bind_setup = "sasl_bind_setup_#{normalized_mechanism}"
next unless respond_to?(sasl_bind_setup, true)
initial_credential, challenge_response = send(sasl_bind_setup, bind_dn, options)
args = {
:method => :sasl,
:initial_credential => initial_credential,
:mechanism => mechanism,
:challenge_response => challenge_response,
}
info = {
:name => "bind: SASL", :dn => bind_dn, :mechanism => mechanism,
}
execute(:bind, args)
return true
rescue Exception => e
puts e.message
end
end
false
end
def sasl_bind_setup_digest_md5(bind_dn, options)
initial_credential = ""
challenge_response = Proc.new do |cred|
pref = SASL::Preferences.new :digest_uri => "ldap/#{@host}", :username => bind_dn, :has_password? => true, :password => options[:password]||@password
sasl = SASL.new("DIGEST-MD5", pref)
response = sasl.receive("challenge", cred)
response[1]
end
[initial_credential, challenge_response]
end
def sasl_bind_setup_gss_spnego(bind_dn, options)
puts options.inspect
user,psw = [bind_dn, options[:password]||@password]
raise LdapError.new( "invalid binding information" ) unless (user && psw)
nego = proc {|challenge|
t2_msg = Net::NTLM::Message.parse( challenge )
user, domain = user.split('\\').reverse
t2_msg.target_name = Net::NTLM::encode_utf16le(domain) if domain
t3_msg = t2_msg.response( {:user => user, :password => psw}, {:ntlmv2 => true} )
t3_msg.serialize
}
[Net::NTLM::Message::Type1.new.serialize, nego]
end
def simple_bind(bind_dn, options={})
args = {
:method => :simple,
:username => bind_dn,
:password => (options[:password]||@password).to_s,
}
begin
execute(:bind, args)
true
rescue Exception
false
end
end
def bind_as_anonymous(options={})
execute(:bind, {:method => :anonymous})
true
end
def construct_uri(host, port, ssl)
protocol = ssl ? "ldaps" : "ldap"
URI.parse("#{protocol}://#{host}:#{port}").to_s
end
def target
return nil if @uri.nil?
if @with_start_tls
"#{@uri}(StartTLS)"
else
@uri
end
end
end
end
end
end
|
require 'json'
require 'fileutils'
require 'pathname'
require 'uri'
require 'nokogiri'
# Converts the jQuery API documentation (XML files under <input>/entries)
# into one JSON document: elasticsearch-style mapping metadata followed by
# one record per documented entry.
class JqueryDocPopulator
  # input_path      - root of the api.jquery.com docs checkout
  # output_filename - JSON file name, written under ./out
  def initialize(input_path, output_filename)
    @input_path = input_path
    @output_filename = output_filename
    @output_path = "out"
    @full_output_filename = File.join(@output_path, @output_filename)
  end

  # Writes the complete JSON document (metadata header, entry records,
  # closing bracket) and prints the set of entry kinds encountered.
  def populate
    @all_kinds = {}
    File.open(@full_output_filename, 'w:UTF-8') do |out|
      out.write <<-eos
{
  "metadata" : {
    "mapping" : {
      "_all" : {
        "enabled" : false
      },
      "properties" : {
        "name" : {
          "type" : "string",
          "index" : "analyzed"
        },
        "title" : {
          "type" : "string",
          "index" : "analyzed"
        },
        "kind" : {
          "type" : "string",
          "index" : "no"
        },
        "url" : {
          "type" : "string",
          "index" : "no"
        },
        "summaryHtml" : {
          "type" : "string",
          "index" : "no"
        },
        "descriptionHtml" : {
          "type" : "string",
          "index" : "no"
        },
        "sampleHtml" : {
          "type" : "string",
          "index" : "no"
        },
        "returnType" : {
          "type" : "string",
          "index" : "no"
        },
        "deprecated" : {
          "type" : "string",
          "index" : "no"
        },
        "removed" : {
          "type" : "string",
          "index" : "no"
        },
        "signatures" : {
          "type" : "object",
          "enabled" : false
        },
        "examples" : {
          "type" : "object",
          "enabled" : false
        }
      }
    }
  },
  "updates" : [
      eos
      write_doc_index(out)
      out.write("]\n}")
    end
    puts "All kinds = " + @all_kinds.keys.join(',')
  end

  private

  # Streams a record for every entries/*.xml file into +out+.
  def write_doc_index(out)
    @first_doc = true
    # fix: sort the directory listing -- Dir.entries order is filesystem
    # dependent, which made the generated document non-deterministic
    Dir.entries(File.join(@input_path, 'entries')).sort.each do |entry|
      next unless entry.end_with?('.xml')
      #next unless entry == 'jQuery.browser.xml'
      puts "Parsing '#{entry}' ..."
      parse_entry_file(out, File.join(@input_path, 'entries', entry))
      puts "Done parsing '#{entry}'."
    end
  end

  # Returns the argument's type list: either the single `type` attribute
  # or the names of its nested <type> children.
  def extract_arg_types(arg)
    type_attr = (arg.attr('type').strip rescue nil)
    if type_attr
      [type_attr]
    else
      (arg > 'type').map do |type_node|
        type_node.attr('name').strip
      end
    end
  end

  # Builds the JSON hash for an <argument> or <property> node, recursing
  # into nested properties/arguments.
  def extract_arg_or_property(arg)
    doc = {
      name: arg.attr('name').strip,
      possibleTypes: extract_arg_types(arg),
      optional: ((arg.attr('optional').strip == 'true') rescue false),
      descriptionHtml: ((arg > 'desc').inner_html.strip rescue nil)
    }
    added = arg.attr('added')
    if added && !added.empty?
      doc[:added] = added
    end
    property_nodes = (arg > 'property')
    if property_nodes.length > 0
      doc[:properties] = property_nodes.map do |prop|
        extract_arg_or_property(prop)
      end
    end
    arg_nodes = (arg > 'argument')
    if arg_nodes.length > 0
      doc[:arguments] = arg_nodes.map do |argument|
        extract_arg_or_property(argument)
      end
    end
    doc
  end

  # Recognition key for an entry kind, e.g. "method" -> "...jquery.Method".
  def compute_recognition_key(kind)
    'com.solveforall.recognition.programming.web.javascript.jquery.' + kind.capitalize
  end

  # Parses one entries/*.xml file and appends a JSON record per <entry>,
  # comma-separating records after the first.
  def parse_entry_file(out, filename)
    File.open(filename) do |f|
      doc = Nokogiri::XML(f)
      doc.css('entry').each do |entry|
        kind = entry.attr('type').strip
        @all_kinds[kind] = true
        url = ('https://api.jquery.com/' + File.basename(filename)).gsub(/\.xml$/, '/')
        output_doc = {
          name: entry.attr('name').strip,
          url: url,
          title: (entry > 'title').text.strip,
          kind: entry.attr('type').strip,
          returnType: (entry.attr('return').strip rescue nil),
          summaryHtml: (entry > 'desc').inner_html.strip,
          descriptionHtml: (entry > 'longdesc').inner_html.strip,
          sampleHtml: ((entry > 'sample').inner_html.strip rescue nil),
          deprecated: (entry.attr('deprecated').strip rescue nil),
          removed: (entry.attr('removed').strip rescue nil),
          signatures: (entry > 'signature').map do |sig|
            {
              added: (sig > 'added').text,
              args: (sig > 'argument').map do |arg|
                extract_arg_or_property(arg)
              end
            }
          end,
          examples: (entry > 'example').map do |example|
            {
              descriptionHtml: (example > 'desc').inner_html.strip,
              codeHtml: (example > 'code').inner_html.strip,
              exampleHtml: (example > 'html').text().strip
            }
          end,
          categories: (entry > 'category').map do |category|
            category.attr('slug')
          end,
          recognitionKeys: [compute_recognition_key(kind)]
        }
        if @first_doc
          @first_doc = false
        else
          out.write(",\n")
        end
        out.write(output_doc.to_json)
      end
    end
  end
end
# CLI entry point: first argument is the input path, optional second
# argument overrides the output filename (default jquery_doc.json).
input_path = nil
output_filename = 'jquery_doc.json'
ARGV.each do |arg|
  if input_path
    output_filename = arg
  else
    input_path = arg
  end
end
puts "input_path = #{input_path}"
FileUtils.mkdir_p("out")
populator = JqueryDocPopulator.new(input_path, output_filename)
populator.populate()
# Also produce a bzip2-compressed copy next to the JSON file.
system("bzip2 -kf out/#{output_filename}")
Output nicer JSON
require 'json'
require 'fileutils'
require 'pathname'
require 'uri'
require 'nokogiri'
# Converts the jQuery API documentation (XML files under <input>/entries)
# into one JSON document: elasticsearch-style mapping metadata followed by
# one pretty-printed record per documented entry.
class JqueryDocPopulator
  # input_path      - root of the api.jquery.com docs checkout
  # output_filename - JSON file name, written under ./out
  def initialize(input_path, output_filename)
    @input_path = input_path
    @output_filename = output_filename
    @output_path = "out"
    @full_output_filename = File.join(@output_path, @output_filename)
  end

  # Writes the complete JSON document (metadata header, entry records,
  # closing bracket) and prints the set of entry kinds encountered.
  def populate
    @all_kinds = {}
    File.open(@full_output_filename, 'w:UTF-8') do |out|
      out.write <<-eos
{
  "metadata" : {
    "mapping" : {
      "_all" : {
        "enabled" : false
      },
      "properties" : {
        "name" : {
          "type" : "string",
          "index" : "analyzed"
        },
        "title" : {
          "type" : "string",
          "index" : "analyzed"
        },
        "kind" : {
          "type" : "string",
          "index" : "no"
        },
        "url" : {
          "type" : "string",
          "index" : "no"
        },
        "summaryHtml" : {
          "type" : "string",
          "index" : "no"
        },
        "descriptionHtml" : {
          "type" : "string",
          "index" : "no"
        },
        "sampleHtml" : {
          "type" : "string",
          "index" : "no"
        },
        "returnType" : {
          "type" : "string",
          "index" : "no"
        },
        "deprecated" : {
          "type" : "string",
          "index" : "no"
        },
        "removed" : {
          "type" : "string",
          "index" : "no"
        },
        "signatures" : {
          "type" : "object",
          "enabled" : false
        },
        "examples" : {
          "type" : "object",
          "enabled" : false
        }
      }
    }
  },
  "updates" : [
      eos
      write_doc_index(out)
      out.write("]\n}")
    end
    puts "All kinds = " + @all_kinds.keys.join(',')
  end

  private

  # Streams a record for every entries/*.xml file into +out+.
  def write_doc_index(out)
    @first_doc = true
    # fix: sort the directory listing -- Dir.entries order is filesystem
    # dependent, which made the generated document non-deterministic
    Dir.entries(File.join(@input_path, 'entries')).sort.each do |entry|
      next unless entry.end_with?('.xml')
      #next unless entry == 'jQuery.browser.xml'
      puts "Parsing '#{entry}' ..."
      parse_entry_file(out, File.join(@input_path, 'entries', entry))
      puts "Done parsing '#{entry}'."
    end
  end

  # Returns the argument's type list: either the single `type` attribute
  # or the names of its nested <type> children.
  def extract_arg_types(arg)
    type_attr = (arg.attr('type').strip rescue nil)
    if type_attr
      [type_attr]
    else
      (arg > 'type').map do |type_node|
        type_node.attr('name').strip
      end
    end
  end

  # Builds the JSON hash for an <argument> or <property> node, recursing
  # into nested properties/arguments.
  def extract_arg_or_property(arg)
    doc = {
      name: arg.attr('name').strip,
      possibleTypes: extract_arg_types(arg),
      optional: ((arg.attr('optional').strip == 'true') rescue false),
      descriptionHtml: ((arg > 'desc').inner_html.strip rescue nil)
    }
    added = arg.attr('added')
    if added && !added.empty?
      doc[:added] = added
    end
    property_nodes = (arg > 'property')
    if property_nodes.length > 0
      doc[:properties] = property_nodes.map do |prop|
        extract_arg_or_property(prop)
      end
    end
    arg_nodes = (arg > 'argument')
    if arg_nodes.length > 0
      doc[:arguments] = arg_nodes.map do |argument|
        extract_arg_or_property(argument)
      end
    end
    doc
  end

  # Recognition key for an entry kind, e.g. "method" -> "...jquery.Method".
  def compute_recognition_key(kind)
    'com.solveforall.recognition.programming.web.javascript.jquery.' + kind.capitalize
  end

  # Parses one entries/*.xml file and appends a pretty-printed JSON record
  # per <entry>, comma-separating records after the first.
  def parse_entry_file(out, filename)
    File.open(filename) do |f|
      doc = Nokogiri::XML(f)
      doc.css('entry').each do |entry|
        kind = entry.attr('type').strip
        @all_kinds[kind] = true
        url = ('https://api.jquery.com/' + File.basename(filename)).gsub(/\.xml$/, '/')
        output_doc = {
          name: entry.attr('name').strip,
          url: url,
          title: (entry > 'title').text.strip,
          kind: entry.attr('type').strip,
          returnType: (entry.attr('return').strip rescue nil),
          summaryHtml: (entry > 'desc').inner_html.strip,
          descriptionHtml: (entry > 'longdesc').inner_html.strip,
          sampleHtml: ((entry > 'sample').inner_html.strip rescue nil),
          deprecated: (entry.attr('deprecated').strip rescue nil),
          removed: (entry.attr('removed').strip rescue nil),
          signatures: (entry > 'signature').map do |sig|
            {
              added: (sig > 'added').text,
              args: (sig > 'argument').map do |arg|
                extract_arg_or_property(arg)
              end
            }
          end,
          examples: (entry > 'example').map do |example|
            {
              descriptionHtml: (example > 'desc').inner_html.strip,
              codeHtml: (example > 'code').inner_html.strip,
              exampleHtml: (example > 'html').text().strip
            }
          end,
          categories: (entry > 'category').map do |category|
            category.attr('slug')
          end,
          recognitionKeys: [compute_recognition_key(kind)]
        }
        if @first_doc
          @first_doc = false
        else
          out.write(",\n")
        end
        out.write(JSON.pretty_generate(output_doc))
      end
    end
  end
end
# CLI entry point: first argument is the input path, optional second
# argument overrides the output filename (default jquery_doc.json).
input_path = nil
output_filename = 'jquery_doc.json'
ARGV.each do |arg|
  if input_path
    output_filename = arg
  else
    input_path = arg
  end
end
puts "input_path = #{input_path}"
FileUtils.mkdir_p("out")
populator = JqueryDocPopulator.new(input_path, output_filename)
populator.populate()
system("bzip2 -kf out/#{output_filename}") |
require 'spec_helper'
describe 'alerts', :type => :controller do
  stub_authorization!

  # Anonymous admin controller that opts in to alert checking.
  controller(Spree::Admin::BaseController) do
    def index
      render :text => 'ok'
    end
    def should_check_alerts?
      true
    end
  end

  before do
    # fix: the stub was commented out, so every run of this spec made a
    # live HTTP request to Spree's remote alert endpoint; stubbing keeps
    # the suite deterministic and runnable offline
    expect(Spree::Alert).to receive(:current).and_return("string")
  end

  # Regression test for #3716
  it "alerts returned wrong data type" do
    get :index, {}
    expect(response.body).to eq('ok')
  end
end
Stop hitting Spree's servers when running specs
I noticed this while running specs offline.
We were hitting Spree's "alert" endpoint every time we ran this backend spec.
require 'spec_helper'
describe 'alerts', :type => :controller do
  stub_authorization!
  # Anonymous admin controller that opts in to alert checking.
  controller(Spree::Admin::BaseController) do
    def index
      render :text => 'ok'
    end
    def should_check_alerts?
      true
    end
  end
  before do
    # Stub the alert lookup so the spec never hits Spree's remote alert
    # endpoint (deterministic, works offline).
    expect(Spree::Alert).to receive(:current).and_return("string")
  end
  # Regression test for #3716
  it "alerts returned wrong data type" do
    get :index, {}
    expect(response.body).to eq('ok')
  end
end
#
# Author:: Seth Chisamore <schisamo@opscode.com>
# Cookbook Name:: php
# Provider:: pear_channel
#
# Copyright:: 2011, Opscode, Inc <legal@opscode.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# http://pear.php.net/manual/en/guide.users.commandline.channels.php
require 'chef/mixin/shell_out'
require 'chef/mixin/language'
include Chef::Mixin::ShellOut
# This provider supports why-run mode.
def whyrun_supported?
  true
end

# Discover a channel by name (fetches its channel.xml), if not yet known.
action :discover do
  unless exists?
    Chef::Log.info("Discovering pear channel #{@new_resource}")
    execute "pear channel-discover #{@new_resource.channel_name}" do
      action :run
    end
  end
end

# Add a channel from an explicit channel.xml URL, if not yet known.
action :add do
  unless exists?
    Chef::Log.info("Adding pear channel #{@new_resource} from #{@new_resource.channel_xml}")
    execute "pear channel-add #{@new_resource.channel_xml}" do
      action :run
    end
  end
end

# Update the channel when `pear search` reports it needs channel-update.
action :update do
  if exists?
    update_needed = false
    begin
      # fix: this assigned to a misspelled `updated_needed` local, so a
      # positive probe result was silently discarded and the update only
      # ever ran via the timeout rescue below
      update_needed = true if shell_out("pear search -c #{@new_resource.channel_name} NNNNNN").stdout =~ /channel-update/
    rescue Chef::Exceptions::CommandTimeout
      # CentOS can hang on 'pear search' if a channel needs updating
      Chef::Log.info("Timed out checking if channel-update needed...forcing update of pear channel #{@new_resource}")
      update_needed = true
    end
    if update_needed
      description = "update pear channel #{@new_resource}"
      converge_by(description) do
        Chef::Log.info("Updating pear channel #{@new_resource}")
        shell_out!("pear channel-update #{@new_resource.channel_name}")
      end
    end
  end
end

# Delete the channel, if present.
action :remove do
  if exists?
    Chef::Log.info("Deleting pear channel #{@new_resource}")
    execute "pear channel-delete #{@new_resource.channel_name}" do
      action :run
    end
  end
end

def load_current_resource
  @current_resource = Chef::Resource::PhpPearChannel.new(@new_resource.name)
  @current_resource.channel_name(@new_resource.channel_name)
  @current_resource
end

private

# True when `pear channel-info` succeeds for the channel.
def exists?
  begin
    shell_out!("pear channel-info #{@current_resource.channel_name}")
    true
  rescue Mixlib::ShellOut::ShellCommandFailed
    # fix: the old extra Chef::Exceptions::ShellCommandFailed clause had an
    # empty body (returned nil) and triggered a Mixlib deprecation warning
    false
  end
end
[COOK-2909] - remove Mixlib deprecation warning
Signed-off-by: Sean OMeara <4025b808ec3cf11e3d5c9a1a3bd1e7c4003cb3a1@opscode.com>
#
# Author:: Seth Chisamore <schisamo@opscode.com>
# Cookbook Name:: php
# Provider:: pear_channel
#
# Copyright:: 2011, Opscode, Inc <legal@opscode.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# http://pear.php.net/manual/en/guide.users.commandline.channels.php
require 'chef/mixin/shell_out'
require 'chef/mixin/language'
include Chef::Mixin::ShellOut
# This provider supports why-run mode.
def whyrun_supported?
  true
end

# Discover a channel by name (fetches its channel.xml), if not yet known.
action :discover do
  unless exists?
    Chef::Log.info("Discovering pear channel #{@new_resource}")
    execute "pear channel-discover #{@new_resource.channel_name}" do
      action :run
    end
  end
end

# Add a channel from an explicit channel.xml URL, if not yet known.
action :add do
  unless exists?
    Chef::Log.info("Adding pear channel #{@new_resource} from #{@new_resource.channel_xml}")
    execute "pear channel-add #{@new_resource.channel_xml}" do
      action :run
    end
  end
end

# Update the channel when `pear search` reports it needs channel-update.
action :update do
  if exists?
    update_needed = false
    begin
      # fix: this assigned to a misspelled `updated_needed` local, so a
      # positive probe result was silently discarded and the update only
      # ever ran via the timeout rescue below
      update_needed = true if shell_out("pear search -c #{@new_resource.channel_name} NNNNNN").stdout =~ /channel-update/
    rescue Chef::Exceptions::CommandTimeout
      # CentOS can hang on 'pear search' if a channel needs updating
      Chef::Log.info("Timed out checking if channel-update needed...forcing update of pear channel #{@new_resource}")
      update_needed = true
    end
    if update_needed
      description = "update pear channel #{@new_resource}"
      converge_by(description) do
        Chef::Log.info("Updating pear channel #{@new_resource}")
        shell_out!("pear channel-update #{@new_resource.channel_name}")
      end
    end
  end
end

# Delete the channel, if present.
action :remove do
  if exists?
    Chef::Log.info("Deleting pear channel #{@new_resource}")
    execute "pear channel-delete #{@new_resource.channel_name}" do
      action :run
    end
  end
end

def load_current_resource
  @current_resource = Chef::Resource::PhpPearChannel.new(@new_resource.name)
  @current_resource.channel_name(@new_resource.channel_name)
  @current_resource
end

private

# True when `pear channel-info` succeeds for the channel.
def exists?
  begin
    shell_out!("pear channel-info #{@current_resource.channel_name}")
    true
  rescue Mixlib::ShellOut::ShellCommandFailed
    false
  end
end
|
#! /usr/bin/env ruby
# Copyright 2007 - $Date$ by PeopleWare n.v.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# $Date$
# $Revision$
# $HeadURL$
#
# Script to fix stuff in our Subversion repositories
#
# - setting the correct mime-type
# - setting svn:keywords
#
# Ruben Vandeginste 23/07/2008
#
# change load_path
# $LOAD_PATH << File.join(File.dirname(__FILE__), './lib')
# using subversion library
require "svn/repos"
# subversion repo revision number
# subversion repo revision number
SVN_SPIDER_REVISION = "$Rev$"

# Commit log message; kept simple on purpose and extended per repository
# with the list of fixed paths below.
@@log_msg = "svn_spider\n"
@@log_msg << "\n"

# Initialize the extension -> mime-type registry from the bundled table
# (one "extension mimetype" pair per line).
mime_type_registry = Hash.new
File.readlines("mime_type_registry.txt").map{|l| l.split.map{|e| e.strip}}.each{|l| mime_type_registry[l[0]]=l[1]}

# load config + initialization
@@mime_types = {}
@@property_filters = {}
@@repositories = {}
@@credentials = {}
@@working_directory = ""
load "config.rb"
# Config-supplied mime types override the registry defaults.
@@mime_types.each_pair do |key, val|
  mime_type_registry[key] = val
end

# consistency check
# todo

# do the actual checking and fixing for each repository
@@repositories.each_pair do |repo, info|
  # create a client context
  ctx = Svn::Client::Context.new
  # authentication
  if ((info["auth"] == nil) || (@@credentials[info["auth"]] == nil)) then
    # maybe svn already has the authentication info
    ctx.add_simple_provider
  else
    # username + password for authentication
    ctx.add_simple_prompt_provider(0) do |cred, realm, username, may_save|
      cred.username = @@credentials[info["auth"]]["username"]
      cred.password = @@credentials[info["auth"]]["password"]
    end
  end
  # log message configuration
  @@log = @@log_msg.clone
  @@log << "working root:\n"
  @@log << " " << info["url"] << "\n\n"
  @@log << "fixes:\n"
  ctx.set_log_msg_func do |items|
    puts @@log
    # fix: removed the leftover debug `raise RuntimeError`, which aborted
    # every commit and made the return value below unreachable
    [true, @@log]
  end
  # check out a project, or update it if a working copy is found
  wc_path = File.join @@working_directory, repo
  # fix: File.exists? is deprecated (removed in Ruby 3.2); use File.exist?
  if File.exist? wc_path then
    ctx.update wc_path
  else
    ctx.checkout info["url"], wc_path
  end
  # walk the file tree and update property lists
  ctx.list( wc_path, "HEAD", nil, true) do |path, dirent, lock, abs_path|
    # path is path of file relative to wc_path
    # abs_path is location of the root of path
    # relative to the repository root
    if not dirent.directory? then
      new_props = {}
      # check mime type
      ext = File.extname(path)
      mime = (ext == nil) ? nil : mime_type_registry[ext[1..-1]]
      if ((not ext.empty?) && (mime != nil)) then
        new_props[Svn::Core::PROP_MIME_TYPE] = mime
      end
      # check property filters
      @@property_filters.each_pair do |key, value|
        if path =~ key then
          new_props.merge! value
        end
      end
      # fetch the current property list
      wc_file_path = File.join wc_path, path
      old_props = {}
      if dirent.have_props? then
        props = ctx.proplist wc_file_path
        # should not be empty since we checked that is has properties
        if not props.empty? then
          # only 1 entry since we take one file at a time
          props.each do |pli|
            old_props.merge! pli.props
          end
        end
      end
      # compare property lists and update new_props
      new_props.clone.each_key do |k|
        new_props.delete k if new_props[k] == old_props[k]
      end
      # update the property list in the working copy
      if not new_props.empty? then
        @@log << "  " << path << "\n"
      end
      new_props.each_pair do |key, value|
        ctx.propset key, value, wc_file_path
        # update log
        if (old_props[key] != nil) then
          @@log << "    -" << key << ":"
          @@log << (" " * [(20 - key.size), 1].max)
          @@log << old_props[key] << "\n"
        end
        @@log << "    +" << key << ":"
        @@log << (" " * [(20 - key.size), 1].max)
        @@log << value << "\n"
      end
    end
  end
  # finally commit the changes in the working copy
  ctx.commit wc_path
end
better log message when committing
(the code was already committed, but threw an error on purpose)
#! /usr/bin/env ruby
# Copyright 2007 - $Date$ by PeopleWare n.v.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# $Date$
# $Revision$
# $HeadURL$
#
# Script to fix stuff in our Subversion repositories
#
# - setting the correct mime-type
# - setting svn:keywords
#
# Ruben Vandeginste 23/07/2008
#
# change load_path
# $LOAD_PATH << File.join(File.dirname(__FILE__), './lib')
# using subversion library
require "svn/repos"
# subversion repo revision number
SVN_SPIDER_REVISION = "$Rev$"
# log message prefix for every commit made by this script
# this is kept simple on purpose
@@log_msg = "svn_spider\n"
@@log_msg << "\n"
# initialize mime registry from the bundled registry file
# (format: one "extension mime/type" pair per line)
mime_type_registry = Hash.new
File.readlines("mime_type_registry.txt").map{|l| l.split.map{|e| e.strip}}.each{|l| mime_type_registry[l[0]]=l[1]}
# load config + initialization
# config.rb is expected to (re)assign the class variables below
@@mime_types = {}
@@property_filters = {}
@@repositories = {}
@@credentials = {}
@@working_directory = ""
load "config.rb"
# config-provided mime types override the registry defaults
@@mime_types.each_pair do |key, val|
  mime_type_registry[key] = val
end
# consistency check
# todo
# do the actual checking and fixing for each repository
@@repositories.each_pair do |repo, info|
  # create a client context
  ctx = Svn::Client::Context.new
  # authentication
  if ((info["auth"] == nil) || (@@credentials[info["auth"]] == nil)) then
    # maybe svn already has the authentication info
    ctx.add_simple_provider
  else
    # username + password for authentication
    ctx.add_simple_prompt_provider(0) do |cred, realm, username, may_save|
      cred.username = @@credentials[info["auth"]]["username"]
      cred.password = @@credentials[info["auth"]]["password"]
    end
  end
  # log message configuration
  @@log = @@log_msg.clone
  @@log << "working root:\n"
  @@log << " " << info["url"] << "\n\n"
  @@log << "fixes:\n"
  ctx.set_log_msg_func do |items|
    [true, @@log]
  end
  # check out a project, or update it if a working copy is found
  wc_path = File.join @@working_directory, repo
  if File.exist? wc_path then
    ctx.update wc_path
  else
    ctx.checkout info["url"], wc_path
  end
  # walk the file tree and update property lists
  ctx.list( wc_path, "HEAD", nil, true) do |path, dirent, lock, abs_path|
    # path is path of file relative to wc_path
    # abs_path is location of the root of path
    # relative to the repository root
    if not dirent.directory? then
      new_props = {}
      # check mime type
      # NOTE: File.extname returns "" (never nil) when there is no extension
      ext = File.extname(path)
      mime = ext.empty? ? nil : mime_type_registry[ext[1..-1]]
      if mime != nil then
        new_props[Svn::Core::PROP_MIME_TYPE] = mime
      end
      # check property filters
      @@property_filters.each_pair do |key, value|
        if path =~ key then
          new_props.merge! value
        end
      end
      # fetch the current property list
      wc_file_path = File.join wc_path, path
      old_props = {}
      if dirent.have_props? then
        props = ctx.proplist wc_file_path
        # should not be empty since we checked that it has properties
        if not props.empty? then
          # only 1 entry since we take one file at a time
          props.each do |pli|
            old_props.merge! pli.props
          end
        end
      end
      # compare property lists and update new_props
      # (drop properties that already have the desired value)
      new_props.clone.each_key do |k|
        new_props.delete k if new_props[k] == old_props[k]
      end
      # update the property list in the working copy
      if not new_props.empty? then
        @@log << " " << path << "\n"
      end
      new_props.each_pair do |key, value|
        ctx.propset key, value, wc_file_path
        # update log
        if (old_props[key] != nil) then
          @@log << " -" << key << ":"
          @@log << (" " * [(20 - key.size), 1].max)
          @@log << old_props[key] << "\n"
        end
        @@log << " +" << key << ":"
        @@log << (" " * [(20 - key.size), 1].max)
        @@log << value << "\n"
      end
    end
  end
  # finally commit the changes in the working copy
  ctx.commit wc_path
end
|
# This profiler run simulates Shopify.
# We are looking in the tests directory for liquid files and render them within the designated layout file.
# We will also export a substantial database to liquid which the templates can render values of.
# All this is to make the benchmark as non-synthetic as possible. All templates and tests are lifted from
# direct real-world usage and the profiler measures code that looks very similar to the way it looks in
# Shopify, which is likely the biggest user of Liquid in the world, with something to the tune of several
# million Template#render calls a day.
require 'rubygems'
require 'active_support'
require 'yaml'
require 'digest/md5'
require File.dirname(__FILE__) + '/shopify/liquid'
require File.dirname(__FILE__) + '/shopify/database.rb'
class ThemeRunner
  # Load all templates into memory, do this now so that
  # we don't profile IO.
  # Each entry is [template_source, layout_source_or_nil, template_path];
  # theme.liquid files act as layouts and are not tests themselves.
  def initialize
    @tests = Dir[File.dirname(__FILE__) + '/tests/**/*.liquid'].collect do |test|
      next if File.basename(test) == 'theme.liquid'
      theme_path = File.dirname(test) + '/theme.liquid'
      [File.read(test), (File.file?(theme_path) ? File.read(theme_path) : nil), test]
    end.compact
  end

  # Parse every template and layout once without rendering, to measure
  # parse time in isolation.
  def compile
    @tests.each do |liquid, layout, template_name|
      tmpl = Liquid::Template.new
      tmpl.parse(liquid)
      tmpl = Liquid::Template.new
      tmpl.parse(layout)
    end
  end

  # Parse and render every test template (inside its layout when present).
  def run
    # Dup assigns because we will make some changes to them
    assigns = Database.tables.dup
    @tests.each do |liquid, layout, template_name|
      # Compute page_template outside of profiler run, uninteresting to profiler
      page_template = File.basename(template_name, File.extname(template_name))
      compile_and_render(liquid, layout, assigns, page_template)
    end
  end

  # Same as #run but wrapped in RubyProf; the profiler is paused around the
  # bookkeeping so only compile_and_render is measured.
  def run_profile
    RubyProf.measure_mode = RubyProf::WALL_TIME
    # Dup assigns because we will make some changes to them
    assigns = Database.tables.dup
    @tests.each do |liquid, layout, template_name|
      # Compute page_template outside of profiler run, uninteresting to profiler
      page_template = File.basename(template_name, File.extname(template_name))
      # Start the profiler lazily (paused) on the first iteration only.
      unless @started
        RubyProf.start
        RubyProf.pause
        @started = true
      end
      RubyProf.resume
      html = compile_and_render(liquid, layout, assigns, page_template)
      RubyProf.pause
      # return the result and the MD5 of the content, this can be used to detect regressions between liquid versions
      $stdout.puts "* rendered template %s, content: %s" % [template_name, Digest::MD5.hexdigest(html)]
      # Uncomment to dump html files to /tmp so that you can inspect for errors
      # File.open("/tmp/#{File.basename(template_name)}.html", "w+") { |fp| fp <<html}
    end
    RubyProf.stop
  end

  # Parses +template+ (and +layout+ when given) and renders with +assigns+.
  # Returns the rendered HTML string.
  def compile_and_render(template, layout, assigns, page_template)
    tmpl = Liquid::Template.new
    tmpl.assigns['page_title'] = 'Page title'
    tmpl.assigns['template'] = page_template
    content_for_layout = tmpl.parse(template).render(assigns)

    if layout
      assigns['content_for_layout'] = content_for_layout
      tmpl.parse(layout).render(assigns)
    else
      content_for_layout
    end
  end
end
Support benchmarking templates with 'include' tag
# This profiler run simulates Shopify.
# We are looking in the tests directory for liquid files and render them within the designated layout file.
# We will also export a substantial database to liquid which the templates can render values of.
# All this is to make the benchmark as non-synthetic as possible. All templates and tests are lifted from
# direct real-world usage and the profiler measures code that looks very similar to the way it looks in
# Shopify, which is likely the biggest user of Liquid in the world, with something to the tune of several
# million Template#render calls a day.
require 'rubygems'
require 'active_support'
require 'yaml'
require 'digest/md5'
require File.dirname(__FILE__) + '/shopify/liquid'
require File.dirname(__FILE__) + '/shopify/database.rb'
class ThemeRunner
  # Minimal Liquid file system so benchmarked templates can use the
  # 'include' tag: {% include 'foo' %} resolves to <path>/foo.liquid.
  class FileSystem
    def initialize(path)
      @path = path
    end

    # Called by Liquid to retrieve a template file
    def read_template_file(template_path, context)
      File.read(@path + '/' + template_path + '.liquid')
    end
  end

  # Load all templates into memory, do this now so that
  # we don't profile IO.
  # Each entry is [template_source, layout_source_or_nil, template_path];
  # theme.liquid files act as layouts and are not tests themselves.
  def initialize
    @tests = Dir[File.dirname(__FILE__) + '/tests/**/*.liquid'].collect do |test|
      next if File.basename(test) == 'theme.liquid'
      theme_path = File.dirname(test) + '/theme.liquid'
      [File.read(test), (File.file?(theme_path) ? File.read(theme_path) : nil), test]
    end.compact
  end

  # Parse every template and layout once without rendering, to measure
  # parse time in isolation.
  def compile
    @tests.each do |liquid, layout, template_name|
      tmpl = Liquid::Template.new
      tmpl.parse(liquid)
      tmpl = Liquid::Template.new
      tmpl.parse(layout)
    end
  end

  # Parse and render every test template (inside its layout when present).
  def run
    # Dup assigns because we will make some changes to them
    assigns = Database.tables.dup
    @tests.each do |liquid, layout, template_name|
      # Compute page_template outside of profiler run, uninteresting to profiler
      page_template = File.basename(template_name, File.extname(template_name))
      compile_and_render(liquid, layout, assigns, page_template, template_name)
    end
  end

  # Same as #run but wrapped in RubyProf; the profiler is paused around the
  # bookkeeping so only compile_and_render is measured.
  def run_profile
    RubyProf.measure_mode = RubyProf::WALL_TIME
    # Dup assigns because we will make some changes to them
    assigns = Database.tables.dup
    @tests.each do |liquid, layout, template_name|
      # Compute page_template outside of profiler run, uninteresting to profiler
      page_template = File.basename(template_name, File.extname(template_name))
      # Start the profiler lazily (paused) on the first iteration only.
      unless @started
        RubyProf.start
        RubyProf.pause
        @started = true
      end
      RubyProf.resume
      # BUGFIX: this used to pass the undefined local `template_file`,
      # raising NameError; #run passes `template_name` here.
      html = compile_and_render(liquid, layout, assigns, page_template, template_name)
      RubyProf.pause
      # return the result and the MD5 of the content, this can be used to detect regressions between liquid versions
      $stdout.puts "* rendered template %s, content: %s" % [template_name, Digest::MD5.hexdigest(html)]
      # Uncomment to dump html files to /tmp so that you can inspect for errors
      # File.open("/tmp/#{File.basename(template_name)}.html", "w+") { |fp| fp <<html}
    end
    RubyProf.stop
  end

  # Parses +template+ (and +layout+ when given) and renders with +assigns+.
  # +template_file+ is the on-disk path of the template; its directory is
  # registered as the Liquid file system so 'include' tags resolve.
  # Returns the rendered HTML string.
  def compile_and_render(template, layout, assigns, page_template, template_file)
    tmpl = Liquid::Template.new
    tmpl.assigns['page_title'] = 'Page title'
    tmpl.assigns['template'] = page_template
    tmpl.registers[:file_system] = ThemeRunner::FileSystem.new(File.dirname(template_file))
    content_for_layout = tmpl.parse(template).render(assigns)

    if layout
      assigns['content_for_layout'] = content_for_layout
      tmpl.parse(layout).render(assigns)
    else
      content_for_layout
    end
  end
end
|
# Factory for Episode records: belongs to a podcast, carries an image
# association, a fixed PRX id, and a unique JSON overrides payload.
FactoryGirl.define do
  factory :episode do
    podcast
    association :image, factory: :image
    prx_id 87683
    # Each generated episode gets a distinct title inside the overrides JSON.
    sequence(:overrides) do |n|
      %({"title":"Episode #{n}"})
    end
  end
end
add pub_date to episode factory
# Factory for Episode records: belongs to a podcast, carries an image
# association, a fixed PRX id, and a unique JSON overrides payload with a
# fixed publication date.
FactoryGirl.define do
  factory :episode do
    podcast
    association :image, factory: :image
    prx_id 87683
    # Each generated episode gets a distinct title; pub_date is constant.
    sequence(:overrides) do |n|
      overrides = {
        title: "Episode #{n}",
        pub_date: "Fri, 09 Jan 2015 12:49:44 EST"
      }
      overrides.to_json
    end
  end
end
|
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require 'devise/oauth2_facebook_grantable/version'

# Gem packaging metadata for oauth2_facebook_grantable.
Gem::Specification.new do |s|
  s.name        = "oauth2_facebook_grantable"
  s.version     = Devise::OAuth2FacebookGrantable::VERSION
  s.authors     = ["Pierre-Luc Simard"]
  s.email       = ["plsimard@mirego.com"]
  s.homepage    = ""
  s.summary     = 'Facebook grant type for OAuth2 authentication'
  s.description = 'Add facebook as a grant_type to the authentication done through devise_oauth2_providable'
  s.rubyforge_project = "oauth2_facebook_grantable"

  # Runtime dependencies.
  s.add_runtime_dependency "koala", "~> 1.4.1"
  s.add_runtime_dependency "devise_oauth2_providable"

  # Package exactly what git tracks.
  s.files         = `git ls-files`.split("\n")
  s.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables   = `git ls-files -- bin/*`.split("\n").map { |f| File.basename(f) }
  s.require_paths = ["lib"]
end
Added homepage, email, and dependency to gemspec
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require 'devise/oauth2_facebook_grantable/version'

# Gem packaging metadata for oauth2_facebook_grantable.
Gem::Specification.new do |s|
  s.name        = "oauth2_facebook_grantable"
  s.version     = Devise::OAuth2FacebookGrantable::VERSION
  s.authors     = ["Pierre-Luc Simard"]
  s.email       = ["p-l@6x9.ca"]
  s.homepage    = "http://github.com/p-l/oauth2_facebook_grantable"
  s.summary     = 'Facebook grant type for OAuth2 authentication'
  s.description = 'Add facebook as a grant_type to the authentication done through devise_oauth2_providable'
  s.rubyforge_project = "oauth2_facebook_grantable"

  # Runtime dependencies.
  s.add_runtime_dependency "koala", "~> 1.4.1"
  s.add_runtime_dependency "devise_oauth2_providable", "~> 1.1.0"

  # Package exactly what git tracks.
  # NOTE(review): this glob says "tests/" while the conventional layout (and
  # the previous revision) used "test/" — confirm the directory name.
  s.files         = `git ls-files`.split("\n")
  s.test_files    = `git ls-files -- {tests,spec,features}/*`.split("\n")
  s.executables   = `git ls-files -- bin/*`.split("\n").map { |f| File.basename(f) }
  s.require_paths = ["lib"]
end
|
# (c) Copyright 2016-2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# NOTE: This recipe requires:
# Enclosure group: Eg1
# Walks an enclosure through its lifecycle on the OneView appliance:
# add -> rename (PATCH) -> rename back (PATCH) -> remove.
# Appliance connection settings come from the environment.
my_client = {
  url: ENV['ONEVIEWSDK_URL'],
  user: ENV['ONEVIEWSDK_USER'],
  password: ENV['ONEVIEWSDK_PASSWORD']
}
# Import the enclosure into enclosure group Eg1 ('dcs' are simulator creds).
oneview_enclosure 'Encl1' do
  data(
    hostname: '172.18.1.11',
    username: 'dcs',
    password: 'dcs',
    licensingIntent: 'OneView'
  )
  enclosure_group 'Eg1'
  client my_client
  action :add
end
# Rename enclosure
# Warning: Operation persists in hardware
oneview_enclosure 'Encl1' do
  client my_client
  operation 'replace'
  path '/name'
  value 'ChefEncl1'
  action :patch
end
# Restoring its original name
oneview_enclosure 'ChefEncl1' do
  client my_client
  operation 'replace'
  path '/name'
  value 'Encl1'
  action :patch
end
# Removes it from the appliance
oneview_enclosure 'Encl1' do
  client my_client
  action :remove
end
Adding missing enclosure example
# (c) Copyright 2016-2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
# NOTE: This recipe requires:
# Enclosure group: Eg1
# Walks an enclosure through its lifecycle on the OneView appliance:
# add -> rename (PATCH) -> rename back (PATCH) -> refresh -> remove.
# Appliance connection settings come from the environment.
my_client = {
  url: ENV['ONEVIEWSDK_URL'],
  user: ENV['ONEVIEWSDK_USER'],
  password: ENV['ONEVIEWSDK_PASSWORD']
}
# Import the enclosure into enclosure group Eg1 ('dcs' are simulator creds).
oneview_enclosure 'Encl1' do
  data(
    hostname: '172.18.1.11',
    username: 'dcs',
    password: 'dcs',
    licensingIntent: 'OneView'
  )
  enclosure_group 'Eg1'
  client my_client
  action :add
end
# Rename enclosure
# Warning: Operation persists in hardware
oneview_enclosure 'Encl1' do
  client my_client
  operation 'replace'
  path '/name'
  value 'ChefEncl1'
  action :patch
end
# Restoring its original name
oneview_enclosure 'ChefEncl1' do
  client my_client
  operation 'replace'
  path '/name'
  value 'Encl1'
  action :patch
end
# Refreshes the enclosure
oneview_enclosure 'Encl1' do
  client my_client
  action :refresh
end
# Removes it from the appliance
oneview_enclosure 'Encl1' do
  client my_client
  action :remove
end
|
# CocoaPods specification for the DKHelper library.
# The published version is read from the VERSION file at the repo root.
Pod::Spec.new do |s|
  s.name     = 'DKHelper'
  s.version  = File.read('VERSION')
  s.summary  = 'Bunch of categorized classes to improve your iOS development.'
  s.homepage = 'https://github.com/kevindelord/DKHelper'
  s.license  = 'MIT'
  s.author   = { 'kevindelord' => 'delord.kevin@gmail.com' }
  s.source   = { :git => 'https://github.com/kevindelord/DKHelper.git', :tag => s.version.to_s }

  s.platform              = :ios
  s.ios.deployment_target = '7.1'
  s.requires_arc          = true
  s.source_files          = 'DKHelper/*'
end
Update podspec file to link the sources
# CocoaPods specification for the DKHelper library.
# The published version is read from the VERSION file at the repo root.
Pod::Spec.new do |s|
  s.name     = 'DKHelper'
  s.version  = File.read('VERSION')
  s.summary  = 'Bunch of categorized classes to improve your iOS development.'
  s.homepage = 'https://github.com/kevindelord/DKHelper'
  s.license  = 'MIT'
  s.author   = { 'kevindelord' => 'delord.kevin@gmail.com' }
  s.source   = { :git => 'https://github.com/kevindelord/DKHelper.git', :tag => s.version.to_s }

  s.platform              = :ios
  s.ios.deployment_target = '7.1'
  s.requires_arc          = true
  s.source_files          = 'Sources/*'
end
|
require 'activerecord'
require 'i18n/backend/active_record/store_procs'
# Older i18n releases alias-chain #interpolate; remove the override so the
# stock implementation is exercised by these tests.
if I18n::Backend::Simple.method_defined?(:interpolate_with_deprecated_syntax)
  I18n::Backend::Simple.send(:remove_method, :interpolate)
end
# In-memory SQLite database so each test run starts from a clean slate.
ActiveRecord::Base.establish_connection(:adapter => "sqlite3", :database => ":memory:")
ActiveRecord::Migration.verbose = false
# Minimal schema for the ActiveRecord i18n backend; is_proc flags rows whose
# value column stores a serialized proc (see StoreProcs).
ActiveRecord::Schema.define(:version => 1) do
  create_table :translations do |t|
    t.string :locale
    t.string :key
    t.string :value
    t.boolean :is_proc, :default => false
  end
end
module Tests
  module Backend
    module ActiveRecord
      module Setup
        # Shared setup/teardown for tests exercising the ActiveRecord i18n
        # backend: installs the backend, mixes StoreProcs into Translation
        # and seeds two :en translations; teardown wipes the table again.
        module Base
          def setup
            super
            I18n.locale = nil
            I18n.default_locale = :en
            I18n.backend = I18n::Backend::ActiveRecord.new
            I18n::Backend::ActiveRecord::Translation.send(:include, I18n::Backend::ActiveRecord::StoreProcs)
            backend_store_translations(:en, :foo => { :bar => 'bar', :baz => 'baz' })
          end
          def teardown
            super
            I18n::Backend::ActiveRecord::Translation.destroy_all
            I18n.backend = nil
          end
        end
        # Base plus date/time fixtures; pins TZ to UTC so time formatting is
        # deterministic, restoring the previous value on teardown.
        module Localization
          include Base
          def setup
            super
            setup_datetime_translations
            setup_datetime_lambda_translations
            @old_timezone, ENV['TZ'] = ENV['TZ'], 'UTC'
          end
          def teardown
            super
            @old_timezone ? ENV['TZ'] = @old_timezone : ENV.delete('TZ')
          end
          # German :de date/time formats (the *_ordinalized formats are procs,
          # exercising StoreProcs) plus distance_in_words pluralization data.
          def setup_datetime_translations
            backend_store_translations :de, {
              :date => {
                :formats => {
                  :default => "%d.%m.%Y",
                  :short => "%d. %b",
                  :long => "%d. %B %Y",
                  :long_ordinalized => lambda { |date, options|
                    tz = " (#{options[:timezone]})" if options[:timezone]
                    "#{date.day}ter %B %Y#{tz}"
                  }
                },
                :day_names => %w(Sonntag Montag Dienstag Mittwoch Donnerstag Freitag Samstag),
                :abbr_day_names => %w(So Mo Di Mi Do Fr Sa),
                :month_names => %w(Januar Februar März April Mai Juni Juli August September Oktober November Dezember).unshift(nil),
                :abbr_month_names => %w(Jan Feb Mar Apr Mai Jun Jul Aug Sep Okt Nov Dez).unshift(nil),
                :order => [:day, :month, :year]
              },
              :time => {
                :formats => {
                  :default => "%a, %d. %b %Y %H:%M:%S %z",
                  :short => "%d. %b %H:%M",
                  :long => "%d. %B %Y %H:%M",
                  :long_ordinalized => lambda { |date, options|
                    tz = " (#{options[:timezone]})" if options[:timezone]
                    "#{date.day}ter %B %Y, %H:%M Uhr#{tz}"
                  }
                },
                :am => 'am',
                :pm => 'pm'
              },
              :datetime => {
                :distance_in_words => {
                  :half_a_minute => 'half a minute',
                  :less_than_x_seconds => {
                    :one => 'less than 1 second',
                    :other => 'less than {{count}} seconds'
                  },
                  :x_seconds => {
                    :one => '1 second',
                    :other => '{{count}} seconds'
                  },
                  :less_than_x_minutes => {
                    :one => 'less than a minute',
                    :other => 'less than {{count}} minutes'
                  },
                  :x_minutes => {
                    :one => '1 minute',
                    :other => '{{count}} minutes'
                  },
                  :about_x_hours => {
                    :one => 'about 1 hour',
                    :other => 'about {{count}} hours'
                  },
                  :x_days => {
                    :one => '1 day',
                    :other => '{{count}} days'
                  },
                  :about_x_months => {
                    :one => 'about 1 month',
                    :other => 'about {{count}} months'
                  },
                  :x_months => {
                    :one => '1 month',
                    :other => '{{count}} months'
                  },
                  :about_x_years => {
                    :one => 'about 1 year',
                    :other => 'about {{count}} year'
                  },
                  :over_x_years => {
                    :one => 'over 1 year',
                    :other => 'over {{count}} years'
                  }
                }
              }
            }
          end
          # Russian :ru fixtures whose values are procs that choose the word
          # form from the requested strftime format (lambda translations).
          def setup_datetime_lambda_translations
            backend_store_translations 'ru', {
              :date => {
                :'day_names' => lambda { |key, options|
                  (options[:format] =~ /^%A/) ?
                  %w(Воскресенье Понедельник Вторник Среда Четверг Пятница Суббота) :
                  %w(воскресенье понедельник вторник среда четверг пятница суббота)
                },
                :'abbr_day_names' => %w(Вс Пн Вт Ср Чт Пт Сб),
                :'month_names' => lambda { |key, options|
                  (options[:format] =~ /(%d|%e)(\s*)?(%B)/) ?
                  %w(января февраля марта апреля мая июня июля августа сентября октября ноября декабря).unshift(nil) :
                  %w(Январь Февраль Март Апрель Май Июнь Июль Август Сентябрь Октябрь Ноябрь Декабрь).unshift(nil)
                },
                :'abbr_month_names' => lambda { |key, options|
                  (options[:format] =~ /(%d|%e)(\s*)(%b)/) ?
                  %w(янв. февр. марта апр. мая июня июля авг. сент. окт. нояб. дек.).unshift(nil) :
                  %w(янв. февр. март апр. май июнь июль авг. сент. окт. нояб. дек.).unshift(nil)
                },
              },
              :time => {
                :am => "утра",
                :pm => "вечера"
              }
            }
          end
        end
      end
    end
  end
end
Rescue NameError in test setup
require 'activerecord'
require 'i18n/backend/active_record/store_procs'
# Older i18n releases alias-chain #interpolate; remove the override so the
# stock implementation is exercised by these tests.
if I18n::Backend::Simple.method_defined?(:interpolate_with_deprecated_syntax)
  # BUGFIX: `expr rescue NameError` does NOT rescue only NameError — the
  # rescue modifier cannot name a class; it rescues any StandardError and
  # merely evaluates to the constant NameError. Use an explicit clause.
  begin
    I18n::Backend::Simple.send(:remove_method, :interpolate)
  rescue NameError
    # :interpolate is not defined directly on Simple; nothing to remove.
  end
end
# In-memory SQLite database so each test run starts from a clean slate.
ActiveRecord::Base.establish_connection(:adapter => "sqlite3", :database => ":memory:")
ActiveRecord::Migration.verbose = false
# Minimal schema for the ActiveRecord i18n backend; is_proc flags rows whose
# value column stores a serialized proc (see StoreProcs).
ActiveRecord::Schema.define(:version => 1) do
  create_table :translations do |t|
    t.string :locale
    t.string :key
    t.string :value
    t.boolean :is_proc, :default => false
  end
end
module Tests
  module Backend
    module ActiveRecord
      module Setup
        # Shared setup/teardown for tests exercising the ActiveRecord i18n
        # backend: installs the backend, mixes StoreProcs into Translation
        # and seeds two :en translations; teardown wipes the table again.
        module Base
          def setup
            super
            I18n.locale = nil
            I18n.default_locale = :en
            I18n.backend = I18n::Backend::ActiveRecord.new
            I18n::Backend::ActiveRecord::Translation.send(:include, I18n::Backend::ActiveRecord::StoreProcs)
            backend_store_translations(:en, :foo => { :bar => 'bar', :baz => 'baz' })
          end
          def teardown
            super
            I18n::Backend::ActiveRecord::Translation.destroy_all
            I18n.backend = nil
          end
        end
        # Base plus date/time fixtures; pins TZ to UTC so time formatting is
        # deterministic, restoring the previous value on teardown.
        module Localization
          include Base
          def setup
            super
            setup_datetime_translations
            setup_datetime_lambda_translations
            @old_timezone, ENV['TZ'] = ENV['TZ'], 'UTC'
          end
          def teardown
            super
            @old_timezone ? ENV['TZ'] = @old_timezone : ENV.delete('TZ')
          end
          # German :de date/time formats (the *_ordinalized formats are procs,
          # exercising StoreProcs) plus distance_in_words pluralization data.
          def setup_datetime_translations
            backend_store_translations :de, {
              :date => {
                :formats => {
                  :default => "%d.%m.%Y",
                  :short => "%d. %b",
                  :long => "%d. %B %Y",
                  :long_ordinalized => lambda { |date, options|
                    tz = " (#{options[:timezone]})" if options[:timezone]
                    "#{date.day}ter %B %Y#{tz}"
                  }
                },
                :day_names => %w(Sonntag Montag Dienstag Mittwoch Donnerstag Freitag Samstag),
                :abbr_day_names => %w(So Mo Di Mi Do Fr Sa),
                :month_names => %w(Januar Februar März April Mai Juni Juli August September Oktober November Dezember).unshift(nil),
                :abbr_month_names => %w(Jan Feb Mar Apr Mai Jun Jul Aug Sep Okt Nov Dez).unshift(nil),
                :order => [:day, :month, :year]
              },
              :time => {
                :formats => {
                  :default => "%a, %d. %b %Y %H:%M:%S %z",
                  :short => "%d. %b %H:%M",
                  :long => "%d. %B %Y %H:%M",
                  :long_ordinalized => lambda { |date, options|
                    tz = " (#{options[:timezone]})" if options[:timezone]
                    "#{date.day}ter %B %Y, %H:%M Uhr#{tz}"
                  }
                },
                :am => 'am',
                :pm => 'pm'
              },
              :datetime => {
                :distance_in_words => {
                  :half_a_minute => 'half a minute',
                  :less_than_x_seconds => {
                    :one => 'less than 1 second',
                    :other => 'less than {{count}} seconds'
                  },
                  :x_seconds => {
                    :one => '1 second',
                    :other => '{{count}} seconds'
                  },
                  :less_than_x_minutes => {
                    :one => 'less than a minute',
                    :other => 'less than {{count}} minutes'
                  },
                  :x_minutes => {
                    :one => '1 minute',
                    :other => '{{count}} minutes'
                  },
                  :about_x_hours => {
                    :one => 'about 1 hour',
                    :other => 'about {{count}} hours'
                  },
                  :x_days => {
                    :one => '1 day',
                    :other => '{{count}} days'
                  },
                  :about_x_months => {
                    :one => 'about 1 month',
                    :other => 'about {{count}} months'
                  },
                  :x_months => {
                    :one => '1 month',
                    :other => '{{count}} months'
                  },
                  :about_x_years => {
                    :one => 'about 1 year',
                    :other => 'about {{count}} year'
                  },
                  :over_x_years => {
                    :one => 'over 1 year',
                    :other => 'over {{count}} years'
                  }
                }
              }
            }
          end
          # Russian :ru fixtures whose values are procs that choose the word
          # form from the requested strftime format (lambda translations).
          def setup_datetime_lambda_translations
            backend_store_translations 'ru', {
              :date => {
                :'day_names' => lambda { |key, options|
                  (options[:format] =~ /^%A/) ?
                  %w(Воскресенье Понедельник Вторник Среда Четверг Пятница Суббота) :
                  %w(воскресенье понедельник вторник среда четверг пятница суббота)
                },
                :'abbr_day_names' => %w(Вс Пн Вт Ср Чт Пт Сб),
                :'month_names' => lambda { |key, options|
                  (options[:format] =~ /(%d|%e)(\s*)?(%B)/) ?
                  %w(января февраля марта апреля мая июня июля августа сентября октября ноября декабря).unshift(nil) :
                  %w(Январь Февраль Март Апрель Май Июнь Июль Август Сентябрь Октябрь Ноябрь Декабрь).unshift(nil)
                },
                :'abbr_month_names' => lambda { |key, options|
                  (options[:format] =~ /(%d|%e)(\s*)(%b)/) ?
                  %w(янв. февр. марта апр. мая июня июля авг. сент. окт. нояб. дек.).unshift(nil) :
                  %w(янв. февр. март апр. май июнь июль авг. сент. окт. нояб. дек.).unshift(nil)
                },
              },
              :time => {
                :am => "утра",
                :pm => "вечера"
              }
            }
          end
        end
      end
    end
  end
end
|
# -*- encoding: utf-8 -*-
$LOAD_PATH.unshift File.expand_path("../lib", __FILE__)
require "rspec/mocks/version"

# Gem packaging metadata for rspec-mocks.
Gem::Specification.new do |s|
  s.name              = "rspec-mocks"
  s.version           = RSpec::Mocks::Version::STRING
  s.platform          = Gem::Platform::RUBY
  s.authors           = ["David Chelimsky", "Chad Humphries"]
  s.email             = "dchelimsky@gmail.com;chad.humphries@gmail.com"
  s.homepage          = "http://github.com/rspec/rspec-mocks"
  s.summary           = "rspec-mocks-#{RSpec::Mocks::Version::STRING}"
  s.description       = "RSpec's 'test double' framework, with support for stubbing and mocking"
  s.rubygems_version  = "1.3.7"
  s.rubyforge_project = "rspec"

  # Package exactly what git tracks.
  s.files             = `git ls-files`.split("\n")
  s.test_files        = `git ls-files -- {spec,features}/*`.split("\n")
  s.executables       = `git ls-files -- bin/*`.split("\n").map { |f| File.basename(f) }
  s.extra_rdoc_files  = ["README.md"]
  s.rdoc_options      = ["--charset=UTF-8"]
  s.require_path      = "lib"
end
dev: remove executables from gemspec
# -*- encoding: utf-8 -*-
$LOAD_PATH.unshift File.expand_path("../lib", __FILE__)
require "rspec/mocks/version"

# Gem packaging metadata for rspec-mocks (no executables are shipped).
Gem::Specification.new do |s|
  s.name              = "rspec-mocks"
  s.version           = RSpec::Mocks::Version::STRING
  s.platform          = Gem::Platform::RUBY
  s.authors           = ["David Chelimsky", "Chad Humphries"]
  s.email             = "dchelimsky@gmail.com;chad.humphries@gmail.com"
  s.homepage          = "http://github.com/rspec/rspec-mocks"
  s.summary           = "rspec-mocks-#{RSpec::Mocks::Version::STRING}"
  s.description       = "RSpec's 'test double' framework, with support for stubbing and mocking"
  s.rubygems_version  = "1.3.7"
  s.rubyforge_project = "rspec"

  # Package exactly what git tracks.
  s.files             = `git ls-files`.split("\n")
  s.test_files        = `git ls-files -- {spec,features}/*`.split("\n")
  s.extra_rdoc_files  = ["README.md"]
  s.rdoc_options      = ["--charset=UTF-8"]
  s.require_path      = "lib"
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

# Gem packaging metadata for rspec-speak.
# The version is hard-coded below, so the former
# `require 'rspec/speak/version'` was dead weight (and would abort gemspec
# evaluation if that file were ever missing); it has been removed.
Gem::Specification.new do |spec|
  spec.name          = "rspec-speak"
  spec.version       = "1.0"
  spec.authors       = ["Nathan Clark"]
  spec.email         = ["Nathan.Clark@tokenshift.com"]
  spec.summary       = %q{Listen to your tests.}
  spec.description   = %q{No, really. Listen.}
  spec.homepage      = ""
  spec.license       = "MIT"

  # Package exactly what git tracks.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.7"
  spec.add_development_dependency "rake", "~> 10.0"
end
Hard-coding version.
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

# Gem packaging metadata for rspec-speak (version is hard-coded).
Gem::Specification.new do |spec|
  spec.name          = "rspec-speak"
  spec.version       = "1.0"
  spec.authors       = ["Nathan Clark"]
  spec.email         = ["Nathan.Clark@tokenshift.com"]
  spec.summary       = %q{Listen to your tests.}
  spec.description   = %q{No, really. Listen.}
  spec.homepage      = ""
  spec.license       = "MIT"

  # Package exactly what git tracks.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.7"
  spec.add_development_dependency "rake", "~> 10.0"
end
|
require 'db/sqlite3'
require 'transaction'
# Transaction behaviour of the SQLite3 (AR-JDBC) adapter. SQLite only
# implements SERIALIZABLE and READ UNCOMMITTED isolation, so the shared
# read-committed / repeatable-read cases are overridden to expect failure.
class SQLite3TransactionTest < Test::Unit::TestCase
  include TransactionTestMethods
  # @override
  def test_supports_transaction_isolation
    assert ActiveRecord::Base.connection.supports_transaction_isolation?
    # NOTE: adapter tell us it supports but JDBC meta-data API returns false ?!
    #assert ActiveRecord::Base.connection.supports_transaction_isolation?(:read_uncommitted)
    assert ActiveRecord::Base.connection.supports_transaction_isolation?(:serializable)
  end
  # supports only TRANSACTION_SERIALIZABLE and TRANSACTION_READ_UNCOMMITTED
  # @override
  def test_transaction_isolation_read_committed
    assert ! ActiveRecord::Base.connection.supports_transaction_isolation?(:read_committed)
    assert_raise ActiveRecord::TransactionIsolationError do
      super
    end
  end if Test::Unit::TestCase.ar_version('4.0')
  # @override
  def test_transaction_isolation_repeatable_read
    assert ! ActiveRecord::Base.connection.supports_transaction_isolation?(:repeatable_read)
    assert_raise ActiveRecord::TransactionIsolationError do
      super
    end
  end if Test::Unit::TestCase.ar_version('4.0')
  # READ UNCOMMITTED is accepted by SQLite, so a transaction opened with it
  # behaves like a normal transaction.
  def test_transaction_isolation_read_uncommitted
    Entry.transaction(:isolation => :read_uncommitted) do
      assert_equal 0, Entry.count
      Entry.create # Entry2.create
      assert_equal 1, Entry.count
    end
  end if Test::Unit::TestCase.ar_version('4.0')
  def test_supports_savepoints
    assert_true ActiveRecord::Base.connection.supports_savepoints?
  end
end
there's always someone not playing by the rulez - avoid AR-JDBC custom asserts
require 'db/sqlite3'
require 'transaction'
# Transaction behaviour of the SQLite3 (AR-JDBC) adapter. SQLite only
# implements SERIALIZABLE and READ UNCOMMITTED isolation, so the shared
# read-committed / repeatable-read cases are overridden to expect failure.
class SQLite3TransactionTest < Test::Unit::TestCase
  include TransactionTestMethods
  # @override
  def test_supports_transaction_isolation
    assert ActiveRecord::Base.connection.supports_transaction_isolation?
    # NOTE: adapter tell us it supports but JDBC meta-data API returns false ?!
    #assert ActiveRecord::Base.connection.supports_transaction_isolation?(:read_uncommitted)
    assert ActiveRecord::Base.connection.supports_transaction_isolation?(:serializable)
  end
  # supports only TRANSACTION_SERIALIZABLE and TRANSACTION_READ_UNCOMMITTED
  # @override
  def test_transaction_isolation_read_committed
    assert ! ActiveRecord::Base.connection.supports_transaction_isolation?(:read_committed)
    assert_raise ActiveRecord::TransactionIsolationError do
      super
    end
  end if Test::Unit::TestCase.ar_version('4.0')
  # @override
  def test_transaction_isolation_repeatable_read
    assert ! ActiveRecord::Base.connection.supports_transaction_isolation?(:repeatable_read)
    assert_raise ActiveRecord::TransactionIsolationError do
      super
    end
  end if Test::Unit::TestCase.ar_version('4.0')
  # READ UNCOMMITTED is accepted by SQLite, so a transaction opened with it
  # behaves like a normal transaction.
  def test_transaction_isolation_read_uncommitted
    Entry.transaction(:isolation => :read_uncommitted) do
      assert_equal 0, Entry.count
      Entry.create # Entry2.create
      assert_equal 1, Entry.count
    end
  end if Test::Unit::TestCase.ar_version('4.0')
  def test_supports_savepoints
    assert_true ActiveRecord::Base.connection.supports_savepoints?
  end
  # Savepoint names across nested transactions differ between AR versions;
  # this pins the AR-JDBC behaviour on 3.2 vs 4.2 (replacing the shared
  # framework assertion, which AR-JDBC does not match exactly).
  # @override
  def test_current_savepoints_name
    MyUser.transaction do
      if ar_version('4.2')
        assert_nil MyUser.connection.current_savepoint_name
        assert_nil MyUser.connection.current_transaction.savepoint_name
      else # 3.2
        assert_equal "active_record_1", MyUser.connection.current_savepoint_name
      end
      MyUser.transaction(:requires_new => true) do
        if ar_version('4.2')
          assert_equal "active_record_1", MyUser.connection.current_savepoint_name
          assert_equal "active_record_1", MyUser.connection.current_transaction.savepoint_name
        else # 3.2
          # on AR < 3.2 we do get 'active_record_1' with AR-JDBC which is not compatible
          # with MRI but is actually more accurate - maybe 3.2 should be updated as well
          assert_equal "active_record_2", MyUser.connection.current_savepoint_name
          assert_equal "active_record_2", MyUser.connection.current_savepoint_name(true) if defined? JRUBY_VERSION
          #assert_equal "active_record_1", MyUser.connection.current_savepoint_name(false) if defined? JRUBY_VERSION
        end
        MyUser.transaction(:requires_new => true) do
          if ar_version('4.2')
            assert_equal "active_record_2", MyUser.connection.current_savepoint_name
            assert_equal "active_record_2", MyUser.connection.current_transaction.savepoint_name
            assert_equal "active_record_2", MyUser.connection.current_savepoint_name(true) if defined? JRUBY_VERSION
            #assert_equal "active_record_2", MyUser.connection.current_savepoint_name(false) if defined? JRUBY_VERSION
          else # 3.2
            assert_equal "active_record_3", MyUser.connection.current_savepoint_name
            assert_equal "active_record_3", MyUser.connection.current_savepoint_name(true) if defined? JRUBY_VERSION
            #assert_equal "active_record_2", MyUser.connection.current_savepoint_name(false) if defined? JRUBY_VERSION
          end
        end
        if ar_version('4.2')
          assert_equal "active_record_1", MyUser.connection.current_savepoint_name
          assert_equal "active_record_1", MyUser.connection.current_transaction.savepoint_name
        else # 3.2
          assert_equal "active_record_2", MyUser.connection.current_savepoint_name
          assert_equal "active_record_2", MyUser.connection.current_savepoint_name(true) if defined? JRUBY_VERSION
          #assert_equal "active_record_1", MyUser.connection.current_savepoint_name(false) if defined? JRUBY_VERSION
        end
      end
    end
  end if Test::Unit::TestCase.ar_version('3.2')
end
|
# Mixin for ActiveRecord models identified by a unique, upper-cased "slug".
#
# Lifecycle: when a record is saved without a slug, a random placeholder
# slug is generated before validation/save (so presence/uniqueness pass);
# after the save — once the database id is known — it is replaced with a
# human-readable slug of the form "<parent-slug->ClassName-<0-padded id>".
# On validation failure or rollback the placeholder is reverted to nil.
module SluggedModel
  # Slug with the parent's slug removed, for compact display under a parent.
  # NOTE(review): `cslug = slug` aliases the attribute's own string, so the
  # in-place `cslug[...] = ''` substitution appears to mutate the stored
  # value as well — confirm this is intended (`slug.dup` would avoid it).
  def compact_slug
    return slug unless parent
    cslug = slug
    cslug[parent.slug] = '' if cslug[parent.slug] # Substring replacement of parent.slug with ''
    return cslug
  end

  # Hook: installs the validations and slug-generation callback chain on
  # the including model.
  def self.included(model)
    model.extend(ClassMethods)
    model.class_eval do
      validates :slug, :presence => { :message => "needs a value"}
      validates :slug,
        :uniqueness => { :message => "must be unique" }
      # Placeholder generation runs both before validation and before save
      # so a slug exists whichever path triggers first.
      before_validation :generate_random_slug_if_needed
      after_validation :revert_generated_slug_if_needed
      before_save :generate_random_slug_if_needed
      before_save :upcase_slug
      after_save :generate_human_slug_if_needed
      after_rollback :revert_generated_slug_if_needed
    end
  end

  # Natural-sort key: splits the slug into [number, text] pairs so that
  # e.g. "A-2" sorts before "A-10".
  def slug_split_for_sort
    self.slug.split(/(\d+)/).map { |s| [s.to_i, s] }
  end

  module ClassMethods
    # Scope: records whose slug starts with +prefix+ (all records when the
    # prefix is blank), ordered by slug.
    def slugfilter(prefix)
      if !prefix.blank?
        where("#{table_name}.slug LIKE ?", "#{prefix}%").order(:slug)
      else
        order(:slug)
      end
    end

    # Builds a SlugTree (slug-prefix trie) over +items+.
    def slugtree(items)
      st = SlugTree.new("")
      items.each { |item| st.insert(item) }
      st
    end
  end

  # Prefix tree over slugs, splitting path steps on ".", "-", "(" and ")".
  class SlugTree
    attr_accessor :prefix, :object, :parent, :children

    def initialize(prefix, parent=nil)
      @prefix = prefix
      @parent = parent
      @object = nil   # the record whose slug equals @prefix exactly, if any
      @children = {}  # next path step => SlugTree
    end

    # Inserts +item+ under this node; raises when its slug does not extend
    # this node's prefix.
    def insert(item)
      raise "Bad child slug: #{item.slug}" if !item.slug.starts_with?(@prefix)
      next_path = item.slug[@prefix.size..-1]
      # Lookahead split keeps each delimiter attached to the step after it.
      next_step = next_path.split(/(?=\.|-|\(|\))/)[0]
      if next_step.blank?
        # Replace current node object
        @object = item
      else
        if !@children[next_step]
          @children[next_step] = SlugTree.new(@prefix + next_step, parent=self)
        end
        @children[next_step].insert(item)
      end
    end

    # Nearest descendants that carry an object; intermediate nodes without
    # an object are transparently skipped over.
    def first_level_descendents
      @children.map do |_, child|
        child.object.nil? ? child.first_level_descendents : child
      end.flatten
    end

    # Same as first_level_descendents, paired with each node's path step
    # relative to this node and natural-sorted by that step.
    def first_level_descendents_with_step
      first_level_descendents.
        map { |node| [node.prefix[prefix.size..-1], node] }.
        sort_by { |k,_| k.split(/(\d+)/).map { |s| [s.to_i, s] } }
    end
  end

  # Writer override: slugs are always stored upper-cased; blank becomes nil.
  def slug=(value)
    super(value.present? ? value.upcase : nil)
  end

  # Class-name component used when auto-generating slugs; override per model.
  def default_slug_prefix
    self.class.to_s
  end

  private

  # before_save: normalizes the slug (upper-case, blank => nil).
  def upcase_slug
    self.slug = slug.present? ? slug.upcase : nil
  end

  # before_validation / before_save: when no slug is set, fills in either a
  # human slug (id already known) or a random time-based placeholder, and
  # records in @needs_slug that the placeholder must be replaced after save.
  def generate_random_slug_if_needed
    @needs_slug = false
    if self.slug == nil
      if id.present? #!self.new_record?
        slug_suffix = '%04d' % id
      else
        @needs_slug = true
        # Microsecond timestamp plus a random number, both base-32 encoded,
        # to make placeholder collisions very unlikely.
        slug_suffix = (Time.now.to_r*1000000).to_i.to_s(32) + '-' + Random.rand(1000000000).to_s(32)
      end
      new_slug = ''
      if self.has_attribute?(:parent_id) && !self.parent.nil?
        new_slug += self.parent.slug + '-'
      end
      new_slug += default_slug_prefix + '-' + slug_suffix
      self.slug = new_slug
    end
    true # never abort the callback chain
  end

  # after_save: swaps the random placeholder for "...-<0-padded id>" now
  # that the id exists, saving again (without versioning) to persist it.
  def generate_human_slug_if_needed
    if @needs_slug
      self.without_versioning do
        new_slug = ''
        if self.has_attribute?(:parent_id) && !self.parent.nil?
          new_slug += self.parent.slug + '-'
        end
        new_slug += default_slug_prefix + '-%04d' % id
        self.slug = new_slug
        # HACK: Shouldn't recurse, but still a bit scary.
        self.save
      end
    end
    true
  end

  # after_validation / after_rollback: drops an auto-generated placeholder
  # slug so a failed save leaves the attribute untouched.
  def revert_generated_slug_if_needed
    # In the case of a validation failure or some other save failure, revert the
    # slug back to nil if it was auto-generated.
    if @needs_slug
      self.slug = nil
    end
  end

  # Custom validation helper (not registered above): the slug must extend
  # the parent's slug when a parent exists.
  def validate_slug_parent
    upcase_slug
    return unless parent
    if slug && slug.starts_with?(parent.slug)
      true
    else
      errors.add(:slug, "must start with parent's code #{parent.slug}")
    end
  end
end
Add fallback options for auto-codes [story:43811539]
# Mixin for ActiveRecord models identified by a unique, upper-cased "slug".
#
# Lifecycle: when a record is saved without a slug, a random placeholder
# slug is generated before validation/save (so presence/uniqueness pass);
# after the save — once the database id is known — it is replaced with a
# human-readable slug of the form "<parent-slug->ClassName-<0-padded id>",
# retrying with "-1", "-2", ... suffixes if that slug is already taken.
# On validation failure or rollback the placeholder is reverted to nil.
module SluggedModel
  # Slug with the parent's slug removed, for compact display under a parent.
  # NOTE(review): `cslug = slug` aliases the attribute's own string, so the
  # in-place `cslug[...] = ''` substitution appears to mutate the stored
  # value as well — confirm this is intended (`slug.dup` would avoid it).
  def compact_slug
    return slug unless parent
    cslug = slug
    cslug[parent.slug] = '' if cslug[parent.slug] # Substring replacement of parent.slug with ''
    return cslug
  end

  # Hook: installs the validations and slug-generation callback chain on
  # the including model.
  def self.included(model)
    model.extend(ClassMethods)
    model.class_eval do
      validates :slug, :presence => { :message => "needs a value"}
      validates :slug,
        :uniqueness => { :message => "must be unique" }
      # Placeholder generation runs both before validation and before save
      # so a slug exists whichever path triggers first.
      before_validation :generate_random_slug_if_needed
      after_validation :revert_generated_slug_if_needed
      before_save :generate_random_slug_if_needed
      before_save :upcase_slug
      after_save :generate_human_slug_if_needed
      after_rollback :revert_generated_slug_if_needed
    end
  end

  # Natural-sort key: splits the slug into [number, text] pairs so that
  # e.g. "A-2" sorts before "A-10".
  def slug_split_for_sort
    self.slug.split(/(\d+)/).map { |s| [s.to_i, s] }
  end

  module ClassMethods
    # Scope: records whose slug starts with +prefix+ (all records when the
    # prefix is blank), ordered by slug.
    def slugfilter(prefix)
      if !prefix.blank?
        where("#{table_name}.slug LIKE ?", "#{prefix}%").order(:slug)
      else
        order(:slug)
      end
    end

    # Builds a SlugTree (slug-prefix trie) over +items+.
    def slugtree(items)
      st = SlugTree.new("")
      items.each { |item| st.insert(item) }
      st
    end
  end

  # Prefix tree over slugs, splitting path steps on ".", "-", "(" and ")".
  class SlugTree
    attr_accessor :prefix, :object, :parent, :children

    def initialize(prefix, parent=nil)
      @prefix = prefix
      @parent = parent
      @object = nil   # the record whose slug equals @prefix exactly, if any
      @children = {}  # next path step => SlugTree
    end

    # Inserts +item+ under this node; raises when its slug does not extend
    # this node's prefix.
    def insert(item)
      raise "Bad child slug: #{item.slug}" if !item.slug.starts_with?(@prefix)
      next_path = item.slug[@prefix.size..-1]
      # Lookahead split keeps each delimiter attached to the step after it.
      next_step = next_path.split(/(?=\.|-|\(|\))/)[0]
      if next_step.blank?
        # Replace current node object
        @object = item
      else
        if !@children[next_step]
          @children[next_step] = SlugTree.new(@prefix + next_step, parent=self)
        end
        @children[next_step].insert(item)
      end
    end

    # Nearest descendants that carry an object; intermediate nodes without
    # an object are transparently skipped over.
    def first_level_descendents
      @children.map do |_, child|
        child.object.nil? ? child.first_level_descendents : child
      end.flatten
    end

    # Same as first_level_descendents, paired with each node's path step
    # relative to this node and natural-sorted by that step.
    def first_level_descendents_with_step
      first_level_descendents.
        map { |node| [node.prefix[prefix.size..-1], node] }.
        sort_by { |k,_| k.split(/(\d+)/).map { |s| [s.to_i, s] } }
    end
  end

  # Writer override: slugs are always stored upper-cased; blank becomes nil.
  def slug=(value)
    super(value.present? ? value.upcase : nil)
  end

  # Class-name component used when auto-generating slugs; override per model.
  def default_slug_prefix
    self.class.to_s
  end

  private

  # before_save: normalizes the slug (upper-case, blank => nil).
  def upcase_slug
    self.slug = slug.present? ? slug.upcase : nil
  end

  # before_validation / before_save: when no slug is set, fills in either a
  # human slug (id already known) or a random time-based placeholder, and
  # records in @needs_slug that the placeholder must be replaced after save.
  def generate_random_slug_if_needed
    @needs_slug = false
    if self.slug == nil
      if id.present? #!self.new_record?
        slug_suffix = '%04d' % id
      else
        @needs_slug = true
        # Microsecond timestamp plus a random number, both base-32 encoded,
        # to make placeholder collisions very unlikely.
        slug_suffix = (Time.now.to_r*1000000).to_i.to_s(32) + '-' + Random.rand(1000000000).to_s(32)
      end
      new_slug = ''
      if self.has_attribute?(:parent_id) && !self.parent.nil?
        new_slug += self.parent.slug + '-'
      end
      new_slug += default_slug_prefix + '-' + slug_suffix
      self.slug = new_slug
    end
    true # never abort the callback chain
  end

  # after_save: swaps the random placeholder for "...-<0-padded id>" now
  # that the id exists, retrying with "-1", "-2", ... suffixes until a
  # save succeeds (fallback for id-slug collisions).
  # NOTE(review): the retry loop is unbounded — if the save fails for any
  # reason other than a slug collision this never terminates; consider a
  # retry cap.
  def generate_human_slug_if_needed
    if @needs_slug
      self.without_versioning do
        new_slug = ''
        if self.has_attribute?(:parent_id) && !self.parent.nil?
          new_slug += self.parent.slug + '-'
        end
        self.slug = new_slug + default_slug_prefix + '-%04d' % id
        # HACK: Shouldn't recurse, but still a bit scary.
        try_count = 0
        while !self.save
          try_count += 1
          self.slug = new_slug + default_slug_prefix + ('-%04d' % id) + '-' + try_count.to_s
        end
      end
    end
    true
  end

  # after_validation / after_rollback: drops an auto-generated placeholder
  # slug so a failed save leaves the attribute untouched.
  def revert_generated_slug_if_needed
    # In the case of a validation failure or some other save failure, revert the
    # slug back to nil if it was auto-generated.
    if @needs_slug
      self.slug = nil
    end
  end

  # Custom validation helper (not registered above): the slug must extend
  # the parent's slug when a parent exists.
  def validate_slug_parent
    upcase_slug
    return unless parent
    if slug && slug.starts_with?(parent.slug)
      true
    else
      errors.add(:slug, "must start with parent's code #{parent.slug}")
    end
  end
end
|
Initial podspec for v2.0b1
#
# Podspec for Deferred.  Validate with `pod spec lint Deferred.podspec`.
# Attribute reference: http://docs.cocoapods.org/specification.html
#
Pod::Spec.new do |s|
  # ――― Spec Metadata ―――
  s.name        = "Deferred"
  s.version     = "2.0b1"
  s.summary     = "An implementation of OCaml's Deferred for Swift."
  s.description = <<-DESC
Deferred is an asynchronous promise-style API that can be used as an
alternative to the "block callback" pattern.
DESC
  s.homepage    = "https://github.com/bignerdranch/Deferred"
  s.license     = "MIT"
  s.authors     = ["John Gallagher", "Zachary Waldowski"]

  # ――― Platforms ―――
  s.ios.deployment_target = "8.0"
  s.osx.deployment_target = "10.10"

  # ――― Source Location ―――
  # BUG FIX: the tag was previously hardcoded as "v2.0b1"; deriving it
  # from s.version keeps the two from drifting apart on future releases.
  s.source = { :git => "https://github.com/bignerdranch/Deferred.git", :tag => "v#{s.version}" }

  # ――― Source Code ―――
  s.source_files        = "Deferred"
  s.public_header_files = "Deferred/Deferred.h"
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'paperclip-optimizer/version'

Gem::Specification.new do |spec|
  spec.name          = "paperclip-optimizer"
  spec.version       = PaperclipOptimizer::VERSION
  spec.authors       = ["Jan-Christian Föh"]
  spec.email         = ["jan@programmanstalt.de"]
  spec.description   = %q{paperclip-optimizer is a processor for Paperclip that allows you to optimize and minify uploaded JPEG and PNG files.}
  spec.summary       = %q{Minify Paperclip JPEG and PNG attachments}
  spec.homepage      = "https://github.com/janfoeh/paperclip-optimizer"
  spec.license       = "MIT"

  # Runtime dependencies.
  # BUG FIX: paperclip was previously declared twice — once constrained
  # here and once unconstrained via add_runtime_dependency (an alias of
  # add_dependency).  RubyGems rejects/warns on duplicate dependencies, so
  # only the constrained declaration is kept.
  spec.add_dependency "paperclip", "~> 3.4"
  spec.add_dependency "image_optim", "~> 0.8"

  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake", "~> 10.1"
  spec.add_development_dependency 'rspec', "~> 2.13"
  spec.add_development_dependency 'rails', ">= 3.2.13", "< 4.1.0"
  spec.add_development_dependency 'sqlite3', "~> 1.3.7"

  spec.files         = `git ls-files`.split($/)
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]
end
Update to image_optim 0.9.1
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'paperclip-optimizer/version'

Gem::Specification.new do |spec|
  spec.name          = "paperclip-optimizer"
  spec.version       = PaperclipOptimizer::VERSION
  spec.authors       = ["Jan-Christian Föh"]
  spec.email         = ["jan@programmanstalt.de"]
  spec.description   = %q{paperclip-optimizer is a processor for Paperclip that allows you to optimize and minify uploaded JPEG and PNG files.}
  spec.summary       = %q{Minify Paperclip JPEG and PNG attachments}
  spec.homepage      = "https://github.com/janfoeh/paperclip-optimizer"
  spec.license       = "MIT"

  # Runtime dependencies.
  # BUG FIX: paperclip was previously declared twice — once constrained
  # here and once unconstrained via add_runtime_dependency (an alias of
  # add_dependency).  RubyGems rejects/warns on duplicate dependencies, so
  # only the constrained declaration is kept.
  spec.add_dependency "paperclip", "~> 3.4"
  spec.add_dependency "image_optim", "~> 0.9"

  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake", "~> 10.1"
  spec.add_development_dependency 'rspec', "~> 2.13"
  spec.add_development_dependency 'rails', ">= 3.2.13", "< 4.1.0"
  spec.add_development_dependency 'sqlite3', "~> 1.3.7"

  spec.files         = `git ls-files`.split($/)
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]
end
|
# encoding: utf-8
require_relative "load_fuzzy_logic.rb"
require "minitest/autorun"
Add tests for FuzzyLogic::Collection
# encoding: utf-8
require_relative "load_fuzzy_logic.rb"
require "minitest/autorun"
# Specs for FuzzyLogic::Collection — a named, hash-like container of
# fuzzy sets whose membership block restricts inputs to numbers in 0..100.
describe FuzzyLogic::Collection do
  before do
    @collection = FuzzyLogic::Collection.new("Collection Test") do |value|
      # Accept only numeric values inside [0, 100]; the negated range
      # check keeps the original's handling of NaN (accepted).
      value.is_a?(Numeric) && !(value < 0 || value > 100)
    end
  end

  describe "can add a fuzzyset" do
    it "should work like a normal hash" do
      @collection[:test] = FuzzyLogic::Generate.trapezoid(10, 20, 30, 40)
    end

    it "should throw an error when get not FuzzySet" do
      -> { @collection[:no_fuzzy_set] = 13 }.must_raise ArgumentError
    end
  end

  describe "give a hash on a value" do
    before do
      @collection[:test] = FuzzyLogic::Generate.trapezoid(10, 20, 30, 40)
    end

    it "wont be empty on 25 (or 0 with true)" do
      @collection.get(25).wont_be_empty
      @collection.get(0, true).wont_be_empty
    end

    it "should be empty on 0" do
      @collection.get(0).must_be_empty
    end
  end
end
|
Pod::Spec.new do |s|
  s.name         = "NSObject-LNTXAutoDescription"
  s.version      = "1.0"
  # BUG FIX: the summary previously said "UIImageView category", but this
  # pod is an NSObject category (see s.name and s.description).
  s.summary      = "NSObject category that let you define an automatic description method."
  s.description  = "NSObject category to define a description method that automatically lists all properties of a specific instance."
  s.homepage     = "https://github.com/linitix/NSObject-LNTXAutoDescription"
  s.license      = { :type => 'MIT', :file => 'LICENSE' }
  s.author       = { "Damien Rambout" => "damien.rambout@linitix.com" }
  s.platform     = :ios, '6.0'
  s.source       = { :git => "https://github.com/linitix/NSObject-LNTXAutoDescription.git", :tag => s.version }
  s.source_files = 'NSObject+LNTXAutoDescription'
  s.requires_arc = true
end
Update podspec
# CocoaPods specification for NSObject-LNTXAutoDescription: an NSObject
# category providing an automatically generated -description that lists
# every property of an instance.
Pod::Spec.new do |spec|
  spec.name          = "NSObject-LNTXAutoDescription"
  spec.version       = "1.0.0"
  spec.summary       = "NSObject category that let you define an automatic description method."
  spec.description   = "NSObject category to define a description method that automatically lists all properties of a specific instance."
  spec.homepage      = "https://github.com/linitix/NSObject-LNTXAutoDescription"
  spec.license       = { type: "MIT", file: "LICENSE" }
  spec.author        = { "Damien Rambout" => "damien.rambout@linitix.com" }
  spec.platform      = :ios, "6.0"
  spec.source        = { git: "https://github.com/linitix/NSObject-LNTXAutoDescription.git", tag: spec.version }
  spec.source_files  = "NSObject+LNTXAutoDescription"
  spec.requires_arc  = true
end
|
# Clock solution
class Clock
  attr_accessor :hour, :minute

  # Normalizes any hour/minute combination onto a 24-hour clock:
  # the total is folded into whole hours plus 0..59 minutes, and the
  # hours then wrap modulo 24 (Ruby's % keeps the result non-negative).
  def initialize(hour: 0, minute: 0)
    @hour, @minute = ((hour * 60) + minute).divmod(60)
    @hour %= 24
  end

  # Renders as zero-padded "HH:MM".
  def to_s
    format('%02d:%02d', hour, minute)
  end

  # Component-wise addition; the constructor re-normalizes the result.
  def +(other)
    self.class.new(hour: hour + other.hour, minute: minute + other.minute)
  end

  # Component-wise subtraction; the constructor re-normalizes the result.
  def -(other)
    self.class.new(hour: hour - other.hour, minute: minute - other.minute)
  end

  # Two clocks are equal when they display the same time.
  def ==(other)
    self.class === other && [hour, minute] == [other.hour, other.minute]
  end

  def hash
    minute.hash ^ hour.hash
  end
end
Use one attr_accessor
# Clock solution
class Clock
  # Time is stored internally as a single signed minute total
  # (hours * 60 + minutes); display components are derived on demand.
  attr_accessor :minute

  def initialize(hour: 0, minute: 0)
    @minute = hour * 60 + minute
  end

  # Renders as zero-padded "HH:MM" on a 24-hour dial.
  def to_s
    format('%02d:%02d', divided_hours, divided_minutes)
  end

  # Hour component, 0..23.  Integer division floors and % is always
  # non-negative in Ruby, so negative totals wrap correctly too.
  def divided_hours
    hours = minute / 60
    hours % 24
  end

  # Minute component, 0..59.
  def divided_minutes
    minute % 60
  end

  def +(other)
    Clock.new(hour: divided_hours, minute: divided_minutes + other.minute)
  end

  def -(other)
    Clock.new(hour: divided_hours, minute: divided_minutes - other.minute)
  end

  # Two clocks are equal when they display the same wrapped time.
  def ==(other)
    self.class === other &&
      self.divided_hours == other.divided_hours &&
      self.divided_minutes == other.divided_minutes
  end
  # Keep Hash-key semantics consistent with #== (Hash uses #eql?/#hash).
  alias eql? ==

  # BUG FIX: #== compares the 24-hour-wrapped time, so #hash must be
  # derived from the wrapped value as well.  Hashing the raw minute total
  # made equal clocks (e.g. 25:00 and 01:00) hash differently, which broke
  # Hash and Set membership.  1440 minutes == 24 hours.
  def hash
    (minute % 1440).hash
  end
end
|
# -*- coding: utf-8 -*-
require 'erb'
require 'stack_q_tmpl'
class STACK_Q
include ERB::Util
def initialize(s, opt = {})
@txt = s
@err_msg = ""
@category = opt["category"] || "stack_q"
@opt = opt
end
attr_reader :err_msg
def txt2xml
ret = ""
ret << ERB.new(HEAD).result(binding)
line_num = 1
sort_prefix0 = sort_prefix()
@txt.each_line{|l|
next if /\A\s*\Z/ =~ l
@err_msg = "error at line: #{line_num}"
x = ERB.new(TMPL)
input_size = @opt["form-size"] || 100
input_type = "algebraic"
qname, qstr, a1, mthd, ext = l.split(/\s*\*\*\s*/).map{|s| s.sub(/\A\s*/, "").sub(/\s*\Z/, "") }
mthd = mthd || "AlgEquiv"
forbidwords = ""
if @opt["sort-prefix"]
ln = "%.2d" % line_num
qname_0 = sort_prefix0 + "-" + ln + "-" + qname
else
qname_0 = qname
end
begin
validate_maxima_exp(a1)
rescue RuntimeError
@err_msg = "validation error of maxima expression at line #{line_num}" + "\n"
raise "\n\n" + @err_msg + l + "\n\n"
end
if is_matrix_type(a1)
input_size = @opt["form-size"] || 15
input_type = "matrix"
end
# workaround for Moodle + MathJax bug
qstr = '\(\,\) ' + inline_tex(qstr)
# teacher's answer == a1 == t_ans1, (prt stands for potential response tree)
# student's answer == ans1
case mthd
when "AlgEquiv", "CasEqualNotAsin"
stack_mthd = mthd
t_ans1 = cdata(a1)
feedbk = feedback(mthd, a1)
if mthd == "CasEqualNotAsin"
stack_mthd = "CasEqual"
forbidwords = ",asin,acos,atan"
end
when "is_same_interval", "is_same_linear_eq", "has_same_nullspace", "has_same_deriv", "does_satisfy"
stack_mthd = "CasEqual"
t_ans1 = cdata(a1)
feedbk = feedback(mthd, a1, ext)
case mthd
when "is_same_linear_eq"
eq_type_check(a1, line_num)
when "has_same_deriv"
stack_mthd = "AlgEquiv"
end
when "is_same_plane"
# plane_type_check(a1, line_num)
stack_mthd = "CasEqual"
t_ans1 = cdata("transpose(matrix(" + a1 + "))")
feedbk = feedback(mthd, a1)
input_size = 15
input_type = "matrix"
when "is_same_diag"
stack_mthd = "CasEqual"
t_ans1 = cdata(a1)
feedbk = feedback(mthd, a1)
input_size = 15
input_type = "matrix"
#
# questions with multiple answers
#
when "multi_eigen_eq"
case mthd
when "multi_eigen_eq"
desc_varnames = [["固有値", "eigenval"], ["重複度", "chofuku"], ["固有空間の次元", "jigen"]]
else
raise
end
input_size = @opt["form-size"] || 15
x = ERB.new(TMPL_multi, nil, '-')
ans_num, ans_dim = multi_ans_num_dim(a1)
multi_ans_check_size(ans_dim, desc_varnames)
ans_nodes = multi_ans_nodes(ans_num, desc_varnames, input_size)
feedbk = multi_feedback(ans_num, desc_varnames)
ans_forms = multi_forms(ans_num, desc_varnames)
when "is_basis_of_same_linear_space", "is_orthonormal_basis_of_same_linear_space"
input_size = @opt["form-size"] || 15
x = ERB.new(TMPL_basis, nil, '-')
basis_type_check(a1, line_num)
dim = basis_dim(a1)
ans_nodes = basis_ans(dim, dim, input_size)
feedbk = basis_feedback(dim, mthd)
ans_forms = basis_forms(dim)
when "is_same_eigenval_and_eigenvec"
input_size = @opt["form-size"] || 15
x = ERB.new(TMPL_eigen, nil, '-')
eigen_val_num, dim = eigen_num_dim(a1)
ans_forms = eigen_forms(eigen_val_num, dim)
feedbk = eigen_feedback(eigen_val_num, dim)
ans_nodes = eigen_ans_nodes(eigen_val_num, dim, input_size)
else
@err_msg = "error at line: #{line_num}"
raise "invalid grading method"
end
ret << x.result(binding)
line_num += 1
}
ret << FOOT
end
def inline_tex(s)
s.gsub(/([^\\]|\A)\$((\\\$|[^\$])*)\$/) { $1 + '\\(' + $2 + '\\)' }
end
def does_hold_mac
<<EOS.chomp
stackqsimp(ex) := ratsimp( radcan( exponentialize(ex) ) );
does_hold(ex) := is( stackqsimp(ex) );
EOS
end
def feedback(mthd, a1, ext="")
fdbk_alart = <<EOS.chomp
listofops(x) := block([], if not atom(x) then cons( op(x), flatten(map(listofops, args(x))) ) else [] );
xyalart_set : intersection({xy, yx}, setify( append(listofvars(ans1), listofops(ans1)) ));
xyalart_elem : if not emptyp( xyalart_set ) then listify(xyalart_set)[1];
xyalart : if not emptyp( xyalart_set ) then 1 else false;
sinalart : if not emptyp( intersection({sin2, sin3, sin4, sin5, cos2, cos3, cos4, cos5, tan2, tan3, tan4, tan5, asin2, asin3, acos2, acos3, atan2, atan3}, setify(listofvars(ans1))) ) then 1 else false;
fxalart_set : intersection({x, y, s, t, fx, fy, fxx, fxy, fyx, fyy}, setify(listofops(ans1)));
fxalart_elem : if not emptyp( fxalart_set ) then listify(fxalart_set)[1];
fxalart : if not emptyp( fxalart_set ) then 1 else false;
#{does_hold_mac}
ans1 : ratsubst(fxy, fyx, ans1);
EOS
case mthd
when "AlgEquiv", "CasEqualNotAsin"
<<EOS.chomp
<![CDATA[
#{fdbk_alart}
a1 : #{esq_cdata(a1)};
result : if does_hold( a1 = ans1 ) then 1 else false;
]]>
EOS
when "has_same_deriv"
<<EOS.chomp
<![CDATA[
#{fdbk_alart}
a1 : #{esq_cdata(a1)};
a1 : diff(a1,x);
ans1 : diff(ans1, x);
result : if does_hold( a1 = ans1 ) then 1 else false;
]]>
EOS
when "does_satisfy"
<<EOS.chomp
<![CDATA[
#{fdbk_alart}
a1 : #{esq_cdata(a1)};
result : if does_hold( #{esq_cdata(ext)} ) then 1 else false;
]]>
EOS
when "is_same_interval"
<<EOS.chomp
<![CDATA[
myargs(xs) := block([as, zzz],as : if atom(xs) then xs else args(xs),if not chk_op(as, xs) then return(zzz),as);
chk_op(as, xs) := block([op1, x],if not( atom(as) ) and not( atom(xs) ) then (if member(x, as) then (op1 : op(xs),return( member(op1, ["and", "or", "<", ">", ">=", "<="]) ))),true);
edges(xs) := block([x],delete(x, flatten( scanmap(myargs, xs))));
xs_in_interval(xs, cond) := block(map(lambda([x], charfun(cond)), xs));
is_same_interval(c1, c2) := block([ret, xs1, xs2, v1, v2, x, m],ret : true,xs1 : edges(c1),xs2 : edges(c2),m : lmax( map(abs, append(xs1, xs2)) ),m : 2*min(max(m, 1), 100),ret : ret and is(xs_in_interval(xs1, c1) = xs_in_interval(xs1, c2)),ret : ret and is(xs_in_interval(xs2, c1) = xs_in_interval(xs2, c2)),if ret then (v1 : quad_qags(charfun(c1), x, -m, m, 'epsrel=10^(-12) )[1],v2 : quad_qags(charfun(c2)*charfun(c1), x, -m, m, 'epsrel=10^(-12) )[1],ret : ret and is(v1 = v2)),ret);
a1 : #{esq_cdata(a1)};
result : if is_same_interval(a1, ans1) then 1 else false;
]]>
EOS
when "is_same_diag"
<<EOS.chomp
<![CDATA[
is_diagonal(m) := block([col_size, row_size],col_size : length(m),row_size : length(m[1]),is(col_size = row_size) and is( m = m * diagmatrix(col_size, 1)));
get_diag_element(m) := block([len, i],len : length(m),maplist(lambda([i], m[i,i]), makelist(i, i, len)));
is_same_diag(a, x) := block([],is_diagonal(a) and is_diagonal(x) and does_hold( sort(get_diag_element(a)) = sort(get_diag_element(x)) ));
#{does_hold_mac}
a1 : #{esq_cdata(a1)};
result : if is_same_diag(a1, ans1) then 1 else false;
]]>
EOS
when "is_same_linear_eq", "is_same_plane", "has_same_nullspace"
ret = ""
ret <<
<<EOS
<![CDATA[
is_same_linear_space(a, x) := block([ret, a0, x0, am, xm, am_dim, i],ret : true,a0 : listify(radcan(a)),x0 : listify(radcan(x)),am : apply(matrix, a0),xm : apply(matrix, x0),ret: ret and is(rank(am) = rank(xm)),if ret then (am_dim : rank(am),for i:1 thru length(x0) do (m : apply(matrix, cons(x0[i], a0)),ret : ret and is(rank(m) = am_dim))),ret);
basis_of_plane(v) := block([params],params : listofvars(v),map(lambda([v1], diff(v, v1)), params));
pos_of_plane(v) := block([v0 : v, params, i],params : listofvars(v),for i:1 thru length(params) do v0 : subst(0, params[i], v0),v0);
is_on_plane(p, v) := block([eq],eq : map("=", makelist(0, length(v)), v-p),is(not ([] = solve(eq, listofvars(v)))));
is_same_plane(v1, v2) := block([b1, b2, p1, p2, ret : true],b1 : basis_of_plane(v1),b2 : basis_of_plane(v2),ret : ret and is_same_linear_space(b1, b2),ret : ret and is_on_plane(pos_of_plane(v1), v2),ret : ret and is_on_plane(pos_of_plane(v2), v1));
eq_to_param(eq) := block([params, tmp],eq : listify(eq),params : sort(listofvars(eq)),tmp : solve(eq, params),subst(tmp[1], params));
is_same_linear_eq(eq1, eq2) := block([pa1, pa2],pa1 : eq_to_param(eq1),pa2 : eq_to_param(eq2),is_same_plane(pa1, pa2));
EOS
# ]]> should be added in the following.
case mthd
when "is_same_linear_eq"
ret <<
<<EOS.chomp
a1 : #{esq_cdata(a1)};
result : if is_same_linear_eq(a1, ans1) then 1 else false;
]]>
EOS
when "is_same_plane"
ret <<
<<EOS.chomp
a1 : #{esq_cdata(a1)};
ans1 : list_matrix_entries(ans1);
result : if is_same_plane(a1, ans1) then 1 else false;
]]>
EOS
when "has_same_nullspace"
ret <<
<<EOS.chomp
a1 : #{esq_cdata(a1)};
result : if is_same_linear_space(args(a1), args(ans1)) then 1 else false;
]]>
EOS
end
ret
else
""
end
end
def validate_maxima_exp(s)
tmp = s
until tmp == " XXX "
prev = tmp
tmp = tmp.gsub(/(?<=\A|[\(\[\{,]|and|or|not)\s*-?(\s*([a-zA-Z]\w*|\d+|%e|%pi|%i)\s*([\*\+\-\^\/\=]|[\>\<]=?))*\s*([a-zA-Z]\w*|\d+|%e|%pi|%i)\s*(?=\z|[\)\]\},]|and|or)/, " XXX ")
5.times{
tmp = tmp.gsub(/(?!(and|or|not)\s*\()([a-z]{3,})\s*\(( XXX ,)* XXX \)/, " XXX ")
}
tmp = tmp.gsub(/\( XXX \)/, " XXX ")
tmp = tmp.gsub(/\[( XXX ,)* XXX \]/, " XXX ")
tmp = tmp.gsub(/\{( XXX ,)* XXX \}/, " XXX ")
tmp = tmp.gsub(/ XXX (and|or) XXX /, " XXX ")
tmp = tmp.gsub(/not XXX/, " XXX ")
if tmp == prev
raise s
end
end
return true
end
def cdata(s)
"<![CDATA[" + esq_cdata(s) + "]]>"
end
def esq_cdata(s)
(s || "").gsub("]]>", "]]]]><![CDATA[>")
end
def is_matrix_type(a)
if /\Amatrix/ =~ a
a = a.gsub(/\s+/, "")
7.times{
a = a.gsub(/\([^\(\)]*\)/, "")
}
"matrix" == a
else
false
end
end
def sort_prefix
today = Time.now
if [1, 2, 3].include?( today.month )
num = Time.new(today.year, 4, 1) - today
else
num = Time.new(today.year+1, 4, 1) - today
end
num = num.round / (60*60)
"%.4d" % num
end
def multi_ans_num_dim(s)
vecs = []
arry = s.scan(/\[.*?\]/)
arry.each{|e|
vecs << e.split(",")
}
vecs_sizes = vecs.map{|e| e.size }
unless vecs_sizes.uniq.size == 1
raise "the dims of eigen vectors are not the same"
end
return *[arry.size, vecs_sizes[0]]
end
def multi_ans_check_size(ans_dim, desc_varnames)
unless ans_dim == desc_varnames.size
raise "ans_dim and the size of desc_varnames are not the same"
end
end
def multi_ans_nodes(ans_num, desc_varnames, input_size = 15)
ret = ""
(1..ans_num).each{|i|
desc_varnames.each{|desc0, name0|
ret << multi_val_nodes(name0, i, input_size)
}
}
ret
end
def multi_val_nodes(name, i, input_size)
ERB.new(<<HERE, nil, '-').result(binding)
<input>
<name><%= varname_0(name, i) %></name>
<type>algebraic</type>
<tans>1</tans>
<boxsize><%= input_size %></boxsize>
<strictsyntax>1</strictsyntax>
<insertstars>0</insertstars>
<syntaxhint></syntaxhint>
<forbidwords>[[BASIC-ALGEBRA]],[[BASIC-CALCULUS]],[[BASIC-MATRIX]] </forbidwords>
<allowwords></allowwords>
<forbidfloat>1</forbidfloat>
<requirelowestterms>0</requirelowestterms>
<checkanswertype>0</checkanswertype>
<mustverify>1</mustverify>
<showvalidation>1</showvalidation>
<options></options>
</input>
HERE
end
def varname_0(name, idx)
"#{name}_#{idx}"
end
def multi_forms(ans_num, desc_varnames)
ERB.new(<<HERE, nil, '-').result(binding)
<% (1..ans_num).each do |idx| %>
<p>
<% desc_varnames.each do |desc0, name0| -%>
<%=h desc0 %> [[input:<%= varname_0(name0, idx) %>]]
<% end -%>
</p>
<div>
<% desc_varnames.each do |desc0, name0| -%>
[[validation:<%= varname_0(name0, idx) %>]]
<% end -%>
</div>
<br><br>
<% end -%>
HERE
end
def multi_feedback(ans_num, desc_varnames)
ERB.new(<<HERE, nil, '-').result(binding).chomp
<![CDATA[
#{does_hold_mac}
sans1 : stackqsimp([<%= (1..ans_num).map{|idx| "[" + desc_varnames.map{|desc0, name0| varname_0(name0, idx) }.join(", ") + "]" }.join(",") %>]);
ith : 0;
result : is(<%= ans_num %> = length(unique(sans1)));
<% (1..ans_num).each do |idx| -%>
ith : if result then ith + 1 else ith;
sans0 : [<%= desc_varnames.map{|desc0, name0| varname_0(name0, idx) }.join(", ") %>];
result : result and some(lambda([x], does_hold(sans0 = x)), k1);
<% end -%>
]]>
HERE
end
def eigen_num_dim(s)
vecs = []
arry = s.scan(/\[(.*?), \[\s*((?:\[.*?\],?)+)\s*\]\s*\]/)
arry.each{|e|
vecs += e[1].scan(/\[.*?\]/).map{|s| s.split(",") }
}
vecs_sizes = vecs.map{|e| e.size }
unless vecs_sizes.uniq.size == 1
raise "the dims of eigen vectors are not the same"
end
dim = vecs_sizes[0]
eigen_val_num = arry.size
return *[eigen_val_num, dim]
end
def n_join(n, str, sp = ", ")
(1..n).map{|i| str % i }.join(sp)
end
def eigen_feedback(eigen_val_num, dim)
ans_vals = n_join(eigen_val_num, "ans_val%d") # ans_val == ans_eigenval
large_Ns = n_join(dim, "N", ", ")
ERB.new(<<HERE, nil, '-').result(binding).chomp
<![CDATA[
<%= basis_feedback_0() %>
ith : 0;
result : is(<%= eigen_val_num %> = length(unique([<%= ans_vals %>])));
ith : if result then ith + 1 else ith;
<%- (1..eigen_val_num).each do |i| -%>
vec<%= i %> : delete([<%= large_Ns %>], maplist(list_matrix_entries, [<%= n_join(dim, "ans%d_%%d" % i) %>]));
kvec<%= i %> : assoc(ans_val<%= i %>, k1);
result : result and listp(kvec<%= i %>) and is_basis(vec<%= i %>) and is_same_linear_space(kvec<%= i %>, vec<%= i %>);
ith : if result then ith + 1 else ith;
<%- end -%>
]]>
HERE
end
def eigen_val_nodes(i)
ERB.new(<<HERE, nil, '-').result(binding)
<input>
<name>ans_val<%= i %></name>
<type>algebraic</type>
<tans>1</tans>
<boxsize>15</boxsize>
<strictsyntax>1</strictsyntax>
<insertstars>0</insertstars>
<syntaxhint></syntaxhint>
<forbidwords>[[BASIC-ALGEBRA]],[[BASIC-CALCULUS]],[[BASIC-MATRIX]] </forbidwords>
<allowwords></allowwords>
<forbidfloat>1</forbidfloat>
<requirelowestterms>0</requirelowestterms>
<checkanswertype>0</checkanswertype>
<mustverify>1</mustverify>
<showvalidation>1</showvalidation>
<options></options>
</input>
HERE
end
def eigen_ans_nodes(eigen_val_num, dim, input_size)
ret = ""
(1..eigen_val_num).each{|i|
ret << eigen_val_nodes(i)
ret << basis_ans(dim, dim, input_size, "#{i}_")
}
ret
end
def eigen_forms(eigen_val_num, dim)
ret = ""
(1..eigen_val_num).each{|idx|
ans = n_join(dim, "[[input:ans#{idx}_%d]]", " ")
valid = "[[validation:ans_val#{idx}]] "
valid += n_join(dim, "[[validation:ans#{idx}_%d]]", " ")
ret << ERB.new(<<HERE, nil, '-').result(binding)
<p> 固有値 [[input:ans_val<%= idx %>]] それに対する固有ベクトルは次のベクトルの1次結合である <%= ans %></p>
<div><%= valid %></div><br><br>
HERE
}
ret
end
def basis_dim(s)
if m = s.match(/\[([^\[\]]*?)\]/)
$1.split(",").size
end
end
def basis_forms(dim)
ret = ERB.new(<<HERE, nil, '-').result(binding).chomp
<p> <%= basis_ans_form(dim) %></p>
<div><%= basis_validation_form(dim) %></div>
HERE
end
def basis_ans_form(dim)
n_join(dim, "[[input:ans%d]]", " ")
end
def basis_validation_form(dim)
n_join(dim, "[[validation:ans%d]]", " ")
end
def basis_ans(n, dim, input_size, prefix="")
ERB.new(<<HERE, nil, '-').result(binding)
<%- (1..n).each do |i| -%>
<input>
<name>ans<%= prefix %><%= i %></name>
<type>matrix</type>
<tans>matrix(<%= n_join(dim, "[1]", ",") %>)</tans>
<boxsize><%= input_size %></boxsize>
<strictsyntax>1</strictsyntax>
<insertstars>0</insertstars>
<syntaxhint></syntaxhint>
<forbidwords>[[BASIC-ALGEBRA]],[[BASIC-CALCULUS]],[[BASIC-MATRIX]] </forbidwords>
<allowwords></allowwords>
<forbidfloat>1</forbidfloat>
<requirelowestterms>0</requirelowestterms>
<checkanswertype>0</checkanswertype>
<mustverify>1</mustverify>
<showvalidation>1</showvalidation>
<options></options>
</input>
<%- end -%>
HERE
end
def basis_feedback_0
<<HERE.chomp
#{does_hold_mac}
is_same_linear_space(a, x) := block([ret, a0, x0, am, xm, am_dim, i],ret : true,a0 : listify(radcan(a)),x0 : listify(radcan(x)),am : apply(matrix, a0),xm : apply(matrix, x0),ret: ret and is(rank(am) = rank(xm)),if ret then (am_dim : rank(am),for i:1 thru length(x0) do (m : apply(matrix, cons(x0[i], a0)),ret : ret and is(rank(m) = am_dim))),ret);
is_basis(x) := block([xm],xm : apply(matrix, x),is( rank(xm) = length(x) ));
is_orthonormal_basis(x) := block([xm],xm : apply(matrix, radcan(x)),does_hold( ident(length(x)) = xm.(conjugate(transpose(xm))) ));
HERE
end
# Builds the Maxima feedback code (inside CDATA) for a basis-type
# question of dimension +dim+. +mthd+ selects the structural check
# applied in addition to the span comparison:
#   "is_basis_of_same_linear_space"             -> is_basis
#   "is_orthonormal_basis_of_same_linear_space" -> is_orthonormal_basis
# Any other value raises RuntimeError. The generated code gathers the
# student's vectors into b1, deletes any vector equal to the
# [N, ..., N] placeholder, and compares against k1 — presumably the
# teacher's vector list defined in the question variables (not visible
# here; confirm against the question template).
def basis_feedback(dim, mthd)
b1 = n_join(dim, "list_matrix_entries(ans%d)", ", ") # b1 == ans1 == student's answer
large_Ns = n_join(dim, "N", ", ")
# Name of the extra Maxima predicate spliced into the template below.
basis_chk =
case mthd
when "is_basis_of_same_linear_space"
"is_basis"
when "is_orthonormal_basis_of_same_linear_space"
"is_orthonormal_basis"
else
raise
end
ERB.new(<<HERE, nil, '-').result(binding).chomp
<![CDATA[
<%= basis_feedback_0() %>
b1 : delete([<%= large_Ns %>], [<%= b1 %>]);
result : if is_same_linear_space(k1, b1) and <%= basis_chk %>(b1) then true else false;
]]>
HERE
end
# Validates that +s+ is a list of bracketed vectors, e.g.
# "[[a,b],[c,d]]", and that every vector has the same number of
# entries. On failure records a message in @err_msg and raises
# RuntimeError("invalid answer type").
def basis_type_check(s, line_num)
  if /\A\[(\[[^\[\]]*?\],?\s*)*\]\Z/ !~ s
    @err_msg = "error at line: #{line_num}\ninvalid answer type"
    raise "invalid answer type"
  end
  vectors = s.scan(/\[[^\[\]]*?\]/)
  expected = vectors.first.split(",").size
  vectors.each do |vec|
    next if vec.split(",").size == expected
    @err_msg = "error at line: #{line_num}\ndimensions of basis are different"
    raise "invalid answer type"
  end
end
# Validates that +s+ is a single flat bracketed vector, e.g.
# "[a,b,c]" (no nested brackets). Records @err_msg and raises
# RuntimeError otherwise.
def plane_type_check(s, line_num)
  return if /\A\[[^\[\]]*?\]\z/ =~ s
  @err_msg = "error at line: #{line_num}\ninvalid answer type"
  raise "invalid answer type"
end
# Validates that +s+ is a single flat braced set, e.g. "{x=1, y=2}"
# (no nested braces). Records @err_msg and raises RuntimeError
# otherwise.
def eq_type_check(s, line_num)
  return if /\A\{[^\{\}]*?\}\z/ =~ s
  @err_msg = "error at line: #{line_num}\ninvalid answer type"
  raise "invalid answer type"
end
end
minor edit
# -*- coding: utf-8 -*-
require 'erb'
require 'stack_q_tmpl'
class STACK_Q
include ERB::Util
# s:   question source text, one question definition per line.
# opt: string-keyed option hash; keys read elsewhere in this class are
#      "category", "form-size" and "sort-prefix".
def initialize(s, opt = {})
@txt = s
@err_msg = ""
@category = opt["category"] || "stack_q"
@opt = opt
end
attr_reader :err_msg
def txt2xml
ret = ""
ret << ERB.new(HEAD).result(binding)
line_num = 1
sort_prefix0 = sort_prefix()
@txt.each_line{|l|
next if /\A\s*\Z/ =~ l
@err_msg = "error at line: #{line_num}"
x = ERB.new(TMPL)
input_size = @opt["form-size"] || 100
input_type = "algebraic"
qname, qstr, a1, mthd, ext = l.split(/\s*\*\*\s*/).map{|s| s.sub(/\A\s*/, "").sub(/\s*\Z/, "") }
mthd = mthd || "AlgEquiv"
forbidwords = ""
if @opt["sort-prefix"]
ln = "%.2d" % line_num
qname_0 = sort_prefix0 + "-" + ln + "-" + qname
else
qname_0 = qname
end
begin
validate_maxima_exp(a1)
rescue RuntimeError
@err_msg = "validation error of maxima expression at line #{line_num}" + "\n"
raise "\n\n" + @err_msg + l + "\n\n"
end
if is_matrix_type(a1)
input_size = @opt["form-size"] || 15
input_type = "matrix"
end
# workaround for Moodle + MathJax bug
qstr = '\(\,\) ' + inline_tex(qstr)
# teacher's answer == a1 == t_ans1, (prt stands for potential response tree)
# student's answer == ans1
case mthd
when "AlgEquiv", "CasEqualNotAsin"
stack_mthd = mthd
t_ans1 = cdata(a1)
feedbk = feedback(mthd, a1)
if mthd == "CasEqualNotAsin"
stack_mthd = "CasEqual"
forbidwords = ",asin,acos,atan"
end
when "is_same_interval", "is_same_linear_eq", "has_same_nullspace", "has_same_deriv", "does_satisfy"
stack_mthd = "CasEqual"
t_ans1 = cdata(a1)
feedbk = feedback(mthd, a1, ext)
case mthd
when "is_same_linear_eq"
eq_type_check(a1, line_num)
when "has_same_deriv"
stack_mthd = "AlgEquiv"
end
when "is_same_plane"
# plane_type_check(a1, line_num)
stack_mthd = "CasEqual"
t_ans1 = cdata("transpose(matrix(" + a1 + "))")
feedbk = feedback(mthd, a1)
input_size = 15
input_type = "matrix"
when "is_same_diag"
stack_mthd = "CasEqual"
t_ans1 = cdata(a1)
feedbk = feedback(mthd, a1)
input_size = 15
input_type = "matrix"
#
# questions with multiple answers
#
when "multi_eigen_eq"
case mthd
when "multi_eigen_eq"
desc_varnames = [["固有値", "eigenval"], ["重複度", "chofuku"], ["固有空間の次元", "jigen"]]
else
raise
end
input_size = @opt["form-size"] || 15
x = ERB.new(TMPL_multi, nil, '-')
ans_num, ans_dim = multi_ans_num_dim(a1)
multi_ans_check_size(ans_dim, desc_varnames)
ans_nodes = multi_ans_nodes(ans_num, desc_varnames, input_size)
feedbk = multi_feedback(ans_num, desc_varnames)
ans_forms = multi_forms(ans_num, desc_varnames)
when "is_basis_of_same_linear_space", "is_orthonormal_basis_of_same_linear_space"
input_size = @opt["form-size"] || 15
x = ERB.new(TMPL_basis, nil, '-')
basis_type_check(a1, line_num)
dim = basis_dim(a1)
ans_nodes = basis_ans(dim, dim, input_size)
feedbk = basis_feedback(dim, mthd)
ans_forms = basis_forms(dim)
when "is_same_eigenval_and_eigenvec"
input_size = @opt["form-size"] || 15
x = ERB.new(TMPL_eigen, nil, '-')
eigen_val_num, dim = eigen_num_dim(a1)
ans_forms = eigen_forms(eigen_val_num, dim)
feedbk = eigen_feedback(eigen_val_num, dim)
ans_nodes = eigen_ans_nodes(eigen_val_num, dim, input_size)
else
@err_msg = "error at line: #{line_num}"
raise "invalid grading method"
end
ret << x.result(binding)
line_num += 1
}
ret << FOOT
end
# Rewrites inline TeX delimiters: every unescaped $...$ pair becomes
# \(...\). A dollar preceded by a backslash does not open a pair, and
# escaped dollars (\$) inside the math text are allowed.
def inline_tex(s)
  s.gsub(/([^\\]|\A)\$((\\\$|[^\$])*)\$/) do
    "#{Regexp.last_match(1)}\\(#{Regexp.last_match(2)}\\)"
  end
end
# Maxima helpers shared by all generated feedback code: stackqsimp
# normalizes an expression (exponentialize, then radcan, then
# ratsimp) and does_hold tests whether a relation holds after that
# normalization. Returned as a Maxima source fragment without a
# trailing newline.
def does_hold_mac
<<EOS.chomp
stackqsimp(ex) := ratsimp( radcan( exponentialize(ex) ) );
does_hold(ex) := is( stackqsimp(ex) );
EOS
end
def feedback(mthd, a1, ext="")
fdbk_alart = <<EOS.chomp
listofops(x) := block([], if not atom(x) then cons( op(x), flatten(map(listofops, args(x))) ) else [] );
xyalart_set : intersection({xy, yx}, setify( append(listofvars(ans1), listofops(ans1)) ));
xyalart_elem : if not emptyp( xyalart_set ) then listify(xyalart_set)[1];
xyalart : if not emptyp( xyalart_set ) then 1 else false;
sinalart : if not emptyp( intersection({sin2, sin3, sin4, sin5, cos2, cos3, cos4, cos5, tan2, tan3, tan4, tan5, asin2, asin3, acos2, acos3, atan2, atan3}, setify(listofvars(ans1))) ) then 1 else false;
fxalart_set : intersection({x, y, s, t, fx, fy, fxx, fxy, fyx, fyy}, setify(listofops(ans1)));
fxalart_elem : if not emptyp( fxalart_set ) then listify(fxalart_set)[1];
fxalart : if not emptyp( fxalart_set ) then 1 else false;
#{does_hold_mac}
ans1 : ratsubst(fxy, fyx, ans1);
EOS
case mthd
when "AlgEquiv", "CasEqualNotAsin"
<<EOS.chomp
<![CDATA[
#{fdbk_alart}
a1 : #{esq_cdata(a1)};
result : if does_hold( a1 = ans1 ) then 1 else false;
]]>
EOS
when "has_same_deriv"
<<EOS.chomp
<![CDATA[
#{fdbk_alart}
a1 : #{esq_cdata(a1)};
a1 : diff(a1,x);
ans1 : diff(ans1, x);
result : if does_hold( a1 = ans1 ) then 1 else false;
]]>
EOS
when "does_satisfy"
<<EOS.chomp
<![CDATA[
#{fdbk_alart}
a1 : #{esq_cdata(a1)};
result : if does_hold( #{esq_cdata(ext)} ) then 1 else false;
]]>
EOS
when "is_same_interval"
<<EOS.chomp
<![CDATA[
myargs(xs) := block([as, zzz],as : if atom(xs) then xs else args(xs),if not chk_op(as, xs) then return(zzz),as);
chk_op(as, xs) := block([op1, x],if not( atom(as) ) and not( atom(xs) ) then (if member(x, as) then (op1 : op(xs),return( member(op1, ["and", "or", "<", ">", ">=", "<="]) ))),true);
edges(xs) := block([x],delete(x, flatten( scanmap(myargs, xs))));
xs_in_interval(xs, cond) := block(map(lambda([x], charfun(cond)), xs));
is_same_interval(c1, c2) := block([ret, xs1, xs2, v1, v2, x, m],ret : true,xs1 : edges(c1),xs2 : edges(c2),m : lmax( map(abs, append(xs1, xs2)) ),m : 2*min(max(m, 1), 100),ret : ret and is(xs_in_interval(xs1, c1) = xs_in_interval(xs1, c2)),ret : ret and is(xs_in_interval(xs2, c1) = xs_in_interval(xs2, c2)),if ret then (v1 : quad_qags(charfun(c1), x, -m, m, 'epsrel=10^(-12) )[1],v2 : quad_qags(charfun(c2)*charfun(c1), x, -m, m, 'epsrel=10^(-12) )[1],ret : ret and is(v1 = v2)),ret);
a1 : #{esq_cdata(a1)};
result : if is_same_interval(a1, ans1) then 1 else false;
]]>
EOS
when "is_same_diag"
<<EOS.chomp
<![CDATA[
is_diagonal(m) := block([col_size, row_size],col_size : length(m),row_size : length(m[1]),is(col_size = row_size) and is( m = m * diagmatrix(col_size, 1)));
get_diag_element(m) := block([len, i],len : length(m),maplist(lambda([i], m[i,i]), makelist(i, i, len)));
is_same_diag(a, x) := block([],is_diagonal(a) and is_diagonal(x) and does_hold( sort(get_diag_element(a)) = sort(get_diag_element(x)) ));
#{does_hold_mac}
a1 : #{esq_cdata(a1)};
result : if is_same_diag(a1, ans1) then 1 else false;
]]>
EOS
when "is_same_linear_eq", "is_same_plane", "has_same_nullspace"
ret = ""
ret <<
<<EOS
<![CDATA[
is_same_linear_space(a, x) := block([ret, a0, x0, am, xm, am_dim, i],ret : true,a0 : listify(radcan(a)),x0 : listify(radcan(x)),am : apply(matrix, a0),xm : apply(matrix, x0),ret: ret and is(rank(am) = rank(xm)),if ret then (am_dim : rank(am),for i:1 thru length(x0) do (m : apply(matrix, cons(x0[i], a0)),ret : ret and is(rank(m) = am_dim))),ret);
basis_of_plane(v) := block([params],params : listofvars(v),map(lambda([v1], diff(v, v1)), params));
pos_of_plane(v) := block([v0 : v, params, i],params : listofvars(v),for i:1 thru length(params) do v0 : subst(0, params[i], v0),v0);
is_on_plane(p, v) := block([eq],eq : map("=", makelist(0, length(v)), v-p),is(not ([] = solve(eq, listofvars(v)))));
is_same_plane(v1, v2) := block([b1, b2, p1, p2, ret : true],b1 : basis_of_plane(v1),b2 : basis_of_plane(v2),ret : ret and is_same_linear_space(b1, b2),ret : ret and is_on_plane(pos_of_plane(v1), v2),ret : ret and is_on_plane(pos_of_plane(v2), v1));
eq_to_param(eq) := block([params, tmp],eq : listify(eq),params : sort(listofvars(eq)),tmp : solve(eq, params),subst(tmp[1], params));
is_same_linear_eq(eq1, eq2) := block([pa1, pa2],pa1 : eq_to_param(eq1),pa2 : eq_to_param(eq2),is_same_plane(pa1, pa2));
EOS
# ]]> should be added in the following.
case mthd
when "is_same_linear_eq"
ret <<
<<EOS.chomp
a1 : #{esq_cdata(a1)};
result : if is_same_linear_eq(a1, ans1) then 1 else false;
]]>
EOS
when "is_same_plane"
ret <<
<<EOS.chomp
a1 : #{esq_cdata(a1)};
ans1 : list_matrix_entries(ans1);
result : if is_same_plane(a1, ans1) then 1 else false;
]]>
EOS
when "has_same_nullspace"
ret <<
<<EOS.chomp
a1 : #{esq_cdata(a1)};
result : if is_same_linear_space(args(a1), args(ans1)) then 1 else false;
]]>
EOS
end
ret
else
""
end
end
# Sanity-checks that +s+ looks like a well-formed Maxima expression by
# repeatedly collapsing recognizable fragments to the sentinel " XXX ":
# operator chains of identifiers/numbers/constants, function calls,
# parenthesised groups, lists, sets, and and/or/not combinations.
# If a full pass makes no progress before the whole string has
# collapsed to " XXX ", the input is rejected by raising +s+ itself
# (as a RuntimeError message). Returns true on success.
def validate_maxima_exp(s)
tmp = s
until tmp == " XXX "
prev = tmp
# Collapse a maximal run of atoms (identifiers, integers, %e/%pi/%i)
# joined by arithmetic/comparison operators, delimited by start/end,
# brackets, commas, or boolean keywords.
tmp = tmp.gsub(/(?<=\A|[\(\[\{,]|and|or|not)\s*-?(\s*([a-zA-Z]\w*|\d+|%e|%pi|%i)\s*([\*\+\-\^\/\=]|[\>\<]=?))*\s*([a-zA-Z]\w*|\d+|%e|%pi|%i)\s*(?=\z|[\)\]\},]|and|or)/, " XXX ")
# Collapse function calls f(XXX, ...) — up to 5 nesting levels per
# pass; and/or/not are excluded so boolean groups are not eaten here.
5.times{
tmp = tmp.gsub(/(?!(and|or|not)\s*\()([a-z]{3,})\s*\(( XXX ,)* XXX \)/, " XXX ")
}
# Parenthesised group, list literal, and set literal made of sentinels.
tmp = tmp.gsub(/\( XXX \)/, " XXX ")
tmp = tmp.gsub(/\[( XXX ,)* XXX \]/, " XXX ")
tmp = tmp.gsub(/\{( XXX ,)* XXX \}/, " XXX ")
# Boolean combinations of already-collapsed sub-expressions.
tmp = tmp.gsub(/ XXX (and|or) XXX /, " XXX ")
tmp = tmp.gsub(/not XXX/, " XXX ")
# A pass that rewrites nothing means the remainder is irreducible,
# i.e. not a valid expression.
if tmp == prev
raise s
end
end
return true
end
# Wraps +s+ in an XML CDATA section, escaping any embedded "]]>"
# terminator via esq_cdata.
def cdata(s)
  "<![CDATA[#{esq_cdata(s)}]]>"
end
# Makes +s+ safe for embedding inside a CDATA section by splitting any
# literal "]]>" across two adjacent CDATA sections. nil is treated as
# the empty string.
def esq_cdata(s)
  text = s || ""
  text.gsub("]]>", "]]]]><![CDATA[>")
end
# Heuristically decides whether the Maxima expression +a+ is a bare
# matrix literal: it must start with "matrix" and reduce to exactly
# "matrix" after stripping whitespace and peeling up to 7 levels of
# nested parenthesised groups. Returns false for anything else
# (e.g. "transpose(matrix(...))" or plain algebra).
def is_matrix_type(a)
  return false unless /\Amatrix/ =~ a
  stripped = a.gsub(/\s+/, "")
  7.times { stripped = stripped.gsub(/\([^\(\)]*\)/, "") }
  stripped == "matrix"
end
# Hours remaining until the next April 1st (start of the Japanese
# school year), zero-padded to at least four digits. Used as a
# sortable question-name prefix so later-created questions get
# smaller prefixes.
def sort_prefix
  now = Time.now
  next_april =
    if (1..3).cover?(now.month)
      Time.new(now.year, 4, 1)
    else
      Time.new(now.year + 1, 4, 1)
    end
  hours = (next_april - now).round / (60 * 60)
  "%.4d" % hours
end
# Parses a multi-answer spec such as "[1,2,3],[4,5,6]" and returns
# [number_of_vectors, entries_per_vector]. Raises RuntimeError when
# the vectors have differing lengths (or when no vector is found).
def multi_ans_num_dim(s)
  groups = s.scan(/\[.*?\]/)
  sizes = groups.map { |g| g.split(",").size }
  raise "the dims of eigen vectors are not the same" if sizes.uniq.size != 1
  [groups.size, sizes.first]
end
# Ensures the per-answer dimension parsed from the teacher's answer
# matches the number of described fields; raises RuntimeError on
# mismatch.
def multi_ans_check_size(ans_dim, desc_varnames)
  return if ans_dim == desc_varnames.size
  raise "ans_dim and the size of desc_varnames are not the same"
end
def multi_ans_nodes(ans_num, desc_varnames, input_size = 15)
ret = ""
(1..ans_num).each{|i|
desc_varnames.each{|desc0, name0|
ret << multi_val_nodes(name0, i, input_size)
}
}
ret
end
def multi_val_nodes(name, i, input_size)
ERB.new(<<HERE, nil, '-').result(binding)
<input>
<name><%= varname(name, i) %></name>
<type>algebraic</type>
<tans>1</tans>
<boxsize><%= input_size %></boxsize>
<strictsyntax>1</strictsyntax>
<insertstars>0</insertstars>
<syntaxhint></syntaxhint>
<forbidwords>[[BASIC-ALGEBRA]],[[BASIC-CALCULUS]],[[BASIC-MATRIX]] </forbidwords>
<allowwords></allowwords>
<forbidfloat>1</forbidfloat>
<requirelowestterms>0</requirelowestterms>
<checkanswertype>0</checkanswertype>
<mustverify>1</mustverify>
<showvalidation>1</showvalidation>
<options></options>
</input>
HERE
end
# Joins an input's base name and its 1-based index into the form
# "name_idx", the naming scheme used by all multi-answer inputs.
def varname(name, idx)
  [name, idx].join("_")
end
def multi_forms(ans_num, desc_varnames)
ERB.new(<<HERE, nil, '-').result(binding)
<% (1..ans_num).each do |idx| %>
<p>
<% desc_varnames.each do |desc0, name0| -%>
<%=h desc0 %> [[input:<%= varname(name0, idx) %>]]
<% end -%>
</p>
<div>
<% desc_varnames.each do |desc0, name0| -%>
[[validation:<%= varname(name0, idx) %>]]
<% end -%>
</div>
<br><br>
<% end -%>
HERE
end
def multi_feedback(ans_num, desc_varnames)
ERB.new(<<HERE, nil, '-').result(binding).chomp
<![CDATA[
#{does_hold_mac}
sans1 : stackqsimp([<%= (1..ans_num).map{|idx| "[" + desc_varnames.map{|desc0, name0| varname(name0, idx) }.join(", ") + "]" }.join(",") %>]);
ith : 0;
result : is(<%= ans_num %> = length(unique(sans1)));
<% (1..ans_num).each do |idx| -%>
ith : if result then ith + 1 else ith;
sans0 : [<%= desc_varnames.map{|desc0, name0| varname(name0, idx) }.join(", ") %>];
result : result and some(lambda([x], does_hold(sans0 = x)), k1);
<% end -%>
]]>
HERE
end
# Parses an eigen-answer spec of the form
#   "[val1, [[v11,v12],...]], [val2, [...]], ..."
# and returns [number_of_eigenvalues, vector_dimension]. Raises
# RuntimeError when the eigenvectors do not all share the same
# dimension (or when nothing parses).
def eigen_num_dim(s)
  entries = s.scan(/\[(.*?), \[\s*((?:\[.*?\],?)+)\s*\]\s*\]/)
  sizes = entries.flat_map do |_val, vec_list|
    vec_list.scan(/\[.*?\]/).map { |v| v.split(",").size }
  end
  raise "the dims of eigen vectors are not the same" if sizes.uniq.size != 1
  [entries.size, sizes.first]
end
# Formats +str+ (a format string, typically containing one %d slot)
# for each i in 1..n and joins the results with +sp+. A +str+ with no
# %d slot is simply repeated. Returns "" when n is zero.
def n_join(n, str, sp = ", ")
  parts = []
  1.upto(n) { |i| parts << (str % i) }
  parts.join(sp)
end
def eigen_feedback(eigen_val_num, dim)
ans_vals = n_join(eigen_val_num, "ans_val%d") # ans_val == ans_eigenval
large_Ns = n_join(dim, "N", ", ")
ERB.new(<<HERE, nil, '-').result(binding).chomp
<![CDATA[
<%= basis_feedback_lib_mac() %>
ith : 0;
result : is(<%= eigen_val_num %> = length(unique([<%= ans_vals %>])));
ith : if result then ith + 1 else ith;
<%- (1..eigen_val_num).each do |i| -%>
vec<%= i %> : delete([<%= large_Ns %>], maplist(list_matrix_entries, [<%= n_join(dim, "ans%d_%%d" % i) %>]));
kvec<%= i %> : assoc(ans_val<%= i %>, k1);
result : result and listp(kvec<%= i %>) and is_basis(vec<%= i %>) and is_same_linear_space(kvec<%= i %>, vec<%= i %>);
ith : if result then ith + 1 else ith;
<%- end -%>
]]>
HERE
end
def eigen_val_nodes(i)
ERB.new(<<HERE, nil, '-').result(binding)
<input>
<name>ans_val<%= i %></name>
<type>algebraic</type>
<tans>1</tans>
<boxsize>15</boxsize>
<strictsyntax>1</strictsyntax>
<insertstars>0</insertstars>
<syntaxhint></syntaxhint>
<forbidwords>[[BASIC-ALGEBRA]],[[BASIC-CALCULUS]],[[BASIC-MATRIX]] </forbidwords>
<allowwords></allowwords>
<forbidfloat>1</forbidfloat>
<requirelowestterms>0</requirelowestterms>
<checkanswertype>0</checkanswertype>
<mustverify>1</mustverify>
<showvalidation>1</showvalidation>
<options></options>
</input>
HERE
end
def eigen_ans_nodes(eigen_val_num, dim, input_size)
ret = ""
(1..eigen_val_num).each{|i|
ret << eigen_val_nodes(i)
ret << basis_ans(dim, dim, input_size, "#{i}_")
}
ret
end
def eigen_forms(eigen_val_num, dim)
ret = ""
(1..eigen_val_num).each{|idx|
ans = n_join(dim, "[[input:ans#{idx}_%d]]", " ")
valid = "[[validation:ans_val#{idx}]] "
valid += n_join(dim, "[[validation:ans#{idx}_%d]]", " ")
ret << ERB.new(<<HERE, nil, '-').result(binding)
<p> 固有値 [[input:ans_val<%= idx %>]] それに対する固有ベクトルは次のベクトルの1次結合である <%= ans %></p>
<div><%= valid %></div><br><br>
HERE
}
ret
end
# Returns the number of comma-separated entries in the first
# innermost bracket group of +s+ (i.e. the dimension of one basis
# vector), or nil when +s+ contains no bracket group at all.
def basis_dim(s)
  match = s.match(/\[([^\[\]]*?)\]/)
  match && match[1].split(",").size
end
# Renders the HTML fragment holding the answer-input placeholders and
# their validation area for a basis question of dimension +dim+.
# Returns the rendered markup with the trailing newline removed.
# (Fixed: the previous version assigned the result to a local `ret`
# that was never read — the implicit return already covered it.)
def basis_forms(dim)
  ERB.new(<<HERE, nil, '-').result(binding).chomp
<p> <%= basis_ans_form(dim) %></p>
<div><%= basis_validation_form(dim) %></div>
HERE
end
# Emits one "[[input:ansN]]" STACK input placeholder per basis vector
# (N = 1..dim), separated by single spaces.
def basis_ans_form(dim)
  (1..dim).map { |i| format("[[input:ans%d]]", i) }.join(" ")
end
# Emits one "[[validation:ansN]]" STACK validation placeholder per
# basis vector (N = 1..dim), separated by single spaces.
def basis_validation_form(dim)
  (1..dim).map { |i| format("[[validation:ans%d]]", i) }.join(" ")
end
def basis_ans(n, dim, input_size, prefix="")
ERB.new(<<HERE, nil, '-').result(binding)
<%- (1..n).each do |i| -%>
<input>
<name>ans<%= prefix %><%= i %></name>
<type>matrix</type>
<tans>matrix(<%= n_join(dim, "[1]", ",") %>)</tans>
<boxsize><%= input_size %></boxsize>
<strictsyntax>1</strictsyntax>
<insertstars>0</insertstars>
<syntaxhint></syntaxhint>
<forbidwords>[[BASIC-ALGEBRA]],[[BASIC-CALCULUS]],[[BASIC-MATRIX]] </forbidwords>
<allowwords></allowwords>
<forbidfloat>1</forbidfloat>
<requirelowestterms>0</requirelowestterms>
<checkanswertype>0</checkanswertype>
<mustverify>1</mustverify>
<showvalidation>1</showvalidation>
<options></options>
</input>
<%- end -%>
HERE
end
def basis_feedback_lib_mac
<<HERE.chomp
#{does_hold_mac}
is_same_linear_space(a, x) := block([ret, a0, x0, am, xm, am_dim, i],ret : true,a0 : listify(radcan(a)),x0 : listify(radcan(x)),am : apply(matrix, a0),xm : apply(matrix, x0),ret: ret and is(rank(am) = rank(xm)),if ret then (am_dim : rank(am),for i:1 thru length(x0) do (m : apply(matrix, cons(x0[i], a0)),ret : ret and is(rank(m) = am_dim))),ret);
is_basis(x) := block([xm],xm : apply(matrix, x),is( rank(xm) = length(x) ));
is_orthonormal_basis(x) := block([xm],xm : apply(matrix, radcan(x)),does_hold( ident(length(x)) = xm.(conjugate(transpose(xm))) ));
HERE
end
def basis_feedback(dim, mthd)
b1 = n_join(dim, "list_matrix_entries(ans%d)", ", ") # b1 == ans1 == student's answer
large_Ns = n_join(dim, "N", ", ")
basis_chk =
case mthd
when "is_basis_of_same_linear_space"
"is_basis"
when "is_orthonormal_basis_of_same_linear_space"
"is_orthonormal_basis"
else
raise
end
ERB.new(<<HERE, nil, '-').result(binding).chomp
<![CDATA[
<%= basis_feedback_lib_mac() %>
b1 : delete([<%= large_Ns %>], [<%= b1 %>]);
result : if is_same_linear_space(k1, b1) and <%= basis_chk %>(b1) then true else false;
]]>
HERE
end
# Validates that +s+ is a list of bracketed vectors, e.g.
# "[[a,b],[c,d]]", and that every vector has the same number of
# entries. On failure records a message in @err_msg and raises
# RuntimeError("invalid answer type").
def basis_type_check(s, line_num)
  if /\A\[(\[[^\[\]]*?\],?\s*)*\]\Z/ !~ s
    @err_msg = "error at line: #{line_num}\ninvalid answer type"
    raise "invalid answer type"
  end
  vectors = s.scan(/\[[^\[\]]*?\]/)
  expected = vectors.first.split(",").size
  vectors.each do |vec|
    next if vec.split(",").size == expected
    @err_msg = "error at line: #{line_num}\ndimensions of basis are different"
    raise "invalid answer type"
  end
end
# Validates that +s+ is a single flat bracketed vector, e.g.
# "[a,b,c]" (no nested brackets). Records @err_msg and raises
# RuntimeError otherwise.
def plane_type_check(s, line_num)
  return if /\A\[[^\[\]]*?\]\z/ =~ s
  @err_msg = "error at line: #{line_num}\ninvalid answer type"
  raise "invalid answer type"
end
# Validates that +s+ is a single flat braced set, e.g. "{x=1, y=2}"
# (no nested braces). Records @err_msg and raises RuntimeError
# otherwise.
def eq_type_check(s, line_num)
  return if /\A\{[^\{\}]*?\}\z/ =~ s
  @err_msg = "error at line: #{line_num}\ninvalid answer type"
  raise "invalid answer type"
end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{rufus-tokyo}
s.version = "1.0.5"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["John Mettraux", "Zev Blut", "Jeremy Hinegardner", "James Edward Gray II"]
s.date = %q{2010-01-06}
s.description = %q{
Ruby-ffi based lib to access Tokyo Cabinet and Tyrant.
The ffi-based structures are available via the Rufus::Tokyo namespace.
There is a Rufus::Edo namespace that interfaces with Hirabayashi-san's native Ruby interface, and whose API is equal to the Rufus::Tokyo one.
Finally rufus-tokyo includes ffi-based interfaces to Tokyo Dystopia (thanks to Jeremy Hinegardner).
}
s.email = %q{jmettraux@gmail.com}
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
s.files = [
".gitignore",
"CHANGELOG.txt",
"CREDITS.txt",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"TODO.txt",
"doc/decision_table.numbers",
"lib/rufus-edo.rb",
"lib/rufus-tokyo.rb",
"lib/rufus/edo.rb",
"lib/rufus/edo/README.txt",
"lib/rufus/edo/cabcore.rb",
"lib/rufus/edo/cabinet/abstract.rb",
"lib/rufus/edo/cabinet/table.rb",
"lib/rufus/edo/error.rb",
"lib/rufus/edo/ntyrant.rb",
"lib/rufus/edo/ntyrant/abstract.rb",
"lib/rufus/edo/ntyrant/table.rb",
"lib/rufus/edo/tabcore.rb",
"lib/rufus/tokyo.rb",
"lib/rufus/tokyo/cabinet/abstract.rb",
"lib/rufus/tokyo/cabinet/lib.rb",
"lib/rufus/tokyo/cabinet/table.rb",
"lib/rufus/tokyo/cabinet/util.rb",
"lib/rufus/tokyo/config.rb",
"lib/rufus/tokyo/dystopia.rb",
"lib/rufus/tokyo/dystopia/core.rb",
"lib/rufus/tokyo/dystopia/lib.rb",
"lib/rufus/tokyo/dystopia/words.rb",
"lib/rufus/tokyo/hmethods.rb",
"lib/rufus/tokyo/openable.rb",
"lib/rufus/tokyo/outlen.rb",
"lib/rufus/tokyo/query.rb",
"lib/rufus/tokyo/transactions.rb",
"lib/rufus/tokyo/ttcommons.rb",
"lib/rufus/tokyo/tyrant.rb",
"lib/rufus/tokyo/tyrant/abstract.rb",
"lib/rufus/tokyo/tyrant/ext.rb",
"lib/rufus/tokyo/tyrant/lib.rb",
"lib/rufus/tokyo/tyrant/table.rb",
"lib/rufus/tokyo/utils.rb",
"lib/rufus/tokyo/version.rb",
"rufus-tokyo.gemspec",
"spec/cabinet_btree_spec.rb",
"spec/cabinet_fixed_spec.rb",
"spec/cabinet_spec.rb",
"spec/cabinetconfig_spec.rb",
"spec/dystopia_core_spec.rb",
"spec/edo_cabinet_btree_spec.rb",
"spec/edo_cabinet_fixed_spec.rb",
"spec/edo_cabinet_spec.rb",
"spec/edo_ntyrant_spec.rb",
"spec/edo_ntyrant_table_spec.rb",
"spec/edo_table_spec.rb",
"spec/hmethods_spec.rb",
"spec/incr.lua",
"spec/openable_spec.rb",
"spec/shared_abstract_spec.rb",
"spec/shared_table_spec.rb",
"spec/shared_tyrant_spec.rb",
"spec/spec.rb",
"spec/spec_base.rb",
"spec/start_tyrants.sh",
"spec/stop_tyrants.sh",
"spec/table_spec.rb",
"spec/tyrant_spec.rb",
"spec/tyrant_table_spec.rb",
"spec/util_list_spec.rb",
"spec/util_map_spec.rb",
"tasks/dev.rb",
"test/aaron.rb",
"test/bm0.rb",
"test/bm1_compression.rb",
"test/con0.rb",
"test/jeg.rb",
"test/mem.rb",
"test/mem1.rb",
"test/readme0.rb",
"test/readme1.rb",
"test/readme2.rb",
"test/readme3.rb",
"test/readmes_test.sh"
]
s.homepage = %q{http://github.com/jmettraux/rufus-tokyo/}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubyforge_project = %q{rufus}
s.rubygems_version = %q{1.3.5}
s.summary = %q{ruby-ffi based lib to access Tokyo Cabinet, Tyrant and Dystopia}
s.test_files = [
"spec/spec.rb"
]
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<ffi>, [">= 0"])
s.add_development_dependency(%q<yard>, [">= 0"])
else
s.add_dependency(%q<ffi>, [">= 0"])
s.add_dependency(%q<yard>, [">= 0"])
end
else
s.add_dependency(%q<ffi>, [">= 0"])
s.add_dependency(%q<yard>, [">= 0"])
end
end
release 1.0.6
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{rufus-tokyo}
s.version = "1.0.6"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["John Mettraux", "Zev Blut", "Jeremy Hinegardner", "James Edward Gray II"]
s.date = %q{2010-02-01}
s.description = %q{
Ruby-ffi based lib to access Tokyo Cabinet and Tyrant.
The ffi-based structures are available via the Rufus::Tokyo namespace.
There is a Rufus::Edo namespace that interfaces with Hirabayashi-san's native Ruby interface, and whose API is equal to the Rufus::Tokyo one.
Finally rufus-tokyo includes ffi-based interfaces to Tokyo Dystopia (thanks to Jeremy Hinegardner).
}
s.email = %q{jmettraux@gmail.com}
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
s.files = [
".gitignore",
"CHANGELOG.txt",
"CREDITS.txt",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"TODO.txt",
"doc/decision_table.numbers",
"lib/rufus-edo.rb",
"lib/rufus-tokyo.rb",
"lib/rufus/edo.rb",
"lib/rufus/edo/README.txt",
"lib/rufus/edo/cabcore.rb",
"lib/rufus/edo/cabinet/abstract.rb",
"lib/rufus/edo/cabinet/table.rb",
"lib/rufus/edo/error.rb",
"lib/rufus/edo/ntyrant.rb",
"lib/rufus/edo/ntyrant/abstract.rb",
"lib/rufus/edo/ntyrant/table.rb",
"lib/rufus/edo/tabcore.rb",
"lib/rufus/tokyo.rb",
"lib/rufus/tokyo/cabinet/abstract.rb",
"lib/rufus/tokyo/cabinet/lib.rb",
"lib/rufus/tokyo/cabinet/table.rb",
"lib/rufus/tokyo/cabinet/util.rb",
"lib/rufus/tokyo/config.rb",
"lib/rufus/tokyo/dystopia.rb",
"lib/rufus/tokyo/dystopia/core.rb",
"lib/rufus/tokyo/dystopia/lib.rb",
"lib/rufus/tokyo/dystopia/words.rb",
"lib/rufus/tokyo/hmethods.rb",
"lib/rufus/tokyo/openable.rb",
"lib/rufus/tokyo/outlen.rb",
"lib/rufus/tokyo/query.rb",
"lib/rufus/tokyo/transactions.rb",
"lib/rufus/tokyo/ttcommons.rb",
"lib/rufus/tokyo/tyrant.rb",
"lib/rufus/tokyo/tyrant/abstract.rb",
"lib/rufus/tokyo/tyrant/ext.rb",
"lib/rufus/tokyo/tyrant/lib.rb",
"lib/rufus/tokyo/tyrant/table.rb",
"lib/rufus/tokyo/utils.rb",
"lib/rufus/tokyo/version.rb",
"rufus-tokyo.gemspec",
"spec/cabinet_btree_spec.rb",
"spec/cabinet_fixed_spec.rb",
"spec/cabinet_spec.rb",
"spec/cabinetconfig_spec.rb",
"spec/dystopia_core_spec.rb",
"spec/edo_cabinet_btree_spec.rb",
"spec/edo_cabinet_fixed_spec.rb",
"spec/edo_cabinet_spec.rb",
"spec/edo_ntyrant_spec.rb",
"spec/edo_ntyrant_table_spec.rb",
"spec/edo_table_spec.rb",
"spec/hmethods_spec.rb",
"spec/incr.lua",
"spec/openable_spec.rb",
"spec/shared_abstract_spec.rb",
"spec/shared_table_spec.rb",
"spec/shared_tyrant_spec.rb",
"spec/spec.rb",
"spec/spec_base.rb",
"spec/start_tyrants.sh",
"spec/stop_tyrants.sh",
"spec/table_spec.rb",
"spec/tyrant_spec.rb",
"spec/tyrant_table_spec.rb",
"spec/util_list_spec.rb",
"spec/util_map_spec.rb",
"tasks/dev.rb",
"test/aaron.rb",
"test/bm0.rb",
"test/bm1_compression.rb",
"test/con0.rb",
"test/jeg.rb",
"test/mem.rb",
"test/mem1.rb",
"test/readme0.rb",
"test/readme1.rb",
"test/readme2.rb",
"test/readme3.rb",
"test/readmes_test.sh"
]
s.homepage = %q{http://github.com/jmettraux/rufus-tokyo/}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubyforge_project = %q{rufus}
s.rubygems_version = %q{1.3.5}
s.summary = %q{ruby-ffi based lib to access Tokyo Cabinet, Tyrant and Dystopia}
s.test_files = [
"spec/spec.rb"
]
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<ffi>, [">= 0"])
s.add_development_dependency(%q<yard>, [">= 0"])
else
s.add_dependency(%q<ffi>, [">= 0"])
s.add_dependency(%q<yard>, [">= 0"])
end
else
s.add_dependency(%q<ffi>, [">= 0"])
s.add_dependency(%q<yard>, [">= 0"])
end
end
|
require 'test_helper'
class NavigationTest < ActiveSupport::IntegrationCase
# Minitest/ActiveSupport invoke `teardown` (one word) after each test;
# the original `def tear_down` was never called by the framework, so
# the mocha stub on DummyObserver was never removed between tests.
def teardown
  DummyObserver.unstub(:update)
end
test 'user sign up should grant badge to itself' do
DummyObserver.any_instance.expects(:update).times 1
visit '/users/new'
fill_in 'Name', with: 'Jack'
assert_difference('Merit::ActivityLog.count') do
click_button('Create User')
end
user = User.where(name: 'Jack').first
assert_equal [Merit::Badge.by_name('just-registered').first], user.badges
end
test 'User#add_badge should add one badge, #rm_badge should delete one' do
DummyObserver.any_instance.expects(:update).times 0
user = User.create(name: 'test-user')
assert_equal [], user.badges
badge = Merit::Badge.first
user.add_badge badge.id
user.add_badge badge.id
assert_equal [badge, badge], user.badges
assert_equal [user], badge.users
user.rm_badge badge.id
assert_equal [badge], user.reload.badges
end
test 'Remove inexistent badge should do nothing' do
DummyObserver.any_instance.expects(:update).times 0
user = User.create(name: 'test-user')
assert_equal [], user.badges
user.rm_badge 1
assert_equal [], user.badges
end
test 'users#index should grant badge multiple times' do
DummyObserver.any_instance.expects(:update).times 14
user = User.create(name: 'test-user')
# Multiple rule
assert_difference 'badges_by_name(user, "gossip").count', 3 do
3.times { visit '/users' }
end
# Namespaced controller
assert_no_difference 'badges_by_name(user, "visited_admin").count' do
visit '/users'
end
assert_difference 'badges_by_name(user, "visited_admin").count' do
visit '/admin/users'
end
# Wildcard controllers
assert_difference 'badges_by_name(user, "wildcard_badge").count', 3 do
visit '/admin/users'
visit '/api/users'
visit '/users'
end
end
test 'user workflow should grant some badges at some times' do
DummyObserver.any_instance.expects(:update).at_least_once
# Commented 9 times, no badges yet
user = User.create(name: 'test-user')
# Create needed friend user object
friend = User.create(name: 'friend')
(1..9).each do |i|
Comment.create(
name: "Title #{i}",
comment: "Comment #{i}",
user_id: user.id,
votes: 8
)
end
assert user.badges.empty?, 'Should not have badges'
assert_equal 0, user.points
assert_equal 0, Merit::Score::Point.count
user.add_points 15
assert_equal 15, user.points
user.subtract_points 15
assert_equal 0, user.points
assert_equal 2, Merit::Score::Point.count
# Make tenth comment, assert 10-commenter badge granted
visit '/comments/new'
fill_in 'Name', with: 'Hi!'
fill_in 'Comment', with: 'Hi bro!'
fill_in 'User', with: user.id
assert_difference('Merit::ActivityLog.count', 2) do
click_button('Create Comment')
end
assert_equal [Merit::Badge.by_name('commenter').by_level(10).first], user.reload.badges
assert_equal [Merit::Badge.by_name('has_commenter_friend').first], friend.reload.badges
# Vote (to 5) a user's comment, assert relevant-commenter badge granted
relevant_comment = user.comments.where(votes: 8).first
visit '/comments'
within("tr#c_#{relevant_comment.id}") do
click_link '2'
end
relevant_badge = Merit::Badge.by_name('relevant-commenter').first
user_badges = User.where(name: 'test-user').first.badges
assert user_badges.include?(relevant_badge), "User badges: #{user.badges.collect(&:name).inspect} should contain relevant-commenter badge."
# Edit user's name by long name
# tests ruby code in grant_on is being executed, and gives badge
user = User.where(name: 'test-user').first
user_badges = user.badges
visit "/users/#{user.id}/edit"
fill_in 'Name', with: 'long_name!'
click_button('Update User')
user = User.where(name: 'long_name!').first
autobiographer_badge = Merit::Badge.by_name('autobiographer').first
assert user.badges.include?(autobiographer_badge), "User badges: #{user.badges.collect(&:name).inspect} should contain autobiographer badge."
# Edit user's name by short name
# tests ruby code in grant_on is being executed, and removes badge
visit "/users/#{user.id}/edit"
fill_in 'Name', with: 'abc'
assert_difference('Merit::ActivityLog.count', 2) do
click_button('Update User')
end
# Check created Merit::ActivityLogs
assert_equal 'granted commenter badge', Merit::ActivityLog.all[0].description
assert_equal 'granted 20 points', Merit::ActivityLog.all[-1].description
assert_equal 'removed autobiographer badge', Merit::ActivityLog.all[-2].description
user = User.where(name: 'abc').first
assert !user.badges.include?(autobiographer_badge), "User badges: #{user.badges.collect(&:name).inspect} should remove autobiographer badge."
end
test 'user workflow should add up points at some times' do
DummyObserver.any_instance.expects(:update).at_least_once
User.delete_all
user = User.create(name: 'test-user')
assert_equal 0, user.points, 'User should start with 0 points'
visit "/users/#{user.id}/edit"
fill_in 'Name', with: 'a'
assert_difference('Merit::ActivityLog.count', 2) do
click_button('Update User')
end
user = User.where(name: 'a').first
assert_equal 20, user.points, 'Updating info should grant 20 points'
visit '/comments/new'
click_button('Create Comment')
user = User.where(name: 'a').first
assert_equal 20, user.points, 'Empty comment should grant no points'
visit '/comments/new'
fill_in 'Name', with: 'Hi!'
fill_in 'Comment', with: 'Hi bro!'
fill_in 'User', with: user.id
click_button('Create Comment')
user = User.where(name: 'a').first
assert_equal 20, user.points, 'Commenting should not grant 20 points if name.length <= 4'
visit '/comments/new'
fill_in 'Name', with: 'Hi there!'
fill_in 'Comment', with: 'Hi bro!'
fill_in 'User', with: user.id
click_button('Create Comment')
user = User.where(name: 'a').first
assert_equal 40, user.points, 'Commenting should grant 20 points if name.length > 4'
visit "/comments/#{Comment.last.id}/vote/4"
user = User.first
assert_equal 46, user.points, 'Voting comments should grant 5 points for voted, and 1 point for voting'
assert_equal 5, user.points(category: 'vote'), 'Voting comments should grant 5 points for voted in vote category'
visit '/comments/new'
fill_in 'Name', with: 'Hi'
fill_in 'Comment', with: '4'
fill_in 'User', with: user.id
click_button('Create Comment')
user = User.where(name: 'a').first
assert_equal 50, user.points, 'Commenting should grant the integer in comment points if comment is an integer'
end
test 'user workflow should grant levels at some times' do
DummyObserver.any_instance.expects(:update).at_least_once
user = User.create(name: 'test-user')
assert user.badges.empty?
# Edit user's name by 2 chars name
visit "/users/#{user.id}/edit"
fill_in 'Name', with: 'ab'
click_button('Update User')
user = User.where(name: 'ab').first
assert_equal 0, user.level, "User level should be 0."
Merit::RankRules.new.check_rank_rules
user.reload
assert_equal 2, user.level, "User level should be 2."
# Edit user's name by short name. Doesn't go back to previous rank.
visit "/users/#{user.id}/edit"
fill_in 'Name', with: 'a'
click_button('Update User')
user = User.where(name: 'a').first
Merit::RankRules.new.check_rank_rules
user.reload
assert_equal 2, user.level, "User level should be 2."
# Edit user's name by 5 chars name
visit "/users/#{user.id}/edit"
fill_in 'Name', with: 'abcde'
click_button('Update User')
user = User.where(name: 'abcde').first
Merit::RankRules.new.check_rank_rules
user.reload
assert_equal 5, user.level, "User level should be 5."
end
test 'assigning points to a group of records' do
DummyObserver.any_instance.expects(:update).times 4
commenter = User.create(name: 'commenter')
comment_1 = commenter.comments.create(name: 'comment_1', comment: 'a')
comment_2 = commenter.comments.create(name: 'comment_2', comment: 'b')
visit comments_path
# Thanks for voting point, to voted user and it's comments
assert_difference('Merit::ActivityLog.count', 4) do
within "tr#c_#{comment_2.id}" do
click_link '1'
end
end
comment_1.reload.points.must_be :==, 2
comment_2.reload.points.must_be :==, 2
end
test 'api/comments#show should grant 1 point to user' do
DummyObserver.any_instance.expects(:update).times 1
user = User.create(name: 'test-user')
assert_equal 0, user.points
comment = user.comments.create!(name: 'test-comment', comment: 'comment body')
visit "/api/comments/#{comment.id}"
assert_equal 1, user.points
end
# All badges currently held by +user+ whose name equals +name+.
# Reloads the record so badges granted during the test are visible.
def badges_by_name(user, name)
  user.reload.badges.select { |badge| badge.name == name }
end
end
Test that reputation doesn't change when the submission has validation errors
[Progress #143]
require 'test_helper'
# Integration tests for Merit badge/point/rank rules, driven through the
# dummy app with Capybara. DummyObserver is stubbed with Mocha to assert
# how many reputation-change notifications each workflow produces.
class NavigationTest < ActiveSupport::IntegrationCase
  # Minitest's after-test hook is `teardown` — the original `tear_down`
  # was never invoked by the framework, so the Mocha stub on
  # DummyObserver leaked from one test into the next.
  def teardown
    DummyObserver.unstub(:update)
  end

  # Signing up grants the 'just-registered' badge to the new user.
  test 'user sign up should grant badge to itself' do
    DummyObserver.any_instance.expects(:update).times 1
    visit '/users/new'
    fill_in 'Name', with: 'Jack'
    assert_difference('Merit::ActivityLog.count') do
      click_button('Create User')
    end
    user = User.where(name: 'Jack').first
    assert_equal [Merit::Badge.by_name('just-registered').first], user.badges
  end

  # Badges can be granted by id (even twice) and revoked one at a time.
  test 'User#add_badge should add one badge, #rm_badge should delete one' do
    DummyObserver.any_instance.expects(:update).times 0
    user = User.create(name: 'test-user')
    assert_equal [], user.badges
    badge = Merit::Badge.first
    user.add_badge badge.id
    user.add_badge badge.id
    assert_equal [badge, badge], user.badges
    assert_equal [user], badge.users
    user.rm_badge badge.id
    assert_equal [badge], user.reload.badges
  end

  # Revoking a badge the user doesn't hold is a silent no-op.
  test 'Remove inexistent badge should do nothing' do
    DummyObserver.any_instance.expects(:update).times 0
    user = User.create(name: 'test-user')
    assert_equal [], user.badges
    user.rm_badge 1
    assert_equal [], user.badges
  end

  # Badge rules can grant repeatedly, be scoped to a namespaced
  # controller, or match wildcard controllers.
  test 'users#index should grant badge multiple times' do
    DummyObserver.any_instance.expects(:update).times 14
    user = User.create(name: 'test-user')
    # Multiple rule
    assert_difference 'badges_by_name(user, "gossip").count', 3 do
      3.times { visit '/users' }
    end
    # Namespaced controller
    assert_no_difference 'badges_by_name(user, "visited_admin").count' do
      visit '/users'
    end
    assert_difference 'badges_by_name(user, "visited_admin").count' do
      visit '/admin/users'
    end
    # Wildcard controllers
    assert_difference 'badges_by_name(user, "wildcard_badge").count', 3 do
      visit '/admin/users'
      visit '/api/users'
      visit '/users'
    end
  end

  # End-to-end badge workflow: count thresholds, invalid submissions
  # leaving reputation untouched, vote-triggered badges, and badges that
  # ruby blocks in grant_on rules grant and later remove.
  test 'user workflow should grant some badges at some times' do
    DummyObserver.any_instance.expects(:update).at_least_once
    # Commented 9 times, no badges yet
    user = User.create(name: 'test-user')
    # Create needed friend user object
    friend = User.create(name: 'friend')
    (1..9).each do |i|
      Comment.create(
        name: "Title #{i}",
        comment: "Comment #{i}",
        user_id: user.id,
        votes: 8
      )
    end
    assert user.badges.empty?, 'Should not have badges'
    assert_equal 0, user.points
    assert_equal 0, Merit::Score::Point.count
    user.add_points 15
    assert_equal 15, user.points
    user.subtract_points 15
    assert_equal 0, user.points
    # Both the add and the subtract are persisted as Point rows.
    assert_equal 2, Merit::Score::Point.count
    # Tenth comment with errors doesn't change reputation
    badges = user.reload.badges
    points = user.points
    visit '/comments/new'
    assert_no_difference('Merit::ActivityLog.count') do
      click_button('Create Comment')
    end
    assert_equal badges, user.reload.badges
    assert_equal points, user.points
    # Tenth comment without errors, assert 10-commenter badge granted
    fill_in 'Name', with: 'Hi!'
    fill_in 'Comment', with: 'Hi bro!'
    fill_in 'User', with: user.id
    assert_difference('Merit::ActivityLog.count', 2) do
      click_button('Create Comment')
    end
    assert_equal [Merit::Badge.by_name('commenter').by_level(10).first], user.reload.badges
    assert_equal [Merit::Badge.by_name('has_commenter_friend').first], friend.reload.badges
    # Vote (to 5) a user's comment, assert relevant-commenter badge granted
    relevant_comment = user.comments.where(votes: 8).first
    visit '/comments'
    within("tr#c_#{relevant_comment.id}") do
      click_link '2'
    end
    relevant_badge = Merit::Badge.by_name('relevant-commenter').first
    user_badges = User.where(name: 'test-user').first.badges
    assert user_badges.include?(relevant_badge), "User badges: #{user.badges.collect(&:name).inspect} should contain relevant-commenter badge."
    # Edit user's name by long name
    # tests ruby code in grant_on is being executed, and gives badge
    user = User.where(name: 'test-user').first
    visit "/users/#{user.id}/edit"
    fill_in 'Name', with: 'long_name!'
    click_button('Update User')
    user = User.where(name: 'long_name!').first
    autobiographer_badge = Merit::Badge.by_name('autobiographer').first
    assert user.badges.include?(autobiographer_badge), "User badges: #{user.badges.collect(&:name).inspect} should contain autobiographer badge."
    # Edit user's name by short name
    # tests ruby code in grant_on is being executed, and removes badge
    visit "/users/#{user.id}/edit"
    fill_in 'Name', with: 'abc'
    assert_difference('Merit::ActivityLog.count', 2) do
      click_button('Update User')
    end
    # Check created Merit::ActivityLogs
    assert_equal 'granted commenter badge', Merit::ActivityLog.all[0].description
    assert_equal 'granted 20 points', Merit::ActivityLog.all[-1].description
    assert_equal 'removed autobiographer badge', Merit::ActivityLog.all[-2].description
    user = User.where(name: 'abc').first
    assert !user.badges.include?(autobiographer_badge), "User badges: #{user.badges.collect(&:name).inspect} should remove autobiographer badge."
  end

  # Point rules: fixed amounts, conditional amounts based on attribute
  # length, category-scoped points, and amounts computed from the model.
  test 'user workflow should add up points at some times' do
    DummyObserver.any_instance.expects(:update).at_least_once
    User.delete_all
    user = User.create(name: 'test-user')
    assert_equal 0, user.points, 'User should start with 0 points'
    visit "/users/#{user.id}/edit"
    fill_in 'Name', with: 'a'
    assert_difference('Merit::ActivityLog.count', 2) do
      click_button('Update User')
    end
    user = User.where(name: 'a').first
    assert_equal 20, user.points, 'Updating info should grant 20 points'
    visit '/comments/new'
    click_button('Create Comment')
    user = User.where(name: 'a').first
    assert_equal 20, user.points, 'Empty comment should grant no points'
    visit '/comments/new'
    fill_in 'Name', with: 'Hi!'
    fill_in 'Comment', with: 'Hi bro!'
    fill_in 'User', with: user.id
    click_button('Create Comment')
    user = User.where(name: 'a').first
    assert_equal 20, user.points, 'Commenting should not grant 20 points if name.length <= 4'
    visit '/comments/new'
    fill_in 'Name', with: 'Hi there!'
    fill_in 'Comment', with: 'Hi bro!'
    fill_in 'User', with: user.id
    click_button('Create Comment')
    user = User.where(name: 'a').first
    assert_equal 40, user.points, 'Commenting should grant 20 points if name.length > 4'
    visit "/comments/#{Comment.last.id}/vote/4"
    user = User.first
    assert_equal 46, user.points, 'Voting comments should grant 5 points for voted, and 1 point for voting'
    assert_equal 5, user.points(category: 'vote'), 'Voting comments should grant 5 points for voted in vote category'
    visit '/comments/new'
    fill_in 'Name', with: 'Hi'
    fill_in 'Comment', with: '4'
    fill_in 'User', with: user.id
    click_button('Create Comment')
    user = User.where(name: 'a').first
    assert_equal 50, user.points, 'Commenting should grant the integer in comment points if comment is an integer'
  end

  # Rank rules: level rises with name length when checked, and never
  # drops back when the name shrinks again.
  test 'user workflow should grant levels at some times' do
    DummyObserver.any_instance.expects(:update).at_least_once
    user = User.create(name: 'test-user')
    assert user.badges.empty?
    # Edit user's name by 2 chars name
    visit "/users/#{user.id}/edit"
    fill_in 'Name', with: 'ab'
    click_button('Update User')
    user = User.where(name: 'ab').first
    assert_equal 0, user.level, "User level should be 0."
    Merit::RankRules.new.check_rank_rules
    user.reload
    assert_equal 2, user.level, "User level should be 2."
    # Edit user's name by short name. Doesn't go back to previous rank.
    visit "/users/#{user.id}/edit"
    fill_in 'Name', with: 'a'
    click_button('Update User')
    user = User.where(name: 'a').first
    Merit::RankRules.new.check_rank_rules
    user.reload
    assert_equal 2, user.level, "User level should be 2."
    # Edit user's name by 5 chars name
    visit "/users/#{user.id}/edit"
    fill_in 'Name', with: 'abcde'
    click_button('Update User')
    user = User.where(name: 'abcde').first
    Merit::RankRules.new.check_rank_rules
    user.reload
    assert_equal 5, user.level, "User level should be 5."
  end

  # Voting one comment awards points to the voter and, as a group, to the
  # voted user's comments (4 activity-log entries).
  test 'assigning points to a group of records' do
    DummyObserver.any_instance.expects(:update).times 4
    commenter = User.create(name: 'commenter')
    comment_1 = commenter.comments.create(name: 'comment_1', comment: 'a')
    comment_2 = commenter.comments.create(name: 'comment_2', comment: 'b')
    visit comments_path
    # Thanks for voting point, to voted user and it's comments
    assert_difference('Merit::ActivityLog.count', 4) do
      within "tr#c_#{comment_2.id}" do
        click_link '1'
      end
    end
    # assert-style instead of `must_be` — the global expectation
    # monkey-patch is deprecated and removed in Minitest 6.
    assert_equal 2, comment_1.reload.points
    assert_equal 2, comment_2.reload.points
  end

  # Point rules also fire on namespaced (API) controllers.
  test 'api/comments#show should grant 1 point to user' do
    DummyObserver.any_instance.expects(:update).times 1
    user = User.create(name: 'test-user')
    assert_equal 0, user.points
    comment = user.comments.create!(name: 'test-comment', comment: 'comment body')
    visit "/api/comments/#{comment.id}"
    assert_equal 1, user.points
  end

  # All badges currently held by +user+ whose name equals +name+.
  def badges_by_name(user, name)
    user.reload.badges.select { |b| b.name == name }
  end
end
|
#
# Description: Finds scenarios tagged with --tags and runs them in order.
# Original Date: September 17, 2013
#
# Public: Returns the ordered groups of Cucumber tags for test scenarios
# that must be run in order.
#
#   kc = --tag for Kuali Coeus test scenarios that need to be run in order
#
# Example
#   @kc1 = Kuali Coeus Test 1
#
# Returns an Array of Arrays of tag names, one inner Array per ordered run.
def jirra
  # Return the tag groups directly: the original assigned them to a local
  # `array` that was never used — the assignment expression was simply the
  # method's implicit return value.
  [
    ['@kc1', '@kc2', '@kc3', '@kc4', '@kc5', '@kc6', '@kc7'],
    ['@kc1', '@kc2', '@kc5']
  ]
end
Update ECE.rb
Removed the '@kc5' tag from the first ordered tag group
#
# Description: Finds scenarios tagged with --tags and runs them in order.
# Original Date: September 17, 2013
#
# Public: Returns the ordered groups of Cucumber tags for test scenarios
# that must be run in order.
#
#   kc = --tag for Kuali Coeus test scenarios that need to be run in order
#
# Example
#   @kc1 = Kuali Coeus Test 1
#
# Returns an Array of Arrays of tag names, one inner Array per ordered run.
def jirra
  # Return the tag groups directly: the original assigned them to a local
  # `array` that was never used — the assignment expression was simply the
  # method's implicit return value.
  [
    ['@kc1', '@kc2', '@kc3', '@kc4', '@kc6', '@kc7'],
    ['@kc1', '@kc2', '@kc5']
  ]
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.