CombinedText stringlengths 4 3.42M |
|---|
require 'formula'
# Homebrew formula for the Jetty servlet container (9.1.0).
# Installs the distribution into libexec and generates bin/ wrapper scripts
# that set JETTY_HOME before delegating to the bundled shell scripts.
class Jetty < Formula
  homepage 'http://www.eclipse.org/jetty/'
  url 'http://eclipse.org/downloads/download.php?file=/jetty/9.1.0.v20131115/dist/jetty-distribution-9.1.0.v20131115.tar.gz&r=1'
  version '9.1.0'
  sha1 '50e6a8e45581fc111922cdaada93b9eea27ae937'

  def install
    # Drop Windows-only launchers. The original glob 'bin/*.{cmd,bat]}' had a
    # stray "]" inside the brace group, so it matched nothing and the .cmd/.bat
    # files were shipped anyway.
    rm_rf Dir['bin/*.{cmd,bat}']
    libexec.install Dir['*']
    (libexec+'logs').mkpath
    bin.mkpath
    # Wrap each distribution shell script so JETTY_HOME points at libexec.
    Dir["#{libexec}/bin/*.sh"].each do |f|
      scriptname = File.basename(f, '.sh')
      (bin+scriptname).write <<-EOS.undent
        #!/bin/bash
        JETTY_HOME=#{libexec}
        #{f} "$@"
      EOS
      # Quoting "$@" preserves arguments that contain whitespace; a bare $@
      # re-splits them on IFS.
      chmod 0755, bin+scriptname
    end
  end
end
jetty 9.1.3
Closes Homebrew/homebrew#27296.
Signed-off-by: Brett Koonce <cbb63d51fa8fe93df04ca2da488d36daa92d0c44@gmail.com>
require 'formula'
# Homebrew formula for the Jetty servlet container (9.1.3).
# Installs the distribution into libexec and generates bin/ wrapper scripts
# that set JETTY_HOME before delegating to the bundled shell scripts.
class Jetty < Formula
  homepage 'http://www.eclipse.org/jetty/'
  url 'http://eclipse.org/downloads/download.php?file=/jetty/9.1.3.v20140225/dist/jetty-distribution-9.1.3.v20140225.tar.gz&r=1'
  version '9.1.3'
  sha1 '09cc96d187ad07bc86414b123475d0fe2d25e255'

  def install
    # Drop Windows-only launchers. The original glob 'bin/*.{cmd,bat]}' had a
    # stray "]" inside the brace group, so it matched nothing and the .cmd/.bat
    # files were shipped anyway.
    rm_rf Dir['bin/*.{cmd,bat}']
    libexec.install Dir['*']
    (libexec+'logs').mkpath
    bin.mkpath
    # Wrap each distribution shell script so JETTY_HOME points at libexec.
    Dir["#{libexec}/bin/*.sh"].each do |f|
      scriptname = File.basename(f, '.sh')
      (bin+scriptname).write <<-EOS.undent
        #!/bin/bash
        JETTY_HOME=#{libexec}
        #{f} "$@"
      EOS
      # Quoting "$@" preserves arguments that contain whitespace; a bare $@
      # re-splits them on IFS.
      chmod 0755, bin+scriptname
    end
  end
end
|
# Homebrew formula for the Jolie service-oriented programming language (1.10.13).
class Jolie < Formula
  desc "Service-oriented programming language"
  homepage "https://www.jolie-lang.org/"
  url "https://github.com/jolie/jolie/releases/download/v1.10.13/jolie-1.10.13.jar"
  sha256 "475c32552eaacb0de1f50e109f52e713610a99538b71abfc9167755e41c022a1"
  license "LGPL-2.1-only"

  bottle do
    sha256 cellar: :any_skip_relocation, all: "56f8f9b008a733c98119fd49c7e966510cc3c502496000d91d0a6d1aa9f10585"
  end

  # JVM language: a JDK is needed both to run the self-installing jar below
  # and at runtime.
  depends_on "openjdk"

  def install
    # The release jar is a self-installer: it unpacks the runtime into
    # --jolie-home and writes launcher scripts into --jolie-launchers.
    system Formula["openjdk"].opt_bin/"java",
           "-jar", "jolie-#{version}.jar",
           "--jolie-home", libexec,
           "--jolie-launchers", libexec/"bin"
    bin.install Dir["#{libexec}/bin/*"]
    # Rewrite the launchers as env wrappers so JOLIE_HOME and JAVA_HOME default
    # to Homebrew's paths while still honouring user-exported values.
    bin.env_script_all_files libexec/"bin",
                             JOLIE_HOME: "${JOLIE_HOME:-#{libexec}}",
                             JAVA_HOME: "${JAVA_HOME:-#{Formula["openjdk"].opt_prefix}}"
  end

  test do
    # A service that sends 4 to itself and prints its square; expect "16".
    file = testpath/"test.ol"
    file.write <<~EOS
      from console import Console, ConsoleIface
      interface PowTwoInterface { OneWay: powTwo( int ) }
      service main(){
      outputPort Console { interfaces: ConsoleIface }
      embed Console in Console
      inputPort In {
      location: "local://testPort"
      interfaces: PowTwoInterface
      }
      outputPort Self {
      location: "local://testPort"
      interfaces: PowTwoInterface
      }
      init {
      powTwo@Self( 4 )
      }
      main {
      powTwo( x )
      println@Console( x * x )()
      }
      }
    EOS
    out = shell_output("#{bin}/jolie #{file}").strip
    assert_equal "16", out
  end
end
jolie: update 1.10.13 bottle.
# Homebrew formula for the Jolie service-oriented programming language
# (1.10.13, rebuilt bottle).
class Jolie < Formula
  desc "Service-oriented programming language"
  homepage "https://www.jolie-lang.org/"
  url "https://github.com/jolie/jolie/releases/download/v1.10.13/jolie-1.10.13.jar"
  sha256 "475c32552eaacb0de1f50e109f52e713610a99538b71abfc9167755e41c022a1"
  license "LGPL-2.1-only"

  bottle do
    sha256 cellar: :any_skip_relocation, all: "3f2b2870d5769acb0d8a1ab6a3b7f24e4c3a654d55567627a708e641256e8206"
  end

  # JVM language: a JDK is needed both to run the self-installing jar below
  # and at runtime.
  depends_on "openjdk"

  def install
    # The release jar is a self-installer: it unpacks the runtime into
    # --jolie-home and writes launcher scripts into --jolie-launchers.
    system Formula["openjdk"].opt_bin/"java",
           "-jar", "jolie-#{version}.jar",
           "--jolie-home", libexec,
           "--jolie-launchers", libexec/"bin"
    bin.install Dir["#{libexec}/bin/*"]
    # Rewrite the launchers as env wrappers so JOLIE_HOME and JAVA_HOME default
    # to Homebrew's paths while still honouring user-exported values.
    bin.env_script_all_files libexec/"bin",
                             JOLIE_HOME: "${JOLIE_HOME:-#{libexec}}",
                             JAVA_HOME: "${JAVA_HOME:-#{Formula["openjdk"].opt_prefix}}"
  end

  test do
    # A service that sends 4 to itself and prints its square; expect "16".
    file = testpath/"test.ol"
    file.write <<~EOS
      from console import Console, ConsoleIface
      interface PowTwoInterface { OneWay: powTwo( int ) }
      service main(){
      outputPort Console { interfaces: ConsoleIface }
      embed Console in Console
      inputPort In {
      location: "local://testPort"
      interfaces: PowTwoInterface
      }
      outputPort Self {
      location: "local://testPort"
      interfaces: PowTwoInterface
      }
      init {
      powTwo@Self( 4 )
      }
      main {
      powTwo( x )
      println@Console( x * x )()
      }
      }
    EOS
    out = shell_output("#{bin}/jolie #{file}").strip
    assert_equal "16", out
  end
end
|
# Homebrew formula for the Jolie language interpreter (1.9.0).
# Uses the pre-2021 bottle DSL (`cellar` line + hash-rocket sha256 entries).
class Jolie < Formula
  desc "The Jolie Language Interpreter"
  homepage "https://www.jolie-lang.org/"
  url "https://github.com/jolie/jolie/releases/download/v1.9.0/jolie-1.9.0.jar"
  sha256 "1510ed7f114909eb79670462571ab0734a5b01e57d26da6fd1cf9ef6c67eff6e"

  bottle do
    cellar :any_skip_relocation
    # Bottle contents are identical across macOS versions, hence the repeated checksum.
    sha256 "e18c5ef5fe67fe4ec0736c8eed50e1412d850d25957062db67fbf2eb9eab7160" => :catalina
    sha256 "e18c5ef5fe67fe4ec0736c8eed50e1412d850d25957062db67fbf2eb9eab7160" => :mojave
    sha256 "e18c5ef5fe67fe4ec0736c8eed50e1412d850d25957062db67fbf2eb9eab7160" => :high_sierra
  end

  # JVM language: a JDK is needed both to run the self-installing jar below
  # and at runtime.
  depends_on "openjdk"

  def install
    # The release jar is a self-installer: it unpacks the runtime into
    # --jolie-home and writes launcher scripts into --jolie-launchers.
    system Formula["openjdk"].opt_bin/"java",
           "-jar", "jolie-#{version}.jar",
           "--jolie-home", libexec,
           "--jolie-launchers", libexec/"bin"
    bin.install Dir["#{libexec}/bin/*"]
    # Rewrite the launchers as env wrappers so JOLIE_HOME and JAVA_HOME default
    # to Homebrew's paths while still honouring user-exported values.
    bin.env_script_all_files libexec/"bin",
                             :JOLIE_HOME => "${JOLIE_HOME:-#{libexec}}",
                             :JAVA_HOME => "${JAVA_HOME:-#{Formula["openjdk"].opt_prefix}}"
  end

  test do
    # A service that sends 4 to itself and prints its square; expect "16".
    file = testpath/"test.ol"
    file.write <<~EOS
      include "console.iol"
      interface EchoInterface {
      OneWay: echo( int )
      }
      inputPort In {
      location: "local://testPort"
      interfaces: EchoInterface
      }
      outputPort Self {
      location: "local://testPort"
      interfaces: EchoInterface
      }
      init{
      echo@Self( 4 )
      }
      main {
      echo( x )
      println@Console( x * x )()
      }
    EOS
    out = shell_output("#{bin}/jolie #{file}").strip
    assert_equal "16", out
  end
end
jolie 1.9.1
Closes #55796.
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for the Jolie language interpreter (1.9.1).
# Uses the pre-2021 bottle DSL (`cellar` line + hash-rocket sha256 entries).
class Jolie < Formula
  desc "The Jolie Language Interpreter"
  homepage "https://www.jolie-lang.org/"
  url "https://github.com/jolie/jolie/releases/download/v1.9.1/jolie-1.9.1.jar"
  sha256 "e4b43f2b247102f49c05fb48d64ca294141b3488de38bd089c99653ca83c644d"

  bottle do
    cellar :any_skip_relocation
    # Bottle contents are identical across macOS versions, hence the repeated checksum.
    sha256 "e18c5ef5fe67fe4ec0736c8eed50e1412d850d25957062db67fbf2eb9eab7160" => :catalina
    sha256 "e18c5ef5fe67fe4ec0736c8eed50e1412d850d25957062db67fbf2eb9eab7160" => :mojave
    sha256 "e18c5ef5fe67fe4ec0736c8eed50e1412d850d25957062db67fbf2eb9eab7160" => :high_sierra
  end

  # JVM language: a JDK is needed both to run the self-installing jar below
  # and at runtime.
  depends_on "openjdk"

  def install
    # The release jar is a self-installer: it unpacks the runtime into
    # --jolie-home and writes launcher scripts into --jolie-launchers.
    system Formula["openjdk"].opt_bin/"java",
           "-jar", "jolie-#{version}.jar",
           "--jolie-home", libexec,
           "--jolie-launchers", libexec/"bin"
    bin.install Dir["#{libexec}/bin/*"]
    # Rewrite the launchers as env wrappers so JOLIE_HOME and JAVA_HOME default
    # to Homebrew's paths while still honouring user-exported values.
    bin.env_script_all_files libexec/"bin",
                             :JOLIE_HOME => "${JOLIE_HOME:-#{libexec}}",
                             :JAVA_HOME => "${JAVA_HOME:-#{Formula["openjdk"].opt_prefix}}"
  end

  test do
    # A service that sends 4 to itself and prints its square; expect "16".
    file = testpath/"test.ol"
    file.write <<~EOS
      include "console.iol"
      interface EchoInterface {
      OneWay: echo( int )
      }
      inputPort In {
      location: "local://testPort"
      interfaces: EchoInterface
      }
      outputPort Self {
      location: "local://testPort"
      interfaces: EchoInterface
      }
      init{
      echo@Self( 4 )
      }
      main {
      echo( x )
      println@Console( x * x )()
      }
    EOS
    out = shell_output("#{bin}/jolie #{file}").strip
    assert_equal "16", out
  end
end
|
# Homebrew formula for the Jolie service-oriented programming language (1.10.5).
class Jolie < Formula
  desc "Service-oriented programming language"
  homepage "https://www.jolie-lang.org/"
  url "https://github.com/jolie/jolie/releases/download/v1.10.5/jolie-1.10.5.jar"
  sha256 "1088c357838c58f132c1d17598d5841e63e5dccb0ea518a2cd9aa281def7d92d"
  license "LGPL-2.1-only"

  bottle do
    sha256 cellar: :any_skip_relocation, all: "c924ac39aa0b2efce9222c45cfb9eb82eb261da9425d3454c82cc26199ff03f0"
  end

  # JVM language: a JDK is needed both to run the self-installing jar below
  # and at runtime.
  depends_on "openjdk"

  def install
    # The release jar is a self-installer: it unpacks the runtime into
    # --jolie-home and writes launcher scripts into --jolie-launchers.
    system Formula["openjdk"].opt_bin/"java",
           "-jar", "jolie-#{version}.jar",
           "--jolie-home", libexec,
           "--jolie-launchers", libexec/"bin"
    bin.install Dir["#{libexec}/bin/*"]
    # Rewrite the launchers as env wrappers so JOLIE_HOME and JAVA_HOME default
    # to Homebrew's paths while still honouring user-exported values.
    bin.env_script_all_files libexec/"bin",
                             JOLIE_HOME: "${JOLIE_HOME:-#{libexec}}",
                             JAVA_HOME: "${JAVA_HOME:-#{Formula["openjdk"].opt_prefix}}"
  end

  test do
    # A service that sends 4 to itself and prints its square; expect "16".
    # NOTE(review): this test imports "Console" (capitalized) while the 1.10.13
    # formula imports "console" — confirm the module name is correct for 1.10.5
    # before changing it.
    file = testpath/"test.ol"
    file.write <<~EOS
      from Console import Console, ConsoleIface
      interface PowTwoInterface { OneWay: powTwo( int ) }
      service main(){
      outputPort Console { interfaces: ConsoleIface }
      embed Console in Console
      inputPort In {
      location: "local://testPort"
      interfaces: PowTwoInterface
      }
      outputPort Self {
      location: "local://testPort"
      interfaces: PowTwoInterface
      }
      init {
      powTwo@Self( 4 )
      }
      main {
      powTwo( x )
      println@Console( x * x )()
      }
      }
    EOS
    out = shell_output("#{bin}/jolie #{file}").strip
    assert_equal "16", out
  end
end
jolie: update 1.10.5 bottle.
# Homebrew formula for the Jolie service-oriented programming language
# (1.10.5, rebuilt bottle).
class Jolie < Formula
  desc "Service-oriented programming language"
  homepage "https://www.jolie-lang.org/"
  url "https://github.com/jolie/jolie/releases/download/v1.10.5/jolie-1.10.5.jar"
  sha256 "1088c357838c58f132c1d17598d5841e63e5dccb0ea518a2cd9aa281def7d92d"
  license "LGPL-2.1-only"

  bottle do
    sha256 cellar: :any_skip_relocation, all: "50dcf57021a1915c8403e3596b6189dc435f3e8f286dd66d9efb3a5e2799cbe9"
  end

  # JVM language: a JDK is needed both to run the self-installing jar below
  # and at runtime.
  depends_on "openjdk"

  def install
    # The release jar is a self-installer: it unpacks the runtime into
    # --jolie-home and writes launcher scripts into --jolie-launchers.
    system Formula["openjdk"].opt_bin/"java",
           "-jar", "jolie-#{version}.jar",
           "--jolie-home", libexec,
           "--jolie-launchers", libexec/"bin"
    bin.install Dir["#{libexec}/bin/*"]
    # Rewrite the launchers as env wrappers so JOLIE_HOME and JAVA_HOME default
    # to Homebrew's paths while still honouring user-exported values.
    bin.env_script_all_files libexec/"bin",
                             JOLIE_HOME: "${JOLIE_HOME:-#{libexec}}",
                             JAVA_HOME: "${JAVA_HOME:-#{Formula["openjdk"].opt_prefix}}"
  end

  test do
    # A service that sends 4 to itself and prints its square; expect "16".
    # NOTE(review): this test imports "Console" (capitalized) while the 1.10.13
    # formula imports "console" — confirm the module name is correct for 1.10.5
    # before changing it.
    file = testpath/"test.ol"
    file.write <<~EOS
      from Console import Console, ConsoleIface
      interface PowTwoInterface { OneWay: powTwo( int ) }
      service main(){
      outputPort Console { interfaces: ConsoleIface }
      embed Console in Console
      inputPort In {
      location: "local://testPort"
      interfaces: PowTwoInterface
      }
      outputPort Self {
      location: "local://testPort"
      interfaces: PowTwoInterface
      }
      init {
      powTwo@Self( 4 )
      }
      main {
      powTwo( x )
      println@Console( x * x )()
      }
      }
    EOS
    out = shell_output("#{bin}/jolie #{file}").strip
    assert_equal "16", out
  end
end
|
require 'formula'
# Homebrew formula for JRuby 1.4.0 (Ruby on the JVM).
class Jruby < Formula
  url 'http://jruby.kenai.com/downloads/1.4.0/jruby-bin-1.4.0.tar.gz'
  homepage 'http://www.jruby.org'
  md5 'f37322c18e9134e91e064aebb4baa4c7'

  def install
    # Remove Windows-only launchers and libraries
    rm Dir['bin/*.{bat,dll,exe}']
    # Prefix a 'j' on commands so they don't shadow MRI's (gem -> jgem, etc.);
    # names already starting with 'j' or '_' are left alone.
    Dir.chdir 'bin' do
      Dir['*'].each do |file|
        # A bare `file.match /^[j_]/` argument triggers Ruby's "ambiguous first
        # argument" parser warning; start_with? expresses the same check clearly.
        mv file, "j#{file}" unless file.start_with?('j', '_')
      end
    end
    # Only keep the OS X native libraries
    Dir.chdir 'lib/native' do
      Dir['*'].each do |file|
        rm_rf file unless file == 'darwin'
      end
    end
    prefix.install Dir['*']
  end

  # Smoke test: the interpreter starts and exits cleanly.
  def test
    system "jruby -e ''"
  end
end
Updated formula: jruby 1.5.0
Signed-off-by: Adam Vandenberg <34c2b6407fd5a10249a15d699d40f9ed1782e98c@gmail.com>
require 'formula'
# Homebrew formula for JRuby 1.5.0 (Ruby on the JVM).
class Jruby < Formula
  url 'http://jruby.org.s3.amazonaws.com/downloads/1.5.0/jruby-bin-1.5.0.tar.gz'
  homepage 'http://www.jruby.org'
  md5 'fd5c0fa9e42cf499807711a8d98d5402'

  def install
    # Remove Windows-only launchers and libraries
    rm Dir['bin/*.{bat,dll,exe}']
    # Prefix a 'j' on commands so they don't shadow MRI's (gem -> jgem, etc.);
    # names already starting with 'j' or '_' are left alone.
    Dir.chdir 'bin' do
      Dir['*'].each do |file|
        # A bare `file.match /^[j_]/` argument triggers Ruby's "ambiguous first
        # argument" parser warning; start_with? expresses the same check clearly.
        mv file, "j#{file}" unless file.start_with?('j', '_')
      end
    end
    # Only keep the OS X native libraries
    Dir.chdir 'lib/native' do
      Dir['*'].each do |file|
        rm_rf file unless file == 'darwin'
      end
    end
    prefix.install Dir['*']
  end

  # Smoke test: the interpreter starts and exits cleanly.
  def test
    system "jruby -e ''"
  end
end
|
# Homebrew formula for jsawk, a JSON-processing script in the spirit of awk.
class Jsawk < Formula
  desc "Like awk, but for JSON, using JavaScript objects and arrays"
  homepage "https://github.com/micha/jsawk"
  url "https://github.com/micha/jsawk/archive/1.4.tar.gz"
  sha256 "3d38ffb4b9c6ff7f17072a12c5817ffe68bd0ab58d6182de300fc1e587d34530"
  head "https://github.com/micha/jsawk.git"

  # Pure shell script; nothing to compile, so no bottle is needed.
  bottle :unneeded

  # jsawk shells out to SpiderMonkey's `js` interpreter at runtime.
  depends_on "spidermonkey"

  def install
    # The whole program is a single executable script.
    bin.install "jsawk"
  end

  test do
    # Pipe an empty JSON object through a script that adds one key.
    cmd = %(#{bin}/jsawk 'this.a = "foo"')
    assert_equal %({"a":"foo"}\n), pipe_output(cmd, "{}")
  end
end
jsawk: rubocop tweaks.
# Homebrew formula for jsawk, a JSON-processing script in the spirit of awk.
class Jsawk < Formula
  desc "Like awk, but for JSON, using JavaScript objects and arrays"
  homepage "https://github.com/micha/jsawk"
  url "https://github.com/micha/jsawk/archive/1.4.tar.gz"
  sha256 "3d38ffb4b9c6ff7f17072a12c5817ffe68bd0ab58d6182de300fc1e587d34530"
  head "https://github.com/micha/jsawk.git"

  # Pure shell script; nothing to compile, so no bottle is needed.
  bottle :unneeded

  # jsawk shells out to SpiderMonkey's `js` interpreter at runtime.
  depends_on "spidermonkey"

  def install
    # The whole program is a single executable script.
    bin.install "jsawk"
  end

  test do
    # Pipe an empty JSON object through a script that adds one key.
    # %(...) interpolates exactly like %Q(...); the bare form is preferred
    # (RuboCop Style/PercentQLiterals) and matches this formula's other versions.
    cmd = %(#{bin}/jsawk 'this.a = "foo"')
    assert_equal %({"a":"foo"}\n), pipe_output(cmd, "{}")
  end
end
|
# Homebrew formula for jsawk, a JSON-processing script in the spirit of awk.
class Jsawk < Formula
  desc "Like awk, but for JSON, using JavaScript objects and arrays"
  homepage "https://github.com/micha/jsawk"
  url "https://github.com/micha/jsawk/archive/1.4.tar.gz"
  sha256 "3d38ffb4b9c6ff7f17072a12c5817ffe68bd0ab58d6182de300fc1e587d34530"
  head "https://github.com/micha/jsawk.git"

  # jsawk shells out to SpiderMonkey's `js` interpreter at runtime.
  depends_on "spidermonkey"

  def install
    # The whole program is a single executable script.
    bin.install "jsawk"
  end

  test do
    # Pipe an empty JSON object through a script that adds one key.
    cmd = %(#{bin}/jsawk 'this.a = "foo"')
    assert_equal %({"a":"foo"}\n), pipe_output(cmd, "{}")
  end
end
jsawk: bottle unneeded
# Homebrew formula for jsawk, a JSON-processing script in the spirit of awk.
class Jsawk < Formula
  desc "Like awk, but for JSON, using JavaScript objects and arrays"
  homepage "https://github.com/micha/jsawk"
  url "https://github.com/micha/jsawk/archive/1.4.tar.gz"
  sha256 "3d38ffb4b9c6ff7f17072a12c5817ffe68bd0ab58d6182de300fc1e587d34530"
  head "https://github.com/micha/jsawk.git"

  # Pure shell script; nothing to compile, so no bottle is needed.
  bottle :unneeded

  # jsawk shells out to SpiderMonkey's `js` interpreter at runtime.
  depends_on "spidermonkey"

  def install
    # The whole program is a single executable script.
    bin.install "jsawk"
  end

  test do
    # Pipe an empty JSON object through a script that adds one key.
    cmd = %(#{bin}/jsawk 'this.a = "foo"')
    assert_equal %({"a":"foo"}\n), pipe_output(cmd, "{}")
  end
end
|
# Homebrew formula for Julia 1.7.1 (revision 2). Builds from source against
# Homebrew-provided copies of the dependencies (USE_SYSTEM_*=1) rather than
# the upstream BinaryBuilder binaries, which requires the patching below.
class Julia < Formula
  desc "Fast, Dynamic Programming Language"
  homepage "https://julialang.org/"
  license all_of: ["MIT", "BSD-3-Clause", "Apache-2.0", "BSL-1.0"]
  revision 2
  head "https://github.com/JuliaLang/julia.git", branch: "master"

  stable do
    url "https://github.com/JuliaLang/julia/releases/download/v1.7.1/julia-1.7.1.tar.gz"
    sha256 "17d298e50e4e3dd897246ccebd9f40ce5b89077fa36217860efaec4576aa718e"

    # Patches for compatibility with LLVM 13
    patch do
      url "https://github.com/JuliaLang/julia/commit/677ce6d3adc2f70886f72795b0e5c739e75730ee.patch?full_index=1"
      sha256 "ebcedfbc61b6cc77c0dd9aebb9f1dfa477326241bf5a54209533e4886aad5af3"
    end

    patch do
      url "https://github.com/JuliaLang/julia/commit/47f9139e88917813cb7beee5e690c48c2ac65de4.patch?full_index=1"
      sha256 "cdc41494b2a163ca363da8ea9bcf27d7541a6dc9e6b4eff72f6c8ff8ce1b67b6"
    end

    patch do
      url "https://github.com/JuliaLang/julia/commit/1eb063f1957b2e287ad0c7435debc72af58bb6f1.patch?full_index=1"
      sha256 "d95b9fb5f327bc3ac351c35317a776ef6a46c1cdff248562e70c76e58eb9a903"
    end

    # Backported from:
    # https://github.com/JuliaLang/julia/commit/f8c918b00f7c62e204d324a827e2ee2ef05bb66a
    patch do
      url "https://raw.githubusercontent.com/archlinux/svntogit-community/074e62e4e946201779d2d6df9a261c91d111720f/trunk/f8c918b0.patch"
      sha256 "bc6c85cbbca489ef0b2876dbeb6ae493c11573e058507b8bcb9e01273bc3a38c"
    end

    # Backported from:
    # https://github.com/JuliaLang/julia/commit/6330398088e235e4d4fdbda38c41c87e02384edb.patch
    patch do
      url "https://raw.githubusercontent.com/archlinux/svntogit-community/df73abb8162e31e6541d2143d1db5f9f1d70b632/trunk/63303980.patch"
      sha256 "ce9cd140c3bc39987d60340bf365d6238e79cf4d5385494272c49c64af22ef78"
    end
  end

  bottle do
    sha256 cellar: :any, monterey: "f16f404c28635062356bf0c28624a5914e8f8fa7b858d86844971cad9224ce8b"
    sha256 cellar: :any, big_sur: "16416837ab79f26227b60ed1266992cf660d108a3c4562a2f73d18e8fdd651da"
    sha256 cellar: :any, catalina: "dbac1c14a0c578f82b1e74787bf71a7133f17f383d868aef2ee5157354c191d2"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "13ffff9f6c25f7c96af6beb284337116e26154e19e81fb4f68965781f10aec7f"
  end

  # Requires the M1 fork of GCC to build
  # https://github.com/JuliaLang/julia/issues/36617
  depends_on arch: :x86_64
  depends_on "ca-certificates"
  depends_on "curl"
  depends_on "gcc" # for gfortran
  depends_on "gmp"
  depends_on "libgit2"
  depends_on "libnghttp2"
  depends_on "libssh2"
  depends_on "llvm"
  depends_on "mbedtls@2"
  depends_on "mpfr"
  depends_on "openblas"
  depends_on "openlibm"
  depends_on "p7zip"
  depends_on "pcre2"
  depends_on "suite-sparse"
  depends_on "utf8proc"

  uses_from_macos "perl" => :build
  uses_from_macos "python" => :build
  uses_from_macos "zlib"

  on_linux do
    depends_on "patchelf" => :build
    # This dependency can be dropped when upstream resolves
    # https://github.com/JuliaLang/julia/issues/30154
    depends_on "libunwind"
  end

  fails_with gcc: "5"

  # Fix segfaults with Curl 7.81. We need to patch the contents of a tarball, so this can't be a `patch` block.
  # https://github.com/JuliaLang/Downloads.jl/issues/172
  resource "curl-patch" do
    url "https://raw.githubusercontent.com/archlinux/svntogit-community/6751794c82949589805db950119afba77549554a/trunk/julia-curl-7.81.patch"
    sha256 "710587dd88c7698dc5cdf47a1a50f6f144b584b7d9ffb85fac3f5f79c65fce11"
  end

  # Fix compatibility with LibGit2 1.2.0+
  # https://github.com/JuliaLang/julia/pull/43250
  patch do
    url "https://github.com/JuliaLang/julia/commit/4d7fc8465ed9eb820893235a6ff3d40274b643a7.patch?full_index=1"
    sha256 "3a34a2cd553929c2aee74aba04c8e42ccb896f9d491fb677537cd4bca9ba7caa"
  end

  # Remove broken tests running in `test` block. Reported at:
  # https://github.com/JuliaLang/julia/issues/43004
  patch :DATA

  # Patches the vendored Downloads.jl tarball, configures a system-deps build,
  # builds and installs, then fixes up library symlinks and the CA cert.
  def install
    # Fix segfaults with Curl 7.81. Remove when this is resolved upstream.
    srccache = buildpath/"stdlib/srccache"
    srccache.install resource("curl-patch")

    cd srccache do
      tarball = Pathname.glob("Downloads-*.tar.gz").first
      system "tar", "-xzf", tarball
      extracted_dir = Pathname.glob("JuliaLang-Downloads.jl-*").first
      to_patch = extracted_dir/"src/Curl/Multi.jl"
      system "patch", to_patch, "julia-curl-7.81.patch"
      system "tar", "-czf", tarball, extracted_dir
      # Update Julia's checksum database so the repacked tarball still verifies.
      md5sum = Digest::MD5.file(tarball).hexdigest
      sha512sum = Digest::SHA512.file(tarball).hexdigest
      (buildpath/"deps/checksums"/tarball/"md5").atomic_write md5sum
      (buildpath/"deps/checksums"/tarball/"sha512").atomic_write sha512sum
    end

    # Build documentation available at
    # https://github.com/JuliaLang/julia/blob/v#{version}/doc/build/build.md
    args = %W[
      VERBOSE=1
      USE_BINARYBUILDER=0
      prefix=#{prefix}
      sysconfdir=#{etc}
      USE_SYSTEM_CSL=1
      USE_SYSTEM_LLVM=1
      USE_SYSTEM_LIBUNWIND=1
      USE_SYSTEM_PCRE=1
      USE_SYSTEM_OPENLIBM=1
      USE_SYSTEM_BLAS=1
      USE_SYSTEM_LAPACK=1
      USE_SYSTEM_GMP=1
      USE_SYSTEM_MPFR=1
      USE_SYSTEM_LIBSUITESPARSE=1
      USE_SYSTEM_UTF8PROC=1
      USE_SYSTEM_MBEDTLS=1
      USE_SYSTEM_LIBSSH2=1
      USE_SYSTEM_NGHTTP2=1
      USE_SYSTEM_CURL=1
      USE_SYSTEM_LIBGIT2=1
      USE_SYSTEM_PATCHELF=1
      USE_SYSTEM_ZLIB=1
      USE_SYSTEM_P7ZIP=1
      LIBBLAS=-lopenblas
      LIBBLASNAME=libopenblas
      LIBLAPACK=-lopenblas
      LIBLAPACKNAME=libopenblas
      USE_BLAS64=0
      PYTHON=python3
      MACOSX_VERSION_MIN=#{MacOS.version}
    ]

    # Set MARCH and JULIA_CPU_TARGET to ensure Julia works on machines we distribute to.
    # Values adapted from https://github.com/JuliaCI/julia-buildbot/blob/master/master/inventory.py
    march = if build.head?
      "native"
    elsif Hardware::CPU.arm?
      "armv8-a"
    else
      Hardware.oldest_cpu
    end
    args << "MARCH=#{march}"

    cpu_targets = ["generic"]
    cpu_targets += if Hardware::CPU.arm?
      %w[cortex-a57 thunderx2t99 armv8.2-a,crypto,fullfp16,lse,rdm]
    else
      %w[sandybridge,-xsaveopt,clone_all haswell,-rdrnd,base(1)]
    end
    args << "JULIA_CPU_TARGET=#{cpu_targets.join(";")}" if build.stable?
    args << "TAGGED_RELEASE_BANNER=Built by #{tap.user} (v#{pkg_version})"

    # Prepare directories we install things into for the build
    (buildpath/"usr/lib").mkpath
    (buildpath/"usr/lib/julia").mkpath
    (buildpath/"usr/share/julia").mkpath

    # Help Julia find keg-only dependencies
    deps.map(&:to_formula).select(&:keg_only?).map(&:opt_lib).each do |libdir|
      ENV.append "LDFLAGS", "-Wl,-rpath,#{libdir}"
      next unless OS.linux?

      libdir.glob(shared_library("*")) do |so|
        cp so, buildpath/"usr/lib"
        cp so, buildpath/"usr/lib/julia"
        chmod "u+w", [buildpath/"usr/lib"/so.basename, buildpath/"usr/lib/julia"/so.basename]
      end
    end

    gcc = Formula["gcc"]
    gcclibdir = gcc.opt_lib/"gcc"/gcc.any_installed_version.major
    if OS.mac?
      ENV.append "LDFLAGS", "-Wl,-rpath,#{gcclibdir}"
      # List these two last, since we want keg-only libraries to be found first
      ENV.append "LDFLAGS", "-Wl,-rpath,#{HOMEBREW_PREFIX}/lib"
      ENV.append "LDFLAGS", "-Wl,-rpath,/usr/lib"
    else
      ENV.append "LDFLAGS", "-Wl,-rpath,#{lib}"
      ENV.append "LDFLAGS", "-Wl,-rpath,#{lib}/julia"
    end

    # Point the build at Homebrew's prefix when it searches for local headers/libs.
    inreplace "Make.inc" do |s|
      s.change_make_var! "LOCALBASE", HOMEBREW_PREFIX
    end

    # Don't try to use patchelf on our libLLVM.so. This is only present on 1.7.1.
    patchelf = Regexp.escape("$(PATCHELF)")
    shlib_ext = Regexp.escape(".$(SHLIB_EXT)")
    inreplace "Makefile", %r{^\s+#{patchelf} --set-rpath .*/libLLVM#{shlib_ext}$}, "" if OS.linux? && build.stable?

    # Remove library versions from MbedTLS_jll, nghttp2_jll and libLLVM_jll
    # https://git.archlinux.org/svntogit/community.git/tree/trunk/julia-hardcoded-libs.patch?h=packages/julia
    %w[MbedTLS nghttp2 LibGit2 OpenLibm].each do |dep|
      (buildpath/"stdlib").glob("**/#{dep}_jll.jl") do |jll|
        inreplace jll, %r{@rpath/lib(\w+)(\.\d+)*\.dylib}, "@rpath/lib\\1.dylib"
        inreplace jll, /lib(\w+)\.so(\.\d+)*/, "lib\\1.so"
      end
    end
    inreplace (buildpath/"stdlib").glob("**/libLLVM_jll.jl"), /libLLVM-\d+jl\.so/, "libLLVM.so"

    # Make Julia use a CA cert from `ca-certificates`
    cp Formula["ca-certificates"].pkgetc/"cert.pem", buildpath/"usr/share/julia"

    system "make", *args, "install"

    if OS.linux?
      # Replace symlinks referencing Cellar paths with ones using opt paths
      deps.reject(&:build?).map(&:to_formula).map(&:opt_lib).each do |libdir|
        libdir.glob(shared_library("*")) do |so|
          next unless (lib/"julia"/so.basename).exist?

          ln_sf so.relative_path_from(lib/"julia"), lib/"julia"
        end
      end
      libllvm = lib/"julia"/shared_library("libLLVM")
      (lib/"julia").install_symlink libllvm.basename.to_s => libllvm.realpath.basename.to_s
    end

    # Create copies of the necessary gcc libraries in `buildpath/"usr/lib"`
    system "make", "-C", "deps", "USE_SYSTEM_CSL=1", "install-csl"
    # Install gcc library symlinks where Julia expects them
    gcclibdir.glob(shared_library("*")) do |so|
      next unless (buildpath/"usr/lib"/so.basename).exist?

      # Use `ln_sf` instead of `install_symlink` to avoid referencing
      # gcc's full version and revision number in the symlink path
      ln_sf so.relative_path_from(lib/"julia"), lib/"julia"
    end

    # Some Julia packages look for libopenblas as libopenblas64_
    (lib/"julia").install_symlink shared_library("libopenblas") => shared_library("libopenblas64_")

    # Keep Julia's CA cert in sync with ca-certificates'
    pkgshare.install_symlink Formula["ca-certificates"].pkgetc/"cert.pem"
  end

  # Exercises REPL arithmetic, the core test suite, package installation, and
  # that the stdlib JLL wrappers and linked libraries actually load.
  test do
    args = %W[
      --startup-file=no
      --history-file=no
      --project=#{testpath}
      --procs #{ENV.make_jobs}
    ]

    assert_equal "4", shell_output("#{bin}/julia #{args.join(" ")} --print '2 + 2'").chomp
    system bin/"julia", *args, "--eval", 'Base.runtests("core")'

    # Check that installing packages works.
    # https://github.com/Homebrew/discussions/discussions/2749
    system bin/"julia", *args, "--eval", 'using Pkg; Pkg.add("Example")'

    # Check that Julia can load stdlibs that load non-Julia code.
    # Most of these also check that Julia can load Homebrew-provided libraries.
    jlls = %w[
      MPFR_jll SuiteSparse_jll Zlib_jll OpenLibm_jll
      nghttp2_jll MbedTLS_jll LibGit2_jll GMP_jll
      OpenBLAS_jll CompilerSupportLibraries_jll dSFMT_jll LibUV_jll
      LibSSH2_jll LibCURL_jll libLLVM_jll PCRE2_jll
    ]
    system bin/"julia", *args, "--eval", "using #{jlls.join(", ")}"

    # Check that Julia can load libraries in lib/"julia".
    # Most of these are symlinks to Homebrew-provided libraries.
    # This also checks that these libraries can be loaded even when
    # the symlinks are broken (e.g. by version bumps).
    libs = (lib/"julia").glob(shared_library("*"))
                        .map(&:basename)
                        .map(&:to_s)
                        .reject do |name|
      next true if name.start_with? "sys"
      next true if name.start_with? "libjulia-internal"
      next true if name.start_with? "libccalltest"

      false
    end

    (testpath/"library_test.jl").write <<~EOS
      using Libdl
      libraries = #{libs}
      for lib in libraries
      handle = dlopen(lib)
      @assert dlclose(handle) "Unable to close $(lib)!"
      end
    EOS
    system bin/"julia", *args, "library_test.jl"
  end
end
__END__
diff --git a/test/core.jl b/test/core.jl
index 74edc7c..0d6eaef 100644
--- a/test/core.jl
+++ b/test/core.jl
@@ -3516,9 +3516,6 @@ end
@test_throws TypeError Union{Int, 1}
@test_throws ErrorException Vararg{Any,-2}
-@test_throws ErrorException Vararg{Int, N} where N<:T where T
-@test_throws ErrorException Vararg{Int, N} where N<:Integer
-@test_throws ErrorException Vararg{Int, N} where N>:Integer
mutable struct FooNTuple{N}
z::Tuple{Integer, Vararg{Int, N}}
julia: update 1.7.1_2 bottle.
# Homebrew formula for Julia 1.7.1 (revision 2, rebuilt bottle). Builds from
# source against Homebrew-provided copies of the dependencies (USE_SYSTEM_*=1)
# rather than the upstream BinaryBuilder binaries, which requires the patching below.
class Julia < Formula
  desc "Fast, Dynamic Programming Language"
  homepage "https://julialang.org/"
  license all_of: ["MIT", "BSD-3-Clause", "Apache-2.0", "BSL-1.0"]
  revision 2
  head "https://github.com/JuliaLang/julia.git", branch: "master"

  stable do
    url "https://github.com/JuliaLang/julia/releases/download/v1.7.1/julia-1.7.1.tar.gz"
    sha256 "17d298e50e4e3dd897246ccebd9f40ce5b89077fa36217860efaec4576aa718e"

    # Patches for compatibility with LLVM 13
    patch do
      url "https://github.com/JuliaLang/julia/commit/677ce6d3adc2f70886f72795b0e5c739e75730ee.patch?full_index=1"
      sha256 "ebcedfbc61b6cc77c0dd9aebb9f1dfa477326241bf5a54209533e4886aad5af3"
    end

    patch do
      url "https://github.com/JuliaLang/julia/commit/47f9139e88917813cb7beee5e690c48c2ac65de4.patch?full_index=1"
      sha256 "cdc41494b2a163ca363da8ea9bcf27d7541a6dc9e6b4eff72f6c8ff8ce1b67b6"
    end

    patch do
      url "https://github.com/JuliaLang/julia/commit/1eb063f1957b2e287ad0c7435debc72af58bb6f1.patch?full_index=1"
      sha256 "d95b9fb5f327bc3ac351c35317a776ef6a46c1cdff248562e70c76e58eb9a903"
    end

    # Backported from:
    # https://github.com/JuliaLang/julia/commit/f8c918b00f7c62e204d324a827e2ee2ef05bb66a
    patch do
      url "https://raw.githubusercontent.com/archlinux/svntogit-community/074e62e4e946201779d2d6df9a261c91d111720f/trunk/f8c918b0.patch"
      sha256 "bc6c85cbbca489ef0b2876dbeb6ae493c11573e058507b8bcb9e01273bc3a38c"
    end

    # Backported from:
    # https://github.com/JuliaLang/julia/commit/6330398088e235e4d4fdbda38c41c87e02384edb.patch
    patch do
      url "https://raw.githubusercontent.com/archlinux/svntogit-community/df73abb8162e31e6541d2143d1db5f9f1d70b632/trunk/63303980.patch"
      sha256 "ce9cd140c3bc39987d60340bf365d6238e79cf4d5385494272c49c64af22ef78"
    end
  end

  bottle do
    sha256 cellar: :any, monterey: "f164e2c719a6a43807e34faceb66927d558cf9e550bfd9875cb3a1c08a746d84"
    sha256 cellar: :any, big_sur: "473b26bec5a788eb4a565b721deb83ae2c535220ebbc8bfcd6bfb14ac10a0294"
    sha256 cellar: :any, catalina: "1172de1f24e07a50865bebcb72464e9266117a1b2de391ab88f119161aff65f5"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "c82641075b6c3bfaae5ab6c7ea7466770d93ca092134c223e90105722786489a"
  end

  # Requires the M1 fork of GCC to build
  # https://github.com/JuliaLang/julia/issues/36617
  depends_on arch: :x86_64
  depends_on "ca-certificates"
  depends_on "curl"
  depends_on "gcc" # for gfortran
  depends_on "gmp"
  depends_on "libgit2"
  depends_on "libnghttp2"
  depends_on "libssh2"
  depends_on "llvm"
  depends_on "mbedtls@2"
  depends_on "mpfr"
  depends_on "openblas"
  depends_on "openlibm"
  depends_on "p7zip"
  depends_on "pcre2"
  depends_on "suite-sparse"
  depends_on "utf8proc"

  uses_from_macos "perl" => :build
  uses_from_macos "python" => :build
  uses_from_macos "zlib"

  on_linux do
    depends_on "patchelf" => :build
    # This dependency can be dropped when upstream resolves
    # https://github.com/JuliaLang/julia/issues/30154
    depends_on "libunwind"
  end

  fails_with gcc: "5"

  # Fix segfaults with Curl 7.81. We need to patch the contents of a tarball, so this can't be a `patch` block.
  # https://github.com/JuliaLang/Downloads.jl/issues/172
  resource "curl-patch" do
    url "https://raw.githubusercontent.com/archlinux/svntogit-community/6751794c82949589805db950119afba77549554a/trunk/julia-curl-7.81.patch"
    sha256 "710587dd88c7698dc5cdf47a1a50f6f144b584b7d9ffb85fac3f5f79c65fce11"
  end

  # Fix compatibility with LibGit2 1.2.0+
  # https://github.com/JuliaLang/julia/pull/43250
  patch do
    url "https://github.com/JuliaLang/julia/commit/4d7fc8465ed9eb820893235a6ff3d40274b643a7.patch?full_index=1"
    sha256 "3a34a2cd553929c2aee74aba04c8e42ccb896f9d491fb677537cd4bca9ba7caa"
  end

  # Remove broken tests running in `test` block. Reported at:
  # https://github.com/JuliaLang/julia/issues/43004
  patch :DATA

  # Patches the vendored Downloads.jl tarball, configures a system-deps build,
  # builds and installs, then fixes up library symlinks and the CA cert.
  def install
    # Fix segfaults with Curl 7.81. Remove when this is resolved upstream.
    srccache = buildpath/"stdlib/srccache"
    srccache.install resource("curl-patch")

    cd srccache do
      tarball = Pathname.glob("Downloads-*.tar.gz").first
      system "tar", "-xzf", tarball
      extracted_dir = Pathname.glob("JuliaLang-Downloads.jl-*").first
      to_patch = extracted_dir/"src/Curl/Multi.jl"
      system "patch", to_patch, "julia-curl-7.81.patch"
      system "tar", "-czf", tarball, extracted_dir
      # Update Julia's checksum database so the repacked tarball still verifies.
      md5sum = Digest::MD5.file(tarball).hexdigest
      sha512sum = Digest::SHA512.file(tarball).hexdigest
      (buildpath/"deps/checksums"/tarball/"md5").atomic_write md5sum
      (buildpath/"deps/checksums"/tarball/"sha512").atomic_write sha512sum
    end

    # Build documentation available at
    # https://github.com/JuliaLang/julia/blob/v#{version}/doc/build/build.md
    args = %W[
      VERBOSE=1
      USE_BINARYBUILDER=0
      prefix=#{prefix}
      sysconfdir=#{etc}
      USE_SYSTEM_CSL=1
      USE_SYSTEM_LLVM=1
      USE_SYSTEM_LIBUNWIND=1
      USE_SYSTEM_PCRE=1
      USE_SYSTEM_OPENLIBM=1
      USE_SYSTEM_BLAS=1
      USE_SYSTEM_LAPACK=1
      USE_SYSTEM_GMP=1
      USE_SYSTEM_MPFR=1
      USE_SYSTEM_LIBSUITESPARSE=1
      USE_SYSTEM_UTF8PROC=1
      USE_SYSTEM_MBEDTLS=1
      USE_SYSTEM_LIBSSH2=1
      USE_SYSTEM_NGHTTP2=1
      USE_SYSTEM_CURL=1
      USE_SYSTEM_LIBGIT2=1
      USE_SYSTEM_PATCHELF=1
      USE_SYSTEM_ZLIB=1
      USE_SYSTEM_P7ZIP=1
      LIBBLAS=-lopenblas
      LIBBLASNAME=libopenblas
      LIBLAPACK=-lopenblas
      LIBLAPACKNAME=libopenblas
      USE_BLAS64=0
      PYTHON=python3
      MACOSX_VERSION_MIN=#{MacOS.version}
    ]

    # Set MARCH and JULIA_CPU_TARGET to ensure Julia works on machines we distribute to.
    # Values adapted from https://github.com/JuliaCI/julia-buildbot/blob/master/master/inventory.py
    march = if build.head?
      "native"
    elsif Hardware::CPU.arm?
      "armv8-a"
    else
      Hardware.oldest_cpu
    end
    args << "MARCH=#{march}"

    cpu_targets = ["generic"]
    cpu_targets += if Hardware::CPU.arm?
      %w[cortex-a57 thunderx2t99 armv8.2-a,crypto,fullfp16,lse,rdm]
    else
      %w[sandybridge,-xsaveopt,clone_all haswell,-rdrnd,base(1)]
    end
    args << "JULIA_CPU_TARGET=#{cpu_targets.join(";")}" if build.stable?
    args << "TAGGED_RELEASE_BANNER=Built by #{tap.user} (v#{pkg_version})"

    # Prepare directories we install things into for the build
    (buildpath/"usr/lib").mkpath
    (buildpath/"usr/lib/julia").mkpath
    (buildpath/"usr/share/julia").mkpath

    # Help Julia find keg-only dependencies
    deps.map(&:to_formula).select(&:keg_only?).map(&:opt_lib).each do |libdir|
      ENV.append "LDFLAGS", "-Wl,-rpath,#{libdir}"
      next unless OS.linux?

      libdir.glob(shared_library("*")) do |so|
        cp so, buildpath/"usr/lib"
        cp so, buildpath/"usr/lib/julia"
        chmod "u+w", [buildpath/"usr/lib"/so.basename, buildpath/"usr/lib/julia"/so.basename]
      end
    end

    gcc = Formula["gcc"]
    gcclibdir = gcc.opt_lib/"gcc"/gcc.any_installed_version.major
    if OS.mac?
      ENV.append "LDFLAGS", "-Wl,-rpath,#{gcclibdir}"
      # List these two last, since we want keg-only libraries to be found first
      ENV.append "LDFLAGS", "-Wl,-rpath,#{HOMEBREW_PREFIX}/lib"
      ENV.append "LDFLAGS", "-Wl,-rpath,/usr/lib"
    else
      ENV.append "LDFLAGS", "-Wl,-rpath,#{lib}"
      ENV.append "LDFLAGS", "-Wl,-rpath,#{lib}/julia"
    end

    # Point the build at Homebrew's prefix when it searches for local headers/libs.
    inreplace "Make.inc" do |s|
      s.change_make_var! "LOCALBASE", HOMEBREW_PREFIX
    end

    # Don't try to use patchelf on our libLLVM.so. This is only present on 1.7.1.
    patchelf = Regexp.escape("$(PATCHELF)")
    shlib_ext = Regexp.escape(".$(SHLIB_EXT)")
    inreplace "Makefile", %r{^\s+#{patchelf} --set-rpath .*/libLLVM#{shlib_ext}$}, "" if OS.linux? && build.stable?

    # Remove library versions from MbedTLS_jll, nghttp2_jll and libLLVM_jll
    # https://git.archlinux.org/svntogit/community.git/tree/trunk/julia-hardcoded-libs.patch?h=packages/julia
    %w[MbedTLS nghttp2 LibGit2 OpenLibm].each do |dep|
      (buildpath/"stdlib").glob("**/#{dep}_jll.jl") do |jll|
        inreplace jll, %r{@rpath/lib(\w+)(\.\d+)*\.dylib}, "@rpath/lib\\1.dylib"
        inreplace jll, /lib(\w+)\.so(\.\d+)*/, "lib\\1.so"
      end
    end
    inreplace (buildpath/"stdlib").glob("**/libLLVM_jll.jl"), /libLLVM-\d+jl\.so/, "libLLVM.so"

    # Make Julia use a CA cert from `ca-certificates`
    cp Formula["ca-certificates"].pkgetc/"cert.pem", buildpath/"usr/share/julia"

    system "make", *args, "install"

    if OS.linux?
      # Replace symlinks referencing Cellar paths with ones using opt paths
      deps.reject(&:build?).map(&:to_formula).map(&:opt_lib).each do |libdir|
        libdir.glob(shared_library("*")) do |so|
          next unless (lib/"julia"/so.basename).exist?

          ln_sf so.relative_path_from(lib/"julia"), lib/"julia"
        end
      end
      libllvm = lib/"julia"/shared_library("libLLVM")
      (lib/"julia").install_symlink libllvm.basename.to_s => libllvm.realpath.basename.to_s
    end

    # Create copies of the necessary gcc libraries in `buildpath/"usr/lib"`
    system "make", "-C", "deps", "USE_SYSTEM_CSL=1", "install-csl"
    # Install gcc library symlinks where Julia expects them
    gcclibdir.glob(shared_library("*")) do |so|
      next unless (buildpath/"usr/lib"/so.basename).exist?

      # Use `ln_sf` instead of `install_symlink` to avoid referencing
      # gcc's full version and revision number in the symlink path
      ln_sf so.relative_path_from(lib/"julia"), lib/"julia"
    end

    # Some Julia packages look for libopenblas as libopenblas64_
    (lib/"julia").install_symlink shared_library("libopenblas") => shared_library("libopenblas64_")

    # Keep Julia's CA cert in sync with ca-certificates'
    pkgshare.install_symlink Formula["ca-certificates"].pkgetc/"cert.pem"
  end

  # Exercises REPL arithmetic, the core test suite, package installation, and
  # that the stdlib JLL wrappers and linked libraries actually load.
  test do
    args = %W[
      --startup-file=no
      --history-file=no
      --project=#{testpath}
      --procs #{ENV.make_jobs}
    ]

    assert_equal "4", shell_output("#{bin}/julia #{args.join(" ")} --print '2 + 2'").chomp
    system bin/"julia", *args, "--eval", 'Base.runtests("core")'

    # Check that installing packages works.
    # https://github.com/Homebrew/discussions/discussions/2749
    system bin/"julia", *args, "--eval", 'using Pkg; Pkg.add("Example")'

    # Check that Julia can load stdlibs that load non-Julia code.
    # Most of these also check that Julia can load Homebrew-provided libraries.
    jlls = %w[
      MPFR_jll SuiteSparse_jll Zlib_jll OpenLibm_jll
      nghttp2_jll MbedTLS_jll LibGit2_jll GMP_jll
      OpenBLAS_jll CompilerSupportLibraries_jll dSFMT_jll LibUV_jll
      LibSSH2_jll LibCURL_jll libLLVM_jll PCRE2_jll
    ]
    system bin/"julia", *args, "--eval", "using #{jlls.join(", ")}"

    # Check that Julia can load libraries in lib/"julia".
    # Most of these are symlinks to Homebrew-provided libraries.
    # This also checks that these libraries can be loaded even when
    # the symlinks are broken (e.g. by version bumps).
    libs = (lib/"julia").glob(shared_library("*"))
                        .map(&:basename)
                        .map(&:to_s)
                        .reject do |name|
      next true if name.start_with? "sys"
      next true if name.start_with? "libjulia-internal"
      next true if name.start_with? "libccalltest"

      false
    end

    (testpath/"library_test.jl").write <<~EOS
      using Libdl
      libraries = #{libs}
      for lib in libraries
      handle = dlopen(lib)
      @assert dlclose(handle) "Unable to close $(lib)!"
      end
    EOS
    system bin/"julia", *args, "library_test.jl"
  end
end
__END__
diff --git a/test/core.jl b/test/core.jl
index 74edc7c..0d6eaef 100644
--- a/test/core.jl
+++ b/test/core.jl
@@ -3516,9 +3516,6 @@ end
@test_throws TypeError Union{Int, 1}
@test_throws ErrorException Vararg{Any,-2}
-@test_throws ErrorException Vararg{Int, N} where N<:T where T
-@test_throws ErrorException Vararg{Int, N} where N<:Integer
-@test_throws ErrorException Vararg{Int, N} where N>:Integer
mutable struct FooNTuple{N}
z::Tuple{Integer, Vararg{Int, N}}
|
# Julia 1.7.0 built from source against Homebrew-provided libraries
# (the USE_SYSTEM_* flags below) instead of upstream's prebuilt
# BinaryBuilder artifacts.
class Julia < Formula
desc "Fast, Dynamic Programming Language"
homepage "https://julialang.org/"
url "https://github.com/JuliaLang/julia/releases/download/v1.7.0/julia-1.7.0.tar.gz"
sha256 "8e870dbef71bc72469933317a1a18214fd1b4b12f1080784af7b2c56177efcb4"
license all_of: ["MIT", "BSD-3-Clause", "Apache-2.0", "BSL-1.0"]
head "https://github.com/JuliaLang/julia.git"
bottle do
sha256 cellar: :any, monterey: "77735bd267a17c49932c29ead90656f78ccab85ca86199dd7c87b734ea5bc8a3"
sha256 cellar: :any, big_sur: "af12f0c921d83c0d34a08a714f033550c4e19150329c3ef8051a7608a3b7efb3"
sha256 cellar: :any, catalina: "f5b8fb6e630eb1c711709acbd81210133d81ebd58e8a42f17309caf1448ad3ca"
sha256 cellar: :any_skip_relocation, x86_64_linux: "5368aa795df50bfc0952423b0d7cd6a88cc2fa2f2725a783a2b4263e3040c191"
end
# Requires the M1 fork of GCC to build
# https://github.com/JuliaLang/julia/issues/36617
depends_on arch: :x86_64
depends_on "ca-certificates"
depends_on "curl"
depends_on "gcc" # for gfortran
depends_on "gmp"
depends_on "libgit2"
depends_on "libnghttp2"
depends_on "libssh2"
depends_on "llvm@12"
depends_on "mbedtls@2"
depends_on "mpfr"
depends_on "openblas"
depends_on "openlibm"
depends_on "p7zip"
depends_on "pcre2"
depends_on "suite-sparse"
depends_on "utf8proc"
uses_from_macos "perl" => :build
uses_from_macos "python" => :build
uses_from_macos "zlib"
on_linux do
depends_on "patchelf" => :build
# This dependency can be dropped when upstream resolves
# https://github.com/JuliaLang/julia/issues/30154
depends_on "libunwind"
end
fails_with gcc: "5"
# Fix compatibility with LibGit2 1.2.0+
# https://github.com/JuliaLang/julia/pull/43250
patch do
url "https://github.com/JuliaLang/julia/commit/4d7fc8465ed9eb820893235a6ff3d40274b643a7.patch?full_index=1"
sha256 "3a34a2cd553929c2aee74aba04c8e42ccb896f9d491fb677537cd4bca9ba7caa"
end
# Remove broken tests running in `test` block. Reported at:
# https://github.com/JuliaLang/julia/issues/43004
patch :DATA
# Configure a from-source build linked against Homebrew/system libraries,
# then fix up library symlinks so Julia's stdlib JLLs find them at runtime.
def install
# Build documentation available at
# https://github.com/JuliaLang/julia/blob/v#{version}/doc/build/build.md
args = %W[
VERBOSE=1
USE_BINARYBUILDER=0
prefix=#{prefix}
sysconfdir=#{etc}
USE_SYSTEM_CSL=1
USE_SYSTEM_LLVM=1
USE_SYSTEM_LIBUNWIND=1
USE_SYSTEM_PCRE=1
USE_SYSTEM_OPENLIBM=1
USE_SYSTEM_BLAS=1
USE_SYSTEM_LAPACK=1
USE_SYSTEM_GMP=1
USE_SYSTEM_MPFR=1
USE_SYSTEM_LIBSUITESPARSE=1
USE_SYSTEM_UTF8PROC=1
USE_SYSTEM_MBEDTLS=1
USE_SYSTEM_LIBSSH2=1
USE_SYSTEM_NGHTTP2=1
USE_SYSTEM_CURL=1
USE_SYSTEM_LIBGIT2=1
USE_SYSTEM_PATCHELF=1
USE_SYSTEM_ZLIB=1
USE_SYSTEM_P7ZIP=1
LIBBLAS=-lopenblas
LIBBLASNAME=libopenblas
LIBLAPACK=-lopenblas
LIBLAPACKNAME=libopenblas
USE_BLAS64=0
PYTHON=python3
MACOSX_VERSION_MIN=#{MacOS.version}
]
# Set MARCH and JULIA_CPU_TARGET to ensure Julia works on machines we distribute to.
# Values adapted from https://github.com/JuliaCI/julia-buildbot/blob/master/master/inventory.py
march = if build.head?
"native"
elsif Hardware::CPU.arm?
"armv8-a"
else
Hardware.oldest_cpu
end
args << "MARCH=#{march}"
cpu_targets = ["generic"]
cpu_targets += if Hardware::CPU.arm?
%w[cortex-a57 thunderx2t99 armv8.2-a,crypto,fullfp16,lse,rdm]
else
%w[sandybridge,-xsaveopt,clone_all haswell,-rdrnd,base(1)]
end
args << "JULIA_CPU_TARGET=#{cpu_targets.join(";")}" if build.stable?
# Brand the REPL banner with the tap and version that built this copy.
args << "TAGGED_RELEASE_BANNER=Built by #{tap.user} (v#{pkg_version})"
# Prepare directories we install things into for the build
(buildpath/"usr/lib").mkpath
(buildpath/"usr/lib/julia").mkpath
(buildpath/"usr/share/julia").mkpath
# Help Julia find keg-only dependencies
deps.map(&:to_formula).select(&:keg_only?).map(&:opt_lib).each do |libdir|
ENV.append "LDFLAGS", "-Wl,-rpath,#{libdir}"
next unless OS.linux?
# On Linux, also stage copies of the shared objects for the build.
libdir.glob(shared_library("*")) do |so|
cp so, buildpath/"usr/lib"
cp so, buildpath/"usr/lib/julia"
chmod "u+w", [buildpath/"usr/lib"/so.basename, buildpath/"usr/lib/julia"/so.basename]
end
end
# gcc provides the gfortran runtime libraries, located in a
# version-numbered subdirectory of gcc's lib dir.
gcc = Formula["gcc"]
gcclibdir = gcc.opt_lib/"gcc"/gcc.any_installed_version.major
if OS.mac?
ENV.append "LDFLAGS", "-Wl,-rpath,#{gcclibdir}"
# List these two last, since we want keg-only libraries to be found first
ENV.append "LDFLAGS", "-Wl,-rpath,#{HOMEBREW_PREFIX}/lib"
ENV.append "LDFLAGS", "-Wl,-rpath,/usr/lib"
else
ENV.append "LDFLAGS", "-Wl,-rpath,#{lib}"
ENV.append "LDFLAGS", "-Wl,-rpath,#{lib}/julia"
end
# Point the build's LOCALBASE at Homebrew's prefix so headers and
# libraries installed there are found.
inreplace "Make.inc" do |s|
s.change_make_var! "LOCALBASE", HOMEBREW_PREFIX
end
# Remove library versions from MbedTLS_jll, nghttp2_jll and libLLVM_jll
# https://git.archlinux.org/svntogit/community.git/tree/trunk/julia-hardcoded-libs.patch?h=packages/julia
%w[MbedTLS nghttp2 LibGit2 OpenLibm].each do |dep|
(buildpath/"stdlib").glob("**/#{dep}_jll.jl") do |jll|
inreplace jll, %r{@rpath/lib(\w+)(\.\d+)*\.dylib}, "@rpath/lib\\1.dylib"
inreplace jll, /lib(\w+)\.so(\.\d+)*/, "lib\\1.so"
end
end
# libLLVM_jll carries the LLVM version in its library name - strip it too.
inreplace (buildpath/"stdlib").glob("**/libLLVM_jll.jl"), /libLLVM-\d+jl\.so/, "libLLVM.so"
# Make Julia use a CA cert from `ca-certificates`
cp Formula["ca-certificates"].pkgetc/"cert.pem", buildpath/"usr/share/julia"
system "make", *args, "install"
if OS.linux?
# Replace symlinks referencing Cellar paths with ones using opt paths
deps.reject(&:build?).map(&:to_formula).map(&:opt_lib).each do |libdir|
libdir.glob(shared_library("*")) do |so|
next unless (lib/"julia"/so.basename).exist?
ln_sf so.relative_path_from(lib/"julia"), lib/"julia"
end
end
# Provide the fully-versioned libLLVM name alongside the generic one.
libllvm = lib/"julia"/shared_library("libLLVM")
(lib/"julia").install_symlink libllvm.basename.to_s => libllvm.realpath.basename.to_s
end
# Create copies of the necessary gcc libraries in `buildpath/"usr/lib"`
system "make", "-C", "deps", "USE_SYSTEM_CSL=1", "install-csl"
# Install gcc library symlinks where Julia expects them
gcclibdir.glob(shared_library("*")) do |so|
next unless (buildpath/"usr/lib"/so.basename).exist?
# Use `ln_sf` instead of `install_symlink` to avoid referencing
# gcc's full version and revision number in the symlink path
ln_sf so.relative_path_from(lib/"julia"), lib/"julia"
end
# Some Julia packages look for libopenblas as libopenblas64_
(lib/"julia").install_symlink shared_library("libopenblas") => shared_library("libopenblas64_")
# Keep Julia's CA cert in sync with ca-certificates'
pkgshare.install_symlink Formula["ca-certificates"].pkgetc/"cert.pem"
end
# Smoke tests: REPL arithmetic, the core test suite, stdlib JLLs, and
# dlopen of the bundled/symlinked libraries.
test do
args = %W[
--startup-file=no
--history-file=no
--project=#{testpath}
--procs #{ENV.make_jobs}
]
assert_equal "4", shell_output("#{bin}/julia #{args.join(" ")} --print '2 + 2'").chomp
system bin/"julia", *args, "--eval", 'Base.runtests("core")'
# Check that Julia can load stdlibs that load non-Julia code.
# Most of these also check that Julia can load Homebrew-provided libraries.
jlls = %w[
MPFR_jll SuiteSparse_jll Zlib_jll OpenLibm_jll
nghttp2_jll MbedTLS_jll LibGit2_jll GMP_jll
OpenBLAS_jll CompilerSupportLibraries_jll dSFMT_jll LibUV_jll
LibSSH2_jll LibCURL_jll libLLVM_jll PCRE2_jll
]
system bin/"julia", *args, "--eval", "using #{jlls.join(", ")}"
# FIXME: The test below will try, and fail, to load the unversioned LLVM's
# libraries since LLVM is not keg-only on Linux, but that's not what
# we want when Julia depends on a keg-only LLVM (which it currently does).
llvm = deps.map(&:to_formula)
.find { |f| f.name.match?(/^llvm(@\d+(\.\d+)*)$/) }
return if OS.linux? && llvm.keg_only?
# Check that Julia can load libraries in lib/"julia".
# Most of these are symlinks to Homebrew-provided libraries.
# This also checks that these libraries can be loaded even when
# the symlinks are broken (e.g. by version bumps).
dlext = shared_library("").sub(".", "")
libs = (lib/"julia").children
.reject(&:directory?)
.map(&:basename)
.map(&:to_s)
.select { |s| s.start_with?("lib") && s.end_with?(dlext) }
(testpath/"library_test.jl").write <<~EOS
using Libdl
libraries = #{libs}
for lib in libraries
handle = dlopen(lib)
@assert dlclose(handle) "Unable to close $(lib)!"
end
EOS
system bin/"julia", *args, "library_test.jl"
end
end
__END__
diff --git a/test/core.jl b/test/core.jl
index 74edc7c..0d6eaef 100644
--- a/test/core.jl
+++ b/test/core.jl
@@ -3516,9 +3516,6 @@ end
@test_throws TypeError Union{Int, 1}
@test_throws ErrorException Vararg{Any,-2}
-@test_throws ErrorException Vararg{Int, N} where N<:T where T
-@test_throws ErrorException Vararg{Int, N} where N<:Integer
-@test_throws ErrorException Vararg{Int, N} where N>:Integer
mutable struct FooNTuple{N}
z::Tuple{Integer, Vararg{Int, N}}
julia: improve test
I think we should be able to stop skipping the `dlopen` test on Linux
now.
Closes #90237.
Signed-off-by: Michka Popoff <5d406f95fb0e0230f83654e4a22d0115cc205d59@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Julia 1.7.0 built from source against Homebrew-provided libraries
# (the USE_SYSTEM_* flags below) instead of upstream's prebuilt
# BinaryBuilder artifacts.
class Julia < Formula
desc "Fast, Dynamic Programming Language"
homepage "https://julialang.org/"
url "https://github.com/JuliaLang/julia/releases/download/v1.7.0/julia-1.7.0.tar.gz"
sha256 "8e870dbef71bc72469933317a1a18214fd1b4b12f1080784af7b2c56177efcb4"
license all_of: ["MIT", "BSD-3-Clause", "Apache-2.0", "BSL-1.0"]
head "https://github.com/JuliaLang/julia.git"
bottle do
sha256 cellar: :any, monterey: "77735bd267a17c49932c29ead90656f78ccab85ca86199dd7c87b734ea5bc8a3"
sha256 cellar: :any, big_sur: "af12f0c921d83c0d34a08a714f033550c4e19150329c3ef8051a7608a3b7efb3"
sha256 cellar: :any, catalina: "f5b8fb6e630eb1c711709acbd81210133d81ebd58e8a42f17309caf1448ad3ca"
sha256 cellar: :any_skip_relocation, x86_64_linux: "5368aa795df50bfc0952423b0d7cd6a88cc2fa2f2725a783a2b4263e3040c191"
end
# Requires the M1 fork of GCC to build
# https://github.com/JuliaLang/julia/issues/36617
depends_on arch: :x86_64
depends_on "ca-certificates"
depends_on "curl"
depends_on "gcc" # for gfortran
depends_on "gmp"
depends_on "libgit2"
depends_on "libnghttp2"
depends_on "libssh2"
depends_on "llvm@12"
depends_on "mbedtls@2"
depends_on "mpfr"
depends_on "openblas"
depends_on "openlibm"
depends_on "p7zip"
depends_on "pcre2"
depends_on "suite-sparse"
depends_on "utf8proc"
uses_from_macos "perl" => :build
uses_from_macos "python" => :build
uses_from_macos "zlib"
on_linux do
depends_on "patchelf" => :build
# This dependency can be dropped when upstream resolves
# https://github.com/JuliaLang/julia/issues/30154
depends_on "libunwind"
end
fails_with gcc: "5"
# Fix compatibility with LibGit2 1.2.0+
# https://github.com/JuliaLang/julia/pull/43250
patch do
url "https://github.com/JuliaLang/julia/commit/4d7fc8465ed9eb820893235a6ff3d40274b643a7.patch?full_index=1"
sha256 "3a34a2cd553929c2aee74aba04c8e42ccb896f9d491fb677537cd4bca9ba7caa"
end
# Remove broken tests running in `test` block. Reported at:
# https://github.com/JuliaLang/julia/issues/43004
patch :DATA
# Configure a from-source build linked against Homebrew/system libraries,
# then fix up library symlinks so Julia's stdlib JLLs find them at runtime.
def install
# Build documentation available at
# https://github.com/JuliaLang/julia/blob/v#{version}/doc/build/build.md
args = %W[
VERBOSE=1
USE_BINARYBUILDER=0
prefix=#{prefix}
sysconfdir=#{etc}
USE_SYSTEM_CSL=1
USE_SYSTEM_LLVM=1
USE_SYSTEM_LIBUNWIND=1
USE_SYSTEM_PCRE=1
USE_SYSTEM_OPENLIBM=1
USE_SYSTEM_BLAS=1
USE_SYSTEM_LAPACK=1
USE_SYSTEM_GMP=1
USE_SYSTEM_MPFR=1
USE_SYSTEM_LIBSUITESPARSE=1
USE_SYSTEM_UTF8PROC=1
USE_SYSTEM_MBEDTLS=1
USE_SYSTEM_LIBSSH2=1
USE_SYSTEM_NGHTTP2=1
USE_SYSTEM_CURL=1
USE_SYSTEM_LIBGIT2=1
USE_SYSTEM_PATCHELF=1
USE_SYSTEM_ZLIB=1
USE_SYSTEM_P7ZIP=1
LIBBLAS=-lopenblas
LIBBLASNAME=libopenblas
LIBLAPACK=-lopenblas
LIBLAPACKNAME=libopenblas
USE_BLAS64=0
PYTHON=python3
MACOSX_VERSION_MIN=#{MacOS.version}
]
# Set MARCH and JULIA_CPU_TARGET to ensure Julia works on machines we distribute to.
# Values adapted from https://github.com/JuliaCI/julia-buildbot/blob/master/master/inventory.py
march = if build.head?
"native"
elsif Hardware::CPU.arm?
"armv8-a"
else
Hardware.oldest_cpu
end
args << "MARCH=#{march}"
cpu_targets = ["generic"]
cpu_targets += if Hardware::CPU.arm?
%w[cortex-a57 thunderx2t99 armv8.2-a,crypto,fullfp16,lse,rdm]
else
%w[sandybridge,-xsaveopt,clone_all haswell,-rdrnd,base(1)]
end
args << "JULIA_CPU_TARGET=#{cpu_targets.join(";")}" if build.stable?
# Brand the REPL banner with the tap and version that built this copy.
args << "TAGGED_RELEASE_BANNER=Built by #{tap.user} (v#{pkg_version})"
# Prepare directories we install things into for the build
(buildpath/"usr/lib").mkpath
(buildpath/"usr/lib/julia").mkpath
(buildpath/"usr/share/julia").mkpath
# Help Julia find keg-only dependencies
deps.map(&:to_formula).select(&:keg_only?).map(&:opt_lib).each do |libdir|
ENV.append "LDFLAGS", "-Wl,-rpath,#{libdir}"
next unless OS.linux?
# On Linux, also stage copies of the shared objects for the build.
libdir.glob(shared_library("*")) do |so|
cp so, buildpath/"usr/lib"
cp so, buildpath/"usr/lib/julia"
chmod "u+w", [buildpath/"usr/lib"/so.basename, buildpath/"usr/lib/julia"/so.basename]
end
end
# gcc provides the gfortran runtime libraries, located in a
# version-numbered subdirectory of gcc's lib dir.
gcc = Formula["gcc"]
gcclibdir = gcc.opt_lib/"gcc"/gcc.any_installed_version.major
if OS.mac?
ENV.append "LDFLAGS", "-Wl,-rpath,#{gcclibdir}"
# List these two last, since we want keg-only libraries to be found first
ENV.append "LDFLAGS", "-Wl,-rpath,#{HOMEBREW_PREFIX}/lib"
ENV.append "LDFLAGS", "-Wl,-rpath,/usr/lib"
else
ENV.append "LDFLAGS", "-Wl,-rpath,#{lib}"
ENV.append "LDFLAGS", "-Wl,-rpath,#{lib}/julia"
end
# Point the build's LOCALBASE at Homebrew's prefix so headers and
# libraries installed there are found.
inreplace "Make.inc" do |s|
s.change_make_var! "LOCALBASE", HOMEBREW_PREFIX
end
# Remove library versions from MbedTLS_jll, nghttp2_jll and libLLVM_jll
# https://git.archlinux.org/svntogit/community.git/tree/trunk/julia-hardcoded-libs.patch?h=packages/julia
%w[MbedTLS nghttp2 LibGit2 OpenLibm].each do |dep|
(buildpath/"stdlib").glob("**/#{dep}_jll.jl") do |jll|
inreplace jll, %r{@rpath/lib(\w+)(\.\d+)*\.dylib}, "@rpath/lib\\1.dylib"
inreplace jll, /lib(\w+)\.so(\.\d+)*/, "lib\\1.so"
end
end
# libLLVM_jll carries the LLVM version in its library name - strip it too.
inreplace (buildpath/"stdlib").glob("**/libLLVM_jll.jl"), /libLLVM-\d+jl\.so/, "libLLVM.so"
# Make Julia use a CA cert from `ca-certificates`
cp Formula["ca-certificates"].pkgetc/"cert.pem", buildpath/"usr/share/julia"
system "make", *args, "install"
if OS.linux?
# Replace symlinks referencing Cellar paths with ones using opt paths
deps.reject(&:build?).map(&:to_formula).map(&:opt_lib).each do |libdir|
libdir.glob(shared_library("*")) do |so|
next unless (lib/"julia"/so.basename).exist?
ln_sf so.relative_path_from(lib/"julia"), lib/"julia"
end
end
# Provide the fully-versioned libLLVM name alongside the generic one.
libllvm = lib/"julia"/shared_library("libLLVM")
(lib/"julia").install_symlink libllvm.basename.to_s => libllvm.realpath.basename.to_s
end
# Create copies of the necessary gcc libraries in `buildpath/"usr/lib"`
system "make", "-C", "deps", "USE_SYSTEM_CSL=1", "install-csl"
# Install gcc library symlinks where Julia expects them
gcclibdir.glob(shared_library("*")) do |so|
next unless (buildpath/"usr/lib"/so.basename).exist?
# Use `ln_sf` instead of `install_symlink` to avoid referencing
# gcc's full version and revision number in the symlink path
ln_sf so.relative_path_from(lib/"julia"), lib/"julia"
end
# Some Julia packages look for libopenblas as libopenblas64_
(lib/"julia").install_symlink shared_library("libopenblas") => shared_library("libopenblas64_")
# Keep Julia's CA cert in sync with ca-certificates'
pkgshare.install_symlink Formula["ca-certificates"].pkgetc/"cert.pem"
end
# Smoke tests: REPL arithmetic, the core test suite, stdlib JLLs, and
# dlopen of the bundled/symlinked libraries.
test do
args = %W[
--startup-file=no
--history-file=no
--project=#{testpath}
--procs #{ENV.make_jobs}
]
assert_equal "4", shell_output("#{bin}/julia #{args.join(" ")} --print '2 + 2'").chomp
system bin/"julia", *args, "--eval", 'Base.runtests("core")'
# Check that Julia can load stdlibs that load non-Julia code.
# Most of these also check that Julia can load Homebrew-provided libraries.
jlls = %w[
MPFR_jll SuiteSparse_jll Zlib_jll OpenLibm_jll
nghttp2_jll MbedTLS_jll LibGit2_jll GMP_jll
OpenBLAS_jll CompilerSupportLibraries_jll dSFMT_jll LibUV_jll
LibSSH2_jll LibCURL_jll libLLVM_jll PCRE2_jll
]
system bin/"julia", *args, "--eval", "using #{jlls.join(", ")}"
# Check that Julia can load libraries in lib/"julia".
# Most of these are symlinks to Homebrew-provided libraries.
# This also checks that these libraries can be loaded even when
# the symlinks are broken (e.g. by version bumps).
# sys*, libjulia-internal and libccalltest are Julia-internal images that
# are not meant to be dlopen'ed directly, so exclude them.
libs = (lib/"julia").glob(shared_library("*"))
.map(&:basename)
.map(&:to_s)
.reject do |name|
next true if name.start_with? "sys"
next true if name.start_with? "libjulia-internal"
next true if name.start_with? "libccalltest"
false
end
(testpath/"library_test.jl").write <<~EOS
using Libdl
libraries = #{libs}
for lib in libraries
handle = dlopen(lib)
@assert dlclose(handle) "Unable to close $(lib)!"
end
EOS
system bin/"julia", *args, "library_test.jl"
end
end
__END__
diff --git a/test/core.jl b/test/core.jl
index 74edc7c..0d6eaef 100644
--- a/test/core.jl
+++ b/test/core.jl
@@ -3516,9 +3516,6 @@ end
@test_throws TypeError Union{Int, 1}
@test_throws ErrorException Vararg{Any,-2}
-@test_throws ErrorException Vararg{Int, N} where N<:T where T
-@test_throws ErrorException Vararg{Int, N} where N<:Integer
-@test_throws ErrorException Vararg{Int, N} where N>:Integer
mutable struct FooNTuple{N}
z::Tuple{Integer, Vararg{Int, N}}
|
require 'rest-client'
#
# This is an institution settings - used for custom imports of users into units.
#
#
# This is an institution settings - used for custom imports of users into units.
#
# Integrates with two Deakin services:
# * Callista (student records) for enrolment data
# * STAR (timetabling) for tutorial/activity allocations
#
class DeakinInstitutionSettings
  # Shortcut to the Rails logger used by all sync routines.
  def logger
    Rails.logger
  end

  # Service endpoints and credentials are provided via environment variables.
  def initialize
    @base_url = ENV['DF_INSTITUTION_SETTINGS_SYNC_BASE_URL']
    @client_id = ENV['DF_INSTITUTION_SETTINGS_SYNC_CLIENT_ID']
    @client_secret = ENV['DF_INSTITUTION_SETTINGS_SYNC_CLIENT_SECRET']
    @star_url = ENV['DF_INSTITUTION_SETTINGS_SYNC_STAR_URL']
    @star_user = ENV['DF_INSTITUTION_SETTINGS_SYNC_STAR_USER']
    @star_secret = ENV['DF_INSTITUTION_SETTINGS_SYNC_STAR_SECRET']
  end

  # True when the CSV headers look like a Callista export
  # (35 columns, first column "person id").
  def are_callista_headers?(headers)
    headers[0] == "person id" && headers.count == 35
  end

  # True when the CSV headers look like a STAR export
  # (11 columns including "student_code").
  def are_star_headers?(headers)
    headers.include?("student_code") && headers.count == 11
  end

  # True when the headers match either supported institutional import format.
  def are_headers_institution_users?(headers)
    are_callista_headers?(headers) || are_star_headers?(headers)
  end

  # Returns the required +headers+ that are absent from +row+.
  def missing_headers(row, headers)
    headers - row.to_hash.keys
  end

  # Returns the import configuration (header validation, row mapper, and
  # tutorial-replacement policy) for the detected file format.
  def user_import_settings_for(headers)
    if are_callista_headers?(headers)
      {
        missing_headers_lambda: ->(row) {
          missing_headers(row, ["person id", "surname", "given names", "unit code", "student attempt status", "email", "preferred given name", "campus"])
        },
        fetch_row_data_lambda: ->(row, unit) { fetch_callista_row(row, unit) },
        replace_existing_tutorial: false
      }
    else
      {
        missing_headers_lambda: ->(row) {
          missing_headers(row, ["student_code","first_name","last_name","email_address","preferred_name","subject_code","activity_code","campus","day_of_week","start_time","location", "campus"])
        },
        fetch_row_data_lambda: ->(row, unit) { fetch_star_row(row, unit) },
        replace_existing_tutorial: true
      }
    end
  end

  # Expands a three-letter weekday abbreviation; unknown values pass through.
  def day_abbr_to_name(day)
    case day.downcase
    when 'mon'
      'Monday'
    when 'tue'
      'Tuesday'
    when 'wed'
      'Wednesday'
    when 'thu'
      'Thursday'
    when 'fri'
      'Friday'
    else
      day
    end
  end

  # Finds (or creates) the ActivityType for a STAR activity group code.
  # The trailing two characters (the stream number) are dropped from both the
  # code and the description when creating a new type.
  def activity_type_for_group_code(activity_group_code, description)
    result = ActivityType.where('lower(abbreviation) = :abbr', abbr: activity_group_code[0...-2].downcase).first
    if result.nil?
      name = description[0...-2]
      abbr = activity_group_code[0...-2]
      result = ActivityType.create!(name: name, abbreviation: abbr)
    end
    result
  end

  # Abbreviation used for the auto-created cloud (online) tutorial.
  def default_cloud_campus_abbr
    'Cloud-01'
  end

  # Doubtfire::Application.config.institution_settings.sync_streams_from_star(Unit.last)
  #
  # Pulls the STAR timetable for +unit+ and creates any missing tutorial
  # streams and tutorials.
  def sync_streams_from_star(unit)
    tp = unit.teaching_period
    url = "#{@star_url}/star-#{tp.year}/rest/activities"
    logger.info("Fetching #{unit.name} timetable from #{url}")
    response = RestClient.post(url, { username: @star_user, password: @star_secret, where_clause: "subject_code LIKE '#{unit.code}%'" })
    return unless response.code == 200

    json_data = JSON.parse(response.body)
    if json_data["activities"].nil?
      logger.error "Failed to sync #{unit.code} - No response from #{url}"
      return
    end

    json_data["activities"].each do |activity|
      # Make sure units match
      subject_match = /.*?(?=_)/.match( activity["subject_code"] )
      unit_code = subject_match.nil? ? nil : subject_match[0]
      unless unit_code == unit.code
        # BUG FIX: previously interpolated the undefined local `enrolmentData`,
        # which raised a NameError instead of logging the mismatch.
        logger.error "Failed to sync #{unit.code} - response had unit code #{activity['subject_code']}"
        return
      end

      stream = unit.tutorial_streams.where(abbreviation: activity['activity_group_code']).first
      # Skip classes - unless it is in the unit's current streams
      next if stream.nil? && activity_type_for_group_code(activity['activity_group_code'], activity['description']).abbreviation == 'Cls'

      if stream.nil?
        stream = unit.add_tutorial_stream activity['description'], activity['activity_group_code'], activity_type_for_group_code(activity['activity_group_code'], activity['description'])
      end

      campus = Campus.find_by(abbreviation: activity['campus'])
      abbr = tutorial_abbr_for_star(activity)
      tutorial = unit.tutorials.where(abbreviation: abbr, campus_id: campus.id).first
      if tutorial.nil?
        unit.add_tutorial(
          activity['day_of_week'],  # day
          activity['start_time'],   # time
          activity['location'],     # location
          unit.main_convenor_user,  # tutor
          campus,                   # campus
          -1,                       # capacity (-1 = unlimited)
          abbr,                     # abbreviation
          stream                    # tutorial_stream
        )
      end
    end
  end

  # Maps one STAR CSV row to the standard import hash for +unit+.
  def fetch_star_row(row, unit)
    email_match = /(.*)(?=@)/.match( row["email_address"] )
    subject_match = /.*?(?=_)/.match( row["subject_code"] )
    username = email_match.nil? ? nil : email_match[0]
    unit_code = subject_match.nil? ? nil : subject_match[0]
    tutorial_code = fetch_tutorial unit, row
    {
      unit_code: unit_code,
      username: username,
      student_id: row["student_code"],
      first_name: row["first_name"],
      last_name: row["last_name"],
      nickname: row["preferred_name"] == '-' ? nil : row["preferred_name"],
      email: row["email_address"],
      enrolled: true,
      tutorials: tutorial_code.present? ? [ tutorial_code ] : [],
      campus: row["campus"]
    }
  end

  # Maps a Callista "unit mode"/"unit location" pair to a Campus record.
  # Off-campus study maps to the cloud campus ('C').
  def map_callista_to_campus(row)
    key = row["unit mode"] == 'OFF' ? 'C' : row['unit location']
    Campus.find_by(abbreviation: key)
  end

  # The cloud (online) campus record.
  def cloud_campus
    Campus.find_by(abbreviation: 'C')
  end

  # Maps one Callista CSV row to the standard import hash, also ensuring the
  # matching User record is kept in sync.
  def fetch_callista_row(row, unit)
    campus = map_callista_to_campus(row)
    result = {
      unit_code: row["unit code"],
      username: row["email"],
      student_id: row["person id"],
      first_name: row["given names"],
      last_name: row["surname"],
      nickname: row["preferred given name"] == "-" ? nil : row["preferred given name"],
      email: "#{row["email"]}@deakin.edu.au",
      enrolled: row["student attempt status"] == 'ENROLLED',
      campus: campus.name,
      tutorials: []
    }
    sync_student_user_from_callista(result)
    result
  end

  #
  # Ensure that changes in email are propagated to users with matching ids
  #
  # Returns the matching/updated User, or nil when no safe automatic fix is
  # possible (those cases are logged for manual follow-up).
  def sync_student_user_from_callista(row_data)
    username_user = User.find_by(username: row_data[:username])
    student_id_user = User.find_by(student_id: row_data[:student_id])

    # Same record found both ways - nothing to fix.
    return username_user if username_user.present? && student_id_user.present? && username_user.id == student_id_user.id
    # Unknown user - nothing to sync.
    return nil if username_user.nil? && student_id_user.nil?

    if username_user.nil? && student_id_user.present?
      # Same person (student id matches) with a changed username/email.
      student_id_user.email = row_data[:email] # update to new emails and...
      student_id_user.username = row_data[:username] # switch username - its the same person as the id is the same
      student_id_user.login_id = row_data[:username] # reset to make sure not caching old data
      if student_id_user.valid?
        student_id_user.save
      else
        logger.error("Unable to fix user #{row_data} - record invalid!")
      end
      student_id_user
    elsif username_user.present? && student_id_user.present?
      # Two distinct records match - cannot merge automatically.
      logger.error("Unable to fix user #{row_data} - both username and student id users present. Need manual fix.")
      nil
    elsif username_user.present?
      # BUG FIX: this message previously claimed both users were present;
      # only the username user exists here, with a different student id.
      logger.error("Unable to fix user #{row_data} - username user present with a different student id. Need manual fix.")
      nil
    else
      # Defensive - unreachable given the early returns above.
      logger.error("Unable to fix user #{row_data} - Need manual fix.")
      nil
    end
  end

  # Picks the cloud tutorial with the lowest fill percentage, incrementing
  # its :added counter so consecutive calls spread students evenly.
  # Returns the tutorial abbreviation.
  def find_cloud_tutorial(unit, tutorial_stats)
    if tutorial_stats.count == 1
      # There is only one... so return it!
      return tutorial_stats.first[:abbreviation]
    end

    # Sort the tutorials by fill % and take the emptiest one.
    list = tutorial_stats.sort_by do |r|
      capacity = r[:capacity].present? ? r[:capacity] : 0
      capacity = 10000 if capacity <= 0 # unlimited/unknown capacity sorts last
      (r[:enrolment_count] + r[:added]) / capacity
    end
    result = list.first
    result[:added] += 1
    result[:abbreviation]
  end

  # Doubtfire::Application.config.institution_settings.sync_enrolments(Unit.last)
  #
  # Pulls enrolment data for +unit+ from Callista and synchronises the unit's
  # enrolments. Returns a hash with :success, :ignored and :errors entries.
  def sync_enrolments(unit)
    logger.info("Starting sync for #{unit.code}")
    result = {
      success: [],
      ignored: [],
      errors: []
    }

    tp = unit.teaching_period
    unless tp.present?
      logger.error "Failing to sync unit #{unit.code} as not in teaching period"
      return
    end

    begin
      url = "#{@base_url}?academicYear=#{tp.year}&periodType=trimester&period=#{tp.period.last}&unitCode=#{unit.code}"
      logger.info("Requesting #{url}")
      # BUG FIX: pass the header hash directly instead of via a pointless
      # `headers=` local assignment.
      response = RestClient.get(url, { "client_id" => @client_id, "client_secret" => @client_secret })

      if response.code == 200
        json_data = JSON.parse(response.body)
        if json_data["unitEnrolments"].nil?
          logger.error "Failed to sync #{unit.code} - No response from #{url}"
          return
        end

        enrolment_data = json_data["unitEnrolments"].first
        # Make sure units match
        unless enrolment_data['unitCode'] == unit.code
          logger.error "Failed to sync #{unit.code} - response had unit code #{enrolment_data['unitCode']}"
          return
        end

        # Make sure correct trimester
        unless enrolment_data['teachingPeriod']['year'].to_i == tp.year && "#{enrolment_data['teachingPeriod']['type'][0].upcase}#{enrolment_data['teachingPeriod']['period']}" == tp.period
          logger.error "Failed to sync #{unit.code} - response had trimester #{enrolment_data['teachingPeriod']}"
          return
        end

        logger.info "Syncing enrolment for #{unit.code} - #{tp.year} #{tp.period}"

        # Get the list of students
        student_list = []
        timetable_data = fetch_timetable_data(unit)

        enrolment_data['locations'].each do |location|
          logger.info " - Syncing #{location['name']}"
          campus_name = location['name']
          campus = Campus.find_by(name: campus_name)

          if campus.nil?
            logger.error "Unable to find location #{location['name']}"
            next
          end

          is_cloud = (campus == cloud_campus)
          if is_cloud
            # Ensure the cloud campus has at least one tutorial to allocate into.
            if unit.tutorials.where(campus_id: campus.id).count == 0
              unit.add_tutorial(
                'Asynchronous',            # day
                '',                        # time
                'Cloud',                   # location
                unit.main_convenor_user,   # tutor
                cloud_campus,              # campus
                -1,                        # capacity
                default_cloud_campus_abbr, # abbreviation
                nil                        # tutorial_stream
              )
            end

            # Snapshot enrolment counts per cloud tutorial for balanced allocation.
            tutorial_stats = unit.tutorials.
              joins('LEFT OUTER JOIN tutorial_enrolments ON tutorial_enrolments.tutorial_id = tutorials.id').
              where(campus_id: campus.id).
              select(
                'tutorials.abbreviation AS abbreviation',
                'capacity',
                'COUNT(tutorial_enrolments.id) AS enrolment_count'
              ).
              group('tutorials.abbreviation', 'capacity').
              map { |row|
                {
                  abbreviation: row.abbreviation,
                  enrolment_count: row.enrolment_count,
                  added: 0.0, # float to force float division in % full calc
                  capacity: row.capacity
                }
              }
          end

          location['enrolments'].each do |enrolment|
            if enrolment['email'].nil?
              # Only error if they were enrolled
              if ['ENROLLED', 'COMPLETED'].include?(enrolment['status'].upcase)
                result[:errors] << { row: enrolment, message: 'Missing email and username!' }
              else
                result[:ignored] << { row: enrolment, message: 'Not enrolled, but no email/username' }
              end
              next
            end

            # Make sure tutorials is not nil - use empty list
            tutorials = timetable_data[enrolment['studentId']]
            tutorials = [] if tutorials.nil?

            row_data = {
              unit_code: enrolment_data['unitCode'],
              username: enrolment['email'][/[^@]+/],
              student_id: enrolment['studentId'],
              first_name: enrolment['givenNames'],
              last_name: enrolment['surname'],
              nickname: enrolment['preferredName'],
              email: enrolment['email'],
              enrolled: ['ENROLLED', 'COMPLETED'].include?(enrolment['status'].upcase),
              tutorials: tutorials,
              campus: campus_name,
              row: enrolment
            }

            user = sync_student_user_from_callista(row_data)

            # if they are enrolled, but not timetabled and cloud...
            if row_data[:enrolled] && timetable_data[enrolment['studentId']].nil? && is_cloud # Is this a cloud user that we have the user data for?
              # try to get their existing data
              project = unit.projects.where(user_id: user.id).first unless user.nil?
              unless project.present? && project.tutorial_enrolments.count > 0
                # not present (so new), or has no enrolment...
                tutorial = find_cloud_tutorial(unit, tutorial_stats)
                row_data[:tutorials] = [ tutorial ] unless tutorial.nil?
              end
            end

            student_list << row_data
          end
        end

        import_settings = {
          replace_existing_tutorial: false
        }

        # Now get unit to sync
        unit.sync_enrolment_with(student_list, import_settings, result)
      else
        logger.error "Failed to sync #{unit.code} - #{response}"
      end
    rescue StandardError => e
      # BUG FIX: rescue StandardError rather than Exception so signals and
      # system exits are not swallowed.
      logger.error "Failed to sync unit: #{e.message}"
    end

    result
  end

  # Doubtfire::Application.config.institution_settings.fetch_timetable_data(Unit.last)
  #
  # Builds a map from student id (Integer) to the list of tutorial
  # abbreviations that student is allocated to in STAR.
  def fetch_timetable_data(unit)
    logger.info("Fetching STAR data for #{unit.code}")

    # Ensure streams/tutorials exist before allocating students to them.
    sync_streams_from_star(unit)

    result = {}
    tp = unit.teaching_period
    url = "#{@star_url}/star-#{tp.year}/rest/students/allocated"

    unit.tutorial_streams.each do |tutorial_stream|
      logger.info("Fetching #{tutorial_stream} from #{url}")
      response = RestClient.post(url, { username: @star_user, password: @star_secret, where_clause: "subject_code LIKE '#{unit.code}%' AND activity_group_code LIKE '#{tutorial_stream.abbreviation}'" })
      next unless response.code == 200

      json_data = JSON.parse(response.body)
      # Switch to the next activity type if this one is empty
      next if json_data['allocations'].count == 0

      json_data['allocations'].each do |allocation|
        if result[allocation['student_code'].to_i].nil?
          result[allocation['student_code'].to_i] = []
        end
        tutorial = fetch_tutorial(unit, allocation) unless allocation['student_code'].nil?
        result[allocation['student_code'].to_i] << tutorial unless tutorial.nil?
      end
    end
    result
  end

  # Tutorial abbreviation built from a STAR record: campus-group-activity,
  # e.g. "B-Prc-01".
  def tutorial_abbr_for_star(star_data)
    "#{star_data['campus']}-#{star_data['activity_group_code']}-#{star_data['activity_code']}"
  end

  # Returns the tutorial abbr to enrol in for this activity (one in a stream),
  # or nil when the activity has no group code or the tutorial is unknown
  # to the unit.
  def fetch_tutorial(unit, star_data)
    tutorial_code = star_data["activity_group_code"].strip == "" ? nil : tutorial_abbr_for_star(star_data)
    unless tutorial_code.nil?
      tutorial_code = nil if unit.tutorials.where(abbreviation: tutorial_code).count == 0
    end
    tutorial_code
  end

  # Generates the next unused "<name> <n>" / "<abbr> <n>" pair for a new
  # tutorial stream of +activity_type+ in +unit+.
  def details_for_next_tutorial_stream(unit, activity_type)
    counter = 1
    begin
      name = "#{activity_type.name} #{counter}"
      abbreviation = "#{activity_type.abbreviation} #{counter}"
      counter += 1
    end while unit.tutorial_streams.where("abbreviation = :abbr OR name = :name", abbr: abbreviation, name: name).present?
    [name, abbreviation]
  end
end
Doubtfire::Application.config.institution_settings = DeakinInstitutionSettings.new
ENHANCE: Provide additional user data on import with existing users
require 'rest-client'
#
# This is an institution settings - used for custom imports of users into units.
#
class DeakinInstitutionSettings
def logger
Rails.logger
end
def initialize()
@base_url = ENV['DF_INSTITUTION_SETTINGS_SYNC_BASE_URL']
@client_id = ENV['DF_INSTITUTION_SETTINGS_SYNC_CLIENT_ID']
@client_secret = ENV['DF_INSTITUTION_SETTINGS_SYNC_CLIENT_SECRET']
@star_url = ENV['DF_INSTITUTION_SETTINGS_SYNC_STAR_URL']
@star_user = ENV['DF_INSTITUTION_SETTINGS_SYNC_STAR_USER']
@star_secret = ENV['DF_INSTITUTION_SETTINGS_SYNC_STAR_SECRET']
end
def are_callista_headers? (headers)
headers[0] == "person id" && headers.count == 35
end
def are_star_headers? (headers)
headers.include?("student_code") && headers.count == 11
end
def are_headers_institution_users? (headers)
are_callista_headers?(headers) || are_star_headers?(headers)
end
def missing_headers(row, headers)
headers - row.to_hash.keys
end
# Returns the import configuration matching the detected header style:
# - a lambda reporting required headers missing from a row,
# - a lambda mapping a CSV row to the importer's student hash, and
# - whether an existing tutorial enrolment should be replaced on import.
def user_import_settings_for(headers)
  if are_callista_headers?(headers)
    {
      missing_headers_lambda: ->(row) {
        missing_headers(row, ["person id", "surname", "given names", "unit code", "student attempt status", "email", "preferred given name", "campus"])
      },
      fetch_row_data_lambda: ->(row, unit) { fetch_callista_row(row, unit) },
      # Callista has no timetable data, so keep any tutorial enrolments.
      replace_existing_tutorial: false
    }
  else
    {
      # NOTE(review): "campus" appears twice in this required-header list -
      # harmless (the check is set-difference based) but likely unintended.
      missing_headers_lambda: ->(row) {
        missing_headers(row, ["student_code","first_name","last_name","email_address","preferred_name","subject_code","activity_code","campus","day_of_week","start_time","location", "campus"])
      },
      fetch_row_data_lambda: ->(row, unit) { fetch_star_row(row, unit) },
      # STAR allocations are authoritative, so replace existing enrolments.
      replace_existing_tutorial: true
    }
  end
end
# Maps a three-letter weekday abbreviation (any case) to its full English
# name; unrecognised values are returned unchanged.
def day_abbr_to_name(day)
  {
    'mon' => 'Monday',
    'tue' => 'Tuesday',
    'wed' => 'Wednesday',
    'thu' => 'Thursday',
    'fri' => 'Friday'
  }.fetch(day.downcase, day)
end
# Finds (or creates) the ActivityType for a STAR activity group code.
# The last two characters (the stream number, e.g. "01" in "Wrk01") are
# stripped from both the code and the description to form the abbreviation
# and name. Lookup is case-insensitive on abbreviation.
def activity_type_for_group_code (activity_group_code, description)
  result = ActivityType.where('lower(abbreviation) = :abbr', abbr: activity_group_code[0...-2].downcase).first
  if result.nil?
    name = description[0...-2]
    abbr = activity_group_code[0...-2]
    result = ActivityType.create!(name: name, abbreviation: abbr)
  end
  result
end
# Abbreviation of the tutorial auto-created for cloud (online-only) students.
def default_cloud_campus_abbr
  'Cloud-01'
end
# Doubtfire::Application.config.institution_settings.sync_streams_from_star(Unit.last)
#
# Pulls the activity list for +unit+ from the STAR timetable service and
# mirrors it locally: creates missing tutorial streams (plain classes are
# skipped unless already tracked) and missing tutorials within each stream.
# Returns nil on any error; network failures propagate to the caller.
def sync_streams_from_star(unit)
  tp = unit.teaching_period
  url = "#{@star_url}/star-#{tp.year}/rest/activities"
  logger.info("Fetching #{unit.name} timetable from #{url}")
  response = RestClient.post(url, {username: @star_user, password: @star_secret, where_clause:"subject_code LIKE '#{unit.code}%'"})
  return unless response.code == 200

  json_data = JSON.parse(response.body)
  if json_data["activities"].nil?
    logger.error "Failed to sync #{unit.code} - No response from #{url}"
    return
  end

  json_data["activities"].each do |activity|
    # Make sure units match - subject codes look like "<unit code>_...",
    # so take everything before the first underscore.
    subject_match = /.*?(?=_)/.match( activity["subject_code"] )
    unit_code = subject_match.nil? ? nil : subject_match[0]
    unless unit_code == unit.code
      # BUG FIX: previously interpolated the undefined local `enrolmentData`
      # (copy-paste from sync_enrolments), raising NameError instead of
      # logging the mismatched code.
      logger.error "Failed to sync #{unit.code} - response had subject code #{activity['subject_code']}"
      return
    end

    stream = unit.tutorial_streams.where(abbreviation: activity['activity_group_code']).first
    # Skip classes - unless it is in the unit's current streams
    next if stream.nil? && activity_type_for_group_code(activity['activity_group_code'], activity['description']).abbreviation == 'Cls'

    if stream.nil?
      stream = unit.add_tutorial_stream activity['description'], activity['activity_group_code'], activity_type_for_group_code(activity['activity_group_code'], activity['description'])
    end

    campus = Campus.find_by(abbreviation: activity['campus'])
    abbr = tutorial_abbr_for_star(activity)
    tutorial = unit.tutorials.where(abbreviation: abbr, campus_id: campus.id).first
    next unless tutorial.nil?

    unit.add_tutorial(
      activity['day_of_week'],  # day
      activity['start_time'],   # time
      activity['location'],     # location
      unit.main_convenor_user,  # tutor
      campus,                   # campus
      -1,                       # capacity (unlimited)
      abbr,                     # abbreviation
      stream                    # tutorial_stream
    )
  end
end
# Maps a STAR CSV row to the student hash consumed by the unit importer.
# Username is the email local part; unit code is the subject code before "_".
def fetch_star_row(row, unit)
  email_match = /(.*)(?=@)/.match( row["email_address"] )
  subject_match = /.*?(?=_)/.match( row["subject_code"] )
  username = email_match.nil? ? nil : email_match[0]
  unit_code = subject_match.nil? ? nil : subject_match[0]
  tutorial_code = fetch_tutorial unit, row
  {
    unit_code: unit_code,
    username: username,
    student_id: row["student_code"],
    first_name: row["first_name"],
    last_name: row["last_name"],
    nickname: row["preferred_name"] == '-' ? nil : row["preferred_name"], # '-' means no preferred name
    email: row["email_address"],
    enrolled: true, # STAR export rows are always treated as enrolled
    tutorials: tutorial_code.present? ? [ tutorial_code ] : [],
    campus: row["campus"]
  }
end
# Maps a callista row to a Campus record: off-campus ("OFF" unit mode) maps
# to the cloud campus ('C'); otherwise the unit location abbreviation is used.
def map_callista_to_campus(row)
  key = row["unit mode"] == 'OFF' ? 'C' : row['unit location']
  Campus.find_by(abbreviation: key)
end
# The cloud (online) campus record - abbreviation 'C'.
def cloud_campus
  Campus.find_by(abbreviation: 'C')
end
# Maps a callista CSV row to the student hash consumed by the unit importer,
# also propagating any email/username change onto an existing User record
# (see sync_student_user_from_callista).
def fetch_callista_row(row, unit)
  campus = map_callista_to_campus(row)
  result = {
    unit_code: row["unit code"],
    username: row["email"],
    student_id: row["person id"],
    first_name: row["given names"],
    last_name: row["surname"],
    nickname: row["preferred given name"] == "-" ? nil : row["preferred given name"], # '-' means none
    email: "#{row["email"]}@deakin.edu.au",
    enrolled: row["student attempt status"] == 'ENROLLED',
    campus: campus.name,
    tutorials: [] # callista carries no timetable data
  }
  sync_student_user_from_callista(result)
  result
end
#
# Ensure that changes in email are propagated to users with matching ids
#
# Looks up users by username and by student id, then reconciles:
# - both found and the same record -> return it (nothing to do)
# - neither found                  -> nil (new user; importer will create it)
# - only student-id match          -> same person with a new email; update
#                                     email/username/login_id in place
# - only username match            -> backfill the missing student id
# - both found but different       -> conflicting records; log, return nil
def sync_student_user_from_callista(row_data)
  username_user = User.find_by(username: row_data[:username])
  student_id_user = User.find_by(student_id: row_data[:student_id])
  return username_user if username_user.present? && student_id_user.present? && username_user.id == student_id_user.id
  return nil if username_user.nil? && student_id_user.nil?
  if username_user.nil? && student_id_user.present?
    # Have with student_id but not username
    student_id_user.email = row_data[:email] # update to new emails and...
    student_id_user.username = row_data[:username] # switch username - its the same person as the id is the same
    student_id_user.login_id = row_data[:username] # reset to make sure not caching old data
    if student_id_user.valid?
      student_id_user.save
    else
      logger.error("Unable to fix user #{row_data} - record invalid!")
    end
    student_id_user
  elsif username_user.present? && student_id_user.nil?
    # Have with username but not student id
    username_user.student_id = row_data[:student_id] # should just need the student id
    if username_user.valid?
      username_user.save
    else
      logger.error("Unable to fix user #{row_data} - record invalid!")
    end
    username_user
  elsif username_user.present? && student_id_user.present?
    # Both present, but different
    logger.error("Unable to fix user #{row_data} - both username and student id users present. Need manual fix.")
    nil
  else
    # Unreachable given the guards above; kept as a defensive fallback.
    logger.error("Unable to fix user #{row_data} - Need manual fix.")
    nil
  end
end
# Picks the least-full cloud tutorial, counting students this sync run has
# already assigned (the :added counter, which is incremented for the pick).
# Returns the chosen tutorial's abbreviation.
def find_cloud_tutorial(unit, tutorial_stats)
  # Only one tutorial... so return it!
  return tutorial_stats.first[:abbreviation] if tutorial_stats.count == 1

  # Choose the tutorial with the lowest fill ratio. min_by keeps the first
  # minimal element, matching the previous stable sort_by(...).first.
  chosen = tutorial_stats.min_by do |stats|
    capacity = stats[:capacity].present? ? stats[:capacity] : 0
    capacity = 10000 if capacity <= 0 # no/invalid capacity -> effectively unbounded
    (stats[:enrolment_count] + stats[:added]) / capacity
  end
  chosen[:added] += 1
  chosen[:abbreviation]
end
# Doubtfire::Application.config.institution_settings.sync_enrolments(Unit.last)
#
# Pulls this unit's enrolments from the institution enrolment API, merges in
# STAR timetable allocations, and hands the student list to
# Unit#sync_enrolment_with. Returns a hash of :success/:ignored/:errors rows,
# or nil when the unit has no teaching period.
def sync_enrolments(unit)
  logger.info("Starting sync for #{unit.code}")
  result = {
    success: [],
    ignored: [],
    errors: []
  }

  tp = unit.teaching_period
  unless tp.present?
    logger.error "Failing to sync unit #{unit.code} as not in teaching period"
    return
  end

  begin
    url = "#{@base_url}?academicYear=#{tp.year}&periodType=trimester&period=#{tp.period.last}&unitCode=#{unit.code}"
    logger.info("Requesting #{url}")
    # FIX: was `RestClient.get(url, headers={...})` - the `headers=` only
    # created an unused local variable; the hash itself is the argument.
    response = RestClient.get(url, { "client_id" => @client_id, "client_secret" => @client_secret })

    if response.code == 200
      json_data = JSON.parse(response.body)
      if json_data["unitEnrolments"].nil?
        logger.error "Failed to sync #{unit.code} - No response from #{url}"
        return
      end

      enrolment_data = json_data["unitEnrolments"].first

      # Make sure units match
      unless enrolment_data['unitCode'] == unit.code
        logger.error "Failed to sync #{unit.code} - response had unit code #{enrolment_data['unitCode']}"
        return
      end

      # Make sure correct trimester (e.g. year 2021, period "T1")
      unless enrolment_data['teachingPeriod']['year'].to_i == tp.year && "#{enrolment_data['teachingPeriod']['type'][0].upcase}#{enrolment_data['teachingPeriod']['period']}" == tp.period
        logger.error "Failed to sync #{unit.code} - response had trimester #{enrolment_data['teachingPeriod']}"
        return
      end

      logger.info "Syncing enrolment for #{unit.code} - #{tp.year} #{tp.period}"

      # Get the list of students
      student_list = []
      timetable_data = fetch_timetable_data(unit)

      enrolment_data['locations'].each do |location|
        logger.info " - Syncing #{location['name']}"
        campus_name = location['name']
        campus = Campus.find_by(name: campus_name)
        if campus.nil?
          logger.error "Unable to find location #{location['name']}"
          next
        end

        is_cloud = (campus == cloud_campus)
        if is_cloud
          # Ensure the cloud campus has at least one tutorial to allocate into
          if unit.tutorials.where(campus_id: campus.id).count == 0
            unit.add_tutorial(
              'Asynchronous',             # day
              '',                         # time
              'Cloud',                    # location
              unit.main_convenor_user,    # tutor
              cloud_campus,               # campus
              -1,                         # capacity (unlimited)
              default_cloud_campus_abbr,  # abbreviation
              nil                         # tutorial_stream
            )
          end

          # Enrolment counts per cloud tutorial, used to balance allocations
          tutorial_stats = unit.tutorials.
            joins('LEFT OUTER JOIN tutorial_enrolments ON tutorial_enrolments.tutorial_id = tutorials.id').
            where(campus_id: campus.id).
            select(
              'tutorials.abbreviation AS abbreviation',
              'capacity',
              'COUNT(tutorial_enrolments.id) AS enrolment_count'
            ).
            group('tutorials.abbreviation', 'capacity').
            map { |row|
              {
                abbreviation: row.abbreviation,
                enrolment_count: row.enrolment_count,
                added: 0.0, # float to force float division in % full calc
                capacity: row.capacity
              }
            }
        end

        location['enrolments'].each do |enrolment|
          if enrolment['email'].nil?
            # Only error if they were enrolled
            if ['ENROLLED', 'COMPLETED'].include?(enrolment['status'].upcase)
              result[:errors] << { row: enrolment, message: 'Missing email and username!' }
            else
              result[:ignored] << { row: enrolment, message: 'Not enrolled, but no email/username' }
            end
            next
          end

          # Make sure tutorials is not nil - use empty list
          tutorials = timetable_data[enrolment['studentId']]
          tutorials = [] if tutorials.nil?

          row_data = {
            unit_code: enrolment_data['unitCode'],
            username: enrolment['email'][/[^@]+/],
            student_id: enrolment['studentId'],
            first_name: enrolment['givenNames'],
            last_name: enrolment['surname'],
            nickname: enrolment['preferredName'],
            email: enrolment['email'],
            enrolled: ['ENROLLED', 'COMPLETED'].include?(enrolment['status'].upcase),
            tutorials: tutorials,
            campus: campus_name,
            row: enrolment
          }

          user = sync_student_user_from_callista(row_data)

          # Cloud students with no timetable allocation are balanced into a
          # cloud tutorial - but only if new or not yet enrolled in one.
          if row_data[:enrolled] && timetable_data[enrolment['studentId']].nil? && is_cloud
            # try to get their existing data
            project = unit.projects.where(user_id: user.id).first unless user.nil?
            unless project.present? && project.tutorial_enrolments.count > 0
              # not present (so new), or has no enrolment...
              tutorial = find_cloud_tutorial(unit, tutorial_stats)
              row_data[:tutorials] = [ tutorial ] unless tutorial.nil?
            end
          end

          student_list << row_data
        end
      end

      import_settings = {
        replace_existing_tutorial: false
      }

      # Now get unit to sync
      unit.sync_enrolment_with(student_list, import_settings, result)
    else
      logger.error "Failed to sync #{unit.code} - #{response}"
    end
  rescue StandardError => e
    # FIX: was `rescue Exception`, which also swallowed SignalException,
    # SystemExit and NoMemoryError - rescue only application-level errors.
    logger.error "Failed to sync unit: #{e.message}"
  end

  result
end
# Doubtfire::Application.config.institution_settings.fetch_timetable_data(Unit.last)
#
# Fetches STAR tutorial allocations for each of the unit's tutorial streams.
# Returns a map of student id (Integer) to a list of tutorial abbreviations.
def fetch_timetable_data(unit)
  logger.info("Fetching STAR data for #{unit.code}")
  # Ensure streams/tutorials exist locally before mapping allocations to them.
  sync_streams_from_star(unit)
  result = {}
  tp = unit.teaching_period
  url = "#{@star_url}/star-#{tp.year}/rest/students/allocated"
  unit.tutorial_streams.each do |tutorial_stream|
    logger.info("Fetching #{tutorial_stream} from #{url}")
    response = RestClient.post(url, {username: @star_user, password: @star_secret, where_clause:"subject_code LIKE '#{unit.code}%' AND activity_group_code LIKE '#{tutorial_stream.abbreviation}'"})
    if response.code == 200
      jsonData = JSON.parse(response.body)
      # Switch to the next activity type if this one is empty
      next if jsonData['allocations'].count == 0
      jsonData['allocations'].each do |allocation|
        if result[allocation['student_code'].to_i].nil?
          result[allocation['student_code'].to_i] = []
        end
        # Only record allocations that resolve to an existing tutorial.
        tutorial = fetch_tutorial(unit, allocation) unless allocation['student_code'].nil?
        result[allocation['student_code'].to_i] << tutorial unless tutorial.nil?
      end
    end
  end
  result
end
# Builds the tutorial abbreviation used for a STAR activity:
# "<campus>-<activity_group_code>-<activity_code>".
def tutorial_abbr_for_star(star_data)
  [star_data['campus'], star_data['activity_group_code'], star_data['activity_code']].join('-')
end
# Returns the tutorial abbr to enrol in for this activity (one in a stream),
# or nil when the activity has no group code or the unit has no matching tutorial.
def fetch_tutorial(unit, star_data)
  return nil if star_data["activity_group_code"].strip.empty?

  candidate = tutorial_abbr_for_star(star_data)
  # Only use the abbreviation when the unit actually has that tutorial.
  unit.tutorials.where(abbreviation: candidate).count.zero? ? nil : candidate
end
# Returns [name, abbreviation] for the next tutorial stream of this activity
# type - the first "<name> N" / "<abbr> N" pair not already taken by one of
# the unit's tutorial streams (matched on either name or abbreviation).
def details_for_next_tutorial_stream(unit, activity_type)
  counter = 0
  loop do
    counter += 1
    name = "#{activity_type.name} #{counter}"
    abbreviation = "#{activity_type.abbreviation} #{counter}"
    taken = unit.tutorial_streams.where("abbreviation = :abbr OR name = :name", abbr: abbreviation, name: name).present?
    return [name, abbreviation] unless taken
  end
end
end
Doubtfire::Application.config.institution_settings = DeakinInstitutionSettings.new
|
class Julia < Formula
desc "Fast, Dynamic Programming Language"
homepage "https://julialang.org/"
license all_of: ["MIT", "BSD-3-Clause", "Apache-2.0", "BSL-1.0"]
head "https://github.com/JuliaLang/julia.git"
stable do
url "https://github.com/JuliaLang/julia/releases/download/v1.6.1/julia-1.6.1.tar.gz"
sha256 "366b8090bd9b2f7817ce132170d569dfa3435d590a1fa5c3e2a75786bd5cdfd5"
# Allow flisp to be built against system utf8proc. Remove in 1.6.2
# https://github.com/JuliaLang/julia/pull/37723
patch do
url "https://github.com/JuliaLang/julia/commit/ba653ecb1c81f1465505c2cea38b4f8149dd20b3.patch?full_index=1"
sha256 "e626ee968e2ce8207c816f39ef9967ab0b5f50cad08a46b1df15d7bf230093cb"
end
end
bottle do
sha256 cellar: :any, big_sur: "d010756c2b3e9bdc72edda8e27078399d779f3b56a2b1c78b28c47f89f269559"
sha256 cellar: :any, catalina: "750cec427377d71a4f8b537a19976e2a63df820216244a0d7d9a8f0a913266f0"
sha256 cellar: :any, mojave: "b5e9f67413ecebdbc92fec00940b84c032ec0f25f1f0a4c1398fad4ed591ef1f"
end
depends_on "python@3.9" => :build
# https://github.com/JuliaLang/julia/issues/36617
depends_on arch: :x86_64
depends_on "curl"
depends_on "gcc" # for gfortran
depends_on "gmp"
depends_on "libgit2"
depends_on "libssh2"
depends_on "llvm"
depends_on "mbedtls"
depends_on "mpfr"
depends_on "nghttp2"
depends_on "openblas"
depends_on "openlibm"
depends_on "p7zip"
depends_on "pcre2"
depends_on "suite-sparse"
depends_on "utf8proc"
uses_from_macos "perl" => :build
uses_from_macos "zlib"
on_linux do
depends_on "patchelf" => :build
# This dependency can be dropped when upstream resolves
# https://github.com/JuliaLang/julia/issues/30154
depends_on "libunwind"
end
fails_with gcc: "5"
def install
# Build documentation available at
# https://github.com/JuliaLang/julia/blob/v#{version}/doc/build/build.md
#
# Remove `USE_SYSTEM_SUITESPARSE` in 1.7.0
# https://github.com/JuliaLang/julia/commit/835f65d9b9f54e0a8dd856fc940a188f87a48cda
args = %W[
VERBOSE=1
USE_BINARYBUILDER=0
prefix=#{prefix}
USE_SYSTEM_CSL=1
USE_SYSTEM_LLVM=1
USE_SYSTEM_PCRE=1
USE_SYSTEM_OPENLIBM=1
USE_SYSTEM_BLAS=1
USE_SYSTEM_LAPACK=1
USE_SYSTEM_GMP=1
USE_SYSTEM_MPFR=1
USE_SYSTEM_SUITESPARSE=1
USE_SYSTEM_LIBSUITESPARSE=1
USE_SYSTEM_UTF8PROC=1
USE_SYSTEM_MBEDTLS=1
USE_SYSTEM_LIBSSH2=1
USE_SYSTEM_NGHTTP2=1
USE_SYSTEM_CURL=1
USE_SYSTEM_LIBGIT2=1
USE_SYSTEM_PATCHELF=1
USE_SYSTEM_ZLIB=1
USE_SYSTEM_P7ZIP=1
LIBBLAS=-lopenblas
LIBBLASNAME=libopenblas
LIBLAPACK=-lopenblas
LIBLAPACKNAME=libopenblas
USE_BLAS64=0
PYTHON=python3
MACOSX_VERSION_MIN=#{MacOS.version}
]
# Stable uses `libosxunwind` which is not in Homebrew/core
# https://github.com/JuliaLang/julia/pull/39127
on_macos { args << "USE_SYSTEM_LIBUNWIND=1" if build.head? }
on_linux { args << "USE_SYSTEM_LIBUNWIND=1" }
args << "TAGGED_RELEASE_BANNER=Built by #{tap.user} (v#{pkg_version})"
gcc = Formula["gcc"]
gcclibdir = gcc.opt_lib/"gcc"/gcc.any_installed_version.major
on_macos do
deps.map(&:to_formula).select(&:keg_only?).map(&:opt_lib).each do |libdir|
ENV.append "LDFLAGS", "-Wl,-rpath,#{libdir}"
end
ENV.append "LDFLAGS", "-Wl,-rpath,#{gcclibdir}"
# List these two last, since we want keg-only libraries to be found first
ENV.append "LDFLAGS", "-Wl,-rpath,#{HOMEBREW_PREFIX}/lib"
ENV.append "LDFLAGS", "-Wl,-rpath,/usr/lib"
end
on_linux do
ENV.append "LDFLAGS", "-Wl,-rpath,#{opt_lib}"
ENV.append "LDFLAGS", "-Wl,-rpath,#{opt_lib}/julia"
# Help Julia find our libunwind. Remove when upstream replace this with LLVM libunwind.
(lib/"julia").mkpath
Formula["libunwind"].opt_lib.glob(shared_library("libunwind", "*")) do |so|
(buildpath/"usr/lib").install_symlink so
ln_sf so.relative_path_from(lib/"julia"), lib/"julia"
end
end
inreplace "Make.inc" do |s|
s.change_make_var! "LOCALBASE", HOMEBREW_PREFIX
end
# Remove library versions from MbedTLS_jll, nghttp2_jll and libLLVM_jll
# https://git.archlinux.org/svntogit/community.git/tree/trunk/julia-hardcoded-libs.patch?h=packages/julia
%w[MbedTLS nghttp2].each do |dep|
(buildpath/"stdlib").glob("**/#{dep}_jll.jl") do |jll|
inreplace jll, %r{@rpath/lib(\w+)(\.\d+)*\.dylib}, "@rpath/lib\\1.dylib"
inreplace jll, /lib(\w+)\.so(\.\d+)*/, "lib\\1.so"
end
end
inreplace (buildpath/"stdlib").glob("**/libLLVM_jll.jl"), /libLLVM-\d+jl\.so/, "libLLVM.so"
# Make Julia use a CA cert from OpenSSL
(buildpath/"usr/share/julia").install_symlink Formula["openssl@1.1"].pkgetc/"cert.pem"
system "make", *args, "install"
# Create copies of the necessary gcc libraries in `buildpath/"usr/lib"`
system "make", "-C", "deps", "USE_SYSTEM_CSL=1", "install-csl"
# Install gcc library symlinks where Julia expects them
gcclibdir.glob(shared_library("*")) do |so|
next unless (buildpath/"usr/lib"/so.basename).exist?
# Use `ln_sf` instead of `install_symlink` to avoid referencing
# gcc's full version and revision number in the symlink path
ln_sf so.relative_path_from(lib/"julia"), lib/"julia"
end
# Some Julia packages look for libopenblas as libopenblas64_
(lib/"julia").install_symlink shared_library("libopenblas") => shared_library("libopenblas64_")
# Keep Julia's CA cert in sync with OpenSSL's
pkgshare.install_symlink Formula["openssl@1.1"].pkgetc/"cert.pem"
end
test do
assert_equal "4", shell_output("#{bin}/julia -E '2 + 2'").chomp
system bin/"julia", "-e", 'Base.runtests("core")'
end
end
julia: update 1.6.1 bottle.
class Julia < Formula
desc "Fast, Dynamic Programming Language"
homepage "https://julialang.org/"
license all_of: ["MIT", "BSD-3-Clause", "Apache-2.0", "BSL-1.0"]
head "https://github.com/JuliaLang/julia.git"
stable do
url "https://github.com/JuliaLang/julia/releases/download/v1.6.1/julia-1.6.1.tar.gz"
sha256 "366b8090bd9b2f7817ce132170d569dfa3435d590a1fa5c3e2a75786bd5cdfd5"
# Allow flisp to be built against system utf8proc. Remove in 1.6.2
# https://github.com/JuliaLang/julia/pull/37723
patch do
url "https://github.com/JuliaLang/julia/commit/ba653ecb1c81f1465505c2cea38b4f8149dd20b3.patch?full_index=1"
sha256 "e626ee968e2ce8207c816f39ef9967ab0b5f50cad08a46b1df15d7bf230093cb"
end
end
bottle do
sha256 cellar: :any, big_sur: "d010756c2b3e9bdc72edda8e27078399d779f3b56a2b1c78b28c47f89f269559"
sha256 cellar: :any, catalina: "750cec427377d71a4f8b537a19976e2a63df820216244a0d7d9a8f0a913266f0"
sha256 cellar: :any, mojave: "b5e9f67413ecebdbc92fec00940b84c032ec0f25f1f0a4c1398fad4ed591ef1f"
sha256 cellar: :any_skip_relocation, x86_64_linux: "cb32ddcea5741436526c36eafff24729f8aa518a2887ef08da15e6adb1716431"
end
depends_on "python@3.9" => :build
# https://github.com/JuliaLang/julia/issues/36617
depends_on arch: :x86_64
depends_on "curl"
depends_on "gcc" # for gfortran
depends_on "gmp"
depends_on "libgit2"
depends_on "libssh2"
depends_on "llvm"
depends_on "mbedtls"
depends_on "mpfr"
depends_on "nghttp2"
depends_on "openblas"
depends_on "openlibm"
depends_on "p7zip"
depends_on "pcre2"
depends_on "suite-sparse"
depends_on "utf8proc"
uses_from_macos "perl" => :build
uses_from_macos "zlib"
on_linux do
depends_on "patchelf" => :build
# This dependency can be dropped when upstream resolves
# https://github.com/JuliaLang/julia/issues/30154
depends_on "libunwind"
end
fails_with gcc: "5"
def install
# Build documentation available at
# https://github.com/JuliaLang/julia/blob/v#{version}/doc/build/build.md
#
# Remove `USE_SYSTEM_SUITESPARSE` in 1.7.0
# https://github.com/JuliaLang/julia/commit/835f65d9b9f54e0a8dd856fc940a188f87a48cda
args = %W[
VERBOSE=1
USE_BINARYBUILDER=0
prefix=#{prefix}
USE_SYSTEM_CSL=1
USE_SYSTEM_LLVM=1
USE_SYSTEM_PCRE=1
USE_SYSTEM_OPENLIBM=1
USE_SYSTEM_BLAS=1
USE_SYSTEM_LAPACK=1
USE_SYSTEM_GMP=1
USE_SYSTEM_MPFR=1
USE_SYSTEM_SUITESPARSE=1
USE_SYSTEM_LIBSUITESPARSE=1
USE_SYSTEM_UTF8PROC=1
USE_SYSTEM_MBEDTLS=1
USE_SYSTEM_LIBSSH2=1
USE_SYSTEM_NGHTTP2=1
USE_SYSTEM_CURL=1
USE_SYSTEM_LIBGIT2=1
USE_SYSTEM_PATCHELF=1
USE_SYSTEM_ZLIB=1
USE_SYSTEM_P7ZIP=1
LIBBLAS=-lopenblas
LIBBLASNAME=libopenblas
LIBLAPACK=-lopenblas
LIBLAPACKNAME=libopenblas
USE_BLAS64=0
PYTHON=python3
MACOSX_VERSION_MIN=#{MacOS.version}
]
# Stable uses `libosxunwind` which is not in Homebrew/core
# https://github.com/JuliaLang/julia/pull/39127
on_macos { args << "USE_SYSTEM_LIBUNWIND=1" if build.head? }
on_linux { args << "USE_SYSTEM_LIBUNWIND=1" }
args << "TAGGED_RELEASE_BANNER=Built by #{tap.user} (v#{pkg_version})"
gcc = Formula["gcc"]
gcclibdir = gcc.opt_lib/"gcc"/gcc.any_installed_version.major
on_macos do
deps.map(&:to_formula).select(&:keg_only?).map(&:opt_lib).each do |libdir|
ENV.append "LDFLAGS", "-Wl,-rpath,#{libdir}"
end
ENV.append "LDFLAGS", "-Wl,-rpath,#{gcclibdir}"
# List these two last, since we want keg-only libraries to be found first
ENV.append "LDFLAGS", "-Wl,-rpath,#{HOMEBREW_PREFIX}/lib"
ENV.append "LDFLAGS", "-Wl,-rpath,/usr/lib"
end
on_linux do
ENV.append "LDFLAGS", "-Wl,-rpath,#{opt_lib}"
ENV.append "LDFLAGS", "-Wl,-rpath,#{opt_lib}/julia"
# Help Julia find our libunwind. Remove when upstream replace this with LLVM libunwind.
(lib/"julia").mkpath
Formula["libunwind"].opt_lib.glob(shared_library("libunwind", "*")) do |so|
(buildpath/"usr/lib").install_symlink so
ln_sf so.relative_path_from(lib/"julia"), lib/"julia"
end
end
inreplace "Make.inc" do |s|
s.change_make_var! "LOCALBASE", HOMEBREW_PREFIX
end
# Remove library versions from MbedTLS_jll, nghttp2_jll and libLLVM_jll
# https://git.archlinux.org/svntogit/community.git/tree/trunk/julia-hardcoded-libs.patch?h=packages/julia
%w[MbedTLS nghttp2].each do |dep|
(buildpath/"stdlib").glob("**/#{dep}_jll.jl") do |jll|
inreplace jll, %r{@rpath/lib(\w+)(\.\d+)*\.dylib}, "@rpath/lib\\1.dylib"
inreplace jll, /lib(\w+)\.so(\.\d+)*/, "lib\\1.so"
end
end
inreplace (buildpath/"stdlib").glob("**/libLLVM_jll.jl"), /libLLVM-\d+jl\.so/, "libLLVM.so"
# Make Julia use a CA cert from OpenSSL
(buildpath/"usr/share/julia").install_symlink Formula["openssl@1.1"].pkgetc/"cert.pem"
system "make", *args, "install"
# Create copies of the necessary gcc libraries in `buildpath/"usr/lib"`
system "make", "-C", "deps", "USE_SYSTEM_CSL=1", "install-csl"
# Install gcc library symlinks where Julia expects them
gcclibdir.glob(shared_library("*")) do |so|
next unless (buildpath/"usr/lib"/so.basename).exist?
# Use `ln_sf` instead of `install_symlink` to avoid referencing
# gcc's full version and revision number in the symlink path
ln_sf so.relative_path_from(lib/"julia"), lib/"julia"
end
# Some Julia packages look for libopenblas as libopenblas64_
(lib/"julia").install_symlink shared_library("libopenblas") => shared_library("libopenblas64_")
# Keep Julia's CA cert in sync with OpenSSL's
pkgshare.install_symlink Formula["openssl@1.1"].pkgetc/"cert.pem"
end
test do
assert_equal "4", shell_output("#{bin}/julia -E '2 + 2'").chomp
system bin/"julia", "-e", 'Base.runtests("core")'
end
end
|
require 'mina/bundler'
require 'mina/rails'
require 'mina/git'
require 'mina/rvm'
set :user, 'reckoning'
set :forward_agent, true
set :deploy_to, '/home/reckoning'
set :domain, '10.0.0.10'
set :branch, 'master'
set :repository, 'git@github.com:reckoning/app.git'
if ENV['on'] == "live"
set :domain, 'reckoning.io'
set :branch, 'live'
set :repository, 'git@git.mortik.de:reckoning/app.git'
end
set :shared_paths, [
'.wti',
'vendor/assets',
'public/assets',
'public/uploads',
'files',
'log',
'config/secrets.yml',
'config/database.yml',
'config/skylight.yml'
]
task :environment do
invoke :"rvm:use[ruby-2.1.5@default]"
end
desc "Deploys the current version to the server."
task :deploy => :environment do
deploy do
invoke :'git:clone'
invoke :'deploy:link_shared_paths'
invoke :'bundle:install'
invoke :'rails:db_migrate'
invoke :'rails:assets_precompile'
invoke :'deploy:cleanup'
to :launch do
queue "sudo supervisorctl restart reckoning:*"
end
end
end
task :restart => :environment do
queue "sudo supervisorctl restart reckoning:*"
end
dont link vendor assets for deployment
require 'mina/bundler'
require 'mina/rails'
require 'mina/git'
require 'mina/rvm'

# Default target: the staging host on the local network.
set :user, 'reckoning'
set :forward_agent, true
set :deploy_to, '/home/reckoning'
set :domain, '10.0.0.10'
set :branch, 'master'
set :repository, 'git@github.com:reckoning/app.git'

# `mina deploy on=live` switches to the production host/branch/repository.
if ENV['on'] == "live"
  set :domain, 'reckoning.io'
  set :branch, 'live'
  set :repository, 'git@git.mortik.de:reckoning/app.git'
end

# Paths symlinked from shared/ into each release (persist across deploys).
set :shared_paths, [
  '.wti',
  'public/assets',
  'public/uploads',
  'files',
  'log',
  'config/secrets.yml',
  'config/database.yml',
  'config/skylight.yml'
]

# Selects the Ruby used for all remote commands.
task :environment do
  invoke :"rvm:use[ruby-2.1.5@default]"
end

desc "Deploys the current version to the server."
task :deploy => :environment do
  deploy do
    invoke :'git:clone'
    invoke :'deploy:link_shared_paths'
    invoke :'bundle:install'
    invoke :'rails:db_migrate'
    invoke :'rails:assets_precompile'
    invoke :'deploy:cleanup'
    # Runs only after the new release is successfully linked.
    to :launch do
      queue "sudo supervisorctl restart reckoning:*"
    end
  end
end

# Restart app processes without deploying.
task :restart => :environment do
  queue "sudo supervisorctl restart reckoning:*"
end
class Kahip < Formula
desc "Karlsruhe High Quality Partitioning"
homepage "https://algo2.iti.kit.edu/documents/kahip/index.html"
url "https://github.com/KaHIP/KaHIP/archive/v3.11.tar.gz"
sha256 "347575d48c306b92ab6e47c13fa570e1af1e210255f470e6aa12c2509a8c13e3"
license "MIT"
head "https://github.com/KaHIP/KaHIP.git"
bottle do
sha256 cellar: :any, arm64_big_sur: "7838960157c7a4dcf2752d9c9ec052bd4ab9e720b9614166496836e27abef22d"
sha256 cellar: :any, big_sur: "3d4062b822961bca86be0cbb658896a73af607f6fa040084041143d283b0a271"
sha256 cellar: :any, catalina: "e4067631417a7a8a09aeb7599be89d6a1bf218bbfcbdebb8a4ed95f2f6f30eef"
sha256 cellar: :any, mojave: "3be779a531ce19ebb82b8adfcbb6a305eeb1f730df44d80cff59fc95f583ab93"
end
depends_on "cmake" => :build
depends_on "open-mpi"
on_macos do
depends_on "gcc"
end
def install
on_macos do
gcc_major_ver = Formula["gcc"].any_installed_version.major
ENV["CC"] = Formula["gcc"].opt_bin/"gcc-#{gcc_major_ver}"
ENV["CXX"] = Formula["gcc"].opt_bin/"g++-#{gcc_major_ver}"
end
mkdir "build" do
system "cmake", *std_cmake_args, ".."
system "make", "install"
end
end
test do
output = shell_output("#{bin}/interface_test")
assert_match "edge cut 2", output
end
end
kahip: update 3.11 bottle.
# Homebrew formula for KaHIP (graph partitioning toolkit).
class Kahip < Formula
  desc "Karlsruhe High Quality Partitioning"
  homepage "https://algo2.iti.kit.edu/documents/kahip/index.html"
  url "https://github.com/KaHIP/KaHIP/archive/v3.11.tar.gz"
  sha256 "347575d48c306b92ab6e47c13fa570e1af1e210255f470e6aa12c2509a8c13e3"
  license "MIT"
  head "https://github.com/KaHIP/KaHIP.git"
  bottle do
    sha256 cellar: :any, arm64_big_sur: "7dd775db6db3f292630fef80ed2372b302e6d2caaaa1aa36259f9c9cd316bc42"
    sha256 cellar: :any, big_sur: "b020b5b9e72805576099c1a4cd13c5bf0ac07c7451f22150bb8b1213029ac83f"
    sha256 cellar: :any, catalina: "9d37b651ac2a278ec406cdab07d9c61fbc4ee5fc18b299d9fc640d13ddd3e01e"
    sha256 cellar: :any, mojave: "3426ae40721153a746e297e6fc0ceceb6f07fd6df88f2ebdcca830ccc16e9c73"
  end
  depends_on "cmake" => :build
  depends_on "open-mpi"
  # GCC is required on macOS only - NOTE(review): presumably because the
  # build needs features Apple clang lacks (e.g. OpenMP); confirm upstream.
  on_macos do
    depends_on "gcc"
  end
  def install
    # Point the build at Homebrew's GCC instead of the default compiler on macOS.
    on_macos do
      gcc_major_ver = Formula["gcc"].any_installed_version.major
      ENV["CC"] = Formula["gcc"].opt_bin/"gcc-#{gcc_major_ver}"
      ENV["CXX"] = Formula["gcc"].opt_bin/"g++-#{gcc_major_ver}"
    end
    # Standard out-of-source CMake build.
    mkdir "build" do
      system "cmake", *std_cmake_args, ".."
      system "make", "install"
    end
  end
  test do
    # interface_test partitions a small built-in graph; verify the reported cut.
    output = shell_output("#{bin}/interface_test")
    assert_match "edge cut 2", output
  end
end
|
#-*- coding: utf-8 -*-
require "bundler/capistrano"
require "capistrano_colors"
set :stages, %w(production staging)
set :default_stage, "staging"
require 'capistrano/ext/multistage'
set :application, "portalshit"
set :repository, "https://github.com/morygonzalez/lokka.git"
set :branch, "portalshit"
set :scm, :git
# Or: `accurev`, `bzr`, `cvs`, `darcs`, `git`, `mercurial`, `perforce`, `subversion` or `none`
set :user, "morygonzalez"
set :use_sudo, false
# role :web, "54.248.96.173" # Your HTTP server, Apache/etc
# role :app, "54.248.96.173" # This may be the same as your `Web` server
# role :db, "your primary db-server here", :primary => true # This is where Rails migrations will run
# role :db, "your slave db-server here"
set :deploy_to, "/home/morygonzalez/sites/deploys/#{application}"
set :ruby_path, "/home/morygonzalez/.rbenv/shims"
set :normalize_asset_timestamps, false
set :bundle_without, [:development, :test, :postgresql, :sqlite]
# if you're still using the script/reaper helper you will need
# these http://github.com/rails/irs_process_scripts
# If you are using Passenger mod_rails uncomment this:
# namespace :deploy do
# task :start do ; end
# task :stop do ; end
# task :restart, :roles => :app, :except => { :no_release => true } do
# run "#{try_sudo} touch #{File.join(current_path,'tmp','restart.txt')}"
# end
# end
namespace :deploy do
  # Boots unicorn for the current release; prompts for MySQL credentials.
  task :start, :roles => :app, :except => { :no_release => true } do
    set :db_user, -> { Capistrano::CLI.ui.ask('MySQL User: ') }
    set :db_password, -> { Capistrano::CLI.password_prompt('MySQL Password: ') }
    set :db_path, "mysql://#{db_user}:#{db_password}@#{db_host}/portalshit"
    run "cd #{current_path}; env RACK_ENV=#{stage} env NEWRELIC_ENABLE=#{stage == 'production' ? true : false} env DATABASE_URL=#{db_path} bundle exec unicorn -c #{current_path}/config/unicorn.rb -D -E production"
  end

  task :stop, :roles => :app, :except => { :no_release => true } do
    run "kill -KILL `cat #{current_path}/tmp/pids/unicorn-lokka.pid`"
  end

  # USR2 makes unicorn re-exec for a zero-downtime restart.
  # BUG FIX: the tasks below used `:role` (singular), which Capistrano v2
  # ignores - the option is `:roles`, as used by :start/:stop above.
  task :restart, :roles => :app, :except => { :no_release => true } do
    run "kill -USR2 `cat #{current_path}/tmp/pids/unicorn-lokka.pid`"
  end

  task :migrate, :roles => :app, :except => { :no_release => true } do
    run "env DATABASE_URL=#{db_path} RACK_ENV=production #{ruby_path}/bundle exec rake db:migrate"
  end

  desc "Do git checkout public dir to work app correctly"
  task :git_checkout_public, :roles => :app, :except => { :no_release => true } do
    run "cd #{current_path}; git checkout public"
  end

  desc "Creates sockets symlink"
  task :socket_symlink, :roles => :app, :except => { :no_release => true } do
    run "ln -sfn #{shared_path}/sockets #{current_path}/tmp/sockets"
  end

  desc "Create amazon product advertising api cache files"
  task :amazon_symlink, :roles => :app, :except => { :no_release => true } do
    run "ln -sfn #{shared_path}/amazon #{current_path}/tmp/amazon"
  end

  before [:"deploy:start", :"deploy:restart"], :"deploy:socket_symlink"
  before [:"deploy:start", :"deploy:restart"], :"deploy:amazon_symlink"
  after :"deploy:create_symlink", :"deploy:git_checkout_public"
  after :deploy, :"deploy:cleanup"
end
Use here doc in config/deploy.rb
#-*- coding: utf-8 -*-
# Capistrano 2 recipe deploying the Lokka-based "portalshit" site with
# multistage support, an rbenv ruby and a unicorn application server.
require "bundler/capistrano"
require "capistrano_colors"

# Multistage: available targets and the default one.
set :stages, %w(production staging)
set :default_stage, "staging"
require 'capistrano/ext/multistage'

set :application, "portalshit"
set :repository, "https://github.com/morygonzalez/lokka.git"
set :branch, "portalshit"
set :scm, :git
# Or: `accurev`, `bzr`, `cvs`, `darcs`, `git`, `mercurial`, `perforce`, `subversion` or `none`
set :user, "morygonzalez"
set :use_sudo, false

# Roles are defined per-stage; kept here for reference only.
# role :web, "54.248.96.173" # Your HTTP server, Apache/etc
# role :app, "54.248.96.173" # This may be the same as your `Web` server
# role :db, "your primary db-server here", :primary => true # This is where Rails migrations will run
# role :db, "your slave db-server here"

set :deploy_to, "/home/morygonzalez/sites/deploys/#{application}"
set :ruby_path, "/home/morygonzalez/.rbenv/shims"
# Asset-timestamp normalization is a Rails-ism; not needed for Lokka.
set :normalize_asset_timestamps, false
set :bundle_without, [:development, :test, :postgresql, :sqlite]

# if you're still using the script/reaper helper you will need
# these http://github.com/rails/irs_process_scripts
# If you are using Passenger mod_rails uncomment this:
# namespace :deploy do
#   task :start do ; end
#   task :stop do ; end
#   task :restart, :roles => :app, :except => { :no_release => true } do
#     run "#{try_sudo} touch #{File.join(current_path,'tmp','restart.txt')}"
#   end
# end

namespace :deploy do
  # Boot unicorn. MySQL credentials are prompted lazily — the lambdas are only
  # evaluated when db_user / db_password are first read.
  task :start, :roles => :app, :except => { :no_release => true } do
    set :db_user, -> { Capistrano::CLI.ui.ask('MySQL User: ') }
    set :db_password, -> { Capistrano::CLI.password_prompt('MySQL Password: ') }
    # NOTE(review): db_host is read here but never `set` in this file —
    # presumably defined in a stage file; verify.
    set :db_path, "mysql://#{db_user}:#{db_password}@#{db_host}/portalshit"
    # NOTE(review): inside this heredoc each `env VAR=...` line is a separate
    # shell command (no continuations), so these variables may never reach the
    # unicorn invocation — confirm on the server.
    run <<-EOC
cd #{current_path};
env RACK_ENV=#{stage}
env NEWRELIC_ENABLE=#{stage == 'production' ? true : false}
env DATABASE_URL=#{db_path}
bundle exec unicorn -c #{current_path}/config/unicorn.rb -D -E production
    EOC
  end

  # Hard-kill unicorn via its pidfile.
  task :stop, :roles => :app, :except => { :no_release => true } do
    run "kill -KILL `cat #{current_path}/tmp/pids/unicorn-lokka.pid`"
  end

  # USR2 asks unicorn to re-exec itself (zero-downtime restart).
  task :restart, :role => :app, :except => { :no_release => true } do
    run "kill -USR2 `cat #{current_path}/tmp/pids/unicorn-lokka.pid`"
  end

  # NOTE(review): relies on db_path set by deploy:start in the same session.
  task :migrate, :role => :app, :except => { :no_release => true } do
    run "env DATABASE_URL=#{db_path} RACK_ENV=production #{ruby_path}/bundle exec rake db:migrate"
  end

  desc "Do git checkout public dir to work app correctly"
  task :git_checkout_public, :role => :app, :except => { :no_release => true } do
    run "cd #{current_path}; git checkout public"
  end

  desc "Creates sockets symlink"
  task :socket_symlink, :role => :app, :except => { :no_release => true } do
    run "ln -sfn #{shared_path}/sockets #{current_path}/tmp/sockets"
  end

  desc "Create amazon product advertisng api cache files"
  task :amazon_symlink, :role => :app, :except => { :no_release => true } do
    run "ln -sfn #{shared_path}/amazon #{current_path}/tmp/amazon"
  end

  # Symlinks must exist before the app is (re)started.
  before [:"deploy:start", :"deploy:restart"], :"deploy:socket_symlink"
  before [:"deploy:start", :"deploy:restart"], :"deploy:amazon_symlink"
  after :"deploy:create_symlink", :"deploy:git_checkout_public"
  after :deploy, :"deploy:cleanup"
end
|
# Job to generate meme images and create thumbnails.
class GendImageProcessJob < ActiveJob::Base
  # Animated sources render far more slowly, so route them to their own queue.
  queue_as do
    arguments.first.src_image.is_animated ? :gend_image_process_animated : :gend_image_process
  end

  # Composite the captions onto the source image, attach a thumbnail, mark the
  # record finished and persist it.
  def perform(gend_image)
    placements = gend_image.captions.map(&:text_pos)
    gend_image.image = MemeCaptain.meme(gend_image.src_image.image, placements).to_blob
    gend_image.gend_thumb = build_thumbnail(gend_image)
    gend_image.work_in_progress = false
    gend_image.save!
  end

  private

  # Build an unsaved GendThumb resized to the configured thumbnail side.
  def build_thumbnail(gend_image)
    image_list = gend_image.magick_image_list
    image_list.resize_to_fit_anim!(MemeCaptainWeb::Config::THUMB_SIDE)
    thumbnail = GendThumb.new(image: image_list.to_blob)
    # Free ImageMagick memory eagerly; image lists are not GC-friendly.
    image_list.destroy!
    thumbnail
  end
end
Revert "Move thumbnail generation into separate method."
This reverts commit 888b3dca1800747075255e6765b789492d795035.
# Job to generate meme images and create thumbnails.
class GendImageProcessJob < ActiveJob::Base
  # Animated sources render far more slowly, so route them to their own queue.
  queue_as do
    gend_image = arguments.first
    if gend_image.src_image.is_animated
      :gend_image_process_animated
    else
      :gend_image_process
    end
  end

  # Composite the captions onto the source image, attach a resized thumbnail,
  # mark the record finished and persist it.
  def perform(gend_image)
    gend_image.image = MemeCaptain.meme(
      gend_image.src_image.image,
      gend_image.captions.map(&:text_pos)).to_blob
    thumb_img = gend_image.magick_image_list
    thumb_img.resize_to_fit_anim!(MemeCaptainWeb::Config::THUMB_SIDE)
    gend_image.gend_thumb = GendThumb.new(image: thumb_img.to_blob)
    # Free ImageMagick memory eagerly; image lists are not GC-friendly.
    thumb_img.destroy!
    gend_image.work_in_progress = false
    gend_image.save!
  end
end
|
# Homebrew formula for the keptn CLI, built from the cli/ subdir of the monorepo.
class Keptn < Formula
  desc "Is the CLI for keptn.sh a message-driven control-plane for application delivery"
  homepage "https://keptn.sh"
  url "https://github.com/keptn/keptn/archive/0.11.3.tar.gz"
  sha256 "48c38569735aca7ba287c4cc16a02fe27cabec24cc077bfba4798c02f6972e98"
  license "Apache-2.0"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_monterey: "bb25a124cf886aeb601ea8b44d9d2259f413ea1443dd25709beb8da024226e4b"
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "a0face66e81013a059015062aed3855d75c151bd683f3f5b00eec5abd23ae0d6"
    sha256 cellar: :any_skip_relocation, monterey: "4bbb3474e80d5bdd86d5b017a5c9421950920d3d2214ca11332ec9a3c99d62d8"
    sha256 cellar: :any_skip_relocation, big_sur: "b8d83d543e45bf2a3287abc268d677cf33c79245a735146f12fec42e07278b1b"
    sha256 cellar: :any_skip_relocation, catalina: "718d29d52f0e5780d0067f9b5eafad4a08a648b3bf605ab83ff939c547492b5c"
    sha256 cellar: :any_skip_relocation, mojave: "920e3054b80aabed5310763a63c1af4a76ad680943771260cf77f4bffe4ab2b9"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "bdba46209177a40c557bf9a4a517da9ec045e91065d34c46545b7d0d12f989e8"
  end

  depends_on "go" => :build

  def install
    # Strip debug info (-s -w) and inject the release version into the binary.
    ldflags = %W[
      -s -w
      -X github.com/keptn/keptn/cli/cmd.Version=#{version}
      -X main.KubeServerVersionConstraints=""
    ]
    cd buildpath/"cli" do
      system "go", "build", *std_go_args(ldflags: ldflags)
    end
  end

  test do
    # Disable update and kubectl-context checks so the test works offline.
    system bin/"keptn", "set", "config", "AutomaticVersionCheck", "false"
    system bin/"keptn", "set", "config", "kubeContextCheck", "false"
    assert_match "Keptn CLI version: #{version}", shell_output(bin/"keptn version 2>&1")
    # Without stored credentials, `keptn status` fails differently per platform.
    on_macos do
      assert_match "Error: credentials not found in native keychain",
        shell_output(bin/"keptn status 2>&1", 1)
    end
    on_linux do
      assert_match ".keptn/.keptn____keptn: no such file or directory",
        shell_output(bin/"keptn status 2>&1", 1)
    end
  end
end
keptn: update 0.11.3 bottle.
# Homebrew formula for the keptn CLI, built from the cli/ subdir of the monorepo.
class Keptn < Formula
  desc "Is the CLI for keptn.sh a message-driven control-plane for application delivery"
  homepage "https://keptn.sh"
  url "https://github.com/keptn/keptn/archive/0.11.3.tar.gz"
  sha256 "48c38569735aca7ba287c4cc16a02fe27cabec24cc077bfba4798c02f6972e98"
  license "Apache-2.0"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_monterey: "cb234499511e6048462a93fd4453b76c86a60be85f0957be4a2c0216eded0287"
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "ad3ceb5b6ddb946a8f4e3ab1130940cc0efe0ced3b0a4bcb32dfd091191b4aa0"
    sha256 cellar: :any_skip_relocation, monterey: "133844c9739c9cdcf50c7f961e609a9d832c99fd256f7ca4ab694dafd203731d"
    sha256 cellar: :any_skip_relocation, big_sur: "58e6374628559934ee6ddef4ab4b4048b20796c49c9995750872069c35174232"
    sha256 cellar: :any_skip_relocation, catalina: "543d1e51e82aa9b1c08a2fd38e2e9a9bed29636968960e9547d2894dae048923"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "b99b7b86361c2eeb120f6df246b789a7f13c51fcac6d8425dcc0724039a464c5"
  end

  depends_on "go" => :build

  def install
    # Strip debug info (-s -w) and inject the release version into the binary.
    ldflags = %W[
      -s -w
      -X github.com/keptn/keptn/cli/cmd.Version=#{version}
      -X main.KubeServerVersionConstraints=""
    ]
    cd buildpath/"cli" do
      system "go", "build", *std_go_args(ldflags: ldflags)
    end
  end

  test do
    # Disable update and kubectl-context checks so the test works offline.
    system bin/"keptn", "set", "config", "AutomaticVersionCheck", "false"
    system bin/"keptn", "set", "config", "kubeContextCheck", "false"
    assert_match "Keptn CLI version: #{version}", shell_output(bin/"keptn version 2>&1")
    # Without stored credentials, `keptn status` fails differently per platform.
    on_macos do
      assert_match "Error: credentials not found in native keychain",
        shell_output(bin/"keptn status 2>&1", 1)
    end
    on_linux do
      assert_match ".keptn/.keptn____keptn: no such file or directory",
        shell_output(bin/"keptn status 2>&1", 1)
    end
  end
end
|
# Capistrano 3 deployment for rails_survey: Passenger app plus a Node realtime
# service, Redis, Sidekiq (via monit) — all restarted on publish.
lock '3.2.1'

set :application, 'rails_survey'
set :scm, :git
set :repo_url, 'git@github.com:mnipper/rails_survey.git'
set :use_sudo, false
set :rails_env, 'production'
set :deploy_via, :copy
set :ssh_options, { :forward_agent => true, :port => 2222 }
set :pty, false
set :format, :pretty
set :keep_releases, 5
set :linked_files, %w{config/database.yml config/secret_token.txt config/local_env.yml}
set :linked_dirs, %w(bin log tmp/pids tmp/cache tmp/sockets vendor/bundle)
# Append app-specific shared dirs to the defaults set just above.
set :linked_dirs, fetch(:linked_dirs) + %w{ files updates }
set :branch, 'master'

# Sidekiq runtime configuration (consumed by the capistrano-sidekiq tasks).
set :sidekiq_pid, File.join(shared_path, 'tmp', 'pids', 'sidekiq.pid')
set :sidekiq_log, File.join(shared_path, 'log', 'sidekiq.log')
set :sidekiq_concurrency, 25
set :sidekiq_processes, 2

namespace :deploy do
  desc 'Restart Application'
  task :restart do
    # NOTE(review): the `desc` calls inside this task body are no-ops — desc
    # only annotates the *next* `task` definition; these act as comments only.
    desc "restart redis"
    on roles(:app) do
      execute "sudo /etc/init.d/redis-server restart"
    end
    desc "restart node"
    on roles(:app), in: :sequence, wait: 5 do
      execute "sudo restart realtime-app || sudo start realtime-app"
    end
    desc "restart phusion passenger"
    on roles(:app), in: :sequence, wait: 5 do
      execute :touch, current_path.join('tmp/restart.txt')
    end
  end

  # Fresh install of the realtime service's node modules in the new release.
  task :npm_install do
    on roles(:app) do
      execute "cd #{release_path}/node && sudo rm -rf node_modules && npm install"
    end
  end

  after :finishing, 'deploy:cleanup'
  after 'deploy:publishing', 'deploy:restart'
  after 'deploy:updated', 'deploy:npm_install'
  after 'deploy:published', 'sidekiq:monit:config'
  after 'deploy:published', 'monit:restart_monit'
end

namespace :monit do
  desc "Restart monit service"
  task :restart_monit do
    on roles(:app) do
      execute 'sudo service monit restart'
    end
  end
end
sidekiq monit restart after config
# Capistrano 3 deployment for rails_survey: Passenger app plus a Node realtime
# service, Redis, Sidekiq (via monit) — all restarted on publish.
lock '3.2.1'

set :application, 'rails_survey'
set :scm, :git
set :repo_url, 'git@github.com:mnipper/rails_survey.git'
set :use_sudo, false
set :rails_env, 'production'
set :deploy_via, :copy
set :ssh_options, { :forward_agent => true, :port => 2222 }
set :pty, false
set :format, :pretty
set :keep_releases, 5
set :linked_files, %w{config/database.yml config/secret_token.txt config/local_env.yml}
set :linked_dirs, %w(bin log tmp/pids tmp/cache tmp/sockets vendor/bundle)
# Append app-specific shared dirs to the defaults set just above.
set :linked_dirs, fetch(:linked_dirs) + %w{ files updates }
set :branch, 'master'

# Sidekiq runtime configuration (consumed by the capistrano-sidekiq tasks).
set :sidekiq_pid, File.join(shared_path, 'tmp', 'pids', 'sidekiq.pid')
set :sidekiq_log, File.join(shared_path, 'log', 'sidekiq.log')
set :sidekiq_concurrency, 25
set :sidekiq_processes, 2

namespace :deploy do
  desc 'Restart Application'
  task :restart do
    # NOTE(review): the `desc` calls inside this task body are no-ops — desc
    # only annotates the *next* `task` definition; these act as comments only.
    desc "restart redis"
    on roles(:app) do
      execute "sudo /etc/init.d/redis-server restart"
    end
    desc "restart node"
    on roles(:app), in: :sequence, wait: 5 do
      execute "sudo restart realtime-app || sudo start realtime-app"
    end
    desc "restart phusion passenger"
    on roles(:app), in: :sequence, wait: 5 do
      execute :touch, current_path.join('tmp/restart.txt')
    end
  end

  # Fresh install of the realtime service's node modules in the new release.
  task :npm_install do
    on roles(:app) do
      execute "cd #{release_path}/node && sudo rm -rf node_modules && npm install"
    end
  end

  # Restart monit so it picks up the sidekiq config written on publish.
  desc "Restart monit service"
  task :restart_monit do
    on roles(:app) do
      execute "sudo service monit restart"
    end
  end

  after :finishing, 'deploy:cleanup'
  after 'deploy:publishing', 'deploy:restart'
  after 'deploy:updated', 'deploy:npm_install'
  after 'deploy:published', 'sidekiq:monit:config'
  after 'deploy:published', 'deploy:restart_monit'
end
|
# Homebrew formula for ktmpl, a Rust CLI that fills parameterized Kubernetes
# manifest templates.
class Ktmpl < Formula
  desc "Parameterized templates for Kubernetes manifests"
  homepage "https://github.com/jimmycuadra/ktmpl"
  url "https://github.com/jimmycuadra/ktmpl/archive/0.9.0.tar.gz"
  sha256 "b2f05ae4b36f31f6801f4dcd2f5aec31d7b53b8b6dea6ddf974b22c88d8bc62b"
  license "MIT"
  head "https://github.com/jimmycuadra/ktmpl.git"

  bottle do
    cellar :any_skip_relocation
    rebuild 1
    sha256 "3bd2b7dcd24cfdf57ea6bffdbbd9c9380a084e4df9ffcd6231a4af8089781b59" => :big_sur
    sha256 "d8c1e6fd18fc96f615e119c64cd7df67940cb0a9b3113450e49f567b9875c9ee" => :catalina
    sha256 "7c91c4a9674effc29e0ef187fc05163500a81ac5a7c0502552b12098c72633dd" => :mojave
    sha256 "2cc0b69a68bbd12cfd02e17d079363f773006a7bd07b77588cf83d7207950b3f" => :high_sierra
  end

  depends_on "rust" => :build

  def install
    system "cargo", "install", *std_cargo_args
  end

  test do
    # Minimal Template exercising $((PORT)) parameter substitution.
    # FIX: the Service key was misspelled "metdata"; corrected to "metadata".
    (testpath/"test.yml").write <<~EOS
      ---
      kind: "Template"
      apiVersion: "v1"
      metadata:
        name: "test"
      objects:
      - kind: "Service"
        apiVersion: "v1"
        metadata:
          name: "test"
        spec:
          ports:
          - name: "test"
            protocol: "TCP"
            targetPort: "$((PORT))"
          selector:
            app: "test"
      parameters:
      - name: "PORT"
        description: "The port the service should run on"
        required: true
        parameterType: "int"
    EOS
    system bin/"ktmpl", "test.yml", "-p", "PORT", "8080"
  end
end
ktmpl: fix typo in formula (found by codespell)
Closes #66914.
Signed-off-by: Stefan Weil <8d4c780fcfdc41841e5070f4c43da8958ba6aec0@weilnetz.de>
Signed-off-by: Sean Molenaar <2b250e3fea88cfef248b497ad5fc17f7dc435154@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for ktmpl, a Rust CLI that fills parameterized Kubernetes
# manifest templates.
class Ktmpl < Formula
  desc "Parameterized templates for Kubernetes manifests"
  homepage "https://github.com/jimmycuadra/ktmpl"
  url "https://github.com/jimmycuadra/ktmpl/archive/0.9.0.tar.gz"
  sha256 "b2f05ae4b36f31f6801f4dcd2f5aec31d7b53b8b6dea6ddf974b22c88d8bc62b"
  license "MIT"
  head "https://github.com/jimmycuadra/ktmpl.git"

  bottle do
    cellar :any_skip_relocation
    rebuild 1
    sha256 "3bd2b7dcd24cfdf57ea6bffdbbd9c9380a084e4df9ffcd6231a4af8089781b59" => :big_sur
    sha256 "d8c1e6fd18fc96f615e119c64cd7df67940cb0a9b3113450e49f567b9875c9ee" => :catalina
    sha256 "7c91c4a9674effc29e0ef187fc05163500a81ac5a7c0502552b12098c72633dd" => :mojave
    sha256 "2cc0b69a68bbd12cfd02e17d079363f773006a7bd07b77588cf83d7207950b3f" => :high_sierra
  end

  depends_on "rust" => :build

  def install
    system "cargo", "install", *std_cargo_args
  end

  test do
    # Minimal Template exercising $((PORT)) parameter substitution.
    (testpath/"test.yml").write <<~EOS
      ---
      kind: "Template"
      apiVersion: "v1"
      metadata:
        name: "test"
      objects:
      - kind: "Service"
        apiVersion: "v1"
        metadata:
          name: "test"
        spec:
          ports:
          - name: "test"
            protocol: "TCP"
            targetPort: "$((PORT))"
          selector:
            app: "test"
      parameters:
      - name: "PORT"
        description: "The port the service should run on"
        required: true
        parameterType: "int"
    EOS
    system bin/"ktmpl", "test.yml", "-p", "PORT", "8080"
  end
end
|
# Capistrano 3 deployment for yonodesperdicio.org (Passenger + Thinking Sphinx).
set :application, 'yonodesperdicio.org'
set :repo_url, 'git@github.com:mijailr/YND.git'

# ask :branch, proc { `git rev-parse --abbrev-ref HEAD`.chomp }
set :scm, :git
set :format, :pretty
set :log_level, :debug
set :pty, true
set :bundle_flags, '--quiet'
set :bundle_bins, %w(rake rails)
set :bundle_path, nil
#set :deploy_via, :copy
set :deploy_via, :remote_cache
set :ssh_options, { :forward_agent => true }
set :linked_files, %w{config/database.yml config/secrets.yml config/newrelic.yml vendor/geolite/GeoLiteCity.dat}
set :linked_dirs, %w{db/sphinx log tmp/pids tmp/cache tmp/sockets tmp/cachedir vendor/bundle public/system public/legacy public/.well-known public/assets}
set :tmp_dir, "/home/yonodesp/tmp"
set :keep_releases, 5

# Logical flow for deploying an app: reindex sphinx, restart it, then the app.
after 'deploy:finished', 'thinking_sphinx:index'
after 'deploy:finished', 'thinking_sphinx:restart'
after 'deploy:finished', 'deploy:restart'

namespace :deploy do
  desc 'Perform migrations'
  task :migrations do
    on roles(:db) do
      within release_path do
        with rails_env:
          fetch(:rails_env) do execute :rake, 'db:migrate'
        end
      end
    end
  end

  # Passenger restart: touching tmp/restart.txt reloads the app.
  desc 'restart app'
  task :restart do
    on roles(:all) do
      execute "touch #{ current_path }/tmp/restart.txt"
    end
  end

  # Clear the app cache (in small host groups) before every restart.
  before :restart, :clear_cache do
    on roles(:web), in: :groups, limit: 3, wait: 10 do
      within release_path do
        with rails_env: fetch(:rails_env) do
          execute :rake, 'nolotiro:cache:clear'
        end
      end
    end
  end

  after :finishing, 'deploy:cleanup'
end
remove sphinx tasks
# Capistrano 3 deployment for yonodesperdicio.org (Passenger; sphinx hooks
# currently disabled).
set :application, 'yonodesperdicio.org'
set :repo_url, 'git@github.com:mijailr/YND.git'

# ask :branch, proc { `git rev-parse --abbrev-ref HEAD`.chomp }
set :scm, :git
set :format, :pretty
set :log_level, :debug
set :pty, true
set :bundle_flags, '--quiet'
set :bundle_bins, %w(rake rails)
set :bundle_path, nil
#set :deploy_via, :copy
set :deploy_via, :remote_cache
set :ssh_options, { :forward_agent => true }
set :linked_files, %w{config/database.yml config/secrets.yml config/newrelic.yml vendor/geolite/GeoLiteCity.dat}
set :linked_dirs, %w{db/sphinx log tmp/pids tmp/cache tmp/sockets tmp/cachedir vendor/bundle public/system public/legacy public/.well-known public/assets}
set :tmp_dir, "/home/yonodesp/tmp"
set :keep_releases, 5

# Logical flow for deploying an app (sphinx steps intentionally disabled).
# after 'deploy:finished', 'thinking_sphinx:index'
# after 'deploy:finished', 'thinking_sphinx:restart'
after 'deploy:finished', 'deploy:restart'

namespace :deploy do
  desc 'Perform migrations'
  task :migrations do
    on roles(:db) do
      within release_path do
        with rails_env:
          fetch(:rails_env) do execute :rake, 'db:migrate'
        end
      end
    end
  end

  # Passenger restart: touching tmp/restart.txt reloads the app.
  desc 'restart app'
  task :restart do
    on roles(:all) do
      execute "touch #{ current_path }/tmp/restart.txt"
    end
  end

  # Clear the app cache (in small host groups) before every restart.
  before :restart, :clear_cache do
    on roles(:web), in: :groups, limit: 3, wait: 10 do
      within release_path do
        with rails_env: fetch(:rails_env) do
          execute :rake, 'nolotiro:cache:clear'
        end
      end
    end
  end

  after :finishing, 'deploy:cleanup'
end
|
# Brimir is a helpdesk system to handle email support requests.
# Copyright (C) 2012-2015 Ivaldi http://ivaldi.nl
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Mailer used to verify ownership of outgoing email addresses: #verify sends a
# mail tagged with the address's token; #receive matches the token on the
# bounced/returned mail and clears it, marking the address verified.
class VerificationMailer < ActionMailer::Base
  # Send the verification mail, carrying the token in a custom header so the
  # round-trip can be matched up again in #receive.
  def verify(email_address)
    headers['X-Brimir-Verification'] = email_address.verification_token
    # FIX: mail's to: expects an address string; passing the EmailAddress
    # record itself produced an invalid recipient.
    mail(to: email_address.email)
  end

  # Inspect an inbound mail; if its X-Brimir-Verification header matches a
  # pending token, clear that token (the address is now verified).
  # Returns true when a matching address was verified, false otherwise.
  def receive(email)
    to_verify = EmailAddress.where.not(verification_token: nil)
    if to_verify.count > 0
      to_verify.each do |email_address|
        if email['X-Brimir-Verification'].to_s == email_address.verification_token
          email_address.verification_token = nil
          email_address.save!
          return true
        end
      end
    end
    return false
  end
end
Fix verification mail recipient: pass email_address.email (a string) to mail(to:) instead of the EmailAddress record
# Brimir is a helpdesk system to handle email support requests.
# Copyright (C) 2012-2015 Ivaldi http://ivaldi.nl
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Mailer used to verify ownership of outgoing email addresses: #verify sends a
# mail tagged with the address's token; #receive matches the token on the
# bounced/returned mail and clears it, marking the address verified.
class VerificationMailer < ActionMailer::Base
  # Send the verification mail, carrying the token in a custom header so the
  # round-trip can be matched up again in #receive.
  def verify(email_address)
    headers['X-Brimir-Verification'] = email_address.verification_token
    mail(to: email_address.email)
  end

  # Inspect an inbound mail; if its X-Brimir-Verification header matches a
  # pending token, clear that token (the address is now verified).
  # Returns true when a matching address was verified, false otherwise.
  def receive(email)
    to_verify = EmailAddress.where.not(verification_token: nil)
    if to_verify.count > 0
      to_verify.each do |email_address|
        if email['X-Brimir-Verification'].to_s == email_address.verification_token
          email_address.verification_token = nil
          email_address.save!
          return true
        end
      end
    end
    return false
  end
end
|
# Homebrew formula for kubie, a Rust CLI for switching kube contexts/namespaces.
class Kubie < Formula
  desc "Much more powerful alternative to kubectx and kubens"
  homepage "https://blog.sbstp.ca/introducing-kubie/"
  url "https://github.com/sbstp/kubie/archive/v0.17.2.tar.gz"
  sha256 "97a6481e1afa1f942be26637a11185d85b3beb834955b642d1769f1777ff39a6"
  license "Zlib"
  head "https://github.com/sbstp/kubie.git", branch: "master"

  # Upstream tags releases as vX.Y.Z.
  livecheck do
    url :stable
    regex(/^v?(\d+(?:\.\d+)+)$/i)
  end

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_monterey: "fb9c2ce957d07c514c00dabb0e0b0644b5fafc765e6c76bfa1a4b9a4253bb54f"
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "58198534d1572d7071e7be7a8800777081280bf6a14bda52a6b4396071f06bf8"
    sha256 cellar: :any_skip_relocation, monterey: "bc9325f5c11920d30905b4b821f5469ccb288b6d2a4f5d59151b139413c6f087"
    sha256 cellar: :any_skip_relocation, big_sur: "9d838fd49be700ef4a176890208f0530e8872302fc3d4203cc3c813ad2cb625f"
    sha256 cellar: :any_skip_relocation, catalina: "a6bf5f6847df1dcb2fbabcd3e183f31ede25a5942c10eb00ab7c9c504fae9717"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "f52ae97647ed4fa8216e27cc8ac10fe4cfa60db5a05d5b7e94927d56806c0507"
  end

  depends_on "rust" => :build
  depends_on "kubernetes-cli" => :test

  def install
    system "cargo", "install", *std_cargo_args
    bash_completion.install "./completion/kubie.bash"
    fish_completion.install "./completion/kubie.fish"
  end

  test do
    # Minimal kubeconfig pointing at an unreachable server; the expected
    # connection-refused error shows kubie selected the context and ran kubectl.
    (testpath/".kube/kubie-test.yaml").write <<~EOS
      apiVersion: v1
      clusters:
      - cluster:
          server: http://0.0.0.0/
        name: kubie-test-cluster
      contexts:
      - context:
          cluster: kubie-test-cluster
          user: kubie-test-user
          namespace: kubie-test-namespace
        name: kubie-test
      current-context: baz
      kind: Config
      preferences: {}
      users:
      - user:
        name: kubie-test-user
    EOS
    assert_match "The connection to the server 0.0.0.0 was refused - did you specify the right host or port?",
      shell_output("#{bin}/kubie exec kubie-test kubie-test-namespace kubectl get pod 2>&1")
  end
end
kubie: update 0.17.2 bottle.
Closes #106856.
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for kubie, a Rust CLI for switching kube contexts/namespaces.
class Kubie < Formula
  desc "Much more powerful alternative to kubectx and kubens"
  homepage "https://blog.sbstp.ca/introducing-kubie/"
  url "https://github.com/sbstp/kubie/archive/v0.17.2.tar.gz"
  sha256 "97a6481e1afa1f942be26637a11185d85b3beb834955b642d1769f1777ff39a6"
  license "Zlib"
  head "https://github.com/sbstp/kubie.git", branch: "master"

  # Upstream tags releases as vX.Y.Z.
  livecheck do
    url :stable
    regex(/^v?(\d+(?:\.\d+)+)$/i)
  end

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_monterey: "465deaadc3ad04c088b7b7932cb6c65fa8f557ee453db8e7f45fd473129ec223"
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "ae8987cb210d5b4f3c2f67009d43d53b4b7518e69bef415f20e23c6315194089"
    sha256 cellar: :any_skip_relocation, monterey: "d34240e629ebe363f55c494809a6c656add6ce6c5ef4ef2c236ce2e402da16df"
    sha256 cellar: :any_skip_relocation, big_sur: "50d5422834212d85eb01122877d24cee3557f5cf8119f2ad0d1ad0e1beb00df9"
    sha256 cellar: :any_skip_relocation, catalina: "48cd994100fd020c37f5a361fde3277f9c3b0381636381fc9682249d8ee136dc"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "eb936b3fb509ecf7d2653141ff9a073f504c284ce667744360a62e3e3ac5f16f"
  end

  depends_on "rust" => :build
  depends_on "kubernetes-cli" => :test

  def install
    system "cargo", "install", *std_cargo_args
    bash_completion.install "./completion/kubie.bash"
    fish_completion.install "./completion/kubie.fish"
  end

  test do
    # Minimal kubeconfig pointing at an unreachable server; the expected
    # connection-refused error shows kubie selected the context and ran kubectl.
    (testpath/".kube/kubie-test.yaml").write <<~EOS
      apiVersion: v1
      clusters:
      - cluster:
          server: http://0.0.0.0/
        name: kubie-test-cluster
      contexts:
      - context:
          cluster: kubie-test-cluster
          user: kubie-test-user
          namespace: kubie-test-namespace
        name: kubie-test
      current-context: baz
      kind: Config
      preferences: {}
      users:
      - user:
        name: kubie-test-user
    EOS
    assert_match "The connection to the server 0.0.0.0 was refused - did you specify the right host or port?",
      shell_output("#{bin}/kubie exec kubie-test kubie-test-namespace kubectl get pod 2>&1")
  end
end
|
# Capistrano 2 deployment for the Kochiku build workers (Resque-based) on the
# macbuild fleet, using a user-level RVM install.
$:.unshift(File.expand_path('./lib', ENV['rvm_path'])) # Add RVM's lib directory to the load path.
require "rvm/capistrano"
set :rvm_type, :user
set :rvm_ruby_string, 'ruby-1.9.3-p0@kochiku-worker'
require 'bundler/capistrano' # adds bundle:install step to deploy pipeline

default_run_options[:env] = {'PATH' => '/usr/local/bin:$PATH'}

set :application, "Kochiku Worker"
set :repository, "git@git.squareup.com:square/kochiku-worker.git"
set :branch, "master"
set :scm, :git
set :scm_command, 'git'
set :user, "square"
set :deploy_to, "/Users/square/kochiku-worker"
set :deploy_via, :remote_cache
set :keep_releases, 5
set :use_sudo, false

# The worker fleet: macbuild01..macbuild26.
macbuilds = (1..26).map {|n| "macbuild%02d.sfo.squareup.com" % n }
role :worker, *macbuilds

after "deploy:setup", "kochiku:setup"
after "deploy:create_symlink", "kochiku:symlinks"
after "deploy:create_symlink", "kochiku:create_kochiku_worker_yaml"

namespace :deploy do
  desc "Restart the web application server and all of the build workers"
  task :restart do
    restart_workers
  end

  desc "Restart the build workers"
  task :restart_workers, :roles => :worker do
    # the trailing semicolons are required because this is passed to the shell as a single string
    # QUIT asks each Resque worker to finish its current job and exit; the
    # while-loop blocks the deploy until both workers are really gone.
    run <<-CMD
resque1_pid=$(cat #{shared_path}/pids/resque1.pid);
resque2_pid=$(cat #{shared_path}/pids/resque2.pid);
kill -QUIT $resque1_pid;
kill -QUIT $resque2_pid;
while ps x | egrep -q "^($resque1_pid|$resque2_pid)"; do
echo "Waiting for Resque workers to stop on $HOSTNAME...";
sleep 5;
done;
    CMD
  end
end

namespace :kochiku do
  # One-time host provisioning: gemset, bundler, and a clone of the repo used
  # as a build cache.
  task :setup, :roles => [:worker] do
    run "rvm gemset create 'kochiku-worker'"
    run "gem install bundler -v '~> 1.1.0' --conservative"
    run "mkdir -p #{shared_path}/build-partition"
    run "[ -d #{shared_path}/build-partition/web-cache ] || #{scm_command} clone --recursive git@git.squareup.com:square/web.git #{shared_path}/build-partition/web-cache"
  end

  task :symlinks, :roles => [:worker] do
    run "ln -nfFs #{shared_path}/build-partition #{current_path}/tmp/build-partition"
  end

  # Write the worker's YAML config into the current release.
  task :create_kochiku_worker_yaml, :roles => [:worker] do
    worker_config = <<-EOF
build_master: macbuild-master.sfo.squareup.com
build_strategy: build_all
redis_host: macbuild-master.sfo.squareup.com
    EOF
    put worker_config, "#{current_path}/config/kochiku-worker.yml"
  end

  # Kill Paused resque processes that were reparented to init (ppid == 1).
  task :cleanup_zombies, :roles => [:worker] do
    run "ps -eo 'pid ppid comm' |grep -i resque |grep Paused | awk '$2 == 1 { print $1 }' | xargs kill"
  end
end
Bump 1.9.3 from p0 to p194
# Capistrano 2 deployment for the Kochiku build workers (Resque-based) on the
# macbuild fleet, using a user-level RVM install.
$:.unshift(File.expand_path('./lib', ENV['rvm_path'])) # Add RVM's lib directory to the load path.
require "rvm/capistrano"
set :rvm_type, :user
set :rvm_ruby_string, 'ruby-1.9.3-p194@kochiku-worker'
require 'bundler/capistrano' # adds bundle:install step to deploy pipeline

default_run_options[:env] = {'PATH' => '/usr/local/bin:$PATH'}

set :application, "Kochiku Worker"
set :repository, "git@git.squareup.com:square/kochiku-worker.git"
set :branch, "master"
set :scm, :git
set :scm_command, 'git'
set :user, "square"
set :deploy_to, "/Users/square/kochiku-worker"
set :deploy_via, :remote_cache
set :keep_releases, 5
set :use_sudo, false

# The worker fleet: macbuild01..macbuild26.
macbuilds = (1..26).map {|n| "macbuild%02d.sfo.squareup.com" % n }
role :worker, *macbuilds

after "deploy:setup", "kochiku:setup"
after "deploy:create_symlink", "kochiku:symlinks"
after "deploy:create_symlink", "kochiku:create_kochiku_worker_yaml"

namespace :deploy do
  desc "Restart the web application server and all of the build workers"
  task :restart do
    restart_workers
  end

  desc "Restart the build workers"
  task :restart_workers, :roles => :worker do
    # the trailing semicolons are required because this is passed to the shell as a single string
    # QUIT asks each Resque worker to finish its current job and exit; the
    # while-loop blocks the deploy until both workers are really gone.
    run <<-CMD
resque1_pid=$(cat #{shared_path}/pids/resque1.pid);
resque2_pid=$(cat #{shared_path}/pids/resque2.pid);
kill -QUIT $resque1_pid;
kill -QUIT $resque2_pid;
while ps x | egrep -q "^($resque1_pid|$resque2_pid)"; do
echo "Waiting for Resque workers to stop on $HOSTNAME...";
sleep 5;
done;
    CMD
  end
end

namespace :kochiku do
  # One-time host provisioning: gemset, bundler, and a clone of the repo used
  # as a build cache.
  task :setup, :roles => [:worker] do
    run "rvm gemset create 'kochiku-worker'"
    run "gem install bundler -v '~> 1.1.0' --conservative"
    run "mkdir -p #{shared_path}/build-partition"
    run "[ -d #{shared_path}/build-partition/web-cache ] || #{scm_command} clone --recursive git@git.squareup.com:square/web.git #{shared_path}/build-partition/web-cache"
  end

  task :symlinks, :roles => [:worker] do
    run "ln -nfFs #{shared_path}/build-partition #{current_path}/tmp/build-partition"
  end

  # Write the worker's YAML config into the current release.
  task :create_kochiku_worker_yaml, :roles => [:worker] do
    worker_config = <<-EOF
build_master: macbuild-master.sfo.squareup.com
build_strategy: build_all
redis_host: macbuild-master.sfo.squareup.com
    EOF
    put worker_config, "#{current_path}/config/kochiku-worker.yml"
  end

  # Kill Paused resque processes that were reparented to init (ppid == 1).
  task :cleanup_zombies, :roles => [:worker] do
    run "ps -eo 'pid ppid comm' |grep -i resque |grep Paused | awk '$2 == 1 { print $1 }' | xargs kill"
  end
end
|
module ChooChoo
  # QUESTION: Do we need a sub-event or should they be baked together somehow?
  #
  # Mixin for models that hang off a parent record owning an activity stream:
  # create / update / destroy of an including record reports a
  # 'created' / 'updated' / 'deleted' event on the parent's activity.
  module ChildNode
    extend ActiveSupport::Concern

    # NOTE: This class is still very unfinished
    included do
      cattr_reader :my_parent

      after_create :on_create
      after_update :on_update
      before_destroy :on_destroy

      # Record the name of the association method pointing at the parent.
      # NOTE(review): @@my_parent is a class variable shared across every
      # includer of this concern — confirm that is intended.
      def self.set_parent(parent)
        @@my_parent = parent
      end
    end

    # does not belong to activity

    private

    def on_create
      self.send(@@my_parent).activity.event_happened('created', self)
    end

    def on_update
      self.send(@@my_parent).activity.event_happened('updated', self)
    end

    # FIX: before_destroy registered :on_destroy but the method was never
    # defined, so destroying a record raised NoMethodError.
    def on_destroy
      self.send(@@my_parent).activity.event_happened('deleted', self)
    end
  end
end
Add missing on_destroy callback so destroying a child node reports a 'deleted' activity event instead of raising NoMethodError
module ChooChoo
  # QUESTION: Do we need a sub-event or should they be baked together somehow?
  #
  # Mixin for models that hang off a parent record owning an activity stream:
  # create / update / destroy of an including record reports a
  # 'created' / 'updated' / 'deleted' event on the parent's activity.
  module ChildNode
    extend ActiveSupport::Concern

    # NOTE: This class is still very unfinished
    included do
      cattr_reader :my_parent

      after_create :on_create
      after_update :on_update
      before_destroy :on_destroy

      # Record the name of the association method pointing at the parent.
      # NOTE(review): @@my_parent is a class variable shared across every
      # includer of this concern — confirm that is intended.
      def self.set_parent(parent)
        @@my_parent = parent
      end
    end

    # does not belong to activity

    private

    def on_create
      self.send(@@my_parent).activity.event_happened('created', self)
    end

    def on_update
      self.send(@@my_parent).activity.event_happened('updated', self)
    end

    def on_destroy
      self.send(@@my_parent).activity.event_happened('deleted', self)
    end
  end
end
|
# Homebrew formula for legit, a Python CLI wrapper around Git, installed into
# a private virtualenv with vendored dependencies.
class Legit < Formula
  include Language::Python::Virtualenv

  desc "Command-line interface for Git, optimized for workflow simplicity"
  # FIX: the homepage is served over https; avoid the insecure http scheme.
  homepage "https://www.git-legit.org/"
  url "https://github.com/kennethreitz/legit/archive/v0.2.1.tar.gz"
  sha256 "3b30e47262f3a727cc7aeb7e4842d82e9e2f9cc29145a361c097d7cc372a9a66"
  head "https://github.com/kennethreitz/legit.git", :branch => "develop"

  bottle do
    cellar :any_skip_relocation
    sha256 "9c6bbed14527c371e67b24258acb60429aa0182af1369012fce3a44f472970f7" => :sierra
    sha256 "f4f9a9bf8fa183a111980dc8601b71369e586c461434a034d975fa408802a055" => :el_capitan
    sha256 "a27c8ca66c3f479f40e69a5c4fbdb32198421145ceccb0fe87d800982613f409" => :yosemite
  end

  depends_on :python if MacOS.version <= :snow_leopard

  # Python dependencies vendored into the formula's virtualenv.
  resource "args" do
    url "https://files.pythonhosted.org/packages/e5/1c/b701b3f4bd8d3667df8342f311b3efaeab86078a840fb826bd204118cc6b/args-0.1.0.tar.gz"
    sha256 "a785b8d837625e9b61c39108532d95b85274acd679693b71ebb5156848fcf814"
  end

  resource "clint" do
    url "https://files.pythonhosted.org/packages/3d/b4/41ecb1516f1ba728f39ee7062b9dac1352d39823f513bb6f9e8aeb86e26d/clint-0.5.1.tar.gz"
    sha256 "05224c32b1075563d0b16d0015faaf9da43aa214e4a2140e51f08789e7a4c5aa"
  end

  resource "gitdb" do
    url "https://files.pythonhosted.org/packages/e3/95/7e5d7261feb46c0539ac5e451be340ddd64d78c5118f2d893b052c76fe8c/gitdb-0.6.4.tar.gz"
    sha256 "a3ebbc27be035a2e874ed904df516e35f4a29a778a764385de09de9e0f139658"
  end

  resource "GitPython" do
    url "https://files.pythonhosted.org/packages/cb/a0/9b063d09bbc847b98df115571041287d7e38ff1b45ed1c91534d15057cf6/GitPython-2.0.8.tar.gz"
    sha256 "7c03d1130f903aafba6ae5b89ccf8eb433a995cd3120cbb781370e53fc4eb222"
  end

  resource "six" do
    url "https://files.pythonhosted.org/packages/b3/b2/238e2590826bfdd113244a40d9d3eb26918bd798fc187e2360a8367068db/six-1.10.0.tar.gz"
    sha256 "105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a"
  end

  resource "smmap" do
    url "https://files.pythonhosted.org/packages/bc/aa/b744b3761fff1b10579df996a2d2e87f124ae07b8336e37edc89cc502f86/smmap-0.9.0.tar.gz"
    sha256 "0e2b62b497bd5f0afebc002eda4d90df9d209c30ef257e8673c90a6b5c119d62"
  end

  def install
    virtualenv_install_with_resources
    bash_completion.install "extra/bash-completion/legit"
    zsh_completion.install "extra/zsh-completion/_legit"
    man1.install "extra/man/legit.1"
  end

  test do
    # git requires an identity before it will commit.
    (testpath/".gitconfig").write <<-EOS.undent
      [user]
        name = Real Person
        email = notacat@hotmail.cat
    EOS
    system "git", "init"
    touch "foo"
    system "git", "add", "foo"
    system "git", "commit", "-m", "init"
    system "git", "remote", "add", "origin", "https://github.com/git/git.git"
    # `legit sprout` creates a new branch off the current one.
    system "#{bin}/legit", "sprout", "test"
    assert_match(/test/, shell_output("#{bin}/legit branches"))
  end
end
legit: use https for homepage (#11008)
# Homebrew formula for legit, a Python command-line wrapper around Git.
# Installs into a self-contained virtualenv with vendored dependencies.
class Legit < Formula
  include Language::Python::Virtualenv

  desc "Command-line interface for Git, optimized for workflow simplicity"
  homepage "https://www.git-legit.org/"
  url "https://github.com/kennethreitz/legit/archive/v0.2.1.tar.gz"
  sha256 "3b30e47262f3a727cc7aeb7e4842d82e9e2f9cc29145a361c097d7cc372a9a66"
  head "https://github.com/kennethreitz/legit.git", :branch => "develop"

  bottle do
    cellar :any_skip_relocation
    sha256 "9c6bbed14527c371e67b24258acb60429aa0182af1369012fce3a44f472970f7" => :sierra
    sha256 "f4f9a9bf8fa183a111980dc8601b71369e586c461434a034d975fa408802a055" => :el_capitan
    sha256 "a27c8ca66c3f479f40e69a5c4fbdb32198421145ceccb0fe87d800982613f409" => :yosemite
  end

  # System Python on <= Snow Leopard is too old; pull in brewed Python there.
  depends_on :python if MacOS.version <= :snow_leopard

  # Vendored Python dependencies, installed into the formula's private
  # virtualenv by virtualenv_install_with_resources in `install` below.
  resource "args" do
    url "https://files.pythonhosted.org/packages/e5/1c/b701b3f4bd8d3667df8342f311b3efaeab86078a840fb826bd204118cc6b/args-0.1.0.tar.gz"
    sha256 "a785b8d837625e9b61c39108532d95b85274acd679693b71ebb5156848fcf814"
  end

  resource "clint" do
    url "https://files.pythonhosted.org/packages/3d/b4/41ecb1516f1ba728f39ee7062b9dac1352d39823f513bb6f9e8aeb86e26d/clint-0.5.1.tar.gz"
    sha256 "05224c32b1075563d0b16d0015faaf9da43aa214e4a2140e51f08789e7a4c5aa"
  end

  resource "gitdb" do
    url "https://files.pythonhosted.org/packages/e3/95/7e5d7261feb46c0539ac5e451be340ddd64d78c5118f2d893b052c76fe8c/gitdb-0.6.4.tar.gz"
    sha256 "a3ebbc27be035a2e874ed904df516e35f4a29a778a764385de09de9e0f139658"
  end

  resource "GitPython" do
    url "https://files.pythonhosted.org/packages/cb/a0/9b063d09bbc847b98df115571041287d7e38ff1b45ed1c91534d15057cf6/GitPython-2.0.8.tar.gz"
    sha256 "7c03d1130f903aafba6ae5b89ccf8eb433a995cd3120cbb781370e53fc4eb222"
  end

  resource "six" do
    url "https://files.pythonhosted.org/packages/b3/b2/238e2590826bfdd113244a40d9d3eb26918bd798fc187e2360a8367068db/six-1.10.0.tar.gz"
    sha256 "105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a"
  end

  resource "smmap" do
    url "https://files.pythonhosted.org/packages/bc/aa/b744b3761fff1b10579df996a2d2e87f124ae07b8336e37edc89cc502f86/smmap-0.9.0.tar.gz"
    sha256 "0e2b62b497bd5f0afebc002eda4d90df9d209c30ef257e8673c90a6b5c119d62"
  end

  # Build an isolated virtualenv containing the vendored resources, then
  # install shell completions and the man page from the source tree.
  def install
    virtualenv_install_with_resources
    bash_completion.install "extra/bash-completion/legit"
    zsh_completion.install "extra/zsh-completion/_legit"
    man1.install "extra/man/legit.1"
  end

  # Smoke test: create a throwaway git repo with one commit and check that
  # `legit sprout` creates a branch that `legit branches` then lists.
  test do
    # Minimal git identity so `git commit` succeeds in the test sandbox.
    (testpath/".gitconfig").write <<-EOS.undent
      [user]
        name = Real Person
        email = notacat@hotmail.cat
    EOS
    system "git", "init"
    touch "foo"
    system "git", "add", "foo"
    system "git", "commit", "-m", "init"
    system "git", "remote", "add", "origin", "https://github.com/git/git.git"
    system "#{bin}/legit", "sprout", "test"
    assert_match(/test/, shell_output("#{bin}/legit branches"))
  end
end
|
require "bundler/capistrano"
server "murd.ch", :web, :app, :db, primary: true
set :application, "home_page"
set :user, "deployer"
set :deploy_to, "/home/#{user}/apps/#{application}"
set :deploy_via, :remote_cache
set :use_sudo, false
set :scm, "git"
set :repository, "git@github.com:gawlista/#{application}.git"
set :branch, "master"
set :rake, "#{rake} --trace"
set :bundle_flags, '--deployment'
default_run_options[:pty] = true
ssh_options[:forward_agent] = true
after "deploy", "deploy:cleanup" # keep only the last 5 releases
namespace :deploy do
%w[start stop restart].each do |command|
desc "#{command} unicorn server"
task command, roles: :app, except: {no_release: true} do
run "/etc/init.d/unicorn_#{application} #{command}"
end
end
task :setup_config, roles: :app do
sudo "ln -nfs #{current_path}/config/nginx.conf /etc/nginx/sites-enabled/#{application}"
sudo "ln -nfs #{current_path}/config/unicorn_init.sh /etc/init.d/unicorn_#{application}"
run "mkdir -p #{shared_path}/config"
put File.read("config/application.yml"), "#{shared_path}/config/application.yml"
put File.read("config/database.yml"), "#{shared_path}/config/database.yml"
end
after "deploy:setup", "deploy:setup_config"
task :symlink_config, roles: :app do
run "ln -nfs #{shared_path}/config/application.yml #{release_path}/config/application.yml"
run "ln -nfs #{shared_path}/config/database.yml #{release_path}/config/database.yml"
end
after "deploy:finalize_update", "deploy:symlink_config"
desc "Make sure local git is in sync with remote."
task :check_revision, roles: :web do
unless `git rev-parse HEAD` == `git rev-parse origin/master`
puts "WARNING: HEAD is not the same as origin/master"
puts "Run `git push` to sync changes."
exit
end
end
before "deploy", "deploy:check_revision"
end
require './config/boot'
require 'airbrake/capistrano'
Removes Airbrake code from deploy.rb.
require "bundler/capistrano"
server "murd.ch", :web, :app, :db, primary: true
set :application, "home_page"
set :user, "deployer"
set :deploy_to, "/home/#{user}/apps/#{application}"
set :deploy_via, :remote_cache
set :use_sudo, false
set :scm, "git"
set :repository, "git@github.com:gawlista/#{application}.git"
set :branch, "master"
set :rake, "#{rake} --trace"
set :bundle_flags, '--deployment'
default_run_options[:pty] = true
ssh_options[:forward_agent] = true
after "deploy", "deploy:cleanup" # keep only the last 5 releases
namespace :deploy do
%w[start stop restart].each do |command|
desc "#{command} unicorn server"
task command, roles: :app, except: {no_release: true} do
run "/etc/init.d/unicorn_#{application} #{command}"
end
end
task :setup_config, roles: :app do
sudo "ln -nfs #{current_path}/config/nginx.conf /etc/nginx/sites-enabled/#{application}"
sudo "ln -nfs #{current_path}/config/unicorn_init.sh /etc/init.d/unicorn_#{application}"
run "mkdir -p #{shared_path}/config"
put File.read("config/application.yml"), "#{shared_path}/config/application.yml"
put File.read("config/database.yml"), "#{shared_path}/config/database.yml"
end
after "deploy:setup", "deploy:setup_config"
task :symlink_config, roles: :app do
run "ln -nfs #{shared_path}/config/application.yml #{release_path}/config/application.yml"
run "ln -nfs #{shared_path}/config/database.yml #{release_path}/config/database.yml"
end
after "deploy:finalize_update", "deploy:symlink_config"
desc "Make sure local git is in sync with remote."
task :check_revision, roles: :web do
unless `git rev-parse HEAD` == `git rev-parse origin/master`
puts "WARNING: HEAD is not the same as origin/master"
puts "Run `git push` to sync changes."
exit
end
end
before "deploy", "deploy:check_revision"
end
require './config/boot' |
module Ci
  # One fixed-size chunk of a CI build's trace (log output).
  # Chunk payloads live either in Redis (the default, TTL-bound, hot
  # path while the build runs) or in the database `raw_data` column,
  # as selected by the +data_store+ enum. Full chunks are migrated to
  # the database asynchronously.
  class BuildTraceChunk < ActiveRecord::Base
    extend Gitlab::Ci::Model

    belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id

    # Drop the Redis payload when a Redis-backed chunk record is destroyed.
    after_destroy :redis_delete_data, if: :redis?

    default_value_for :data_store, :redis

    WriteError = Class.new(StandardError)

    CHUNK_SIZE = 128.kilobytes
    CHUNK_REDIS_TTL = 1.week
    LOCK_RETRY = 100        # max lease-acquisition attempts in in_lock
    LOCK_SLEEP = 1          # pause between attempts (seconds)
    LOCK_TTL = 5.minutes    # lease lifetime if the holder dies

    enum data_store: {
      redis: 1,
      db: 2
    }

    # Chunk payload from whichever store is active, normalized to BINARY.
    # May be nil if the Redis key has expired or was never written.
    def data
      if redis?
        redis_data
      elsif db?
        raw_data
      else
        raise 'Unsupported data store'
      end&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default
    end

    # Replace the chunk payload under the write lock.
    # Raises ArgumentError if the value exceeds CHUNK_SIZE; schedules a
    # move to the database once the chunk is exactly full.
    def set_data(value)
      raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE

      in_lock do
        if redis?
          redis_set_data(value)
        elsif db?
          self.raw_data = value
        else
          raise 'Unsupported data store'
        end

        save! if changed?
      end

      schedule_to_db if fullfilled?
    end

    # Drop everything at and after +offset+ (byte position).
    def truncate(offset = 0)
      self.append("", offset)
    end

    # Overwrite the payload from byte +offset+ with +new_data+.
    # The offset must lie within the current payload and the result must
    # still fit in CHUNK_SIZE.
    def append(new_data, offset)
      current_data = self.data.to_s
      raise ArgumentError, 'Offset is out of bound' if offset > current_data.bytesize || offset < 0
      raise ArgumentError, 'Outside of chunk size' if CHUNK_SIZE < offset + new_data.bytesize

      self.set_data(current_data.byteslice(0, offset) + new_data)
    end

    # Current payload size in bytes (0 when data is nil).
    def size
      data&.bytesize.to_i
    end

    # Byte position of this chunk within the whole trace.
    def start_offset
      chunk_index * CHUNK_SIZE
    end

    def end_offset
      start_offset + size
    end

    # Half-open byte range this chunk covers in the full trace.
    def range
      (start_offset...end_offset)
    end

    # Migrate a non-empty Redis-backed chunk into the database, then
    # delete the Redis copy. No-op if already in the DB or empty.
    def use_database!
      in_lock do
        break if db?
        break unless size > 0

        self.update!(raw_data: data, data_store: :db)
        redis_delete_data
      end
    end

    private

    # Enqueue the Redis->DB migration for a full chunk.
    def schedule_to_db
      return if db?

      BuildTraceSwapChunkWorker.perform_async(id)
    end

    # NOTE: intentional(?) misspelling of "fulfilled" kept for compatibility.
    def fullfilled?
      size == CHUNK_SIZE
    end

    def redis_data
      Gitlab::Redis::SharedState.with do |redis|
        redis.get(redis_data_key)
      end
    end

    def redis_set_data(data)
      Gitlab::Redis::SharedState.with do |redis|
        redis.set(redis_data_key, data, ex: CHUNK_REDIS_TTL)
      end
    end

    def redis_delete_data
      Gitlab::Redis::SharedState.with do |redis|
        redis.del(redis_data_key)
      end
    end

    def redis_data_key
      "gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}:data"
    end

    def redis_lock_key
      "gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}:lock"
    end

    # Run the block while holding an exclusive Redis lease for this chunk.
    # Retries up to LOCK_RETRY times, then raises WriteError. Reloads the
    # record before yielding so the block sees fresh attributes.
    def in_lock
      lease = Gitlab::ExclusiveLease.new(redis_lock_key, timeout: LOCK_TTL)
      retry_count = 0

      until uuid = lease.try_obtain
        # Keep trying until we obtain the lease. To prevent hammering Redis too
        # much we'll wait for a bit between retries.
        sleep(LOCK_SLEEP)
        break if LOCK_RETRY < (retry_count += 1)
      end

      raise WriteError, 'Failed to obtain write lock' unless uuid

      self.reload if self.persisted?
      return yield
    ensure
      # uuid may be nil when acquisition failed; cancel is then a no-op.
      Gitlab::ExclusiveLease.cancel(redis_lock_key, uuid)
    end
  end
end
Rename ExclusiveLease for trace write locking
module Ci
  # One fixed-size chunk of a CI build's trace (log output).
  # Chunk payloads live either in Redis (the default, TTL-bound, hot
  # path while the build runs) or in the database `raw_data` column,
  # as selected by the +data_store+ enum. Full chunks are migrated to
  # the database asynchronously.
  class BuildTraceChunk < ActiveRecord::Base
    extend Gitlab::Ci::Model

    belongs_to :build, class_name: "Ci::Build", foreign_key: :build_id

    # Drop the Redis payload when a Redis-backed chunk record is destroyed.
    after_destroy :redis_delete_data, if: :redis?

    default_value_for :data_store, :redis

    WriteError = Class.new(StandardError)

    CHUNK_SIZE = 128.kilobytes
    CHUNK_REDIS_TTL = 1.week
    WRITE_LOCK_RETRY = 100       # max lease-acquisition attempts in in_lock
    WRITE_LOCK_SLEEP = 1         # pause between attempts (seconds)
    WRITE_LOCK_TTL = 5.minutes   # lease lifetime if the holder dies

    enum data_store: {
      redis: 1,
      db: 2
    }

    # Chunk payload from whichever store is active, normalized to BINARY.
    # May be nil if the Redis key has expired or was never written.
    def data
      if redis?
        redis_data
      elsif db?
        raw_data
      else
        raise 'Unsupported data store'
      end&.force_encoding(Encoding::BINARY) # Redis/Database return UTF-8 string as default
    end

    # Replace the chunk payload under the write lock.
    # Raises ArgumentError if the value exceeds CHUNK_SIZE; schedules a
    # move to the database once the chunk is exactly full.
    def set_data(value)
      raise ArgumentError, 'too much data' if value.bytesize > CHUNK_SIZE

      in_lock do
        if redis?
          redis_set_data(value)
        elsif db?
          self.raw_data = value
        else
          raise 'Unsupported data store'
        end

        save! if changed?
      end

      schedule_to_db if fullfilled?
    end

    # Drop everything at and after +offset+ (byte position).
    def truncate(offset = 0)
      self.append("", offset)
    end

    # Overwrite the payload from byte +offset+ with +new_data+.
    # The offset must lie within the current payload and the result must
    # still fit in CHUNK_SIZE.
    def append(new_data, offset)
      current_data = self.data.to_s
      raise ArgumentError, 'Offset is out of bound' if offset > current_data.bytesize || offset < 0
      raise ArgumentError, 'Outside of chunk size' if CHUNK_SIZE < offset + new_data.bytesize

      self.set_data(current_data.byteslice(0, offset) + new_data)
    end

    # Current payload size in bytes (0 when data is nil).
    def size
      data&.bytesize.to_i
    end

    # Byte position of this chunk within the whole trace.
    def start_offset
      chunk_index * CHUNK_SIZE
    end

    def end_offset
      start_offset + size
    end

    # Half-open byte range this chunk covers in the full trace.
    def range
      (start_offset...end_offset)
    end

    # Migrate a non-empty Redis-backed chunk into the database, then
    # delete the Redis copy. No-op if already in the DB or empty.
    def use_database!
      in_lock do
        break if db?
        break unless size > 0

        self.update!(raw_data: data, data_store: :db)
        redis_delete_data
      end
    end

    private

    # Enqueue the Redis->DB migration for a full chunk.
    def schedule_to_db
      return if db?

      BuildTraceSwapChunkWorker.perform_async(id)
    end

    # NOTE: intentional(?) misspelling of "fulfilled" kept for compatibility.
    def fullfilled?
      size == CHUNK_SIZE
    end

    def redis_data
      Gitlab::Redis::SharedState.with do |redis|
        redis.get(redis_data_key)
      end
    end

    def redis_set_data(data)
      Gitlab::Redis::SharedState.with do |redis|
        redis.set(redis_data_key, data, ex: CHUNK_REDIS_TTL)
      end
    end

    def redis_delete_data
      Gitlab::Redis::SharedState.with do |redis|
        redis.del(redis_data_key)
      end
    end

    def redis_data_key
      "gitlab:ci:trace:#{build_id}:chunks:#{chunk_index}:data"
    end

    # Dedicated namespace for the trace write lock (distinct from data key).
    def redis_lock_key
      "trace_write:#{build_id}:chunks:#{chunk_index}"
    end

    # Run the block while holding an exclusive Redis lease for this chunk.
    # Retries up to WRITE_LOCK_RETRY times, then raises WriteError. Reloads
    # the record before yielding so the block sees fresh attributes.
    def in_lock
      lease = Gitlab::ExclusiveLease.new(redis_lock_key, timeout: WRITE_LOCK_TTL)
      retry_count = 0

      until uuid = lease.try_obtain
        # Keep trying until we obtain the lease. To prevent hammering Redis too
        # much we'll wait for a bit between retries.
        sleep(WRITE_LOCK_SLEEP)
        break if WRITE_LOCK_RETRY < (retry_count += 1)
      end

      raise WriteError, 'Failed to obtain write lock' unless uuid

      self.reload if self.persisted?
      return yield
    ensure
      # uuid may be nil when acquisition failed; cancel is then a no-op.
      Gitlab::ExclusiveLease.cancel(redis_lock_key, uuid)
    end
  end
end
|
require "language/node"
class Lerna < Formula
desc "Tool for managing JavaScript projects with multiple packages"
homepage "https://lerna.js.org"
url "https://registry.npmjs.org/lerna/-/lerna-4.0.0.tgz"
sha256 "64330ffdb7b7d879e40ca2520028958b9d6daff34a32547ced138b5896633bd4"
license "MIT"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "c83c1729e22b25ee574dd777aeafa6609354b28677a84edb4871c6316ec695a8"
sha256 cellar: :any_skip_relocation, big_sur: "0284f238fa3b15213745e9b41112bba211478c88c6912a8cc5cdaddae626f5ea"
sha256 cellar: :any_skip_relocation, catalina: "5f2b51b458e4379c8f4bf192eb532e567fa2a209eff59fc78aa4a73a2c95c9a4"
sha256 cellar: :any_skip_relocation, mojave: "363088564849de9b6c79ac5cdbbb872ca43841b0d80af27c85c696cbd2dc75bb"
sha256 cellar: :any_skip_relocation, high_sierra: "833823b45ebd250a74b170f980861ae9cc6831040e5362309e637d13291a97af"
end
depends_on "node"
def install
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir["#{libexec}/bin/*"]
end
test do
assert_match version.to_s, shell_output("#{bin}/lerna --version")
output = shell_output("#{bin}/lerna init --independent 2>&1")
assert_match "lerna success Initialized Lerna files", output
end
end
lerna: update 4.0.0 bottle.
require "language/node"
class Lerna < Formula
desc "Tool for managing JavaScript projects with multiple packages"
homepage "https://lerna.js.org"
url "https://registry.npmjs.org/lerna/-/lerna-4.0.0.tgz"
sha256 "64330ffdb7b7d879e40ca2520028958b9d6daff34a32547ced138b5896633bd4"
license "MIT"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "0e7264f3be53a6d765ed2578342a1e7909d39a99fb28ba431928fdfec6cd2955"
sha256 cellar: :any_skip_relocation, big_sur: "53885b6ebfeae441afa66d521667427dea0c4188f4ffb1cfe64e29639987d4a9"
sha256 cellar: :any_skip_relocation, catalina: "67e710cabaa7060f1ef2cf6cbafc8734daa93c4c21889a381b7551218e205b34"
sha256 cellar: :any_skip_relocation, mojave: "e32988735ca0475d93000c7a2d163289da595263c1d6d416321f9f84f1ce5f3b"
end
depends_on "node"
def install
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir["#{libexec}/bin/*"]
end
test do
assert_match version.to_s, shell_output("#{bin}/lerna --version")
output = shell_output("#{bin}/lerna init --independent 2>&1")
assert_match "lerna success Initialized Lerna files", output
end
end
|
# Capistrano (v2) multistage deployment for the adaptor-vmware app,
# deployed as a TorqueBox (JBoss/JRuby) knob. This section: requires,
# global settings, and the deploy lifecycle hooks.
require 'capistrano/ext/multistage'
require 'capistrano_colors'
require 'capistrano-helpers/specs'
require 'capistrano-helpers/version'
require 'bundler/capistrano'
require 'open-uri'
require 'rest_client'
require 'new_relic/recipes'

default_run_options[:pty] = true

# One stage per file in config/deploy/.
set :stages, Dir['config/deploy/*.rb'].map { |f| File.basename(f, '.rb') }
set :default_stage, "development"
set :bundle_without, [:development, :test, :automation, :assets]
# set :bundle_cmd, "jruby -S bundle"
# set :bundle_dir, fetch(:shared_path)+"/bundle"
# set :bundle_flags, "--deployment --quiet"
set :application, "adaptor-vmware"
set :user, "deploy"
set :group, "deploy"
set :ssh_options, { forward_agent: true }
set :scm, "git"
set :use_sudo, true
set :repository, "git@github.com:6fusion/#{application}.git"
# Branch precedence: TAG env var, BRANCH env var, then the current local branch.
set :branch, ENV['TAG'] || ENV['BRANCH'] || `git branch --no-color 2> /dev/null`.chomp.split("\n").grep(/^[*]/).first[/(\S+)$/, 1]
set :deploy_to, "/var/6fusion/#{application}"
set :deploy_via, :remote_cache
set :deploy_env, lambda { fetch(:stage) }
set :rails_env, lambda { fetch(:stage) }
set :keep_releases, 2
set :tail_logs_location, "#{shared_path}/log/#{application}.log"
set :context_path, ""
set :hipchat_alert, true
set :use_default_branch, ENV['USE_DEFAULT_BRANCH'] || false
set :password, ENV['PASSWORD'] if ENV['PASSWORD']

# Adaptor-VMware Specifics
set :ssh_port, 22
set :copy_exclude do
  %w{Capfile Vagrantfile README.* spec config/deploy.rb
     config/deploy .rvmrc .rspec data .git .gitignore **/test.* .yardopts} +
    (stages - [deploy_env]).map { |e| "**/#{e}.*" }
end

# Additional Deployment Actions
before "verify:rules", "build:get_tag"
before "deploy", "verify:rules"
after "deploy:cleanup", "alert:hipchat"
after "deploy:cleanup", "newrelic:notice_deployment"

# Post-deploy: prepare shared dirs owned by the torquebox user, compile
# Java resources, and (re)deploy the knob into TorqueBox.
after("deploy") do
  # Setup data directory
  run "#{sudo} mkdir -p #{shared_path}/data"
  run "#{sudo} chmod 0755 #{shared_path}/data"
  run "#{sudo} chown -R torquebox:torquebox #{shared_path}/data"
  # Symlink data directory to the current path
  run "#{sudo} ln -sfn #{shared_path}/data #{current_path}/data"
  run "#{sudo} chmod 0755 #{current_path}/data"
  run "#{sudo} chown -R torquebox:torquebox #{current_path}/data"
  # Setup logs
  run "#{sudo} touch #{tail_logs_location}"
  run "#{sudo} chmod 0666 #{tail_logs_location}"
  run "#{sudo} chown -R torquebox:torquebox #{tail_logs_location}"
  # Setup dead letters directory
  run "#{sudo} mkdir -p #{shared_path}/dead_letters"
  run "#{sudo} chmod 0755 #{shared_path}/dead_letters"
  run "#{sudo} chown -R torquebox:torquebox #{shared_path}/dead_letters"
  # Setup the tmp directory
  run "#{sudo} mkdir -p #{current_path}/tmp"
  run "#{sudo} chmod 0755 #{current_path}/tmp"
  run "#{sudo} chown -R torquebox:torquebox #{current_path}/tmp"
  # compile any java resources
  run "cd #{current_path} && #{sudo} rake"
  # Deploy the application
  run "#{sudo} torquebox deploy #{current_path} --name #{application} --env #{deploy_env} --context-path=#{context_path}"
  # Setup New Relic
  run "if [ -f #{shared_path}/newrelic.yml ]; then #{sudo} ln -sfn #{shared_path}/newrelic.yml #{current_path}/config; fi"
  deploy.cleanup
end

before("deploy:restart") do
  run "#{sudo} touch #{shared_path}/inodes.yml"
  run "#{sudo} chown torquebox:torquebox -R #{shared_path}/inodes.yml"
end

after("deploy:rollback") do
  run "#{sudo} torquebox undeploy #{current_path} --name #{application}"
end
# Guards against clobbering a colleague's work: by default, refuses to
# deploy over a branch that is neither merged to master nor the branch
# currently being deployed. Interactive prompt allows override/reset.
#
# Fixes relative to the previous version (behavior otherwise unchanged):
# - The local flag was named `abort`, shadowing Kernel#abort and relying
#   on parser subtleties to still call the method; renamed to
#   `abort_requested` and initialized explicitly.
# - `branches` was built with `strip!`, which returns nil when a line has
#   no surrounding whitespace, potentially inserting nils; use `strip`.
# - The chained-modifier `abort ... unless ... if ...` line is rewritten
#   as an explicit, equivalent `if`.
namespace :verify do
  task :rules, roles: :app do
    next if stage == :development

    if tag == "master"
      puts "Skipping verification since you are deploying master."
      next
    end

    # The VERSION file on the server records the branch last deployed.
    deployed_branch = capture("#{sudo} cat #{deploy_to}/current/VERSION || true").split("\r\n").last
    next if deployed_branch.nil? || deployed_branch.empty? || deployed_branch.include?('No such file or directory')

    puts "'#{deployed_branch}' branch is currently deployed to #{rails_env}."

    if deployed_branch == tag
      puts "Skipping verification since you are deploying the same branch."
      next
    end

    if deployed_branch == "master"
      puts "Skipping verification since master is currently deployed."
      next
    end

    puts "Updating local commit logs to check the status of the found commit."
    `git fetch origin`

    puts "Looking at master branch to determine if commit exists."
    branches = `git branch -r --contains #{deployed_branch}`.split(/\r\n|\n/).map { |branch| branch.strip }

    abort_requested = false
    unless branches.include?('origin/master') || branches.include?("origin/#{tag}")
      action_requested = Capistrano::CLI.ui.ask "If you continue deploying this branch you will be overwriting someone else's work. Would you like to [c]ontinue, [s]top, or [r]eset the environment back to master? [stop]: "
      case action_requested.to_s
      when "c"
        puts "Overriding default rules and deploying your branch, you evil evil coder. You were warned!"
        next
      when "r"
        puts "Reseting the environment to master."
        set :tag, "master"
      else
        puts "Aborting deploy..."
        abort_requested = true
      end
    end

    if abort_requested && !(branches.include?('origin/master') || branches.include?("origin/#{tag}"))
      abort "Since #{deployed_branch} is currently deployed to #{rails_env}. Please either merge #{deployed_branch} to master OR re-deploy either #{deployed_branch} or master branch to this environment."
    end

    puts "All rules have passed, continuing with deployment."
  end
end
# Prompt for (or default) the branch/tag to deploy and store it as :tag.
namespace :build do
  task :get_tag, roles: :builder do
    default_tag = `git branch --no-color 2> /dev/null`.chomp.split("\n").grep(/^[*]/).first[/(\S+)$/, 1]
    unless use_default_branch
      branch_tag = Capistrano::CLI.ui.ask "Branch/Tag to deploy (make sure to push the branch/tag to origin first) [#{default_tag}]: "
    end
    branch_tag = default_tag if branch_tag.to_s == ''
    set :tag, branch_tag
  end
end

namespace :logs do
  desc "tail log files"
  task :tail, roles: :app do
    run "tail -f #{tail_logs_location}" do |channel, stream, data|
      data.split("\n").each do |line|
        puts "[#{channel[:host]}] #{line}"
      end
      break if stream == :err
    end
    puts
  end

  desc 'truncate logs'
  task :truncate, roles: :app do
    run "#{sudo} truncate -s 0 /var/log/torquebox/torquebox.log"
    run "#{sudo} truncate -s 0 #{tail_logs_location}"
    run "#{sudo} rm -f /opt/torquebox/jboss/standalone/log/**/*.log"
    run "#{sudo} rm -f /opt/torquebox/jboss/standalone/log/*.{log,log.*}"
  end

  alias_task :default, :tail
end

desc "run chef-client"
task :chef_run, roles: :app do
  run "#{sudo} chef-client"
end

# TorqueBox service and knob management (upstart start/stop/restart).
namespace :torquebox do
  desc 'start'
  task :start, roles: :app do
    run "#{sudo} start torquebox"
  end

  desc 'stop'
  task :stop, roles: :app do
    run "#{sudo} stop torquebox"
  end

  desc 'restart'
  task :restart, roles: :app do
    run "#{sudo} restart torquebox"
  end

  desc 'deploy application'
  task :deploy, roles: :app do
    run "#{sudo} torquebox deploy #{current_path} --name #{application} --env #{deploy_env}"
    sleep 2
    # A .failed marker means JBoss rejected the knob; fail the task.
    run "#{sudo} test ! -f /opt/torquebox/jboss/standalone/deployments/#{application}-knob.yml.failed"
  end

  desc 'undeploy application'
  task :undeploy, roles: :app do
    run "#{sudo} torquebox undeploy #{current_path} --name #{application}"
  end

  desc 'undeploy then deploy application'
  task :redeploy, roles: :app do
    torquebox.undeploy
    torquebox.deploy
  end
end

namespace :alert do
  desc 'Alert Hipchat development room of successful deploy'
  task :hipchat, roles: :app do
    if hipchat_alert
      # NOTE(review): API token hardcoded in source control — should be
      # moved to an environment variable or encrypted config.
      hipchat_token = "06e70aeee31facbcbedafa466f5a90"
      hipchat_url = URI.escape("https://api.hipchat.com/v1/rooms/message?format=json&auth_token=#{hipchat_token}")
      message = "@#{ENV['USER']} deployed #{branch} of #{application} to #{stage}"
      RestClient.post(hipchat_url, { room_id: "59147", from: "DeployBot", color: "green", message_format: "text", message: message })
    end
  end
end

namespace :iptables do
  desc 'start'
  task :start do
    run "#{sudo} /etc/init.d/iptables start"
  end

  desc 'stop'
  task :stop do
    run "#{sudo} /etc/init.d/iptables stop"
  end

  desc 'restart'
  task :restart do
    run "#{sudo} /etc/init.d/iptables restart"
  end
end

# SSH configuration
task :configure, roles: :app do
  system "ssh configure@#{find_servers_for_task(self).first} -p #{ssh_port}"
end
Remove the need to compile java on deploy
# Capistrano (v2) multistage deployment for the adaptor-vmware app,
# deployed as a TorqueBox (JBoss/JRuby) knob. This section: requires,
# global settings, and the deploy lifecycle hooks.
require 'capistrano/ext/multistage'
require 'capistrano_colors'
require 'capistrano-helpers/specs'
require 'capistrano-helpers/version'
require 'bundler/capistrano'
require 'open-uri'
require 'rest_client'
require 'new_relic/recipes'

default_run_options[:pty] = true

# One stage per file in config/deploy/.
set :stages, Dir['config/deploy/*.rb'].map { |f| File.basename(f, '.rb') }
set :default_stage, "development"
set :bundle_without, [:development, :test, :automation, :assets]
# set :bundle_cmd, "jruby -S bundle"
# set :bundle_dir, fetch(:shared_path)+"/bundle"
# set :bundle_flags, "--deployment --quiet"
set :application, "adaptor-vmware"
set :user, "deploy"
set :group, "deploy"
set :ssh_options, { forward_agent: true }
set :scm, "git"
set :use_sudo, true
set :repository, "git@github.com:6fusion/#{application}.git"
# Branch precedence: TAG env var, BRANCH env var, then the current local branch.
set :branch, ENV['TAG'] || ENV['BRANCH'] || `git branch --no-color 2> /dev/null`.chomp.split("\n").grep(/^[*]/).first[/(\S+)$/, 1]
set :deploy_to, "/var/6fusion/#{application}"
set :deploy_via, :remote_cache
set :deploy_env, lambda { fetch(:stage) }
set :rails_env, lambda { fetch(:stage) }
set :keep_releases, 2
set :tail_logs_location, "#{shared_path}/log/#{application}.log"
set :context_path, ""
set :hipchat_alert, true
set :use_default_branch, ENV['USE_DEFAULT_BRANCH'] || false
set :password, ENV['PASSWORD'] if ENV['PASSWORD']

# Adaptor-VMware Specifics
set :ssh_port, 22
set :copy_exclude do
  %w{Capfile Vagrantfile README.* spec config/deploy.rb
     config/deploy .rvmrc .rspec data .git .gitignore **/test.* .yardopts} +
    (stages - [deploy_env]).map { |e| "**/#{e}.*" }
end

# Additional Deployment Actions
before "verify:rules", "build:get_tag"
before "deploy", "verify:rules"
after "deploy:cleanup", "alert:hipchat"
after "deploy:cleanup", "newrelic:notice_deployment"

# Post-deploy: prepare shared dirs owned by the torquebox user and
# (re)deploy the knob into TorqueBox.
after("deploy") do
  # Setup data directory
  run "#{sudo} mkdir -p #{shared_path}/data"
  run "#{sudo} chmod 0755 #{shared_path}/data"
  run "#{sudo} chown -R torquebox:torquebox #{shared_path}/data"
  # Symlink data directory to the current path
  run "#{sudo} ln -sfn #{shared_path}/data #{current_path}/data"
  run "#{sudo} chmod 0755 #{current_path}/data"
  run "#{sudo} chown -R torquebox:torquebox #{current_path}/data"
  # Setup logs
  run "#{sudo} touch #{tail_logs_location}"
  run "#{sudo} chmod 0666 #{tail_logs_location}"
  run "#{sudo} chown -R torquebox:torquebox #{tail_logs_location}"
  # Setup dead letters directory
  run "#{sudo} mkdir -p #{shared_path}/dead_letters"
  run "#{sudo} chmod 0755 #{shared_path}/dead_letters"
  run "#{sudo} chown -R torquebox:torquebox #{shared_path}/dead_letters"
  # Setup the tmp directory
  run "#{sudo} mkdir -p #{current_path}/tmp"
  run "#{sudo} chmod 0755 #{current_path}/tmp"
  run "#{sudo} chown -R torquebox:torquebox #{current_path}/tmp"
  # Deploy the application
  run "#{sudo} torquebox deploy #{current_path} --name #{application} --env #{deploy_env} --context-path=#{context_path}"
  # Setup New Relic
  run "if [ -f #{shared_path}/newrelic.yml ]; then #{sudo} ln -sfn #{shared_path}/newrelic.yml #{current_path}/config; fi"
  deploy.cleanup
end

before("deploy:restart") do
  run "#{sudo} touch #{shared_path}/inodes.yml"
  run "#{sudo} chown torquebox:torquebox -R #{shared_path}/inodes.yml"
end

after("deploy:rollback") do
  run "#{sudo} torquebox undeploy #{current_path} --name #{application}"
end
# Guards against clobbering a colleague's work: by default, refuses to
# deploy over a branch that is neither merged to master nor the branch
# currently being deployed. Interactive prompt allows override/reset.
#
# Fixes relative to the previous version (behavior otherwise unchanged):
# - The local flag was named `abort`, shadowing Kernel#abort and relying
#   on parser subtleties to still call the method; renamed to
#   `abort_requested` and initialized explicitly.
# - `branches` was built with `strip!`, which returns nil when a line has
#   no surrounding whitespace, potentially inserting nils; use `strip`.
# - The chained-modifier `abort ... unless ... if ...` line is rewritten
#   as an explicit, equivalent `if`.
namespace :verify do
  task :rules, roles: :app do
    next if stage == :development

    if tag == "master"
      puts "Skipping verification since you are deploying master."
      next
    end

    # The VERSION file on the server records the branch last deployed.
    deployed_branch = capture("#{sudo} cat #{deploy_to}/current/VERSION || true").split("\r\n").last
    next if deployed_branch.nil? || deployed_branch.empty? || deployed_branch.include?('No such file or directory')

    puts "'#{deployed_branch}' branch is currently deployed to #{rails_env}."

    if deployed_branch == tag
      puts "Skipping verification since you are deploying the same branch."
      next
    end

    if deployed_branch == "master"
      puts "Skipping verification since master is currently deployed."
      next
    end

    puts "Updating local commit logs to check the status of the found commit."
    `git fetch origin`

    puts "Looking at master branch to determine if commit exists."
    branches = `git branch -r --contains #{deployed_branch}`.split(/\r\n|\n/).map { |branch| branch.strip }

    abort_requested = false
    unless branches.include?('origin/master') || branches.include?("origin/#{tag}")
      action_requested = Capistrano::CLI.ui.ask "If you continue deploying this branch you will be overwriting someone else's work. Would you like to [c]ontinue, [s]top, or [r]eset the environment back to master? [stop]: "
      case action_requested.to_s
      when "c"
        puts "Overriding default rules and deploying your branch, you evil evil coder. You were warned!"
        next
      when "r"
        puts "Reseting the environment to master."
        set :tag, "master"
      else
        puts "Aborting deploy..."
        abort_requested = true
      end
    end

    if abort_requested && !(branches.include?('origin/master') || branches.include?("origin/#{tag}"))
      abort "Since #{deployed_branch} is currently deployed to #{rails_env}. Please either merge #{deployed_branch} to master OR re-deploy either #{deployed_branch} or master branch to this environment."
    end

    puts "All rules have passed, continuing with deployment."
  end
end
# Prompt for (or default) the branch/tag to deploy and store it as :tag.
namespace :build do
  task :get_tag, roles: :builder do
    default_tag = `git branch --no-color 2> /dev/null`.chomp.split("\n").grep(/^[*]/).first[/(\S+)$/, 1]
    unless use_default_branch
      branch_tag = Capistrano::CLI.ui.ask "Branch/Tag to deploy (make sure to push the branch/tag to origin first) [#{default_tag}]: "
    end
    branch_tag = default_tag if branch_tag.to_s == ''
    set :tag, branch_tag
  end
end

namespace :logs do
  desc "tail log files"
  task :tail, roles: :app do
    run "tail -f #{tail_logs_location}" do |channel, stream, data|
      data.split("\n").each do |line|
        puts "[#{channel[:host]}] #{line}"
      end
      break if stream == :err
    end
    puts
  end

  desc 'truncate logs'
  task :truncate, roles: :app do
    run "#{sudo} truncate -s 0 /var/log/torquebox/torquebox.log"
    run "#{sudo} truncate -s 0 #{tail_logs_location}"
    run "#{sudo} rm -f /opt/torquebox/jboss/standalone/log/**/*.log"
    run "#{sudo} rm -f /opt/torquebox/jboss/standalone/log/*.{log,log.*}"
  end

  alias_task :default, :tail
end

desc "run chef-client"
task :chef_run, roles: :app do
  run "#{sudo} chef-client"
end

# TorqueBox service and knob management (upstart start/stop/restart).
namespace :torquebox do
  desc 'start'
  task :start, roles: :app do
    run "#{sudo} start torquebox"
  end

  desc 'stop'
  task :stop, roles: :app do
    run "#{sudo} stop torquebox"
  end

  desc 'restart'
  task :restart, roles: :app do
    run "#{sudo} restart torquebox"
  end

  desc 'deploy application'
  task :deploy, roles: :app do
    run "#{sudo} torquebox deploy #{current_path} --name #{application} --env #{deploy_env}"
    sleep 2
    # A .failed marker means JBoss rejected the knob; fail the task.
    run "#{sudo} test ! -f /opt/torquebox/jboss/standalone/deployments/#{application}-knob.yml.failed"
  end

  desc 'undeploy application'
  task :undeploy, roles: :app do
    run "#{sudo} torquebox undeploy #{current_path} --name #{application}"
  end

  desc 'undeploy then deploy application'
  task :redeploy, roles: :app do
    torquebox.undeploy
    torquebox.deploy
  end
end

namespace :alert do
  desc 'Alert Hipchat development room of successful deploy'
  task :hipchat, roles: :app do
    if hipchat_alert
      # NOTE(review): API token hardcoded in source control — should be
      # moved to an environment variable or encrypted config.
      hipchat_token = "06e70aeee31facbcbedafa466f5a90"
      hipchat_url = URI.escape("https://api.hipchat.com/v1/rooms/message?format=json&auth_token=#{hipchat_token}")
      message = "@#{ENV['USER']} deployed #{branch} of #{application} to #{stage}"
      RestClient.post(hipchat_url, { room_id: "59147", from: "DeployBot", color: "green", message_format: "text", message: message })
    end
  end
end

namespace :iptables do
  desc 'start'
  task :start do
    run "#{sudo} /etc/init.d/iptables start"
  end

  desc 'stop'
  task :stop do
    run "#{sudo} /etc/init.d/iptables stop"
  end

  desc 'restart'
  task :restart do
    run "#{sudo} /etc/init.d/iptables restart"
  end
end

# SSH configuration
task :configure, roles: :app do
  system "ssh configure@#{find_servers_for_task(self).first} -p #{ssh_port}"
end
|
module Concerns
  module Event
    # Date behavior shared by Event models: parsing human-entered dates,
    # schedule formatting, and keeping the denormalized end_date in sync.
    #
    # Fix: set_end_date previously only filled in a nil end_date, so
    # moving an event to a new date left end_date stale — end_date/range
    # pointed at the old date and multiple_days? could wrongly be true.
    # It now also re-syncs end_date whenever date has changed.
    module Dates
      extend ActiveSupport::Concern

      included do
        before_save :set_end_date
      end

      module ClassMethods
        # Return list of every year that has at least one event
        # (always including the association's current effective year).
        def find_all_years
          years = [ ::RacingAssociation.current.effective_year ] +
            connection.select_values(
              "select distinct extract(year from date) from events"
            ).map(&:to_i)

          years = years.uniq.sort

          if years.size == 1
            years
          else
            # Fill gaps so the schedule can render every year, newest first.
            ((years.first)..(years.last)).to_a.reverse
          end
        end
      end

      # New events default to the parent's date, or today when standalone.
      def default_date
        if parent.present?
          parent.date
        else
          Time.zone.today
        end
      end

      # Format for schedule page primarily.
      # Pads single-digit month/day so columns line up in fixed-width output.
      def short_date
        return '' unless date

        prefix = ' ' if date.month < 10
        suffix = ' ' if date.day < 10
        "#{prefix}#{date.month}/#{date.day}#{suffix}"
      end

      def date_range_s(format = :short)
        if format == :long
          date.strftime('%-m/%-d/%Y')
        else
          "#{date.month}/#{date.day}"
        end
      end

      def date_range_long_s=(value)
        # Ignore
      end

      def date_range_long_s
        date.to_s :long_with_week_day
      end

      # The raw user-entered date string if one was set this request,
      # otherwise a formatted version of the stored date.
      def human_date
        if @human_date
          @human_date
        elsif date
          date.to_s(:long_with_week_day)
        else
          nil
        end
      end

      # Handle 7/25/2013, 7-25-2013, 7/25/13, 7-25-13
      def human_date=(value)
        @human_date = value.try(:strip)
        set_date_from_human_date
      end

      # Parse @human_date; add a validation error when it is unparseable.
      def set_date_from_human_date
        parsed_date = HumanDate::Parser.new.parse(@human_date)
        if parsed_date
          self.date = parsed_date
        else
          errors.add :human_date
        end
      end

      # +date+
      def start_date
        date
      end

      def start_date=(date)
        self.date = date
      end

      # Lazily syncs before reading so callers see a consistent value
      # even before the before_save callback runs.
      def end_date
        set_end_date
        self[:end_date]
      end

      def year
        return nil unless date
        date.year
      end

      def multiple_days?
        end_date > start_date
      end

      # Keep the denormalized end_date column in sync with date.
      # Also runs when date changed so end_date never goes stale
      # (MultiDayEvents manage their own end dates elsewhere).
      def set_end_date
        if self[:end_date].nil? || date != self[:end_date]
          self.end_date = date
        end
      end

      # Does nothing. Allows us to treat Events and MultiDayEvents the same.
      def update_date
      end
    end
  end
end
Update end_date when Event date changes
module Concerns
  module Event
    # Date behavior shared by Event models: parsing human-entered dates,
    # schedule formatting, and keeping the denormalized end_date in sync.
    module Dates
      extend ActiveSupport::Concern

      included do
        before_save :set_end_date
      end

      module ClassMethods
        # Return list of every year that has at least one event
        # (always including the association's current effective year).
        def find_all_years
          years = [ ::RacingAssociation.current.effective_year ] +
            connection.select_values(
              "select distinct extract(year from date) from events"
            ).map(&:to_i)

          years = years.uniq.sort

          if years.size == 1
            years
          else
            # Fill gaps so the schedule can render every year, newest first.
            ((years.first)..(years.last)).to_a.reverse
          end
        end
      end

      # New events default to the parent's date, or today when standalone.
      def default_date
        if parent.present?
          parent.date
        else
          Time.zone.today
        end
      end

      # Format for schedule page primarily.
      # Pads single-digit month/day so columns line up in fixed-width output.
      def short_date
        return '' unless date

        prefix = ' ' if date.month < 10
        suffix = ' ' if date.day < 10
        "#{prefix}#{date.month}/#{date.day}#{suffix}"
      end

      def date_range_s(format = :short)
        if format == :long
          date.strftime('%-m/%-d/%Y')
        else
          "#{date.month}/#{date.day}"
        end
      end

      def date_range_long_s=(value)
        # Ignore
      end

      def date_range_long_s
        date.to_s :long_with_week_day
      end

      # The raw user-entered date string if one was set this request,
      # otherwise a formatted version of the stored date.
      def human_date
        if @human_date
          @human_date
        elsif date
          date.to_s(:long_with_week_day)
        else
          nil
        end
      end

      # Handle 7/25/2013, 7-25-2013, 7/25/13, 7-25-13
      def human_date=(value)
        @human_date = value.try(:strip)
        set_date_from_human_date
      end

      # Parse @human_date; add a validation error when it is unparseable.
      def set_date_from_human_date
        parsed_date = HumanDate::Parser.new.parse(@human_date)
        if parsed_date
          self.date = parsed_date
        else
          errors.add :human_date
        end
      end

      # +date+
      def start_date
        date
      end

      def start_date=(date)
        self.date = date
      end

      # Lazily syncs before reading so callers see a consistent value
      # even before the before_save callback runs.
      def end_date
        set_end_date
        self[:end_date]
      end

      def year
        return nil unless date
        date.year
      end

      def multiple_days?
        end_date > start_date
      end

      # Keep the denormalized end_date column in sync with date: fill it
      # when nil, and re-sync whenever date has changed so it never goes
      # stale (MultiDayEvents manage their own end dates elsewhere).
      def set_end_date
        if self[:end_date].nil? || date != self[:end_date]
          self.end_date = date
        end
      end

      # Does nothing. Allows us to treat Events and MultiDayEvents the same.
      def update_date
      end
    end
  end
end
|
# Homebrew formula for libao, the Xiph cross-platform audio output library,
# built from a GitHub source tarball with autotools.
class Libao < Formula
desc "Cross-platform Audio Library"
homepage "https://www.xiph.org/ao/"
url "https://github.com/xiph/libao/archive/1.2.2.tar.gz"
sha256 "df8a6d0e238feeccb26a783e778716fb41a801536fe7b6fce068e313c0e2bf4d"
head "https://gitlab.xiph.org/xiph/libao.git"
bottle do
rebuild 2
sha256 "703bfcae17a364ad0e526d5556b3583d1864c6db4c52ba85ef64dc0600039372" => :catalina
sha256 "932b3a41565e678489471dae66b29fea1ca2de6013c2559f9c34cc5e9bd5a33f" => :mojave
sha256 "d7144edd6dc64b987d9a9d584799fe20a76ed92f2b1b18c074a6846926f23169" => :high_sierra
end
# The GitHub tarball ships no configure script, so autotools are required
# at build time (autogen.sh regenerates it below).
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "pkg-config" => :build
def install
ENV["AUTOMAKE_FLAGS"] = "--include-deps"
system "./autogen.sh"
args = %W[
--disable-dependency-tracking
--prefix=#{prefix}
--enable-static
]
system "./configure", *args
system "make", "install"
end
# Smoke test: compile and run a minimal program that initializes libao.
test do
(testpath/"test.cpp").write <<~EOS
#include <ao/ao.h>
int main() {
ao_initialize();
return 0;
}
EOS
system ENV.cc, "test.cpp", "-I#{include}", "-L#{lib}", "-lao", "-o", "test"
system "./test"
end
end
libao: update 1.2.2 bottle.
# Homebrew formula for libao, the Xiph cross-platform audio output library,
# built from a GitHub source tarball with autotools.
class Libao < Formula
desc "Cross-platform Audio Library"
homepage "https://www.xiph.org/ao/"
url "https://github.com/xiph/libao/archive/1.2.2.tar.gz"
sha256 "df8a6d0e238feeccb26a783e778716fb41a801536fe7b6fce068e313c0e2bf4d"
head "https://gitlab.xiph.org/xiph/libao.git"
bottle do
rebuild 2
sha256 "703bfcae17a364ad0e526d5556b3583d1864c6db4c52ba85ef64dc0600039372" => :catalina
sha256 "932b3a41565e678489471dae66b29fea1ca2de6013c2559f9c34cc5e9bd5a33f" => :mojave
sha256 "d7144edd6dc64b987d9a9d584799fe20a76ed92f2b1b18c074a6846926f23169" => :high_sierra
sha256 "15cc75f026413a6866aab48694f9f72eaa751247a934a950d3624213f09644f3" => :x86_64_linux
end
# The GitHub tarball ships no configure script, so autotools are required
# at build time (autogen.sh regenerates it below).
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "pkg-config" => :build
def install
ENV["AUTOMAKE_FLAGS"] = "--include-deps"
system "./autogen.sh"
args = %W[
--disable-dependency-tracking
--prefix=#{prefix}
--enable-static
]
system "./configure", *args
system "make", "install"
end
# Smoke test: compile and run a minimal program that initializes libao.
test do
(testpath/"test.cpp").write <<~EOS
#include <ao/ao.h>
int main() {
ao_initialize();
return 0;
}
EOS
system ENV.cc, "test.cpp", "-I#{include}", "-L#{lib}", "-lao", "-o", "test"
system "./test"
end
end
|
# Capistrano deployment configuration for the Lorry application.
# config valid only for current version of Capistrano
lock '3.3.5'
set :application, 'lorry'
# Git repository
set :scm, :git
set :repo_url, 'git@github.com:beheh/clonklorry.git'
# Default branch is :master, can be overridden
set :branch, ENV['REVISION'] || ENV['BRANCH_NAME'] || 'master'
# Logging
set :format, :pretty
set :log_level, :info
# Symlinks shared between deployments
set :linked_files, fetch(:linked_files, []) + %w(config/lorry.yml config/tracking.html)
set :linked_dirs, fetch(:linked_dirs, []) + %w(upload logs)
# Example config files
set :config_example_suffix, '.example'
# Default value for keep_releases is 5
set :keep_releases, 5
# Add composer command
SSHKit.config.command_map[:composer] = "composer"
# Custom composer options (currently disabled; the plugin defaults apply)
# set :composer_install_flags, '--no-dev --prefer-dist --no-interaction --quiet --optimize-autoloader'
# Transfer config files to server
before 'deploy:check:linked_files', 'config:push'
# Setup console
set :lorry_console_path, "app/console"
improve composer logging on deployment
# Capistrano deployment configuration for the Lorry application.
# config valid only for current version of Capistrano
lock '3.3.5'
set :application, 'lorry'
# Git repository
set :scm, :git
set :repo_url, 'git@github.com:beheh/clonklorry.git'
# Default branch is :master, can be overridden
set :branch, ENV['REVISION'] || ENV['BRANCH_NAME'] || 'master'
# Logging
set :format, :pretty
set :log_level, :info
# Symlinks shared between deployments
set :linked_files, fetch(:linked_files, []) + %w(config/lorry.yml config/tracking.html)
set :linked_dirs, fetch(:linked_dirs, []) + %w(upload logs)
# Example config files
set :config_example_suffix, '.example'
# Default value for keep_releases is 5
set :keep_releases, 5
# Add composer command
SSHKit.config.command_map[:composer] = "composer"
# Custom composer options (no --quiet, so composer output shows in the deploy log)
set :composer_install_flags, '--no-dev --prefer-dist --no-interaction --optimize-autoloader'
# Transfer config files to server
before 'deploy:check:linked_files', 'config:push'
# Setup console
set :lorry_console_path, "app/console"
require 'faraday'
require 'faraday_middleware'
require 'net/http'
require 'excon'
require 'uri'
# HTTP client concern built on Faraday: issues GET/POST requests,
# normalizes JSON/XML responses, tracks agent rate limits, and converts
# transport errors into persisted Notification records.
module Networkable
extend ActiveSupport::Concern
included do
# Fetch +url+ and return the parsed body (Hash for JSON/XML, raw string
# otherwise). Network failures are caught by the method-level rescue and
# returned as { error:, status: } hashes instead of raising.
def get_result(url, options={})
options[:content_type] ||= 'json'
options[:headers] ||= {}
options[:headers] = set_request_headers(url, options)
conn = faraday_conn(options[:content_type], options)
conn.options[:timeout] = options[:timeout] || DEFAULT_TIMEOUT
# POST when a payload is supplied, GET otherwise.
if options[:data]
response = conn.post url, {}, options[:headers] do |request|
request.body = options[:data]
end
else
response = conn.get url, {}, options[:headers]
end
# set number of available API calls for agents
if options[:agent_id].present?
agent = Agent.where(id: options[:agent_id]).first
agent.update_attributes(rate_limit_remaining: get_rate_limit_remaining(response.headers),
rate_limit_reset: get_rate_limit_reset(response.headers),
last_response: Time.zone.now)
end
# parsing by content type is not reliable, so we check the response format
if is_json?(response.body)
JSON.parse(response.body)
elsif is_xml?(response.body)
Hash.from_xml(response.body)
else
response.body
end
rescue *NETWORKABLE_EXCEPTIONS => e
rescue_faraday_error(url, e, options)
end
# Build request headers: Host plus one of Bearer / Token / Basic
# authorization depending on the supplied options.
def set_request_headers(url, options)
options[:headers] ||= {}
options[:headers]['Host'] = URI.parse(url).host
if options[:bearer].present?
options[:headers]['Authorization'] = "Bearer #{options[:bearer]}"
elsif options[:token].present?
options[:headers]["Authorization"] = "Token token=#{options[:token]}"
elsif options[:username].present?
# FIXME: Base64.encode64 inserts a newline every 60 characters, so long
# username:password pairs produce an invalid Authorization header.
# Use Base64.strict_encode64 (or Rails' HttpAuthentication helper).
basic = Base64.encode64("#{options[:username]}:#{options[:password].to_s}")
options[:headers]["Authorization"] = "Basic #{basic}"
end
options[:headers]
end
# Faraday connection preconfigured with Accept/User-Agent headers,
# redirect following and error raising. +content_type+ may be a shorthand
# ("html"/"xml"/"json") or a full MIME type.
def faraday_conn(content_type = 'json', options = {})
content_types = { "html" => 'text/html; charset=UTF-8',
"xml" => 'application/xml',
"json" => 'application/json' }
accept_header = content_types.fetch(content_type, content_type)
limit = options[:limit] || 10
Faraday.new do |c|
c.headers['Accept'] = accept_header
c.headers['User-Agent'] = "Lagotto - http://#{ENV['SERVERNAME']}"
c.use FaradayMiddleware::FollowRedirects, limit: limit, cookie: :all
c.request :multipart
c.request :json if accept_header == 'application/json'
c.use Faraday::Response::RaiseError
c.adapter Faraday.default_adapter
end
end
# Convert a Faraday exception into an { error:, status: } hash and record
# a Notification; plain 404s are delegated to not_found_error.
def rescue_faraday_error(url, error, options={})
if error.is_a?(Faraday::ResourceNotFound)
not_found_error(url, error, options)
else
details = nil
headers = {}
if error.is_a?(Faraday::Error::TimeoutError)
status = 408
elsif error.respond_to?('status')
status = error[:status]
elsif error.respond_to?('response') && error.response.present?
status = error.response[:status]
details = error.response[:body]
headers = error.response[:headers]
else
status = 400
end
# Some sources use a different status for rate-limiting errors
# FIXME: +details+ is only set on the error.response branch above; a 403
# arriving via another branch leaves it nil and this line raises
# NoMethodError. Guard with `details && details.include?(...)`.
status = 429 if status == 403 && details.include?("Excessive use detected")
if error.respond_to?('exception')
exception = error.exception
else
exception = ""
end
class_name = class_name_by_status(status) || error.class
level = level_by_status(status)
message = parse_error_response(error.message)
message = "#{message} for #{url}"
message = "#{message}. Rate-limit #{get_rate_limit_limit(headers)} exceeded." if class_name == Net::HTTPTooManyRequests
Notification.where(message: message).where(unresolved: true).first_or_create(
exception: exception,
class_name: class_name.to_s,
details: details,
status: status,
target_url: url,
level: level,
work_id: options[:work_id],
source_id: options[:source_id])
{ error: message, status: status }
end
end
# Handle 404s: record a Notification for DOI mismatches/lookups, otherwise
# return a plain "resource not found" style result.
def not_found_error(url, error, options={})
status = 404
# we raise an error if we find a canonical URL mismatch
# or a DOI can't be resolved
if options[:doi_mismatch] || options[:doi_lookup]
work = Work.where(id: options[:work_id]).first
if options[:doi_mismatch]
message = error.response[:message]
else
message = "DOI #{work.doi} could not be resolved"
end
Notification.where(message: message).where(unresolved: true).first_or_create(
exception: error.exception,
class_name: "Net::HTTPNotFound",
details: error.response[:body],
status: status,
work_id: work.id,
target_url: url)
{ error: message, status: status }
else
# NOTE(review): when error.response is blank, the right-hand operand
# dereferences nil and raises. This was probably meant to be
# `error.response.blank? || error.response[:body].blank?` — confirm.
if error.response.blank? && error.response[:body].blank?
message = "resource not found"
else
message = parse_error_response(error.response[:body])
end
{ error: message, status: status }
end
end
# Map an HTTP status code to the matching Net::HTTP response class;
# nil for unknown codes (caller falls back to the error's own class).
def class_name_by_status(status)
{ 400 => Net::HTTPBadRequest,
401 => Net::HTTPUnauthorized,
403 => Net::HTTPForbidden,
404 => Net::HTTPNotFound,
406 => Net::HTTPNotAcceptable,
408 => Net::HTTPRequestTimeOut,
409 => Net::HTTPConflict,
417 => Net::HTTPExpectationFailed,
429 => Net::HTTPTooManyRequests,
500 => Net::HTTPInternalServerError,
502 => Net::HTTPBadGateway,
503 => Net::HTTPServiceUnavailable,
504 => Net::HTTPGatewayTimeOut }.fetch(status, nil)
end
# Notification severity for a status code.
def level_by_status(status)
case status
# temporary network problems should be WARN not ERROR
when 408, 502, 503, 504 then 2
else 3
end
end
# currently supported by twitter, github, ads and ads_fulltext
# sources with slightly different header names
def get_rate_limit_remaining(headers)
headers["X-Rate-Limit-Remaining"] || headers["X-RateLimit-Remaining"]
end
def get_rate_limit_limit(headers)
headers["X-Rate-Limit-Limit"] || headers["X-RateLimit-Limit"]
end
def get_rate_limit_reset(headers)
headers["X-Rate-Limit-Reset"] || headers["X-RateLimit-Reset"]
end
# Extract a human-readable message from an error body (JSON, XML or text).
def parse_error_response(string)
if is_json?(string)
string = JSON.parse(string)
elsif is_xml?(string)
string = Hash.from_xml(string)
end
string = string['error'] if string.is_a?(Hash) && string['error']
string
end
def is_xml?(string)
Nokogiri::XML(string).errors.empty?
end
# NOTE: returns the parsed JSON object (truthy) rather than true;
# callers rely only on its truthiness.
def is_json?(string)
JSON.parse(string)
rescue JSON::ParserError
false
end
end
end
Fix basic authentication: encode credentials without inserting newlines
require 'faraday'
require 'faraday_middleware'
require 'net/http'
require 'excon'
require 'uri'
# HTTP client concern built on Faraday: issues GET/POST requests,
# normalizes JSON/XML responses, tracks agent rate limits, and converts
# transport errors into persisted Notification records.
module Networkable
extend ActiveSupport::Concern
included do
# Fetch +url+ and return the parsed body (Hash for JSON/XML, raw string
# otherwise). Network failures are caught by the method-level rescue and
# returned as { error:, status: } hashes instead of raising.
def get_result(url, options={})
options[:headers] ||= {}
options[:headers] = set_request_headers(url, options)
conn = faraday_conn(options)
conn.options[:timeout] = options[:timeout] || DEFAULT_TIMEOUT
# POST when a payload is supplied, GET otherwise.
if options[:data]
response = conn.post url, {}, options[:headers] do |request|
request.body = options[:data]
end
else
response = conn.get url, {}, options[:headers]
end
# set number of available API calls for agents
if options[:agent_id].present?
agent = Agent.where(id: options[:agent_id]).first
agent.update_attributes(rate_limit_remaining: get_rate_limit_remaining(response.headers),
rate_limit_reset: get_rate_limit_reset(response.headers),
last_response: Time.zone.now)
end
# parsing by content type is not reliable, so we check the response format
if is_json?(response.body)
JSON.parse(response.body)
elsif is_xml?(response.body)
Hash.from_xml(response.body)
else
response.body
end
rescue *NETWORKABLE_EXCEPTIONS => e
rescue_faraday_error(url, e, options)
end
# Build request headers: Host, an Accept header derived from
# options[:content_type] (shorthand or full MIME type), and one of
# Bearer / Token / Basic authorization depending on the options.
def set_request_headers(url, options)
options[:headers] ||= {}
options[:headers]['Host'] = URI.parse(url).host
if options[:content_type].present?
accept_headers = { "html" => 'text/html; charset=UTF-8',
"xml" => 'application/xml',
"json" => 'application/json' }
options[:headers]['Accept'] = accept_headers.fetch(options[:content_type], options[:content_type])
end
if options[:bearer].present?
options[:headers]['Authorization'] = "Bearer #{options[:bearer]}"
elsif options[:token].present?
options[:headers]["Authorization"] = "Token token=#{options[:token]}"
elsif options[:username].present?
# Rails helper emits a single-line Basic header (no Base64 newlines).
options[:headers]["Authorization"] = ActionController::HttpAuthentication::Basic.encode_credentials(options[:username], options[:password])
end
options[:headers]
end
# Faraday connection preconfigured with Accept/User-Agent headers,
# redirect following and error raising. Defaults Accept to JSON.
def faraday_conn(options = {})
options[:headers] ||= {}
options[:headers]['Accept'] ||= "application/json"
limit = options[:limit] || 10
Faraday.new do |c|
c.headers['Accept'] = options[:headers]['Accept']
c.headers['User-Agent'] = "Lagotto - http://#{ENV['SERVERNAME']}"
c.use FaradayMiddleware::FollowRedirects, limit: limit, cookie: :all
c.request :multipart
c.request :json if options[:headers]['Accept'] == 'application/json'
c.use Faraday::Response::RaiseError
c.adapter Faraday.default_adapter
end
end
# Convert a Faraday exception into an { error:, status: } hash and record
# a Notification; plain 404s are delegated to not_found_error.
def rescue_faraday_error(url, error, options={})
if error.is_a?(Faraday::ResourceNotFound)
not_found_error(url, error, options)
else
details = nil
headers = {}
if error.is_a?(Faraday::Error::TimeoutError)
status = 408
elsif error.respond_to?('status')
status = error[:status]
elsif error.respond_to?('response') && error.response.present?
status = error.response[:status]
details = error.response[:body]
headers = error.response[:headers]
else
status = 400
end
# Some sources use a different status for rate-limiting errors
# FIXME: +details+ is only set on the error.response branch above; a 403
# arriving via another branch leaves it nil and this line raises
# NoMethodError. Guard with `details && details.include?(...)`.
status = 429 if status == 403 && details.include?("Excessive use detected")
if error.respond_to?('exception')
exception = error.exception
else
exception = ""
end
class_name = class_name_by_status(status) || error.class
level = level_by_status(status)
message = parse_error_response(error.message)
message = "#{message} for #{url}"
message = "#{message}. Rate-limit #{get_rate_limit_limit(headers)} exceeded." if class_name == Net::HTTPTooManyRequests
Notification.where(message: message).where(unresolved: true).first_or_create(
exception: exception,
class_name: class_name.to_s,
details: details,
status: status,
target_url: url,
level: level,
work_id: options[:work_id],
source_id: options[:source_id])
{ error: message, status: status }
end
end
# Handle 404s: record a Notification for DOI mismatches/lookups, otherwise
# return a plain "resource not found" style result.
def not_found_error(url, error, options={})
status = 404
# we raise an error if we find a canonical URL mismatch
# or a DOI can't be resolved
if options[:doi_mismatch] || options[:doi_lookup]
work = Work.where(id: options[:work_id]).first
if options[:doi_mismatch]
message = error.response[:message]
else
message = "DOI #{work.doi} could not be resolved"
end
Notification.where(message: message).where(unresolved: true).first_or_create(
exception: error.exception,
class_name: "Net::HTTPNotFound",
details: error.response[:body],
status: status,
work_id: work.id,
target_url: url)
{ error: message, status: status }
else
# NOTE(review): when error.response is blank, the right-hand operand
# dereferences nil and raises. This was probably meant to be
# `error.response.blank? || error.response[:body].blank?` — confirm.
if error.response.blank? && error.response[:body].blank?
message = "resource not found"
else
message = parse_error_response(error.response[:body])
end
{ error: message, status: status }
end
end
# Map an HTTP status code to the matching Net::HTTP response class;
# nil for unknown codes (caller falls back to the error's own class).
def class_name_by_status(status)
{ 400 => Net::HTTPBadRequest,
401 => Net::HTTPUnauthorized,
403 => Net::HTTPForbidden,
404 => Net::HTTPNotFound,
406 => Net::HTTPNotAcceptable,
408 => Net::HTTPRequestTimeOut,
409 => Net::HTTPConflict,
417 => Net::HTTPExpectationFailed,
429 => Net::HTTPTooManyRequests,
500 => Net::HTTPInternalServerError,
502 => Net::HTTPBadGateway,
503 => Net::HTTPServiceUnavailable,
504 => Net::HTTPGatewayTimeOut }.fetch(status, nil)
end
# Notification severity for a status code.
def level_by_status(status)
case status
# temporary network problems should be WARN not ERROR
when 408, 502, 503, 504 then 2
else 3
end
end
# currently supported by twitter, github, ads and ads_fulltext
# sources with slightly different header names
def get_rate_limit_remaining(headers)
headers["X-Rate-Limit-Remaining"] || headers["X-RateLimit-Remaining"]
end
def get_rate_limit_limit(headers)
headers["X-Rate-Limit-Limit"] || headers["X-RateLimit-Limit"]
end
def get_rate_limit_reset(headers)
headers["X-Rate-Limit-Reset"] || headers["X-RateLimit-Reset"]
end
# Extract a human-readable message from an error body (JSON, XML or text).
def parse_error_response(string)
if is_json?(string)
string = JSON.parse(string)
elsif is_xml?(string)
string = Hash.from_xml(string)
end
string = string['error'] if string.is_a?(Hash) && string['error']
string
end
def is_xml?(string)
Nokogiri::XML(string).errors.empty?
end
# NOTE: returns the parsed JSON object (truthy) rather than true;
# callers rely only on its truthiness.
def is_json?(string)
JSON.parse(string)
rescue JSON::ParserError
false
end
end
end
|
# Homebrew formula for libev, a high-performance asynchronous event loop.
class Libev < Formula
desc "Asynchronous event library"
homepage "http://software.schmorp.de/pkg/libev.html"
url "http://dist.schmorp.de/libev/Attic/libev-4.33.tar.gz"
mirror "https://fossies.org/linux/misc/libev-4.33.tar.gz"
sha256 "507eb7b8d1015fbec5b935f34ebed15bf346bed04a11ab82b8eee848c4205aea"
livecheck do
url "http://dist.schmorp.de/libev/"
regex(/href=.*?libev[._-]v?([\d.]+)\./i)
end
bottle do
cellar :any
sha256 "95ddf4b85924a6a10d4a88b6eb52616fa8375e745c99d0752618d5bb82f5248a" => :big_sur
sha256 "e5481e2ba48282bffb5ecc059f0ddddd9807400593e849ed4b48b1fed3a14698" => :catalina
sha256 "f6cfb8c6bb1219f4a54d36113ada7cc7e1e446d5a207bc77d69ac30d9cfe391f" => :mojave
sha256 "f623fc2f4dc3a0980b4733945eb2025cd40636a6d4f5e5d75ae5f89e0b7b07bd" => :high_sierra
end
def install
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--mandir=#{man}"
system "make", "install"
# Remove compatibility header to prevent conflict with libevent
(include/"event.h").unlink
end
# Build a small ev_io watcher program and check it echoes one stdin line.
test do
(testpath/"test.c").write <<~'EOS'
/* Wait for stdin to become readable, then read and echo the first line. */
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <ev.h>
ev_io stdin_watcher;
static void stdin_cb (EV_P_ ev_io *watcher, int revents) {
char *buf;
size_t nbytes = 255;
buf = (char *)malloc(nbytes + 1);
getline(&buf, &nbytes, stdin);
printf("%s", buf);
ev_io_stop(EV_A_ watcher);
ev_break(EV_A_ EVBREAK_ALL);
}
int main() {
ev_io_init(&stdin_watcher, stdin_cb, STDIN_FILENO, EV_READ);
ev_io_start(EV_DEFAULT, &stdin_watcher);
ev_run(EV_DEFAULT, 0);
return 0;
}
EOS
system ENV.cc, "test.c", "-I#{include}", "-L#{lib}", "-lev", "-o", "test"
input = "hello, world\n"
assert_equal input, pipe_output("./test", input, 0)
end
end
libev: update 4.33 bottle.
# Homebrew formula for libev, a high-performance asynchronous event loop.
class Libev < Formula
desc "Asynchronous event library"
homepage "http://software.schmorp.de/pkg/libev.html"
url "http://dist.schmorp.de/libev/Attic/libev-4.33.tar.gz"
mirror "https://fossies.org/linux/misc/libev-4.33.tar.gz"
sha256 "507eb7b8d1015fbec5b935f34ebed15bf346bed04a11ab82b8eee848c4205aea"
livecheck do
url "http://dist.schmorp.de/libev/"
regex(/href=.*?libev[._-]v?([\d.]+)\./i)
end
bottle do
cellar :any
sha256 "95ddf4b85924a6a10d4a88b6eb52616fa8375e745c99d0752618d5bb82f5248a" => :big_sur
sha256 "8ed86bdd0ff3b47f8802b251a9ca61770ffc4c9b0be964f41f50955256b5bb53" => :arm64_big_sur
sha256 "e5481e2ba48282bffb5ecc059f0ddddd9807400593e849ed4b48b1fed3a14698" => :catalina
sha256 "f6cfb8c6bb1219f4a54d36113ada7cc7e1e446d5a207bc77d69ac30d9cfe391f" => :mojave
sha256 "f623fc2f4dc3a0980b4733945eb2025cd40636a6d4f5e5d75ae5f89e0b7b07bd" => :high_sierra
end
def install
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--mandir=#{man}"
system "make", "install"
# Remove compatibility header to prevent conflict with libevent
(include/"event.h").unlink
end
# Build a small ev_io watcher program and check it echoes one stdin line.
test do
(testpath/"test.c").write <<~'EOS'
/* Wait for stdin to become readable, then read and echo the first line. */
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <ev.h>
ev_io stdin_watcher;
static void stdin_cb (EV_P_ ev_io *watcher, int revents) {
char *buf;
size_t nbytes = 255;
buf = (char *)malloc(nbytes + 1);
getline(&buf, &nbytes, stdin);
printf("%s", buf);
ev_io_stop(EV_A_ watcher);
ev_break(EV_A_ EVBREAK_ALL);
}
int main() {
ev_io_init(&stdin_watcher, stdin_cb, STDIN_FILENO, EV_READ);
ev_io_start(EV_DEFAULT, &stdin_watcher);
ev_run(EV_DEFAULT, 0);
return 0;
}
EOS
system ENV.cc, "test.c", "-I#{include}", "-L#{lib}", "-lev", "-o", "test"
input = "hello, world\n"
assert_equal input, pipe_output("./test", input, 0)
end
end
|
# Capistrano 3.5 deployment configuration for the transpotter app.
# config valid only for current version of Capistrano
lock '3.5.0'
set :application, 'app'
set :repo_url, 'https://github.com/koshigoe/transpotter.git'
# Default branch is :master
# ask :branch, `git rev-parse --abbrev-ref HEAD`.chomp
# Default deploy_to directory is /var/www/my_app_name
# set :deploy_to, '/var/www/my_app_name'
# Default value for :scm is :git
# set :scm, :git
# Default value for :format is :airbrussh.
# set :format, :airbrussh
# You can configure the Airbrussh format using :format_options.
# These are the defaults.
# set :format_options, command_output: true, log_file: 'log/capistrano.log', color: :auto, truncate: :auto
# Default value for :pty is false
# set :pty, true
# Default value for :linked_files is []
# set :linked_files, fetch(:linked_files, []).push('config/database.yml', 'config/secrets.yml')
set :linked_dirs, fetch(:linked_dirs, []).push('log', 'tmp/pids', 'tmp/cache', 'tmp/sockets', 'vendor/bundle', 'public/system')
# Default value for linked_dirs is []
# set :linked_dirs, fetch(:linked_dirs, []).push('log', 'tmp/pids', 'tmp/cache', 'tmp/sockets', 'public/system')
# Default value for default_env is {}
set :default_env, { path: '/usr/pgsql-9.5/bin:$PATH' }
set :migration_role, :api
# Default value for keep_releases is 5
# set :keep_releases, 5
namespace :deploy do
# Create the deploy root (owned by the deploy user) if it is missing.
task :setup_deploy_to do
on release_roles(:all) do |host|
sudo "install --owner=#{host.user} --mode=0755 -d #{deploy_to}" if test("[ ! -d #{deploy_to} ]")
end
end
before 'deploy:starting', 'deploy:setup_deploy_to'
# Rolling Puma restart on API hosts, one group at a time.
task :restart do
on roles(:api), in: :groups, limit: 1, wait: 15 do
within current_path do
invoke 'puma:restart'
end
end
end
after 'deploy:publishing', 'deploy:restart'
after :restart, :clear_cache do
on roles(:web), in: :groups, limit: 3, wait: 10 do
# Here we can do anything such as:
# within release_path do
# execute :rake, 'cache:clear'
# end
end
end
end
Set puma_role
# Capistrano 3.5 deployment configuration for the transpotter app.
# config valid only for current version of Capistrano
lock '3.5.0'
set :application, 'app'
set :repo_url, 'https://github.com/koshigoe/transpotter.git'
# Default branch is :master
# ask :branch, `git rev-parse --abbrev-ref HEAD`.chomp
# Default deploy_to directory is /var/www/my_app_name
# set :deploy_to, '/var/www/my_app_name'
# Default value for :scm is :git
# set :scm, :git
# Default value for :format is :airbrussh.
# set :format, :airbrussh
# You can configure the Airbrussh format using :format_options.
# These are the defaults.
# set :format_options, command_output: true, log_file: 'log/capistrano.log', color: :auto, truncate: :auto
# Default value for :pty is false
# set :pty, true
# Default value for :linked_files is []
# set :linked_files, fetch(:linked_files, []).push('config/database.yml', 'config/secrets.yml')
set :linked_dirs, fetch(:linked_dirs, []).push('log', 'tmp/pids', 'tmp/cache', 'tmp/sockets', 'vendor/bundle', 'public/system')
# Default value for linked_dirs is []
# set :linked_dirs, fetch(:linked_dirs, []).push('log', 'tmp/pids', 'tmp/cache', 'tmp/sockets', 'public/system')
# Default value for default_env is {}
set :default_env, { path: '/usr/pgsql-9.5/bin:$PATH' }
set :migration_role, :api
# Run Puma tasks (capistrano3-puma) on the API role as well.
set :puma_role, :api
# Default value for keep_releases is 5
# set :keep_releases, 5
namespace :deploy do
# Create the deploy root (owned by the deploy user) if it is missing.
task :setup_deploy_to do
on release_roles(:all) do |host|
sudo "install --owner=#{host.user} --mode=0755 -d #{deploy_to}" if test("[ ! -d #{deploy_to} ]")
end
end
before 'deploy:starting', 'deploy:setup_deploy_to'
# Rolling Puma restart on API hosts, one group at a time.
task :restart do
on release_roles(:api), in: :groups, limit: 1, wait: 15 do
within current_path do
invoke 'puma:restart'
end
end
end
after 'deploy:publishing', 'deploy:restart'
after :restart, :clear_cache do
on roles(:web), in: :groups, limit: 3, wait: 10 do
# Here we can do anything such as:
# within release_path do
# execute :rake, 'cache:clear'
# end
end
end
end
|
# Tree behaviour for Page documents: position-ordered acts_as_tree with
# callbacks keeping parent_id, materialized path, depth and sibling
# positions consistent.
module Extensions
module Page
module Tree
extend ActiveSupport::Concern
included do
include Mongoid::Acts::Tree
## fields ##
field :position, :type => Integer
## behaviours ##
acts_as_tree :order => ['position', 'asc']
## callbacks ##
before_validation :reset_parent
before_save { |p| p.send(:write_attribute, :parent_id, nil) if p.parent_id.blank? }
before_save :change_parent
before_create { |p| p.send(:fix_position, false) }
before_create :add_to_list_bottom
before_destroy :remove_from_list
# Fixme (Didier L.): Instances methods are defined before the include itself
alias :fix_position :hacked_fix_position
alias :descendants :hacked_descendants
end
module ClassMethods
# Warning: should be used only in read-only
def quick_tree(site, minimal_attributes = true)
# Depth-then-position ordering guarantees parents precede children
# in the working list consumed below.
pages = (minimal_attributes ? site.pages.minimal_attributes : site.pages).order_by([[:depth, :asc], [:position, :asc]]).to_a
tmp = []
while !pages.empty?
tmp << _quick_tree(pages.delete_at(0), pages)
end
tmp
end
# Recursively pull current_page's children out of +pages+ and expose
# them through a singleton children accessor (read-only snapshot).
def _quick_tree(current_page, pages)
i, children = 0, []
while !pages.empty?
page = pages[i]
break if page.nil?
if page.parent_id == current_page.id
page = pages.delete_at(i)
children << _quick_tree(page, pages)
else
i += 1
end
end
current_page.instance_eval do
def children=(list); @children = list; end
def children; @children || []; end
end
current_page.children = children
current_page
end
end
module InstanceMethods
# NOTE(review): returns the child *count* (an Integer), not a boolean;
# 0 is truthy in Ruby, so callers must compare against zero.
def children?
self.class.where(self.parent_id_field => self.id).count
end
def children_with_minimal_attributes
self.class.where(self.parent_id_field => self.id).
order_by(self.tree_order).
minimal_attributes
end
# Re-number children to match the given ordered list of ids.
def sort_children!(ids)
ids.each_with_index do |id, position|
child = self.children.detect { |p| p._id == BSON::ObjectId(id) }
child.position = position
child.save
end
end
def parent=(owner) # missing in acts_as_tree
@_parent = owner
self.fix_position(false)
self.instance_variable_set :@_will_move, true
end
# Every page whose materialized path contains this page's id.
def hacked_descendants
return [] if new_record?
self.class.all_in(path_field => [self._id]).order_by tree_order
end
protected
# When the parent changed, recompute path/depth and (unless the page was
# previously a root) push it to the bottom of its new sibling list.
def change_parent
if self.parent_id_changed?
self.fix_position(false)
unless self.parent_id_was.nil?
self.position = nil # make it move to bottom
self.add_to_list_bottom
end
self.instance_variable_set :@_will_move, true
end
end
# Recompute parent_id, materialized path and depth from +parent+.
def hacked_fix_position(perform_save = true)
if parent.nil?
self.write_attribute parent_id_field, nil
self[path_field] = []
self[depth_field] = 0
else
self.write_attribute parent_id_field, parent._id
self[path_field] = parent[path_field] + [parent._id]
self[depth_field] = parent[depth_field] + 1
self.save if perform_save
end
end
# Drop the memoized parent so it is re-fetched after parent_id changes.
def reset_parent
if self.parent_id_changed?
@_parent = nil
end
end
# Default position: one past the current maximum among siblings.
def add_to_list_bottom
self.position ||= (::Page.where(:_id.ne => self._id).and(:parent_id => self.parent_id).max(:position) || 0) + 1
end
# Close the position gap left by this page among its siblings.
# The inline rescue deliberately skips the work when the site is gone.
def remove_from_list
return if (self.site rescue nil).nil?
::Page.where(:parent_id => self.parent_id).and(:position.gt => self.position).each do |p|
p.position -= 1
p.save
end
end
end
end
end
end
Add indexes on :position and [[:depth, :asc], [:position, :asc]] to avoid Mongo::OperationFailure: too much data for sort() with no index
# Tree behaviour for Page documents: position-ordered acts_as_tree with
# callbacks keeping parent_id, materialized path, depth and sibling
# positions consistent.
module Extensions
module Page
module Tree
extend ActiveSupport::Concern
included do
include Mongoid::Acts::Tree
## fields ##
field :position, :type => Integer
## indexes ##
# Support the position / depth+position sorts used by quick_tree, so
# MongoDB does not fail with "too much data for sort() with no index".
index :position
index [[:depth, :asc], [:position, :asc]]
## behaviours ##
acts_as_tree :order => ['position', 'asc']
## callbacks ##
before_validation :reset_parent
before_save { |p| p.send(:write_attribute, :parent_id, nil) if p.parent_id.blank? }
before_save :change_parent
before_create { |p| p.send(:fix_position, false) }
before_create :add_to_list_bottom
before_destroy :remove_from_list
# Fixme (Didier L.): Instances methods are defined before the include itself
alias :fix_position :hacked_fix_position
alias :descendants :hacked_descendants
end
module ClassMethods
# Warning: should be used only in read-only
def quick_tree(site, minimal_attributes = true)
# Depth-then-position ordering guarantees parents precede children
# in the working list consumed below.
pages = (minimal_attributes ? site.pages.minimal_attributes : site.pages).order_by([[:depth, :asc], [:position, :asc]]).to_a
tmp = []
while !pages.empty?
tmp << _quick_tree(pages.delete_at(0), pages)
end
tmp
end
# Recursively pull current_page's children out of +pages+ and expose
# them through a singleton children accessor (read-only snapshot).
def _quick_tree(current_page, pages)
i, children = 0, []
while !pages.empty?
page = pages[i]
break if page.nil?
if page.parent_id == current_page.id
page = pages.delete_at(i)
children << _quick_tree(page, pages)
else
i += 1
end
end
current_page.instance_eval do
def children=(list); @children = list; end
def children; @children || []; end
end
current_page.children = children
current_page
end
end
module InstanceMethods
# NOTE(review): returns the child *count* (an Integer), not a boolean;
# 0 is truthy in Ruby, so callers must compare against zero.
def children?
self.class.where(self.parent_id_field => self.id).count
end
def children_with_minimal_attributes
self.class.where(self.parent_id_field => self.id).
order_by(self.tree_order).
minimal_attributes
end
# Re-number children to match the given ordered list of ids.
def sort_children!(ids)
ids.each_with_index do |id, position|
child = self.children.detect { |p| p._id == BSON::ObjectId(id) }
child.position = position
child.save
end
end
def parent=(owner) # missing in acts_as_tree
@_parent = owner
self.fix_position(false)
self.instance_variable_set :@_will_move, true
end
# Every page whose materialized path contains this page's id.
def hacked_descendants
return [] if new_record?
self.class.all_in(path_field => [self._id]).order_by tree_order
end
protected
# When the parent changed, recompute path/depth and (unless the page was
# previously a root) push it to the bottom of its new sibling list.
def change_parent
if self.parent_id_changed?
self.fix_position(false)
unless self.parent_id_was.nil?
self.position = nil # make it move to bottom
self.add_to_list_bottom
end
self.instance_variable_set :@_will_move, true
end
end
# Recompute parent_id, materialized path and depth from +parent+.
def hacked_fix_position(perform_save = true)
if parent.nil?
self.write_attribute parent_id_field, nil
self[path_field] = []
self[depth_field] = 0
else
self.write_attribute parent_id_field, parent._id
self[path_field] = parent[path_field] + [parent._id]
self[depth_field] = parent[depth_field] + 1
self.save if perform_save
end
end
# Drop the memoized parent so it is re-fetched after parent_id changes.
def reset_parent
if self.parent_id_changed?
@_parent = nil
end
end
# Default position: one past the current maximum among siblings.
def add_to_list_bottom
self.position ||= (::Page.where(:_id.ne => self._id).and(:parent_id => self.parent_id).max(:position) || 0) + 1
end
# Close the position gap left by this page among its siblings.
# The inline rescue deliberately skips the work when the site is gone.
def remove_from_list
return if (self.site rescue nil).nil?
::Page.where(:parent_id => self.parent_id).and(:position.gt => self.position).each do |p|
p.position -= 1
p.save
end
end
end
end
end
end
# Homebrew formula for liblo, a lightweight Open Sound Control (OSC)
# implementation.
class Liblo < Formula
desc "Lightweight Open Sound Control implementation"
homepage "https://liblo.sourceforge.io/"
url "https://downloads.sourceforge.net/project/liblo/liblo/0.31/liblo-0.31.tar.gz"
sha256 "2b4f446e1220dcd624ecd8405248b08b7601e9a0d87a0b94730c2907dbccc750"
license "LGPL-2.1"
livecheck do
url :stable
end
bottle do
cellar :any
sha256 "aac4280d5e147a6baab53c252bbf7cda296fe5bdeceb26d7aa60acb10ecc5444" => :catalina
sha256 "3310110ec91fb412b8d5c727bda03454aebec087d78ebada20bb53ad9582088e" => :mojave
sha256 "034eaec236ee4df490d16db9998ec7a4d88223d929b333c8b08ade641bc74bcb" => :high_sierra
end
# HEAD builds come from the git repo, which needs autotools to bootstrap.
head do
url "https://git.code.sf.net/p/liblo/git.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
end
def install
args = %W[
--disable-debug
--disable-dependency-tracking
--prefix=#{prefix}
]
if build.head?
system "./autogen.sh", *args
else
system "./configure", *args
end
system "make", "install"
end
# Smoke test: ask the library for its version string and compare it with
# the formula's version.
test do
(testpath/"lo_version.c").write <<~EOS
#include <stdio.h>
#include "lo/lo.h"
int main() {
char version[6];
lo_version(version, 6, 0, 0, 0, 0, 0, 0, 0);
printf("%s", version);
return 0;
}
EOS
system ENV.cc, "lo_version.c", "-I#{include}", "-L#{lib}", "-llo", "-o", "lo_version"
lo_version = `./lo_version`
assert_equal version.to_str, lo_version
end
end
liblo: update 0.31 bottle.
# Homebrew formula for liblo, an Open Sound Control (OSC) library.
class Liblo < Formula
  desc "Lightweight Open Sound Control implementation"
  homepage "https://liblo.sourceforge.io/"
  url "https://downloads.sourceforge.net/project/liblo/liblo/0.31/liblo-0.31.tar.gz"
  sha256 "2b4f446e1220dcd624ecd8405248b08b7601e9a0d87a0b94730c2907dbccc750"
  # FIX: "LGPL-2.1" is a deprecated SPDX identifier; liblo is distributed
  # under LGPL v2.1 or (at your option) any later version.
  license "LGPL-2.1-or-later"

  livecheck do
    url :stable
  end

  bottle do
    cellar :any
    sha256 "19eef0619f05faa15a7d5368973dcd3e5ed2e44291b56cc6ff72825fe8879845" => :big_sur
    sha256 "aac4280d5e147a6baab53c252bbf7cda296fe5bdeceb26d7aa60acb10ecc5444" => :catalina
    sha256 "3310110ec91fb412b8d5c727bda03454aebec087d78ebada20bb53ad9582088e" => :mojave
    sha256 "034eaec236ee4df490d16db9998ec7a4d88223d929b333c8b08ade641bc74bcb" => :high_sierra
  end

  head do
    url "https://git.code.sf.net/p/liblo/git.git"

    # HEAD checkouts ship no configure script; autotools generate it.
    depends_on "autoconf" => :build
    depends_on "automake" => :build
    depends_on "libtool" => :build
  end

  def install
    args = %W[
      --disable-debug
      --disable-dependency-tracking
      --prefix=#{prefix}
    ]
    if build.head?
      system "./autogen.sh", *args
    else
      system "./configure", *args
    end
    system "make", "install"
  end

  test do
    # Compile a tiny program that asks the library for its version string
    # and compare against the formula version.
    (testpath/"lo_version.c").write <<~EOS
      #include <stdio.h>
      #include "lo/lo.h"
      int main() {
        char version[6];
        lo_version(version, 6, 0, 0, 0, 0, 0, 0, 0);
        printf("%s", version);
        return 0;
      }
    EOS
    system ENV.cc, "lo_version.c", "-I#{include}", "-L#{lib}", "-llo", "-o", "lo_version"
    lo_version = `./lo_version`
    assert_equal version.to_str, lo_version
  end
end
|
# Capistrano deployment configuration for the Jekyll-based blog.
# config valid only for current version of Capistrano
lock '3.4.0'
set :application, 'blog'
set :repo_url, 'git@github.com:marciotoshio/blog.git'
# Default branch is :master
# ask :branch, `git rev-parse --abbrev-ref HEAD`.chomp
# Default deploy_to directory is /var/www/my_app_name
set :deploy_to, '/var/www/blog'
# Default value for :scm is :git
# set :scm, :git
# Default value for :format is :pretty
# set :format, :pretty
# Default value for :log_level is :debug
# set :log_level, :debug
# Default value for :pty is false
# set :pty, true
# Default value for :linked_files is []
# set :linked_files, fetch(:linked_files, []).push('config/database.yml', 'config/secrets.yml')
# Default value for linked_dirs is []
# set :linked_dirs, fetch(:linked_dirs, []).push('log', 'tmp/pids', 'tmp/cache', 'tmp/sockets', 'vendor/bundle', 'public/system')
# Default value for default_env is {}
# set :default_env, { path: "/opt/ruby/bin:$PATH" }
# Default value for keep_releases is 5
set :keep_releases, 2
#rvm
# System-wide RVM install with a dedicated gemset for this app.
set :rvm_type, :system
set :rvm_ruby_version, 'ruby-2.2.1@blog'
#passenger
set :passenger_restart_with_sudo, true
set :format, :pretty
namespace :deploy do
# Rebuild the static site inside the current release after deploy.
task :update_jekyll do
on roles(:app) do
within "#{deploy_to}/current" do
# NOTE(review): passing a multi-word command string to `execute`
# bypasses SSHKit's command map — verify the rvm wrapper invocation
# behaves as intended on the server.
execute '/usr/local/rvm/bin/rvm ruby-2.2.1@blog do jekyll', 'build'
end
end
end
end
after "deploy:symlink:release", "deploy:update_jekyll"
adjust deploy
# Capistrano deployment configuration for the Jekyll-based blog.
# config valid only for current version of Capistrano
lock '3.4.0'
set :application, 'blog'
set :repo_url, 'git@github.com:marciotoshio/blog.git'
# Default branch is :master
# ask :branch, `git rev-parse --abbrev-ref HEAD`.chomp
# Default deploy_to directory is /var/www/my_app_name
set :deploy_to, '/var/www/blog'
# Default value for :scm is :git
# set :scm, :git
# Default value for :format is :pretty
# set :format, :pretty
# Default value for :log_level is :debug
# set :log_level, :debug
# Default value for :pty is false
# set :pty, true
# Default value for :linked_files is []
# set :linked_files, fetch(:linked_files, []).push('config/database.yml', 'config/secrets.yml')
# Default value for linked_dirs is []
# set :linked_dirs, fetch(:linked_dirs, []).push('log', 'tmp/pids', 'tmp/cache', 'tmp/sockets', 'vendor/bundle', 'public/system')
# Default value for default_env is {}
# set :default_env, { path: "/opt/ruby/bin:$PATH" }
# Default value for keep_releases is 5
set :keep_releases, 2
#rvm
# System-wide RVM install with a dedicated gemset for this app.
set :rvm_type, :system
set :rvm_ruby_version, 'ruby-2.2.1@blog'
#passenger
set :passenger_restart_with_sudo, true
set :format, :pretty
namespace :deploy do
# Rebuild the static site inside the current release after deploy.
# The :jekyll command is mapped by SSHKit (here via the rvm plugin).
task :update_jekyll do
on roles(:app) do
within "#{deploy_to}/current" do
execute :jekyll, 'build'
end
end
end
end
after "deploy:symlink:release", "deploy:update_jekyll"
|
module GiveyRails
  # API-backed model representing a single donation. Attributes are plain
  # accessors populated elsewhere (via GiveyRails::GiveyModel); monetary
  # amounts are integer minor units rendered through the Money gem.
  class Donation
    include GiveyRails::GiveyModel

    attr_accessor :node_type,
                  :user,
                  :donation_type,
                  :donation_string_textile,
                  :donation_string_share,
                  :donation_images_textile,
                  :charity,
                  :channel,
                  :giver_target,
                  :story,
                  :id,
                  :activity_id,
                  :created_at,
                  :donated_at,
                  :user_id,
                  :user_name,
                  :user_thumb_url,
                  :amount,
                  :currency,
                  :charity_name,
                  :charity_id,
                  :any_stories,
                  :channel_id,
                  :channel_name,
                  :channel_givey_tag,
                  :giver_target_id,
                  :giver_target_name,
                  :giver_target_givey_tag,
                  :combined_amount,
                  :applied_rules,
                  :stream_thumb_url,
                  :pp_invoice_id,
                  :canonical_url

    validates_presence_of :amount

    # The entity that received the donation; a channel wins over a charity.
    def donated_entity
      channel || charity
    end

    # Entity to show in the UI: a user-owned channel displays its charity,
    # any other channel displays itself, otherwise fall back to the charity.
    def display_donated_entity
      if channel && channel.user
        channel.charity
      elsif channel
        channel
      else
        charity
      end
    end

    # Amount in major units. NOTE(review): integer division — sub-unit
    # remainders are truncated; confirm that is intended for display.
    def display_amount
      Money.new(amount, currency).cents / 100
    end

    # UK Gift Aid grosses a donation up by 25%.
    def giftaid_amount
      display_amount.to_f * 1.25
    end

    def giftaid_display_amount
      sprintf("%0.2f", giftaid_amount)
    end

    def display_combined_amount
      Money.new(combined_amount, currency).cents / 100
    end

    # default currency
    def currency
      @currency.blank? ? 'GBP' : @currency
    end

    def display_currency
      Money.new(amount, currency).currency
    end

    def donation_string_html
      RedCloth.new(donation_string_textile, [:lite_mode]).to_html
    end

    # Plain-text variant: turn textile links to givey.com profiles into
    # "#tag" mentions.
    # FIX: this version of the class defines no #replaced_str (the
    # placeholder-replacement helper was removed), so the previous body
    # raised NoMethodError; operate on the raw textile string instead,
    # consistent with #donation_string_html above.
    def donation_string_plain
      donation_string_textile.gsub(/\[[^\]]+:http:\/\/givey.com\/(\w+)\]/){|m| "##{$1}" }
    end

    # Wrap each image snippet in a div carrying its CSS classes, then
    # render the whole batch through RedCloth.
    def donation_images_html
      full_string = donation_images_textile.map do |s|
        css_classes = s.match(/\((.+)\)/)[1]
        "div(#{css_classes}). " + s
      end.join("\n\n")
      RedCloth.new(full_string).to_html
    end

    # Format a time donation given in seconds as "H hours and M minutes"
    # (leftover seconds are discarded).
    def time_donation_format(seconds)
      mm, ss = seconds.divmod(60)
      hh, mm = mm.divmod(60)
      "%d hours and %d minutes" % [hh, mm]
    end
  end
end
Revert "Don't bother replacing donation string"
This reverts commit 4b84abbc8720fb7a1db735bdf9b7905c13e3c457.
module GiveyRails
# API-backed model representing a single donation. Attributes are plain
# accessors populated elsewhere (via GiveyRails::GiveyModel); monetary
# amounts are integer minor units rendered through the Money gem.
class Donation
include GiveyRails::GiveyModel
attr_accessor :node_type,
:user,
:donation_type,
:donation_string_textile,
:donation_string_share,
:donation_images_textile,
:charity,
:channel,
:giver_target,
:story,
:id,
:activity_id,
:created_at,
:donated_at,
:user_id,
:user_name,
:user_thumb_url,
:amount,
:currency,
:charity_name,
:charity_id,
:any_stories,
:channel_id,
:channel_name,
:channel_givey_tag,
:giver_target_id,
:giver_target_name,
:giver_target_givey_tag,
:combined_amount,
:applied_rules,
:stream_thumb_url,
:pp_invoice_id,
:canonical_url
validates_presence_of :amount
# The entity that received the donation; a channel wins over a charity.
def donated_entity
channel || charity
end
# Entity to show in the UI: a user-owned channel displays its charity,
# any other channel displays itself, otherwise fall back to the charity.
def display_donated_entity
if channel && channel.user
channel.charity
elsif channel
channel
else
charity
end
end
# Amount in major units. NOTE(review): integer division truncates any
# sub-unit remainder — confirm intended for display.
def display_amount
Money.new(amount, currency).cents / 100
end
# UK Gift Aid grosses a donation up by 25%.
def giftaid_amount
display_amount.to_f * 1.25
end
def giftaid_display_amount
sprintf("%0.2f", giftaid_amount)
end
def display_combined_amount
Money.new(combined_amount, currency).cents / 100
end
# default currency
def currency
@currency.blank? ? 'GBP' : @currency
end
def display_currency
Money.new(amount, currency).currency
end
# Textile rendered to HTML after placeholder substitution (see
# #replaced_str below).
def donation_string_html
RedCloth.new(replaced_str, [:lite_mode]).to_html
end
# Plain-text variant: turn textile links to givey.com profiles into
# "#tag" mentions.
def donation_string_plain
replaced_str.gsub(/\[[^\]]+:http:\/\/givey.com\/(\w+)\]/){|m| "##{$1}" }
end
# Substitute the API's placeholder tokens in the donation string:
# "time-<seconds>-time" becomes a human-readable duration, and
# "amt-<CUR><minor units>-amt" becomes a currency symbol plus major
# units. Strings without placeholders pass through unchanged.
def replaced_str
if donation_string_textile =~ /time-\d+-time/
str_amt = donation_string_textile.match(/time-(\d+)-time/).captures[0].to_i
time_str = time_donation_format(str_amt)
donation_string_textile.gsub(/time-\d+-time/, time_str)
elsif donation_string_textile =~ /amt-.+-amt/
str_cur, str_amt = donation_string_textile.match(/amt-([a-z]{3}|[A-Z]{3})(\d+)-amt/).captures
money_object = Money.new(str_amt, str_cur)
donation_string_textile.gsub(/amt-([a-z]{3}|[A-Z]{3})/, money_object.currency.symbol).gsub(/\d+-amt/, (money_object.cents / 100).to_s)
else
donation_string_textile
end
end
# Wrap each image snippet in a div carrying its CSS classes, then render
# the whole batch through RedCloth.
def donation_images_html
full_string = donation_images_textile.map do |s|
css_classes = s.match(/\((.+)\)/)[1]
"div(#{css_classes}). " + s
end.join("\n\n")
RedCloth.new(full_string).to_html
end
# Format a time donation given in seconds as "H hours and M minutes"
# (leftover seconds are discarded).
def time_donation_format(seconds)
mm, ss = seconds.divmod(60)
hh, mm = mm.divmod(60)
"%d hours and %d minutes" % [hh, mm]
end
end
end
|
# Homebrew formula for libxo (text/XML/JSON/HTML output library).
class Libxo < Formula
desc "Allows an application to generate text, XML, JSON, and HTML output"
homepage "https://juniper.github.io/libxo/libxo-manual.html"
url "https://github.com/Juniper/libxo/releases/download/0.8.4/libxo-0.8.4.tar.gz"
sha256 "3b3a33b706075fdc478782fdc65db3e7f9167550f7d71b95e704236a797e7fd3"
bottle do
sha256 "dc715f036d728b24c98a451fc7a27802e14dc6132d43e4bc484c3f3d0d3eb277" => :high_sierra
sha256 "f70c0997985665361bf4a11e274eebcc8038f44fcc527c3f2ad3cc2b8c9f4d61" => :sierra
sha256 "d6e6bc08ad85bc51405a3d4fdbc6f39fb41e55b46149dbcf93fa5170672a442d" => :el_capitan
end
depends_on "libtool" => :build
def install
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
end
# Compile and run a minimal program against the installed library.
test do
(testpath/"test.c").write <<~EOS
#include <libxo/xo.h>
int main() {
xo_set_flags(NULL, XOF_KEYS);
return 0;
}
EOS
system ENV.cc, "test.c", "-I#{include}", "-L#{lib}", "-lxo", "-o", "test"
system "./test"
end
end
libxo 0.9.0
Closes #28100.
Signed-off-by: ilovezfs <fbd54dbbcf9e596abad4ccdc4dfc17f80ebeaee2@icloud.com>
# Homebrew formula for libxo (text/XML/JSON/HTML output library).
class Libxo < Formula
desc "Allows an application to generate text, XML, JSON, and HTML output"
homepage "https://juniper.github.io/libxo/libxo-manual.html"
url "https://github.com/Juniper/libxo/releases/download/0.9.0/libxo-0.9.0.tar.gz"
sha256 "81fa2843e9d2695b6308a900e52e67d0489979f42e77dae1a5b0c6a4c584fc63"
# NOTE(review): these bottle sha256s are identical to the previous
# release's — presumably the CI rebuilds them after the version bump;
# verify they get regenerated.
bottle do
sha256 "dc715f036d728b24c98a451fc7a27802e14dc6132d43e4bc484c3f3d0d3eb277" => :high_sierra
sha256 "f70c0997985665361bf4a11e274eebcc8038f44fcc527c3f2ad3cc2b8c9f4d61" => :sierra
sha256 "d6e6bc08ad85bc51405a3d4fdbc6f39fb41e55b46149dbcf93fa5170672a442d" => :el_capitan
end
depends_on "libtool" => :build
def install
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
end
# Compile and run a minimal program against the installed library.
test do
(testpath/"test.c").write <<~EOS
#include <libxo/xo.h>
int main() {
xo_set_flags(NULL, XOF_KEYS);
return 0;
}
EOS
system ENV.cc, "test.c", "-I#{include}", "-L#{lib}", "-lxo", "-o", "test"
system "./test"
end
end
|
# Capistrano deployment configuration for the scoobar application.
# config valid only for current version of Capistrano
lock '3.3.3'
set :application, 'scoobar'
set :repo_url, 'https://github.com/hwuethrich/scoobar.git'
# Default branch is :master
# ask :branch, proc { `git rev-parse --abbrev-ref HEAD`.chomp }.call
# Default deploy_to directory is /var/www/my_app_name
# set :deploy_to, '/var/www/my_app_name'
# Default value for :scm is :git
# set :scm, :git
# Default value for :format is :pretty
# set :format, :pretty
# Default value for :log_level is :debug
# set :log_level, :debug
# Default value for :pty is false
# set :pty, true
# Default value for :linked_files is []
# Shared files/dirs symlinked into every release (kept out of git).
set :linked_files, fetch(:linked_files, []).push('config/secrets.yml')
# Default value for linked_dirs is []
set :linked_dirs, fetch(:linked_dirs, []).push('bin', 'log', 'tmp/pids', 'tmp/cache', 'tmp/sockets', 'vendor/bundle', 'public/system')
# Default value for default_env is {}
# set :default_env, { path: "/opt/ruby/bin:$PATH" }
# Default value for keep_releases is 5
# set :keep_releases, 5
namespace :deploy do
# Placeholder hook kept from the Capistrano template; currently a no-op.
after :restart, :clear_cache do
on roles(:web), in: :groups, limit: 3, wait: 10 do
# Here we can do anything such as:
# within release_path do
# execute :rake, 'cache:clear'
# end
end
end
end
Use REVISION env variable from semaphore
# Capistrano deployment configuration for the scoobar application.
# config valid only for current version of Capistrano
lock '3.3.3'
set :application, 'scoobar'
set :repo_url, 'https://github.com/hwuethrich/scoobar.git'
# Default branch is :master
# Deploy the revision/branch the CI (Semaphore) exports, defaulting to master.
set :branch, ENV['REVISION'] || ENV['BRANCH_NAME'] || 'master'
# Default deploy_to directory is /var/www/my_app_name
# set :deploy_to, '/var/www/my_app_name'
# Default value for :scm is :git
# set :scm, :git
# Default value for :format is :pretty
# set :format, :pretty
# Default value for :log_level is :debug
# set :log_level, :debug
# Default value for :pty is false
# set :pty, true
# Default value for :linked_files is []
# Shared files/dirs symlinked into every release (kept out of git).
set :linked_files, fetch(:linked_files, []).push('config/secrets.yml')
# Default value for linked_dirs is []
set :linked_dirs, fetch(:linked_dirs, []).push('bin', 'log', 'tmp/pids', 'tmp/cache', 'tmp/sockets', 'vendor/bundle', 'public/system')
# Default value for default_env is {}
# set :default_env, { path: "/opt/ruby/bin:$PATH" }
# Default value for keep_releases is 5
# set :keep_releases, 5
namespace :deploy do
# Placeholder hook kept from the Capistrano template; currently a no-op.
after :restart, :clear_cache do
on roles(:web), in: :groups, limit: 3, wait: 10 do
# Here we can do anything such as:
# within release_path do
# execute :rake, 'cache:clear'
# end
end
end
end
|
# Synchronises CITES/EU taxon concepts with the IUCN Red List web service,
# recording each match (or attempted match) in an IucnMapping row.
class IucnMappingManager
  class << self
    # Walk every accepted (name_status 'A') species/subspecies in the
    # CITES_EU taxonomy and try to map it — or one of its synonyms — to an
    # IUCN Red List record.
    def sync
      config_location = Rails.root.join('config/secrets.yml')
      config = YAML.load_file(config_location)[Rails.env]
      @token = config['iucn_redlist']['token']
      @url = config['iucn_redlist']['url']
      species = Rank.where(:name => Rank::SPECIES).first
      subspecies = Rank.where(:name => Rank::SUBSPECIES).first
      taxonomy = Taxonomy.where(:name => Taxonomy::CITES_EU).first
      TaxonConcept.where(:rank_id => [species.id, subspecies.id], :name_status => 'A',
        :taxonomy_id => taxonomy.id).each do |tc|
        map = IucnMapping.find_or_create_by_taxon_concept_id(tc.id)
        # IUCN expects subspecies in the "Genus species ssp. epithet" form.
        full_name = if tc.rank_id == subspecies.id
          tc.full_name.insert(tc.full_name.rindex(/ /), " ssp.")
        else
          tc.full_name
        end
        data = fetch_data_for_name full_name
        if data["result"].empty?
          puts "#{tc.full_name} NO MATCH trying synonyms"
          match_on_synonyms tc, map, subspecies.id
        else
          map_taxon_concept tc, map, data
        end
      end
    end

    # Fall back to the concept's synonyms; stops at the first synonym name
    # that returns any result from the IUCN API.
    def match_on_synonyms tc, map, subspecies_rank_id
      tc.synonyms.each do |syn|
        full_name = if syn.rank_id == subspecies_rank_id
          syn.full_name.insert(syn.full_name.rindex(/ /), " ssp.")
        else
          syn.full_name
        end
        data = fetch_data_for_name full_name
        if data["result"] && !data["result"].empty?
          map_taxon_concept tc, map, data, syn
          return
        end
      end
    end

    # GET the IUCN API for a scientific name and return the parsed JSON.
    # NOTE(review): URI.escape is deprecated (removed in Ruby 3) — replace
    # with Addressable or URI::DEFAULT_PARSER.escape when upgrading.
    def fetch_data_for_name full_name
      url = URI.escape("#{@url}#{full_name.downcase}?token=#{@token}")
      JSON.parse(RestClient.get(url))
    end

    # Persist the first IUCN result onto the mapping row, recording how the
    # match was made and how many candidates the API returned.
    def map_taxon_concept tc, map, data, synonym=nil
      begin
        match = data["result"].first
        puts "#{tc.full_name} #{tc.author_year} <=> #{match["scientific_name"]} #{match["authority"]}"
        map.update_attributes(
          :iucn_taxon_name => match['scientific_name'],
          :iucn_taxon_id => match['taxonid'],
          :iucn_author => match['authority'],
          :iucn_category => match['category'],
          :details => {
            :match => type_of_match(tc, match, synonym),
            :no_matches => data["result"].size
          },
          :synonym_id => synonym.try(:id)
        )
      # FIX: rescue StandardError, not Exception — rescuing Exception also
      # swallows SignalException/SystemExit.
      rescue StandardError => e
        puts "#######################################################################"
        puts "########################## EXCEPTION Taxon Concept #{tc.id} ###########"
        puts e.message
      end
    end

    # Classify how the IUCN record was matched; returns nil when neither
    # the concept name nor the synonym name matches.
    def type_of_match tc, match, synonym
      if tc.full_name == match["scientific_name"]
        if strip_authors(tc.author_year) == strip_authors(match["authority"])
          puts "FULL_MATCH!"
          "FULL_MATCH"
        else
          puts "NAME_MATCH"
          "NAME_MATCH"
        end
      # FIX: guard against a nil synonym — map_taxon_concept is called
      # without one on every direct match, so a non-matching name used to
      # raise NoMethodError here.
      elsif synonym && synonym.full_name == match["scientific_name"]
        if strip_authors(synonym.author_year) == strip_authors(match["authority"])
          puts "FULL_SYNONYM_MATCH"
          "FULL_SYNONYM_MATCH"
        else
          puts "SYNONYM_MATCH"
          "SYNONYM_MATCH"
        end
      end
    end

    # Remove conjunctions so authority strings compare loosely.
    def strip_authors author
      author.split(" ").
        reject{|p| ["and", "&", "&amp;", ","].include?(p)}.
        join(" ")
    end
  end
end
Reshuffle things a bit, making it possible to fetch the mapping information for a single taxon concept
# Synchronises CITES/EU taxon concepts with the IUCN Red List web service,
# recording each match (or attempted match) in an IucnMapping row.
class IucnMappingManager
  class << self
    # Walk every accepted (name_status 'A') species/subspecies in the
    # CITES_EU taxonomy and sync each concept individually.
    def sync
      config_location = Rails.root.join('config/secrets.yml')
      config = YAML.load_file(config_location)[Rails.env]
      @token = config['iucn_redlist']['token']
      @url = config['iucn_redlist']['url']
      species = Rank.where(:name => Rank::SPECIES).first
      @subspecies = Rank.where(:name => Rank::SUBSPECIES).first
      taxonomy = Taxonomy.where(:name => Taxonomy::CITES_EU).first
      TaxonConcept.where(:rank_id => [species.id, @subspecies.id], :name_status => 'A',
        :taxonomy_id => taxonomy.id).each do |taxon_concept|
        sync_taxon_concept taxon_concept
      end
    end

    # Map a single taxon concept — or one of its synonyms — to an IUCN
    # record. NOTE(review): relies on @token/@url/@subspecies set by #sync;
    # calling this on its own requires that setup to have run.
    def sync_taxon_concept taxon_concept
      mapping = IucnMapping.find_or_create_by_taxon_concept_id(taxon_concept.id)
      # IUCN expects subspecies in the "Genus species ssp. epithet" form.
      full_name = if taxon_concept.rank_id == @subspecies.id
        taxon_concept.full_name.insert(taxon_concept.full_name.rindex(/ /), " ssp.")
      else
        taxon_concept.full_name
      end
      data = fetch_data_for_name full_name
      if data["result"].empty?
        puts "#{taxon_concept.full_name} NO MATCH trying synonyms"
        match_on_synonyms taxon_concept, mapping
      else
        map_taxon_concept taxon_concept, mapping, data
      end
    end

    # Fall back to the concept's synonyms; stops at the first synonym name
    # that returns any result from the IUCN API.
    def match_on_synonyms taxon_concept, map
      taxon_concept.synonyms.each do |syn|
        full_name = if syn.rank_id == @subspecies.id
          syn.full_name.insert(syn.full_name.rindex(/ /), " ssp.")
        else
          syn.full_name
        end
        data = fetch_data_for_name full_name
        if data["result"] && !data["result"].empty?
          map_taxon_concept taxon_concept, map, data, syn
          return
        end
      end
    end

    # GET the IUCN API for a scientific name and return the parsed JSON.
    # NOTE(review): URI.escape is deprecated (removed in Ruby 3) — replace
    # with Addressable or URI::DEFAULT_PARSER.escape when upgrading.
    def fetch_data_for_name full_name
      url = URI.escape("#{@url}#{full_name.downcase}?token=#{@token}")
      JSON.parse(RestClient.get(url))
    end

    # Persist the first IUCN result onto the mapping row, recording how the
    # match was made and how many candidates the API returned.
    def map_taxon_concept taxon_concept, map, data, synonym=nil
      begin
        match = data["result"].first
        puts "#{taxon_concept.full_name} #{taxon_concept.author_year} <=> #{match["scientific_name"]} #{match["authority"]}"
        map.update_attributes(
          :iucn_taxon_name => match['scientific_name'],
          :iucn_taxon_id => match['taxonid'],
          :iucn_author => match['authority'],
          :iucn_category => match['category'],
          :details => {
            :match => type_of_match(taxon_concept, match, synonym),
            :no_matches => data["result"].size
          },
          :synonym_id => synonym.try(:id)
        )
      # FIX: rescue StandardError, not Exception — rescuing Exception also
      # swallows SignalException/SystemExit.
      rescue StandardError => e
        puts "#######################################################################"
        puts "########################## EXCEPTION Taxon Concept #{taxon_concept.id} ###########"
        puts e.message
      end
    end

    # Classify how the IUCN record was matched; returns nil when neither
    # the concept name nor the synonym name matches.
    def type_of_match tc, match, synonym
      if tc.full_name == match["scientific_name"]
        if strip_authors(tc.author_year) == strip_authors(match["authority"])
          puts "FULL_MATCH!"
          "FULL_MATCH"
        else
          puts "NAME_MATCH"
          "NAME_MATCH"
        end
      # FIX: guard against a nil synonym — map_taxon_concept is called
      # without one on every direct match, so a non-matching name used to
      # raise NoMethodError here.
      elsif synonym && synonym.full_name == match["scientific_name"]
        if strip_authors(synonym.author_year) == strip_authors(match["authority"])
          puts "FULL_SYNONYM_MATCH"
          "FULL_SYNONYM_MATCH"
        else
          puts "SYNONYM_MATCH"
          "SYNONYM_MATCH"
        end
      end
    end

    # Remove conjunctions so authority strings compare loosely.
    def strip_authors author
      author.split(" ").
        reject{|p| ["and", "&", "&amp;", ","].include?(p)}.
        join(" ")
    end
  end
end
|
# Homebrew formula for libXt, the X Toolkit Intrinsics library.
class Libxt < Formula
desc "X.Org: X Toolkit Intrinsics library"
homepage "https://www.x.org/"
url "https://www.x.org/archive/individual/lib/libXt-1.2.1.tar.bz2"
sha256 "679cc08f1646dbd27f5e48ffe8dd49406102937109130caab02ca32c083a3d60"
license "MIT"
bottle do
cellar :any
sha256 "db76d4efdf96e00af7b4230245b81c26539b4ec20e93c1d379a3c92b39217885" => :big_sur
sha256 "0fd900ad5097946ee343ba7c15311a3b85540dcd058233e05f198c15405b9da0" => :arm64_big_sur
sha256 "a1bcc92d37e1602ef760fecb79f7729db7e38aee2835879689596d478480217b" => :catalina
sha256 "4bd6052344cc12c674d40f44c31083264f5ce097ec0d2f6111e726862a8a2b04" => :mojave
end
depends_on "pkg-config" => :build
depends_on "libice"
depends_on "libsm"
depends_on "libx11"
def install
args = %W[
--prefix=#{prefix}
--sysconfdir=#{etc}
--localstatedir=#{var}
--with-appdefaultdir=#{etc}/X11/app-defaults
--disable-dependency-tracking
--disable-silent-rules
--enable-specs=no
]
system "./configure", *args
system "make"
system "make", "install"
end
# Compile a minimal program against the private Intrinsics headers.
test do
(testpath/"test.c").write <<~EOS
#include "X11/IntrinsicP.h"
#include "X11/CoreP.h"
int main(int argc, char* argv[]) {
CoreClassPart *range;
return 0;
}
EOS
system ENV.cc, "test.c"
assert_equal 0, $CHILD_STATUS.exitstatus
end
end
libxt: update 1.2.1 bottle.
# Homebrew formula for libXt, the X Toolkit Intrinsics library.
class Libxt < Formula
desc "X.Org: X Toolkit Intrinsics library"
homepage "https://www.x.org/"
url "https://www.x.org/archive/individual/lib/libXt-1.2.1.tar.bz2"
sha256 "679cc08f1646dbd27f5e48ffe8dd49406102937109130caab02ca32c083a3d60"
license "MIT"
bottle do
# NOTE(review): :any_skip_relocation is unusual for a formula that ships
# a shared library — confirm the bottles really need no relocation.
cellar :any_skip_relocation
sha256 "db76d4efdf96e00af7b4230245b81c26539b4ec20e93c1d379a3c92b39217885" => :big_sur
sha256 "0fd900ad5097946ee343ba7c15311a3b85540dcd058233e05f198c15405b9da0" => :arm64_big_sur
sha256 "a1bcc92d37e1602ef760fecb79f7729db7e38aee2835879689596d478480217b" => :catalina
sha256 "4bd6052344cc12c674d40f44c31083264f5ce097ec0d2f6111e726862a8a2b04" => :mojave
sha256 "80cf4172680aeb30f00d79f4058df28915598ecc9cfed5e63e447b9b63507dbb" => :x86_64_linux
end
depends_on "pkg-config" => :build
depends_on "libice"
depends_on "libsm"
depends_on "libx11"
def install
args = %W[
--prefix=#{prefix}
--sysconfdir=#{etc}
--localstatedir=#{var}
--with-appdefaultdir=#{etc}/X11/app-defaults
--disable-dependency-tracking
--disable-silent-rules
--enable-specs=no
]
system "./configure", *args
system "make"
system "make", "install"
end
# Compile a minimal program against the private Intrinsics headers.
test do
(testpath/"test.c").write <<~EOS
#include "X11/IntrinsicP.h"
#include "X11/CoreP.h"
int main(int argc, char* argv[]) {
CoreClassPart *range;
return 0;
}
EOS
system ENV.cc, "test.c"
assert_equal 0, $CHILD_STATUS.exitstatus
end
end
|
# Capistrano deployment configuration for the ldt application
# (unicorn-served Rails app deployed over SSH agent forwarding).
# config valid only for Capistrano 3.1
lock '3.2.1'
set :application, 'ldt'
set :repo_url, 'git@github.com:allantokuda/ldt.git'
set :ssh_options, { :forward_agent => true }
# Default branch is :master
# ask :branch, proc { `git rev-parse --abbrev-ref HEAD`.chomp }.call
# Default deploy_to directory is /var/www/my_app
set :deploy_to, "/home/ruby/#{fetch(:application)}"
# Default value for :scm is :git
# set :scm, :git
# Default value for :format is :pretty
# set :format, :pretty
# Default value for :log_level is :debug
# set :log_level, :debug
# Default value for :pty is false
# set :pty, true
# Default value for :linked_files is []
set :linked_files, %w{config/database.yml}
# Default value for linked_dirs is []
set :linked_dirs, %w{bin log tmp/pids tmp/cache tmp/sockets vendor/bundle public/system sockets}
# Default value for default_env is {}
# set :default_env, { path: "/opt/ruby/bin:$PATH" }
# Default value for keep_releases is 5
# set :keep_releases, 5
namespace :deploy do
desc 'Restart application'
# Restart unicorn on each app server after the new release is published.
task :restart do
on roles(:app), in: :sequence, wait: 5 do
invoke 'unicorn:restart'
end
end
after :publishing, :restart
# Placeholder hook kept from the Capistrano template; currently a no-op.
after :restart, :clear_cache do
on roles(:web), in: :groups, limit: 3, wait: 10 do
# Here we can do anything such as:
# within release_path do
# execute :rake, 'cache:clear'
# end
end
end
end
Allow latest Capistrano version
# Capistrano deployment configuration for the ldt application
# (unicorn-served Rails app deployed over SSH agent forwarding).
# config valid only for this Capistrano version
lock '3.14.1'
set :application, 'ldt'
set :repo_url, 'git@github.com:allantokuda/ldt.git'
set :ssh_options, { :forward_agent => true }
# Default branch is :master
# ask :branch, proc { `git rev-parse --abbrev-ref HEAD`.chomp }.call
# Default deploy_to directory is /var/www/my_app
set :deploy_to, "/home/ruby/#{fetch(:application)}"
# Default value for :scm is :git
# set :scm, :git
# Default value for :format is :pretty
# set :format, :pretty
# Default value for :log_level is :debug
# set :log_level, :debug
# Default value for :pty is false
# set :pty, true
# Default value for :linked_files is []
set :linked_files, %w{config/database.yml}
# Default value for linked_dirs is []
set :linked_dirs, %w{bin log tmp/pids tmp/cache tmp/sockets vendor/bundle public/system sockets}
# Default value for default_env is {}
# set :default_env, { path: "/opt/ruby/bin:$PATH" }
# Default value for keep_releases is 5
# set :keep_releases, 5
namespace :deploy do
desc 'Restart application'
# Restart unicorn on each app server after the new release is published.
task :restart do
on roles(:app), in: :sequence, wait: 5 do
invoke 'unicorn:restart'
end
end
after :publishing, :restart
# Placeholder hook kept from the Capistrano template; currently a no-op.
after :restart, :clear_cache do
on roles(:web), in: :groups, limit: 3, wait: 10 do
# Here we can do anything such as:
# within release_path do
# execute :rake, 'cache:clear'
# end
end
end
end
|
Fixed legislation proposal title length in form
|
# Homebrew formula for lldpd, an LLDP (IEEE 802.1ab) daemon.
class Lldpd < Formula
desc "Implementation of IEEE 802.1ab (LLDP)"
homepage "https://vincentbernat.github.io/lldpd/"
url "https://media.luffy.cx/files/lldpd/lldpd-0.9.4.tar.gz"
sha256 "eb1f5beff2ff5c13c5e0342b5b9da815ed4a63866262445e1168a79ee65c9079"
revision 2
bottle do
sha256 "68513191e42cb6c5b8c0b17eb07d553d1d5b9f949dc82ba3e3c94ab02907820b" => :sierra
sha256 "91db17ee1b90ebfe754dce063443d6ce1e0315b3b6b202685773983be3250f07" => :el_capitan
sha256 "b2810c86f3cafe0d9771bb56fcc93b05189f2842e77c82ff159266ca33ba1b05" => :yosemite
end
option "with-snmp", "Build SNMP subagent support"
option "with-json", "Build JSON support for lldpcli"
depends_on "pkg-config" => :build
depends_on "readline"
depends_on "libevent"
depends_on "net-snmp" if build.with? "snmp"
depends_on "jansson" if build.with? "json"
def install
readline = Formula["readline"]
args = [
"--prefix=#{prefix}",
"--sysconfdir=#{etc}",
"--localstatedir=#{var}",
"--with-xml",
"--with-readline",
"--with-privsep-chroot=/var/empty",
"--with-privsep-user=nobody",
"--with-privsep-group=nogroup",
"--with-launchddaemonsdir=no",
"CPPFLAGS=-I#{readline.include} -DRONLY=1",
"LDFLAGS=-L#{readline.lib}",
]
args << (build.with?("snmp") ? "--with-snmp" : "--without-snmp")
args << (build.with?("json") ? "--with-json" : "--without-json")
system "./configure", *args
system "make"
system "make", "install"
end
# lldpd writes its pid/socket under var/run; make sure it exists.
def post_install
(var/"run").mkpath
end
plist_options :startup => true
# launchd job that keeps lldpd running at boot; the -x flag enables the
# SNMP subagent when the formula was built --with-snmp.
def plist
additional_args = ""
if build.with? "snmp"
additional_args += "<string>-x</string>"
end
<<-EOS.undent
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_sbin}/lldpd</string>
#{additional_args}
</array>
<key>RunAtLoad</key><true/>
<key>KeepAlive</key><true/>
</dict>
</plist>
EOS
end
end
lldpd 0.9.6
Closes #10893.
Signed-off-by: Tomasz Pajor <ea73344294b1c6e2cb529d7fc98a4971de7607ac@polishgeeks.com>
# Homebrew formula for lldpd, an LLDP (IEEE 802.1ab) daemon.
class Lldpd < Formula
desc "Implementation of IEEE 802.1ab (LLDP)"
homepage "https://vincentbernat.github.io/lldpd/"
url "https://media.luffy.cx/files/lldpd/lldpd-0.9.6.tar.gz"
sha256 "e74e2dd7e2a233ca1ff385c925ddae2a916d302819d1433741407d2f8fb0ddd8"
# NOTE(review): these bottle sha256s are identical to the previous
# release's — presumably the CI rebuilds them after the version bump;
# verify they get regenerated.
bottle do
sha256 "68513191e42cb6c5b8c0b17eb07d553d1d5b9f949dc82ba3e3c94ab02907820b" => :sierra
sha256 "91db17ee1b90ebfe754dce063443d6ce1e0315b3b6b202685773983be3250f07" => :el_capitan
sha256 "b2810c86f3cafe0d9771bb56fcc93b05189f2842e77c82ff159266ca33ba1b05" => :yosemite
end
option "with-snmp", "Build SNMP subagent support"
option "with-json", "Build JSON support for lldpcli"
depends_on "pkg-config" => :build
depends_on "readline"
depends_on "libevent"
depends_on "net-snmp" if build.with? "snmp"
depends_on "jansson" if build.with? "json"
def install
readline = Formula["readline"]
args = [
"--prefix=#{prefix}",
"--sysconfdir=#{etc}",
"--localstatedir=#{var}",
"--with-xml",
"--with-readline",
"--with-privsep-chroot=/var/empty",
"--with-privsep-user=nobody",
"--with-privsep-group=nogroup",
"--with-launchddaemonsdir=no",
"CPPFLAGS=-I#{readline.include} -DRONLY=1",
"LDFLAGS=-L#{readline.lib}",
]
args << (build.with?("snmp") ? "--with-snmp" : "--without-snmp")
args << (build.with?("json") ? "--with-json" : "--without-json")
system "./configure", *args
system "make"
system "make", "install"
end
# lldpd writes its pid/socket under var/run; make sure it exists.
def post_install
(var/"run").mkpath
end
plist_options :startup => true
# launchd job that keeps lldpd running at boot; the -x flag enables the
# SNMP subagent when the formula was built --with-snmp.
def plist
additional_args = ""
if build.with? "snmp"
additional_args += "<string>-x</string>"
end
<<-EOS.undent
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_sbin}/lldpd</string>
#{additional_args}
</array>
<key>RunAtLoad</key><true/>
<key>KeepAlive</key><true/>
</dict>
</plist>
EOS
end
end
|
# coding: utf-8
# Gem specification for WatchBuild, a fastlane-family tool that waits for
# an iTunes Connect build to finish processing and notifies the developer.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'watchbuild/version'
Gem::Specification.new do |spec|
spec.name = "watchbuild"
spec.version = WatchBuild::VERSION
spec.authors = ["Felix Krause"]
spec.email = ["watchbuild@krausefx.com"]
spec.summary = WatchBuild::DESCRIPTION
spec.description = WatchBuild::DESCRIPTION
spec.homepage = "https://fastlane.tools"
spec.license = "MIT"
spec.required_ruby_version = '>= 2.0.0'
spec.files = Dir["lib/**/*"] + %w( bin/watchbuild README.md LICENSE )
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency "fastlane_core", ">= 0.52.1", "< 1.0.0" # all shared code and dependencies
spec.add_dependency "spaceship", ">= 0.35.0", "< 1.0.0" # communication with Apple
spec.add_dependency 'terminal-notifier' # show a notification once the build is ready
# Development only
spec.add_development_dependency 'bundler'
spec.add_development_dependency 'rake'
spec.add_development_dependency 'rspec', '~> 3.1.0'
spec.add_development_dependency 'rspec_junit_formatter', '~> 0.2.3'
spec.add_development_dependency 'pry'
spec.add_development_dependency 'yard', '~> 0.8.7.4'
spec.add_development_dependency 'webmock', '~> 1.19.0'
spec.add_development_dependency 'coveralls'
spec.add_development_dependency 'fastlane'
spec.add_development_dependency "rubocop", '~> 0.38.0'
end
Update rubocop and update styling rules (#6573)
# coding: utf-8
# Gem specification for WatchBuild, a fastlane-family tool that waits for
# an iTunes Connect build to finish processing and notifies the developer.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'watchbuild/version'
Gem::Specification.new do |spec|
spec.name = "watchbuild"
spec.version = WatchBuild::VERSION
spec.authors = ["Felix Krause"]
spec.email = ["watchbuild@krausefx.com"]
spec.summary = WatchBuild::DESCRIPTION
spec.description = WatchBuild::DESCRIPTION
spec.homepage = "https://fastlane.tools"
spec.license = "MIT"
spec.required_ruby_version = '>= 2.0.0'
spec.files = Dir["lib/**/*"] + %w( bin/watchbuild README.md LICENSE )
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency "fastlane_core", ">= 0.52.1", "< 1.0.0" # all shared code and dependencies
spec.add_dependency "spaceship", ">= 0.35.0", "< 1.0.0" # communication with Apple
spec.add_dependency 'terminal-notifier' # show a notification once the build is ready
# Development only
spec.add_development_dependency 'bundler'
spec.add_development_dependency 'rake'
spec.add_development_dependency 'rspec', '~> 3.1.0'
spec.add_development_dependency 'rspec_junit_formatter', '~> 0.2.3'
spec.add_development_dependency 'pry'
spec.add_development_dependency 'yard', '~> 0.8.7.4'
spec.add_development_dependency 'webmock', '~> 1.19.0'
spec.add_development_dependency 'coveralls'
spec.add_development_dependency 'fastlane'
spec.add_development_dependency 'rubocop', '~> 0.44.0'
end
|
module MiqReport::Generator
extend ActiveSupport::Concern
include_concern 'Aggregation'
include_concern 'Async'
include_concern 'Html'
include_concern 'Sorting'
include_concern 'Trend'
include_concern 'Utilization'
# [human label, column-name suffix] pairs used to break report rows by
# date/time granularity.
# NOTE(review): _() runs at class-load time, so labels are fixed in the
# locale active when this file loads — confirm that is intended.
DATE_TIME_BREAK_SUFFIXES = [
[_("Hour"), "hour"],
[_("Day"), "day"],
[_("Week"), "week"],
[_("Month"), "month"],
[_("Quarter"), "quarter"],
[_("Year"), "year"],
[_("Hour of the Day"), "hour_of_day"],
[_("Day of the Week"), "day_of_week"],
[_("Day of the Month"), "day_of_month"],
[_("Week of the Year"), "week_of_year"],
[_("Month of the Year"), "month_of_year"]
].freeze
module ClassMethods
def date_time_break_suffixes
DATE_TIME_BREAK_SUFFIXES
end
# Break suffixes applicable to a column: date columns get everything
# except the hour-based groupings, datetime columns get all of them,
# any other column type gets none.
def get_col_break_suffixes(col)
col_type = MiqExpression.parse_field_or_tag(col).try(:column_type)
case col_type
when :date
date_time_break_suffixes.select { |_name, suffix| !suffix.to_s.starts_with?("hour") }
when :datetime
date_time_break_suffixes
else
[]
end
end
# Just the suffix strings, without the human labels.
def all_break_suffixes
date_time_break_suffixes.collect(&:last)
end
def is_break_suffix?(suffix)
all_break_suffixes.include?(suffix)
end
# Queue timeout for report generation, read from Settings (supports
# values like "10.minutes" via to_i_with_method).
def default_queue_timeout
::Settings.reporting.queue_timeout.to_i_with_method
end
end
# Convert a report column path (e.g. "hardware.cpu_speed") into an
# MiqExpression field identifier of the form "Class-column", using the
# last two path segments; a bare column name is qualified with the
# report's own db table.
def col_to_expression_col(col)
  segments = col.split(".")
  table, col = segments.length == 1 ? [db, col] : segments.last(2)
  "#{table2class(table)}-#{col}"
end
# Resolve a table/association name to its model class name, memoized
# per-instance. Device sub-tables map to "GuestDevice"; the special
# "<compare>" pseudo-table maps to this report class; anything else is
# resolved through the db_class reflections, falling back to a
# camelized singular of the table name.
def table2class(table)
  @table2class ||= {}
  @table2class[table] ||=
    case table.to_sym
    when :ports, :nics, :storage_adapters
      "GuestDevice"
    when :"<compare>"
      self.class.name
    else
      reflection = db_class.reflection_with_virtual(table.to_sym)
      reflection ? reflection.class_name : table.singularize.camelize
    end
end
# Includes hash for ActiveRecord finds: the report's declared includes
# (or ones invented from col_order) merged with any explicit
# include_for_find, or nil when the merge comes out empty.
def get_include_for_find
  base = include_as_hash(include.presence || invent_report_includes)
  base.deep_merge(include_for_find || {}).presence
end
# Includes hash derived purely from the column-order based report includes.
def invent_includes
include_as_hash(invent_report_includes)
end
# would like this format to go away
# will go away when we drop build_reportable_data
# Reconstruct a legacy includes structure from dotted col_order entries:
# {"assoc" => {"columns" => [...], "assoc2" => {...}}}. Tag ("managed.")
# and virtual-custom columns are excluded.
def invent_report_includes
  return {} unless col_order
  col_order.each_with_object({}) do |col, result|
    next unless col.include?(".")
    next if col =~ /managed\./ || col =~ /virtual_custom/
    *associations, column = col.split(".")
    node = associations.inject(result) { |h, assoc| h[assoc] ||= {} }
    (node["columns"] ||= []) << column
  end
end
# Convert the report's legacy includes structure (nested Hash with
# "columns"/"include" keys, or a flat Array) into a symbol-keyed nested
# hash suitable for ActiveRecord :include. Also adds any non-SQL virtual
# attributes from klass_cols so they get loaded. The :managed pseudo
# association is translated to :tags.
def include_as_hash(includes = include, klass = db_class, klass_cols = cols)
result = {}
if klass_cols && klass && klass.respond_to?(:virtual_attribute?)
klass_cols.each do |c|
# Virtual attributes that cannot be computed in SQL must be included explicitly
result[c.to_sym] = {} if klass.virtual_attribute?(c) && !klass.attribute_supported_by_sql?(c)
end
end
if includes.kind_of?(Hash)
includes.each do |k, v|
k = k.to_sym
if k == :managed
result[:tags] = {}
else
assoc_reflection = klass.reflect_on_association(k)
# For polymorphic associations the target class is unknown, so recurse with the symbol
assoc_klass = (assoc_reflection.options[:polymorphic] ? k : assoc_reflection.klass) if assoc_reflection
result[k] = include_as_hash(v && v["include"], assoc_klass, v && v["columns"])
end
end
elsif includes.kind_of?(Array)
includes.each { |i| result[i.to_sym] = {} }
end
result
end
# Queue (or, when report_sync is set, run inline) the generation of this
# report's table. Creates the MiqTask and MiqReportResult rows up front,
# records an audit event, and returns the MiqReportResult id.
def queue_generate_table(options = {})
options[:userid] ||= "system"
options[:mode] ||= "async"
options[:report_source] ||= "Requested by user"
# :report_sync (or the product-wide setting) forces synchronous generation
sync = options.delete(:report_sync) || ::Settings.product.report_sync
task = MiqTask.create(:name => "Generate Report: '#{name}'", :userid => options[:userid])
report_result = MiqReportResult.create(
:name => title,
:userid => options[:userid],
:report_source => options[:report_source],
:db => db,
:miq_report_id => id,
:miq_task_id => task.id
)
AuditEvent.success(
:event => "generate_table",
:target_class => self.class.base_class.name,
:target_id => id,
:userid => options[:userid],
:message => "#{task.name}, successfully initiated"
)
task.update_status("Queued", "Ok", "Task has been queued")
if sync
_async_generate_table(task.id, options)
else
MiqQueue.submit_job(
:service => "reporting",
:class_name => self.class.name,
:instance_id => id,
:method_name => "_async_generate_table",
:args => [task.id, options],
:msg_timeout => queue_timeout
)
end
report_result.id
end
# Run _generate_table under the appropriate user context: a User object
# (:user), a userid string (:userid, possibly adhoc-encoded), or no
# user scoping at all.
def generate_table(options = {})
  return User.with_user(options[:user]) { _generate_table(options) } if options[:user]
  return _generate_table(options) unless options[:userid]

  userid = MiqReportResult.parse_userid(options[:userid])
  user = User.find_by_userid(userid)
  User.with_user(user, userid) { _generate_table(options) }
end
# Dispatch report generation to the right results strategy (custom
# method, performance, daily rollup, interval metrics, or basic), then
# apply the time profile and build the Ruport table.
def _generate_table(options = {})
return build_table_from_report(options) if db == self.class.name # Build table based on data from passed in report object
_generate_table_prep
results = if custom_results_method
generate_custom_method_results(options)
elsif performance
generate_performance_results(options)
elsif interval == 'daily' && db_klass <= MetricRollup
generate_daily_metric_rollup_results(options)
elsif interval
generate_interval_metric_results(options)
else
generate_basic_results(options)
end
if db_options && db_options[:long_term_averages] && results.first.kind_of?(MetricRollup)
# Calculate long_term_averages and save in extras
extras[:long_term_averages] = Metric::LongTermAverages.get_averages_over_time_period(results.first.resource, db_options[:long_term_averages].merge(:ext_options => ext_options))
end
build_apply_time_profile(results)
build_table(results, db, options)
end
# Produce results via a report-type-specific custom method
# ("build_results_for_report_<rpt_type>"), looked up first on the data
# class and then on the report itself.
#
# Returns the results; any extras Hash returned alongside them is merged
# into self.extras. Raises when neither class implements the method.
def generate_custom_method_results(options = {})
  if db_klass.respond_to?(custom_results_method)
    # Use custom method in DB class to get report results if defined
    custom_options = db_options[:options].merge(
      :userid      => options[:userid],
      :ext_options => ext_options,
      :report_cols => cols
    )
    results, ext = db_klass.send(custom_results_method, custom_options)
  elsif respond_to?(custom_results_method)
    # Use custom method in MiqReport class to get report results if defined
    results, ext = send(custom_results_method, options)
  else
    raise _("Unsupported report type '%{type}'") % {:type => db_options[:rpt_type]}
  end
  # TODO: results = results.select(only_cols)
  extras.merge!(ext) if ext.kind_of?(Hash) # kind_of? is false for nil; no separate guard needed
  results
end
# Original C&U charts breakdown by tags
# Results for the original C&U charts: either VMs grouped by category
# (when both a category and interval name are configured) or entries
# grouped by tags, in which case the tag columns are correlated into
# cols/col_order/headers via build_correlate_tag_cols.
def generate_performance_results(options = {})
if performance[:group_by_category] && performance[:interval_name]
results, extras[:interval] = db_class.vms_by_category(performance)
else
results, extras[:group_by_tag_cols], extras[:group_by_tags] = db_class.group_by_tags(
db_class.find_entries(ext_options).where(where_clause).where(options[:where_clause]),
:category => performance[:group_by_category],
:cat_model => options[:cat_model],
)
build_correlate_tag_cols
end
results
end
# Ad-hoc daily performance reports
# Daily for: Performance - Clusters...
# Ad-hoc daily rollup results: applies the report conditions as SQL where
# possible, restricts to the configured offset-based time range, applies
# RBAC filtering, and removes duplicate timestamps from the result set.
def generate_daily_metric_rollup_results(options = {})
unless conditions.nil?
conditions.preprocess_options = {:vim_performance_daily_adhoc => (time_profile && time_profile.rollup_daily_metrics)}
exp_sql, exp_includes = conditions.to_sql
# only_cols += conditions.columns_for_sql # Add cols references in expression to ensure they are present for evaluation
end
time_range = Metric::Helper.time_range_from_offset(interval, db_options[:start_offset], db_options[:end_offset], tz)
# TODO: add .select(only_cols)
db_includes = get_include_for_find
results = Metric::Helper.find_for_interval_name('daily', time_profile || tz, db_klass)
.where(where_clause).where(exp_sql)
.where(options[:where_clause])
.where(:timestamp => time_range)
.includes(db_includes)
.references(db_includes)
.includes(exp_includes || [])
.limit(options[:limit])
results = Rbac.filtered(results, :class => db,
:filter => conditions,
:userid => options[:userid],
:miq_group_id => options[:miq_group_id])
Metric::Helper.remove_duplicate_timestamps(results)
end
# Ad-hoc performance reports
# Ad-hoc (typically hourly) metric results over the configured offset
# time range, RBAC-filtered and de-duplicated by timestamp.
def generate_interval_metric_results(options = {})
time_range = Metric::Helper.time_range_from_offset(interval, db_options[:start_offset], db_options[:end_offset])
# Only build where clause from expression for hourly report. It will not work properly for daily because many values are rolled up from hourly.
exp_sql, exp_includes = conditions.to_sql(tz) unless conditions.nil? || db_klass.respond_to?(:instances_are_derived?)
results = db_klass.with_interval_and_time_range(interval, time_range)
.where(where_clause)
.where(options[:where_clause])
.where(exp_sql)
.includes(get_include_for_find)
.includes(exp_includes || [])
.limit(options[:limit])
results = Rbac.filtered(results, :class => db,
:filter => conditions,
:userid => options[:userid],
:miq_group_id => options[:miq_group_id])
Metric::Helper.remove_duplicate_timestamps(results)
end
# Basic report
# Daily and Hourly for: C&U main reports go through here too
# Standard report path: build an Rbac.search over the report's target
# class with merged where clauses, report conditions, and includes, then
# post-process (duplicate timestamps, BottleneckEvent duplicates) and
# remember the user's "managed" filters for later use.
def generate_basic_results(options = {})
# TODO: need to enhance only_cols to better support virtual columns
# only_cols += conditions.columns_for_sql if conditions # Add cols references in expression to ensure they are present for evaluation
# NOTE: using search to get user property "managed", otherwise this is overkill
targets = db_class
targets = db_class.find_entries(ext_options) if targets.respond_to?(:find_entries)
# TODO: add once only_cols is fixed
# targets = targets.select(only_cols)
where_clause = MiqExpression.merge_where_clauses(self.where_clause, options[:where_clause])
# Remove custom_attributes as part of the `includes` if all of them exist
# in the select statement
if all_custom_attributes_are_virtual_sql_attributes?
remove_loading_relations_for_virtual_custom_attributes
end
rbac_opts = options.merge(
:targets => targets,
:filter => conditions,
:include_for_find => get_include_for_find,
:where_clause => where_clause,
:skip_counts => true
)
## add in virtual attributes that can be calculated from sql
rbac_opts[:extra_cols] = va_sql_cols unless va_sql_cols.blank?
rbac_opts[:use_sql_view] = if db_options.nil? || db_options[:use_sql_view].nil?
MiqReport.default_use_sql_view
else
db_options[:use_sql_view]
end
results, attrs = Rbac.search(rbac_opts)
results = Metric::Helper.remove_duplicate_timestamps(results)
results = BottleneckEvent.remove_duplicate_find_results(results) if db == "BottleneckEvent"
@user_categories = attrs[:user_filters]["managed"]
results
end
# Create (or reuse and update) the MiqReportResult row for this report
# run, render its HTML rows in the requested time zone, and save a
# table-less copy of the report on the result.
#
# options - :userid, :report_source, :miq_group_id, optional :timezone
# taskid  - optional MiqTask id whose existing result row is reused
#
# Returns the saved MiqReportResult.
def build_create_results(options, taskid = nil)
  ts = Time.now.utc
  attrs = {
    :name             => title,
    :userid           => options[:userid],
    :report_source    => options[:report_source],
    :db               => db,
    :last_run_on      => ts,
    :last_accessed_on => ts,
    :miq_report_id    => id,
    :miq_group_id     => options[:miq_group_id]
  }
  _log.info("Creating report results with hash: [#{attrs.inspect}]")
  res = MiqReportResult.find_by_miq_task_id(taskid) unless taskid.nil?
  res ||= MiqReportResult.find_by_userid(options[:userid]) if options[:userid].include?("|") # replace results if adhoc (<userid>|<session_id|<mode>) user report
  res ||= MiqReportResult.new
  res.attributes = attrs
  res.report_results = self
  userid = options[:userid].split("|").first if options[:userid]
  user = User.find_by_userid(userid) if userid
  # TODO: user is nil from MiqWidget#generate_report_result due to passing the username as the second part of :userid, such as widget_id_735|admin...
  # Looks like widget generation for a user doesn't expect multiple timezones, could be an issue with MiqGroups.
  timezone = options[:timezone]
  timezone ||= user.respond_to?(:get_timezone) ? user.get_timezone : User.server_timezone
  # Render HTML rows under the requested zone; restore the previous
  # Time.zone even if rendering raises (the previous code leaked the
  # zone change on error).
  curr_tz = Time.zone
  begin
    Time.zone = timezone
    html_rows = build_html_rows
  ensure
    Time.zone = curr_tz
  end
  res.report_html = html_rows
  self.extras ||= {}
  self.extras[:total_html_rows] = html_rows.length
  append_user_filters_to_title(user)
  report = dup
  report.table = nil # the (possibly huge) table is stored separately as report_results
  res.report = report
  res.save
  _log.info("Finished creating report result with id [#{res.id}] for report id: [#{id}], name: [#{name}]")
  res
end
# Build the Ruport table (@table) for this report from +data+, which may
# be an array of ids (fetched via db_klass) or of objects. Applies trend
# data, pivoting, the display filter, sorting, paging, and subtotals.
def build_table(data, db, options = {})
data = data.to_a
# Integer entries are treated as a list of ids to fetch; otherwise use the objects directly
objs = data[0] && data[0].kind_of?(Integer) ? db_klass.where(:id => data) : data.compact
remove_loading_relations_for_virtual_custom_attributes
# Add resource columns to performance reports cols and col_order arrays for widget click thru support
if db_klass.to_s.ends_with?("Performance")
res_cols = ['resource_name', 'resource_type', 'resource_id']
self.cols = (cols + res_cols).uniq
orig_col_order = col_order.dup
self.col_order = (col_order + res_cols).uniq
end
only_cols = options[:only] || cols_for_report(['id'])
self.col_order = cols_for_report if col_order.blank?
build_trend_data(objs)
build_trend_limits(objs)
# Add missing timestamps after trend calculation to prevent timestamp adjustment for added timestamps.
objs = build_add_missing_timestamps(objs)
data = build_includes(objs)
inc = include.presence || invent_report_includes
# Flatten each object (plus its included associations) into attribute hashes
result = data.collect do |entry|
build_reportable_data(entry, {:only => only_cols, "include" => inc}, nil)
end.flatten
if rpt_options && rpt_options[:pivot]
result = build_pivot(result)
column_names = col_order
else
column_names = only_cols
end
result = build_apply_display_filter(result) unless display_filter.nil?
@table = Ruport::Data::Table.new(:data => result, :column_names => column_names)
@table.reorder(column_names) unless @table.data.empty?
# Remove any resource columns that were added earlier to col_order so they won't appear in the report
col_order.delete_if { |c| res_cols.include?(c) && !orig_col_order.include?(c) } if res_cols
build_sort_table unless options[:no_sort]
if options[:limit]
options[:offset] ||= 0
self.extras[:target_ids_for_paging] = @table.data.collect { |d| d["id"] } # Save ids of targets, since we have then all, to avoid going back to SQL for the next page
@table = @table.sub_table(@table.column_names, options[:offset]..options[:offset] + options[:limit] - 1)
end
build_subtotals
end
# Build @table from another already-generated report supplied in
# db_options[:report], using generate_rows/generate_cols to derive the
# new rows. Raises unless a report of the proper class is provided.
def build_table_from_report(options = {})
unless db_options && db_options[:report]
raise _("No %{class_name} object provided") % {:class_name => self.class.name}
end
unless db_options[:report].kind_of?(self.class)
raise _("DB option :report must be a %{class_name} object") % {:class_name => self.class.name}
end
result = generate_rows_from_data(get_data_from_report(db_options[:report]))
self.cols ||= []
only_cols = options[:only] || cols_for_report(generate_cols)
column_names = result.empty? ? self.cols : result.first.keys
@table = Ruport::Data::Table.new(:data => result, :column_names => column_names)
@table.reorder(only_cols) unless @table.data.empty?
build_sort_table
end
# Extract row data hashes from another report's table, optionally
# restricted to rows where db_options[:row_col] equals
# db_options[:row_val]. Raises when the source report has no table.
def get_data_from_report(rpt)
  raise _("Report table is nil") if rpt.table.nil?

  row_col = db_options[:row_col]
  row_val = db_options[:row_val]
  return rpt.table.collect(&:data) unless row_col && row_val

  matching = rpt.table.find_all { |row| row.data.key?(row_col) && row.data[row_col] == row_val }
  matching.collect(&:data)
end
# For each source data row, emit one output row per generate_rows
# definition, mapping each column definition onto the corresponding
# generate_cols name via generate_col_from_data.
def generate_rows_from_data(data)
  data.flat_map do |datum|
    generate_rows.map do |row_definition|
      row = {}
      row_definition.each_with_index do |col_def, idx|
        row[generate_cols[idx]] = generate_col_from_data(col_def, datum)
      end
      row
    end
  end
end
# Resolve one generated-column definition against a data row.
#
# col_def - a literal value (returned as-is), or a Hash with :col_name
#           and optionally :function (delegated to apply_col_function).
# data    - the source row Hash.
#
# Raises when the referenced column is missing from the row.
def generate_col_from_data(col_def, data)
  return col_def unless col_def.kind_of?(Hash)

  unless data.key?(col_def[:col_name])
    # fixed: error message previously had an unbalanced quote around %{name}
    raise _("Column '%{name}' does not exist in data") % {:name => col_def[:col_name]}
  end
  col_def.key?(:function) ? apply_col_function(col_def, data) : data[col_def[:col_name]]
end
# Apply the function named in col_def[:function] to the data row.
# Currently only 'percent_of_col' is supported: the value of :col_name
# as a percentage of :pct_col_name (0 when the denominator is 0).
#
# Raises when a referenced column is missing or the function is unknown.
def apply_col_function(col_def, data)
  case col_def[:function]
  when 'percent_of_col'
    # fixed: the error branches referenced an undefined local `gen_row`
    # (NameError at runtime); they now correctly reference col_def.
    unless data.key?(col_def[:col_name])
      raise _("Column '%{name}' does not exist in data") % {:name => col_def[:col_name]}
    end
    unless data.key?(col_def[:pct_col_name])
      raise _("Column '%{name}' does not exist in data") % {:name => col_def[:pct_col_name]}
    end
    col_val = data[col_def[:col_name]] || 0
    pct_val = data[col_def[:pct_col_name]] || 0
    pct_val == 0 ? 0 : (col_val / pct_val * 100.0)
  else
    raise _("Column function '%{name}' not supported") % {:name => col_def[:function]}
  end
end
# Correlate the group-by-tag columns produced by db_class.group_by_tags
# with human-readable tag descriptions, appending matching columns to
# cols/col_order/headers (sorted by description, "[None]" rotated to the
# end) and recording the tag order in extras.
def build_correlate_tag_cols
tags2desc = {}
arr = self.cols.inject([]) do |a, c|
self.extras[:group_by_tag_cols].each do |tc|
# tc is "<base_col>_<tag>"; extract the tag portion after the base column name
tag = tc[(c.length + 1)..-1]
if tc.starts_with?(c)
unless tags2desc.key?(tag)
if tag == "_none_"
tags2desc[tag] = "[None]"
else
entry = Classification.find_by_name([performance[:group_by_category], tag].join("/"))
tags2desc[tag] = entry.nil? ? tag.titleize : entry.description
end
end
a << [tc, tags2desc[tag]]
end
end
a
end
arr.sort! { |a, b| a[1] <=> b[1] }
# Rotate "[None]" entries to the end unless everything is "[None]"
while arr.first[1] == "[None]"
arr.push(arr.shift)
end unless arr.blank? || (arr.first[1] == "[None]" && arr.last[1] == "[None]")
arr.each do |c, h|
self.cols.push(c)
col_order.push(c)
headers.push(h)
end
tarr = Array(tags2desc).sort_by { |t| t[1] }
while tarr.first[1] == "[None]"
tarr.push(tarr.shift)
end unless tarr.blank? || (tarr.first[1] == "[None]" && tarr.last[1] == "[None]")
self.extras[:group_by_tags] = tarr.collect { |a| a[0] }
self.extras[:group_by_tag_descriptions] = tarr.collect { |a| a[1] }
end
# For Metric/MetricRollup result sets, synthesize filler records for any
# gaps in the per-resource timestamp sequence (hourly or daily interval),
# so that time-interval-based averages are computed correctly. Returns
# the records sorted by resource and timestamp with fillers inserted.
def build_add_missing_timestamps(recs)
return recs unless !recs.empty? && (recs.first.kind_of?(Metric) || recs.first.kind_of?(MetricRollup))
return recs if db_options && db_options[:calc_avgs_by] && db_options[:calc_avgs_by] != "time_interval" # Only fill in missing timestamps if averages are requested to be based on time
base_cols = Metric::BASE_COLS - ["id"]
int = recs.first.capture_interval_name == 'daily' ? 1.day.to_i : 1.hour.to_i
klass = recs.first.class
last_rec = nil
results = recs.sort_by { |r| [r.resource_type, r.resource_id.to_s, r.timestamp.iso8601] }.inject([]) do |arr, rec|
last_rec ||= rec
# Insert one filler record per missing interval, copying only the base columns
while (rec.timestamp - last_rec.timestamp) > int
base_attrs = last_rec.attributes.reject { |k, _v| !base_cols.include?(k) }
last_rec = klass.new(base_attrs.merge(:timestamp => (last_rec.timestamp + int)))
last_rec.inside_time_profile = false if last_rec.respond_to?(:inside_time_profile)
arr << last_rec
end
arr << rec
last_rec = rec
arr
end
results
end
# Apply the report's time profile to every record that supports it;
# no-op when the report has no time profile.
def build_apply_time_profile(results)
  return unless time_profile

  results.each do |rec|
    rec.apply_time_profile(time_profile) if rec.respond_to?(:apply_time_profile)
  end
end
# Filter the flattened result rows through the report's display filter,
# which may be an MiqExpression (evaluated against hashes), a Proc, or a
# {:operator, :field, :value} Hash of simple comparisons.
def build_apply_display_filter(results)
  return results if display_filter.nil?

  case display_filter
  when MiqExpression
    display_filter.context_type = "hash" # Tell MiqExpression that the context objects are hashes
    results.find_all { |row| display_filter.evaluate(row) }
  when Proc
    results.select(&display_filter)
  when Hash
    op    = display_filter[:operator]
    field = display_filter[:field].to_s
    value = display_filter[:value]
    results.select do |row|
      actual = row[field]
      case op
      when "="  then actual == value
      when "!=" then actual != value
      when "<"  then actual < value
      when "<=" then actual <= value
      when ">"  then actual > value
      when ">=" then actual >= value
      else false
      end
    end
  end
end
# Compose a composite grouping key by joining the row's values for
# +keys+ with "__".
def get_group_val(row, keys)
  keys.map { |key| row[key] }.join("__")
end
# Record one finished group under +gid+, add its row count to the running
# totals, and fold its values into the overall totals via process_totals.
def process_group_break(gid, group, totals, result)
result[gid] = group
totals[:count] += group[:count]
process_totals(group)
end
# Pivot the flattened rows per rpt_options[:pivot]: sort by the group
# columns, compute subtotals with the aggregations encoded in col_order
# names ("col__sum" etc.), and emit one output row per group combining
# the grouping values with the aggregated values.
def build_pivot(data)
return data unless rpt_options && rpt_options.key?(:pivot)
return data if data.blank?
# Build a tempory table so that ruport sorting can be used to sort data before summarizing pivot data
column_names = (data.first.keys.collect(&:to_s) + col_order).uniq
data = Ruport::Data::Table.new(:data => data, :column_names => column_names)
data = sort_table(data, rpt_options[:pivot][:group_cols].collect(&:to_s), :order => :ascending)
# build grouping options for subtotal
options = col_order.inject({}) do |h, col|
next(h) unless col.include?("__")
c, g = col.split("__")
h[c] ||= {}
h[c][:grouping] ||= []
h[c][:grouping] << g.to_sym
h
end
group_key = rpt_options[:pivot][:group_cols]
data = generate_subtotals(data, group_key, options)
data.inject([]) do |a, (k, v)|
next(a) if k == :_total_
# "col__agg" columns come from the aggregation buckets; plain columns from the group's row
row = col_order.inject({}) do |h, col|
if col.include?("__")
c, g = col.split("__")
h[col] = v[g.to_sym][c]
else
h[col] = v[:row][col]
end
h
end
a << row
end
end
# the columns that are needed for this report.
# there may be some columns that are used to derive columns,
# so we currently include '*'
# All unique columns this report needs: declared cols, display order
# columns, caller-supplied extras, and columns implied by the includes.
def cols_for_report(extra_cols = [])
  combined = (cols || []) + (col_order || []) + (extra_cols || []) + build_cols_from_include(include)
  combined.uniq
end
# Flatten the legacy includes structure into fully-qualified dotted
# column names ("assoc.sub.column"), recursing through nested "include"
# entries.
def build_cols_from_include(hash, parent_association = nil)
  return [] if hash.blank?

  hash.each_with_object([]) do |(assoc, node), acc|
    path = get_full_path(parent_association, assoc)
    node["columns"].each { |col| acc << get_full_path(path, col) } if node.key?("columns")
    acc.concat(build_cols_from_include(node["include"], path) || []) if node.key?("include")
  end
end
# Wrap each object in an {:obj => obj} entry and eagerly attach its
# included association objects (per the report includes) via
# build_search_includes.
def build_includes(objs)
  inc = include.presence || invent_report_includes
  objs.collect do |obj|
    entry = {:obj => obj}
    build_search_includes(obj, entry, inc) if inc
    entry
  end
end
# Recursively walk the includes structure, loading each association on
# +obj+ (normalized to a flat, nil-free array) into +entry+ as nested
# {:obj => record} hashes. Associations the object does not respond to
# are skipped.
def build_search_includes(obj, entry, includes)
includes.each_key do |assoc|
next unless obj.respond_to?(assoc)
assoc_objects = [obj.send(assoc)].flatten.compact
entry[assoc.to_sym] = assoc_objects.collect do |rec|
new_entry = {:obj => rec}
build_search_includes(rec, new_entry, includes[assoc]["include"]) if includes[assoc]["include"]
new_entry
end
end
end
# simplify to use col_sort_order. "include" won't be necessary)
# Flatten one includes entry ({:obj => record, assoc => [...]}) into an
# array of attribute hashes, expanding included associations via
# build_add_includes when options["include"] is present.
def build_reportable_data(entry, options, parent_association)
rec = entry[:obj]
data_records = [build_get_attributes_with_options(rec, options)]
data_records = build_add_includes(data_records, entry, options["include"], parent_association) if options["include"]
data_records
end
# Extract the requested attributes from +rec+ into a Hash, computing
# trend columns via build_calculate_trend_point and optionally prefixing
# each key with options[:qualify_attribute_names] ("prefix.attr").
# Returns {} when neither :only nor :except is given.
# NOTE(review): the loop iterates options[:only] unconditionally, so a
# caller passing only :except would raise NoMethodError on nil —
# presumably all callers pass :only; confirm before relying on :except.
def build_get_attributes_with_options(rec, options = {})
only_or_except =
if options[:only] || options[:except]
{:only => options[:only], :except => options[:except]}
end
return {} unless only_or_except
attrs = {}
options[:only].each do |a|
if self.class.is_trend_column?(a)
attrs[a] = build_calculate_trend_point(rec, a)
else
attrs[a] = rec.send(a) if rec.respond_to?(a)
end
end
attrs = attrs.inject({}) do |h, (k, v)|
h["#{options[:qualify_attribute_names]}.#{k}"] = v
h
end if options[:qualify_attribute_names]
attrs
end
# Expand +data_records+ with the included associations of +entry+,
# producing the cartesian product of existing rows with each
# association's rows (qualified by the dotted association path). The
# "categories"/"managed" pseudo-associations are expanded from the
# object's tags into per-category description lists instead.
def build_add_includes(data_records, entry, includes, parent_association)
include_has_options = includes.kind_of?(Hash)
associations = include_has_options ? includes.keys : Array(includes)
associations.each do |association|
existing_records = data_records.dup
data_records = []
full_path = get_full_path(parent_association, association)
if include_has_options
assoc_options = includes[association].merge(:qualify_attribute_names => full_path,
:only => includes[association]["columns"])
else
# NOTE(review): in the Array case `includes[association]` indexes an
# Array with a String, which raises TypeError — presumably includes is
# always a Hash in practice; confirm before exercising the Array path.
assoc_options = {:qualify_attribute_names => full_path, :only => includes[association]["columns"]}
end
if association == "categories" || association == "managed"
association_objects = []
assochash = {}
# Cache tag_id -> description for all classification entries
@descriptions_by_tag_id ||= Classification.is_entry.each_with_object({}) do |c, h|
h[c.tag_id] = c.description
end
assoc_options[:only].each do |c|
entarr = []
entry[:obj].tags.each do |t|
next unless t.name.starts_with?("/managed/#{c}/")
next unless @descriptions_by_tag_id.key?(t.id)
entarr << @descriptions_by_tag_id[t.id]
end
assochash[full_path + "." + c] = entarr unless entarr.empty?
end
# join the the category data together
longest = 0
idx = 0
assochash.each_key { |k| longest = assochash[k].length if assochash[k].length > longest }
longest.times do
nh = {}
# Shorter category lists repeat their last value to pad to the longest list
assochash.each_key { |k| nh[k] = assochash[k][idx].nil? ? assochash[k].last : assochash[k][idx] }
association_objects.push(nh)
idx += 1
end
else
association_objects = entry[association.to_sym]
end
existing_records.each do |existing_record|
if association_objects.empty?
data_records << existing_record
else
association_objects.each do |obj|
if association == "categories" || association == "managed"
association_records = [obj]
else
association_records = build_reportable_data(obj, assoc_options, full_path)
end
association_records.each do |assoc_record|
data_records << existing_record.merge(assoc_record)
end
end
end
end
end
data_records
end
# Queue a build_report_result run for this report: creates the MiqTask,
# enqueues the job, records an audit event, and returns the task id.
def queue_report_result(options, res_opts)
options[:userid] ||= "system"
_log.info("Adding generate report task to the message queue...")
task = MiqTask.create(:name => "Generate Report: '#{name}'", :userid => options[:userid])
MiqQueue.submit_job(
:service => "reporting",
:class_name => self.class.name,
:instance_id => id,
:method_name => "build_report_result",
:msg_timeout => queue_timeout,
:args => [task.id, options, res_opts]
)
AuditEvent.success(:event => "generate_table", :target_class => self.class.base_class.name, :target_id => id, :userid => options[:userid], :message => "#{task.name}, successfully initiated")
task.update_status("Queued", "Ok", "Task has been queued")
_log.info("Finished adding generate report task with id [#{task.id}] to the message queue")
task.id
end
# Queued entry point: generate the report table for +taskid+ (unless the
# task already holds results), stamp the MiqReportResult with the task id
# and scheduled time, optionally email the user, and clear the bulky
# task_results now that the data lives on the report result. Returns the
# MiqReportResult, or nil when generation failed.
# NOTE(review): update_attributes is deprecated in later Rails versions
# in favor of update — confirm the Rails version before changing.
def build_report_result(taskid, options, res_opts = {})
task = MiqTask.find(taskid)
# Generate the table only if the task does not already contain a MiqReport object
if task.task_results.blank?
_log.info("Generating report table with taskid [#{taskid}] and options [#{options.inspect}]")
_async_generate_table(taskid, options.merge(:mode => "schedule", :report_source => res_opts[:source]))
# Reload the task after the _async_generate_table has updated it
task.reload
if !task.results_ready?
_log.warn("Generating report table with taskid [#{taskid}]... Failed to complete, '#{task.message}'")
return
else
_log.info("Generating report table with taskid [#{taskid}]... Complete")
end
end
res_last_run_on = Time.now.utc
# If a scheduler :at time was provided, convert that to a Time object, otherwise use the current time
if res_opts[:at]
unless res_opts[:at].kind_of?(Numeric)
raise _("Expected scheduled time 'at' to be 'numeric', received '%{type}'") % {:type => res_opts[:at].class}
end
at = Time.at(res_opts[:at]).utc
else
at = res_last_run_on
end
res = task.miq_report_result
nh = {:miq_task_id => taskid, :scheduled_on => at}
_log.info("Updating report results with hash: [#{nh.inspect}]")
res.update_attributes(nh)
_log.info("Finished creating report result with id [#{res.id}] for report id: [#{id}], name: [#{name}]")
notify_user_of_report(res_last_run_on, res, options) if options[:send_email]
# Remove the table in the task_results since we now have it in the report_results
task.task_results = nil
task.save
res
end
# True when the generated table contains at least one row.
def table_has_records?
!table.empty?
end
# Per-report queue timeout in seconds, falling back to the class default.
def queue_timeout
((rpt_options || {})[:queue_timeout] || self.class.default_queue_timeout).to_i_with_method
end
# Store a per-report queue timeout in rpt_options.
def queue_timeout=(value)
self.rpt_options ||= {}
self.rpt_options[:queue_timeout] = value
end
#####################################################
# Append +title_suffix+ to the report title in place.
def append_to_title!(title_suffix)
self.title += title_suffix
end
# Note in the title that results were filtered for +user+; no-op when the
# user is nil or has no filters.
def append_user_filters_to_title(user)
return unless user && user.has_filters?
self.append_to_title!(" (filtered for #{user.name})")
end
# Effective time zone for the report: the time profile's tz when one is
# set, else the report's own tz, else +default_tz+.
def get_time_zone(default_tz = nil)
  (time_profile && time_profile.tz) || tz || default_tz
end
private
# Join a parent association path and a child name with a dot; a nil
# parent yields just the child as a String.
def get_full_path(parent, child)
  parent ? "#{parent}.#{child}" : child.to_s
end
# Preps the current instance and db class for building a report
# Preps the current instance and db class for building a report
def _generate_table_prep
# Make sure the db_klass has the custom_attribute definitions defined for
# the report being built.
load_custom_attributes
# Default time zone in profile to report time zone
time_profile.tz ||= tz if time_profile
# ext_options are threaded through metric queries and condition evaluation
self.ext_options = {:tz => tz, :time_profile => time_profile}
# TODO: these columns need to be converted to real SQL columns
# only_cols = cols
self.extras ||= {}
end
# Memoized db_options[:interval] (e.g. "daily", "hourly"); false/nil when
# no db_options are set. NOTE: a falsy result is re-computed on each call
# because of the ||= memoization.
def interval
@interval ||= db_options.present? && db_options[:interval]
end
# Memoized name of the report-type-specific results method
# ("build_results_for_report_<rpt_type>"), or nil when no rpt_type is set.
def custom_results_method
@custom_results_method ||= db_options && db_options[:rpt_type] ? "build_results_for_report_#{db_options[:rpt_type]}" : nil
end
end
Simplify expression in MiqReport#get_time_zone
module MiqReport::Generator
extend ActiveSupport::Concern
include_concern 'Aggregation'
include_concern 'Async'
include_concern 'Html'
include_concern 'Sorting'
include_concern 'Trend'
include_concern 'Utilization'
DATE_TIME_BREAK_SUFFIXES = [
[_("Hour"), "hour"],
[_("Day"), "day"],
[_("Week"), "week"],
[_("Month"), "month"],
[_("Quarter"), "quarter"],
[_("Year"), "year"],
[_("Hour of the Day"), "hour_of_day"],
[_("Day of the Week"), "day_of_week"],
[_("Day of the Month"), "day_of_month"],
[_("Week of the Year"), "week_of_year"],
[_("Month of the Year"), "month_of_year"]
].freeze
module ClassMethods
def date_time_break_suffixes
DATE_TIME_BREAK_SUFFIXES
end
def get_col_break_suffixes(col)
col_type = MiqExpression.parse_field_or_tag(col).try(:column_type)
case col_type
when :date
date_time_break_suffixes.select { |_name, suffix| !suffix.to_s.starts_with?("hour") }
when :datetime
date_time_break_suffixes
else
[]
end
end
def all_break_suffixes
date_time_break_suffixes.collect(&:last)
end
def is_break_suffix?(suffix)
all_break_suffixes.include?(suffix)
end
def default_queue_timeout
::Settings.reporting.queue_timeout.to_i_with_method
end
end
def col_to_expression_col(col)
parts = col.split(".")
if parts.length == 1
table = db
else
table, col = parts[-2..-1]
end
"#{table2class(table)}-#{col}"
end
def table2class(table)
@table2class ||= {}
@table2class[table] ||= begin
case table.to_sym
when :ports, :nics, :storage_adapters
"GuestDevice"
when :"<compare>"
self.class.name
else
ref = db_class.reflection_with_virtual(table.to_sym)
ref ? ref.class_name : table.singularize.camelize
end
end
@table2class[table]
end
def get_include_for_find
include_as_hash(include.presence || invent_report_includes).deep_merge(include_for_find || {}).presence
end
def invent_includes
include_as_hash(invent_report_includes)
end
# would like this format to go away
# will go away when we drop build_reportable_data
def invent_report_includes
return {} unless col_order
col_order.each_with_object({}) do |col, ret|
next unless col.include?(".")
*rels, column = col.split(".")
if col !~ /managed\./ && col !~ /virtual_custom/
(rels.inject(ret) { |h, rel| h[rel] ||= {} }["columns"] ||= []) << column
end
end
end
def include_as_hash(includes = include, klass = db_class, klass_cols = cols)
result = {}
if klass_cols && klass && klass.respond_to?(:virtual_attribute?)
klass_cols.each do |c|
result[c.to_sym] = {} if klass.virtual_attribute?(c) && !klass.attribute_supported_by_sql?(c)
end
end
if includes.kind_of?(Hash)
includes.each do |k, v|
k = k.to_sym
if k == :managed
result[:tags] = {}
else
assoc_reflection = klass.reflect_on_association(k)
assoc_klass = (assoc_reflection.options[:polymorphic] ? k : assoc_reflection.klass) if assoc_reflection
result[k] = include_as_hash(v && v["include"], assoc_klass, v && v["columns"])
end
end
elsif includes.kind_of?(Array)
includes.each { |i| result[i.to_sym] = {} }
end
result
end
def queue_generate_table(options = {})
options[:userid] ||= "system"
options[:mode] ||= "async"
options[:report_source] ||= "Requested by user"
sync = options.delete(:report_sync) || ::Settings.product.report_sync
task = MiqTask.create(:name => "Generate Report: '#{name}'", :userid => options[:userid])
report_result = MiqReportResult.create(
:name => title,
:userid => options[:userid],
:report_source => options[:report_source],
:db => db,
:miq_report_id => id,
:miq_task_id => task.id
)
AuditEvent.success(
:event => "generate_table",
:target_class => self.class.base_class.name,
:target_id => id,
:userid => options[:userid],
:message => "#{task.name}, successfully initiated"
)
task.update_status("Queued", "Ok", "Task has been queued")
if sync
_async_generate_table(task.id, options)
else
MiqQueue.submit_job(
:service => "reporting",
:class_name => self.class.name,
:instance_id => id,
:method_name => "_async_generate_table",
:args => [task.id, options],
:msg_timeout => queue_timeout
)
end
report_result.id
end
def generate_table(options = {})
if options[:user]
User.with_user(options[:user]) { _generate_table(options) }
elsif options[:userid]
userid = MiqReportResult.parse_userid(options[:userid])
user = User.find_by_userid(userid)
User.with_user(user, userid) { _generate_table(options) }
else
_generate_table(options)
end
end
def _generate_table(options = {})
return build_table_from_report(options) if db == self.class.name # Build table based on data from passed in report object
_generate_table_prep
results = if custom_results_method
generate_custom_method_results(options)
elsif performance
generate_performance_results(options)
elsif interval == 'daily' && db_klass <= MetricRollup
generate_daily_metric_rollup_results(options)
elsif interval
generate_interval_metric_results(options)
else
generate_basic_results(options)
end
if db_options && db_options[:long_term_averages] && results.first.kind_of?(MetricRollup)
# Calculate long_term_averages and save in extras
extras[:long_term_averages] = Metric::LongTermAverages.get_averages_over_time_period(results.first.resource, db_options[:long_term_averages].merge(:ext_options => ext_options))
end
build_apply_time_profile(results)
build_table(results, db, options)
end
# Gathers results via the custom builder named by db_options[:rpt_type]
# (see #custom_results_method). The builder may live on the db class or on
# this report class; either may return an extras hash that is merged into
# self.extras. Raises when no builder exists for the configured type.
def generate_custom_method_results(options = {})
  if db_klass.respond_to?(custom_results_method)
    # Use custom method in DB class to get report results if defined
    results, ext = db_klass.send(custom_results_method, db_options[:options].merge(:userid => options[:userid],
                                 :ext_options => ext_options,
                                 :report_cols => cols))
  elsif respond_to?(custom_results_method)
    # Use custom method in MiqReport class to get report results if defined
    results, ext = send(custom_results_method, options)
  else
    raise _("Unsupported report type '%{type}'") % {:type => db_options[:rpt_type]}
  end
  # TODO: results = results.select(only_cols)
  extras.merge!(ext) if ext && ext.kind_of?(Hash)
  results
end
# Original C&U charts breakdown by tags
# Builds results for the legacy C&U chart reports. Either groups VMs by a
# tag category (when both :group_by_category and :interval_name are set in
# the performance options) or tag-groups the generic entry query and folds
# the tag-derived column metadata into the report via build_correlate_tag_cols.
def generate_performance_results(options = {})
  if performance[:group_by_category] && performance[:interval_name]
    results, extras[:interval] = db_class.vms_by_category(performance)
  else
    # group_by_tags returns the rows plus tag-column metadata that
    # build_correlate_tag_cols turns into extra cols/headers.
    results, extras[:group_by_tag_cols], extras[:group_by_tags] = db_class.group_by_tags(
      db_class.find_entries(ext_options).where(where_clause).where(options[:where_clause]),
      :category => performance[:group_by_category],
      :cat_model => options[:cat_model],
    )
    build_correlate_tag_cols
  end
  results
end
# Ad-hoc daily performance reports
# Daily for: Performance - Clusters...
# Builds ad-hoc daily MetricRollup results for the configured start/end
# offset window, constrained by the report expression and any caller
# where-clause, then RBAC-filters and removes duplicate timestamps.
def generate_daily_metric_rollup_results(options = {})
  unless conditions.nil?
    conditions.preprocess_options = {:vim_performance_daily_adhoc => (time_profile && time_profile.rollup_daily_metrics)}
    # exp_sql/exp_includes: SQL fragment + join info derived from the expression.
    exp_sql, exp_includes = conditions.to_sql
    # only_cols += conditions.columns_for_sql # Add cols references in expression to ensure they are present for evaluation
  end

  time_range = Metric::Helper.time_range_from_offset(interval, db_options[:start_offset], db_options[:end_offset], tz)

  # TODO: add .select(only_cols)
  db_includes = get_include_for_find
  results = Metric::Helper.find_for_interval_name('daily', time_profile || tz, db_klass)
            .where(where_clause).where(exp_sql)
            .where(options[:where_clause])
            .where(:timestamp => time_range)
            .includes(db_includes)
            .references(db_includes)
            .includes(exp_includes || [])
            .limit(options[:limit])
  # RBAC narrows the rows to what the requesting user/group may see.
  results = Rbac.filtered(results, :class => db,
                          :filter => conditions,
                          :userid => options[:userid],
                          :miq_group_id => options[:miq_group_id])
  Metric::Helper.remove_duplicate_timestamps(results)
end
# Ad-hoc performance reports
# Builds ad-hoc (typically hourly) metric results over the configured
# offset window, then RBAC-filters and removes duplicate timestamps.
def generate_interval_metric_results(options = {})
  time_range = Metric::Helper.time_range_from_offset(interval, db_options[:start_offset], db_options[:end_offset])

  # Only build where clause from expression for hourly report. It will not work properly for daily because many values are rolled up from hourly.
  exp_sql, exp_includes = conditions.to_sql(tz) unless conditions.nil? || db_klass.respond_to?(:instances_are_derived?)

  results = db_klass.with_interval_and_time_range(interval, time_range)
            .where(where_clause)
            .where(options[:where_clause])
            .where(exp_sql)
            .includes(get_include_for_find)
            .includes(exp_includes || [])
            .limit(options[:limit])
  # RBAC narrows the rows to what the requesting user/group may see.
  results = Rbac.filtered(results, :class => db,
                          :filter => conditions,
                          :userid => options[:userid],
                          :miq_group_id => options[:miq_group_id])
  Metric::Helper.remove_duplicate_timestamps(results)
end
# Basic report
# Daily and Hourly for: C&U main reports go through here too
# Default path for plain (non-metric, non-custom) reports: builds the target
# scope, merges where clauses, and delegates filtering and sql-level limits
# to Rbac.search. Also captures the user's tag filters for later use.
def generate_basic_results(options = {})
  # TODO: need to enhance only_cols to better support virtual columns
  # only_cols += conditions.columns_for_sql if conditions # Add cols references in expression to ensure they are present for evaluation

  # NOTE: using search to get user property "managed", otherwise this is overkill
  targets = db_class
  targets = db_class.find_entries(ext_options) if targets.respond_to?(:find_entries)
  # TODO: add once only_cols is fixed
  # targets = targets.select(only_cols)

  # Combine the report's own where clause with any caller-supplied one.
  where_clause = MiqExpression.merge_where_clauses(self.where_clause, options[:where_clause])

  # Remove custom_attributes as part of the `includes` if all of them exist
  # in the select statement
  if all_custom_attributes_are_virtual_sql_attributes?
    remove_loading_relations_for_virtual_custom_attributes
  end

  rbac_opts = options.merge(
    :targets => targets,
    :filter => conditions,
    :include_for_find => get_include_for_find,
    :where_clause => where_clause,
    :skip_counts => true
  )

  ## add in virtual attributes that can be calculated from sql
  rbac_opts[:extra_cols] = va_sql_cols unless va_sql_cols.blank?

  # Per-report override of the sql-view optimization, else class default.
  rbac_opts[:use_sql_view] = if db_options.nil? || db_options[:use_sql_view].nil?
                               MiqReport.default_use_sql_view
                             else
                               db_options[:use_sql_view]
                             end

  results, attrs = Rbac.search(rbac_opts)
  results = Metric::Helper.remove_duplicate_timestamps(results)
  results = BottleneckEvent.remove_duplicate_find_results(results) if db == "BottleneckEvent"
  # Remember the user's "managed" (tag) filters for later processing.
  @user_categories = attrs[:user_filters]["managed"]
  results
end
# Creates (or reuses) the MiqReportResult row that stores this run's output,
# renders the HTML rows in the requesting user's time zone, and saves a
# table-less copy of the report alongside the result.
#
# options - hash with :userid, :report_source, :miq_group_id, :timezone
# taskid  - MiqTask id; when given, an existing result for that task is reused
#
# Returns the saved MiqReportResult.
#
# Fix: the Time.zone switch is now wrapped in begin/ensure so a failure in
# build_html_rows can no longer leak the temporary zone to later requests.
def build_create_results(options, taskid = nil)
  ts = Time.now.utc
  attrs = {
    :name => title,
    :userid => options[:userid],
    :report_source => options[:report_source],
    :db => db,
    :last_run_on => ts,
    :last_accessed_on => ts,
    :miq_report_id => id,
    :miq_group_id => options[:miq_group_id]
  }

  _log.info("Creating report results with hash: [#{attrs.inspect}]")

  res = MiqReportResult.find_by_miq_task_id(taskid) unless taskid.nil?
  res ||= MiqReportResult.find_by_userid(options[:userid]) if options[:userid].include?("|") # replace results if adhoc (<userid>|<session_id|<mode>) user report
  res ||= MiqReportResult.new
  res.attributes = attrs

  res.report_results = self

  curr_tz = Time.zone # Save current time zone setting
  userid = options[:userid].split("|").first if options[:userid]
  user = User.find_by_userid(userid) if userid
  # TODO: user is nil from MiqWidget#generate_report_result due to passing the username as the second part of :userid, such as widget_id_735|admin...
  # Looks like widget generation for a user doesn't expect multiple timezones, could be an issue with MiqGroups.
  timezone = options[:timezone]
  timezone ||= user.respond_to?(:get_timezone) ? user.get_timezone : User.server_timezone

  begin
    Time.zone = timezone
    html_rows = build_html_rows
  ensure
    Time.zone = curr_tz # Restore the previous zone even if rendering raised
  end

  res.report_html = html_rows
  self.extras ||= {}
  self.extras[:total_html_rows] = html_rows.length
  append_user_filters_to_title(user)

  report = dup
  report.table = nil
  res.report = report
  res.save

  _log.info("Finished creating report result with id [#{res.id}] for report id: [#{id}], name: [#{name}]")
  res
end
# Turns raw query results into the report's Ruport table (@table):
# resolves ids to records, computes trends, fills missing timestamps,
# expands includes into flat rows, optionally pivots, applies the display
# filter, sorts, pages, and computes subtotals.
def build_table(data, db, options = {})
  data = data.to_a
  # A list of Integers is a list of record ids; re-fetch the records then.
  objs = data[0] && data[0].kind_of?(Integer) ? db_klass.where(:id => data) : data.compact

  remove_loading_relations_for_virtual_custom_attributes

  # Add resource columns to performance reports cols and col_order arrays for widget click thru support
  if db_klass.to_s.ends_with?("Performance")
    res_cols = ['resource_name', 'resource_type', 'resource_id']
    self.cols = (cols + res_cols).uniq
    orig_col_order = col_order.dup
    self.col_order = (col_order + res_cols).uniq
  end

  only_cols = options[:only] || cols_for_report(['id'])
  self.col_order = cols_for_report if col_order.blank?

  build_trend_data(objs)
  build_trend_limits(objs)

  # Add missing timestamps after trend calculation to prevent timestamp adjustment for added timestamps.
  objs = build_add_missing_timestamps(objs)

  data = build_includes(objs)
  inc = include.presence || invent_report_includes
  result = data.collect do |entry|
    build_reportable_data(entry, {:only => only_cols, "include" => inc}, nil)
  end.flatten

  if rpt_options && rpt_options[:pivot]
    result = build_pivot(result)
    column_names = col_order
  else
    column_names = only_cols
  end

  result = build_apply_display_filter(result) unless display_filter.nil?

  @table = Ruport::Data::Table.new(:data => result, :column_names => column_names)
  @table.reorder(column_names) unless @table.data.empty?

  # Remove any resource columns that were added earlier to col_order so they won't appear in the report
  col_order.delete_if { |c| res_cols.include?(c) && !orig_col_order.include?(c) } if res_cols

  build_sort_table unless options[:no_sort]

  if options[:limit]
    options[:offset] ||= 0
    self.extras[:target_ids_for_paging] = @table.data.collect { |d| d["id"] } # Save ids of targets, since we have then all, to avoid going back to SQL for the next page
    @table = @table.sub_table(@table.column_names, options[:offset]..options[:offset] + options[:limit] - 1)
  end

  build_subtotals
end
# Builds this report's table from another, already-generated report object
# supplied in db_options[:report] (used when :db names this class).
# Raises when the source report is missing or of the wrong type.
def build_table_from_report(options = {})
  unless db_options && db_options[:report]
    raise _("No %{class_name} object provided") % {:class_name => self.class.name}
  end
  unless db_options[:report].kind_of?(self.class)
    raise _("DB option :report must be a %{class_name} object") % {:class_name => self.class.name}
  end

  result = generate_rows_from_data(get_data_from_report(db_options[:report]))

  self.cols ||= []
  only_cols = options[:only] || cols_for_report(generate_cols)
  # With no rows, fall back to the configured cols for the table header.
  column_names = result.empty? ? self.cols : result.first.keys
  @table = Ruport::Data::Table.new(:data => result, :column_names => column_names)
  @table.reorder(only_cols) unless @table.data.empty?

  build_sort_table
end
# Extracts the raw row hashes from another report's already-built table,
# optionally keeping only rows whose db_options[:row_col] value matches
# db_options[:row_val].
def get_data_from_report(rpt)
  raise _("Report table is nil") if rpt.table.nil?

  row_col = db_options[:row_col]
  row_val = db_options[:row_val]

  rows = rpt.table
  rows = rows.find_all { |r| r.data.key?(row_col) && r.data[row_col] == row_val } if row_col && row_val
  rows.collect(&:data)
end
# Expands each source-report row into one output row per generate_rows
# template, mapping each column definition onto its generate_cols name.
def generate_rows_from_data(data)
  data.each_with_object([]) do |source_row, rows|
    generate_rows.each do |row_template|
      new_row = {}
      row_template.each_with_index do |col_def, idx|
        new_row[generate_cols[idx]] = generate_col_from_data(col_def, source_row)
      end
      rows << new_row
    end
  end
end
# Resolves one generated-column definition against a source row:
# a Hash definition names a source column (optionally with a :function to
# apply); any other value is treated as a literal and returned as-is.
def generate_col_from_data(col_def, data)
  return col_def unless col_def.kind_of?(Hash)

  unless data.key?(col_def[:col_name])
    raise _("Column '%{name} does not exist in data") % {:name => col_def[:col_name]}
  end

  col_def.key?(:function) ? apply_col_function(col_def, data) : data[col_def[:col_name]]
end
# Applies the aggregate function named in col_def[:function] to the row
# hash +data+. Only 'percent_of_col' is supported: returns
# data[col_name] / data[pct_col_name] * 100.0, or 0 when the denominator
# is zero/missing. Raises for unknown functions or missing columns.
#
# Fix: the missing-column error paths referenced an undefined local
# `gen_row`, which raised NameError instead of the intended message;
# they now correctly read from col_def.
def apply_col_function(col_def, data)
  case col_def[:function]
  when 'percent_of_col'
    unless data.key?(col_def[:col_name])
      raise _("Column '%{name} does not exist in data") % {:name => col_def[:col_name]}
    end
    unless data.key?(col_def[:pct_col_name])
      raise _("Column '%{name} does not exist in data") % {:name => col_def[:pct_col_name]}
    end

    col_val = data[col_def[:col_name]] || 0
    pct_val = data[col_def[:pct_col_name]] || 0
    return pct_val == 0 ? 0 : (col_val / pct_val * 100.0)
  else
    raise _("Column function '%{name}' not supported") % {:name => col_def[:function]}
  end
end
# Expands tag-grouped performance results into per-tag report columns.
# For every report column that has tag-derived counterparts (named
# "<col>_<tag>" in extras[:group_by_tag_cols]), resolves each tag to a
# human-readable description via Classification, appends the new
# column/header pairs (keeping the "[None]" bucket last), and records the
# sorted tag/description lists in extras for chart rendering.
def build_correlate_tag_cols
  tags2desc = {}
  arr = self.cols.inject([]) do |a, c|
    self.extras[:group_by_tag_cols].each do |tc|
      # tag is the suffix after "<col>_"; only valid when tc starts with c.
      tag = tc[(c.length + 1)..-1]
      if tc.starts_with?(c)
        unless tags2desc.key?(tag)
          if tag == "_none_"
            tags2desc[tag] = "[None]"
          else
            entry = Classification.find_by_name([performance[:group_by_category], tag].join("/"))
            tags2desc[tag] = entry.nil? ? tag.titleize : entry.description
          end
        end
        a << [tc, tags2desc[tag]]
      end
    end
    a
  end
  arr.sort! { |a, b| a[1] <=> b[1] }

  # Rotate "[None]" entries to the end, unless every entry is "[None]".
  while arr.first[1] == "[None]"
    arr.push(arr.shift)
  end unless arr.blank? || (arr.first[1] == "[None]" && arr.last[1] == "[None]")

  arr.each do |c, h|
    self.cols.push(c)
    col_order.push(c)
    headers.push(h)
  end

  # Same ordering treatment for the tag => description pairs kept in extras.
  tarr = Array(tags2desc).sort_by { |t| t[1] }
  while tarr.first[1] == "[None]"
    tarr.push(tarr.shift)
  end unless tarr.blank? || (tarr.first[1] == "[None]" && tarr.last[1] == "[None]")

  self.extras[:group_by_tags] = tarr.collect { |a| a[0] }
  self.extras[:group_by_tag_descriptions] = tarr.collect { |a| a[1] }
end
# Fills gaps in a metric time series with synthetic records so time-based
# averages aren't skewed by missing samples. Applies only to
# Metric/MetricRollup rows, and only when averages are computed per time
# interval. Returns the records sorted by resource and timestamp with
# gap-filling records inserted.
def build_add_missing_timestamps(recs)
  return recs unless !recs.empty? && (recs.first.kind_of?(Metric) || recs.first.kind_of?(MetricRollup))
  return recs if db_options && db_options[:calc_avgs_by] && db_options[:calc_avgs_by] != "time_interval" # Only fill in missing timestamps if averages are requested to be based on time

  base_cols = Metric::BASE_COLS - ["id"]
  # Step size matches the series' capture interval: one day or one hour.
  int = recs.first.capture_interval_name == 'daily' ? 1.day.to_i : 1.hour.to_i
  klass = recs.first.class

  last_rec = nil
  results = recs.sort_by { |r| [r.resource_type, r.resource_id.to_s, r.timestamp.iso8601] }.inject([]) do |arr, rec|
    last_rec ||= rec
    # Insert one synthetic record per missing interval between samples;
    # each copies only the base (identity) columns of the previous record.
    while (rec.timestamp - last_rec.timestamp) > int
      base_attrs = last_rec.attributes.reject { |k, _v| !base_cols.include?(k) }
      last_rec = klass.new(base_attrs.merge(:timestamp => (last_rec.timestamp + int)))
      last_rec.inside_time_profile = false if last_rec.respond_to?(:inside_time_profile)
      arr << last_rec
    end
    arr << rec
    last_rec = rec
    arr
  end
  results
end
# Applies the report's TimeProfile to every record that supports it.
# No-op when the report has no time profile configured.
def build_apply_time_profile(results)
  profile = time_profile
  return unless profile

  results.each do |rec|
    rec.apply_time_profile(profile) if rec.respond_to?(:apply_time_profile)
  end
end
# Filters already-built rows for display. The filter may be a MiqExpression
# (evaluated against row hashes), a Proc (used as a select predicate), or a
# simple {:operator, :field, :value} hash; unknown operators match nothing.
def build_apply_display_filter(results)
  return results if display_filter.nil?

  if display_filter.kind_of?(MiqExpression)
    display_filter.context_type = "hash" # Tell MiqExpression that the context objects are hashes
    results.find_all { |h| display_filter.evaluate(h) }
  elsif display_filter.kind_of?(Proc)
    results.select(&display_filter)
  elsif display_filter.kind_of?(Hash)
    op = display_filter[:operator]
    fld = display_filter[:field].to_s
    val = display_filter[:value]
    results.select do |r|
      case op
      when "=" then (r[fld] == val)
      when "!=" then (r[fld] != val)
      when "<" then (r[fld] < val)
      when "<=" then (r[fld] <= val)
      when ">" then (r[fld] > val)
      when ">=" then (r[fld] >= val)
      else
        false
      end
    end
  end
end
# Builds a composite grouping key for a row by joining the values of the
# given key columns with "__".
def get_group_val(row, keys)
  keys.map { |key| row[key] }.join("__")
end
# Records a finished group under its key, folds its row count into the
# grand totals, and computes the group's aggregate values.
def process_group_break(gid, group, totals, result)
  result[gid] = group
  totals[:count] += group[:count]
  process_totals(group)
end
# Summarizes rows according to rpt_options[:pivot]: sorts by the pivot's
# group columns, computes grouped subtotals, then emits one row per group
# with "col__function" columns filled from the requested aggregates.
# Returns the input unchanged when no pivot is configured or data is empty.
def build_pivot(data)
  return data unless rpt_options && rpt_options.key?(:pivot)
  return data if data.blank?

  # Build a tempory table so that ruport sorting can be used to sort data before summarizing pivot data
  column_names = (data.first.keys.collect(&:to_s) + col_order).uniq
  data = Ruport::Data::Table.new(:data => data, :column_names => column_names)
  data = sort_table(data, rpt_options[:pivot][:group_cols].collect(&:to_s), :order => :ascending)

  # build grouping options for subtotal
  # "col__agg" columns declare which aggregates are needed for each base col.
  options = col_order.inject({}) do |h, col|
    next(h) unless col.include?("__")
    c, g = col.split("__")
    h[c] ||= {}
    h[c][:grouping] ||= []
    h[c][:grouping] << g.to_sym
    h
  end

  group_key = rpt_options[:pivot][:group_cols]
  data = generate_subtotals(data, group_key, options)
  data.inject([]) do |a, (k, v)|
    next(a) if k == :_total_
    # Pivoted columns come from the aggregate buckets; plain columns from
    # the group's representative row.
    row = col_order.inject({}) do |h, col|
      if col.include?("__")
        c, g = col.split("__")
        h[col] = v[g.to_sym][c]
      else
        h[col] = v[:row][col]
      end
      h
    end
    a << row
  end
end
# the columns that are needed for this report.
# there may be some columns that are used to derive columns,
# so we currently include '*'
# All column names this report needs: report cols, display-order cols, any
# caller-supplied extras, and columns referenced via the "include" tree.
def cols_for_report(extra_cols = [])
  base = (cols || []) + (col_order || []) + (extra_cols || [])
  (base + build_cols_from_include(include)).uniq
end
# Recursively flattens the nested "include" hash into fully-qualified
# column names of the form "assoc.subassoc.column".
def build_cols_from_include(hash, parent_association = nil)
  return [] if hash.blank?

  hash.each_with_object([]) do |(assoc, opts), acc|
    full_path = get_full_path(parent_association, assoc)
    opts["columns"].each { |c| acc << get_full_path(full_path, c) } if opts.key?("columns")
    acc.concat(build_cols_from_include(opts["include"], full_path) || []) if opts.key?("include")
  end
end
# Wraps each result object in an entry hash ({:obj => obj}) and, when the
# report declares includes, attaches the associated objects to each entry.
def build_includes(objs)
  inc = include.presence || invent_report_includes
  objs.collect do |obj|
    entry = {:obj => obj}
    build_search_includes(obj, entry, inc) if inc
    entry
  end
end
# Recursively loads the associations named in +includes+ for +obj+,
# storing each associated record (with its own nested include entries)
# into +entry+ under the association name. Mutates entry in place.
def build_search_includes(obj, entry, includes)
  includes.each_key do |assoc|
    next unless obj.respond_to?(assoc)

    # Normalize to-one/to-many results into a flat, nil-free array.
    assoc_objects = [obj.send(assoc)].flatten.compact
    entry[assoc.to_sym] = assoc_objects.collect do |rec|
      new_entry = {:obj => rec}
      build_search_includes(rec, new_entry, includes[assoc]["include"]) if includes[assoc]["include"]
      new_entry
    end
  end
end
# simplify to use col_sort_order. "include" won't be necessary)
# Converts one build_includes entry into flat attribute hashes, expanding
# any included associations into additional merged rows.
def build_reportable_data(entry, options, parent_association)
  obj = entry[:obj]
  records = [build_get_attributes_with_options(obj, options)]
  inc = options["include"]
  inc ? build_add_includes(records, entry, inc, parent_association) : records
end
# Builds the attribute hash for a single record, honoring the :only column
# list and optionally prefixing keys with an association path.
#
# NOTE(review): assumes options[:only] is an array whenever :only or
# :except is given — a caller passing only :except would hit nil.each
# below; confirm all callers supply :only.
def build_get_attributes_with_options(rec, options = {})
  only_or_except =
    if options[:only] || options[:except]
      {:only => options[:only], :except => options[:except]}
    end
  return {} unless only_or_except

  attrs = {}
  options[:only].each do |a|
    if self.class.is_trend_column?(a)
      # Trend columns are computed, not read from the record.
      attrs[a] = build_calculate_trend_point(rec, a)
    else
      attrs[a] = rec.send(a) if rec.respond_to?(a)
    end
  end
  # Qualify keys as "<association path>.<column>" when requested.
  attrs = attrs.inject({}) do |h, (k, v)|
    h["#{options[:qualify_attribute_names]}.#{k}"] = v
    h
  end if options[:qualify_attribute_names]
  attrs
end
# Expands one level of associations for a set of flattened rows. For each
# association in +includes+, fetches (or synthesizes, for tag categories)
# the related records and produces the cross-product of existing rows and
# association rows, with the association's attributes merged in.
def build_add_includes(data_records, entry, includes, parent_association)
  include_has_options = includes.kind_of?(Hash)
  associations = include_has_options ? includes.keys : Array(includes)

  associations.each do |association|
    existing_records = data_records.dup
    data_records = []

    full_path = get_full_path(parent_association, association)
    if include_has_options
      assoc_options = includes[association].merge(:qualify_attribute_names => full_path,
                                                  :only => includes[association]["columns"])
    else
      assoc_options = {:qualify_attribute_names => full_path, :only => includes[association]["columns"]}
    end

    if association == "categories" || association == "managed"
      # Tag categories are not real AR associations: synthesize pseudo-record
      # hashes from the object's tags instead of traversing an association.
      association_objects = []
      assochash = {}
      # Cache tag-id => description lookups across the whole build.
      @descriptions_by_tag_id ||= Classification.is_entry.each_with_object({}) do |c, h|
        h[c.tag_id] = c.description
      end
      assoc_options[:only].each do |c|
        entarr = []
        entry[:obj].tags.each do |t|
          next unless t.name.starts_with?("/managed/#{c}/")
          next unless @descriptions_by_tag_id.key?(t.id)
          entarr << @descriptions_by_tag_id[t.id]
        end
        assochash[full_path + "." + c] = entarr unless entarr.empty?
      end
      # join the the category data together
      # One synthesized row per position, padding short lists with their
      # last value so every row has a value for each category column.
      longest = 0
      idx = 0
      assochash.each_key { |k| longest = assochash[k].length if assochash[k].length > longest }
      longest.times do
        nh = {}
        assochash.each_key { |k| nh[k] = assochash[k][idx].nil? ? assochash[k].last : assochash[k][idx] }
        association_objects.push(nh)
        idx += 1
      end
    else
      association_objects = entry[association.to_sym]
    end

    existing_records.each do |existing_record|
      if association_objects.empty?
        # No related rows: keep the base row unchanged.
        data_records << existing_record
      else
        association_objects.each do |obj|
          if association == "categories" || association == "managed"
            association_records = [obj]
          else
            association_records = build_reportable_data(obj, assoc_options, full_path)
          end
          association_records.each do |assoc_record|
            data_records << existing_record.merge(assoc_record)
          end
        end
      end
    end
  end
  data_records
end
# Queues asynchronous generation of a report result via build_report_result.
# Defaults the requesting user to "system" and records an audit event.
# Returns the id of the MiqTask tracking the run.
def queue_report_result(options, res_opts)
  options[:userid] ||= "system"
  _log.info("Adding generate report task to the message queue...")
  task = MiqTask.create(:name => "Generate Report: '#{name}'", :userid => options[:userid])

  MiqQueue.submit_job(
    :service => "reporting",
    :class_name => self.class.name,
    :instance_id => id,
    :method_name => "build_report_result",
    :msg_timeout => queue_timeout,
    :args => [task.id, options, res_opts]
  )
  AuditEvent.success(:event => "generate_table", :target_class => self.class.base_class.name, :target_id => id, :userid => options[:userid], :message => "#{task.name}, successfully initiated")
  task.update_status("Queued", "Ok", "Task has been queued")

  _log.info("Finished adding generate report task with id [#{task.id}] to the message queue")
  task.id
end
# Worker-side counterpart of queue_report_result: generates the table (if
# the task doesn't already carry one), stamps the result with the task id
# and scheduled time, optionally emails it, and clears the task payload.
# Returns the MiqReportResult, or nil if generation failed.
def build_report_result(taskid, options, res_opts = {})
  task = MiqTask.find(taskid)

  # Generate the table only if the task does not already contain a MiqReport object
  if task.task_results.blank?
    _log.info("Generating report table with taskid [#{taskid}] and options [#{options.inspect}]")
    _async_generate_table(taskid, options.merge(:mode => "schedule", :report_source => res_opts[:source]))

    # Reload the task after the _async_generate_table has updated it
    task.reload
    if !task.results_ready?
      _log.warn("Generating report table with taskid [#{taskid}]... Failed to complete, '#{task.message}'")
      return
    else
      _log.info("Generating report table with taskid [#{taskid}]... Complete")
    end
  end

  res_last_run_on = Time.now.utc

  # If a scheduler :at time was provided, convert that to a Time object, otherwise use the current time
  if res_opts[:at]
    unless res_opts[:at].kind_of?(Numeric)
      raise _("Expected scheduled time 'at' to be 'numeric', received '%{type}'") % {:type => res_opts[:at].class}
    end
    at = Time.at(res_opts[:at]).utc
  else
    at = res_last_run_on
  end

  res = task.miq_report_result
  nh = {:miq_task_id => taskid, :scheduled_on => at}
  _log.info("Updating report results with hash: [#{nh.inspect}]")
  res.update_attributes(nh)
  _log.info("Finished creating report result with id [#{res.id}] for report id: [#{id}], name: [#{name}]")

  notify_user_of_report(res_last_run_on, res, options) if options[:send_email]

  # Remove the table in the task_results since we now have it in the report_results
  task.task_results = nil
  task.save

  res
end
# True when the generated table contains at least one row.
# NOTE(review): raises if #table is still nil (report not yet generated) —
# callers appear to invoke this only after generation; confirm.
def table_has_records?
  !table.empty?
end
# Queue message timeout for this report: per-report override via
# rpt_options[:queue_timeout], else the class-wide default.
# (to_i_with_method is a project extension — presumably it also parses
# "1.hour"-style strings; verify against its definition.)
def queue_timeout
  ((rpt_options || {})[:queue_timeout] || self.class.default_queue_timeout).to_i_with_method
end
# Stores a per-report queue timeout override in rpt_options.
def queue_timeout=(value)
  self.rpt_options ||= {}
  self.rpt_options[:queue_timeout] = value
end
#####################################################
# Appends the given suffix to the report title in place.
def append_to_title!(title_suffix)
  self.title += title_suffix
end
# Notes in the report title that results were narrowed by the user's
# filters. No-op when there is no user or the user has no filters.
def append_user_filters_to_title(user)
  return unless user && user.has_filters?

  append_to_title!(" (filtered for #{user.name})")
end
# Resolves the report's effective time zone: the time profile's zone wins,
# then the report's own tz, then the supplied default.
def get_time_zone(default_tz = nil)
  profile_tz = time_profile ? time_profile.tz : nil
  profile_tz || tz || default_tz
end
private
# Joins an association path segment onto its parent path with a dot;
# returns just the (stringified) child when there is no parent.
def get_full_path(parent, child)
  return child.to_s unless parent

  "#{parent}.#{child}"
end
# Preps the current instance and db class for building a report
def _generate_table_prep
  # Make sure the db_klass has the custom_attribute definitions defined for
  # the report being built.
  load_custom_attributes

  # Default time zone in profile to report time zone
  time_profile.tz ||= tz if time_profile

  # Shared query options used by the generate_* strategies.
  self.ext_options = {:tz => tz, :time_profile => time_profile}

  # TODO: these columns need to be converted to real SQL columns
  # only_cols = cols

  # Ensure extras exists so strategies can stash metadata into it.
  self.extras ||= {}
end
# Memoized db_options[:interval]; false when db_options is absent.
# (Because of ||=, the lookup repeats while the value is false/nil — cheap,
# so acceptable.)
def interval
  @interval ||= db_options.present? && db_options[:interval]
end
# Name of the custom results builder ("build_results_for_report_<rpt_type>"),
# or nil when db_options carries no :rpt_type.
def custom_results_method
  @custom_results_method ||= db_options && db_options[:rpt_type] ? "build_results_for_report_#{db_options[:rpt_type]}" : nil
end
end
|
# Capistrano (v2) deployment configuration for the prometheus2.0 app.
# Deploys by hard-resetting an in-place git checkout in current_path rather
# than using Capistrano's release-directory strategy (see path overrides).
require 'bundler/capistrano'
require 'thinking_sphinx/deploy/capistrano'
set :whenever_command, 'bundle exec whenever'
require 'whenever/capistrano'
set :scm, :git
set :repository, "git://github.com/biow0lf/prometheus2.0.git"
set :branch, "origin/master"
set :migrate_target, :current
set :ssh_options, { :forward_agent => true }
set :rails_env, "production"
set :deploy_to, "/home/prometheusapp/www"
set :normalize_asset_timestamps, false
set :user, "prometheusapp"
set :group, "prometheusapp"
set :use_sudo, false
role :app, "packages.altlinux.org"
role :web, "packages.altlinux.org"
role :db, "packages.altlinux.org", :primary => true
# Collapse all release-related paths onto current_path: deploys happen by
# resetting the git checkout in place instead of creating new releases.
set(:latest_release) { fetch(:current_path) }
set(:release_path) { fetch(:current_path) }
set(:current_release) { fetch(:current_path) }
# Revision helpers read directly from the git checkout on the server.
set(:current_revision) { capture("cd #{current_path}; git rev-parse --short HEAD").strip }
set(:latest_revision) { capture("cd #{current_path}; git rev-parse --short HEAD").strip }
set(:previous_revision) { capture("cd #{current_path}; git rev-parse --short HEAD@{1}").strip }
default_environment["RAILS_ENV"] = 'production'
default_run_options[:shell] = 'bash'
# NOTE(review): this overrides the :ssh_options set above (agent forwarding
# off, non-standard port 222) — the earlier setting has no effect.
set :ssh_options, { :forward_agent => false, :port => 222 }
namespace :deploy do
  desc "Deploy your application"
  task :default do
    update
    restart
  end

  desc "Setup your git-based deployment app"
  task :setup, :except => { :no_release => true } do
    dirs = [deploy_to, shared_path]
    dirs += shared_children.map { |d| File.join(shared_path, d) }
    run "#{try_sudo} mkdir -p #{dirs.join(' ')} && #{try_sudo} chmod g+w #{dirs.join(' ')}"
    # Clone straight into current_path; later deploys just fetch + reset.
    run "git clone #{repository} #{current_path}"
  end

  # First deploy: code + schema, no restart (nothing is running yet).
  task :cold do
    update
    migrate
  end

  task :update do
    transaction do
      update_code
    end
  end

  desc "Update the deployed code."
  task :update_code, :except => { :no_release => true } do
    # Hard-reset the in-place checkout to the tracked remote branch.
    run "cd #{current_path}; git fetch origin; git reset --hard #{branch}"
    finalize_update
  end

  desc "Update the database (overwritten to avoid symlink)"
  task :migrations do
    transaction do
      update_code
    end
    migrate
    restart
  end

  # Re-link shared state (logs, pids, per-host configs) into the checkout.
  task :finalize_update, :except => { :no_release => true } do
    run "chmod -R g+w #{latest_release}" if fetch(:group_writable, true)
    # mkdir -p is making sure that the directories are there for some SCM's that don't
    # save empty folders
    run <<-CMD
rm -rf #{latest_release}/log #{latest_release}/public/system #{latest_release}/tmp/pids &&
mkdir -p #{latest_release}/public &&
mkdir -p #{latest_release}/tmp &&
ln -s #{shared_path}/log #{latest_release}/log &&
ln -s #{shared_path}/system #{latest_release}/public/system &&
ln -s #{shared_path}/pids #{latest_release}/tmp/pids &&
ln -sf #{shared_path}/config/database.yml #{latest_release}/config/database.yml &&
ln -sf #{shared_path}/config/newrelic.yml #{latest_release}/config/newrelic.yml &&
ln -sf #{shared_path}/config/redis.yml #{latest_release}/config/redis.yml &&
ln -sf #{shared_path}/config/initializers/devise.rb #{latest_release}/config/initializers/devise.rb &&
ln -sf #{shared_path}/config/initializers/secret_token.rb #{latest_release}/config/initializers/secret_token.rb
    CMD
    # Disabled via :normalize_asset_timestamps above; kept for completeness.
    if fetch(:normalize_asset_timestamps, true)
      stamp = Time.now.utc.strftime("%Y%m%d%H%M.%S")
      asset_paths = fetch(:public_children, %w(images stylesheets javascripts)).map { |p| "#{latest_release}/public/#{p}" }.join(" ")
      run "find #{asset_paths} -exec touch -t #{stamp} {} ';'; true", :env => { "TZ" => "UTC" }
    end
  end

  desc "Zero-downtime restart of Unicorn"
  task :restart, :except => { :no_release => true } do
    # USR2 makes unicorn re-exec itself with the freshly deployed code.
    run "kill -s USR2 `cat /tmp/unicorn.my_site.pid`"
  end

  desc "Start unicorn"
  task :start, :except => { :no_release => true } do
    run "cd #{current_path} && bundle exec unicorn_rails -c config/unicorn.rb -D"
  end

  desc "Stop unicorn"
  task :stop, :except => { :no_release => true } do
    run "kill -s QUIT `cat /tmp/unicorn.my_site.pid`"
  end

  namespace :rollback do
    desc "Moves the repo back to the previous version of HEAD"
    task :repo, :except => { :no_release => true } do
      # Redeploy from the reflog's previous HEAD.
      set :branch, "HEAD@{1}"
      deploy.default
    end

    desc "Rewrite reflog so HEAD@{1} will continue to point to at the next previous release."
    task :cleanup, :except => { :no_release => true } do
      run "cd #{current_path}; git reflog delete --rewrite HEAD@{1}; git reflog delete --rewrite HEAD@{1}"
    end

    desc "Rolls back to the previously deployed version."
    task :default do
      rollback.repo
      rollback.cleanup
    end
  end
end
# Helper: runs a rake task inside the deployed application directory.
def run_rake(cmd)
  run "cd #{current_path}; #{rake} #{cmd}"
end
# after 'deploy:update_code', 'deploy:symlink_all'
#
# namespace :deploy do
# desc "Symlinks all needed files"
# task :symlink_all, :roles => :app do
# # precompile the assets
# run "cd #{release_path} && bundle exec rake assets:precompile"
# end
# end
# task :before_update_code, :roles => [:app] do
# thinking_sphinx.stop
# end
#
# task :after_update_code, :roles => [:app] do
# run "cd #{release_path} && bundle exec rake thinking_sphinx:configure"
# run "cd #{release_path} && bundle exec rake thinking_sphinx:index"
# thinking_sphinx.start
# end
namespace :redis do
  desc "Start the Redis server"
  task :start do
    run "/usr/sbin/redis-server /home/prometheusapp/www/shared/config/redis.conf"
  end

  desc "Stop the Redis server"
  task :stop do
    # No init script in use: issue SHUTDOWN over the wire instead.
    run 'echo "SHUTDOWN" | nc localhost 6379'
  end
end
namespace :memcached do
  desc "Start the memcached server"
  task :start do
    # -d: daemonize; -m 128: cap cache memory at 128 MB.
    run '/usr/bin/memcached -d -m 128'
  end
end
Compile assets
# Capistrano (v2) deployment configuration (later revision: additionally
# precompiles assets during deploy:finalize_update). Deploys by
# hard-resetting an in-place git checkout in current_path.
require 'bundler/capistrano'
require 'thinking_sphinx/deploy/capistrano'
set :whenever_command, 'bundle exec whenever'
require 'whenever/capistrano'
set :scm, :git
set :repository, "git://github.com/biow0lf/prometheus2.0.git"
set :branch, "origin/master"
set :migrate_target, :current
set :ssh_options, { :forward_agent => true }
set :rails_env, "production"
set :deploy_to, "/home/prometheusapp/www"
set :normalize_asset_timestamps, false
set :user, "prometheusapp"
set :group, "prometheusapp"
set :use_sudo, false
role :app, "packages.altlinux.org"
role :web, "packages.altlinux.org"
role :db, "packages.altlinux.org", :primary => true
# Collapse all release-related paths onto current_path: deploys happen by
# resetting the git checkout in place instead of creating new releases.
set(:latest_release) { fetch(:current_path) }
set(:release_path) { fetch(:current_path) }
set(:current_release) { fetch(:current_path) }
# Revision helpers read directly from the git checkout on the server.
set(:current_revision) { capture("cd #{current_path}; git rev-parse --short HEAD").strip }
set(:latest_revision) { capture("cd #{current_path}; git rev-parse --short HEAD").strip }
set(:previous_revision) { capture("cd #{current_path}; git rev-parse --short HEAD@{1}").strip }
default_environment["RAILS_ENV"] = 'production'
default_run_options[:shell] = 'bash'
# NOTE(review): this overrides the :ssh_options set above (agent forwarding
# off, non-standard port 222) — the earlier setting has no effect.
set :ssh_options, { :forward_agent => false, :port => 222 }
namespace :deploy do
  desc "Deploy your application"
  task :default do
    update
    restart
  end

  desc "Setup your git-based deployment app"
  task :setup, :except => { :no_release => true } do
    dirs = [deploy_to, shared_path]
    dirs += shared_children.map { |d| File.join(shared_path, d) }
    run "#{try_sudo} mkdir -p #{dirs.join(' ')} && #{try_sudo} chmod g+w #{dirs.join(' ')}"
    # Clone straight into current_path; later deploys just fetch + reset.
    run "git clone #{repository} #{current_path}"
  end

  # First deploy: code + schema, no restart (nothing is running yet).
  task :cold do
    update
    migrate
  end

  task :update do
    transaction do
      update_code
    end
  end

  desc "Update the deployed code."
  task :update_code, :except => { :no_release => true } do
    # Hard-reset the in-place checkout to the tracked remote branch.
    run "cd #{current_path}; git fetch origin; git reset --hard #{branch}"
    finalize_update
  end

  desc "Update the database (overwritten to avoid symlink)"
  task :migrations do
    transaction do
      update_code
    end
    migrate
    restart
  end

  # Re-link shared state into the checkout, then precompile assets.
  task :finalize_update, :except => { :no_release => true } do
    run "chmod -R g+w #{latest_release}" if fetch(:group_writable, true)
    # mkdir -p is making sure that the directories are there for some SCM's that don't
    # save empty folders
    run <<-CMD
rm -rf #{latest_release}/log #{latest_release}/public/system #{latest_release}/tmp/pids &&
mkdir -p #{latest_release}/public &&
mkdir -p #{latest_release}/tmp &&
ln -s #{shared_path}/log #{latest_release}/log &&
ln -s #{shared_path}/system #{latest_release}/public/system &&
ln -s #{shared_path}/pids #{latest_release}/tmp/pids &&
ln -sf #{shared_path}/config/database.yml #{latest_release}/config/database.yml &&
ln -sf #{shared_path}/config/newrelic.yml #{latest_release}/config/newrelic.yml &&
ln -sf #{shared_path}/config/redis.yml #{latest_release}/config/redis.yml &&
ln -sf #{shared_path}/config/initializers/devise.rb #{latest_release}/config/initializers/devise.rb &&
ln -sf #{shared_path}/config/initializers/secret_token.rb #{latest_release}/config/initializers/secret_token.rb &&
cd #{release_path} && bundle exec rake assets:precompile
    CMD
    # Disabled via :normalize_asset_timestamps above; kept for completeness.
    if fetch(:normalize_asset_timestamps, true)
      stamp = Time.now.utc.strftime("%Y%m%d%H%M.%S")
      asset_paths = fetch(:public_children, %w(images stylesheets javascripts)).map { |p| "#{latest_release}/public/#{p}" }.join(" ")
      run "find #{asset_paths} -exec touch -t #{stamp} {} ';'; true", :env => { "TZ" => "UTC" }
    end
  end

  desc "Zero-downtime restart of Unicorn"
  task :restart, :except => { :no_release => true } do
    # USR2 makes unicorn re-exec itself with the freshly deployed code.
    run "kill -s USR2 `cat /tmp/unicorn.my_site.pid`"
  end

  desc "Start unicorn"
  task :start, :except => { :no_release => true } do
    run "cd #{current_path} && bundle exec unicorn_rails -c config/unicorn.rb -D"
  end

  desc "Stop unicorn"
  task :stop, :except => { :no_release => true } do
    run "kill -s QUIT `cat /tmp/unicorn.my_site.pid`"
  end

  namespace :rollback do
    desc "Moves the repo back to the previous version of HEAD"
    task :repo, :except => { :no_release => true } do
      # Redeploy from the reflog's previous HEAD.
      set :branch, "HEAD@{1}"
      deploy.default
    end

    desc "Rewrite reflog so HEAD@{1} will continue to point to at the next previous release."
    task :cleanup, :except => { :no_release => true } do
      run "cd #{current_path}; git reflog delete --rewrite HEAD@{1}; git reflog delete --rewrite HEAD@{1}"
    end

    desc "Rolls back to the previously deployed version."
    task :default do
      rollback.repo
      rollback.cleanup
    end
  end
end
# Execute a rake task remotely from the current release directory.
# `rake` and `current_path` come from the Capistrano configuration.
def run_rake(cmd)
  remote_command = "cd #{current_path}; #{rake} #{cmd}"
  run remote_command
end
# after 'deploy:update_code', 'deploy:symlink_all'
#
# namespace :deploy do
# desc "Symlinks all needed files"
# task :symlink_all, :roles => :app do
# # precompile the assets
# run "cd #{release_path} && bundle exec rake assets:precompile"
# end
# end
# task :before_update_code, :roles => [:app] do
# thinking_sphinx.stop
# end
#
# task :after_update_code, :roles => [:app] do
# run "cd #{release_path} && bundle exec rake thinking_sphinx:configure"
# run "cd #{release_path} && bundle exec rake thinking_sphinx:index"
# thinking_sphinx.start
# end
namespace :redis do
  desc "Start the Redis server"
  task :start do
    # Daemonization and port are governed by the shared redis.conf.
    run "/usr/sbin/redis-server /home/prometheusapp/www/shared/config/redis.conf"
  end
  desc "Stop the Redis server"
  task :stop do
    # SHUTDOWN on the default port; assumes redis requires no auth here.
    run 'echo "SHUTDOWN" | nc localhost 6379'
  end
end
namespace :memcached do
  desc "Start the memcached server"
  task :start do
    # -d: daemonize, -m 128: cap cache memory at 128 MB.
    run '/usr/bin/memcached -d -m 128'
  end
end
|
# Homebrew formula for lldpd 0.9.3 (Link Layer Discovery Protocol daemon).
class Lldpd < Formula
  desc "Implementation of IEEE 802.1ab (LLDP)"
  homepage "https://vincentbernat.github.io/lldpd/"
  url "http://media.luffy.cx/files/lldpd/lldpd-0.9.3.tar.gz"
  sha256 "cfa8a3245dd71efc1f5de064a72576a7c1afaaf0ccb5833238398b6dc612b127"

  bottle do
    sha256 "a653e4fab4e04a21c1cf30acce6a6aef8747146239ad3e749958be6989696c11" => :el_capitan
    sha256 "2c8789cf6590fc9b9d341f92ec44a439d130502a5137f01d46218a8824abe604" => :yosemite
    sha256 "da4209b5c0a98657602877247777d2b2df55a916745e5fe83e3632e80bdbe03b" => :mavericks
  end

  # Optional features, disabled unless requested at install time.
  option "with-snmp", "Build SNMP subagent support"
  option "with-json", "Build JSON support for lldpcli"

  depends_on "pkg-config" => :build
  depends_on "readline"
  depends_on "libevent"
  depends_on "net-snmp" if build.with? "snmp"
  depends_on "jansson" if build.with? "json"

  def install
    readline = Formula["readline"]
    # Run the daemon unprivileged and chrooted; disable configure's launchd
    # install so Homebrew's own plist (below) is used instead. -DRONLY
    # selects readline's read-only header interface.
    args = [
      "--prefix=#{prefix}",
      "--sysconfdir=#{etc}",
      "--localstatedir=#{var}",
      "--with-xml",
      "--with-readline",
      "--with-privsep-chroot=/var/empty",
      "--with-privsep-user=nobody",
      "--with-privsep-group=nogroup",
      "--with-launchddaemonsdir=no",
      "CPPFLAGS=-I#{readline.include} -DRONLY=1",
      "LDFLAGS=-L#{readline.lib}",
    ]
    args << (build.with?("snmp") ? "--with-snmp" : "--without-snmp")
    args << (build.with?("json") ? "--with-json" : "--without-json")
    system "./configure", *args
    system "make"
    system "make", "install"
  end

  # lldpd expects var/run to exist for its pid/control socket.
  def post_install
    (var/"run").mkpath
  end

  plist_options :startup => true

  def plist
    additional_args = ""
    # -x starts the SNMP AgentX subagent alongside the daemon.
    if build.with? "snmp"
      additional_args += "<string>-x</string>"
    end
    <<-EOS.undent
      <?xml version="1.0" encoding="UTF-8"?>
      <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
      <plist version="1.0">
      <dict>
        <key>Label</key>
        <string>#{plist_name}</string>
        <key>ProgramArguments</key>
        <array>
          <string>#{opt_sbin}/lldpd</string>
          #{additional_args}
        </array>
        <key>RunAtLoad</key><true/>
        <key>KeepAlive</key><true/>
      </dict>
      </plist>
    EOS
  end
end
lldpd: update 0.9.3 bottle.
# Homebrew formula for lldpd 0.9.3 (Link Layer Discovery Protocol daemon);
# this revision only refreshes the bottle checksums.
class Lldpd < Formula
  desc "Implementation of IEEE 802.1ab (LLDP)"
  homepage "https://vincentbernat.github.io/lldpd/"
  url "http://media.luffy.cx/files/lldpd/lldpd-0.9.3.tar.gz"
  sha256 "cfa8a3245dd71efc1f5de064a72576a7c1afaaf0ccb5833238398b6dc612b127"

  bottle do
    sha256 "63c0a76719d7243976cbdd971ce4f65ebb9d1bad7909a8144a3f77d26fa08b0e" => :el_capitan
    sha256 "741ba34732c3ab89031eb42eb797bbe25709912db0877188011c495a4ff79cff" => :yosemite
    sha256 "2685403a92d8825c6ea70ccbfded8d7f019d38d24bf2d951565659aa46e15955" => :mavericks
  end

  # Optional features, disabled unless requested at install time.
  option "with-snmp", "Build SNMP subagent support"
  option "with-json", "Build JSON support for lldpcli"

  depends_on "pkg-config" => :build
  depends_on "readline"
  depends_on "libevent"
  depends_on "net-snmp" if build.with? "snmp"
  depends_on "jansson" if build.with? "json"

  def install
    readline = Formula["readline"]
    # Run the daemon unprivileged and chrooted; disable configure's launchd
    # install so Homebrew's own plist (below) is used instead. -DRONLY
    # selects readline's read-only header interface.
    args = [
      "--prefix=#{prefix}",
      "--sysconfdir=#{etc}",
      "--localstatedir=#{var}",
      "--with-xml",
      "--with-readline",
      "--with-privsep-chroot=/var/empty",
      "--with-privsep-user=nobody",
      "--with-privsep-group=nogroup",
      "--with-launchddaemonsdir=no",
      "CPPFLAGS=-I#{readline.include} -DRONLY=1",
      "LDFLAGS=-L#{readline.lib}",
    ]
    args << (build.with?("snmp") ? "--with-snmp" : "--without-snmp")
    args << (build.with?("json") ? "--with-json" : "--without-json")
    system "./configure", *args
    system "make"
    system "make", "install"
  end

  # lldpd expects var/run to exist for its pid/control socket.
  def post_install
    (var/"run").mkpath
  end

  plist_options :startup => true

  def plist
    additional_args = ""
    # -x starts the SNMP AgentX subagent alongside the daemon.
    if build.with? "snmp"
      additional_args += "<string>-x</string>"
    end
    <<-EOS.undent
      <?xml version="1.0" encoding="UTF-8"?>
      <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
      <plist version="1.0">
      <dict>
        <key>Label</key>
        <string>#{plist_name}</string>
        <key>ProgramArguments</key>
        <array>
          <string>#{opt_sbin}/lldpd</string>
          #{additional_args}
        </array>
        <key>RunAtLoad</key><true/>
        <key>KeepAlive</key><true/>
      </dict>
      </plist>
    EOS
  end
end
|
Added an "unpublish" example.
require 'rubygems'
require 'vortex_client'
# Mark documents on a Vortex (WebDAV) server as unpublished.
#
# Recursively find files under '/people/' whose name matches /\.html$/
# (NOTE(review): the original comment said "name 'index.html'", but the
# :filename regexp matches ANY .html file — confirm which was intended)
# and set their unpublish date to now via a PROPPATCH.
@vortex = Vortex::Connection.new("https://nyweb2-dav.uio.no/", :use_osx_keychain => true)
@vortex.find('/people/', :recursive => true, :filename=>/\.html$/) do |item|
  # Set the vrtx unpublish-date property to the current time (HTTP date format).
  item.proppatch('<v:unpublish-date xmlns:v="vrtx">'+Time.now.httpdate.to_s+'</v:unpublish-date>')
end
|
require 'enumerator'
require 'miq-hash_struct'
class MiqRequestWorkflow
include Vmdb::Logging
include_concern "DialogFieldValidation"
# We rely on MiqRequestWorkflow's descendants to be comprehensive
singleton_class.send(:prepend, DescendantLoader::ArDescendantsWithLoader)
attr_accessor :dialogs, :requester, :values, :last_vm_id
# --- Class-level hooks; subclasses override to opt into behavior. ---

# Automate request name used when querying for dialog names; nil disables it.
def self.automate_dialog_request
  nil
end

# Dialog definition file used when the request supplies none.
def self.default_dialog_file
  nil
end

# Pre-dialog definition file; nil when the workflow has no pre-dialog.
def self.default_pre_dialog_file
  nil
end

# Option keys whose values are stored encrypted (password-style fields).
def self.encrypted_options_fields
  []
end

# Regexps matching the encrypted option keys, e.g. /\[:password\]/.
def self.encrypted_options_field_regs
  encrypted_options_fields.map { |f| /\[:#{f}\]/ }
end

# Union of encrypted option fields across all workflow subclasses.
def self.all_encrypted_options_fields
  descendants.flat_map(&:encrypted_options_fields).uniq
end

# Swap the requester to the user named in data[:user_name] (if present).
# Raises ActiveRecord::RecordNotFound when that user cannot be found.
def self.update_requester_from_parameters(data, user)
  return user if data[:user_name].blank?
  new_user = User.lookup_by_identity(data[:user_name])
  unless new_user
    _log.error("requested not changed to <#{data[:user_name]}> due to a lookup failure")
    raise ActiveRecord::RecordNotFound
  end
  _log.warn("requested changed to <#{new_user.userid}>")
  new_user
end
# Build a workflow instance.
#
# values    - hash of dialog field values (empty on first use)
# requester - a User, or an identity resolvable via User.lookup_by_identity
# options   - merged into @values; recognized keys include
#             :skip_dialog_load and :use_pre_dialog
def initialize(values, requester, options = {})
  instance_var_init(values, requester, options)
  unless options[:skip_dialog_load] == true
    # If this is the first time we are called the values hash will be empty
    # Also skip if we are being called from a web-service
    if @dialogs.nil?
      @dialogs = get_dialogs
      normalize_numeric_fields
    else
      @running_pre_dialog = true if options[:use_pre_dialog] != false
    end
  end
  unless options[:skip_dialog_load] == true
    set_default_values
    update_field_visibility
  end
end

# Initialize instance state without loading dialogs. When
# values[:requester_group] differs from the requester's current group, the
# requester is cloned into that group.
def instance_var_init(values, requester, options)
  @values = values
  @filters = {}
  @requester = requester.kind_of?(User) ? requester : User.lookup_by_identity(requester)
  group_description = values[:requester_group]
  if group_description && group_description != @requester.miq_group_description
    @requester = @requester.clone
    @requester.current_group_by_description = group_description
  end
  @values.merge!(options) unless options.blank?
end
# Helper method when not using workflow
# Validates values, encrypts password fields, then creates or updates the
# request. Returns false on validation failure; an unsaved-but-invalid
# request object is returned so callers can enumerate its errors.
# NOTE(review): the `requester` parameter appears unused here — @requester
# is used throughout; confirm before removing it from the signature.
def make_request(request, values, requester = nil, auto_approve = false)
  return false unless validate(values)
  password_helper(values, true)
  # Ensure that tags selected in the pre-dialog get applied to the request
  values[:vm_tags] = (values[:vm_tags].to_miq_a + @values[:pre_dialog_vm_tags]).uniq if @values.try(:[], :pre_dialog_vm_tags).present?
  set_request_values(values)
  if request
    MiqRequest.update_request(request, values, @requester)
  else
    req = request_class.new(:options => values, :requester => @requester, :request_type => request_type.to_s)
    return req unless req.valid? # TODO: CatalogController#atomic_req_submit is the only one that enumerates over the errors
    values[:__request_type__] = request_type.to_s.presence # Pass this along to MiqRequest#create_request
    request_class.create_request(values, @requester, auto_approve)
  end
end
# Seed init_values with each field's default: a [value, description] pair
# for value-list fields, the raw default otherwise. Fields already present
# in init_values or marked :ignore are skipped.
def init_from_dialog(init_values)
  @dialogs[:dialogs].keys.each do |dialog_name|
    get_all_fields(dialog_name).each_pair do |field_name, field_values|
      next unless init_values[field_name].nil?
      next if field_values[:display] == :ignore
      # val remains nil when the field defines no default.
      if !field_values[:default].nil?
        val = field_values[:default]
      end
      if field_values[:values]
        if field_values[:values].kind_of?(Hash)
          # Save [value, description], skip for timezones array
          init_values[field_name] = [val, field_values[:values][val]]
        else
          field_values[:values].each do |tz|
            if tz[1].to_i_with_method == val.to_i_with_method
              # Save [value, description] for timezones array
              init_values[field_name] = [val, tz[0]]
            end
          end
        end
      else
        # Set to default value
        init_values[field_name] = val
      end
    end
  end
end
# Validate the entered values against the dialog definitions.
# => Input - A hash keyed by field name with entered values
# => Output - true || false
#
# Side effect: updates @dialogs, adding an :error key to fields that fail
# validation (and clearing stale errors on fields that pass).
def validate(values)
  valid = true
  get_all_dialogs(false).each do |d, dlg|
    # Check if the entire dialog is ignored or disabled and check while processing the fields
    dialog_disabled = !dialog_active?(d, dlg, values)
    get_all_fields(d, false).each do |f, fld|
      fld[:error] = nil
      # Check the disabled flag here so we reset the "error" value on each field
      next if dialog_disabled || fld[:display] == :hide
      # array_* fields keep their raw array; scalars unwrap [value, description].
      value = fld[:data_type] =~ /array_/ ? values[f] : get_value(values[f])
      if fld[:required] == true
        # If :required_method is defined let it determine if the field is value
        if fld[:required_method].nil?
          default_require_method = "default_require_#{f}".to_sym
          if self.respond_to?(default_require_method)
            fld[:error] = send(default_require_method, f, values, dlg, fld, value)
            unless fld[:error].nil?
              valid = false
              next
            end
          else
            if value.blank?
              fld[:error] = "#{required_description(dlg, fld)} is required"
              valid = false
              next
            end
          end
        else
          Array.wrap(fld[:required_method]).each do |method|
            fld[:error] = send(method, f, values, dlg, fld, value)
            # Bail out early if we see an error
            break unless fld[:error].nil?
          end
          unless fld[:error].nil?
            valid = false
            next
          end
        end
      end
      if fld[:validation_method] && respond_to?(fld[:validation_method])
        if (fld[:error] = send(fld[:validation_method], f, values, dlg, fld, value))
          valid = false
          next
        end
      end
      next if value.blank?
      msg = "'#{fld[:description]}' in dialog #{dlg[:description]} must be of type #{fld[:data_type]}"
      # BUG FIX: previously the return value was discarded, so a data-type
      # mismatch set fld[:error] but validate still returned true. Fold the
      # result back into `valid` so type errors fail validation.
      valid = validate_data_types(value, fld, msg, valid).first
    end
  end
  valid
end
# Type-check `value` against fld[:data_type]. On mismatch, set fld[:error]
# to msg and flip the valid flag to false. Returns [valid, fld].
def validate_data_types(value, fld, msg, valid)
  type_ok =
    case fld[:data_type]
    when :integer
      is_integer?(value)
    when :float
      is_numeric?(value)
    when :boolean
      true # TODO: do we need validation for boolean
    when :button
      true # Ignore
    when :array_integer
      value.kind_of?(Array)
    else
      value.kind_of?(Object.const_get(fld[:data_type].to_s.camelize))
    end
  unless type_ok
    fld[:error] = msg
    valid = false
  end
  [valid, fld]
end
# Ordered list of dialog (tab) names.
def get_dialog_order
  @dialogs[:dialog_order]
end

# Buttons to render; defaults to submit/cancel when none are defined.
def get_buttons
  @dialogs[:buttons] || [:submit, :cancel]
end
def provisioning_tab_list
dialog_names = @dialogs[:dialog_order].collect(&:to_s)
dialog_descriptions = dialog_names.collect do |dialog_name|
@dialogs.fetch_path(:dialogs, dialog_name.to_sym, :description)
end
dialog_display = dialog_names.collect do |dialog_name|
@dialogs.fetch_path(:dialogs, dialog_name.to_sym, :display)
end
tab_list = []
dialog_names.each_with_index do |dialog_name, index|
next if dialog_display[index] == :hide || dialog_display[index] == :ignore
tab_list << {
:name => dialog_name,
:description => dialog_descriptions[index]
}
end
tab_list
end
# All dialogs, refreshing each field's dynamic values unless refresh_values
# is false.
def get_all_dialogs(refresh_values = true)
  @dialogs[:dialogs].each_key { |d| get_dialog(d, refresh_values) }
  @dialogs[:dialogs]
end

# One dialog definition by name ({} when unknown), refreshing its fields.
def get_dialog(dialog_name, refresh_values = true)
  dialog = @dialogs.fetch_path(:dialogs, dialog_name.to_sym)
  return {} unless dialog
  get_all_fields(dialog_name, refresh_values)
  dialog
end

# A dialog's field hash ({} when the dialog is unknown).
def get_all_fields(dialog_name, refresh_values = true)
  dialog = @dialogs.fetch_path(:dialogs, dialog_name.to_sym)
  return {} unless dialog
  dialog[:fields].each_key { |f| get_field(f, dialog_name, refresh_values) }
  dialog[:fields]
end

# One field definition, optionally re-running its :values_from method to
# refresh dynamic choices. A sole available value is auto-selected (unless
# :auto_select_single is false); a selection no longer offered is cleared.
def get_field(field_name, dialog_name = nil, refresh_values = true)
  field_name = field_name.to_sym
  dialog_name = find_dialog_from_field_name(field_name) if dialog_name.nil?
  field = @dialogs.fetch_path(:dialogs, dialog_name.to_sym, :fields, field_name)
  return {} unless field
  if field.key?(:values_from) && refresh_values
    options = field[:values_from][:options] || {}
    options[:prov_field_name] = field_name
    field[:values] = send(field[:values_from][:method], options)
    # Reset then currently selected item if it no longer appears in the available values
    if field[:values].kind_of?(Hash)
      if field[:values].length == 1
        unless field[:auto_select_single] == false
          @values[field_name] = field[:values].to_a.first
        end
      else
        currently_selected = get_value(@values[field_name])
        unless currently_selected.nil? || field[:values].key?(currently_selected)
          @values[field_name] = [nil, nil]
        end
      end
    end
  end
  field
end
# TODO: Return list in defined ordered
# Yield each (dialog_name, dialog) pair.
def dialogs
  @dialogs[:dialogs].each_pair { |n, d| yield(n, d) }
end

# Yield (field_name, field, dialog_name, dialog) for every field, optionally
# restricted to the given dialog name(s).
def fields(dialog = nil)
  dialog = [*dialog] unless dialog.nil?
  @dialogs[:dialogs].each_pair do |dn, d|
    next unless dialog.blank? || dialog.include?(dn)
    d[:fields].each_pair do |fn, f|
      yield(fn, f, dn, d)
    end
  end
end

# Coerce :integer fields' defaults and value-hash keys to integers in place.
def normalize_numeric_fields
  fields do |_fn, f, _dn, _d|
    if f[:data_type] == :integer
      f[:default] = f[:default].to_i_with_method unless f[:default].blank?
      unless f[:values].blank?
        keys = f[:values].keys.dup
        keys.each { |k| f[:values][k.to_i_with_method] = f[:values].delete(k) }
      end
    end
  end
end

# Helper method to write message to the rails log (production.log) for debugging
def rails_logger(_name, _start)
  # Rails.logger.warn("#{name} #{start.zero? ? 'start' : 'end'}")
end

# Instance-level convenience wrapper over the class method.
def parse_ws_string(text_input, options = {})
  self.class.parse_ws_string(text_input, options)
end
# Parse web-service input into a hash. Hash input is delegated to
# parse_request_parameter_hash; the legacy "k1=v1|k2=v2" string form is
# deprecated. Keys become downcased symbols unless options[:modify_key_name]
# is false.
def self.parse_ws_string(text_input, options = {})
  return parse_request_parameter_hash(text_input, options) if text_input.kind_of?(Hash)
  return {} unless text_input.kind_of?(String)
  deprecated_warn = "method: parse_ws_string, arg Type => String"
  solution = "arg should be a hash"
  MiqAeMethodService::Deprecation.deprecation_warning(deprecated_warn, solution)
  result = {}
  text_input.split('|').each do |value|
    next if value.blank?
    # Split on the first '=' only; values may themselves contain '='.
    idx = value.index('=')
    next if idx.nil?
    key = options[:modify_key_name] == false ? value[0, idx].strip : value[0, idx].strip.downcase.to_sym
    result[key] = value[idx + 1..-1].strip
  end
  result
end
# Normalize a parameter hash: keys become stripped, downcased symbols
# unless options[:modify_key_name] is false; values pass through unchanged.
def self.parse_request_parameter_hash(parameter_hash, options = {})
  normalized = {}
  parameter_hash.each do |key, value|
    key = key.strip.downcase.to_sym unless options[:modify_key_name] == false
    normalized[key] = value
  end
  normalized
end
# Convert a category/value tag string into the matching tag identifiers
# from allowed_tags; unknown category/value pairs are dropped.
# Fix: the inner block parameter previously shadowed the outer |v|
# (Ruby warning, easy source of bugs); renamed for clarity — no behavior change.
def ws_tags(tag_string, parser = :parse_ws_string)
  # Tags are passed as category|value. Example: cc|001|environment|test
  ws_tags = send(parser, tag_string)
  # Build {category_name => {child_name => child_id}} from allowed_tags.
  tags = allowed_tags.each_with_object({}) do |cat, index|
    index[cat[:name]] = cat[:children].each_with_object({}) { |(child_id, child), tc| tc[child[:name]] = child_id }
  end
  ws_tags.collect { |cat, tag| tags.fetch_path(cat.to_s.downcase, tag.downcase) }.compact
end
# @param parser [:parse_ws_string|:parse_ws_string_v1]
# @param additional_values [String] values of the form cc=001|environment=test
# Parse extra web-service values into a symbol-keyed hash.
def ws_values(additional_values, parser = :parse_ws_string, parser_options = {})
  parsed_values = send(parser, additional_values, parser_options)
  parsed_values.each_with_object({}) { |(k, v), ws_values| ws_values[k.to_sym] = v }
end
# Legacy v1 parser: split a "k1|v1|k2|v2" string into stripped
# [key, value] pairs; a trailing unpaired key is dropped.
def parse_ws_string_v1(values, _options = {})
  pairs = []
  values.to_s.split("|").each_slice(2) do |key, val|
    pairs << [key.strip, val.strip] unless val.nil?
  end
  pairs
end
# Name of the dialog that declares field_name, or nil when no dialog does.
def find_dialog_from_field_name(field_name)
  wanted = field_name.to_sym
  match = @dialogs[:dialogs].detect { |_name, dialog| dialog[:fields].key?(wanted) }
  match && match.first
end
# Unwrap a dialog value: [value, description] pairs yield the value,
# scalars pass through unchanged.
def get_value(data)
  if data.kind_of?(Array)
    data.first
  else
    data
  end
end
# Apply the given values into @values. For value-list fields, choose (in
# order) the requested key, the field default, or the first key when sorted,
# storing a [key, description] pair; scalar fields copy through as-is.
def set_or_default_field_values(values)
  field_names = values.keys
  fields do |fn, f, _dn, _d|
    if field_names.include?(fn)
      if f.key?(:values)
        selected_key = nil
        if f[:values].key?(values[fn])
          selected_key = values[fn]
        elsif f.key?(:default) && f[:values].key?(f[:default])
          selected_key = f[:default]
        else
          unless f[:values].blank?
            sorted_values = f[:values].sort
            selected_key = sorted_values.first.first
          end
        end
        @values[fn] = [selected_key, f[:values][selected_key]] unless selected_key.nil?
      else
        @values[fn] = values[fn]
      end
    end
  end
end

# Reset the named fields: [nil, nil] for value-list fields, nil otherwise.
def clear_field_values(field_names)
  fields do |fn, f, _dn, _d|
    if field_names.include?(fn)
      @values[fn] = f.key?(:values) ? [nil, nil] : nil
    end
  end
end
# Select the value-list entry matching `value` and store the [key,
# description] pair in @values[fn]. With partial_key, a downcased substring
# match against the key is used; otherwise either pair element may match.
# Falls back to [nil, nil] when nothing matches or value is nil.
def set_value_from_list(fn, f, value, values = nil, partial_key = false)
  @values[fn] = [nil, nil]
  values = f[:values] if values.nil?
  unless value.nil?
    @values[fn] = values.to_a.detect do |v|
      if partial_key
        _log.warn("comparing [#{v[0]}] to [#{value}]")
        v[0].to_s.downcase.include?(value.to_s.downcase)
      else
        v.include?(value)
      end
    end
    if @values[fn].nil?
      _log.info("set_value_from_list did not matched an item") if partial_key
      @values[fn] = [nil, nil]
    else
      _log.info("set_value_from_list matched item value:[#{value}] to item:[#{@values[fn][0]}]") if partial_key
    end
  end
end

# Toggle a dialog's :display flag (only when it was initially :show) and
# optionally record a <dialog_name>_enabled value.
def show_dialog(dialog_name, show_flag, enabled_flag = nil)
  dialog = @dialogs.fetch_path(:dialogs, dialog_name.to_sym)
  unless dialog.nil?
    dialog[:display_init] = dialog[:display] if dialog[:display_init].nil?
    # If the initial dialog is not set to show then do not modify it here.
    return if dialog[:display_init] != :show
    dialog[:display] = show_flag
    @values["#{dialog_name}_enabled".to_sym] = [enabled_flag] unless enabled_flag.nil?
  end
end
# Human-readable "'dialog/field'" label used in required-field error
# messages; a field's :required_description overrides its :description.
def required_description(dlg, fld)
  field_label = fld[:required_description] || fld[:description]
  "'#{dlg[:description]}/#{field_label}'"
end
# Memoized {expression => name} map of search filters the requester may use
# for the given :category model.
def allowed_filters(options = {})
  model_name = options[:category]
  return @filters[model_name] unless @filters[model_name].nil?
  rails_logger("allowed_filters - #{model_name}", 0)
  @filters[model_name] = @requester.get_expressions(model_name).invert
  rails_logger("allowed_filters - #{model_name}", 1)
  @filters[model_name]
end

# A dialog participates in validation unless it is ignored or its
# <name>_enabled pseudo-field is false/"disabled".
def dialog_active?(name, config, values)
  return false if config[:display] == :ignore
  enabled_field = "#{name}_enabled".to_sym
  # Check if the fields hash contains a <dialog_name>_enabled field
  enabled = get_value(values[enabled_field])
  return false if enabled == false || enabled == "disabled"
  true
end
# Set the display flag (default key :display) on each named field,
# honoring a per-field :display_override when present.
def show_fields(display_flag, field_names, display_field = :display)
  fields do |field_name, field, _dialog_name, _dialog|
    next unless field_names.include?(field_name)
    override = field[:display_override]
    field[display_field] = override.blank? ? display_flag : override
  end
end
# Populate owner_* values (and sysprep_organization) from LDAP for the
# entered owner_email. No-op when the email is blank or the bind fails.
def retrieve_ldap(_options = {})
  email = get_value(@values[:owner_email])
  unless email.blank?
    l = MiqLdap.new
    if l.bind_with_default == true
      raise _("No information returned for %{email}") % {:email => email} if (d = l.get_user_info(email)).nil?
      [:first_name, :last_name, :address, :city, :state, :zip, :country, :title, :company,
       :department, :office, :phone, :phone_mobile, :manager, :manager_mail, :manager_phone].each do |prop|
        @values["owner_#{prop}".to_sym] = d[prop].try(:dup)
      end
      @values[:sysprep_organization] = d[:company].try(:dup)
    end
  end
end

# Default the :schedule_time field to now + options[:offset] when unset.
def default_schedule_time(options = {})
  # TODO: Added support for "default_from", like values_from, that gets called once after dialog creation
  # Update VM description
  fields do |fn, f, _dn, _d|
    if fn == :schedule_time
      f[:default] = Time.now + options[:offset].to_i_with_method if f[:default].nil?
      break
    end
  end
end

# Filter options[:values] down to integer keys below the current maximum,
# taken from @values[options[:field]] or options[:value]; :include_equals
# keeps keys equal to the max. (NOTE(review): the "less_then" spelling is
# historical — renaming would break callers.)
def values_less_then(options)
  results = options[:values].transform_keys(&:to_i_with_method)
  field, include_equals = options[:field], options[:include_equals]
  max_value = field.nil? ? options[:value].to_i_with_method : get_value(@values[field]).to_i_with_method
  return results if max_value <= 0
  results.reject { |k, _v| include_equals == true ? max_value < k : max_value <= k }
end
# Yield (tag_name, category_name) for each selected vm_tag id.
def tags
  vm_tags = @values[:vm_tags]
  return unless vm_tags.kind_of?(Array)
  vm_tags.each do |tag_id|
    tag = Classification.find(tag_id)
    yield(tag.name, tag.parent.name) # yield the tag's name and category
  end
end

# Selected tags rendered as "cat1/tag1:cat2/tag2...".
def get_tags
  tag_string = ''
  tags do |tag, cat|
    tag_string << ':' unless tag_string.empty?
    tag_string << "#{cat}/#{tag}"
  end
  tag_string
end
# Build (and memoize in @tags) the tag categories/values selectable in the
# dialog, honoring :exclude/:include/:single_select/:order options from the
# purpose dialog's vm_tags field when present.
def allowed_tags(options = {})
  return @tags unless @tags.nil?
  region_number = options.delete(:region_number)
  # TODO: Call allowed_tags properly from controller - it is currently hard-coded with no options passed
  field_options = @dialogs.fetch_path(:dialogs, :purpose, :fields, :vm_tags, :options)
  options = field_options unless field_options.nil?
  rails_logger('allowed_tags', 0)
  st = Time.now
  @tags = {}
  exclude_list = options[:exclude].blank? ? [] : options[:exclude].collect(&:to_s)
  include_list = options[:include].blank? ? [] : options[:include].collect(&:to_s)
  single_select = options[:single_select].blank? ? [] : options[:single_select].collect(&:to_s)
  cats = Classification.visible.writeable.managed
  cats = cats.in_region(region_number) if region_number
  cats.each do |t|
    next if exclude_list.include?(t.name)
    next unless include_list.blank? || include_list.include?(t.name)
    # Force passed tags to be single select
    single_value = single_select.include?(t.name) ? true : t.single_value?
    @tags[t.id] = {:name => t.name, :description => t.description, :single_value => single_value, :children => {}, :id => t.id}
  end
  # Attach the child tag entries; "category/tag" names may also be excluded.
  ents = Classification.visible.writeable.parent_ids(@tags.keys).with_tag_name
  ents = ents.in_region(region_number) if region_number
  ents.each do |t|
    full_tag_name = "#{@tags[t.parent_id][:name]}/#{t.name}"
    next if exclude_list.include?(full_tag_name)
    @tags[t.parent_id][:children][t.id] = {:name => t.name, :description => t.description}
  end
  # Drop categories that ended up with no selectable children.
  @tags.delete_if { |_k, v| v[:children].empty? }
  # Now sort the tags based on the order passed options. All remaining tags not defined in the order
  # will be sorted by description and appended to the other sorted tags
  tag_results, tags_to_sort = [], []
  sort_order = options[:order].blank? ? [] : options[:order].collect(&:to_s)
  @tags.each do |_k, v|
    (idx = sort_order.index(v[:name])).nil? ? tags_to_sort << v : tag_results[idx] = v
  end
  tags_to_sort = tags_to_sort.sort_by { |a| a[:description] }
  @tags = tag_results.compact + tags_to_sort
  # Numeric-looking child names sort numerically, otherwise by description.
  @tags.each do |tag|
    tag[:children] = if tag[:children].first.last[:name] =~ /^\d/
                       tag[:children].sort_by { |_k, v| v[:name].to_i }
                     else
                       tag[:children].sort_by { |_k, v| v[:description] }
                     end
  end
  rails_logger('allowed_tags', 1)
  _log.info("allowed_tags returned [#{@tags.length}] objects in [#{Time.now - st}] seconds")
  @tags
end
# allowed_tags plus categories for any pre-dialog tag selections whose
# category would otherwise be missing from the list.
def allowed_tags_and_pre_tags
  pre_tags = @values[:pre_dialog_vm_tags].to_miq_a
  return allowed_tags if pre_tags.blank?
  tag_cats = allowed_tags.dup
  tag_cat_names = tag_cats.collect { |cat| cat[:name] }
  Classification.where(:id => pre_tags).each do |tag|
    parent = tag.parent
    next if tag_cat_names.include?(parent.name)
    new_cat = {:name => parent.name, :description => parent.description, :single_value => parent.single_value?, :children => {}, :id => parent.id}
    parent.children.each { |c| new_cat[:children][c.id] = {:name => c.name, :description => c.description} }
    tag_cats << new_cat
    tag_cat_names << new_cat[:name]
  end
  tag_cats
end

# @values key under which selected tag ids are stored.
def tag_symbol
  :tag_ids
end
# Lightweight MiqHashStruct snapshot of an AR object: :id,
# :evm_object_class plus the listed properties copied over.
def build_ci_hash_struct(ci, props)
  nh = MiqHashStruct.new(:id => ci.id, :evm_object_class => ci.class.base_class.name.to_sym)
  props.each { |p| nh.send("#{p}=", ci.send(p)) }
  nh
end

# Resolve and load the MiqDialog content for this request, preferring an
# explicitly requested dialog, then automate, then the class default.
# Raises MiqException::Error when the dialog cannot be found.
def get_dialogs
  @values[:miq_request_dialog_name] ||= @values[:provision_dialog_name] || dialog_name_from_automate || self.class.default_dialog_file
  dp = @values[:miq_request_dialog_name] = File.basename(@values[:miq_request_dialog_name], ".rb")
  _log.info("Loading dialogs <#{dp}> for user <#{@requester.userid}>")
  d = MiqDialog.find_by("lower(name) = ? and dialog_type = ?", dp.downcase, self.class.base_model.name)
  if d.nil?
    raise MiqException::Error,
          "Dialog cannot be found. Name:[%{name}] Type:[%{type}]" % {:name => @values[:miq_request_dialog_name],
                                                                     :type => self.class.base_model.name}
  end
  d.content
end
# Load the pre-dialog content named by automate, or nil when none is defined.
def get_pre_dialogs
  pre_dialogs = nil
  pre_dialog_name = dialog_name_from_automate('get_pre_dialog_name')
  unless pre_dialog_name.blank?
    pre_dialog_name = File.basename(pre_dialog_name, ".rb")
    d = MiqDialog.find_by(:name => pre_dialog_name, :dialog_type => self.class.base_model.name)
    unless d.nil?
      _log.info("Loading pre-dialogs <#{pre_dialog_name}> for user <#{@requester.userid}>")
      pre_dialogs = d.content
    end
  end
  pre_dialogs
end
# Ask automate for the dialog name, passing current dialog values (plus
# input_fields and extra_attrs) as dialog_input_* attributes. Any
# dialog_option_* attributes returned are copied back into @values.
# Returns nil when automate is not configured or yields no name.
def dialog_name_from_automate(message = 'get_dialog_name', input_fields = [:request_type], extra_attrs = {})
  return nil if self.class.automate_dialog_request.nil?
  _log.info("Querying Automate Profile for dialog name")
  attrs = {'request' => self.class.automate_dialog_request, 'message' => message}
  extra_attrs.each { |k, v| attrs[k] = v }
  @values.each_key do |k|
    key = "dialog_input_#{k.to_s.downcase}"
    if attrs.key?(key)
      _log.info("Skipping key=<#{key}> because already set to <#{attrs[key]}>")
    else
      # vm_tags are passed in their "cat/tag" string form.
      value = (k == :vm_tags) ? get_tags : get_value(@values[k]).to_s
      _log.info("Setting attrs[#{key}]=<#{value}>")
      attrs[key] = value
    end
  end
  input_fields.each { |k| attrs["dialog_input_#{k.to_s.downcase}"] = send(k).to_s }
  ws = MiqAeEngine.resolve_automation_object("REQUEST", @requester, attrs, :vmdb_object => @requester)
  if ws && ws.root
    # Copy dialog_option_* results from the workspace back into @values.
    dialog_option_prefix = 'dialog_option_'
    dialog_option_prefix_length = dialog_option_prefix.length
    ws.root.attributes.each do |key, value|
      next unless key.downcase.starts_with?(dialog_option_prefix)
      next unless key.length > dialog_option_prefix_length
      key = key[dialog_option_prefix_length..-1].downcase
      _log.info("Setting @values[#{key}]=<#{value}>")
      @values[key.to_sym] = value
    end
    name = ws.root("dialog_name")
    return name.presence
  end
  nil
end
# Normalize a request type to a symbol, defaulting to the request class's
# first supported type.
def self.request_type(type)
  type.presence.try(:to_sym) || request_class.request_types.first
end

def request_type
  self.class.request_type(get_value(@values[:request_type]))
end

# Request class for this workflow; service-template requests swap in the
# "<Class>Template" variant.
def request_class
  req_class = self.class.request_class
  return req_class unless get_value(@values[:service_template_request]) == true
  (req_class.name + "Template").constantize
end

# FooWorkflow => FooRequest, derived by naming convention.
def self.request_class
  @workflow_class ||= name.underscore.gsub(/_workflow$/, "_request").camelize.constantize
end

def set_default_values
  # NOTE(review): the inline rescue swallows ALL StandardErrors from
  # set_default_user_info (e.g. LDAP lookup failures) — deliberate
  # best-effort defaulting; confirm before tightening.
  set_default_user_info rescue nil
end
# Default owner_email from the requester and toggle LDAP-related fields
# depending on whether LDAP is configured.
def set_default_user_info
  return if get_dialog(:requester).blank?
  if get_value(@values[:owner_email]).blank? && @requester.email.present?
    @values[:owner_email] = @requester.email
    retrieve_ldap if MiqLdap.using_ldap?
  end
  show_flag = MiqLdap.using_ldap? ? :show : :hide
  show_fields(show_flag, [:owner_load_ldap])
end

# Fill in requester_group and owner_group just before request creation.
def set_request_values(values)
  values[:requester_group] ||= @requester.current_group.description
  email = values[:owner_email]
  if email.present? && values[:owner_group].blank?
    values[:owner_group] = User.find_by_lower_email(email, @requester).try(:miq_group_description)
  end
end

# Encrypt (or decrypt) password fields in place via String#replace, so the
# same string objects held elsewhere see the updated value.
def password_helper(values = @values, encrypt = true)
  self.class.encrypted_options_fields.each do |pwd_key|
    next if values[pwd_key].blank?
    if encrypt
      values[pwd_key].replace(MiqPassword.try_encrypt(values[pwd_key]))
    else
      values[pwd_key].replace(MiqPassword.try_decrypt(values[pwd_key]))
    end
  end
end
# Hook for subclasses to recompute field visibility; no-op here.
def update_field_visibility
end

# Re-resolve sources/targets and refresh all dynamic dialog values after a
# field change; the passed values hash is mutated and merged back.
def refresh_field_values(values)
  st = Time.now
  @values = values
  get_source_and_targets(true)
  # @values gets modified during this call
  get_all_dialogs
  values.merge!(@values)
  # Update the display flag for fields based on current settings
  update_field_visibility
  _log.info("refresh completed in [#{Time.now - st}] seconds")
rescue => err
  $log.log_backtrace(err)
  raise
end
# Run the relationship methods and perform set intersections on the returned values.
# Optional starting set of results maybe passed in.
# Returns a {id => name} hash built from the intersected pairs, optionally
# restricted to filtered_ids.
def allowed_ci(ci, relats, sources, filtered_ids = nil)
  result = nil
  relats.each do |rsc_type|
    rails_logger("allowed_ci - #{rsc_type}_to_#{ci}", 0)
    rc = send("#{rsc_type}_to_#{ci}", sources)
    rails_logger("allowed_ci - #{rsc_type}_to_#{ci}", 1)
    unless rc.nil?
      rc = rc.to_a
      result = result.nil? ? rc : result & rc
    end
  end
  result = [] if result.nil?
  result.reject! { |k, _v| !filtered_ids.include?(k) } unless filtered_ids.nil?
  result.each_with_object({}) { |s, hash| hash[s[0]] = s[1] }
end

# Apply the MiqSearch filter selected in @values[filter_prop] to targets.
def process_filter(filter_prop, ci_klass, targets)
  rails_logger("process_filter - [#{ci_klass}]", 0)
  filter_id = get_value(@values[filter_prop]).to_i
  MiqSearch.filtered(filter_id, ci_klass, targets,
                     :user      => @requester,
                     :miq_group => @requester.current_group,
                    ).tap { rails_logger("process_filter - [#{ci_klass}]", 1) }
end
# All EMS metadata objects of the given class (e.g. Host, EmsCluster).
def find_all_ems_of_type(klass, src = nil)
  result = []
  each_ems_metadata(src, klass) { |ci| result << ci }
  result
end

def find_hosts_under_ci(item)
  find_classes_under_ci(item, Host)
end

def find_respools_under_ci(item)
  find_classes_under_ci(item, ResourcePool)
end

# Collect all objects of klass beneath item in the cached EMS tree.
def find_classes_under_ci(item, klass)
  results = []
  return results if item.nil?
  @_find_classes_under_ci_prefix ||= _log.prefix
  node = load_ems_node(item, @_find_classes_under_ci_prefix)
  each_ems_metadata(node.attributes[:object], klass) { |ci| results << ci } unless node.nil?
  results
end

# Look up item's node in the cached @ems_xml_nodes tree; logs an error and
# returns nil on a cache miss.
def load_ems_node(item, log_header)
  @ems_xml_nodes ||= {}
  klass_name = item.kind_of?(MiqHashStruct) ? item.evm_object_class : item.class.base_class.name
  node = @ems_xml_nodes["#{klass_name}_#{item.id}"]
  $log.error("#{log_header} Resource <#{klass_name}_#{item.id} - #{item.name}> not found in cached resource tree.") if node.nil?
  node
end

# True when the EMS metadata tree contains at least one cluster.
def ems_has_clusters?
  found = each_ems_metadata(nil, EmsCluster) { |ci| break(ci) }
  return found.evm_object_class == :EmsCluster if found.kind_of?(MiqHashStruct)
  false
end
# Recursively build {folder_id => "path / to / folder"} for EMS folders,
# skipping Datacenter entries and pruning hidden folders other than 'vm'.
def get_ems_folders(folder, dh = {}, full_path = "")
  if folder.evm_object_class == :EmsFolder
    if folder.hidden
      return dh if folder.name != 'vm'
    else
      full_path += full_path.blank? ? folder.name.to_s : " / #{folder.name}"
      dh[folder.id] = full_path unless folder.type == "Datacenter"
    end
  end
  # Process child folders
  @_get_ems_folders_prefix ||= _log.prefix
  node = load_ems_node(folder, @_get_ems_folders_prefix)
  node.children.each { |child| get_ems_folders(child.attributes[:object], dh, full_path) } unless node.nil?
  dh
end

# Recursively build {respool_id => "path"} for ResourcePool nodes.
def get_ems_respool(node, dh = {}, full_path = "")
  return if node.nil?
  if node.kind_of?(XmlHash::Element)
    folder = node.attributes[:object]
    if node.name == :ResourcePool
      full_path += full_path.blank? ? folder.name.to_s : " / #{folder.name}"
      dh[folder.id] = full_path
    end
  end
  # Process child folders
  node.children.each { |child| get_ems_respool(child, dh, full_path) }
  dh
end
def find_datacenter_for_ci(item, ems_src = nil)
  find_class_above_ci(item, EmsFolder, ems_src, true)
end

# Hosts able to serve a resource pool: the direct parent host when present,
# otherwise every host under the owning cluster.
def find_hosts_for_respool(item, ems_src = nil)
  hosts = find_class_above_ci(item, Host, ems_src)
  return [hosts] unless hosts.blank?
  cluster = find_cluster_above_ci(item)
  find_hosts_under_ci(cluster)
end

def find_cluster_above_ci(item, ems_src = nil)
  find_class_above_ci(item, EmsCluster, ems_src)
end

# Walk up the cached EMS tree from item to the first ancestor of klass;
# with datacenter=true, only an EmsFolder of type "Datacenter" matches.
def find_class_above_ci(item, klass, _ems_src = nil, datacenter = false)
  result = nil
  @_find_class_above_ci_prefix ||= _log.prefix
  node = load_ems_node(item, @_find_class_above_ci_prefix)
  klass_name = klass.name.to_sym
  # Walk the xml document parents to find the requested class
  while node.kind_of?(XmlHash::Element)
    ci = node.attributes[:object]
    if node.name == klass_name && (datacenter == false || datacenter == true && ci.type == "Datacenter")
      result = ci
      break
    end
    node = node.parent
  end
  result
end
def each_ems_metadata(ems_ci = nil, klass = nil, &_blk)
if ems_ci.nil?
src = get_source_and_targets
ems_xml = get_ems_metadata_tree(src)
ems_node = ems_xml.try(:root)
else
@_each_ems_metadata_prefix ||= _log.prefix
ems_node = load_ems_node(ems_ci, @_each_ems_metadata_prefix)
end
klass_name = klass.name.to_sym unless klass.nil?
unless ems_node.nil?
ems_node.each_recursive { |node| yield(node.attributes[:object]) if klass.nil? || klass_name == node.name }
end
end
# Builds (once per workflow instance) an XmlHash representation of the EMS
# relationship tree, with cluster hosts grafted in, and indexes every node
# in @ems_xml_nodes for O(1) lookup by load_ems_node.
# NOTE: the early `return` inside the memo block exits the method entirely
# when src[:ems] is nil, leaving @ems_metadata_tree unset for a later call.
def get_ems_metadata_tree(src)
@ems_metadata_tree ||= begin
return if src[:ems].nil?
st = Time.zone.now
result = load_ar_obj(src[:ems]).fulltree_arranged(:except_type => "VmOrTemplate")
ems_metadata_tree_add_hosts_under_clusters!(result)
@ems_xml_nodes = {}
xml = MiqXml.newDoc(:xmlhash)
convert_to_xml(xml, result)
_log.info("EMS metadata collection completed in [#{Time.zone.now - st}] seconds")
xml
end
end
# Recursively grafts each cluster's hosts into the arranged relationship
# tree as empty leaf subtrees, mutating +result+ in place.
def ems_metadata_tree_add_hosts_under_clusters!(result)
  result.each do |tree_node, subtree|
    ems_metadata_tree_add_hosts_under_clusters!(subtree)
    next unless tree_node.kind_of?(EmsCluster)
    tree_node.hosts.each { |host| subtree[host] = {} }
  end
end
# Converts the arranged AR tree into nested XmlHash elements. Each node is
# registered in @ems_xml_nodes under "<BaseClass>_<id>" for fast lookup and
# carries a lightweight hash-struct snapshot of the record as :object.
def convert_to_xml(xml, result)
result.each do |obj, children|
@ems_xml_nodes["#{obj.class.base_class}_#{obj.id}"] = node = xml.add_element(obj.class.base_class.name, :object => ci_to_hash_struct(obj))
convert_to_xml(node, children)
end
end
# Resolves a dialog selection into +result+: stores the selected id under
# <key>_id (normalizing 0 to nil) and, when an id is present, a hash-struct
# snapshot of the loaded record under <key>.
def add_target(dialog_key, key, klass, result)
key_id = "#{key}_id".to_sym
result[key_id] = get_value(@values[dialog_key])
result[key_id] = nil if result[key_id] == 0
result[key] = ci_to_hash_struct(klass.find_by(:id => result[key_id])) unless result[key_id].nil?
end
# Converts an AR object (or a collection of them) into a lightweight
# MiqHashStruct. Dispatches to a model-specific private helper named
# "<base_class>_to_hash_struct" when one is defined on this workflow.
def ci_to_hash_struct(ci)
return if ci.nil?
return ci.collect { |c| ci_to_hash_struct(c) } if ci.respond_to?(:collect)
method_name = "#{ci.class.base_class.name.underscore}_to_hash_struct".to_sym
# respond_to?(..., true) so private per-model helpers are found.
return send(method_name, ci) if respond_to?(method_name, true)
default_ci_to_hash_struct(ci)
end
# Per-model hash-struct converters dispatched from ci_to_hash_struct.
# Each snapshots only the attributes the provisioning dialogs need.
def host_to_hash_struct(ci)
build_ci_hash_struct(ci, [:name, :vmm_product, :vmm_version, :state, :v_total_vms, :maintenance])
end
def vm_or_template_to_hash_struct(ci)
v = build_ci_hash_struct(ci, [:name, :platform])
# Snapshots are embedded so the dialog can offer snapshot selection.
v.snapshots = ci.snapshots.collect { |si| ci_to_hash_struct(si) }
v
end
def ems_folder_to_hash_struct(ci)
build_ci_hash_struct(ci, [:name, :type, :hidden])
end
def storage_to_hash_struct(ci)
# Pre-join cluster names; nil when the datastore is not in any cluster.
storage_clusters = ci.storage_clusters.blank? ? nil : ci.storage_clusters.collect(&:name).join(', ')
build_ci_hash_struct(ci, [:name, :free_space, :total_space, :storage_domain_type]).tap do |hs|
hs.storage_clusters = storage_clusters
end
end
def snapshot_to_hash_struct(ci)
build_ci_hash_struct(ci, [:name, :current?])
end
# NOTE(review): :typ (not :type) looks like CustomizationSpec's actual
# attribute name — confirm against the model before "fixing" it.
def customization_spec_to_hash_struct(ci)
build_ci_hash_struct(ci, [:name, :typ, :description, :last_update_time, :is_sysprep_spec?])
end
# Reloads the ActiveRecord object behind a MiqHashStruct snapshot.
# Arrays are mapped element-wise; non-snapshot objects pass through as-is.
def load_ar_obj(ci)
return load_ar_objs(ci) if ci.kind_of?(Array)
return ci unless ci.kind_of?(MiqHashStruct)
ci.evm_object_class.to_s.camelize.constantize.find_by(:id => ci.id)
end
def load_ar_objs(ci)
ci.collect { |i| load_ar_obj(i) }
end
# Return empty hash if we are selecting placement automatically so we do not
# spend time determining all the available resources
def resources_for_ui
get_source_and_targets
end
# Hosts the requester may target, scoped to the selected EMS and optionally
# intersected with the selected storage's hosts and the selected datacenter.
# Returns the Rbac-filtered host list (empty Array when nothing qualifies).
def allowed_hosts_obj(options = {})
  return [] if (src = resources_for_ui).blank? || src[:ems].nil?
  datacenter = src[:datacenter] || options[:datacenter]
  rails_logger('allowed_hosts_obj', 0)
  st = Time.now
  hosts_ids = find_all_ems_of_type(Host).collect(&:id)
  hosts_ids &= load_ar_obj(src[:storage]).hosts.collect(&:id) unless src[:storage].nil?
  if datacenter
    @_allowed_hosts_obj_prefix ||= _log.prefix
    dc_node = load_ems_node(datacenter, @_allowed_hosts_obj_prefix)
    hosts_ids &= find_hosts_under_ci(dc_node.attributes[:object]).collect(&:id)
  end
  return [] if hosts_ids.blank?
  # Remove any hosts that are no longer in the list.
  # Pass a lazy scope instead of an instantiated Array so the Host records
  # are loaded only once, after Rbac has applied the user filter.
  all_hosts = load_ar_obj(src[:ems]).hosts.where(:id => hosts_ids)
  allowed_hosts_obj_cache = process_filter(:host_filter, Host, all_hosts)
  _log.info("allowed_hosts_obj returned [#{allowed_hosts_obj_cache.length}] objects in [#{Time.now - st}] seconds")
  rails_logger('allowed_hosts_obj', 1)
  allowed_hosts_obj_cache
end
# Datastores writable from the allowed (or explicitly selected) hosts,
# optionally restricted to a selected storage profile, then passed through
# the :ds_filter and snapshotted as hash-structs for the dialog.
def allowed_storages(_options = {})
return [] if (src = resources_for_ui).blank? || src[:ems].nil?
hosts = src[:host].nil? ? allowed_hosts_obj({}) : [load_ar_obj(src[:host])]
return [] if hosts.blank?
rails_logger('allowed_storages', 0)
st = Time.now
MiqPreloader.preload(hosts, :storages)
# Hash keyed by id de-duplicates storages shared by several hosts.
storages = hosts.each_with_object({}) do |host, hash|
host.writable_storages.each { |s| hash[s.id] = s }
end.values
selected_storage_profile_id = get_value(@values[:placement_storage_profile])
if selected_storage_profile_id
# OPTIMIZE: pluck runs one query per storage here (N+1); a single
# join/pluck against the selected profile would avoid that.
storages.reject! { |s| !s.storage_profiles.pluck(:id).include?(selected_storage_profile_id) }
end
allowed_storages_cache = process_filter(:ds_filter, Storage, storages).collect do |s|
ci_to_hash_struct(s)
end
_log.info("allowed_storages returned [#{allowed_storages_cache.length}] objects in [#{Time.now - st}] seconds")
rails_logger('allowed_storages', 1)
allowed_storages_cache
end
# Hosts selectable in the dialog: the Rbac-allowed hosts, further narrowed
# by any cluster/respool/folder choices already made.
def allowed_hosts(_options = {})
  candidate_ids = allowed_hosts_obj.collect(&:id)
  narrowed = allowed_ci(:host, [:cluster, :respool, :folder], candidate_ids)
  permitted_ids = narrowed.to_a.transpose.first
  return [] if permitted_ids.nil?
  find_all_ems_of_type(Host).select { |host| permitted_ids.include?(host.id) }
end
# Datacenters consistent with the other placement selections.
def allowed_datacenters(_options = {})
allowed_ci(:datacenter, [:cluster, :respool, :host, :folder])
end
# Clusters on the selected EMS, passed through the :cluster_filter.
def allowed_clusters(_options = {})
all_clusters = EmsCluster.where(:ems_id => get_source_and_targets[:ems].try(:id))
filtered_targets = process_filter(:cluster_filter, EmsCluster, all_clusters)
allowed_ci(:cluster, [:respool, :host, :folder], filtered_targets.collect(&:id))
end
# Resource pools on the selected EMS, passed through the :rp_filter.
def allowed_respools(_options = {})
all_resource_pools = ResourcePool.where(:ems_id => get_source_and_targets[:ems].try(:id))
filtered_targets = process_filter(:rp_filter, ResourcePool, all_resource_pools)
allowed_ci(:respool, [:cluster, :host, :folder], filtered_targets.collect(&:id))
end
alias_method :allowed_resource_pools, :allowed_respools
# Folders consistent with the other placement selections.
def allowed_folders(_options = {})
allowed_ci(:folder, [:cluster, :host, :respool])
end
# The *_to_datacenter methods return {datacenter_id => name} for the
# datacenters reachable from the given selection (nil when not applicable).
def cluster_to_datacenter(src)
return nil unless ems_has_clusters?
ci_to_datacenter(src, :cluster, EmsCluster)
end
def respool_to_datacenter(src)
ci_to_datacenter(src, :respool, ResourcePool)
end
def host_to_datacenter(src)
ci_to_datacenter(src, :host, Host)
end
def folder_to_datacenter(src)
return nil if src[:folder].nil?
ci_to_datacenter(src, :folder, EmsFolder)
end
# Maps the selected CI (or all CIs of +ci_type+ when none selected) to the
# datacenters above them, as an id => name Hash.
def ci_to_datacenter(src, ci, ci_type)
sources = src[ci].nil? ? find_all_ems_of_type(ci_type) : [src[ci]]
sources.collect { |c| find_datacenter_for_ci(c) }.compact.uniq.each_with_object({}) { |c, r| r[c.id] = c.name }
end
# The *_to_cluster methods return {cluster_id => name} for the clusters
# implied by the given selection; nil when the EMS has no clusters.
def respool_to_cluster(src)
return nil unless ems_has_clusters?
sources = src[:respool].nil? ? find_all_ems_of_type(ResourcePool) : [src[:respool]]
build_id_to_name_hash(sources.collect { |rp| find_cluster_above_ci(rp) }.compact)
end
def host_to_cluster(src)
return nil unless ems_has_clusters?
sources = src[:host].nil? ? allowed_hosts_obj : [src[:host]]
build_id_to_name_hash(sources.collect { |h| find_cluster_above_ci(h) }.compact)
end
def folder_to_cluster(src)
return nil unless ems_has_clusters?
source = find_all_ems_of_type(EmsCluster)
# A selected folder restricts clusters to its datacenter.
build_id_to_name_hash(filter_to_objects_in_same_datacenter(source, src))
end
# The *_to_respool methods return {respool_id => full_path} for the pools
# implied by the given selection, using the path index from get_ems_respool.
def cluster_to_respool(src)
return nil unless ems_has_clusters?
targets = src[:cluster].nil? ? find_all_ems_of_type(ResourcePool) : find_respools_under_ci(src[:cluster])
res_pool_with_path = get_ems_respool(get_ems_metadata_tree(src))
targets.each_with_object({}) { |rp, r| r[rp.id] = res_pool_with_path[rp.id] }
end
def folder_to_respool(src)
return nil if src[:folder].nil?
datacenter = find_datacenter_for_ci(src[:folder])
targets = find_respools_under_ci(datacenter)
res_pool_with_path = get_ems_respool(get_ems_metadata_tree(src))
targets.each_with_object({}) { |rp, r| r[rp.id] = res_pool_with_path[rp.id] }
end
def host_to_respool(src)
hosts = src[:host].nil? ? allowed_hosts_obj : [src[:host]]
targets = hosts.collect do |h|
# Pools hang off the cluster when the host is clustered, else the host.
cluster = find_cluster_above_ci(h)
source = cluster.nil? ? h : cluster
find_respools_under_ci(source)
end.flatten
res_pool_with_path = get_ems_respool(get_ems_metadata_tree(src))
targets.each_with_object({}) { |rp, r| r[rp.id] = res_pool_with_path[rp.id] }
end
# The *_to_host methods return {host_id => name} for the hosts implied by
# the given selection.
def cluster_to_host(src)
return nil unless ems_has_clusters?
hosts = src[:cluster].nil? ? find_all_ems_of_type(Host) : find_hosts_under_ci(src[:cluster])
build_id_to_name_hash(hosts)
end
def respool_to_host(src)
hosts = src[:respool].nil? ? find_all_ems_of_type(Host) : find_hosts_for_respool(src[:respool])
build_id_to_name_hash(hosts)
end
def folder_to_host(src)
source = find_all_ems_of_type(Host)
# A selected folder restricts hosts to its datacenter.
build_id_to_name_hash(filter_to_objects_in_same_datacenter(source, src))
end
# The *_to_folder methods return {folder_id => path} for folders in the
# datacenters owning the given selection (see get_ems_folders).
def host_to_folder(src)
sources = src[:host].nil? ? allowed_hosts_obj : [src[:host]]
datacenters = sources.collect do |h|
rails_logger("host_to_folder for host #{h.name}", 0)
result = find_datacenter_for_ci(h)
rails_logger("host_to_folder for host #{h.name}", 1)
result
end.compact
datacenters.each_with_object({}) do |dc, folders|
rails_logger("host_to_folder for dc #{dc.name}", 0)
folders.merge!(get_ems_folders(dc))
rails_logger("host_to_folder for dc #{dc.name}", 1)
end
end
def cluster_to_folder(src)
return nil unless ems_has_clusters?
return nil if src[:cluster].nil?
sources = [src[:cluster]]
datacenters = sources.collect { |h| find_datacenter_for_ci(h) }.compact
datacenters.each_with_object({}) { |dc, folders| folders.merge!(get_ems_folders(dc)) }
end
def respool_to_folder(src)
return nil if src[:respool].nil?
sources = [src[:respool]]
datacenters = sources.collect { |h| find_datacenter_for_ci(h) }.compact
datacenters.each_with_object({}) { |dc, folders| folders.merge!(get_ems_folders(dc)) }
end
# Applies a web-service supplied value for +key+ to +values+, casting it to
# the dialog field's :data_type and, for list-backed fields, resolving it to
# a [key, description] pair against the field's refreshed value list.
# NOTE: the "Unable to find value" warning also fires for fields that have
# no :values list at all, since +result+ stays nil on that path.
def set_ws_field_value(values, key, data, dialog_name, dlg_fields)
value = data.delete(key)
dlg_field = dlg_fields[key]
data_type = dlg_field[:data_type]
set_value = cast_value(value, data_type)
result = nil
if dlg_field.key?(:values)
# Refresh source/targets and the field so its value list is current.
get_source_and_targets(true)
get_field(key, dialog_name)
field_values = dlg_field[:values]
_log.info("processing key <#{dialog_name}:#{key}(#{data_type})> with values <#{field_values.inspect}>")
if field_values.present?
result = if field_values.first.kind_of?(MiqHashStruct)
found = field_values.detect { |v| v.id == set_value }
[found.id, found.name] if found
elsif data_type == :array_integer
# Keep only the requested ids that actually exist in the list.
field_values.keys & set_value
else
[set_value, field_values[set_value]] if field_values.key?(set_value)
end
set_value = apply_result(result, data_type)
end
end
_log.warn("Unable to find value for key <#{dialog_name}:#{key}(#{data_type})> with input value <#{set_value.inspect}>. No matching item found.") if result.nil?
_log.info("setting key <#{dialog_name}:#{key}(#{data_type})> to value <#{set_value.inspect}>")
values[key] = set_value
end
# Casts a raw web-service string into the dialog field's :data_type.
# Unknown types (and :button) pass the value through unchanged.
# to_i_with_method / to_miq_a are ManageIQ core extensions.
def cast_value(value, data_type)
case data_type
when :integer then value.to_i_with_method
when :float then value.to_f
when :boolean then value.to_s.downcase.in?(%w(true t))
when :time then Time.zone.parse(value)
when :button then value # Ignore
when :array_integer then value.to_miq_a.map!(&:to_i)
else value # Ignore
end
end
# Like set_ws_field_value, but matches the supplied value against each list
# entry's display attribute (+obj_key+, default :name) case-insensitively
# instead of its key/id. Only updates +values+ when a match is found.
def set_ws_field_value_by_display_name(values, key, data, dialog_name, dlg_fields, obj_key = :name)
value = data.delete(key)
dlg_field = dlg_fields[key]
data_type = dlg_field[:data_type]
find_value = value.to_s.downcase
if dlg_field.key?(:values)
field_values = dlg_field[:values]
_log.info("processing key <#{dialog_name}:#{key}(#{data_type})> with values <#{field_values.inspect}>")
if field_values.present?
result = if field_values.first.kind_of?(MiqHashStruct)
found = field_values.detect { |v| v.send(obj_key).to_s.downcase == find_value }
[found.id, found.send(obj_key)] if found
else
# Hash lists yield a [key, display] pair from detect.
field_values.detect { |_k, v| v.to_s.downcase == find_value }
end
if result.nil?
_log.warn("Unable to set key <#{dialog_name}:#{key}(#{data_type})> to value <#{find_value.inspect}>. No matching item found.")
else
set_value = [result.first, result.last]
_log.info("setting key <#{dialog_name}:#{key}(#{data_type})> to value <#{set_value.inspect}>")
values[key] = set_value
end
end
end
end
# Sets a dialog field from web-service data that may arrive either as an id
# (under +data_key+, optionally prefixed with +id_klass+) or, failing that,
# as a display name under the same key minus its "_id" suffix.
def set_ws_field_value_by_id_or_name(values, dlg_field, data, dialog_name, dlg_fields, data_key = nil, id_klass = nil)
data_key = dlg_field if data_key.blank?
if data.key?(data_key)
data[data_key] = "#{id_klass}::#{data[data_key]}" unless id_klass.blank?
data[dlg_field] = data.delete(data_key)
set_ws_field_value(values, dlg_field, data, dialog_name, dlg_fields)
else
data_key_without_id = data_key.to_s.chomp('_id').to_sym
if data.key?(data_key_without_id)
data[data_key] = data.delete(data_key_without_id)
data[dlg_field] = data.delete(data_key)
set_ws_field_value_by_display_name(values, dlg_field, data, dialog_name, dlg_fields, :name)
end
end
end
# Field definitions for +dialog_name+, or nil (with an info log) when the
# dialog does not exist in this workflow.
def get_ws_dialog_fields(dialog_name)
dlg_fields = @dialogs.fetch_path(:dialogs, dialog_name, :fields)
_log.info("<#{dialog_name}> dialog not found in dialogs. Field updates will be skipped.") if dlg_fields.nil?
dlg_fields
end
# Customization templates compatible with the selected ISO/PXE image and
# this workflow's provision type ("host" or "vm"). As a side effect, stores
# the currently-selected template's script in @values.
def allowed_customization_templates(_options = {})
result = []
customization_template_id = get_value(@values[:customization_template_id])
@values[:customization_template_script] = nil if customization_template_id.nil?
prov_typ = self.class == MiqHostProvisionWorkflow ? "host" : "vm"
image = supports_iso? ? get_iso_image : get_pxe_image
unless image.nil?
result = image.customization_templates.collect do |c|
# filter customization templates by provision type
if c.pxe_image_type.provision_type.blank? || c.pxe_image_type.provision_type == prov_typ
@values[:customization_template_script] = c.script if c.id == customization_template_id
build_ci_hash_struct(c, [:name, :description, :updated_at])
end
end.compact
end
@values[:customization_template_script] = nil if result.blank?
result
end
def get_iso_image
get_image_by_type(:iso_image_id)
end
def get_pxe_image
get_image_by_type(:pxe_image_id)
end
# Loads the image selected in the dialog. The stored value is of the form
# "Klass::id" because image lists mix model classes (see allowed_images).
def get_image_by_type(image_type)
klass, id = get_value(@values[image_type]).to_s.split('::')
return nil if id.blank?
klass.constantize.find_by(:id => id)
end
# The PXE server selected in the dialog, or nil.
def get_pxe_server
PxeServer.find_by(:id => get_value(@values[:pxe_server_id]))
end
# All PXE servers as an {id => name} Hash for the dialog dropdown.
def allowed_pxe_servers(_options = {})
PxeServer.all.each_with_object({}) { |p, h| h[p.id] = p.name }
end
# Hash-structs for the selected PXE server's images that match this
# workflow's provision type; Windows-default images are excluded because
# they are offered separately via allowed_windows_images.
def allowed_pxe_images(_options = {})
pxe_server = get_pxe_server
return [] if pxe_server.nil?
prov_typ = self.class == MiqHostProvisionWorkflow ? "host" : "vm"
pxe_server.pxe_images.collect do |p|
next if p.pxe_image_type.nil? || p.default_for_windows
# filter pxe images by provision_type to show vm/any or host/any
build_ci_hash_struct(p, [:name, :description]) if p.pxe_image_type.provision_type.blank? || p.pxe_image_type.provision_type == prov_typ
end.compact
end
# Hash-structs for the selected PXE server's Windows images.
def allowed_windows_images(_options = {})
pxe_server = get_pxe_server
return [] if pxe_server.nil?
pxe_server.windows_images.collect do |p|
build_ci_hash_struct(p, [:name, :description])
end.compact
end
# Combined PXE + Windows image list for the dialog.
def allowed_images(options = {})
result = allowed_pxe_images(options) + allowed_windows_images(options)
# Change the ID to contain the class name since this is a mix class type
result.each { |ci| ci.id = "#{ci.evm_object_class}::#{ci.id}" }
result
end
# ISO images from the source template's EMS ISO datastore (empty when the
# template, EMS, or datastore is missing).
def get_iso_images
template = VmOrTemplate.find_by(:id => get_value(@values[:src_vm_id]))
template.try(:ext_management_system).try(:iso_datastore).try(:iso_images) || []
end
def allowed_iso_images(_options = {})
result = get_iso_images.collect do |p|
build_ci_hash_struct(p, [:name])
end.compact
# Change the ID to contain the class name since this is a mix class type
result.each { |ci| ci.id = "#{ci.evm_object_class}::#{ci.id}" }
result
end
# Applies web-service supplied requester fields to +values+.
# Handles :auto_approve specially, drops :user_name (the requester is set
# elsewhere), optionally enriches owner details from LDAP, and copies every
# remaining key that exists in the requester dialog.
def ws_requester_fields(values, fields)
  dialog_name = :requester
  dlg_fields = @dialogs.fetch_path(:dialogs, :requester, :fields)
  if dlg_fields.nil?
    # Fixed message ("updates be skipped") to match get_ws_dialog_fields.
    _log.info("<#{dialog_name}> dialog not found in dialogs. Field updates will be skipped.")
    return
  end
  data = parse_ws_string(fields)
  _log.info("data:<#{data.inspect}>")
  values[:auto_approve] = data.delete(:auto_approve) == 'true'
  data.delete(:user_name)
  # get owner values from LDAP if configured
  if data[:owner_email].present? && MiqLdap.using_ldap?
    email = data[:owner_email]
    unless email.include?('@')
      email = "#{email}@#{::Settings.authentication.user_suffix}"
    end
    values[:owner_email] = email
    # Best-effort enrichment: an LDAP lookup failure must not abort the request.
    retrieve_ldap rescue nil
  end
  dlg_keys = dlg_fields.keys
  data.keys.each do |key|
    if dlg_keys.include?(key)
      _log.info("processing key <#{dialog_name}:#{key}> with value <#{data[key].inspect}>")
      values[key] = data[key]
    else
      _log.warn("Skipping key <#{dialog_name}:#{key}>. Key name not found in dialog")
    end
  end
end
# Applies web-service schedule data: a non-blank :schedule_time switches the
# request to scheduled mode and parses the schedule/retirement timestamps;
# any remaining keys present in the schedule dialog are set normally.
# Note: `dialog_name = :schedule` is assigned inside the condition on the
# first line and reused below.
# NOTE(review): uses Time.parse here but Time.zone.parse in cast_value —
# confirm the timezone handling difference is intentional.
def ws_schedule_fields(values, _fields, data)
return if (dlg_fields = get_ws_dialog_fields(dialog_name = :schedule)).nil?
unless data[:schedule_time].blank?
values[:schedule_type] = 'schedule'
[:schedule_time, :retirement_time].each do |key|
data_type = :time
time_value = data.delete(key)
set_value = time_value.blank? ? nil : Time.parse(time_value)
_log.info("setting key <#{dialog_name}:#{key}(#{data_type})> to value <#{set_value.inspect}>")
values[key] = set_value
end
end
dlg_keys = dlg_fields.keys
data.keys.each { |key| set_ws_field_value(values, key, data, dialog_name, dlg_fields) if dlg_keys.include?(key) }
end
# Collects every field :error recorded by validate and raises them as a
# single translated RuntimeError (also logged untranslated).
def raise_validate_errors
errors = []
fields { |_fn, f, _dn, _d| errors << f[:error] unless f[:error].nil? }
err_text = "Provision failed for the following reasons:\n#{errors.join("\n")}"
_log.error("<#{err_text}>")
raise _("Provision failed for the following reasons:\n%{errors}") % {:errors => errors.join("\n")}
end
private
def apply_result(result, data_type)
return result if data_type == :array_integer
[result.first, result.last] unless result.nil?
end
# Maps each object's +id+ to its +name+, preserving input order.
def build_id_to_name_hash(array)
  array.map { |ci| [ci.id, ci.name] }.to_h
end
# Fallback hash-struct converter: snapshots only :name (when present) for
# models without a dedicated *_to_hash_struct helper.
def default_ci_to_hash_struct(ci)
attributes = []
attributes << :name if ci.respond_to?(:name)
build_ci_hash_struct(ci, attributes)
end
def filter_to_objects_in_same_datacenter(array, source)
# If a folder is selected, reduce the host/cluster list to only hosts/clusters in the same datacenter as the folder
source[:datacenter] ? array.reject { |i| find_datacenter_for_ci(i).id != source[:datacenter].id } : array
end
end
Avoid duplicate host load in allowed_hosts_obj
MiqRequestWorkflow#allowed_hosts_obj originally would collect the known
host ids from the ems tree it has already built up, combine those with
any hosts associated to the storage it had, do a query against the
`src[:ems]`'s hosts, and select only the ones that match the original
set of host ids. From there, it would take that list of host objects,
throw it through Rbac, which would also make another query to find the
hosts that match that list AND filter it based on the targeted user,
throwing out the previously instantiated hosts that were used as the
intermediate list from `src[:ems]`.
This change avoids creating that intermediate `Host` object list by
replacing the `.find_all` with a `.where`, so we pass Rbac a scoped
query (instead of an `Array`) that is not executed until the user
filter has been applied to it.
Metrics
-------
Benchmarks were taken by monitoring the UI requests using the
`manageiq_performance` gem. The route monitored was selecting the
`"Lifecycle" -> "Publish this VM to a Template"` menu button from a VM
show page. The metrics shown are against database with a fairly large
EMS, with about 600 hosts, and are 3 subsequent requests against that
route.
**Before**
| ms | queries | query (ms) | rows |
| ---: | ---: | ---: | ---: |
| 18613 | 2062 | 1463.7 | 70017 |
| 17695 | 2062 | 1475.5 | 70017 |
| 17774 | 2062 | 1578.4 | 70017 |
**After**
| ms | queries | query (ms) | rows |
| ---: | ---: | ---: | ---: |
| 18553 | 2061 | 1560.7 | 61866 |
| 17385 | 2061 | 1564.6 | 61866 |
| 17468 | 2061 | 1437.5 | 61866 |
require 'enumerator'
require 'miq-hash_struct'
class MiqRequestWorkflow
include Vmdb::Logging
include_concern "DialogFieldValidation"
# We rely on MiqRequestWorkflow's descendants to be comprehensive
singleton_class.send(:prepend, DescendantLoader::ArDescendantsWithLoader)
attr_accessor :dialogs, :requester, :values, :last_vm_id
# Subclass hooks: workflows override these to supply an automate dialog
# request name and default dialog files. The base class provides none.
def self.automate_dialog_request
nil
end
def self.default_dialog_file
nil
end
def self.default_pre_dialog_file
nil
end
# Option keys whose values must be stored encrypted; overridden by
# workflows that collect secrets (e.g. passwords).
def self.encrypted_options_fields
[]
end
# Regexps matching "[:field]" references to encrypted option keys.
def self.encrypted_options_field_regs
encrypted_options_fields.map { |f| /\[:#{f}\]/ }
end
# Union of encrypted option keys across every workflow subclass.
def self.all_encrypted_options_fields
descendants.flat_map(&:encrypted_options_fields).uniq
end
# Swaps the requester for the user named in data[:user_name], when given.
# Returns +user+ unchanged when no override is supplied; raises
# ActiveRecord::RecordNotFound when the named user cannot be looked up.
def self.update_requester_from_parameters(data, user)
  return user if data[:user_name].blank?
  new_user = User.lookup_by_identity(data[:user_name])
  unless new_user
    # Message fixed: it is the requester (not "requested") being changed.
    _log.error("requester not changed to <#{data[:user_name]}> due to a lookup failure")
    raise ActiveRecord::RecordNotFound
  end
  _log.warn("requester changed to <#{new_user.userid}>")
  new_user
end
# values    - dialog values collected so far (empty Hash on first call)
# requester - a User or an identity resolvable to one
# options   - :skip_dialog_load bypasses all dialog work (web-service path);
#             :use_pre_dialog => false suppresses pre-dialog mode
def initialize(values, requester, options = {})
instance_var_init(values, requester, options)
unless options[:skip_dialog_load] == true
# If this is the first time we are called the values hash will be empty
# Also skip if we are being called from a web-service
if @dialogs.nil?
@dialogs = get_dialogs
normalize_numeric_fields
else
# Dialogs already loaded means we are continuing from a pre-dialog.
@running_pre_dialog = true if options[:use_pre_dialog] != false
end
end
unless options[:skip_dialog_load] == true
set_default_values
update_field_visibility
end
end
# Initializes core state: @values, @filters, and @requester (resolving an
# identity to a User and, when :requester_group differs from the user's
# current group, cloning the user into that group).
def instance_var_init(values, requester, options)
@values = values
@filters = {}
@requester = requester.kind_of?(User) ? requester : User.lookup_by_identity(requester)
group_description = values[:requester_group]
if group_description && group_description != @requester.miq_group_description
# Clone so the group switch does not mutate the caller's User object.
@requester = @requester.clone
@requester.current_group_by_description = group_description
end
@values.merge!(options) unless options.blank?
end
# Helper method when not using workflow
# Validates +values+ and either updates an existing +request+ or creates a
# new one. Returns false when validation fails, an unsaved (invalid)
# request object when the model rejects it, else the created/updated request.
def make_request(request, values, requester = nil, auto_approve = false)
return false unless validate(values)
password_helper(values, true)
# Ensure that tags selected in the pre-dialog get applied to the request
values[:vm_tags] = (values[:vm_tags].to_miq_a + @values[:pre_dialog_vm_tags]).uniq if @values.try(:[], :pre_dialog_vm_tags).present?
set_request_values(values)
if request
MiqRequest.update_request(request, values, @requester)
else
req = request_class.new(:options => values, :requester => @requester, :request_type => request_type.to_s)
return req unless req.valid? # TODO: CatalogController#atomic_req_submit is the only one that enumerates over the errors
values[:__request_type__] = request_type.to_s.presence # Pass this along to MiqRequest#create_request
request_class.create_request(values, @requester, auto_approve)
end
end
# Seeds +init_values+ with each field's dialog default, skipping fields the
# caller already set and fields displayed as :ignore. List-backed fields are
# stored as [value, description] pairs.
# Subtlety: when a field has no :default, +val+ remains nil, so a Hash list
# stores [nil, values[nil]] — preserved existing behavior.
def init_from_dialog(init_values)
@dialogs[:dialogs].keys.each do |dialog_name|
get_all_fields(dialog_name).each_pair do |field_name, field_values|
next unless init_values[field_name].nil?
next if field_values[:display] == :ignore
if !field_values[:default].nil?
val = field_values[:default]
end
if field_values[:values]
if field_values[:values].kind_of?(Hash)
# Save [value, description], skip for timezones array
init_values[field_name] = [val, field_values[:values][val]]
else
field_values[:values].each do |tz|
if tz[1].to_i_with_method == val.to_i_with_method
# Save [value, description] for timezones array
init_values[field_name] = [val, tz[0]]
end
end
end
else
# Set to default value
init_values[field_name] = val
end
end
end
end
# Validates entered +values+ against the dialog definitions.
# => Input - A hash keyed by field name with entered values
# => Output - true || false
#
# Update @dialogs adding error keys to fields that don't validate
def validate(values)
  valid = true
  get_all_dialogs(false).each do |d, dlg|
    # Check if the entire dialog is ignored or disabled and check while processing the fields
    dialog_disabled = !dialog_active?(d, dlg, values)
    get_all_fields(d, false).each do |f, fld|
      fld[:error] = nil
      # Check the disabled flag here so we reset the "error" value on each field
      next if dialog_disabled || fld[:display] == :hide
      # Array-typed fields keep the raw array; others unwrap [key, text].
      value = fld[:data_type] =~ /array_/ ? values[f] : get_value(values[f])
      if fld[:required] == true
        # If :required_method is defined let it determine if the field is value
        if fld[:required_method].nil?
          default_require_method = "default_require_#{f}".to_sym
          if self.respond_to?(default_require_method)
            fld[:error] = send(default_require_method, f, values, dlg, fld, value)
            unless fld[:error].nil?
              valid = false
              next
            end
          else
            if value.blank?
              fld[:error] = "#{required_description(dlg, fld)} is required"
              valid = false
              next
            end
          end
        else
          Array.wrap(fld[:required_method]).each do |method|
            fld[:error] = send(method, f, values, dlg, fld, value)
            # Bail out early if we see an error
            break unless fld[:error].nil?
          end
          unless fld[:error].nil?
            valid = false
            next
          end
        end
      end
      if fld[:validation_method] && respond_to?(fld[:validation_method])
        if (fld[:error] = send(fld[:validation_method], f, values, dlg, fld, value))
          valid = false
          next
        end
      end
      next if value.blank?
      msg = "'#{fld[:description]}' in dialog #{dlg[:description]} must be of type #{fld[:data_type]}"
      # BUG FIX: the [valid, fld] return value was previously discarded, so
      # a data-type failure set fld[:error] but never flipped the overall
      # result and validate still returned true.
      valid = validate_data_types(value, fld, msg, valid).first
    end
  end
  valid
end
# Type-checks +value+ against fld[:data_type], setting fld[:error] = msg on
# failure. Returns [valid, fld]; +valid+ is a local here, so callers must
# use the returned value (the fld mutation, by contrast, is visible to the
# caller directly). is_integer?/is_numeric? are ManageIQ core extensions.
def validate_data_types(value, fld, msg, valid)
case fld[:data_type]
when :integer
unless is_integer?(value)
fld[:error] = msg
valid = false
end
when :float
unless is_numeric?(value)
fld[:error] = msg
valid = false
end
when :boolean
# TODO: do we need validation for boolean
when :button
# Ignore
when :array_integer
unless value.kind_of?(Array)
fld[:error] = msg
valid = false
end
else
# Fall back to a class check against the camelized type name.
data_type = Object.const_get(fld[:data_type].to_s.camelize)
unless value.kind_of?(data_type)
fld[:error] = msg
valid = false
end
end
[valid, fld]
end
# Ordered list of dialog (tab) keys.
def get_dialog_order
@dialogs[:dialog_order]
end
# Buttons to render; defaults to submit/cancel when none configured.
def get_buttons
@dialogs[:buttons] || [:submit, :cancel]
end
# Ordered {:name, :description} entries for the visible dialog tabs;
# dialogs whose display setting is :hide or :ignore are omitted.
def provisioning_tab_list
  @dialogs[:dialog_order].each_with_object([]) do |dialog_key, tabs|
    dialog_name = dialog_key.to_s
    display = @dialogs.fetch_path(:dialogs, dialog_name.to_sym, :display)
    next if display == :hide || display == :ignore
    tabs << {
      :name        => dialog_name,
      :description => @dialogs.fetch_path(:dialogs, dialog_name.to_sym, :description)
    }
  end
end
# All dialog definitions, refreshing each dialog's field values unless
# refresh_values is false (validation passes false to avoid recomputation).
def get_all_dialogs(refresh_values = true)
@dialogs[:dialogs].each_key { |d| get_dialog(d, refresh_values) }
@dialogs[:dialogs]
end
# One dialog's definition ({} when unknown), with fields refreshed.
def get_dialog(dialog_name, refresh_values = true)
dialog = @dialogs.fetch_path(:dialogs, dialog_name.to_sym)
return {} unless dialog
get_all_fields(dialog_name, refresh_values)
dialog
end
# One dialog's field definitions ({} when unknown), each refreshed.
def get_all_fields(dialog_name, refresh_values = true)
dialog = @dialogs.fetch_path(:dialogs, dialog_name.to_sym)
return {} unless dialog
dialog[:fields].each_key { |f| get_field(f, dialog_name, refresh_values) }
dialog[:fields]
end
# A single field's definition ({} when unknown). When the field declares
# :values_from and refresh is requested, recomputes its value list, then:
# auto-selects the sole entry of a single-item Hash list (unless
# :auto_select_single is false) and clears a selection that no longer
# appears in the refreshed list.
def get_field(field_name, dialog_name = nil, refresh_values = true)
field_name = field_name.to_sym
dialog_name = find_dialog_from_field_name(field_name) if dialog_name.nil?
field = @dialogs.fetch_path(:dialogs, dialog_name.to_sym, :fields, field_name)
return {} unless field
if field.key?(:values_from) && refresh_values
options = field[:values_from][:options] || {}
options[:prov_field_name] = field_name
field[:values] = send(field[:values_from][:method], options)
# Reset then currently selected item if it no longer appears in the available values
if field[:values].kind_of?(Hash)
if field[:values].length == 1
unless field[:auto_select_single] == false
@values[field_name] = field[:values].to_a.first
end
else
currently_selected = get_value(@values[field_name])
unless currently_selected.nil? || field[:values].key?(currently_selected)
@values[field_name] = [nil, nil]
end
end
end
end
field
end
# TODO: Return list in defined ordered
# Yields each (name, definition) dialog pair.
def dialogs
@dialogs[:dialogs].each_pair { |n, d| yield(n, d) }
end
# Yields (field_name, field, dialog_name, dialog) for every field, limited
# to the given dialog name(s) when +dialog+ is provided.
def fields(dialog = nil)
dialog = [*dialog] unless dialog.nil?
@dialogs[:dialogs].each_pair do |dn, d|
next unless dialog.blank? || dialog.include?(dn)
d[:fields].each_pair do |fn, f|
yield(fn, f, dn, d)
end
end
end
# Coerces :integer fields after dialog load: defaults become Integers and
# value-list keys are re-keyed through to_i_with_method (which understands
# suffixed sizes such as "1.megabytes").
def normalize_numeric_fields
fields do |_fn, f, _dn, _d|
if f[:data_type] == :integer
f[:default] = f[:default].to_i_with_method unless f[:default].blank?
unless f[:values].blank?
# Re-key in place: capture keys first since we mutate the Hash.
keys = f[:values].keys.dup
keys.each { |k| f[:values][k.to_i_with_method] = f[:values].delete(k) }
end
end
end
end
# Helper method to write message to the rails log (production.log) for debugging
# Intentionally a no-op stub; uncomment the body to trace start/end marks.
def rails_logger(_name, _start)
# Rails.logger.warn("#{name} #{start.zero? ? 'start' : 'end'}")
end
# Instance-side convenience wrapper for the class-level parser.
def parse_ws_string(text_input, options = {})
self.class.parse_ws_string(text_input, options)
end
# Parses web-service field input into a Hash. Hash input is normalized via
# parse_request_parameter_hash; the legacy "k1=v1|k2=v2" String form is
# still accepted but logs a deprecation warning. Keys are stripped,
# downcased, and symbolized unless options[:modify_key_name] == false.
def self.parse_ws_string(text_input, options = {})
return parse_request_parameter_hash(text_input, options) if text_input.kind_of?(Hash)
return {} unless text_input.kind_of?(String)
deprecated_warn = "method: parse_ws_string, arg Type => String"
solution = "arg should be a hash"
MiqAeMethodService::Deprecation.deprecation_warning(deprecated_warn, solution)
result = {}
text_input.split('|').each do |value|
next if value.blank?
idx = value.index('=')
# Entries without "=" are silently skipped.
next if idx.nil?
key = options[:modify_key_name] == false ? value[0, idx].strip : value[0, idx].strip.downcase.to_sym
result[key] = value[idx + 1..-1].strip
end
result
end
# Normalizes a request-parameter Hash: keys are stripped, downcased, and
# symbolized unless options[:modify_key_name] == false, in which case the
# original keys are preserved. Values pass through untouched.
def self.parse_request_parameter_hash(parameter_hash, options = {})
  normalize_keys = options[:modify_key_name] != false
  parameter_hash.each_with_object({}) do |(key, value), normalized|
    key = key.strip.downcase.to_sym if normalize_keys
    normalized[key] = value
  end
end
# Resolves web-service tag input into tag ids.
# Tags arrive as category|value pairs, e.g. "cc|001|environment|test".
def ws_tags(tag_string, parser = :parse_ws_string)
  requested = send(parser, tag_string)
  # Build {category_name => {child_name => tag_id}} from the allowed tags.
  # (Block params renamed to avoid shadowing the outer variables.)
  catalog = allowed_tags.each_with_object({}) do |category, lookup|
    children = category[:children].each_with_object({}) { |(id, child), by_name| by_name[child[:name]] = id }
    lookup[category[:name]] = children
  end
  requested.collect { |cat, tag| catalog.fetch_path(cat.to_s.downcase, tag.downcase) }.compact
end
# @param parser [:parse_ws_string|:parse_ws_string_v1]
# @param additional_values [String] values of the form cc=001|environment=test
# @return [Hash{Symbol => Object}] parsed values with symbolized keys
def ws_values(additional_values, parser = :parse_ws_string, parser_options = {})
parsed_values = send(parser, additional_values, parser_options)
parsed_values.each_with_object({}) { |(k, v), ws_values| ws_values[k.to_sym] = v }
end
# Legacy v1 parser: splits "k1|v1|k2|v2" into stripped [key, value] pairs.
# A trailing key with no value is dropped; nil input yields [].
def parse_ws_string_v1(values, _options = {})
  values.to_s.split("|").each_slice(2).each_with_object([]) do |(key, val), pairs|
    pairs << [key.strip, val.strip] unless val.nil?
  end
end
# Locates the dialog whose :fields hash defines +field_name+; nil if none.
def find_dialog_from_field_name(field_name)
  wanted = field_name.to_sym
  @dialogs[:dialogs].each_key.find { |dialog_name| @dialogs[:dialogs][dialog_name][:fields].key?(wanted) }
end
# Dialog values are stored as [key, description] pairs; this unwraps the
# key, passing scalar (non-Array) values through unchanged.
def get_value(data)
  return data.first if data.kind_of?(Array)
  data
end
# Applies caller-supplied +values+ to @values. For list-backed fields the
# selection falls back from the supplied key, to the field :default, to the
# first key of the sorted list; plain fields take the value verbatim.
def set_or_default_field_values(values)
field_names = values.keys
fields do |fn, f, _dn, _d|
if field_names.include?(fn)
if f.key?(:values)
selected_key = nil
if f[:values].key?(values[fn])
selected_key = values[fn]
elsif f.key?(:default) && f[:values].key?(f[:default])
selected_key = f[:default]
else
unless f[:values].blank?
sorted_values = f[:values].sort
selected_key = sorted_values.first.first
end
end
# Store as a [key, description] pair; leave untouched when nothing matched.
@values[fn] = [selected_key, f[:values][selected_key]] unless selected_key.nil?
else
@values[fn] = values[fn]
end
end
end
end
# Resets each named field: list-backed fields get the [nil, nil] sentinel,
# plain fields get nil.
def clear_field_values(field_names)
  fields do |fn, f, _dn, _d|
    next unless field_names.include?(fn)
    @values[fn] = f.key?(:values) ? [nil, nil] : nil
  end
end
# Selects an entry from a field's value list and stores it in @values[fn]
# as a [key, description] pair ([nil, nil] when nothing matches).
# fn          - field name (Symbol)
# f           - field definition Hash (supplies :values when +values+ is nil)
# value       - key (or partial key) to look for; nil just clears the field
# values      - optional explicit list to search instead of f[:values]
# partial_key - when true, match on key substring and log the comparison
def set_value_from_list(fn, f, value, values = nil, partial_key = false)
  @values[fn] = [nil, nil]
  values = f[:values] if values.nil?
  unless value.nil?
    @values[fn] = values.to_a.detect do |v|
      if partial_key
        _log.warn("comparing [#{v[0]}] to [#{value}]")
        v[0].to_s.downcase.include?(value.to_s.downcase)
      else
        v.include?(value)
      end
    end
    if @values[fn].nil?
      # Message grammar fixed ("did not matched" -> "did not match").
      _log.info("set_value_from_list did not match an item") if partial_key
      @values[fn] = [nil, nil]
    else
      _log.info("set_value_from_list matched item value:[#{value}] to item:[#{@values[fn][0]}]") if partial_key
    end
  end
end
# Sets a dialog's :display flag (remembering the original in :display_init)
# and optionally records an enabled flag in @values. Dialogs that were not
# initially shown are never modified here.
def show_dialog(dialog_name, show_flag, enabled_flag = nil)
dialog = @dialogs.fetch_path(:dialogs, dialog_name.to_sym)
unless dialog.nil?
dialog[:display_init] = dialog[:display] if dialog[:display_init].nil?
# If the initial dialog is not set to show then do not modify it here.
return if dialog[:display_init] != :show
dialog[:display] = show_flag
@values["#{dialog_name}_enabled".to_sym] = [enabled_flag] unless enabled_flag.nil?
end
end
# Human-readable "'<dialog>/<field>'" label used in validation messages;
# a field's :required_description wins over its plain :description.
def required_description(dlg, fld)
  field_label = fld[:required_description] || fld[:description]
  "'#{dlg[:description]}/#{field_label}'"
end
# Returns the expression filters available for the :category model,
# memoized per model name in @filters. The hash from get_expressions is
# inverted before caching (so its values become the lookup keys).
def allowed_filters(options = {})
  model_name = options[:category]
  return @filters[model_name] unless @filters[model_name].nil?
  rails_logger("allowed_filters - #{model_name}", 0)
  @filters[model_name] = @requester.get_expressions(model_name).invert
  rails_logger("allowed_filters - #{model_name}", 1)
  @filters[model_name]
end
# A dialog participates in processing unless it is flagged :ignore or its
# companion "<name>_enabled" value marks it disabled.
def dialog_active?(name, config, values)
  return false if config[:display] == :ignore
  # Check if the fields hash contains a <dialog_name>_enabled field
  state = get_value(values["#{name}_enabled".to_sym])
  state != false && state != "disabled"
end
# Sets the display flag (default attribute :display) on each named field;
# a field's :display_override, when present, always wins.
def show_fields(display_flag, field_names, display_field = :display)
  fields do |fn, f, _dn, _d|
    next unless field_names.include?(fn)
    f[display_field] = f[:display_override].blank? ? display_flag : f[:display_override]
  end
end
# Populates the owner_* entries in @values from the LDAP record matching
# the :owner_email value. No-op when the email is blank or the bind fails;
# raises (translated) when LDAP returns no entry for the email.
def retrieve_ldap(_options = {})
  email = get_value(@values[:owner_email])
  unless email.blank?
    l = MiqLdap.new
    if l.bind_with_default == true
      raise _("No information returned for %{email}") % {:email => email} if (d = l.get_user_info(email)).nil?
      [:first_name, :last_name, :address, :city, :state, :zip, :country, :title, :company,
       :department, :office, :phone, :phone_mobile, :manager, :manager_mail, :manager_phone].each do |prop|
        # try(:dup) copies the string (and tolerates nil attributes).
        @values["owner_#{prop}".to_sym] = d[prop].try(:dup)
      end
      @values[:sysprep_organization] = d[:company].try(:dup)
    end
  end
end
# Defaults the :schedule_time field to Time.now plus options[:offset]
# (converted through the to_i_with_method core extension — assumed to be
# a seconds offset; confirm against callers) unless a default is set.
def default_schedule_time(options = {})
  # TODO: Added support for "default_from", like values_from, that gets called once after dialog creation
  # Update VM description
  fields do |fn, f, _dn, _d|
    if fn == :schedule_time
      f[:default] = Time.now + options[:offset].to_i_with_method if f[:default].nil?
      break
    end
  end
end
# Filters options[:values] down to entries whose integer key is below a
# limit. The limit comes from the current dialog value of options[:field]
# when given, otherwise options[:value]; :include_equals keeps an equal
# key. NOTE(review): name keeps the historical "less_then" spelling.
def values_less_then(options)
  results = options[:values].transform_keys(&:to_i_with_method)
  field, include_equals = options[:field], options[:include_equals]
  max_value = field.nil? ? options[:value].to_i_with_method : get_value(@values[field]).to_i_with_method
  # A non-positive limit means "no limit": return everything.
  return results if max_value <= 0
  results.reject { |k, _v| include_equals == true ? max_value < k : max_value <= k }
end
# Yields [tag_name, category_name] for every classification id stored in
# @values[:vm_tags]; does nothing when that entry is not an Array.
def tags
  tag_ids = @values[:vm_tags]
  return unless tag_ids.kind_of?(Array)
  tag_ids.each do |tag_id|
    classification = Classification.find(tag_id)
    yield(classification.name, classification.parent.name) # tag name and its category
  end
end
# Renders the selected tags as a ':'-separated list of "category/tag".
def get_tags
  parts = []
  tags { |tag, cat| parts << "#{cat}/#{tag}" }
  parts.join(':')
end
# Builds the tag-category tree offered in the UI, memoized in @tags.
# Categories can be excluded/included/forced single-select via options;
# categories with no remaining children are dropped; results are ordered
# by the optional options[:order] list, then by description.
def allowed_tags(options = {})
  return @tags unless @tags.nil?
  region_number = options.delete(:region_number)
  # TODO: Call allowed_tags properly from controller - it is currently hard-coded with no options passed
  field_options = @dialogs.fetch_path(:dialogs, :purpose, :fields, :vm_tags, :options)
  options = field_options unless field_options.nil?
  rails_logger('allowed_tags', 0)
  st = Time.now
  @tags = {}
  exclude_list = options[:exclude].blank? ? [] : options[:exclude].collect(&:to_s)
  include_list = options[:include].blank? ? [] : options[:include].collect(&:to_s)
  single_select = options[:single_select].blank? ? [] : options[:single_select].collect(&:to_s)
  cats = Classification.visible.writeable.managed
  cats = cats.in_region(region_number) if region_number
  cats.each do |t|
    next if exclude_list.include?(t.name)
    next unless include_list.blank? || include_list.include?(t.name)
    # Force passed tags to be single select
    single_value = single_select.include?(t.name) ? true : t.single_value?
    @tags[t.id] = {:name => t.name, :description => t.description, :single_value => single_value, :children => {}, :id => t.id}
  end
  ents = Classification.visible.writeable.parent_ids(@tags.keys).with_tag_name
  ents = ents.in_region(region_number) if region_number
  ents.each do |t|
    # Individual entries can also be excluded by their "category/tag" path.
    full_tag_name = "#{@tags[t.parent_id][:name]}/#{t.name}"
    next if exclude_list.include?(full_tag_name)
    @tags[t.parent_id][:children][t.id] = {:name => t.name, :description => t.description}
  end
  @tags.delete_if { |_k, v| v[:children].empty? }
  # Now sort the tags based on the order passed options. All remaining tags not defined in the order
  # will be sorted by description and appended to the other sorted tags
  tag_results, tags_to_sort = [], []
  sort_order = options[:order].blank? ? [] : options[:order].collect(&:to_s)
  @tags.each do |_k, v|
    (idx = sort_order.index(v[:name])).nil? ? tags_to_sort << v : tag_results[idx] = v
  end
  tags_to_sort = tags_to_sort.sort_by { |a| a[:description] }
  @tags = tag_results.compact + tags_to_sort
  @tags.each do |tag|
    # Children whose first name looks numeric sort numerically, else by description.
    tag[:children] = if tag[:children].first.last[:name] =~ /^\d/
                       tag[:children].sort_by { |_k, v| v[:name].to_i }
                     else
                       tag[:children].sort_by { |_k, v| v[:description] }
                     end
  end
  rails_logger('allowed_tags', 1)
  _log.info("allowed_tags returned [#{@tags.length}] objects in [#{Time.now - st}] seconds")
  @tags
end
# Extends allowed_tags with the categories of any tags chosen in a
# pre-dialog (@values[:pre_dialog_vm_tags]) that are not already present,
# so pre-selected tags remain displayable.
def allowed_tags_and_pre_tags
  pre_tags = @values[:pre_dialog_vm_tags].to_miq_a
  return allowed_tags if pre_tags.blank?
  tag_cats = allowed_tags.dup
  tag_cat_names = tag_cats.collect { |cat| cat[:name] }
  Classification.where(:id => pre_tags).each do |tag|
    parent = tag.parent
    next if tag_cat_names.include?(parent.name)
    new_cat = {:name => parent.name, :description => parent.description, :single_value => parent.single_value?, :children => {}, :id => parent.id}
    parent.children.each { |c| new_cat[:children][c.id] = {:name => c.name, :description => c.description} }
    tag_cats << new_cat
    tag_cat_names << new_cat[:name]
  end
  tag_cats
end
# Key under which tag selections are stored; subclasses may override.
def tag_symbol
  :tag_ids
end
# Wraps an ActiveRecord object in a lightweight MiqHashStruct exposing
# :id, :evm_object_class (base-class name as a symbol) and each requested
# property, copied via its reader method.
def build_ci_hash_struct(ci, props)
  nh = MiqHashStruct.new(:id => ci.id, :evm_object_class => ci.class.base_class.name.to_sym)
  props.each { |p| nh.send("#{p}=", ci.send(p)) }
  nh
end
# Loads the MiqDialog content for this request. The dialog name comes from
# @values, Automate, or the class default; lookup is case-insensitive and
# scoped to this workflow's base model type. Raises when not found.
def get_dialogs
  @values[:miq_request_dialog_name] ||= @values[:provision_dialog_name] || dialog_name_from_automate || self.class.default_dialog_file
  # Strip any ".rb" suffix from the configured name.
  dp = @values[:miq_request_dialog_name] = File.basename(@values[:miq_request_dialog_name], ".rb")
  _log.info("Loading dialogs <#{dp}> for user <#{@requester.userid}>")
  d = MiqDialog.find_by("lower(name) = ? and dialog_type = ?", dp.downcase, self.class.base_model.name)
  if d.nil?
    raise MiqException::Error,
          "Dialog cannot be found. Name:[%{name}] Type:[%{type}]" % {:name => @values[:miq_request_dialog_name],
                                                                     :type => self.class.base_model.name}
  end
  d.content
end
# Loads optional pre-dialog content whose name Automate supplies via the
# 'get_pre_dialog_name' message; returns nil when unconfigured or the
# dialog record does not exist.
def get_pre_dialogs
  pre_dialogs = nil
  pre_dialog_name = dialog_name_from_automate('get_pre_dialog_name')
  unless pre_dialog_name.blank?
    pre_dialog_name = File.basename(pre_dialog_name, ".rb")
    d = MiqDialog.find_by(:name => pre_dialog_name, :dialog_type => self.class.base_model.name)
    unless d.nil?
      _log.info("Loading pre-dialogs <#{pre_dialog_name}> for user <#{@requester.userid}>")
      pre_dialogs = d.content
    end
  end
  pre_dialogs
end
# Asks Automate for the dialog name to use. Sends the current dialog
# values (plus input_fields and extra_attrs) as dialog_input_* attributes,
# copies any dialog_option_* attributes from the returned workspace back
# into @values, and returns the workspace's "dialog_name" (blank => nil).
def dialog_name_from_automate(message = 'get_dialog_name', input_fields = [:request_type], extra_attrs = {})
  return nil if self.class.automate_dialog_request.nil?
  _log.info("Querying Automate Profile for dialog name")
  attrs = {'request' => self.class.automate_dialog_request, 'message' => message}
  extra_attrs.each { |k, v| attrs[k] = v }
  @values.each_key do |k|
    key = "dialog_input_#{k.to_s.downcase}"
    if attrs.key?(key)
      _log.info("Skipping key=<#{key}> because already set to <#{attrs[key]}>")
    else
      # :vm_tags is flattened to the "cat/tag:cat/tag" string form.
      value = (k == :vm_tags) ? get_tags : get_value(@values[k]).to_s
      _log.info("Setting attrs[#{key}]=<#{value}>")
      attrs[key] = value
    end
  end
  input_fields.each { |k| attrs["dialog_input_#{k.to_s.downcase}"] = send(k).to_s }
  ws = MiqAeEngine.resolve_automation_object("REQUEST", @requester, attrs, :vmdb_object => @requester)
  if ws && ws.root
    dialog_option_prefix = 'dialog_option_'
    dialog_option_prefix_length = dialog_option_prefix.length
    ws.root.attributes.each do |key, value|
      next unless key.downcase.starts_with?(dialog_option_prefix)
      next unless key.length > dialog_option_prefix_length
      key = key[dialog_option_prefix_length..-1].downcase
      _log.info("Setting @values[#{key}]=<#{value}>")
      @values[key.to_sym] = value
    end
    name = ws.root("dialog_name")
    return name.presence
  end
  nil
end
# Normalizes a request-type string to a symbol, falling back to the
# request class's first registered type when blank/nil.
def self.request_type(type)
  type.presence.try(:to_sym) || request_class.request_types.first
end
# The currently selected request type from the dialog values.
def request_type
  self.class.request_type(get_value(@values[:request_type]))
end
# The concrete request class for this workflow; switches to the
# "<Request>Template" variant when a service-template request is in play.
def request_class
  klass = self.class.request_class
  return klass unless get_value(@values[:service_template_request]) == true
  "#{klass.name}Template".constantize
end
# Derives the request class from the workflow class name, e.g.
# FooWorkflow => FooRequest. Memoized in @workflow_class (the variable
# name is historical — it actually caches the *request* class).
def self.request_class
  @workflow_class ||= name.underscore.gsub(/_workflow$/, "_request").camelize.constantize
end
# Applies default values when the workflow is initialized. Failures in
# set_default_user_info (e.g. LDAP errors) are deliberately non-fatal,
# but are now logged instead of silently discarded by an inline rescue.
def set_default_values
  set_default_user_info
rescue StandardError => err
  _log.warn("set_default_user_info failed: #{err.message}")
  nil
end
# Seeds the requester tab: defaults owner_email to the requester's email
# (pulling further owner details from LDAP when configured) and shows or
# hides the "load from LDAP" control accordingly. No-op when there is no
# requester dialog.
def set_default_user_info
  return if get_dialog(:requester).blank?
  if get_value(@values[:owner_email]).blank? && @requester.email.present?
    @values[:owner_email] = @requester.email
    retrieve_ldap if MiqLdap.using_ldap?
  end
  show_flag = MiqLdap.using_ldap? ? :show : :hide
  show_fields(show_flag, [:owner_load_ldap])
end
# Fills request-level defaults: the requester's group, and — when an owner
# email is present but no owner group was supplied — the group looked up
# from the owner's user record.
def set_request_values(values)
  values[:requester_group] ||= @requester.current_group.description
  owner_email = values[:owner_email]
  return if owner_email.blank? || values[:owner_group].present?
  values[:owner_group] = User.find_by_lower_email(owner_email, @requester).try(:miq_group_description)
end
# Encrypts (or decrypts, when encrypt == false) every configured password
# field in the given values hash. Mutates the strings in place via
# String#replace; blank entries are skipped.
def password_helper(values = @values, encrypt = true)
  self.class.encrypted_options_fields.each do |pwd_key|
    next if values[pwd_key].blank?
    if encrypt
      values[pwd_key].replace(MiqPassword.try_encrypt(values[pwd_key]))
    else
      values[pwd_key].replace(MiqPassword.try_decrypt(values[pwd_key]))
    end
  end
end
# Hook for subclasses: recompute field display flags after values change.
# Intentionally a no-op in this base workflow.
def update_field_visibility
end
# Recomputes sources/targets and dialog state after the user changes a
# value, then merges the recalculated @values back into the caller's hash.
# Errors are logged with a backtrace and re-raised.
def refresh_field_values(values)
  st = Time.now
  @values = values
  get_source_and_targets(true)
  # @values gets modified during this call
  get_all_dialogs
  values.merge!(@values)
  # Update the display flag for fields based on current settings
  update_field_visibility
  _log.info("refresh completed in [#{Time.now - st}] seconds")
rescue => err
  $log.log_backtrace(err)
  raise
end
# Run the relationship methods and perform set intersections on the returned values.
# An optional starting set of ids may be passed in via filtered_ids.
# Each relat contributes the result of "<relat>_to_<ci>"; the intersection
# of all non-nil results (as [id, name] pairs) is returned as a hash.
def allowed_ci(ci, relats, sources, filtered_ids = nil)
  result = nil
  relats.each do |rsc_type|
    rails_logger("allowed_ci - #{rsc_type}_to_#{ci}", 0)
    rc = send("#{rsc_type}_to_#{ci}", sources)
    rails_logger("allowed_ci - #{rsc_type}_to_#{ci}", 1)
    unless rc.nil?
      rc = rc.to_a
      result = result.nil? ? rc : result & rc
    end
  end
  result = [] if result.nil?
  # Keep only ids present in filtered_ids when a pre-filter was supplied.
  result.reject! { |k, _v| !filtered_ids.include?(k) } unless filtered_ids.nil?
  result.each_with_object({}) { |s, hash| hash[s[0]] = s[1] }
end
# Applies the MiqSearch expression selected in the given filter field
# (e.g. :host_filter) to the candidate targets, scoped to the requester
# and their current group.
def process_filter(filter_prop, ci_klass, targets)
  rails_logger("process_filter - [#{ci_klass}]", 0)
  filter_id = get_value(@values[filter_prop]).to_i
  MiqSearch.filtered(filter_id, ci_klass, targets,
                     :user => @requester,
                     :miq_group => @requester.current_group,
                    ).tap { rails_logger("process_filter - [#{ci_klass}]", 1) }
end
# Collects every object of the given class from the cached EMS metadata
# tree (optionally rooted at src).
def find_all_ems_of_type(klass, src = nil)
  collected = []
  each_ems_metadata(src, klass) { |obj| collected.push(obj) }
  collected
end
# Hosts in the metadata tree beneath the given item.
def find_hosts_under_ci(item)
  find_classes_under_ci(item, Host)
end
# Resource pools in the metadata tree beneath the given item.
def find_respools_under_ci(item)
  find_classes_under_ci(item, ResourcePool)
end
# Collects every object of the given class beneath +item+ in the cached
# EMS metadata tree; [] when item is nil or not found in the tree.
def find_classes_under_ci(item, klass)
  results = []
  return results if item.nil?
  # Cache the log prefix once; it is reused in repeated lookups.
  @_find_classes_under_ci_prefix ||= _log.prefix
  node = load_ems_node(item, @_find_classes_under_ci_prefix)
  each_ems_metadata(node.attributes[:object], klass) { |ci| results << ci } unless node.nil?
  results
end
# Looks up the cached XML node for an item (AR object or MiqHashStruct)
# in @ems_xml_nodes, keyed by "<BaseClass>_<id>". Logs an error and
# returns nil when the item is not in the cached resource tree.
def load_ems_node(item, log_header)
  @ems_xml_nodes ||= {}
  klass_name = item.kind_of?(MiqHashStruct) ? item.evm_object_class : item.class.base_class.name
  node = @ems_xml_nodes["#{klass_name}_#{item.id}"]
  $log.error("#{log_header} Resource <#{klass_name}_#{item.id} - #{item.name}> not found in cached resource tree.") if node.nil?
  node
end
# True when the cached EMS metadata tree contains at least one EmsCluster.
def ems_has_clusters?
  # break(ci) makes each_ems_metadata return the first yielded object.
  found = each_ems_metadata(nil, EmsCluster) { |ci| break(ci) }
  return found.evm_object_class == :EmsCluster if found.kind_of?(MiqHashStruct)
  false
end
# Recursively builds {folder_id => "path / to / folder"} for selectable
# folders under +folder+. Hidden folders other than 'vm' prune the branch;
# "Datacenter"-typed folders contribute to the path but are not offered.
def get_ems_folders(folder, dh = {}, full_path = "")
  if folder.evm_object_class == :EmsFolder
    if folder.hidden
      return dh if folder.name != 'vm'
    else
      full_path += full_path.blank? ? folder.name.to_s : " / #{folder.name}"
      dh[folder.id] = full_path unless folder.type == "Datacenter"
    end
  end
  # Process child folders
  @_get_ems_folders_prefix ||= _log.prefix
  node = load_ems_node(folder, @_get_ems_folders_prefix)
  node.children.each { |child| get_ems_folders(child.attributes[:object], dh, full_path) } unless node.nil?
  dh
end
# Recursively builds {respool_id => "path"} for every ResourcePool node
# beneath +node+ in the metadata XML tree.
def get_ems_respool(node, dh = {}, full_path = "")
  return if node.nil?
  if node.kind_of?(XmlHash::Element)
    folder = node.attributes[:object]
    if node.name == :ResourcePool
      full_path += full_path.blank? ? folder.name.to_s : " / #{folder.name}"
      dh[folder.id] = full_path
    end
  end
  # Process child folders
  node.children.each { |child| get_ems_respool(child, dh, full_path) }
  dh
end
# The datacenter (EmsFolder with type "Datacenter") above the given item.
def find_datacenter_for_ci(item, ems_src = nil)
  find_class_above_ci(item, EmsFolder, ems_src, true)
end
# Host(s) serving the given resource pool: the host directly above it,
# or all hosts under its owning cluster.
def find_hosts_for_respool(item, ems_src = nil)
  hosts = find_class_above_ci(item, Host, ems_src)
  return [hosts] unless hosts.blank?
  cluster = find_cluster_above_ci(item)
  find_hosts_under_ci(cluster)
end
# The EmsCluster ancestor of the given item, if any.
def find_cluster_above_ci(item, ems_src = nil)
  find_class_above_ci(item, EmsCluster, ems_src)
end
# Walks up the cached metadata tree from +item+ and returns the first
# ancestor of +klass+. With datacenter == true the match must also be a
# "Datacenter"-typed node (&& binds tighter than ||, so the condition
# reads: any match, or — when datacenter — a Datacenter-typed match).
def find_class_above_ci(item, klass, _ems_src = nil, datacenter = false)
  result = nil
  @_find_class_above_ci_prefix ||= _log.prefix
  node = load_ems_node(item, @_find_class_above_ci_prefix)
  klass_name = klass.name.to_sym
  # Walk the xml document parents to find the requested class
  while node.kind_of?(XmlHash::Element)
    ci = node.attributes[:object]
    if node.name == klass_name && (datacenter == false || datacenter == true && ci.type == "Datacenter")
      result = ci
      break
    end
    node = node.parent
  end
  result
end
# Yields every object in the cached EMS metadata tree — rooted at +ems_ci+
# when given, otherwise the whole tree — optionally restricted to +klass+.
def each_ems_metadata(ems_ci = nil, klass = nil, &_blk)
  if ems_ci.nil?
    src = get_source_and_targets
    ems_xml = get_ems_metadata_tree(src)
    ems_node = ems_xml.try(:root)
  else
    @_each_ems_metadata_prefix ||= _log.prefix
    ems_node = load_ems_node(ems_ci, @_each_ems_metadata_prefix)
  end
  klass_name = klass.name.to_sym unless klass.nil?
  unless ems_node.nil?
    ems_node.each_recursive { |node| yield(node.attributes[:object]) if klass.nil? || klass_name == node.name }
  end
end
# Builds (once) an XmlHash tree of the EMS inventory, excluding VMs, with
# hosts re-attached under their clusters; also populates @ems_xml_nodes.
# NOTE: the early return when src[:ems] is nil exits the method without
# memoizing, so the build is retried on the next call.
def get_ems_metadata_tree(src)
  @ems_metadata_tree ||= begin
    return if src[:ems].nil?
    st = Time.zone.now
    result = load_ar_obj(src[:ems]).fulltree_arranged(:except_type => "VmOrTemplate")
    ems_metadata_tree_add_hosts_under_clusters!(result)
    @ems_xml_nodes = {}
    xml = MiqXml.newDoc(:xmlhash)
    convert_to_xml(xml, result)
    _log.info("EMS metadata collection completed in [#{Time.zone.now - st}] seconds")
    xml
  end
end
# Recursively walks the arranged {object => children} tree and attaches
# each cluster's hosts as (leaf) children of that cluster.
def ems_metadata_tree_add_hosts_under_clusters!(result)
  result.each do |ci, subtree|
    ems_metadata_tree_add_hosts_under_clusters!(subtree)
    next unless ci.kind_of?(EmsCluster)
    ci.hosts.each { |host| subtree[host] = {} }
  end
end
# Recursively mirrors the arranged {object => children} hash into the XML
# document, caching each created node in @ems_xml_nodes keyed by
# "<BaseClass>_<id>". Element payloads are hash-struct conversions.
def convert_to_xml(xml, result)
  result.each do |obj, children|
    @ems_xml_nodes["#{obj.class.base_class}_#{obj.id}"] = node = xml.add_element(obj.class.base_class.name, :object => ci_to_hash_struct(obj))
    convert_to_xml(node, children)
  end
end
# Copies the dialog selection into result[<key>_id] and, when a real id is
# present (0 is treated as "none"), loads the record as result[key].
def add_target(dialog_key, key, klass, result)
  key_id = "#{key}_id".to_sym
  result[key_id] = get_value(@values[dialog_key])
  result[key_id] = nil if result[key_id] == 0
  result[key] = ci_to_hash_struct(klass.find_by(:id => result[key_id])) unless result[key_id].nil?
end
# Converts an AR object (or a collection of them) into MiqHashStructs,
# dispatching to a private "<base_class>_to_hash_struct" specialization
# when one exists, else the default conversion.
def ci_to_hash_struct(ci)
  return if ci.nil?
  return ci.collect { |c| ci_to_hash_struct(c) } if ci.respond_to?(:collect)
  method_name = "#{ci.class.base_class.name.underscore}_to_hash_struct".to_sym
  return send(method_name, ci) if respond_to?(method_name, true)
  default_ci_to_hash_struct(ci)
end
# Hash-struct projection of a Host.
def host_to_hash_struct(ci)
  build_ci_hash_struct(ci, [:name, :vmm_product, :vmm_version, :state, :v_total_vms, :maintenance])
end
# Hash-struct projection of a VM/template, including converted snapshots.
def vm_or_template_to_hash_struct(ci)
  v = build_ci_hash_struct(ci, [:name, :platform])
  v.snapshots = ci.snapshots.collect { |si| ci_to_hash_struct(si) }
  v
end
# Hash-struct projection of an EmsFolder.
def ems_folder_to_hash_struct(ci)
  build_ci_hash_struct(ci, [:name, :type, :hidden])
end
# Hash-struct projection of a Storage, with its storage-cluster names
# flattened to a comma-separated string (nil when there are none).
def storage_to_hash_struct(ci)
  storage_clusters = ci.storage_clusters.blank? ? nil : ci.storage_clusters.collect(&:name).join(', ')
  build_ci_hash_struct(ci, [:name, :free_space, :total_space, :storage_domain_type]).tap do |hs|
    hs.storage_clusters = storage_clusters
  end
end
# Hash-struct projection of a Snapshot.
def snapshot_to_hash_struct(ci)
  build_ci_hash_struct(ci, [:name, :current?])
end
# Hash-struct projection of a CustomizationSpec.
def customization_spec_to_hash_struct(ci)
  build_ci_hash_struct(ci, [:name, :typ, :description, :last_update_time, :is_sysprep_spec?])
end
# Reloads the ActiveRecord object behind a hash struct (arrays are mapped
# element-wise); non-hash-struct values pass through unchanged.
def load_ar_obj(ci)
  return load_ar_objs(ci) if ci.kind_of?(Array)
  return ci unless ci.kind_of?(MiqHashStruct)
  ci.evm_object_class.to_s.camelize.constantize.find_by(:id => ci.id)
end
# Element-wise load_ar_obj over a collection.
def load_ar_objs(ci)
  ci.map { |item| load_ar_obj(item) }
end
# Return empty hash if we are selecting placement automatically so we do not
# spend time determining all the available resources.
# (Delegates to get_source_and_targets; see that method for the shape.)
def resources_for_ui
  get_source_and_targets
end
# Hosts eligible for placement: hosts of the selected EMS, narrowed by the
# selected storage and/or datacenter, then run through the :host_filter
# expression. Returns [] when no EMS is selected or nothing matches.
def allowed_hosts_obj(options = {})
  return [] if (src = resources_for_ui).blank? || src[:ems].nil?
  datacenter = src[:datacenter] || options[:datacenter]
  rails_logger('allowed_hosts_obj', 0)
  st = Time.now
  hosts_ids = find_all_ems_of_type(Host).collect(&:id)
  # Intersect with hosts attached to the selected storage, when present.
  hosts_ids &= load_ar_obj(src[:storage]).hosts.collect(&:id) unless src[:storage].nil?
  if datacenter
    @_allowed_hosts_obj_prefix ||= _log.prefix
    dc_node = load_ems_node(datacenter, @_allowed_hosts_obj_prefix)
    hosts_ids &= find_hosts_under_ci(dc_node.attributes[:object]).collect(&:id)
  end
  return [] if hosts_ids.blank?
  # Remove any hosts that are no longer in the list
  all_hosts = load_ar_obj(src[:ems]).hosts.where(:id => hosts_ids)
  allowed_hosts_obj_cache = process_filter(:host_filter, Host, all_hosts)
  _log.info("allowed_hosts_obj returned [#{allowed_hosts_obj_cache.length}] objects in [#{Time.now - st}] seconds")
  rails_logger('allowed_hosts_obj', 1)
  allowed_hosts_obj_cache
end
# Writable storages reachable from the allowed (or explicitly selected)
# hosts, optionally limited to the selected storage profile, filtered by
# :ds_filter and returned as hash structs.
def allowed_storages(_options = {})
  return [] if (src = resources_for_ui).blank? || src[:ems].nil?
  hosts = src[:host].nil? ? allowed_hosts_obj({}) : [load_ar_obj(src[:host])]
  return [] if hosts.blank?
  rails_logger('allowed_storages', 0)
  st = Time.now
  MiqPreloader.preload(hosts, :storages)
  # Collect unique writable storages by id across all candidate hosts.
  storages = hosts.each_with_object({}) do |host, hash|
    host.writable_storages.each { |s| hash[s.id] = s }
  end.values
  selected_storage_profile_id = get_value(@values[:placement_storage_profile])
  if selected_storage_profile_id
    # NOTE(review): pluck issues one query per storage here; consider preloading.
    storages.reject! { |s| !s.storage_profiles.pluck(:id).include?(selected_storage_profile_id) }
  end
  allowed_storages_cache = process_filter(:ds_filter, Storage, storages).collect do |s|
    ci_to_hash_struct(s)
  end
  _log.info("allowed_storages returned [#{allowed_storages_cache.length}] objects in [#{Time.now - st}] seconds")
  rails_logger('allowed_storages', 1)
  allowed_storages_cache
end
# Hosts surviving intersection with the cluster/respool/folder selections,
# returned as hash structs in metadata-tree order.
def allowed_hosts(_options = {})
  hosts = allowed_hosts_obj
  hosts_ids = hosts.collect(&:id)
  result_hosts_hash = allowed_ci(:host, [:cluster, :respool, :folder], hosts_ids)
  # transpose.first extracts the id column; nil when the hash was empty.
  host_ids = result_hosts_hash.to_a.transpose.first
  return [] if host_ids.nil?
  find_all_ems_of_type(Host).collect { |h| h if host_ids.include?(h.id) }.compact
end
# Datacenters intersected with the cluster/respool/host/folder selections.
def allowed_datacenters(_options = {})
  allowed_ci(:datacenter, [:cluster, :respool, :host, :folder])
end
# Clusters of the selected EMS, pre-filtered by the :cluster_filter
# expression, then intersected with the respool/host/folder selections.
def allowed_clusters(_options = {})
  all_clusters = EmsCluster.where(:ems_id => get_source_and_targets[:ems].try(:id))
  filtered_targets = process_filter(:cluster_filter, EmsCluster, all_clusters)
  allowed_ci(:cluster, [:respool, :host, :folder], filtered_targets.collect(&:id))
end
# Resource pools of the selected EMS, pre-filtered by the :rp_filter
# expression, then intersected with the cluster/host/folder selections.
def allowed_respools(_options = {})
  all_resource_pools = ResourcePool.where(:ems_id => get_source_and_targets[:ems].try(:id))
  filtered_targets = process_filter(:rp_filter, ResourcePool, all_resource_pools)
  allowed_ci(:respool, [:cluster, :host, :folder], filtered_targets.collect(&:id))
end
alias_method :allowed_resource_pools, :allowed_respools
# Folders intersected with the cluster/host/respool selections.
def allowed_folders(_options = {})
  allowed_ci(:folder, [:cluster, :host, :respool])
end
# Datacenters reachable from the cluster selection; nil when the EMS has
# no clusters (so this relation contributes nothing to intersections).
def cluster_to_datacenter(src)
  return nil unless ems_has_clusters?
  ci_to_datacenter(src, :cluster, EmsCluster)
end
# Datacenters reachable from the resource-pool selection.
def respool_to_datacenter(src)
  ci_to_datacenter(src, :respool, ResourcePool)
end
# Datacenters reachable from the host selection.
def host_to_datacenter(src)
  ci_to_datacenter(src, :host, Host)
end
# Datacenters reachable from the folder selection; nil when no folder is
# selected (folders only constrain once one is chosen).
def folder_to_datacenter(src)
  return nil if src[:folder].nil?
  ci_to_datacenter(src, :folder, EmsFolder)
end
# {id => name} of the datacenters above the selected ci (or above every
# instance of ci_type when nothing is selected).
def ci_to_datacenter(src, ci, ci_type)
  sources = src[ci].nil? ? find_all_ems_of_type(ci_type) : [src[ci]]
  sources.collect { |c| find_datacenter_for_ci(c) }.compact.uniq.each_with_object({}) { |c, r| r[c.id] = c.name }
end
# Clusters above the selected resource pool(s); nil when the EMS has none.
def respool_to_cluster(src)
  return nil unless ems_has_clusters?
  sources = src[:respool].nil? ? find_all_ems_of_type(ResourcePool) : [src[:respool]]
  build_id_to_name_hash(sources.collect { |rp| find_cluster_above_ci(rp) }.compact)
end
# Clusters above the selected (or all allowed) host(s); nil when the EMS
# has no clusters.
def host_to_cluster(src)
  return nil unless ems_has_clusters?
  sources = src[:host].nil? ? allowed_hosts_obj : [src[:host]]
  build_id_to_name_hash(sources.collect { |h| find_cluster_above_ci(h) }.compact)
end
# Clusters limited to the datacenter of the selected folder (all clusters
# when no folder is selected); nil when the EMS has no clusters.
def folder_to_cluster(src)
  return nil unless ems_has_clusters?
  source = find_all_ems_of_type(EmsCluster)
  build_id_to_name_hash(filter_to_objects_in_same_datacenter(source, src))
end
# Resource pools under the selected cluster (all pools when none is
# selected), mapped to their full paths; nil when the EMS has no clusters.
def cluster_to_respool(src)
  return nil unless ems_has_clusters?
  targets = src[:cluster].nil? ? find_all_ems_of_type(ResourcePool) : find_respools_under_ci(src[:cluster])
  res_pool_with_path = get_ems_respool(get_ems_metadata_tree(src))
  targets.each_with_object({}) { |rp, r| r[rp.id] = res_pool_with_path[rp.id] }
end
# Resource pools in the datacenter of the selected folder, mapped to
# their full paths; nil when no folder is selected.
def folder_to_respool(src)
  return nil if src[:folder].nil?
  datacenter = find_datacenter_for_ci(src[:folder])
  targets = find_respools_under_ci(datacenter)
  res_pool_with_path = get_ems_respool(get_ems_metadata_tree(src))
  targets.each_with_object({}) { |rp, r| r[rp.id] = res_pool_with_path[rp.id] }
end
# Resource pools reachable from the selected (or all allowed) host(s) —
# via the host's cluster when it has one — mapped to their full paths.
def host_to_respool(src)
  hosts = src[:host].nil? ? allowed_hosts_obj : [src[:host]]
  targets = hosts.collect do |h|
    cluster = find_cluster_above_ci(h)
    source = cluster.nil? ? h : cluster
    find_respools_under_ci(source)
  end.flatten
  res_pool_with_path = get_ems_respool(get_ems_metadata_tree(src))
  targets.each_with_object({}) { |rp, r| r[rp.id] = res_pool_with_path[rp.id] }
end
# Hosts under the selected cluster (all hosts when none selected); nil
# when the EMS has no clusters.
def cluster_to_host(src)
  return nil unless ems_has_clusters?
  hosts = src[:cluster].nil? ? find_all_ems_of_type(Host) : find_hosts_under_ci(src[:cluster])
  build_id_to_name_hash(hosts)
end
# Hosts serving the selected resource pool (all hosts when none selected).
def respool_to_host(src)
  hosts = src[:respool].nil? ? find_all_ems_of_type(Host) : find_hosts_for_respool(src[:respool])
  build_id_to_name_hash(hosts)
end
# Hosts limited to the datacenter of the selected folder (all hosts when
# no folder is selected).
def folder_to_host(src)
  source = find_all_ems_of_type(Host)
  build_id_to_name_hash(filter_to_objects_in_same_datacenter(source, src))
end
# Folder choices derived from the datacenters of the selected/allowed
# hosts, merged into a single {id => path} hash.
def host_to_folder(src)
  sources = src[:host].nil? ? allowed_hosts_obj : [src[:host]]
  datacenters = sources.collect do |h|
    rails_logger("host_to_folder for host #{h.name}", 0)
    result = find_datacenter_for_ci(h)
    rails_logger("host_to_folder for host #{h.name}", 1)
    result
  end.compact
  datacenters.each_with_object({}) do |dc, folders|
    rails_logger("host_to_folder for dc #{dc.name}", 0)
    folders.merge!(get_ems_folders(dc))
    rails_logger("host_to_folder for dc #{dc.name}", 1)
  end
end
# Folder choices for the selected cluster's datacenter; nil when the EMS
# has no clusters or no cluster is selected.
def cluster_to_folder(src)
  return nil unless ems_has_clusters?
  return nil if src[:cluster].nil?
  datacenter = find_datacenter_for_ci(src[:cluster])
  datacenter.nil? ? {} : get_ems_folders(datacenter)
end
# Folder choices for the selected resource pool's datacenter; nil when no
# resource pool is selected.
def respool_to_folder(src)
  return nil if src[:respool].nil?
  datacenter = find_datacenter_for_ci(src[:respool])
  datacenter.nil? ? {} : get_ems_folders(datacenter)
end
# Applies one web-service-supplied field: casts the raw value to the
# field's data type and, for list-backed fields, resolves it against the
# allowed values — an id match for hash structs, a key intersection for
# :array_integer, or a [key, description] pair otherwise. Always writes
# the outcome into values[key].
def set_ws_field_value(values, key, data, dialog_name, dlg_fields)
  value = data.delete(key)
  dlg_field = dlg_fields[key]
  data_type = dlg_field[:data_type]
  set_value = cast_value(value, data_type)
  result = nil
  if dlg_field.key?(:values)
    # Refresh the field's allowed values before matching.
    get_source_and_targets(true)
    get_field(key, dialog_name)
    field_values = dlg_field[:values]
    _log.info("processing key <#{dialog_name}:#{key}(#{data_type})> with values <#{field_values.inspect}>")
    if field_values.present?
      result = if field_values.first.kind_of?(MiqHashStruct)
                 found = field_values.detect { |v| v.id == set_value }
                 [found.id, found.name] if found
               elsif data_type == :array_integer
                 field_values.keys & set_value
               else
                 [set_value, field_values[set_value]] if field_values.key?(set_value)
               end
      set_value = apply_result(result, data_type)
    end
  end
  # NOTE(review): this warning also fires for fields without :values, since
  # result stays nil on that path.
  _log.warn("Unable to find value for key <#{dialog_name}:#{key}(#{data_type})> with input value <#{set_value.inspect}>. No matching item found.") if result.nil?
  _log.info("setting key <#{dialog_name}:#{key}(#{data_type})> to value <#{set_value.inspect}>")
  values[key] = set_value
end
# Coerces a raw web-service value to the dialog field's data type.
# Relies on core extensions (to_i_with_method, to_miq_a) and
# ActiveSupport (in?, Time.zone).
def cast_value(value, data_type)
  case data_type
  when :integer then value.to_i_with_method
  when :float then value.to_f
  when :boolean then value.to_s.downcase.in?(%w(true t))
  when :time then Time.zone.parse(value)
  when :button then value # Ignore
  when :array_integer then value.to_miq_a.map!(&:to_i)
  else value # Ignore
  end
end
# Like set_ws_field_value but matches on a display attribute (default
# :name), case-insensitively, instead of the key/id. Only writes
# values[key] when a match is found.
def set_ws_field_value_by_display_name(values, key, data, dialog_name, dlg_fields, obj_key = :name)
  value = data.delete(key)
  dlg_field = dlg_fields[key]
  data_type = dlg_field[:data_type]
  find_value = value.to_s.downcase
  if dlg_field.key?(:values)
    field_values = dlg_field[:values]
    _log.info("processing key <#{dialog_name}:#{key}(#{data_type})> with values <#{field_values.inspect}>")
    if field_values.present?
      result = if field_values.first.kind_of?(MiqHashStruct)
                 found = field_values.detect { |v| v.send(obj_key).to_s.downcase == find_value }
                 [found.id, found.send(obj_key)] if found
               else
                 field_values.detect { |_k, v| v.to_s.downcase == find_value }
               end
      if result.nil?
        _log.warn("Unable to set key <#{dialog_name}:#{key}(#{data_type})> to value <#{find_value.inspect}>. No matching item found.")
      else
        # Normalize the match to a [key, description] pair.
        set_value = [result.first, result.last]
        _log.info("setting key <#{dialog_name}:#{key}(#{data_type})> to value <#{set_value.inspect}>")
        values[key] = set_value
      end
    end
  end
end
# Resolves a field from web-service data by id when data_key is present
# (optionally namespacing the value as "Klass::id"), otherwise falls back
# to matching by name using the key without its "_id" suffix.
def set_ws_field_value_by_id_or_name(values, dlg_field, data, dialog_name, dlg_fields, data_key = nil, id_klass = nil)
  data_key = dlg_field if data_key.blank?
  if data.key?(data_key)
    data[data_key] = "#{id_klass}::#{data[data_key]}" unless id_klass.blank?
    data[dlg_field] = data.delete(data_key)
    set_ws_field_value(values, dlg_field, data, dialog_name, dlg_fields)
  else
    data_key_without_id = data_key.to_s.chomp('_id').to_sym
    if data.key?(data_key_without_id)
      data[data_key] = data.delete(data_key_without_id)
      data[dlg_field] = data.delete(data_key)
      set_ws_field_value_by_display_name(values, dlg_field, data, dialog_name, dlg_fields, :name)
    end
  end
end
# The :fields hash for the named dialog, or nil (with an info log) when
# the dialog is not defined.
def get_ws_dialog_fields(dialog_name)
  dlg_fields = @dialogs.fetch_path(:dialogs, dialog_name, :fields)
  _log.info("<#{dialog_name}> dialog not found in dialogs. Field updates will be skipped.") if dlg_fields.nil?
  dlg_fields
end
# Customization templates applicable to the selected ISO/PXE image,
# limited to templates matching this provision type (host/vm). Side
# effect: caches the selected template's script in
# @values[:customization_template_script] (cleared when nothing matches).
def allowed_customization_templates(_options = {})
  result = []
  customization_template_id = get_value(@values[:customization_template_id])
  @values[:customization_template_script] = nil if customization_template_id.nil?
  prov_typ = self.class == MiqHostProvisionWorkflow ? "host" : "vm"
  image = supports_iso? ? get_iso_image : get_pxe_image
  unless image.nil?
    result = image.customization_templates.collect do |c|
      # filter customization templates by provision type
      if c.pxe_image_type.provision_type.blank? || c.pxe_image_type.provision_type == prov_typ
        @values[:customization_template_script] = c.script if c.id == customization_template_id
        build_ci_hash_struct(c, [:name, :description, :updated_at])
      end
    end.compact
  end
  @values[:customization_template_script] = nil if result.blank?
  result
end
# The ISO image selected in the dialog, if any.
def get_iso_image
  get_image_by_type(:iso_image_id)
end
# The PXE image selected in the dialog, if any.
def get_pxe_image
  get_image_by_type(:pxe_image_id)
end
# Resolves a "Klass::id" dialog value (the namespaced form produced by
# allowed_images) to its record; nil when no id part is present.
def get_image_by_type(image_type)
  klass, id = get_value(@values[image_type]).to_s.split('::')
  return nil if id.blank?
  klass.constantize.find_by(:id => id)
end
# The PXE server selected in the dialog, if any.
def get_pxe_server
  PxeServer.find_by(:id => get_value(@values[:pxe_server_id]))
end
# All PXE servers as an {id => name} hash.
def allowed_pxe_servers(_options = {})
  result = {}
  PxeServer.all.each { |server| result[server.id] = server.name }
  result
end
# PXE images on the selected PXE server matching this provision type
# (host/vm), excluding images without a type and Windows-default images.
def allowed_pxe_images(_options = {})
  pxe_server = get_pxe_server
  return [] if pxe_server.nil?
  prov_typ = self.class == MiqHostProvisionWorkflow ? "host" : "vm"
  pxe_server.pxe_images.collect do |p|
    next if p.pxe_image_type.nil? || p.default_for_windows
    # filter pxe images by provision_type to show vm/any or host/any
    build_ci_hash_struct(p, [:name, :description]) if p.pxe_image_type.provision_type.blank? || p.pxe_image_type.provision_type == prov_typ
  end.compact
end
# Windows images on the selected PXE server, as hash structs.
def allowed_windows_images(_options = {})
  pxe_server = get_pxe_server
  return [] if pxe_server.nil?
  pxe_server.windows_images.collect do |p|
    build_ci_hash_struct(p, [:name, :description])
  end.compact
end
# PXE and Windows images combined; ids are namespaced with the class name
# because the list mixes record types (see get_image_by_type).
def allowed_images(options = {})
  result = allowed_pxe_images(options) + allowed_windows_images(options)
  # Change the ID to contain the class name since this is a mix class type
  result.each { |ci| ci.id = "#{ci.evm_object_class}::#{ci.id}" }
  result
end
# ISO images from the source template's EMS ISO datastore; [] when any
# link in the chain is missing.
def get_iso_images
  template = VmOrTemplate.find_by(:id => get_value(@values[:src_vm_id]))
  template.try(:ext_management_system).try(:iso_datastore).try(:iso_images) || []
end
# Available ISO images as hash structs, with class-namespaced ids
# (consistent with allowed_images).
def allowed_iso_images(_options = {})
  result = get_iso_images.collect do |p|
    build_ci_hash_struct(p, [:name])
  end.compact
  # Change the ID to contain the class name since this is a mix class type
  result.each { |ci| ci.id = "#{ci.evm_object_class}::#{ci.id}" }
  result
end
# Applies web-service-supplied requester-tab fields into +values+:
# handles :auto_approve, drops :user_name, optionally resolves owner
# details from LDAP (appending the configured user suffix to a bare
# email), and copies only keys declared in the requester dialog.
def ws_requester_fields(values, fields)
  dialog_name = :requester
  dlg_fields = @dialogs.fetch_path(:dialogs, :requester, :fields)
  if dlg_fields.nil?
    # Message wording fixed (was "Field updates be skipped."), now matching
    # the equivalent message in get_ws_dialog_fields.
    _log.info("<#{dialog_name}> dialog not found in dialogs. Field updates will be skipped.")
    return
  end
  data = parse_ws_string(fields)
  _log.info("data:<#{data.inspect}>")
  values[:auto_approve] = data.delete(:auto_approve) == 'true'
  data.delete(:user_name)
  # get owner values from LDAP if configured
  if data[:owner_email].present? && MiqLdap.using_ldap?
    email = data[:owner_email]
    unless email.include?('@')
      email = "#{email}@#{::Settings.authentication.user_suffix}"
    end
    values[:owner_email] = email
    # Best effort: an LDAP lookup failure must not abort field processing.
    retrieve_ldap rescue nil
  end
  dlg_keys = dlg_fields.keys
  data.keys.each do |key|
    if dlg_keys.include?(key)
      _log.info("processing key <#{dialog_name}:#{key}> with value <#{data[key].inspect}>")
      values[key] = data[key]
    else
      _log.warn("Skipping key <#{dialog_name}:#{key}>. Key name not found in dialog")
    end
  end
end
# Applies web-service schedule fields. A non-blank :schedule_time forces
# :schedule_type to 'schedule' and parses :schedule_time/:retirement_time
# as times; remaining keys are resolved through set_ws_field_value.
def ws_schedule_fields(values, _fields, data)
  return if (dlg_fields = get_ws_dialog_fields(dialog_name = :schedule)).nil?
  unless data[:schedule_time].blank?
    values[:schedule_type] = 'schedule'
    [:schedule_time, :retirement_time].each do |key|
      data_type = :time
      time_value = data.delete(key)
      set_value = time_value.blank? ? nil : Time.parse(time_value)
      _log.info("setting key <#{dialog_name}:#{key}(#{data_type})> to value <#{set_value.inspect}>")
      values[key] = set_value
    end
  end
  dlg_keys = dlg_fields.keys
  data.keys.each { |key| set_ws_field_value(values, key, data, dialog_name, dlg_fields) if dlg_keys.include?(key) }
end
# Collects every field :error set during validation, logs the combined
# text, and raises a single translated summary error.
def raise_validate_errors
  errors = []
  fields { |_fn, f, _dn, _d| errors << f[:error] unless f[:error].nil? }
  err_text = "Provision failed for the following reasons:\n#{errors.join("\n")}"
  _log.error("<#{err_text}>")
  raise _("Provision failed for the following reasons:\n%{errors}") % {:errors => errors.join("\n")}
end
private
# Normalizes a matched dialog value: :array_integer results pass through,
# nil stays nil, anything else collapses to a [first, last] pair.
def apply_result(result, data_type)
  return result if data_type == :array_integer
  return nil if result.nil?
  [result.first, result.last]
end
# Collapses a collection of objects responding to #id/#name into an
# {id => name} hash.
def build_id_to_name_hash(array)
  array.map { |item| [item.id, item.name] }.to_h
end
# Fallback hash-struct conversion: carry :name when the object has one.
def default_ci_to_hash_struct(ci)
  props = ci.respond_to?(:name) ? [:name] : []
  build_ci_hash_struct(ci, props)
end
# If a folder (datacenter) is selected, keep only the hosts/clusters that
# live in the same datacenter; otherwise return the list unchanged.
def filter_to_objects_in_same_datacenter(array, source)
  datacenter = source[:datacenter]
  return array unless datacenter
  array.select { |item| find_datacenter_for_ci(item).id == datacenter.id }
end
end
|
# Vlad deployment configuration for the octodata application.
require 'bundler/vlad'
set :application, "octodata"
set :repository, "git@github.com:christhomson/octodata.git"
set :user, "deploy"
set :domain, "#{user}@deploy.cthomson.ca"
set :deploy_to, "/home/deploy/apps/octodata"
set :revision, "origin/master"
# Keep the production SQLite database in the shared dir so it survives deploys.
shared_paths.merge!({"production.sqlite3" => "db/production.sqlite3"})
# On the server side, the upstart scripts should be installed to /etc/init.
# We also need to allow the "[start|stop|restart] [thin]" commands with no password for this user.
namespace :vlad do
namespace :thin do
# Thin is managed via upstart jobs; these require passwordless sudo (see above).
remote_task :start, roles: :app do
puts "Starting Thin..."
sudo "start octodata_thin"
end
remote_task :stop, roles: :app do
puts "Attempting to stop Thin..."
sudo "stop octodata_thin"
end
remote_task :restart, roles: :app do
puts "Restarting Thin..."
sudo "restart octodata_thin"
end
end
# Refresh the server crontab from config/schedule.rb via the whenever gem.
remote_task :update_cron, roles: :app do
run "cd #{current_path}; bundle exec whenever --update-crontab"
end
# Symlink per-server config files from the shared dir into the new release.
remote_task :symlink_config, roles: :app do
files = %w{database.yml settings.yml}
files.each do |file|
run "touch #{shared_path}/#{file}; ln -s #{shared_path}/#{file} #{release_path}/config/#{file}"
end
end
# Full deploy pipeline: fetch code, install gems, link config, migrate, restart.
task :deploy => [
"vlad:update",
"vlad:bundle:install",
"vlad:symlink_config",
"vlad:migrate",
"vlad:thin:restart",
"vlad:cleanup"
]
task :start => [
"vlad:thin:restart",
]
end
Added vlad:copy_production task.
# Vlad deployment configuration for the octodata application.
require 'bundler/vlad'
set :application, "octodata"
set :repository, "git@github.com:christhomson/octodata.git"
set :user, "deploy"
set :domain, "#{user}@deploy.cthomson.ca"
set :deploy_to, "/home/deploy/apps/octodata"
set :revision, "origin/master"
# Keep the production SQLite database in the shared dir so it survives deploys.
shared_paths.merge!({"production.sqlite3" => "db/production.sqlite3"})
# On the server side, the upstart scripts should be installed to /etc/init.
# We also need to allow the "[start|stop|restart] [thin]" commands with no password for this user.
namespace :vlad do
namespace :thin do
# Thin is managed via upstart jobs; these require passwordless sudo (see above).
remote_task :start, roles: :app do
puts "Starting Thin..."
sudo "start octodata_thin"
end
remote_task :stop, roles: :app do
puts "Attempting to stop Thin..."
sudo "stop octodata_thin"
end
remote_task :restart, roles: :app do
puts "Restarting Thin..."
sudo "restart octodata_thin"
end
end
# Refresh the server crontab from config/schedule.rb via the whenever gem.
remote_task :update_cron, roles: :app do
run "cd #{current_path}; bundle exec whenever --update-crontab"
end
# Symlink per-server config files from the shared dir into the new release.
remote_task :symlink_config, roles: :app do
files = %w{database.yml settings.yml}
files.each do |file|
run "touch #{shared_path}/#{file}; ln -s #{shared_path}/#{file} #{release_path}/config/#{file}"
end
end
# Full deploy pipeline: fetch code, install gems, link config, migrate, restart.
task :deploy => [
"vlad:update",
"vlad:bundle:install",
"vlad:symlink_config",
"vlad:migrate",
"vlad:thin:restart",
"vlad:cleanup"
]
task :start => [
"vlad:thin:restart",
]
# Pulls the live production SQLite DB down over scp, replacing the local
# development database. Runs locally (exec), not on the remote host.
task :copy_production do
exec "scp #{domain}:#{deploy_to}/shared/production.sqlite3 db/development.sqlite3"
end
end
|
# Homebrew formula for Apache Maven 3.6.2 (binary distribution).
class Maven < Formula
desc "Java-based project management"
homepage "https://maven.apache.org/"
url "https://www.apache.org/dyn/closer.cgi?path=maven/maven-3/3.6.2/binaries/apache-maven-3.6.2-bin.tar.gz"
mirror "https://archive.apache.org/dist/maven/maven-3/3.6.2/binaries/apache-maven-3.6.2-bin.tar.gz"
sha256 "3fbc92d1961482d6fbd57fbf3dd6d27a4de70778528ee3fb44aa7d27eb32dfdc"
# Pre-built archive: nothing to compile, so no bottle is needed.
bottle :unneeded
depends_on :java => "1.7+"
conflicts_with "mvnvm", :because => "also installs a 'mvn' executable"
def install
# Remove windows files
rm_f Dir["bin/*.cmd"]
# Fix the permissions on the global settings file.
chmod 0644, "conf/settings.xml"
libexec.install Dir["*"]
# Leave conf file in libexec. The mvn symlink will be resolved and the conf
# file will be found relative to it
# Wrap every launcher script (except m2.conf) so it runs with an
# overridable JAVA_HOME environment.
Pathname.glob("#{libexec}/bin/*") do |file|
next if file.directory?
basename = file.basename
next if basename.to_s == "m2.conf"
(bin/basename).write_env_script file, Language::Java.overridable_java_home_env
end
end
test do
# Minimal POM plus one source file; `mvn compile` must succeed.
(testpath/"pom.xml").write <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="https://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.homebrew</groupId>
<artifactId>maven-test</artifactId>
<version>1.0.0-SNAPSHOT</version>
<properties>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
</project>
EOS
(testpath/"src/main/java/org/homebrew/MavenTest.java").write <<~EOS
package org.homebrew;
public class MavenTest {
public static void main(String[] args) {
System.out.println("Testing Maven with Homebrew!");
}
}
EOS
system "#{bin}/mvn", "compile", "-Duser.home=#{testpath}"
end
end
maven 3.6.3
Closes #47242.
Signed-off-by: Rui Chen <5fd29470147430022ff146db88de16ee91dea376@gmail.com>
# Homebrew formula for Apache Maven 3.6.3 (binary distribution).
class Maven < Formula
desc "Java-based project management"
homepage "https://maven.apache.org/"
url "https://www.apache.org/dyn/closer.cgi?path=maven/maven-3/3.6.3/binaries/apache-maven-3.6.3-bin.tar.gz"
mirror "https://archive.apache.org/dist/maven/maven-3/3.6.3/binaries/apache-maven-3.6.3-bin.tar.gz"
sha256 "26ad91d751b3a9a53087aefa743f4e16a17741d3915b219cf74112bf87a438c5"
# Pre-built archive: nothing to compile, so no bottle is needed.
bottle :unneeded
depends_on :java => "1.7+"
conflicts_with "mvnvm", :because => "also installs a 'mvn' executable"
def install
# Remove windows files
rm_f Dir["bin/*.cmd"]
# Fix the permissions on the global settings file.
chmod 0644, "conf/settings.xml"
libexec.install Dir["*"]
# Leave conf file in libexec. The mvn symlink will be resolved and the conf
# file will be found relative to it
# Wrap every launcher script (except m2.conf) so it runs with an
# overridable JAVA_HOME environment.
Pathname.glob("#{libexec}/bin/*") do |file|
next if file.directory?
basename = file.basename
next if basename.to_s == "m2.conf"
(bin/basename).write_env_script file, Language::Java.overridable_java_home_env
end
end
test do
# Minimal POM plus one source file; `mvn compile` must succeed.
(testpath/"pom.xml").write <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="https://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="https://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.homebrew</groupId>
<artifactId>maven-test</artifactId>
<version>1.0.0-SNAPSHOT</version>
<properties>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
</project>
EOS
(testpath/"src/main/java/org/homebrew/MavenTest.java").write <<~EOS
package org.homebrew;
public class MavenTest {
public static void main(String[] args) {
System.out.println("Testing Maven with Homebrew!");
}
}
EOS
system "#{bin}/mvn", "compile", "-Duser.home=#{testpath}"
end
end
|
# Builds the QR code that ODK Collect scans to configure itself against a site.
class ODKConfigGenerator
  # Returns an RQRCode::QRCode wrapping the deflated, Base64-encoded settings
  # JSON — the transport format ODK Collect expects in a configuration code.
  # (Previously assigned intermediate results to unused locals; the value of
  # the last expression is the return value.)
  def generate_odk_config(username, password, site_url)
    settings = generate_string(username, password, site_url)
    RQRCode::QRCode.new(Base64.strict_encode64(Zlib::Deflate.deflate(settings)))
  end

  # Serializes the ODK settings payload: credentials and server URL under
  # "general", plus an empty "admin" section.
  def generate_string(username, password, site_url)
    {
      general: {
        password: password,
        username: username,
        server_url: site_url
      },
      admin: {}
    }.to_json
  end
end
5946: clean up code
# Builds the QR code that ODK Collect scans to configure itself against a site.
class ODKConfigGenerator
  # Deflates and Base64-encodes the settings JSON, then wraps it in a QR code.
  def generate_odk_config(username, password, site_url)
    string = generate_string(username, password, site_url)
    processed_string = Base64.strict_encode64(Zlib::Deflate.deflate(string))
    qrcode = RQRCode::QRCode.new(processed_string)
  end

  # Settings payload as JSON: credentials and server URL under "general",
  # plus an empty "admin" section.
  def generate_string(username, password, site_url)
    config = {
      general: {
        password: password,
        username: username,
        server_url: site_url
      },
      admin: {}
    }
    config.to_json
  end
end
|
# Capistrano deployment configuration for the wheelmap application.
set :stages, %w(staging production)
set :default_stage, "staging"
require 'capistrano/ext/multistage'
set :application, "wheelmap"
set :repository, "git@github.com:sozialhelden/wheelmap.git"
set :branch, ENV['BRANCH'] || "master"
set :use_sudo, false
set :scm, :git
# Or: `accurev`, `bzr`, `cvs`, `darcs`, `git`, `mercurial`, `perforce`, `subversion` or `none`
set :git_shallow_clone, 1
set :deploy_via, :remote_cache
set :default_run_options, :pty => true # or else you'll get "sorry, you must have a tty to run sudo"
set :ssh_options, :keys => [ File.expand_path("~/.ssh/wheelmap_rsa") ], :forward_agent => true
set :user, 'rails'
role :web, "178.77.98.117" # Your HTTP server, Apache/etc
role :app, "178.77.98.117" # This may be the same as your `Web` server
role :db, "178.77.98.117", :primary => true # This is where Rails migrations will run
# If you are using Passenger mod_rails uncomment this:
# if you're still using the script/reapear helper you will need
# these http://github.com/rails/irs_process_scripts
after 'deploy:symlink', 'deploy:symlink_configs'
namespace :deploy do
# Passenger has no separate start/stop; restart is a touch of tmp/restart.txt.
task :start do ; end
task :stop do ; end
task :restart, :roles => :app, :except => { :no_release => true } do
run "#{try_sudo} touch #{File.join(current_path,'tmp','restart.txt')}"
end
# Link per-server config files from the shared dir into the new release.
task :symlink_configs do
run "mkdir -p #{shared_path}/config/"
%w(database.yml osm.yml open_street_map.yml).each do |file|
run "ln -nfs #{shared_path}/config/#{file} #{release_path}/config/#{file}"
end
end
end
# have builder check and install gems after each update_code
require 'bundler/capistrano'
set :bundle_without, [:development, :test, :metrics, :deployment]
$:.unshift(File.expand_path('./lib', ENV['rvm_path'])) # Add RVM's lib directory to the load path.
require "rvm/capistrano"
set :rvm_ruby_string, 'ruby-1.8.7-p330'
set :rvm_type, :user
changed order
# Capistrano deployment configuration for the wheelmap application.
set :stages, %w(staging production)
set :default_stage, "staging"
require 'capistrano/ext/multistage'
set :application, "wheelmap"
set :repository, "git@github.com:sozialhelden/wheelmap.git"
set :branch, ENV['BRANCH'] || "master"
set :use_sudo, false
set :scm, :git
# Or: `accurev`, `bzr`, `cvs`, `darcs`, `git`, `mercurial`, `perforce`, `subversion` or `none`
set :deploy_via, :remote_cache
set :git_shallow_clone, 1
set :default_run_options, :pty => true # or else you'll get "sorry, you must have a tty to run sudo"
set :ssh_options, :keys => [ File.expand_path("~/.ssh/wheelmap_rsa") ], :forward_agent => true
set :user, 'rails'
role :web, "178.77.98.117" # Your HTTP server, Apache/etc
role :app, "178.77.98.117" # This may be the same as your `Web` server
role :db, "178.77.98.117", :primary => true # This is where Rails migrations will run
# If you are using Passenger mod_rails uncomment this:
# if you're still using the script/reapear helper you will need
# these http://github.com/rails/irs_process_scripts
after 'deploy:symlink', 'deploy:symlink_configs'
namespace :deploy do
# Passenger has no separate start/stop; restart is a touch of tmp/restart.txt.
task :start do ; end
task :stop do ; end
task :restart, :roles => :app, :except => { :no_release => true } do
run "#{try_sudo} touch #{File.join(current_path,'tmp','restart.txt')}"
end
# Link per-server config files from the shared dir into the new release.
task :symlink_configs do
run "mkdir -p #{shared_path}/config/"
%w(database.yml osm.yml open_street_map.yml).each do |file|
run "ln -nfs #{shared_path}/config/#{file} #{release_path}/config/#{file}"
end
end
end
# have builder check and install gems after each update_code
require 'bundler/capistrano'
set :bundle_without, [:development, :test, :metrics, :deployment]
$:.unshift(File.expand_path('./lib', ENV['rvm_path'])) # Add RVM's lib directory to the load path.
require "rvm/capistrano"
set :rvm_ruby_string, 'ruby-1.8.7-p330'
set :rvm_type, :user
|
# Homebrew formula for the Meson build system 0.44.0 (pure-Python package).
class Meson < Formula
desc "Fast and user friendly build system"
homepage "http://mesonbuild.com/"
url "https://github.com/mesonbuild/meson/releases/download/0.44.0/meson-0.44.0.tar.gz"
sha256 "50f9b12b77272ef6ab064d26b7e06667f07fa9f931e6a20942bba2216ba4281b"
head "https://github.com/mesonbuild/meson.git"
bottle do
cellar :any_skip_relocation
sha256 "b29b8e4a61308d73c4744200a7aee75f0c3389d2359b29993a7c8cb6e4a680e2" => :high_sierra
sha256 "b29b8e4a61308d73c4744200a7aee75f0c3389d2359b29993a7c8cb6e4a680e2" => :sierra
sha256 "b29b8e4a61308d73c4744200a7aee75f0c3389d2359b29993a7c8cb6e4a680e2" => :el_capitan
end
depends_on :python3
depends_on "ninja"
def install
# Install into this formula's site-packages via setuptools, then point the
# launcher scripts at that path through a PYTHONPATH env wrapper.
version = Language::Python.major_minor_version("python3")
ENV["PYTHONPATH"] = lib/"python#{version}/site-packages"
system "python3", *Language::Python.setup_install_args(prefix)
bin.env_script_all_files(libexec/"bin", :PYTHONPATH => ENV["PYTHONPATH"])
end
test do
# Configure a trivial C project; meson must emit a ninja build file.
(testpath/"helloworld.c").write <<~EOS
main() {
puts("hi");
return 0;
}
EOS
(testpath/"meson.build").write <<~EOS
project('hello', 'c')
executable('hello', 'helloworld.c')
EOS
mkdir testpath/"build" do
system "#{bin}/meson", ".."
assert_predicate testpath/"build/build.ninja", :exist?
end
end
end
meson: update 0.44.0 bottle.
# Homebrew formula for the Meson build system 0.44.0 (pure-Python package).
class Meson < Formula
desc "Fast and user friendly build system"
homepage "http://mesonbuild.com/"
url "https://github.com/mesonbuild/meson/releases/download/0.44.0/meson-0.44.0.tar.gz"
sha256 "50f9b12b77272ef6ab064d26b7e06667f07fa9f931e6a20942bba2216ba4281b"
head "https://github.com/mesonbuild/meson.git"
bottle do
cellar :any_skip_relocation
sha256 "99722391bc3b2289943de52db40d7b5852d34a2ccd73d2e199d21ef7f45c84f9" => :high_sierra
sha256 "99722391bc3b2289943de52db40d7b5852d34a2ccd73d2e199d21ef7f45c84f9" => :sierra
sha256 "99722391bc3b2289943de52db40d7b5852d34a2ccd73d2e199d21ef7f45c84f9" => :el_capitan
end
depends_on :python3
depends_on "ninja"
def install
# Install into this formula's site-packages via setuptools, then point the
# launcher scripts at that path through a PYTHONPATH env wrapper.
version = Language::Python.major_minor_version("python3")
ENV["PYTHONPATH"] = lib/"python#{version}/site-packages"
system "python3", *Language::Python.setup_install_args(prefix)
bin.env_script_all_files(libexec/"bin", :PYTHONPATH => ENV["PYTHONPATH"])
end
test do
# Configure a trivial C project; meson must emit a ninja build file.
(testpath/"helloworld.c").write <<~EOS
main() {
puts("hi");
return 0;
}
EOS
(testpath/"meson.build").write <<~EOS
project('hello', 'c')
executable('hello', 'helloworld.c')
EOS
mkdir testpath/"build" do
system "#{bin}/meson", ".."
assert_predicate testpath/"build/build.ninja", :exist?
end
end
end
|
# frozen_string_literal: true
require ClaratBase::Engine.root.join('app', 'models', 'offer')
# Algolia indexing configuration for Offer: per available locale, one
# "personal" (geo-ranked) index and one "remote" (non-geo) index.
module Offer::SearchAlgolia
extend ActiveSupport::Concern
included do
include AlgoliaSearch
algoliasearch do
I18n.available_locales.each do |locale|
# Searchable attributes, in ranking-priority order.
index = %w(
name description code_word next_steps category_keywords definitions
organization_names category_names stamps_string tags
)
# Plain stored attributes shared by both indexes.
attributes = [:organization_count, :location_address, :location_name,
:slug, :encounter, :organization_names,
:location_visible, :code_word]
# Attributes exposed for faceted filtering.
facets = [:_age_filters, :_language_filters, :_target_audience_filters,
:_exclusive_gender_filters, :section_identifier]
# Personal offers: geo-aware ranking, indexed per locale.
add_index Offer.personal_index_name(locale),
disable_indexing: Rails.env.test?,
if: :personal_indexable? do
attributesToIndex index
ranking %w(typo geo words proximity attribute exact custom)
attribute(:name) { send("name_#{locale}") }
attribute(:description) { send("description_#{locale}") }
attribute(:next_steps) { _next_steps locale }
attribute(:lang) { lang(locale) }
attribute(:tags) { tag_string(locale) }
attribute(:definitions) { definitions_string(locale) }
attribute(:_tags) { _categories(locale) }
attribute(:stamps_string) { stamps_string(locale) }
attribute(:singular_stamp) { singular_stamp(locale) }
attribute(:category_names) { category_names(locale) }
attribute(:category_keywords) { category_keywords(locale) }
add_attribute(*attributes)
add_attribute(*facets)
add_attribute :_geoloc
attributesForFaceting facets + [:_tags]
optionalWords STOPWORDS
end
# Remote offers: area bounding box instead of a geo point; encounter
# becomes an extra facet and geo is dropped from the ranking formula.
add_index Offer.remote_index_name(locale),
disable_indexing: Rails.env.test?,
if: :remote_indexable? do
attributesToIndex index
attribute(:name) { send("name_#{locale}") }
attribute(:description) { send("description_#{locale}") }
attribute(:next_steps) { _next_steps locale }
attribute(:lang) { lang(locale) }
attribute(:tags) { tag_string(locale) }
attribute(:definitions) { definitions_string(locale) }
attribute(:_tags) { _categories(locale) }
attribute(:stamps_string) { stamps_string(locale) }
attribute(:singular_stamp) { singular_stamp(locale) }
attribute(:category_names) { category_names(locale) }
attribute(:category_keywords) { category_keywords(locale) }
add_attribute(*attributes)
add_attribute :area_minlat, :area_maxlat, :area_minlong,
:area_maxlong
add_attribute(*facets)
attributesForFaceting facets + [:_tags, :encounter]
optionalWords STOPWORDS
# no geo necessary
ranking %w(typo words proximity attribute exact custom)
end
end
end
end
end
extended search_algolia with category_explanations
# frozen_string_literal: true
require ClaratBase::Engine.root.join('app', 'models', 'offer')
# Algolia indexing configuration for Offer: per available locale, one
# "personal" (geo-ranked) index and one "remote" (non-geo) index.
module Offer::SearchAlgolia
extend ActiveSupport::Concern
included do
include AlgoliaSearch
algoliasearch do
I18n.available_locales.each do |locale|
# Searchable attributes, in ranking-priority order.
# NOTE(review): category_explanations is declared searchable here but only
# the personal index below defines an attribute for it — confirm whether
# the remote index should supply it too.
index = %w(
name description code_word next_steps category_keywords definitions
organization_names category_names stamps_string tags category_explanations
)
# Plain stored attributes shared by both indexes.
attributes = [:organization_count, :location_address, :location_name,
:slug, :encounter, :organization_names,
:location_visible, :code_word]
# Attributes exposed for faceted filtering.
facets = [:_age_filters, :_language_filters, :_target_audience_filters,
:_exclusive_gender_filters, :section_identifier]
# Personal offers: geo-aware ranking, indexed per locale.
add_index Offer.personal_index_name(locale),
disable_indexing: Rails.env.test?,
if: :personal_indexable? do
attributesToIndex index
ranking %w(typo geo words proximity attribute exact custom)
attribute(:name) { send("name_#{locale}") }
attribute(:description) { send("description_#{locale}") }
attribute(:next_steps) { _next_steps locale }
attribute(:lang) { lang(locale) }
attribute(:tags) { tag_string(locale) }
attribute(:definitions) { definitions_string(locale) }
attribute(:_tags) { _categories(locale) }
attribute(:stamps_string) { stamps_string(locale) }
attribute(:singular_stamp) { singular_stamp(locale) }
attribute(:category_names) { category_names(locale) }
attribute(:category_keywords) { category_keywords(locale) }
attribute(:category_explanations) { category_explanations(locale) }
add_attribute(*attributes)
add_attribute(*facets)
add_attribute :_geoloc
attributesForFaceting facets + [:_tags]
optionalWords STOPWORDS
end
# Remote offers: area bounding box instead of a geo point; encounter
# becomes an extra facet and geo is dropped from the ranking formula.
add_index Offer.remote_index_name(locale),
disable_indexing: Rails.env.test?,
if: :remote_indexable? do
attributesToIndex index
attribute(:name) { send("name_#{locale}") }
attribute(:description) { send("description_#{locale}") }
attribute(:next_steps) { _next_steps locale }
attribute(:lang) { lang(locale) }
attribute(:tags) { tag_string(locale) }
attribute(:definitions) { definitions_string(locale) }
attribute(:_tags) { _categories(locale) }
attribute(:stamps_string) { stamps_string(locale) }
attribute(:singular_stamp) { singular_stamp(locale) }
attribute(:category_names) { category_names(locale) }
attribute(:category_keywords) { category_keywords(locale) }
add_attribute(*attributes)
add_attribute :area_minlat, :area_maxlat, :area_minlong,
:area_maxlong
add_attribute(*facets)
attributesForFaceting facets + [:_tags, :encounter]
optionalWords STOPWORDS
# no geo necessary
ranking %w(typo words proximity attribute exact custom)
end
end
end
end
end
|
# Capistrano deployment configuration for scalar-web.
set :scm, :git
set :repo_url, 'git@github.com:rasyidmujahid/scalar-web.git'
set :ssh_options, {
forward_agent: true,
port: 3456
}
set :branch, :master
set :deploy_to, -> { "/var/www/html/#{fetch(:application)}" }
set :log_level, :debug
# Composer is not on PATH on the server; invoke the phar through php.
SSHKit.config.command_map[:composer] = "php /var/local/composer/composer.phar"
# Apache users with .htaccess files:
# it needs to be added to linked_files so it persists across deploys:
# set :linked_files, %w{.env web/.htaccess}
# BUG FIX: there were two consecutive `set :linked_files` calls and the second
# silently replaced the first, so `.env` was never linked. Declare both shared
# files in a single call instead.
set :linked_files, %w{.env config/.env}
set :linked_dirs, %w{web/app/uploads}
namespace :deploy do
desc 'Restart application'
task :restart do
on roles(:app), in: :sequence, wait: 5 do
# Your restart mechanism here, for example:
# execute :service, :nginx, :reload
end
end
end
# The above restart task is not run by default
# Uncomment the following line to run it on deploys if needed
# after 'deploy:publishing', 'deploy:restart'
temporary fix .env loaded thru 2 places
# Capistrano deployment configuration for scalar-web.
set :scm, :git
set :repo_url, 'git@github.com:rasyidmujahid/scalar-web.git'
set :ssh_options, {
forward_agent: true,
port: 3456
}
set :branch, :master
set :deploy_to, -> { "/var/www/html/#{fetch(:application)}" }
set :log_level, :debug
# Composer is not on PATH on the server; invoke the phar through php.
SSHKit.config.command_map[:composer] = "php /var/local/composer/composer.phar"
# Apache users with .htaccess files:
# it needs to be added to linked_files so it persists across deploys:
# set :linked_files, %w{.env web/.htaccess}
# Both env files are kept in shared/ and symlinked into each release.
set :linked_files, %w{.env config/.env}
set :linked_dirs, %w{web/app/uploads}
namespace :deploy do
desc 'Restart application'
task :restart do
on roles(:app), in: :sequence, wait: 5 do
# Your restart mechanism here, for example:
# execute :service, :nginx, :reload
end
end
end
# The above restart task is not run by default
# Uncomment the following line to run it on deploys if needed
# after 'deploy:publishing', 'deploy:restart'
# Homebrew formula for Apache Mesos 1.0.0, including its Python bindings.
class Mesos < Formula
desc "Apache cluster manager"
homepage "https://mesos.apache.org"
url "https://www.apache.org/dyn/closer.cgi?path=mesos/1.0.0/mesos-1.0.0.tar.gz"
mirror "https://archive.apache.org/dist/mesos/1.0.0/mesos-1.0.0.tar.gz"
sha256 "dabca5b60604fd672aaa34e4178bb42c6513eab59a07a98ece1e057eb34c28b2"
bottle do
sha256 "7bbf7f532c4ce172a754232a6b8ad8066a7245db06147ef54c5b2901ffe60a3f" => :sierra
sha256 "8a4d45b766546eb80be55bb65c50b66a6d1e3b0f655646b222e5252384330b0f" => :el_capitan
sha256 "3ba5bc60511694dc4cdebbacc8f409fd4dc17ba12961bc78eccc2d1d3dfc7ade" => :yosemite
sha256 "2b0aab36735f07c2db20b45b8b381003d93898213c41ff6ed071cdd26da54346" => :mavericks
end
depends_on java: "1.7+"
depends_on macos: :mountain_lion
depends_on apr: :build
depends_on "maven" => :build
depends_on "subversion"
# Python helper library vendored into libexec (see install).
resource "boto" do
url "https://pypi.python.org/packages/6f/ce/3447e2136c629ae895611d946879b43c19346c54876dea614316306b17dd/boto-2.40.0.tar.gz"
sha256 "e12d5fca11fcabfd0acd18f78651e0f0dba60f958a0520ff4e9b73e35cd9928f"
end
resource "protobuf" do
url "https://pypi.python.org/packages/source/p/protobuf/protobuf-2.6.1.tar.gz"
sha256 "8faca1fb462ee1be58d00f5efb4ca4f64bde92187fe61fde32615bbee7b3e745"
end
# build dependencies for protobuf
resource "six" do
url "https://pypi.python.org/packages/source/s/six/six-1.9.0.tar.gz"
sha256 "e24052411fc4fbd1f672635537c3fc2330d9481b18c0317695b46259512c91d5"
end
resource "python-dateutil" do
url "https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.4.0.tar.gz"
sha256 "439df33ce47ef1478a4f4765f3390eab0ed3ec4ae10be32f2930000c8d19f417"
end
resource "pytz" do
url "https://pypi.python.org/packages/source/p/pytz/pytz-2014.10.tar.bz2"
sha256 "387f968fde793b142865802916561839f5591d8b4b14c941125eb0fca7e4e58d"
end
resource "python-gflags" do
url "https://pypi.python.org/packages/source/p/python-gflags/python-gflags-2.0.tar.gz"
sha256 "0dff6360423f3ec08cbe3bfaf37b339461a54a21d13be0dd5d9c9999ce531078"
end
resource "google-apputils" do
url "https://pypi.python.org/packages/source/g/google-apputils/google-apputils-0.4.2.tar.gz"
sha256 "47959d0651c32102c10ad919b8a0ffe0ae85f44b8457ddcf2bdc0358fb03dc29"
end
needs :cxx11
def install
ENV.java_cache
# Stage boto into libexec and expose it through a .pth file so the Mesos
# CLI tools can import it.
boto_path = libexec/"boto/lib/python2.7/site-packages"
ENV.prepend_create_path "PYTHONPATH", boto_path
resource("boto").stage do
system "python", *Language::Python.setup_install_args(libexec/"boto")
end
(lib/"python2.7/site-packages").mkpath
(lib/"python2.7/site-packages/homebrew-mesos-boto.pth").write "#{boto_path}\n"
# work around distutils abusing CC instead of using CXX
# https://issues.apache.org/jira/browse/MESOS-799
# https://github.com/Homebrew/homebrew/pull/37087
native_patch = <<-EOS.undent
import os
os.environ["CC"] = os.environ["CXX"]
os.environ["LDFLAGS"] = "@LIBS@"
\\0
EOS
inreplace "src/python/executor/setup.py.in",
"import ext_modules",
native_patch
inreplace "src/python/scheduler/setup.py.in",
"import ext_modules",
native_patch
# skip build javadoc because Homebrew sandbox ENV.java_cache
# would trigger maven-javadoc-plugin bug.
# https://issues.apache.org/jira/browse/MESOS-3482
maven_javadoc_patch = <<-EOS.undent
<properties>
<maven.javadoc.skip>true</maven.javadoc.skip>
</properties>
\\0
EOS
inreplace "src/java/mesos.pom.in",
"<url>http://mesos.apache.org</url>",
maven_javadoc_patch
args = %W[
--prefix=#{prefix}
--disable-debug
--disable-dependency-tracking
--disable-silent-rules
--with-svn=#{Formula["subversion"].opt_prefix}
]
unless MacOS::CLT.installed?
args << "--with-apr=#{Formula["apr"].opt_libexec}"
end
ENV.cxx11
# First pass: build and install the core daemons without Python bindings.
system "./configure", "--disable-python", *args
system "make"
system "make", "install"
# The native Python modules `executor` and `scheduler` (see below) fail to
# link to Subversion libraries if Homebrew isn't installed in `/usr/local`.
ENV.append_to_cflags "-L#{Formula["subversion"].opt_lib}"
# Second pass: reconfigure with Python enabled and install each binding.
system "./configure", "--enable-python", *args
["native", "interface", "executor", "scheduler", "cli", ""].each do |p|
cd "src/python/#{p}" do
system "python", *Language::Python.setup_install_args(prefix)
end
end
# stage protobuf build dependencies
ENV.prepend_create_path "PYTHONPATH", buildpath/"protobuf/lib/python2.7/site-packages"
%w[six python-dateutil pytz python-gflags google-apputils].each do |r|
resource(r).stage do
system "python", *Language::Python.setup_install_args(buildpath/"protobuf")
end
end
protobuf_path = libexec/"protobuf/lib/python2.7/site-packages"
ENV.prepend_create_path "PYTHONPATH", protobuf_path
resource("protobuf").stage do
ln_s buildpath/"protobuf/lib/python2.7/site-packages/google/apputils", "google/apputils"
system "python", *Language::Python.setup_install_args(libexec/"protobuf")
end
pth_contents = "import site; site.addsitedir('#{protobuf_path}')\n"
(lib/"python2.7/site-packages/homebrew-mesos-protobuf.pth").write pth_contents
end
test do
require "timeout"
# Boot a master and an agent, then run a one-shot task that touches a file.
master = fork do
exec "#{sbin}/mesos-master", "--ip=127.0.0.1",
"--registry=in_memory"
end
agent = fork do
exec "#{sbin}/mesos-agent", "--master=127.0.0.1:5050",
"--work_dir=#{testpath}"
end
Timeout.timeout(15) do
system "#{bin}/mesos", "execute",
"--master=127.0.0.1:5050",
"--name=execute-touch",
"--command=touch\s#{testpath}/executed"
end
Process.kill("TERM", master)
Process.kill("TERM", agent)
assert File.exist?("#{testpath}/executed")
system "python", "-c", "import mesos.native"
end
end
mesos 1.0.1
Closes #5096.
Signed-off-by: ilovezfs <fbd54dbbcf9e596abad4ccdc4dfc17f80ebeaee2@icloud.com>
# Homebrew formula for Apache Mesos 1.0.1, including its Python bindings.
class Mesos < Formula
desc "Apache cluster manager"
homepage "https://mesos.apache.org"
url "https://www.apache.org/dyn/closer.cgi?path=mesos/1.0.1/mesos-1.0.1.tar.gz"
mirror "https://archive.apache.org/dist/mesos/1.0.1/mesos-1.0.1.tar.gz"
sha256 "e053d97192ca1dd949e07e6e34cca0f28af9767cdff5ec984769b2102017b0c1"
bottle do
sha256 "7bbf7f532c4ce172a754232a6b8ad8066a7245db06147ef54c5b2901ffe60a3f" => :sierra
sha256 "8a4d45b766546eb80be55bb65c50b66a6d1e3b0f655646b222e5252384330b0f" => :el_capitan
sha256 "3ba5bc60511694dc4cdebbacc8f409fd4dc17ba12961bc78eccc2d1d3dfc7ade" => :yosemite
sha256 "2b0aab36735f07c2db20b45b8b381003d93898213c41ff6ed071cdd26da54346" => :mavericks
end
depends_on java: "1.7+"
depends_on macos: :mountain_lion
depends_on apr: :build
depends_on "maven" => :build
depends_on "subversion"
resource "protobuf" do
url "https://pypi.python.org/packages/source/p/protobuf/protobuf-2.6.1.tar.gz"
sha256 "8faca1fb462ee1be58d00f5efb4ca4f64bde92187fe61fde32615bbee7b3e745"
end
# build dependencies for protobuf
resource "six" do
url "https://files.pythonhosted.org/packages/b3/b2/238e2590826bfdd113244a40d9d3eb26918bd798fc187e2360a8367068db/six-1.10.0.tar.gz"
sha256 "105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a"
end
resource "python-dateutil" do
url "https://files.pythonhosted.org/packages/3e/f5/aad82824b369332a676a90a8c0d1e608b17e740bbb6aeeebca726f17b902/python-dateutil-2.5.3.tar.gz"
sha256 "1408fdb07c6a1fa9997567ce3fcee6a337b39a503d80699e0f213de4aa4b32ed"
end
resource "pytz" do
url "https://files.pythonhosted.org/packages/f7/c7/08e54702c74baf9d8f92d0bc331ecabf6d66a56f6d36370f0a672fc6a535/pytz-2016.6.1.tar.bz2"
sha256 "b5aff44126cf828537581e534cc94299b223b945a2bb3b5434d37bf8c7f3a10c"
end
resource "python-gflags" do
url "https://files.pythonhosted.org/packages/6b/1c/47996c14dc91249376f218c0f943da3b85ff7e9af9c5de05cd2600c8afb4/python-gflags-3.0.7.tar.gz"
sha256 "db889af55e39fa6a37125d6aa70dfdd788dbc180f9566d3053250e28877e68dc"
end
resource "google-apputils" do
url "https://files.pythonhosted.org/packages/69/66/a511c428fef8591c5adfa432a257a333e0d14184b6c5d03f1450827f7fe7/google-apputils-0.4.2.tar.gz"
sha256 "47959d0651c32102c10ad919b8a0ffe0ae85f44b8457ddcf2bdc0358fb03dc29"
end
needs :cxx11
def install
ENV.java_cache
# work around to avoid `_clock_gettime` symbol not found error.
if MacOS.version == "10.11" && MacOS::Xcode.installed? && MacOS::Xcode.version >= "8.0"
ENV["ac_have_clock_syscall"] = "no"
end
# work around distutils abusing CC instead of using CXX
# https://issues.apache.org/jira/browse/MESOS-799
# https://github.com/Homebrew/homebrew/pull/37087
native_patch = <<-EOS.undent
import os
os.environ["CC"] = os.environ["CXX"]
os.environ["LDFLAGS"] = "@LIBS@"
\\0
EOS
inreplace "src/python/executor/setup.py.in",
"import ext_modules",
native_patch
inreplace "src/python/scheduler/setup.py.in",
"import ext_modules",
native_patch
# skip build javadoc because Homebrew sandbox ENV.java_cache
# would trigger maven-javadoc-plugin bug.
# https://issues.apache.org/jira/browse/MESOS-3482
maven_javadoc_patch = <<-EOS.undent
<properties>
<maven.javadoc.skip>true</maven.javadoc.skip>
</properties>
\\0
EOS
inreplace "src/java/mesos.pom.in",
"<url>http://mesos.apache.org</url>",
maven_javadoc_patch
args = %W[
--prefix=#{prefix}
--disable-debug
--disable-dependency-tracking
--disable-silent-rules
--with-svn=#{Formula["subversion"].opt_prefix}
]
unless MacOS::CLT.installed?
args << "--with-apr=#{Formula["apr"].opt_libexec}"
end
ENV.cxx11
# First pass: build and install the core daemons without Python bindings.
system "./configure", "--disable-python", *args
system "make"
system "make", "install"
# The native Python modules `executor` and `scheduler` (see below) fail to
# link to Subversion libraries if Homebrew isn't installed in `/usr/local`.
ENV.append_to_cflags "-L#{Formula["subversion"].opt_lib}"
# Second pass: reconfigure with Python enabled and install each binding.
system "./configure", "--enable-python", *args
["native", "interface", "executor", "scheduler", "cli", ""].each do |p|
cd "src/python/#{p}" do
system "python", *Language::Python.setup_install_args(prefix)
end
end
# stage protobuf build dependencies
ENV.prepend_create_path "PYTHONPATH", buildpath/"protobuf/lib/python2.7/site-packages"
%w[six python-dateutil pytz python-gflags google-apputils].each do |r|
resource(r).stage do
system "python", *Language::Python.setup_install_args(buildpath/"protobuf")
end
end
protobuf_path = libexec/"protobuf/lib/python2.7/site-packages"
ENV.prepend_create_path "PYTHONPATH", protobuf_path
resource("protobuf").stage do
ln_s buildpath/"protobuf/lib/python2.7/site-packages/google/apputils", "google/apputils"
system "python", *Language::Python.setup_install_args(libexec/"protobuf")
end
pth_contents = "import site; site.addsitedir('#{protobuf_path}')\n"
(lib/"python2.7/site-packages/homebrew-mesos-protobuf.pth").write pth_contents
end
test do
require "timeout"
# Boot a master and an agent, then run a one-shot task that touches a file.
master = fork do
exec "#{sbin}/mesos-master", "--ip=127.0.0.1",
"--registry=in_memory"
end
agent = fork do
exec "#{sbin}/mesos-agent", "--master=127.0.0.1:5050",
"--work_dir=#{testpath}"
end
Timeout.timeout(15) do
system "#{bin}/mesos", "execute",
"--master=127.0.0.1:5050",
"--name=execute-touch",
"--command=touch\s#{testpath}/executed"
end
Process.kill("TERM", master)
Process.kill("TERM", agent)
assert File.exist?("#{testpath}/executed")
system "python", "-c", "import mesos.native"
end
end
|
# encoding: utf-8
# author: Boris Barroso
# email: boriscyber@gmail.com
# Used to access the organisation_id in the models
# encoding: utf-8
# author: Boris Barroso
# email: boriscyber@gmail.com
# Holds the current Organisation for the request so models can read it
# (e.g. to scope records by organisation).
class OrganisationSession
  class << self
    attr_reader :organisation

    # allow_nil prevents a NoMethodError on NilClass when no organisation
    # has been set yet (e.g. before sign-in): the readers simply return nil.
    # NOTE(review): ":emamil" looks like a typo for ":email", but it defines
    # the public method name — confirm against callers before renaming.
    delegate :id, :name, :currency, :tenant, :emamil, :address, to: :organisation, allow_nil: true

    # Set from the application controller (alongside Devise's current_user).
    # @param org [Organisation] the organisation for the current session
    # @raise [RuntimeError] if org is not an Organisation
    def organisation=(org)
      raise "The OrganisationSession couldn't be set: expected an Organisation" unless org.is_a? Organisation
      @organisation = org
    end
  end
end
Fixed with allow_nil
# encoding: utf-8
# author: Boris Barroso
# email: boriscyber@gmail.com
# Used to access the organisation_id in the models
# encoding: utf-8
# author: Boris Barroso
# email: boriscyber@gmail.com
# Holds the current Organisation for the request so models can read it
# (e.g. to scope records by organisation).
class OrganisationSession
  class << self
    attr_reader :organisation
    # Delegated readers return nil (instead of raising) while no
    # organisation has been set.
    # NOTE(review): ":emamil" looks like a typo for ":email" — it defines the
    # public method name, so confirm against callers before renaming.
    delegate :id, :name, :currency, :tenant, :emamil, :address, to: :organisation, allow_nil: true
    # Set from the application controller (alongside Devise's current_user).
    # @param org [Organisation] the organisation for the current session
    # @raise [RuntimeError] if org is not an Organisation
    def organisation=(org)
      raise "The OrganisationSession couln't be set' expected Organisation" unless org.is_a? Organisation
      @organisation = org
    end
  end
end
|
# Capistrano Laravel 4 Deployment Tasks
# Watts Martin (layotl at gmail com)
# https://gist.github.com/chipotle/5506641
# updated 02-Aug-2013
# Assumptions:
#
# - You are using a .gitignore similar to Laravel's default, so your
# vendor directory and composer(.phar) are *not* under version control
# - Composer is installed as an executable at /usr/local/bin/composer
#
# If you don't have Composer installed globally, modify the appropriate task
# (:composer_install). Or just install Composer globally!
# Application / repository identity.
set :application, "Claw and Quill"
set :repository, "git@github.com:chipotle/quill.git"
set :scm, :git
set :scm_username, "chipotle"
# Single-host deployment: every role points at the same machine.
role :web, "clawandquill.net"
role :app, "clawandquill.net"
role :db, "clawandquill.net", :primary => true
# Server-side paths and deployment strategy.
set :deploy_to, "/opt/nginx/sites/quill"
set :deploy_via, :remote_cache
set :use_sudo, false
set :ssh_options, {:forward_agent => true}
set :copy_exclude, [".git", ".gitignore", ".tags", ".tags_sorted_by_file"]
set :keep_releases, 4
# Nginx requires the php_fpm:reload task; other servers may not
after :deploy, "php_fpm:reload"
namespace :deploy do
  # Custom update pipeline: fetch code, copy per-server config, install
  # Composer dependencies, link shared dirs, then open up permissions.
  task :update do
    transaction do
      update_code
      copy_config
      composer_install
      link_shared
      fix_permissions
    end
  end

  # Make the new release group-writable, then point `current` at it.
  task :finalize_update do
    transaction do
      run "chmod -R g+w #{releases_path}/#{release_name}"
      symlink
    end
  end

  task :symlink do
    transaction do
      run "ln -nfs #{current_release} #{deploy_to}/#{current_dir}"
    end
  end

  desc "Link Laravel shared directories."
  task :link_shared do
    transaction do
      run "ln -nfs #{shared_path}/system #{current_release}/public/system"
    end
  end

  desc "Run Artisan migrate task."
  task :migrate do
    run "php #{current_release}/artisan migrate"
  end

  desc "Deploy and execute pending migrations."
  task :migrations do
    update_code
    copy_config
    composer_install
    link_shared
    fix_permissions
    migrate
  end

  desc "Set Laravel storage directory world-writable."
  task :fix_permissions do
    transaction do
      run "chmod -R a+w #{current_release}/app/storage"
    end
  end

  desc "Install dependencies with Composer"
  task :composer_install do
    transaction do
      # --no-progress keeps Composer's progress bars out of the deploy log.
      run "cd #{current_release};/usr/local/bin/composer install --no-dev --no-progress"
    end
  end

  desc "Copy server-specific configuration files."
  task :copy_config do
    transaction do
      run "cp #{shared_path}/config/* #{current_release}/app/config/"
    end
  end
end
# This command is tested on Arch Linux; other distributions/OSes may need a
# different configuration (or may not require this at all).
namespace :php_fpm do
  desc "Reload PHP-FPM (requires sudo access to systemctl)."
  task :reload, :roles => :app do
    # reload-or-restart falls back to a full restart if reload is unsupported.
    run "sudo /usr/bin/systemctl reload-or-restart php-fpm"
  end
end
# Database dump task adapted from https://gist.github.com/rgo/318312
namespace :db do
  # Compute a timestamped backup filename; sets :backup_file for later tasks.
  task :backup_name, :roles => :db do
    now = Time.now
    run "mkdir -p #{shared_path}/db_backups"
    backup_time = [now.year, now.month, now.day, now.hour, now.min].join('-')
    set :backup_file, "#{shared_path}/db_backups/#{database}-#{backup_time}.sql"
  end
  desc "Backup MySQL or PostgreSQL database to shared_path/db_backups"
  task :dump, :roles => :db do
    # Read Laravel's database config on the server as JSON.
    # NOTE(review): YAML.load happens to parse this JSON output; JSON.parse
    # would be more explicit — confirm before changing.
    run("php -r '$db=include\"#{shared_path}/config/database.php\";echo json_encode($db,JSON_UNESCAPED_SLASHES);'") { |channel, stream, data| @environment_info = YAML.load(data) }
    default = @environment_info['default']
    connection = @environment_info['connections'][default]
    dbuser = connection['username']
    dbpass = connection['password']
    database = connection['database']
    dbhost = connection['host']
    set :database, database
    backup_name
    if connection['driver'] == 'mysql'
      # Answer the interactive password prompt so the password never
      # appears on the remote command line.
      run "mysqldump --add-drop-table -u #{dbuser} -h #{dbhost} -p #{database} | bzip2 -c > #{backup_file}.bz2" do |ch, stream, out |
        ch.send_data "#{dbpass}\n" if out=~ /^Enter password:/
      end
    else
      run "pg_dump -W -c -U #{dbuser} -h #{dbhost} #{database} | bzip2 -c > #{backup_file}.bz2" do |ch, stream, out |
        ch.send_data "#{dbpass}\n" if out=~ /^Password:/
      end
    end
  end
  desc "Sync production database to your local workstation"
  task :clone_to_local, :roles => :db, :only => {:primary => true} do
    dump
    get "#{backup_file}.bz2", "/tmp/#{application}.sql.bz2"
    # Read the *local* Laravel database config for the import target.
    data = `php -r '$db=include"app/config/database.php";echo json_encode($db,JSON_UNESCAPED_SLASHES);'`
    development_info = YAML.load(data)
    default = development_info['default']
    connection = development_info['connections'][default]
    dbuser = connection['username']
    dbpass = connection['password']
    database = connection['database']
    dbhost = connection['host']
    if connection['driver'] == 'mysql'
      run_str = "bzcat '/tmp/#{application}.sql.bz2' | mysql -u #{dbuser} --password='#{dbpass}' -h #{dbhost} #{database}"
    else
      run_str = "PGPASSWORD=#{dbpass} bzcat '/tmp/#{application}.sql.bz2' | psql -U #{dbuser} -h #{dbhost} #{database}"
    end
    %x!#{run_str}!
  end
end
Don't show progress bars in composer install
# Capistrano Laravel 4 Deployment Tasks
# Watts Martin (layotl at gmail com)
# https://gist.github.com/chipotle/5506641
# updated 14-Aug-2013
# Assumptions:
#
# - You are using a .gitignore similar to Laravel's default, so your
# vendor directory and composer(.phar) are *not* under version control
# - Composer is installed as an executable at /usr/local/bin/composer
#
# If you don't have Composer installed globally, modify the appropriate task
# (:composer_install). Or just install Composer globally!
# Application / repository identity.
set :application, "Claw and Quill"
set :repository, "git@github.com:chipotle/quill.git"
set :scm, :git
set :scm_username, "chipotle"
# Single-host deployment: every role points at the same machine.
role :web, "clawandquill.net"
role :app, "clawandquill.net"
role :db, "clawandquill.net", :primary => true
# Server-side paths and deployment strategy.
set :deploy_to, "/opt/nginx/sites/quill"
set :deploy_via, :remote_cache
set :use_sudo, false
set :ssh_options, {:forward_agent => true}
set :copy_exclude, [".git", ".gitignore", ".tags", ".tags_sorted_by_file"]
set :keep_releases, 4
# Nginx requires the php_fpm:reload task; other servers may not
after :deploy, "php_fpm:reload"
namespace :deploy do
  # Custom update pipeline: fetch code, copy per-server config, install
  # Composer dependencies, link shared dirs, then open up permissions.
  task :update do
    transaction do
      update_code
      copy_config
      composer_install
      link_shared
      fix_permissions
    end
  end
  # Make the new release group-writable, then point `current` at it.
  task :finalize_update do
    transaction do
      run "chmod -R g+w #{releases_path}/#{release_name}"
      symlink
    end
  end
  task :symlink do
    transaction do
      run "ln -nfs #{current_release} #{deploy_to}/#{current_dir}"
    end
  end
  desc "Link Laravel shared directories."
  task :link_shared do
    transaction do
      run "ln -nfs #{shared_path}/system #{current_release}/public/system"
    end
  end
  desc "Run Artisan migrate task."
  task :migrate do
    run "php #{current_release}/artisan migrate"
  end
  desc "Deploy and execute pending migrations."
  task :migrations do
    update_code
    copy_config
    composer_install
    link_shared
    fix_permissions
    migrate
  end
  desc "Set Laravel storage directory world-writable."
  task :fix_permissions do
    transaction do
      run "chmod -R a+w #{current_release}/app/storage"
    end
  end
  desc "Install dependencies with Composer"
  task :composer_install do
    transaction do
      # --no-progress keeps Composer's progress bars out of the deploy log.
      run "cd #{current_release};/usr/local/bin/composer install --no-dev --no-progress"
    end
  end
  desc "Copy server-specific configuration files."
  task :copy_config do
    transaction do
      run "cp #{shared_path}/config/* #{current_release}/app/config/"
    end
  end
end
# This command is tested on Arch Linux; other distributions/OSes may need a
# different configuration (or may not require this at all).
namespace :php_fpm do
  desc "Reload PHP-FPM (requires sudo access to systemctl)."
  task :reload, :roles => :app do
    # reload-or-restart falls back to a full restart if reload is unsupported.
    run "sudo /usr/bin/systemctl reload-or-restart php-fpm"
  end
end
# Database dump task adapted from https://gist.github.com/rgo/318312
namespace :db do
  # Compute a timestamped backup filename; sets :backup_file for later tasks.
  task :backup_name, :roles => :db do
    now = Time.now
    run "mkdir -p #{shared_path}/db_backups"
    backup_time = [now.year, now.month, now.day, now.hour, now.min].join('-')
    set :backup_file, "#{shared_path}/db_backups/#{database}-#{backup_time}.sql"
  end
  desc "Backup MySQL or PostgreSQL database to shared_path/db_backups"
  task :dump, :roles => :db do
    # Read Laravel's database config on the server as JSON.
    # NOTE(review): YAML.load happens to parse this JSON output; JSON.parse
    # would be more explicit — confirm before changing.
    run("php -r '$db=include\"#{shared_path}/config/database.php\";echo json_encode($db,JSON_UNESCAPED_SLASHES);'") { |channel, stream, data| @environment_info = YAML.load(data) }
    default = @environment_info['default']
    connection = @environment_info['connections'][default]
    dbuser = connection['username']
    dbpass = connection['password']
    database = connection['database']
    dbhost = connection['host']
    set :database, database
    backup_name
    if connection['driver'] == 'mysql'
      # Answer the interactive password prompt so the password never
      # appears on the remote command line.
      run "mysqldump --add-drop-table -u #{dbuser} -h #{dbhost} -p #{database} | bzip2 -c > #{backup_file}.bz2" do |ch, stream, out |
        ch.send_data "#{dbpass}\n" if out=~ /^Enter password:/
      end
    else
      run "pg_dump -W -c -U #{dbuser} -h #{dbhost} #{database} | bzip2 -c > #{backup_file}.bz2" do |ch, stream, out |
        ch.send_data "#{dbpass}\n" if out=~ /^Password:/
      end
    end
  end
  desc "Sync production database to your local workstation"
  task :clone_to_local, :roles => :db, :only => {:primary => true} do
    dump
    get "#{backup_file}.bz2", "/tmp/#{application}.sql.bz2"
    # Read the *local* Laravel database config for the import target.
    data = `php -r '$db=include"app/config/database.php";echo json_encode($db,JSON_UNESCAPED_SLASHES);'`
    development_info = YAML.load(data)
    default = development_info['default']
    connection = development_info['connections'][default]
    dbuser = connection['username']
    dbpass = connection['password']
    database = connection['database']
    dbhost = connection['host']
    if connection['driver'] == 'mysql'
      run_str = "bzcat '/tmp/#{application}.sql.bz2' | mysql -u #{dbuser} --password='#{dbpass}' -h #{dbhost} #{database}"
    else
      run_str = "PGPASSWORD=#{dbpass} bzcat '/tmp/#{application}.sql.bz2' | psql -U #{dbuser} -h #{dbhost} #{database}"
    end
    %x!#{run_str}!
  end
end
|
class Micro < Formula
  desc "Modern and intuitive terminal-based text editor"
  homepage "https://github.com/zyedidia/micro"
  # Built from a git tag (not a tarball) so `make build` can embed version info.
  url "https://github.com/zyedidia/micro.git",
      :tag => "v2.0.0",
      :revision => "399c6290768867351813250a6f1d8df7554917a5"
  head "https://github.com/zyedidia/micro.git"
  bottle do
    cellar :any_skip_relocation
    sha256 "00bdd2ff110e3ce9f3200b6c4674138409ffe964c018edc7500bc8fb54d0a762" => :catalina
    sha256 "5c363d14693ada72f541daf6a6bc2a470ce3b0dd434996250b4723f44b71af93" => :mojave
    sha256 "413d76c8af75c9647d19d9ab91c5a22f8621a67cb40214cd26085114b34da19d" => :high_sierra
  end
  depends_on "go" => :build
  def install
    system "make", "build"
    bin.install "micro"
    man1.install "assets/packaging/micro.1"
    # Keep README/LICENSE metafiles alongside the keg.
    prefix.install_metafiles
  end
  test do
    assert_match version.to_s, shell_output("#{bin}/micro -version")
  end
end
micro: update 2.0.0 bottle.
class Micro < Formula
  desc "Modern and intuitive terminal-based text editor"
  homepage "https://github.com/zyedidia/micro"
  # Built from a git tag (not a tarball) so `make build` can embed version info.
  url "https://github.com/zyedidia/micro.git",
      :tag => "v2.0.0",
      :revision => "399c6290768867351813250a6f1d8df7554917a5"
  head "https://github.com/zyedidia/micro.git"
  bottle do
    cellar :any_skip_relocation
    sha256 "00bdd2ff110e3ce9f3200b6c4674138409ffe964c018edc7500bc8fb54d0a762" => :catalina
    sha256 "5c363d14693ada72f541daf6a6bc2a470ce3b0dd434996250b4723f44b71af93" => :mojave
    sha256 "413d76c8af75c9647d19d9ab91c5a22f8621a67cb40214cd26085114b34da19d" => :high_sierra
    sha256 "fb56dd8d44342315255f642167adb6cbd139393c09fb2d456097c256dd372d6e" => :x86_64_linux
  end
  depends_on "go" => :build
  def install
    system "make", "build"
    bin.install "micro"
    man1.install "assets/packaging/micro.1"
    # Keep README/LICENSE metafiles alongside the keg.
    prefix.install_metafiles
  end
  test do
    assert_match version.to_s, shell_output("#{bin}/micro -version")
  end
end
|
module Renalware
  # A drug prescribed to a patient, optionally attached to a polymorphic
  # "treatable" episode (peritonitis episode or exit-site infection).
  class Medication < ActiveRecord::Base
    # Virtual attribute backing the drug-selection widget in forms.
    attr_accessor :drug_select

    acts_as_paranoid
    has_paper_trail class_name: 'Renalware::MedicationVersion'

    belongs_to :patient
    belongs_to :drug, class_name: "Renalware::Drugs::Drug"
    belongs_to :treatable, polymorphic: true
    belongs_to :medication_route

    validates :patient, presence: true
    validates :treatable, presence: true
    validates :drug, presence: true
    validates :dose, presence: true
    validates :medication_route, presence: true
    validates :frequency, presence: true
    validates :start_date, presence: true
    validates :provider, presence: true

    enum provider: Provider.codes

    scope :ordered, -> { order(default_search_order) }

    # Newest prescriptions first.
    def self.default_search_order
      "start_date desc"
    end

    # Factory for a medication attached to a peritonitis episode.
    def self.peritonitis
      new(treatable_type: 'Renalware::PeritonitisEpisode')
    end

    # Factory for a medication attached to an exit-site infection.
    def self.exit_site
      new(treatable_type: 'Renalware::ExitSiteInfection')
    end

    # One-line human-readable summary of the prescription.
    def formatted
      parts = []
      parts << drug.name if drug.present?
      parts << dose
      parts << medication_route.name if medication_route.present?
      parts << frequency
      parts << start_date
      parts.compact.join(', ')
    end
  end
end
Formatting
module Renalware
  # A drug prescribed to a patient, optionally attached to a polymorphic
  # "treatable" episode (peritonitis episode or exit-site infection).
  class Medication < ActiveRecord::Base
    # Virtual attribute backing the drug-selection widget in forms.
    attr_accessor :drug_select
    acts_as_paranoid
    has_paper_trail class_name: 'Renalware::MedicationVersion'
    belongs_to :patient
    belongs_to :drug, class_name: "Renalware::Drugs::Drug"
    belongs_to :treatable, polymorphic: true
    belongs_to :medication_route
    validates :patient, presence: true
    validates :treatable, presence: true
    validates :drug, presence: true
    validates :dose, presence: true
    validates :medication_route, presence: true
    validates :frequency, presence: true
    validates :start_date, presence: true
    validates :provider, presence: true
    enum provider: Provider.codes
    scope :ordered, -> { order(default_search_order) }
    # Newest prescriptions first.
    def self.default_search_order
      "start_date desc"
    end
    # Factory for a medication attached to a peritonitis episode.
    def self.peritonitis
      self.new(treatable_type: 'Renalware::PeritonitisEpisode')
    end
    # Factory for a medication attached to an exit-site infection.
    def self.exit_site
      self.new(treatable_type: 'Renalware::ExitSiteInfection')
    end
    # One-line human-readable summary of the prescription.
    def formatted
      [].tap { |ary|
        ary << drug.name if drug.present?
        ary << dose
        ary << medication_route.name if medication_route.present?
        ary << frequency
        ary << start_date
      }.compact.join(', ')
    end
  end
end
|
class Minio < Formula
  desc "High Performance, Kubernetes Native Object Storage"
  homepage "https://min.io"
  url "https://github.com/minio/minio.git",
      tag: "RELEASE.2021-01-16T02-19-44Z",
      revision: "7090bcc8e0cd31e44a6ee33cf8a9f83922159c6d"
  # Upstream tags like RELEASE.2021-01-16T02-19-44Z are not valid version
  # strings, so the digits are collapsed into a sortable numeric version.
  version "20210116021944"
  license "Apache-2.0"
  head "https://github.com/minio/minio.git"
  livecheck do
    url :stable
    regex(%r{href=.*?/tag/(?:RELEASE[._-]?)?([\d\-TZ]+)["' >]}i)
    strategy :github_latest do |page, regex|
      page.scan(regex).map { |match| match&.first&.gsub(/\D/, "") }
    end
  end
  bottle do
    cellar :any_skip_relocation
    sha256 "9194420e7aaf244cceea24ebbbe17874cb870a2d20742b10f5c5b8d8d9721623" => :big_sur
    sha256 "67e58f58cf57674080c97e331c93fbffeb5d776e08d8b01a541b012c74b42f98" => :arm64_big_sur
    sha256 "026cad09a08fb0c3cc01b0196a3113abb4152b8b821f5ad86f1ec0952ef41bdd" => :catalina
    sha256 "578a015bae220112d9ab2da53964e77d7e2a59dec8687864fdb45491b3c98838" => :mojave
  end
  depends_on "go" => :build
  def install
    if build.head?
      system "go", "build", *std_go_args
    else
      # Derive version/commit metadata from the checked-out tag so the
      # binary reports the upstream release it was built from.
      release = `git tag --points-at HEAD`.chomp
      version = release.gsub(/RELEASE\./, "").chomp.gsub(/T(\d+)-(\d+)-(\d+)Z/, 'T\1:\2:\3Z')
      commit = `git rev-parse HEAD`.chomp
      proj = "github.com/minio/minio"
      system "go", "build", *std_go_args, "-ldflags", <<~EOS
        -X #{proj}/cmd.Version=#{version}
        -X #{proj}/cmd.ReleaseTag=#{release}
        -X #{proj}/cmd.CommitID=#{commit}
      EOS
    end
  end
  def post_install
    (var/"minio").mkpath
    (etc/"minio").mkpath
  end
  plist_options manual: "minio server"
  # launchd service definition. Fixed: KeepAlive and RunAtLoad were each
  # declared twice in the dict; every key now appears exactly once.
  def plist
    <<~EOS
      <?xml version="1.0" encoding="UTF-8"?>
      <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
      <plist version="1.0">
        <dict>
          <key>Label</key>
          <string>#{plist_name}</string>
          <key>ProgramArguments</key>
          <array>
            <string>#{opt_bin}/minio</string>
            <string>server</string>
            <string>--config-dir=#{etc}/minio</string>
            <string>--address=:9000</string>
            <string>#{var}/minio</string>
          </array>
          <key>RunAtLoad</key>
          <true/>
          <key>KeepAlive</key>
          <true/>
          <key>WorkingDirectory</key>
          <string>#{HOMEBREW_PREFIX}</string>
          <key>StandardErrorPath</key>
          <string>#{var}/log/minio.log</string>
          <key>StandardOutPath</key>
          <string>#{var}/log/minio.log</string>
        </dict>
      </plist>
    EOS
  end
  test do
    assert_match "minio server - start object storage server",
      shell_output("#{bin}/minio server --help 2>&1")
    assert_match "minio gateway - start object storage gateway",
      shell_output("#{bin}/minio gateway 2>&1")
    assert_match "ERROR Unable to validate credentials",
      shell_output("#{bin}/minio gateway s3 2>&1", 1)
  end
end
minio: update 20210116021944 bottle.
class Minio < Formula
  desc "High Performance, Kubernetes Native Object Storage"
  homepage "https://min.io"
  url "https://github.com/minio/minio.git",
      tag: "RELEASE.2021-01-16T02-19-44Z",
      revision: "7090bcc8e0cd31e44a6ee33cf8a9f83922159c6d"
  # Upstream tags like RELEASE.2021-01-16T02-19-44Z are not valid version
  # strings, so the digits are collapsed into a sortable numeric version.
  version "20210116021944"
  license "Apache-2.0"
  head "https://github.com/minio/minio.git"
  livecheck do
    url :stable
    regex(%r{href=.*?/tag/(?:RELEASE[._-]?)?([\d\-TZ]+)["' >]}i)
    strategy :github_latest do |page, regex|
      page.scan(regex).map { |match| match&.first&.gsub(/\D/, "") }
    end
  end
  bottle do
    cellar :any_skip_relocation
    sha256 "9194420e7aaf244cceea24ebbbe17874cb870a2d20742b10f5c5b8d8d9721623" => :big_sur
    sha256 "67e58f58cf57674080c97e331c93fbffeb5d776e08d8b01a541b012c74b42f98" => :arm64_big_sur
    sha256 "026cad09a08fb0c3cc01b0196a3113abb4152b8b821f5ad86f1ec0952ef41bdd" => :catalina
    sha256 "578a015bae220112d9ab2da53964e77d7e2a59dec8687864fdb45491b3c98838" => :mojave
    sha256 "111ba294d6f207128069e2da451beabbacac69f398c9394d4626f96aa0ae29bc" => :x86_64_linux
  end
  depends_on "go" => :build
  def install
    if build.head?
      system "go", "build", *std_go_args
    else
      # Derive version/commit metadata from the checked-out tag so the
      # binary reports the upstream release it was built from.
      release = `git tag --points-at HEAD`.chomp
      version = release.gsub(/RELEASE\./, "").chomp.gsub(/T(\d+)-(\d+)-(\d+)Z/, 'T\1:\2:\3Z')
      commit = `git rev-parse HEAD`.chomp
      proj = "github.com/minio/minio"
      system "go", "build", *std_go_args, "-ldflags", <<~EOS
        -X #{proj}/cmd.Version=#{version}
        -X #{proj}/cmd.ReleaseTag=#{release}
        -X #{proj}/cmd.CommitID=#{commit}
      EOS
    end
  end
  def post_install
    (var/"minio").mkpath
    (etc/"minio").mkpath
  end
  plist_options manual: "minio server"
  # launchd service definition.
  # NOTE(review): KeepAlive and RunAtLoad each appear twice in this dict —
  # the duplicates should be removed (presumably last-key-wins; confirm).
  def plist
    <<~EOS
      <?xml version="1.0" encoding="UTF-8"?>
      <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
      <plist version="1.0">
        <dict>
          <key>KeepAlive</key>
          <true/>
          <key>Label</key>
          <string>#{plist_name}</string>
          <key>ProgramArguments</key>
          <array>
            <string>#{opt_bin}/minio</string>
            <string>server</string>
            <string>--config-dir=#{etc}/minio</string>
            <string>--address=:9000</string>
            <string>#{var}/minio</string>
          </array>
          <key>RunAtLoad</key>
          <true/>
          <key>KeepAlive</key>
          <true/>
          <key>WorkingDirectory</key>
          <string>#{HOMEBREW_PREFIX}</string>
          <key>StandardErrorPath</key>
          <string>#{var}/log/minio.log</string>
          <key>StandardOutPath</key>
          <string>#{var}/log/minio.log</string>
          <key>RunAtLoad</key>
          <true/>
        </dict>
      </plist>
    EOS
  end
  test do
    assert_match "minio server - start object storage server",
      shell_output("#{bin}/minio server --help 2>&1")
    assert_match "minio gateway - start object storage gateway",
      shell_output("#{bin}/minio gateway 2>&1")
    assert_match "ERROR Unable to validate credentials",
      shell_output("#{bin}/minio gateway s3 2>&1", 1)
  end
end
|
# frozen_string_literal: true.
module Spree
  # Calculates rental prices for a car variant over a pickup/return window.
  class CalculatorCar < BaseCalculator
    def initialize(context:, product:, variant:, options:)
      @product = product
      @variant = variant
      @options = options
      # Normalise context dates to Date objects for range comparisons.
      @context_pickup_date = Date.parse(context.pickup_date(options).to_s)
      @context_return_date = Date.parse(context.return_date(options).to_s)
    end

    # Returns an array of price hashes (one per rate whose range covers the
    # rental window), or the product's base price when it has no rates.
    def calculate_price
      return [price: product.price.to_f] if product.rates.empty?
      rates = variant.rates
      array = []
      rates.each do |rate|
        next unless valid_dates?(rate)
        days = (context_return_date - context_pickup_date).to_i
        price = fetch_price(days, rate)
        array << { price: price.format, rate: rate.id, avg: nil, variant: rate.variant }
      end
      array
    end

    private

    attr_reader :product, :variant, :options, :context_pickup_date, :context_return_date

    # A rate applies only if its range spans the whole rental window.
    def valid_dates?(rate)
      Date.parse(rate.start_date) <= context_pickup_date &&
        Date.parse(rate.end_date) >= context_return_date
    end

    # Price per day is banded by rental length (3-6, 7-13, 14-29 days).
    # Fixed: rental lengths outside those bands previously left rate_per_day
    # nil and `days * nil` raised; they now fall back to a zero Money amount.
    def fetch_price(days, rate)
      rate_per_day = if days >= 3 && days <= 6
                       rate.three_six_days
                     elsif days >= 7 && days <= 13
                       rate.seven_thirteen_days
                     elsif days >= 14 && days <= 29
                       rate.fourteen_twentynine_days
                     else
                       Spree::Money.new(0).money
                     end
      days * rate_per_day
    end
  end
end
Why: When days fell outside all of the if branches, an error was raised
Changed: Now a zero price is returned, which needs to be shown properly in the interface
# frozen_string_literal: true.
module Spree
  # Calculates rental prices for a car variant over a pickup/return window.
  class CalculatorCar < BaseCalculator
    def initialize(context:, product:, variant:, options:)
      @product = product
      @variant = variant
      @options = options
      # Normalise context dates to Date objects for range comparisons.
      @context_pickup_date = Date.parse(context.pickup_date(options).to_s)
      @context_return_date = Date.parse(context.return_date(options).to_s)
    end
    # Returns an array of price hashes (one per rate whose range covers the
    # rental window), or the product's base price when it has no rates.
    def calculate_price
      return [price: product.price.to_f] if product.rates.empty?
      rates = variant.rates
      array = []
      rates.each do |rate|
        next unless valid_dates?(rate)
        days = (context_return_date - context_pickup_date).to_i
        price = fetch_price(days, rate)
        array << { price: price.format, rate: rate.id, avg: nil, variant: rate.variant }
      end
      array
    end
    private
    attr_reader :product, :variant, :options, :context_pickup_date, :context_return_date
    # A rate applies only if its range spans the whole rental window.
    def valid_dates?(rate)
      Date.parse(rate.start_date) <= context_pickup_date &&
      Date.parse(rate.end_date) >= context_return_date
    end
    # Price per day is banded by rental length (3-6, 7-13, 14-29 days);
    # anything outside those bands falls back to a zero Money amount, so
    # `days * rate_per_day` never multiplies by nil.
    # NOTE(review): 1-2 day and 30+ day rentals therefore price to zero —
    # confirm the interface renders that case meaningfully.
    def fetch_price(days, rate)
      rate_per_day = if days >= 3 && days <= 6
        rate.three_six_days
      elsif days >= 7 && days <= 13
        rate.seven_thirteen_days
      elsif days >= 14 && days <= 29
        rate.fourteen_twentynine_days
      else
        Spree::Money.new(0).money
      end
      days * rate_per_day
    end
  end
end
|
# deploy.rb - Capistrano config for The Colour Of.
# Copyright 2009 Rob Myers <rob@robmyers.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
################################################################################
# Config
################################################################################
# Application / repository identity and server-side install path.
set :application, "the_colour_of"
set :deploy_to, "/home/robmyers/daemons/#{application}"
set :scm, "git"
set :branch, "master"
set :repository, "http://robmyers.org/git/the_colour_of.git"
set :runner, "robmyers"
set :domain, "robmyers.vm.bytemark.co.uk"
# Single-host deployment: every role points at the same machine.
role :web, domain
role :app, domain
role :db, domain, :primary => true
################################################################################
# Rake
################################################################################
namespace :install_rake_task do
  # Install an hourly cron entry for the app's rake task, preserving any
  # unrelated entries already in the user's crontab.
  task :add_cron_job do
    tmpname = "/tmp/appname-crontab.#{Time.now.strftime('%s')}"
    # run crontab -l or echo '' instead because the crontab command will fail if the user has no pre-existing crontab file.
    # in this case, echo '' is run and the cap recipe won't fail altogether.
    run "(crontab -l || echo '') | grep -v 'rake scrape' > #{tmpname}"
    run "echo '@hourly cd #{current_path} && RAILS_ENV=production rake my_rake_task' >> #{tmpname}"
    run "crontab #{tmpname}"
    run "rm #{tmpname}"
  end
end
namespace :rake do
  # Populate/seed the production database via the app's rake task.
  task :populate_db do
    run("cd #{deploy_to}/current; rake populate_db")
  end
end
################################################################################
# Passenger
################################################################################
namespace :deploy do
  # Passenger restarts the app when tmp/restart.txt is touched; there is no
  # separate server process to start or stop.
  task :start, :roles => :app do
    run "touch #{current_release}/tmp/restart.txt"
  end
  task :stop, :roles => :app do
    # Do nothing.
  end
  desc "Restart Application"
  task :restart, :roles => :app do
    run "touch #{current_release}/tmp/restart.txt"
  end
end
################################################################################
# Configure the db yaml
################################################################################
namespace :deploy do
  # Prompt for MySQL credentials and write them into the deployed database.yml.
  task :after_update_code do
    # Fixed: Rails keeps database.yml under config/, not db/.
    db_yml_path = "#{current_path}/config/database.yml"
    db_yml = IO.read(db_yml_path)
    db_yml.sub!(/username:.*/, "username: #{Capistrano::CLI.ui.ask('Enter MySQL database user: ')}")
    # Fixed: `db.yml.sub!` was a NameError (no `db` object in scope);
    # operate on the db_yml string like the username line above.
    db_yml.sub!(/password:.*/, "password: #{Capistrano::CLI.ui.ask('Enter MySQL database password: ')}")
    File.open(db_yml_path, 'w') {|f| f.write(db_yml) }
  end
end
deploy path fix
# deploy.rb - Capistrano config for The Colour Of.
# Copyright 2009 Rob Myers <rob@robmyers.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
################################################################################
# Config
################################################################################
# Application / repository identity and server-side install path.
set :application, "the_colour_of"
set :deploy_to, "/home/robmyers/daemons/#{application}"
set :scm, "git"
set :branch, "master"
set :repository, "http://robmyers.org/git/the_colour_of.git"
set :runner, "robmyers"
set :domain, "robmyers.vm.bytemark.co.uk"
# Single-host deployment: every role points at the same machine.
role :web, domain
role :app, domain
role :db, domain, :primary => true
################################################################################
# Rake
################################################################################
namespace :install_rake_task do
  # Install an hourly cron entry for the app's rake task, preserving any
  # unrelated entries already in the user's crontab.
  task :add_cron_job do
    tmpname = "/tmp/appname-crontab.#{Time.now.strftime('%s')}"
    # run crontab -l or echo '' instead because the crontab command will fail if the user has no pre-existing crontab file.
    # in this case, echo '' is run and the cap recipe won't fail altogether.
    run "(crontab -l || echo '') | grep -v 'rake scrape' > #{tmpname}"
    run "echo '@hourly cd #{current_path} && RAILS_ENV=production rake my_rake_task' >> #{tmpname}"
    run "crontab #{tmpname}"
    run "rm #{tmpname}"
  end
end
namespace :rake do
  # Populate/seed the production database via the app's rake task.
  task :populate_db do
    run("cd #{deploy_to}/current; rake populate_db")
  end
end
################################################################################
# Passenger
################################################################################
namespace :deploy do
  # Passenger restarts the app when tmp/restart.txt is touched; there is no
  # separate server process to start or stop.
  task :start, :roles => :app do
    run "touch #{current_release}/tmp/restart.txt"
  end
  task :stop, :roles => :app do
    # Do nothing.
  end
  desc "Restart Application"
  task :restart, :roles => :app do
    run "touch #{current_release}/tmp/restart.txt"
  end
end
################################################################################
# Configure the db yaml
################################################################################
namespace :deploy do
  # Prompt for MySQL credentials and write them into the deployed database.yml.
  task :after_update_code do
    db_yml_path = "#{current_path}/config/database.yml"
    db_yml = IO.read(db_yml_path)
    db_yml.sub!(/username:.*/, "username: #{Capistrano::CLI.ui.ask('Enter MySQL database user: ')}")
    # Fixed: `db.yml.sub!` was a NameError (no `db` object in scope);
    # operate on the db_yml string like the username line above.
    db_yml.sub!(/password:.*/, "password: #{Capistrano::CLI.ui.ask('Enter MySQL database password: ')}")
    File.open(db_yml_path, 'w') {|f| f.write(db_yml) }
  end
end
|
class Mmctl < Formula
  desc "Remote CLI tool for Mattermost server"
  homepage "https://github.com/mattermost/mmctl"
  url "https://github.com/mattermost/mmctl.git",
      :tag => "v5.23",
      :revision => "87f989f74d0ac874e4038d47b77b3fd65f53bba4"
  head "https://github.com/mattermost/mmctl.git"
  bottle do
    cellar :any_skip_relocation
    sha256 "4eb25c4d0c51406fa13d207e961e109a122810d3939486691373b184b9de99dc" => :catalina
    sha256 "8713581d42ca4f50c401523025b4f78d13077bcbdb3ac20a77b7e1e2b4f7720e" => :mojave
    sha256 "eb1c359a5892e67b19f5d04999c88ed2603e9d6ddff040663ed814de60fe5723" => :high_sierra
  end
  depends_on "go" => :build
  def install
    ENV["GOBIN"] = buildpath/bin
    # Skip the project's extended vet step during packaging.
    ENV["ADVANCED_VET"] = "FALSE"
    ENV["BUILD_HASH"] = Utils.popen_read("git rev-parse HEAD").chomp
    ENV["BUILD_VERSION"] = version.to_s
    # Arrange the GOPATH-style src layout the Makefile expects.
    (buildpath/"src/github.com/mattermost/mmctl").install buildpath.children
    cd "src/github.com/mattermost/mmctl" do
      system "make", "install"
      # Install the zsh and bash completions
      output = Utils.popen_read("#{bin}/mmctl completion bash")
      (bash_completion/"mmctl").write output
      output = Utils.popen_read("#{bin}/mmctl completion zsh")
      (zsh_completion/"_mmctl").write output
    end
  end
  test do
    output = pipe_output("#{bin}/mmctl help 2>&1")
    assert_no_match /.*No such file or directory.*/, output
    assert_no_match /.*command not found.*/, output
    assert_match /.*mmctl \[command\].*/, output
  end
end
mmctl: update 5.23 bottle.
class Mmctl < Formula
  desc "Remote CLI tool for Mattermost server"
  homepage "https://github.com/mattermost/mmctl"
  url "https://github.com/mattermost/mmctl.git",
      tag: "v5.23",
      revision: "87f989f74d0ac874e4038d47b77b3fd65f53bba4"
  head "https://github.com/mattermost/mmctl.git"
  bottle do
    cellar :any_skip_relocation
    sha256 "0199ecc1ec2a6314704801f67b254bc684e2eb08a25b42bdd2f28ca1dc6144e3" => :catalina
    sha256 "ce69eac322b02b6018e807f6a654a41c9c1d7401e91700bcaa02063c98edb396" => :mojave
    sha256 "a2b0ea5a63c92a6b005c5cc7aa99eec5275294675ec03d80976493f4153aa9b7" => :high_sierra
  end
  depends_on "go" => :build
  # Build under a GOPATH-style layout and install the shell completions.
  def install
    ENV["GOBIN"] = buildpath/bin
    ENV["ADVANCED_VET"] = "FALSE"
    ENV["BUILD_HASH"] = Utils.popen_read("git rev-parse HEAD").chomp
    ENV["BUILD_VERSION"] = version.to_s
    (buildpath/"src/github.com/mattermost/mmctl").install buildpath.children
    cd "src/github.com/mattermost/mmctl" do
      system "make", "install"
      # Generate and install the bash and zsh completion scripts.
      (bash_completion/"mmctl").write Utils.popen_read("#{bin}/mmctl completion bash")
      (zsh_completion/"_mmctl").write Utils.popen_read("#{bin}/mmctl completion zsh")
    end
  end
  test do
    help_output = pipe_output("#{bin}/mmctl help 2>&1")
    assert_no_match(/.*No such file or directory.*/, help_output)
    assert_no_match(/.*command not found.*/, help_output)
    assert_match(/.*mmctl \[command\].*/, help_output)
  end
end
|
# One-off maintenance script: split an existing post into two posts, cutting
# *after* a given reply. Runs in a single transaction so a failure rolls the
# entire split back.
new_subject = 'test subject'
reply_user = 'Throne3d'
reply_id = 1024 # we split *after* this reply_id
Post.transaction do
  user = User.find_by(username: reply_user)
  abort("needs user") unless user
  # Scoping the lookup to the user is a sanity check against a mistyped id.
  reply = Reply.find_by(user_id: user.id, id: reply_id)
  abort("couldn't find reply") unless reply
  old_post = reply.post
  puts "splitting post #{old_post.id}: #{old_post.subject}, after reply #{reply_id}"
  # The first reply after the cut becomes the opening body of the new post;
  # everything after it is moved over as replies.
  first_reply = old_post.replies.where('reply_order > ?', reply.reply_order).ordered.first
  other_replies = old_post.replies.where('reply_order > ?', first_reply.reply_order).ordered
  puts "from after reply #{reply.id}, ie starting at + onwards from #{first_reply.inspect}"
  new_post = Post.new
  # Skip edit-tracking callbacks while copying data verbatim.
  puts "new_post: marking skip_edited & is_import"
  new_post.skip_edited = new_post.is_import = true
  # Author/content fields come from the reply being promoted...
  [:character_id, :icon_id, :character_alias_id, :user_id, :content, :created_at, :updated_at].each do |atr|
    new_value = first_reply.send(atr)
    puts "new_post.#{atr} = #{new_value.inspect}"
    new_post.send(atr.to_s + '=', new_value)
  end
  # ...while board/visibility settings are inherited from the old post.
  [:board_id, :section_id, :privacy, :status, :authors_locked].each do |atr|
    new_value = old_post.send(atr)
    puts "new_post.#{atr} = #{new_value.inspect}"
    new_post.send(atr.to_s + '=', new_value)
  end
  puts "new_post.subject = #{new_subject}"
  new_post.subject = new_subject
  puts "new_post.edited_at = #{first_reply.updated_at.inspect}"
  new_post.edited_at = first_reply.updated_at
  new_post.save!
  puts "new post: https://glowfic.com/posts/#{new_post.id}"
  puts "now updating #{other_replies.count} replies to be in post ID #{new_post.id}"
  # Remember the first moved reply per author so authorship can be rebuilt.
  new_authors = {}
  other_replies.each_with_index do |other_reply, index|
    new_authors[other_reply.user_id] ||= other_reply
    # update_columns: move without touching timestamps or firing callbacks.
    other_reply.update_columns(post_id: new_post.id, reply_order: index)
  end
  puts "-> updated"
  # The promoted reply now exists as the new post itself, so drop the original.
  puts "deleting reply converted to post: #{first_reply.inspect}"
  first_reply.destroy!
  puts "-> deleted"
  # TODO: ensure this works right (previously didn't, if condition missed .exists?)
  puts "updating authors:"
  new_authors.each do |user_id, reply|
    next if PostAuthor.where(post_id: new_post.id, user_id: user_id).exists?
    # NOTE(review): assumes every moved author already has a PostAuthor row on
    # the old post; `existing` would be nil otherwise — confirm before running.
    existing = PostAuthor.find_by(post_id: old_post.id, user_id: user_id)
    puts "existing: #{existing.inspect}"
    data = {
      user_id: user_id,
      post_id: new_post.id,
      created_at: reply.created_at,
      updated_at: [existing.updated_at, reply.created_at].max,
      can_owe: existing.can_owe,
      can_reply: existing.can_reply,
      joined: existing.joined,
      joined_at: reply.created_at,
    }
    puts "PostAuthor.create!(#{data}), for #{User.find(user_id)}"
    PostAuthor.create!(data)
  end
  puts "-> new authors created"
  # Remove authorship rows on the old post for users who no longer appear there.
  still_valid = (old_post.replies.distinct.pluck(:user_id) + [old_post.user_id]).uniq
  invalid = old_post.post_authors.where.not(user_id: still_valid)
  puts "removing old invalid post authors: #{invalid.inspect}"
  invalid.destroy_all
  puts "-> removed"
  # Refresh each post's cached latest-activity columns.
  new_last_reply = other_replies.last
  new_post_cached_data = {
    last_reply_id: new_last_reply.id,
    last_user_id: new_last_reply.user_id,
    tagged_at: new_last_reply.updated_at,
  }
  puts "updating new_post columns: #{new_post_cached_data}"
  new_post.update_columns(new_post_cached_data)
  last_reply = old_post.replies.ordered.last
  post_cached_data = {
    last_reply_id: last_reply.id,
    last_user_id: last_reply.user_id,
    tagged_at: last_reply.updated_at,
  }
  puts "updating post columns: #{post_cached_data}"
  old_post.update_columns(post_cached_data)
end
Remove outdated TODO
# One-off maintenance script: split an existing post into two posts, cutting
# *after* a given reply. Runs in a single transaction so a failure rolls the
# entire split back. (Revision with the outdated TODO removed.)
new_subject = 'test subject'
reply_user = 'Throne3d'
reply_id = 1024 # we split *after* this reply_id
Post.transaction do
  user = User.find_by(username: reply_user)
  abort("needs user") unless user
  # Scoping the lookup to the user is a sanity check against a mistyped id.
  reply = Reply.find_by(user_id: user.id, id: reply_id)
  abort("couldn't find reply") unless reply
  old_post = reply.post
  puts "splitting post #{old_post.id}: #{old_post.subject}, after reply #{reply_id}"
  # The first reply after the cut becomes the opening body of the new post;
  # everything after it is moved over as replies.
  first_reply = old_post.replies.where('reply_order > ?', reply.reply_order).ordered.first
  other_replies = old_post.replies.where('reply_order > ?', first_reply.reply_order).ordered
  puts "from after reply #{reply.id}, ie starting at + onwards from #{first_reply.inspect}"
  new_post = Post.new
  # Skip edit-tracking callbacks while copying data verbatim.
  puts "new_post: marking skip_edited & is_import"
  new_post.skip_edited = new_post.is_import = true
  # Author/content fields come from the promoted reply...
  [:character_id, :icon_id, :character_alias_id, :user_id, :content, :created_at, :updated_at].each do |atr|
    new_value = first_reply.send(atr)
    puts "new_post.#{atr} = #{new_value.inspect}"
    new_post.send(atr.to_s + '=', new_value)
  end
  # ...while board/visibility settings come from the old post.
  [:board_id, :section_id, :privacy, :status, :authors_locked].each do |atr|
    new_value = old_post.send(atr)
    puts "new_post.#{atr} = #{new_value.inspect}"
    new_post.send(atr.to_s + '=', new_value)
  end
  puts "new_post.subject = #{new_subject}"
  new_post.subject = new_subject
  puts "new_post.edited_at = #{first_reply.updated_at.inspect}"
  new_post.edited_at = first_reply.updated_at
  new_post.save!
  puts "new post: https://glowfic.com/posts/#{new_post.id}"
  puts "now updating #{other_replies.count} replies to be in post ID #{new_post.id}"
  # Remember the first moved reply per author so authorship can be rebuilt.
  new_authors = {}
  other_replies.each_with_index do |other_reply, index|
    new_authors[other_reply.user_id] ||= other_reply
    # update_columns: move without touching timestamps or firing callbacks.
    other_reply.update_columns(post_id: new_post.id, reply_order: index)
  end
  puts "-> updated"
  puts "deleting reply converted to post: #{first_reply.inspect}"
  first_reply.destroy!
  puts "-> deleted"
  puts "updating authors:"
  new_authors.each do |user_id, reply|
    next if PostAuthor.where(post_id: new_post.id, user_id: user_id).exists?
    # NOTE(review): assumes every moved author already has a PostAuthor row on
    # the old post; `existing` would be nil otherwise — confirm before running.
    existing = PostAuthor.find_by(post_id: old_post.id, user_id: user_id)
    puts "existing: #{existing.inspect}"
    data = {
      user_id: user_id,
      post_id: new_post.id,
      created_at: reply.created_at,
      updated_at: [existing.updated_at, reply.created_at].max,
      can_owe: existing.can_owe,
      can_reply: existing.can_reply,
      joined: existing.joined,
      joined_at: reply.created_at,
    }
    puts "PostAuthor.create!(#{data}), for #{User.find(user_id)}"
    PostAuthor.create!(data)
  end
  puts "-> new authors created"
  # Remove authorship rows on the old post for users who no longer appear there.
  still_valid = (old_post.replies.distinct.pluck(:user_id) + [old_post.user_id]).uniq
  invalid = old_post.post_authors.where.not(user_id: still_valid)
  puts "removing old invalid post authors: #{invalid.inspect}"
  invalid.destroy_all
  puts "-> removed"
  # Refresh each post's cached latest-activity columns.
  new_last_reply = other_replies.last
  new_post_cached_data = {
    last_reply_id: new_last_reply.id,
    last_user_id: new_last_reply.user_id,
    tagged_at: new_last_reply.updated_at,
  }
  puts "updating new_post columns: #{new_post_cached_data}"
  new_post.update_columns(new_post_cached_data)
  last_reply = old_post.replies.ordered.last
  post_cached_data = {
    last_reply_id: last_reply.id,
    last_user_id: last_reply.user_id,
    tagged_at: last_reply.updated_at,
  }
  puts "updating post columns: #{post_cached_data}"
  old_post.update_columns(post_cached_data)
end
# Form-backed model (never persisted) for the "email to a friend" feature:
# validates sender/recipient fields and splits a comma- or semicolon-separated
# recipient list into valid and invalid addresses.
class Spree::MailToFriend
  include ActiveModel::Validations
  include ActiveModel::Conversion

  attr_accessor :host, :subject, :sender_name, :sender_email, :recipient_name, :recipient_email, :message, :recipients, :invalid_recipients, :hide_recipients

  # SECURITY FIX: anchor with \A/\z instead of ^/$. In Ruby, ^ and $ match at
  # any line boundary, so input such as "a@example.com\nbcc: spam@example.com"
  # passed the old pattern — opening the form to mail header injection.
  EMAILREGEX = /\A[-a-z0-9_+\.]+\@([-a-z0-9]+\.)+[a-z0-9]{2,8}\z/i

  validates :subject, :presence => true
  validates :sender_name, :presence => true
  # A recipient name is only required for a single-recipient send.
  validates :recipient_name, :presence => true, :unless => :is_multi
  validates :sender_email, :format => { :with => EMAILREGEX }
  validates :recipients, :length => {:minimum => 1, :message => "must contain at least one valid email address"}
  validates :invalid_recipients, :length => {:maximum => 0, :message => "must be removed"}

  def initialize(opts = {})
    @sender_email = opts[:sender_email] || ' '
    @sender_name = opts[:sender_name] || @sender_email.split('@', 2)[0].titleize
    @subject = opts[:subject] || I18n.t('email_to_friend.you_would_like_this', :sender_name => @sender_name, :site => Spree::Config[:site_url])
    @recipients = []
    @invalid_recipients = []
    # Normalize separators and strip all whitespace before splitting the list.
    @recipient_email = (opts[:recipient_email] || '').gsub(';', ',').gsub(/\s/ , '')
    @recipient_email.split(',').each do |address|
      if address =~ EMAILREGEX
        @recipients << address
      else
        @invalid_recipients << address
      end
    end
    @recipient_name = opts[:recipient_name]
    @recipient_name ||= @recipients[0].split('@', 2)[0].titleize unless @recipients.empty?
    @hide_recipients = opts[:hide_recipients] || false
    @message = opts[:message]
  end

  # Never persisted; this model exists only to back the form.
  def persisted?
    false
  end

  # True when more than one recipient (valid or invalid) was supplied.
  def is_multi
    (@recipients.size + @invalid_recipients.size) > 1
  end
end
security warning fix for 2.0
Fixes #39
# Form-backed model (never persisted) for the "email to a friend" feature.
# Splits a comma/semicolon-separated recipient list into valid and invalid
# addresses and validates the sender/recipient fields.
class Spree::MailToFriend
  include ActiveModel::Validations
  include ActiveModel::Conversion

  attr_accessor :host, :subject, :sender_name, :sender_email, :recipient_name, :recipient_email, :message, :recipients, :invalid_recipients, :hide_recipients

  # Whole-string anchored (\A/\z) to prevent header injection via newlines.
  EMAILREGEX = /\A[-a-z0-9_+\.]+\@([-a-z0-9]+\.)+[a-z0-9]{2,8}\z/i

  validates :subject, :presence => true
  validates :sender_name, :presence => true
  validates :recipient_name, :presence => true, :unless => :is_multi
  validates :sender_email, :format => { :with => EMAILREGEX }
  validates :recipients, :length => {:minimum => 1, :message => "must contain at least one valid email address"}
  validates :invalid_recipients, :length => {:maximum => 0, :message => "must be removed"}

  def initialize(opts = {})
    @sender_email = opts[:sender_email] || ' '
    @sender_name = opts[:sender_name] || @sender_email.split('@', 2)[0].titleize
    @subject = opts[:subject] || I18n.t('email_to_friend.you_would_like_this', :sender_name => @sender_name, :site => Spree::Config[:site_url])
    # Normalize separators to commas and drop all whitespace, then bucket each
    # address by whether it matches the email pattern (order preserved).
    @recipient_email = (opts[:recipient_email] || '').gsub(';', ',').gsub(/\s/, '')
    @recipients, @invalid_recipients = @recipient_email.split(',').partition { |address| address =~ EMAILREGEX }
    @recipient_name = opts[:recipient_name]
    @recipient_name ||= @recipients[0].split('@', 2)[0].titleize unless @recipients.empty?
    @hide_recipients = opts[:hide_recipients] || false
    @message = opts[:message]
  end

  # Never persisted; this model exists only to back the form.
  def persisted?
    false
  end

  # True when more than one recipient (valid or invalid) was supplied.
  def is_multi
    (@recipients.size + @invalid_recipients.size) > 1
  end
end
|
# Capistrano deployment configuration for rubyjobsbrazil.
# config valid only for Capistrano 3.1
lock '3.1.0'
set :repo_url, 'git@github.com:mjacobus/rubyjobsbrazil.git'
# Prompt for the branch to deploy, defaulting to the currently checked-out one.
ask :branch, proc { `git rev-parse --abbrev-ref HEAD`.chomp }
set :scm, :git
set :format, :pretty
set :log_level, :debug
set :pty, true
# Files/dirs shared across releases (symlinked from the shared path).
set :linked_files, %w{config/database.yml config/secrets.yml config/application.yml}
set :linked_dirs, %w{bin log tmp/pids tmp/cache tmp/sockets vendor/bundle public/system public/uploads}
# Default branch is :master
# ask :branch, proc { `git rev-parse --abbrev-ref HEAD`.chomp }
# Default deploy_to directory is /var/www/my_app
# set :deploy_to, '/var/www/my_app'
# Default value for :scm is :git
# set :scm, :git
# Default value for :format is :pretty
# set :format, :pretty
# Default value for :log_level is :debug
# set :log_level, :debug
# Default value for :pty is false
# set :pty, true
# Default value for :linked_files is []
# set :linked_files, %w{config/database.yml}
# Default value for linked_dirs is []
# set :linked_dirs, %w{bin log tmp/pids tmp/cache tmp/sockets vendor/bundle public/system}
# Default value for default_env is {}
# set :default_env, { path: "/opt/ruby/bin:$PATH" }
# Default value for keep_releases is 5
# set :keep_releases, 5
namespace :deploy do
  desc 'Restart application'
  task :restart do
    on roles(:app), in: :sequence, wait: 5 do
      # Your restart mechanism here, for example:
      # (Passenger restarts on touch of restart.txt.)
      execute :touch, release_path.join('tmp/restart.txt')
    end
  end
  after :publishing, :restart
  # Placeholder hook — intentionally empty until a cache strategy exists.
  after :restart, :clear_cache do
    on roles(:web), in: :groups, limit: 3, wait: 10 do
      # Here we can do anything such as:
      # within release_path do
      # execute :rake, 'cache:clear'
      # end
    end
  end
end
Updated capistrano lock
# Capistrano deployment configuration for rubyjobsbrazil.
# config valid only for Capistrano 3.2 (was 3.1; lock bumped below)
lock '3.2.1'
set :repo_url, 'git@github.com:mjacobus/rubyjobsbrazil.git'
# Prompt for the branch to deploy, defaulting to the currently checked-out one.
ask :branch, proc { `git rev-parse --abbrev-ref HEAD`.chomp }
set :scm, :git
set :format, :pretty
set :log_level, :debug
set :pty, true
# Files/dirs shared across releases (symlinked from the shared path).
set :linked_files, %w{config/database.yml config/secrets.yml config/application.yml}
set :linked_dirs, %w{bin log tmp/pids tmp/cache tmp/sockets vendor/bundle public/system public/uploads}
# Default branch is :master
# ask :branch, proc { `git rev-parse --abbrev-ref HEAD`.chomp }
# Default deploy_to directory is /var/www/my_app
# set :deploy_to, '/var/www/my_app'
# Default value for :scm is :git
# set :scm, :git
# Default value for :format is :pretty
# set :format, :pretty
# Default value for :log_level is :debug
# set :log_level, :debug
# Default value for :pty is false
# set :pty, true
# Default value for :linked_files is []
# set :linked_files, %w{config/database.yml}
# Default value for linked_dirs is []
# set :linked_dirs, %w{bin log tmp/pids tmp/cache tmp/sockets vendor/bundle public/system}
# Default value for default_env is {}
# set :default_env, { path: "/opt/ruby/bin:$PATH" }
# Default value for keep_releases is 5
# set :keep_releases, 5
namespace :deploy do
  desc 'Restart application'
  task :restart do
    on roles(:app), in: :sequence, wait: 5 do
      # Your restart mechanism here, for example:
      # (Passenger restarts on touch of restart.txt.)
      execute :touch, release_path.join('tmp/restart.txt')
    end
  end
  after :publishing, :restart
  # Placeholder hook — intentionally empty until a cache strategy exists.
  after :restart, :clear_cache do
    on roles(:web), in: :groups, limit: 3, wait: 10 do
      # Here we can do anything such as:
      # within release_path do
      # execute :rake, 'cache:clear'
      # end
    end
  end
end
|
# Homebrew formula for monit 5.14.
class Monit < Formula
  desc "Manage and monitor processes, files, directories, and devices"
  homepage "https://mmonit.com/monit/"
  url "https://mmonit.com/monit/dist/monit-5.14.tar.gz"
  mirror "https://mirrors.kernel.org/debian/pool/main/m/monit/monit_5.14.orig.tar.gz"
  sha256 "d0424c3ee8ed43d670ba039184a972ac9f3ad6f45b0806ec17c23820996256c6"

  bottle do
    cellar :any
    sha256 "599c29599d179cd53f4f0983d1fe535aeee91b199b3d0acfd38089ae0fde94b9" => :yosemite
    sha256 "c48854b626a95aa3015877bf3aba9e77c7beb7cf0d86d5bb74894564c6c1d360" => :mavericks
    sha256 "ed2484446e8bbb5c0c5f8ce9c9559e7f181e4e7db97a19200ebcc72417b2cfe2" => :mountain_lion
  end

  depends_on "openssl"

  def install
    # FIX: point configure at Homebrew's keg-only OpenSSL explicitly; without
    # --with-ssl-dir, SSL/TLS detection can silently fail and monit builds
    # without TLS support even though the formula depends on openssl.
    system "./configure", "--prefix=#{prefix}",
                          "--localstatedir=#{var}/monit",
                          "--sysconfdir=#{etc}/monit",
                          "--with-ssl-dir=#{Formula["openssl"].opt_prefix}"
    system "make", "install"
    (share/"monit").install "monitrc"
  end

  test do
    # Syntax-check the bundled sample config.
    system bin/"monit", "-c", share/"monit/monitrc", "-t"
  end
end
monit: fix ssl/tls detection
Closes Homebrew/homebrew#43926.
Closes Homebrew/homebrew#43944.
Signed-off-by: Dominyk Tiller <53e438f55903875d07efdd98a8aaf887e7208dd3@gmail.com>
# Homebrew formula for monit 5.14 (with explicit OpenSSL location so TLS
# support is detected against Homebrew's keg-only openssl).
class Monit < Formula
  desc "Manage and monitor processes, files, directories, and devices"
  homepage "https://mmonit.com/monit/"
  url "https://mmonit.com/monit/dist/monit-5.14.tar.gz"
  mirror "https://mirrors.kernel.org/debian/pool/main/m/monit/monit_5.14.orig.tar.gz"
  sha256 "d0424c3ee8ed43d670ba039184a972ac9f3ad6f45b0806ec17c23820996256c6"

  bottle do
    cellar :any
    sha256 "599c29599d179cd53f4f0983d1fe535aeee91b199b3d0acfd38089ae0fde94b9" => :yosemite
    sha256 "c48854b626a95aa3015877bf3aba9e77c7beb7cf0d86d5bb74894564c6c1d360" => :mavericks
    sha256 "ed2484446e8bbb5c0c5f8ce9c9559e7f181e4e7db97a19200ebcc72417b2cfe2" => :mountain_lion
  end

  depends_on "openssl"

  def install
    system "./configure", "--prefix=#{prefix}",
                          "--localstatedir=#{var}/monit",
                          "--sysconfdir=#{etc}/monit",
                          "--with-ssl-dir=#{Formula["openssl"].opt_prefix}"
    system "make", "install"
    (share/"monit").install "monitrc"
  end

  test do
    # Syntax-check the bundled sample config.
    system bin/"monit", "-c", share/"monit/monitrc", "-t"
  end
end
|
# Reply columns copied onto the new post (attribute-name strings, since
# ActiveModel#attributes is keyed by strings).
REPLY_ATTRS = %w[character_id icon_id character_alias_id user_id content created_at updated_at]
# Post columns inherited from the post being split.
POST_ATTRS = %w[board_id section_id privacy status authors_locked]
# Interactively split a post in two: prompts for the new post's subject and the
# reply that should open it, then — in one transaction — promotes that reply to
# a post, moves all later replies across, and rebuilds authorship plus cached
# latest-activity data on both posts.
def split_post
  print('Subject for new post? ')
  new_subject = STDIN.gets.chomp
  raise RuntimeError, "Invalid subject" if new_subject.blank?
  print("\n")
  print('First reply_id of new post? ')
  reply_id = STDIN.gets.chomp
  Post.transaction do
    first_reply = Reply.find_by(id: reply_id)
    raise RuntimeError, "Couldn't find reply" unless first_reply
    old_post = first_reply.post
    puts "splitting post #{old_post.id}: #{old_post.subject}, at #{reply_id}"
    # Replies strictly after the promoted one move to the new post.
    other_replies = old_post.replies.where('reply_order > ?', first_reply.reply_order).ordered
    puts "ie starting at + onwards from #{first_reply.inspect}"
    # Author/content fields come from the promoted reply; board/visibility
    # settings come from the post being split.
    new_post = Post.new(first_reply.attributes.slice(*REPLY_ATTRS))
    new_post.skip_edited = new_post.is_import = true
    new_post.assign_attributes(old_post.attributes.slice(*POST_ATTRS))
    new_post.subject = new_subject
    new_post.edited_at = first_reply.updated_at
    puts "new post: #{new_post.inspect}"
    new_post.save!
    puts "new post: https://glowfic.com/posts/#{new_post.id}"
    puts "now updating #{other_replies.count} replies to be in post ID #{new_post.id}"
    # Remember the first moved reply per author so authorship can be rebuilt.
    new_authors = {}
    other_replies.each_with_index do |other_reply, index|
      new_authors[other_reply.user_id] ||= other_reply
      other_reply.update_columns(post_id: new_post.id, reply_order: index)
    end
    puts "-> updated"
    puts "deleting reply converted to post: #{first_reply.inspect}"
    first_reply.destroy!
    puts "-> deleted"
    puts "updating authors:"
    new_authors.each do |user_id, reply|
      next if PostAuthor.where(post_id: new_post.id, user_id: user_id).exists?
      # NOTE(review): assumes the author already has a PostAuthor row on the
      # old post; `existing` would be nil otherwise — confirm before running.
      existing = PostAuthor.find_by(post_id: old_post.id, user_id: user_id)
      puts "existing: #{existing.inspect}"
      data = {
        user_id: user_id,
        post_id: new_post.id,
        created_at: reply.created_at,
        updated_at: [existing.updated_at, reply.created_at].max,
        joined_at: reply.created_at,
      }
      # BUG FIX: Hash#slice takes keys as separate arguments and #attributes is
      # string-keyed; the old `slice([:can_owe, :can_reply, :joined])` matched
      # no keys, silently dropping all three permission flags.
      data.merge!(existing.attributes.slice('can_owe', 'can_reply', 'joined'))
      puts "PostAuthor.create!(#{data}), for #{User.find(user_id).inspect}"
      PostAuthor.create!(data)
    end
    puts "-> new authors created"
    # Remove old-post authorship rows for users who no longer appear there.
    still_valid = (old_post.replies.distinct.pluck(:user_id) + [old_post.user_id]).uniq
    invalid = old_post.post_authors.where.not(user_id: still_valid)
    puts "removing old invalid post authors: #{invalid.inspect}"
    invalid.destroy_all
    puts "-> removed"
    # Refresh cached latest-activity columns on both posts.
    new_last_reply = other_replies.last
    new_post_cached_data = {
      last_reply_id: new_last_reply.id,
      last_user_id: new_last_reply.user_id,
      tagged_at: new_last_reply.updated_at,
    }
    puts "updating new_post columns: #{new_post_cached_data}"
    new_post.update_columns(new_post_cached_data)
    last_reply = old_post.replies.ordered.last
    post_cached_data = {
      last_reply_id: last_reply.id,
      last_user_id: last_reply.user_id,
      tagged_at: last_reply.updated_at,
    }
    puts "updating post columns: #{post_cached_data}"
    old_post.update_columns(post_cached_data)
  end
end
# Run only when this file is executed directly (not when loaded/required).
split_post if $PROGRAM_NAME == __FILE__
Split into methods
# Reply columns copied onto the new post (attribute-name strings, since
# ActiveModel#attributes is keyed by strings).
REPLY_ATTRS = %w[character_id icon_id character_alias_id user_id content created_at updated_at]
# Post columns inherited from the post being split.
POST_ATTRS = %w[board_id section_id privacy status authors_locked]
# Interactive entry point: prompts for the new post's subject and the reply
# that should open it, then performs the whole split in one transaction so a
# failure rolls everything back.
def split_post
  print('Subject for new post? ')
  new_subject = STDIN.gets.chomp
  raise RuntimeError, "Invalid subject" if new_subject.blank?
  print("\n")
  print('First reply_id of new post? ')
  reply_id = STDIN.gets.chomp
  Post.transaction do
    first_reply = Reply.find_by(id: reply_id)
    raise RuntimeError, "Couldn't find reply" unless first_reply
    old_post = first_reply.post
    puts "splitting post #{old_post.id}: #{old_post.subject}, at #{reply_id}"
    # Replies strictly after the promoted one move to the new post.
    other_replies = old_post.replies.where('reply_order > ?', first_reply.reply_order).ordered
    puts "ie starting at + onwards from #{first_reply.inspect}"
    new_post = create_post(first_reply, old_post: old_post, subject: new_subject)
    new_authors = migrate_replies(other_replies, new_post)
    cleanup_first(first_reply)
    update_authors(new_authors, new_post: new_post, old_post: old_post)
    # Refresh cached latest-activity columns on both posts.
    update_caches(new_post, other_replies.last)
    update_caches(old_post, old_post.replies.ordered.last)
  end
end
# Build and save the new post from the promoted reply: author/content fields
# come from the reply, board/visibility settings from the post being split.
def create_post(first_reply, old_post:, subject:)
  new_post = Post.new(first_reply.attributes.slice(*REPLY_ATTRS))
  # Skip edit-tracking callbacks while importing data verbatim.
  new_post.skip_edited = true
  new_post.is_import = true
  new_post.assign_attributes(old_post.attributes.slice(*POST_ATTRS))
  new_post.subject = subject
  new_post.edited_at = first_reply.updated_at
  puts "new post: #{new_post.inspect}"
  new_post.save!
  puts "new post: https://glowfic.com/posts/#{new_post.id}"
  new_post
end
# Re-home every trailing reply onto the new post, renumbering reply_order from
# zero. Returns a hash mapping each author's user_id to the first of their
# replies that moved (used later to rebuild PostAuthor rows).
def migrate_replies(other_replies, new_post)
  first_reply_by_author = {}
  puts "now updating #{other_replies.count} replies to be in post ID #{new_post.id}"
  other_replies.each.with_index do |moved_reply, new_order|
    first_reply_by_author[moved_reply.user_id] ||= moved_reply
    # update_columns skips validations/callbacks and keeps timestamps intact.
    moved_reply.update_columns(post_id: new_post.id, reply_order: new_order)
  end
  puts "-> updated"
  first_reply_by_author
end
# Delete the reply whose content was promoted into the new post's opening body,
# so the same content doesn't exist twice (once as post, once as reply).
def cleanup_first(promoted_reply)
  puts "deleting reply converted to post: #{promoted_reply.inspect}"
  promoted_reply.destroy!
  puts "-> deleted"
end
# Create PostAuthor rows on the new post for every author whose replies moved
# (copying their permission flags from the old post's row), then delete
# old-post author rows for users who no longer appear there.
def update_authors(new_authors, new_post:, old_post:)
  puts "updating authors:"
  new_authors.each do |user_id, reply|
    next if PostAuthor.where(post_id: new_post.id, user_id: user_id).exists?
    # NOTE(review): assumes the author already has a PostAuthor row on the old
    # post; `existing` would be nil otherwise — confirm before running.
    existing = PostAuthor.find_by(post_id: old_post.id, user_id: user_id)
    puts "existing: #{existing.inspect}"
    data = {
      user_id: user_id,
      post_id: new_post.id,
      created_at: reply.created_at,
      updated_at: [existing.updated_at, reply.created_at].max,
      joined_at: reply.created_at,
    }
    # BUG FIX: Hash#slice takes keys as separate arguments and #attributes is
    # string-keyed; the old `slice([:can_owe, :can_reply, :joined])` matched no
    # keys (it looked up the array itself), silently dropping all three flags.
    data.merge!(existing.attributes.slice('can_owe', 'can_reply', 'joined'))
    puts "PostAuthor.create!(#{data}), for #{User.find(user_id).inspect}"
    PostAuthor.create!(data)
  end
  puts "-> new authors created"
  # Remove old-post authorship rows for users with no remaining presence there.
  still_valid = (old_post.replies.distinct.pluck(:user_id) + [old_post.user_id]).uniq
  invalid = old_post.post_authors.where.not(user_id: still_valid)
  puts "removing old invalid post authors: #{invalid.inspect}"
  invalid.destroy_all
  puts "-> removed"
end
# Rewrite a post's denormalized latest-activity columns from its last reply,
# bypassing validations/callbacks via update_columns.
def update_caches(post, last_reply)
  latest = {
    last_reply_id: last_reply.id,
    last_user_id: last_reply.user_id,
    tagged_at: last_reply.updated_at,
  }
  puts "updating post columns: #{latest}"
  post.update_columns(latest)
end
# Run only when this file is executed directly (not when loaded/required).
split_post if $PROGRAM_NAME == __FILE__
|
# Gem packaging metadata for the embulk-input-elasticsearch Embulk plugin.
Gem::Specification.new do |spec|
  spec.name = "embulk-input-elasticsearch"
  spec.version = "0.3.3"
  spec.authors = ["toyama0919"]
  spec.summary = "Elasticsearch input plugin for Embulk"
  spec.description = "Loads records from Elasticsearch. parallel query support."
  spec.email = ["toyama0919@gmail.com"]
  spec.licenses = ["MIT"]
  spec.homepage = "https://github.com/toyama0919/embulk-input-elasticsearch"
  # Ship all git-tracked files plus prebuilt jars bundled under classpath/.
  spec.files = `git ls-files`.split("\n") + Dir["classpath/*.jar"]
  spec.test_files = spec.files.grep(%r{^(test|spec)/})
  spec.require_paths = ["lib"]
  spec.add_dependency 'elasticsearch'
  spec.add_dependency 'excon'
  spec.add_development_dependency 'embulk', ['>= 0.8.18']
  spec.add_development_dependency 'bundler', ['>= 1.10.6']
  spec.add_development_dependency 'rake', ['>= 10.0']
  spec.add_development_dependency 'test-unit'
end
v0.3.4
# Gem packaging metadata for the embulk-input-elasticsearch Embulk plugin
# (version bump to 0.3.4).
Gem::Specification.new do |spec|
  spec.name = "embulk-input-elasticsearch"
  spec.version = "0.3.4"
  spec.authors = ["toyama0919"]
  spec.summary = "Elasticsearch input plugin for Embulk"
  spec.description = "Loads records from Elasticsearch. parallel query support."
  spec.email = ["toyama0919@gmail.com"]
  spec.licenses = ["MIT"]
  spec.homepage = "https://github.com/toyama0919/embulk-input-elasticsearch"
  # Ship all git-tracked files plus prebuilt jars bundled under classpath/.
  spec.files = `git ls-files`.split("\n") + Dir["classpath/*.jar"]
  spec.test_files = spec.files.grep(%r{^(test|spec)/})
  spec.require_paths = ["lib"]
  spec.add_dependency 'elasticsearch'
  spec.add_dependency 'excon'
  spec.add_development_dependency 'embulk', ['>= 0.8.18']
  spec.add_development_dependency 'bundler', ['>= 1.10.6']
  spec.add_development_dependency 'rake', ['>= 10.0']
  spec.add_development_dependency 'test-unit'
end
|
# frozen_string_literal: true

module Spree
  # Service object that mutates an order's line items (add/remove/update cart)
  # and reloads the order afterwards so cached associations stay fresh.
  class OrderContents
    attr_accessor :order

    def initialize(order)
      @order = order
    end

    # Get current line item for variant if exists
    # Add variant qty to line_item
    # Returns the affected line item.
    def add(variant, quantity = 1, shipment = nil)
      line_item = order.find_line_item_by_variant(variant)
      add_to_line_item(line_item, variant, quantity, shipment)
    end

    # Get current line item for variant
    # Remove variant qty from line_item
    # Raises ActiveRecord::RecordNotFound when the variant isn't in the order.
    def remove(variant, quantity = 1, shipment = nil)
      line_item = order.find_line_item_by_variant(variant)
      unless line_item
        raise ActiveRecord::RecordNotFound, "Line item not found for variant #{variant.sku}"
      end
      remove_from_line_item(line_item, variant, quantity, shipment)
    end

    # Apply cart-level params, prune zero-quantity line items, and rebuild
    # shipments to match. Returns true/false like #update_attributes.
    def update_cart(params)
      if order.update_attributes(params)
        order.line_items = order.line_items.select {|li| li.quantity > 0 }
        order.ensure_updated_shipments
        update_order
        true
      else
        false
      end
    end

    private

    # Bump quantity on the existing line item, or build a new one priced from
    # the variant. NOTE(review): uses non-bang save, so a validation failure is
    # silently ignored here — contrast with remove_from_line_item's save!.
    def add_to_line_item(line_item, variant, quantity, shipment = nil)
      if line_item
        line_item.target_shipment = shipment
        line_item.quantity += quantity.to_i
      else
        line_item = order.line_items.new(quantity: quantity, variant: variant)
        line_item.target_shipment = shipment
        line_item.price = variant.price
      end
      line_item.save
      update_order
      line_item
    end

    # Decrement quantity; at zero, reconcile inventory and destroy the item.
    def remove_from_line_item(line_item, _variant, quantity, shipment = nil)
      line_item.quantity += -quantity
      line_item.target_shipment = shipment
      if line_item.quantity == 0
        Spree::OrderInventory.new(order).verify(line_item, shipment)
        line_item.destroy
      else
        line_item.save!
      end
      update_order
      line_item
    end

    # Reload so callers observe refreshed associations and totals.
    def update_order
      order.reload
    end
  end
end
Move order updating
# frozen_string_literal: true

module Spree
  # Service object that mutates an order's line items (add/remove/update cart)
  # and reloads the order afterwards so cached associations stay fresh.
  class OrderContents
    attr_accessor :order

    def initialize(order)
      @order = order
    end

    # Get current line item for variant if exists
    # Add variant qty to line_item
    # FIX: return the affected line item, not the reloaded order — the
    # pre-refactor version returned the line item and callers use it for
    # error reporting; returning `update_order`'s result broke that contract.
    def add(variant, quantity = 1, shipment = nil)
      line_item = order.find_line_item_by_variant(variant)
      line_item = add_to_line_item(line_item, variant, quantity, shipment)
      update_order
      line_item
    end

    # Get current line item for variant
    # Remove variant qty from line_item
    # Raises ActiveRecord::RecordNotFound when the variant isn't in the order.
    # FIX: returns the affected line item (see #add).
    def remove(variant, quantity = 1, shipment = nil)
      line_item = order.find_line_item_by_variant(variant)
      unless line_item
        raise ActiveRecord::RecordNotFound, "Line item not found for variant #{variant.sku}"
      end
      line_item = remove_from_line_item(line_item, variant, quantity, shipment)
      update_order
      line_item
    end

    # Apply cart-level params, prune zero-quantity line items, and rebuild
    # shipments to match. Returns true/false like #update_attributes.
    def update_cart(params)
      if order.update_attributes(params)
        order.line_items = order.line_items.select {|li| li.quantity > 0 }
        order.ensure_updated_shipments
        update_order
        true
      else
        false
      end
    end

    private

    # Bump quantity on the existing line item, or build a new one priced from
    # the variant. NOTE(review): uses non-bang save, so a validation failure is
    # silently ignored here — contrast with remove_from_line_item's save!.
    def add_to_line_item(line_item, variant, quantity, shipment = nil)
      if line_item
        line_item.target_shipment = shipment
        line_item.quantity += quantity.to_i
      else
        line_item = order.line_items.new(quantity: quantity, variant: variant)
        line_item.target_shipment = shipment
        line_item.price = variant.price
      end
      line_item.save
      line_item
    end

    # Decrement quantity; at zero, reconcile inventory and destroy the item.
    def remove_from_line_item(line_item, _variant, quantity, shipment = nil)
      line_item.quantity += -quantity
      line_item.target_shipment = shipment
      if line_item.quantity == 0
        Spree::OrderInventory.new(order).verify(line_item, shipment)
        line_item.destroy
      else
        line_item.save!
      end
      line_item
    end

    # Reload so callers observe refreshed associations and totals.
    def update_order
      order.reload
    end
  end
end
|
# Capistrano deployment configuration for rails_survey.
# config valid only for Capistrano 3.2 (comment previously said 3.1; the lock
# below is what is enforced)
lock '3.2.1'
set :application, 'rails_survey'
set :deploy_user, 'dmtg'
set :scm, :git
set :repo_url, 'git@github.com:mnipper/rails_survey.git'
set :use_sudo, false
set :rails_env, 'production'
set :deploy_via, :copy
set :ssh_options, { :forward_agent => true, :port => 2222 }
set :pty, false
set :format, :pretty
set :keep_releases, 5
set :linked_files, %w{config/database.yml config/secret_token.txt}
# BUG FIX: the directory names were juxtaposed string literals ("bin" "log"
# ...), which Ruby concatenates into the single bogus path
# "binlogtmp/pids..."; they must be separate, comma-separated arguments.
set :linked_dirs, fetch(:linked_dirs).push("bin", "log", "tmp/pids", "tmp/cache", "tmp/sockets", "vendor/bundle", "public/system")
set :branch, 'master'
namespace :deploy do
  task :load_schema do
    execute "cd #{current_path}; rake db:schema:load RAILS_ENV=#{rails_env}"
  end
  # First-time deploy: push code, load the schema, start the app.
  task :cold do
    update
    load_schema
    start
  end
  # desc 'Start Forever'
  # task :stop_node do
  # execute "/usr/local/bin/forever stopall; true"
  # end
  #
  # desc 'Stop Forever'
  # task :start_node do
  # execute "cd #{current_path}/node && sudo /usr/local/bin/forever start server.js 8080"
  # end
  # desc 'Restart Forever'
  # task :restart_node do
  # execute stop_node
  # sleep 5
  # execute start_node
  # end
  # desc "Start the Redis server"
  # task :start_redis do
  # execute "redis-server /etc/redis.conf"
  # end
  # desc "Stop the Redis server"
  # task :stop_redis do
  # execute 'echo "SHUTDOWN" | nc localhost 6379'
  # end
  desc 'Restart Application'
  task :restart do
    # NOTE(review): this loop issues start, stop AND restart to redis on every
    # deploy — most likely only "restart" is intended.
    %w[start stop restart].each do |command|
      desc "#{command} redis"
      on roles(:web), in: :sequence, wait: 5 do
        execute "sudo /etc/init.d/redis-server #{command}"
      end
    end
    # Restart the node/forever process alongside the Rails app.
    on roles(:app), in: :sequence, wait: 5 do
      execute "/usr/local/bin/forever stopall; true"
      execute "cd #{current_path}/node && sudo /usr/local/bin/forever start server.js"
    end
    # Passenger restarts when restart.txt is touched.
    on roles(:app), in: :sequence, wait: 5 do
      execute :touch, current_path.join('tmp/restart.txt')
    end
  end
  after :finishing, 'deploy:cleanup'
  #after :publishing, :restart
  after 'deploy:publishing', 'deploy:restart'
end
clean up deploy.rb
# Capistrano deployment configuration for rails_survey (cleaned-up revision).
# config valid only for Capistrano 3.2 (comment previously said 3.1; the lock
# below is what is enforced)
lock '3.2.1'
set :application, 'rails_survey'
set :deploy_user, 'dmtg'
set :scm, :git
set :repo_url, 'git@github.com:mnipper/rails_survey.git'
set :use_sudo, false
set :rails_env, 'production'
set :deploy_via, :copy
set :ssh_options, { :forward_agent => true, :port => 2222 }
set :pty, false
set :format, :pretty
set :keep_releases, 5
set :linked_files, %w{config/database.yml config/secret_token.txt}
# BUG FIX: the directory names were juxtaposed string literals ("bin" "log"
# ...), which Ruby concatenates into the single bogus path
# "binlogtmp/pids..."; they must be separate, comma-separated arguments.
set :linked_dirs, fetch(:linked_dirs).push("bin", "log", "tmp/pids", "tmp/cache", "tmp/sockets", "vendor/bundle", "public/system")
set :branch, 'master'
namespace :deploy do
  task :load_schema do
    execute "cd #{current_path}; rake db:schema:load RAILS_ENV=#{rails_env}"
  end
  # First-time deploy: push code, load the schema, start the app.
  task :cold do
    update
    load_schema
    start
  end
  desc 'Restart Application'
  task :restart do
    desc "restart redis"
    on roles(:app) do
      execute "sudo /etc/init.d/redis-server restart"
    end
    desc "restart node"
    on roles(:app), in: :sequence, wait: 5 do
      execute "/usr/local/bin/forever stopall; true"
      execute "cd #{current_path}/node && sudo /usr/local/bin/forever start server.js"
    end
    desc "restart phusion passenger"
    on roles(:app), in: :sequence, wait: 5 do
      execute :touch, current_path.join('tmp/restart.txt')
    end
  end
  after :finishing, 'deploy:cleanup'
  after 'deploy:publishing', 'deploy:restart'
end
|
# Homebrew formula for monit 5.21.0.
class Monit < Formula
  desc "Manage and monitor processes, files, directories, and devices"
  homepage "https://mmonit.com/monit/"
  url "https://mmonit.com/monit/dist/monit-5.21.0.tar.gz"
  mirror "https://mirrors.ocf.berkeley.edu/debian/pool/main/m/monit/monit_5.21.0.orig.tar.gz"
  sha256 "fbf76163ed4a180854d378af60fed0cdbc5a8772823957234efc182ead10c03c"

  bottle do
    cellar :any
    sha256 "be51a33474b2a3907899e345801a7af34cc5ae789beaecbadf747966928b4a87" => :sierra
    sha256 "ea87a2ad323cf9219f8c70cb902d506172855f8dc1ef7e7b31fddc813db57829" => :el_capitan
    sha256 "f51c2f901edf6939e3f90519fec401ce2912ec2be6e1a3a1c2a9c84970a31ccb" => :yosemite
  end

  depends_on "openssl"

  def install
    # --with-ssl-dir points configure at Homebrew's keg-only OpenSSL so
    # SSL/TLS support is detected.
    system "./configure", "--prefix=#{prefix}",
                          "--localstatedir=#{var}/monit",
                          "--sysconfdir=#{etc}/monit",
                          "--with-ssl-dir=#{Formula["openssl"].opt_prefix}"
    system "make", "install"
    pkgshare.install "monitrc"
  end

  test do
    # Syntax-check the bundled sample config.
    system bin/"monit", "-c", pkgshare/"monitrc", "-t"
  end
end
monit: update 5.21.0 bottle.
# Homebrew formula for monit 5.21.0 (bottle-rebuild revision; the bottle is no
# longer marked `cellar :any`).
class Monit < Formula
  desc "Manage and monitor processes, files, directories, and devices"
  homepage "https://mmonit.com/monit/"
  url "https://mmonit.com/monit/dist/monit-5.21.0.tar.gz"
  mirror "https://mirrors.ocf.berkeley.edu/debian/pool/main/m/monit/monit_5.21.0.orig.tar.gz"
  sha256 "fbf76163ed4a180854d378af60fed0cdbc5a8772823957234efc182ead10c03c"

  bottle do
    sha256 "cf5fe0d85c03d64e0d1f213c76f70d25df9856b93f60cd5e4facc13d271e1cd6" => :sierra
    sha256 "55f803929e1f12950d7f2ac0b373ba10a6aac7895a3b03f63c22eb0927c02673" => :el_capitan
    sha256 "618b1981e0b7b71ec787ced7d321c9d4c6cfc0cf16774d962fea2302b92dd74d" => :yosemite
  end

  depends_on "openssl"

  def install
    # --with-ssl-dir points configure at Homebrew's keg-only OpenSSL so
    # SSL/TLS support is detected.
    system "./configure", "--prefix=#{prefix}",
                          "--localstatedir=#{var}/monit",
                          "--sysconfdir=#{etc}/monit",
                          "--with-ssl-dir=#{Formula["openssl"].opt_prefix}"
    system "make", "install"
    pkgshare.install "monitrc"
  end

  test do
    # Syntax-check the bundled sample config.
    system bin/"monit", "-c", pkgshare/"monitrc", "-t"
  end
end
|
# Monkey-patch of the configured Spree user class: keeps the user's default
# billing/shipping addresses linked back to the user record instead of
# letting Spree duplicate them. The uaddrcount(...) calls and "XXX" markers
# are temporary debug instrumentation slated for removal.
Spree.user_class.class_eval do
has_many :addresses, -> { where(:deleted_at => nil).order("updated_at DESC") }, :class_name => 'Spree::Address'
before_validation { uaddrcount(self.id ? self : nil, "U:B4VALIDATION") } # XXX
before_save { uaddrcount(self.id ? self : nil, "U:B4SAVE") } # XXX
after_save { uaddrcount(self.id ? self : nil, "U:AftSAVE") } # XXX
before_validation :link_address # XXX after_save
# Pre-validation hook (registered just above) that adds user_id to addresses
# that are assigned to the user's default address slots.
def link_address
uaddrcount self.id && self, "U:la:b4(#{changes})" # XXX
r = true
if self.bill_address && !self.bill_address.user
uaddrcount self.id && self, "U:la:bill" # XXX
# Non-editable addresses are cloned before being claimed so the original
# record stays untouched.
unless self.bill_address.editable?
self.bill_address = self.bill_address.clone
end
self.bill_address.user = self
# XXX r &= self.bill_address.save
end
if self.ship_address && !self.ship_address.user
uaddrcount self.id && self, "U:la:ship" # XXX
unless self.ship_address.editable?
self.ship_address = self.ship_address.clone
end
self.ship_address.user = self
# XXX r &= self.ship_address.save
end
# XXX r &= save
uaddrcount self.id && self, "U:la:aft(#{r.inspect}/#{bill_address.try(:errors).try(:full_messages)}/#{ship_address.try(:errors).try(:full_messages)})" # XXX
r
end
# Assigns +address+ as the user's default billing and/or shipping address,
# depending on which of the +billing+/+shipping+ flags are present.
def save_default_addresses(billing, shipping, address)
uaddrcount self, "U:sda:b4" # XXX
# TODO: is this supposed to set both to the same address ID?
r = update_attributes(bill_address_id: address.id) if billing.present?
r &= update_attributes(ship_address_id: address.id) if shipping.present?
uaddrcount self, "U:sda:aft(#{r.inspect}/#{errors.full_messages})" # XXX
r
end
# This is the method that Spree calls when the user has requested that the
# address be their default address. Spree makes a copy from the order. Instead
# we just want to reference the address so we don't create extra address objects.
def persist_order_address(order)
uaddrcount self, "U:poa:b4", order: order # XXX
r = update_attributes bill_address_id: order.bill_address_id
# May not be present if delivery step has been removed
r &= update_attributes ship_address_id: order.ship_address_id if order.ship_address
uaddrcount self, "U:poa:aft(#{r.inspect}/#{errors.full_messages})", order: order # XXX
r
end
end
Added a validation to verify user's default address ownership.
# Monkey-patch of the configured Spree user class: keeps the user's default
# billing/shipping addresses linked back to the user record, and validates
# that those defaults actually belong to the user. The uaddrcount(...) calls
# and "XXX" markers are temporary debug instrumentation slated for removal.
Spree.user_class.class_eval do
has_many :addresses, -> { where(:deleted_at => nil).order("updated_at DESC") }, :class_name => 'Spree::Address'
before_validation { uaddrcount(self.id ? self : nil, "U:B4VALIDATION") } # XXX
before_save { uaddrcount(self.id ? self : nil, "U:B4SAVE") } # XXX
after_save { uaddrcount(self.id ? self : nil, "U:AftSAVE") } # XXX
before_validation :link_address # XXX after_save
# XXX / TODO: Probably want to get rid of this validation before deploying to
# production because there is old invalid data.
validate :verify_address_owners
# XXX
# Validates that the default addresses are owned by the user.
def verify_address_owners
if bill_address && bill_address.user_id != self.id
errors.add(:bill_address, 'Billing address does not belong to this user')
end
if ship_address && ship_address.user_id != self.id
errors.add(:ship_address, 'Shipping address does not belong to this user')
end
end
# Pre-validation hook that adds user_id to addresses that are assigned to the
# user's default address slots.
def link_address
uaddrcount self.id && self, "U:la:b4(#{changes})" # XXX
r = true
if self.bill_address && !self.bill_address.user
uaddrcount self.id && self, "U:la:bill" # XXX
# Non-editable addresses are cloned before being claimed so the original
# record stays untouched.
unless self.bill_address.editable?
self.bill_address = self.bill_address.clone
end
self.bill_address.user = self
# XXX r &= self.bill_address.save
end
if self.ship_address && !self.ship_address.user
uaddrcount self.id && self, "U:la:ship" # XXX
unless self.ship_address.editable?
self.ship_address = self.ship_address.clone
end
self.ship_address.user = self
# XXX r &= self.ship_address.save
end
# XXX r &= save
uaddrcount self.id && self, "U:la:aft(#{r.inspect}/#{bill_address.try(:errors).try(:full_messages)}/#{ship_address.try(:errors).try(:full_messages)})" # XXX
r
end
# Assigns +address+ as the user's default billing and/or shipping address,
# depending on which of the +billing+/+shipping+ flags are present.
def save_default_addresses(billing, shipping, address)
uaddrcount self, "U:sda:b4" # XXX
# TODO: is this supposed to set both to the same address ID?
r = update_attributes(bill_address_id: address.id) if billing.present?
r &= update_attributes(ship_address_id: address.id) if shipping.present?
uaddrcount self, "U:sda:aft(#{r.inspect}/#{errors.full_messages})" # XXX
r
end
# This is the method that Spree calls when the user has requested that the
# address be their default address. Spree makes a copy from the order. Instead
# we just want to reference the address so we don't create extra address objects.
def persist_order_address(order)
uaddrcount self, "U:poa:b4", order: order # XXX
r = update_attributes bill_address_id: order.bill_address_id
# May not be present if delivery step has been removed
r &= update_attributes ship_address_id: order.ship_address_id if order.ship_address
uaddrcount self, "U:poa:aft(#{r.inspect}/#{errors.full_messages})", order: order # XXX
r
end
end
|
# Capistrano (v2) deployment recipe for ScholarSphere.
# Example Usage:
# deploy to staging: cap staging deploy
# deploy a specific branch to qa: cap -s branch=cappy qa deploy
# deploy a specific revision to staging: cap -s revision=c9800f1 staging deploy
# deploy a specific tag to production: cap -s tag=my_tag production deploy
# keep only the last 3 releases on staging: cap -s keep_releases=3 staging deploy:cleanup
# Plugins: Bundler integration, rbenv, multistage environments, whenever
# cron management, and IRC deploy notifications.
require 'bundler/capistrano'
require 'capistrano-rbenv'
require 'capistrano/ext/multistage'
require 'whenever/capistrano'
require 'capistrano-notification'
set :application, "scholarsphere"
set :whenever_command, "bundle exec whenever"
set :scm, :git
set :deploy_via, :remote_cache
set :repository, "https://github.com/psu-stewardship/#{application}.git"
set :deploy_to, "/opt/heracles/deploy/#{application}"
set :user, "deploy"
# SSH as the deploy user with a dedicated key and agent forwarding; PTY is
# required for sudo prompts on some hosts.
ssh_options[:forward_agent] = true
ssh_options[:keys] = [File.join(ENV["HOME"], ".ssh", "id_deploy_rsa")]
set :use_sudo, false
default_run_options[:pty] = true
set :rbenv_ruby_version, "2.0.0-p247"
set :rbenv_setup_shell, false
# Announce every deploy on IRC.
notification.irc do |irc|
irc.user 'cappy'
irc.host 'chat.freenode.net'
irc.channel '#scholarsphere'
irc.message { "[#{irc.user}] #{local_user} deployed #{application} to #{stage}" }
end
# override default restart task for apache passenger
namespace :deploy do
task :start do ; end
task :stop do ; end
task :restart, :roles => :app, :except => { :no_release => true } do
run "touch #{current_path}/tmp/restart.txt"
end
end
# insert new task to symlink shared files
namespace :deploy do
desc "Link shared files"
task :symlink_shared do
run <<-CMD.compact
ln -sf /dlt/#{application}/config_#{stage}/#{application}/database.yml #{release_path}/config/ &&
ln -sf /dlt/#{application}/config_#{stage}/#{application}/fedora.yml #{release_path}/config/ &&
ln -sf /dlt/#{application}/config_#{stage}/#{application}/hydra-ldap.yml #{release_path}/config/ &&
ln -sf /dlt/#{application}/config_#{stage}/#{application}/solr.yml #{release_path}/config/ &&
ln -sf /dlt/#{application}/config_#{stage}/#{application}/redis.yml #{release_path}/config/ &&
ln -sf /dlt/#{application}/config_#{stage}/#{application}/secret_token.rb #{release_path}/config/initializers/
CMD
end
end
before "deploy:finalize_update", "deploy:symlink_shared"
# Always run migrations.
after "deploy:update_code", "deploy:migrate"
# Resolrize.
namespace :deploy do
desc "Re-solrize objects"
task :resolrize, :roles => :solr do
run <<-CMD.compact
cd -- #{latest_release} &&
RAILS_ENV=#{rails_env.to_s.shellescape} #{rake} #{application}:resolrize
CMD
end
end
# NOTE(review): the resolrize hook is currently disabled.
#after "deploy:migrate", "deploy:resolrize"
# Restart resque-pool.
namespace :deploy do
desc "restart resque-pool"
task :resquepoolrestart do
run "sudo /sbin/service resque_pool restart"
end
end
before "deploy:restart", "deploy:resquepoolrestart"
# config/deploy/_passenger.rb hooks.
after "rbenv:setup", "passenger:install"
after "deploy:restart", "passenger:warmup"
# Keep the last X number of releases.
set :keep_releases, 5
#after "passenger:warmup", "deploy:cleanup"
# end
Adding resolrize back into deployments.
# Capistrano (v2) deployment recipe for ScholarSphere.
# Example Usage:
# deploy to staging: cap staging deploy
# deploy a specific branch to qa: cap -s branch=cappy qa deploy
# deploy a specific revision to staging: cap -s revision=c9800f1 staging deploy
# deploy a specific tag to production: cap -s tag=my_tag production deploy
# keep only the last 3 releases on staging: cap -s keep_releases=3 staging deploy:cleanup
# Plugins: Bundler integration, rbenv, multistage environments, whenever
# cron management, and IRC deploy notifications.
require 'bundler/capistrano'
require 'capistrano-rbenv'
require 'capistrano/ext/multistage'
require 'whenever/capistrano'
require 'capistrano-notification'
set :application, "scholarsphere"
set :whenever_command, "bundle exec whenever"
set :scm, :git
set :deploy_via, :remote_cache
set :repository, "https://github.com/psu-stewardship/#{application}.git"
set :deploy_to, "/opt/heracles/deploy/#{application}"
set :user, "deploy"
# SSH as the deploy user with a dedicated key and agent forwarding; PTY is
# required for sudo prompts on some hosts.
ssh_options[:forward_agent] = true
ssh_options[:keys] = [File.join(ENV["HOME"], ".ssh", "id_deploy_rsa")]
set :use_sudo, false
default_run_options[:pty] = true
set :rbenv_ruby_version, "2.0.0-p247"
set :rbenv_setup_shell, false
# Announce every deploy on IRC.
notification.irc do |irc|
irc.user 'cappy'
irc.host 'chat.freenode.net'
irc.channel '#scholarsphere'
irc.message { "[#{irc.user}] #{local_user} deployed #{application} to #{stage}" }
end
# override default restart task for apache passenger
namespace :deploy do
task :start do ; end
task :stop do ; end
task :restart, :roles => :app, :except => { :no_release => true } do
run "touch #{current_path}/tmp/restart.txt"
end
end
# insert new task to symlink shared files
namespace :deploy do
desc "Link shared files"
task :symlink_shared do
run <<-CMD.compact
ln -sf /dlt/#{application}/config_#{stage}/#{application}/database.yml #{release_path}/config/ &&
ln -sf /dlt/#{application}/config_#{stage}/#{application}/fedora.yml #{release_path}/config/ &&
ln -sf /dlt/#{application}/config_#{stage}/#{application}/hydra-ldap.yml #{release_path}/config/ &&
ln -sf /dlt/#{application}/config_#{stage}/#{application}/solr.yml #{release_path}/config/ &&
ln -sf /dlt/#{application}/config_#{stage}/#{application}/redis.yml #{release_path}/config/ &&
ln -sf /dlt/#{application}/config_#{stage}/#{application}/secret_token.rb #{release_path}/config/initializers/
CMD
end
end
before "deploy:finalize_update", "deploy:symlink_shared"
# Always run migrations.
after "deploy:update_code", "deploy:migrate"
# Resolrize.
namespace :deploy do
desc "Re-solrize objects"
task :resolrize, :roles => :solr do
run <<-CMD.compact
cd -- #{latest_release} &&
RAILS_ENV=#{rails_env.to_s.shellescape} #{rake} #{application}:resolrize
CMD
end
end
# Re-index in Solr after every migration.
after "deploy:migrate", "deploy:resolrize"
# Restart resque-pool.
namespace :deploy do
desc "restart resque-pool"
task :resquepoolrestart do
run "sudo /sbin/service resque_pool restart"
end
end
before "deploy:restart", "deploy:resquepoolrestart"
# config/deploy/_passenger.rb hooks.
after "rbenv:setup", "passenger:install"
after "deploy:restart", "passenger:warmup"
# Keep the last X number of releases.
set :keep_releases, 5
#after "passenger:warmup", "deploy:cleanup"
# end
|
# Homebrew formula for mpssh, a tool that runs a command over ssh on many
# hosts in parallel.
class Mpssh < Formula
desc "Mass parallel ssh"
homepage "https://github.com/ndenev/mpssh"
license "BSD-3-Clause"
head "https://github.com/ndenev/mpssh.git", branch: "master"
stable do
url "https://github.com/ndenev/mpssh/archive/1.3.3.tar.gz"
sha256 "510e11c3e177a31c1052c8b4ec06357c147648c86411ac3ed4ac814d0d927f2f"
patch do
# don't install binaries as root (upstream commit)
url "https://github.com/ndenev/mpssh/commit/3cbb868b6fdf8dff9ab86868510c0455ad1ec1b3.patch?full_index=1"
sha256 "a6c596c87a4945e6a77b779fcc42867033dbfd95e27ede492e8b841738a67316"
end
end
# Pre-built bottles per OS/architecture.
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "bcef6cadd8e60b9856c5cc99d1047deaee4a18a852127c0e4f22fb59f9751371"
sha256 cellar: :any_skip_relocation, arm64_big_sur:  "f9d5c61b0953345267eda6e05e0c712823ecf4d037e2960ebcd4d836c045ef4d"
sha256 cellar: :any_skip_relocation, monterey:       "94d6b1821f850cb852373d0e46b9da787b799d726f4917ece31a0e0dc149b25a"
sha256 cellar: :any_skip_relocation, big_sur:        "d6e032b03f612d0be60c38b1af6688f8786e9c097d52c2e8bd3cd507290e4482"
sha256 cellar: :any_skip_relocation, catalina:       "714e7b0e97a942f68885baefa599d97e143631154480d0246d04e21a49910acf"
sha256 cellar: :any_skip_relocation, mojave:         "e37b5e479ba7f9ad86373e646c63485b55dd1381c2cbc130150e108886675b72"
sha256 cellar: :any_skip_relocation, high_sierra:    "1057c47b866d50031a23a0bd244d3bc056b9f12a4d9bf0aeebc0ea292c484638"
sha256 cellar: :any_skip_relocation, sierra:         "90d758a0f7accf0b63755c3de8100a880b500e732fc8924123ab2a1c7ce688f8"
sha256 cellar: :any_skip_relocation, el_capitan:     "e5ac485861dfca0be2bb1ca2eb5826b5ca5977c0d2abb12dc58de011c18046f1"
sha256 cellar: :any_skip_relocation, yosemite:       "2b91c9a9dbae19e99b8b8735bb3292cc056dcf8e06472c0b2d354f64896a4186"
sha256 cellar: :any_skip_relocation, x86_64_linux:   "5eabc527317cd3a5091e6efabe168b926693d6eb64644fec082a251a99725669"
end
def install
# Compiler and install path are passed on the Makefile command line.
system "make", "install", "CC=#{ENV.cc}", "BIN=#{bin}"
man1.install "mpssh.1"
end
test do
# NOTE(review): bare invocation presumably prints usage and exits
# successfully — confirm against upstream before relying on this.
system "#{bin}/mpssh"
end
end
mpssh: remove Yosemite bottle
Yosemite support was removed in Homebrew 3.5.0.
# Homebrew formula for mpssh, a tool that runs a command over ssh on many
# hosts in parallel. (Yosemite bottle dropped in this revision.)
class Mpssh < Formula
desc "Mass parallel ssh"
homepage "https://github.com/ndenev/mpssh"
license "BSD-3-Clause"
head "https://github.com/ndenev/mpssh.git", branch: "master"
stable do
url "https://github.com/ndenev/mpssh/archive/1.3.3.tar.gz"
sha256 "510e11c3e177a31c1052c8b4ec06357c147648c86411ac3ed4ac814d0d927f2f"
patch do
# don't install binaries as root (upstream commit)
url "https://github.com/ndenev/mpssh/commit/3cbb868b6fdf8dff9ab86868510c0455ad1ec1b3.patch?full_index=1"
sha256 "a6c596c87a4945e6a77b779fcc42867033dbfd95e27ede492e8b841738a67316"
end
end
# Pre-built bottles per OS/architecture.
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "bcef6cadd8e60b9856c5cc99d1047deaee4a18a852127c0e4f22fb59f9751371"
sha256 cellar: :any_skip_relocation, arm64_big_sur:  "f9d5c61b0953345267eda6e05e0c712823ecf4d037e2960ebcd4d836c045ef4d"
sha256 cellar: :any_skip_relocation, monterey:       "94d6b1821f850cb852373d0e46b9da787b799d726f4917ece31a0e0dc149b25a"
sha256 cellar: :any_skip_relocation, big_sur:        "d6e032b03f612d0be60c38b1af6688f8786e9c097d52c2e8bd3cd507290e4482"
sha256 cellar: :any_skip_relocation, catalina:       "714e7b0e97a942f68885baefa599d97e143631154480d0246d04e21a49910acf"
sha256 cellar: :any_skip_relocation, mojave:         "e37b5e479ba7f9ad86373e646c63485b55dd1381c2cbc130150e108886675b72"
sha256 cellar: :any_skip_relocation, high_sierra:    "1057c47b866d50031a23a0bd244d3bc056b9f12a4d9bf0aeebc0ea292c484638"
sha256 cellar: :any_skip_relocation, sierra:         "90d758a0f7accf0b63755c3de8100a880b500e732fc8924123ab2a1c7ce688f8"
sha256 cellar: :any_skip_relocation, el_capitan:     "e5ac485861dfca0be2bb1ca2eb5826b5ca5977c0d2abb12dc58de011c18046f1"
sha256 cellar: :any_skip_relocation, x86_64_linux:   "5eabc527317cd3a5091e6efabe168b926693d6eb64644fec082a251a99725669"
end
def install
# Compiler and install path are passed on the Makefile command line.
system "make", "install", "CC=#{ENV.cc}", "BIN=#{bin}"
man1.install "mpssh.1"
end
test do
# NOTE(review): bare invocation presumably prints usage and exits
# successfully — confirm against upstream before relying on this.
system "#{bin}/mpssh"
end
end
|
require 'json'
require 'google_chart'
class SvtDeviationSpider < ActiveRecord::Base
# Associations: a spider aggregates consolidations, attached deliverables,
# and the per-activity / per-deliverable answer values for one milestone.
has_many :svt_deviation_spider_consolidations
has_many :svt_deviation_spider_deliverables
has_many :svt_deviation_spider_activity_values
has_many :svt_deviation_spider_deliverable_values
has_many :svt_deviation_spider_maturities
belongs_to :milestone
belongs_to :project
# Lightweight value holders used by the getters and chart builders below.
# NOTE(review): constant names are Snake_case rather than CamelCase; kept
# as-is since external code may reference them by these exact names.
Spider_parameters = Struct.new(:deliverables, :activities, :psu_imported)
Chart = Struct.new(:meta_activity_name, :titles, :points, :points_ref)
Setting_to_chart = Struct.new(:setting, :weight)
# ***
# SET
# ***
# Populates this spider: creates one spider-deliverable row (plus its
# question/answer rows) for every deliverable applicable to the milestone,
# then carries forward deliverables that were hand-added on earlier spiders.
def init_spider_data
spider_parameters = self.get_parameters
questions = self.get_questions
if spider_parameters and spider_parameters.deliverables.count > 0
spider_parameters.deliverables.each do |deliverable|
add_deliverable(questions, deliverable, spider_parameters.activities, spider_parameters.psu_imported)
end
end
# Hand-added deliverables are re-attached flagged as added-by-hand
# (is_added_by_hand=true) and without initializing answers (init_answers=false).
deliverables_added_by_hand = self.get_deliverables_added_by_hand_in_previous_milestones
if deliverables_added_by_hand and deliverables_added_by_hand.count > 0
deliverables_added_by_hand.each do |deliverable|
add_deliverable(questions, deliverable, spider_parameters.activities, spider_parameters.psu_imported, true, false)
end
end
end
# Questions applicable to this spider's milestone: restricted to the
# project's lifecycle and the milestone name, de-duplicated via GROUP BY.
# (The is_active flag is not filtered here; add_deliverable checks it.)
def get_questions
questions = SvtDeviationQuestion.find(:all,
:joins => ["JOIN svt_deviation_question_milestone_names ON svt_deviation_question_milestone_names.svt_deviation_question_id = svt_deviation_questions.id",
"JOIN milestone_names ON milestone_names.id = svt_deviation_question_milestone_names.milestone_name_id",
"JOIN svt_deviation_question_lifecycles ON svt_deviation_question_lifecycles.svt_deviation_question_id = svt_deviation_questions.id"],
:conditions => ["svt_deviation_question_lifecycles.lifecycle_id = ? and milestone_names.title = ?", self.milestone.project.lifecycle_object.id, self.milestone.name],
:group=>"svt_deviation_questions.id")
return questions
end
# Records +deliverable+ on this spider (idempotently) and creates the
# per-question SvtDeviationSpiderValue rows for each applicable activity.
#
# questions        - SvtDeviationQuestion rows in scope for the milestone
# deliverable      - the SvtDeviationDeliverable to attach
# activities       - SvtDeviationActivity rows in scope for the milestone
# psu_imported     - true when applicability is driven by a PSU reference
# is_added_by_hand - marks deliverables carried over manually (default false)
# init_answers     - accepted for interface compatibility; not referenced in
#                    this method
def add_deliverable(questions, deliverable, activities, psu_imported, is_added_by_hand=false, init_answers=false)
  # Check if we didn't already record this deliverable for this spider.
  new_spider_deliverable = SvtDeviationSpiderDeliverable.find(:first, :conditions=>["svt_deviation_spider_id = ? and svt_deviation_deliverable_id = ?", self.id, deliverable.id])
  if !new_spider_deliverable
    new_spider_deliverable = SvtDeviationSpiderDeliverable.new
    new_spider_deliverable.svt_deviation_spider_id = self.id
    new_spider_deliverable.svt_deviation_deliverable_id = deliverable.id
    # Carry the "not done" flag forward from earlier consolidated spiders.
    if deliverable_not_done?(deliverable.id)
      new_spider_deliverable.not_done = true
    end
  end
  new_spider_deliverable.is_added_by_hand = is_added_by_hand
  new_spider_deliverable.save
  project_id = self.milestone.project_id
  # Latest PSU reference for the project (find(:last) on ascending order).
  last_reference = SvtDeviationSpiderReference.find(:last, :conditions => ["project_id = ?", project_id], :order => "version_number asc")
  if activities and activities.count > 0
    activities.each do |activity|
      to_add = false
      setting = SvtDeviationSpiderSetting.find(:all, :conditions=>["svt_deviation_spider_reference_id = ? and deliverable_name = ? and activity_name = ?", last_reference, deliverable.name, activity.name])
      if setting and setting.count == 1
        if (new_spider_deliverable.is_added_by_hand or setting[0].answer_1 == "Yes" or setting[0].answer_3 == "Another template is used")
          to_add = true
        end
      elsif setting and setting.count > 1
        setting.each do |sett|
          if (new_spider_deliverable.is_added_by_hand or sett.answer_1 == "Yes" or sett.answer_3 == "Another template is used")
            to_add = true
          end
        end
      elsif !psu_imported or new_spider_deliverable.is_added_by_hand
        # No PSU setting found: applicable unless applicability is PSU-driven.
        to_add = true
      end
      if to_add
        if questions and questions.count > 0
          questions.each do |question|
            if question.is_active and question.svt_deviation_activity_id == activity.id and question.svt_deviation_deliverable_id == deliverable.id
              # BUGFIX: the duplicate check must match the spider-deliverable
              # row id (the FK that value rows are created with below), not
              # the deliverable id as before.
              question_already_recorded = SvtDeviationSpiderValue.find(:first, :conditions=>["svt_deviation_spider_deliverable_id = ? and svt_deviation_question_id = ?", new_spider_deliverable.id, question.id])
              if !question_already_recorded
                new_deviation_spider_values = SvtDeviationSpiderValue.new
                new_deviation_spider_values.svt_deviation_question_id = question.id
                new_deviation_spider_values.svt_deviation_spider_deliverable_id = new_spider_deliverable.id
                # "Not done" deliverables default every answer to false;
                # otherwise the question is left unanswered (nil).
                if new_spider_deliverable.not_done
                  new_deviation_spider_values.answer = false
                else
                  new_deviation_spider_values.answer = nil
                end
                new_deviation_spider_values.answer_reference = question.answer_reference
                new_deviation_spider_values.save
              end
            end
          end
        end
        to_add = false
      end
    end
  end
end
# ***
# GET
# ***
# Builds the Spider_parameters struct for this spider's milestone.
# When a PSU spider reference exists, activities/deliverables come from its
# settings (deliverables only when answer_1 == "Yes" or answer_3 says another
# template is used) and psu_imported is set. Otherwise they are derived from
# the question tables for the lifecycle/milestone.
def get_parameters
activities = Array.new
deliverables = Array.new
psu_imported = false
# Check PSU
deviation_spider_reference = self.milestone.project.get_current_svt_deviation_spider_reference
if deviation_spider_reference
deviation_spider_reference.svt_deviation_spider_settings.each do |setting|
activity_parameter = SvtDeviationActivity.find(:first, :conditions => ["name = ? and is_active = ?", setting.activity_name, true])
deliverable_parameter = SvtDeviationDeliverable.find(:first, :conditions => ["name = ? and is_active = ?", setting.deliverable_name, true])
if activity_parameter and deliverable_parameter
if !activities.include? activity_parameter
activities << activity_parameter
end
if setting.answer_1 == "Yes" or setting.answer_3 == "Another template is used"
if !deliverables.include? deliverable_parameter
deliverables << deliverable_parameter
end
end
psu_imported = true
end
end
else
# No PSU reference: derive both lists from the question tables.
deliverables = self.get_deliverables
activities = SvtDeviationActivity.find(:all,
:joins => ["JOIN svt_deviation_questions ON svt_deviation_questions.svt_deviation_activity_id = svt_deviation_activities.id",
"JOIN svt_deviation_question_milestone_names ON svt_deviation_question_milestone_names.svt_deviation_question_id = svt_deviation_questions.id",
"JOIN milestone_names ON milestone_names.id = svt_deviation_question_milestone_names.milestone_name_id",
"JOIN svt_deviation_question_lifecycles ON svt_deviation_question_lifecycles.svt_deviation_question_id = svt_deviation_questions.id"],
:conditions => ["svt_deviation_question_lifecycles.lifecycle_id = ? and milestone_names.title = ? and svt_deviation_activities.is_active = ?", self.milestone.project.lifecycle_object.id, self.milestone.name, true],
:group => "svt_deviation_questions.svt_deviation_activity_id")
end
return_parameters = Spider_parameters.new
return_parameters.activities = activities
return_parameters.deliverables = deliverables
return_parameters.psu_imported = psu_imported
return return_parameters
end
# Active deliverables that have questions for this milestone's lifecycle and
# name, de-duplicated via GROUP BY on the deliverable foreign key.
def get_deliverables
SvtDeviationDeliverable.find(:all,
:joins => ["JOIN svt_deviation_questions ON svt_deviation_questions.svt_deviation_deliverable_id = svt_deviation_deliverables.id",
"JOIN svt_deviation_question_milestone_names ON svt_deviation_question_milestone_names.svt_deviation_question_id = svt_deviation_questions.id",
"JOIN milestone_names ON milestone_names.id = svt_deviation_question_milestone_names.milestone_name_id",
"JOIN svt_deviation_question_lifecycles ON svt_deviation_question_lifecycles.svt_deviation_question_id = svt_deviation_questions.id"],
:conditions => ["svt_deviation_question_lifecycles.lifecycle_id = ? and milestone_names.title = ? and svt_deviation_deliverables.is_active = ?", self.milestone.project.lifecycle_object.id, self.milestone.name, true],
:group => "svt_deviation_questions.svt_deviation_deliverable_id")
end
# ***
# GET DELIVERABLES FROM PREVIOUS SPIDER
# ***
# Deliverables that were manually added on consolidated spiders of earlier
# milestones and are not yet attached to this spider.
# NOTE(review): the loop range 0..self_index also covers the CURRENT
# milestone's last consolidated spider, not only previous ones — confirm
# whether that is intended.
def get_deliverables_added_by_hand_in_previous_milestones
deliverables_found = Array.new
deviation_deliverables = Array.new
# Deliverables already attached to this spider (used to avoid duplicates).
self.svt_deviation_spider_deliverables.each do |spider_deliverable|
deviation_deliverables << spider_deliverable.svt_deviation_deliverable
end
# Get milestone index
project_milestones = get_project_milestones_with_spider()
self_index = get_spider_milestone_index()
# Search for each last spider consolidated for each previous milestone if we have deviation_deliverable added by hand and with questions availables for the milestone of our current spider
for i in 0..self_index
project_milestone = project_milestones[i]
last_spider = SvtDeviationSpider.find(:last, :joins=>["JOIN svt_deviation_spider_consolidations ON svt_deviation_spiders.id = svt_deviation_spider_consolidations.svt_deviation_spider_id"], :conditions =>["milestone_id = ?", project_milestone.id] )
if last_spider != nil
last_spider.svt_deviation_spider_deliverables.each do |spider_deliverable|
if spider_deliverable.is_added_by_hand and !deviation_deliverables.include? spider_deliverable.svt_deviation_deliverable
deliverables_found << spider_deliverable.svt_deviation_deliverable
end
end
end
end
return deliverables_found
end
# Returns the list of deliverables that were not completed on the previous
# milestone's last consolidated spider and are not already on this spider.
# This requirement is not needed anymore.
def get_deliverables_not_completed
deliverables_availables = Array.new
deviation_deliverables = Array.new
# Deliverables already attached to this spider (used to avoid duplicates).
self.svt_deviation_spider_deliverables.each do |spider_deliverable|
deviation_deliverables << spider_deliverable.svt_deviation_deliverable
end
# Check last milestone
project_milestones = get_project_milestones_with_spider()
self_index = get_spider_milestone_index()
# If the self milestone was found and it's not the first
if self_index > 0
previous_milestone = project_milestones[self_index-1]
last_spider = SvtDeviationSpider.find(:last, :joins=>["JOIN svt_deviation_spider_consolidations ON svt_deviation_spiders.id = svt_deviation_spider_consolidations.svt_deviation_spider_id"], :conditions =>["milestone_id = ?", previous_milestone.id] )
# Check for each deliverable if one question is to false, if this is the case, we add the deliverable.
if last_spider != nil
last_spider.svt_deviation_spider_deliverables.each do |spider_deliverable|
deliverable_not_completed = false
# A deliverable counts as incomplete when any answer is nil or differs
# from its reference answer.
spider_deliverable.svt_deviation_spider_values.each do |spider_value|
if spider_value.answer == nil or spider_value.answer != spider_value.answer_reference
deliverable_not_completed = true
end
end
# Add the delivrable as not completed if not completed AND not already present in the current spider
if deliverable_not_completed and !deviation_deliverables.include? spider_deliverable.svt_deviation_deliverable
deliverables_availables << spider_deliverable.svt_deviation_deliverable
end
end
end
end
return deliverables_availables
end
# True when +deliverable_id+ was flagged not_done on any consolidated spider
# of a milestone strictly before this spider's milestone.
def deliverable_not_done?(deliverable_id)
# NOTE(review): deviation_deliverables is populated below but never read in
# this method — it looks like dead code copied from the sibling getters.
deviation_deliverables = Array.new
self.svt_deviation_spider_deliverables.each do |spider_deliverable|
deviation_deliverables << spider_deliverable.svt_deviation_deliverable
end
project_milestones = get_project_milestones_with_spider()
self_index = get_spider_milestone_index()
deliverable_not_done = false
if self_index > 0
# Scan the last consolidated spider of every earlier milestone.
for i in 0..(self_index-1)
milestone_to_analyze = project_milestones[i]
spider_to_analyze = SvtDeviationSpider.find(:last, :joins=>["JOIN svt_deviation_spider_consolidations ON svt_deviation_spiders.id = svt_deviation_spider_consolidations.svt_deviation_spider_id"], :conditions =>["milestone_id = ?", milestone_to_analyze.id] )
if spider_to_analyze != nil
spider_deliverable_to_analyze = SvtDeviationSpiderDeliverable.find(:first, :conditions => ["svt_deviation_spider_id = ? and svt_deviation_deliverable_id = ?", spider_to_analyze.id, deliverable_id])
if spider_deliverable_to_analyze != nil
if spider_deliverable_to_analyze.not_done == true
deliverable_not_done = true
end
end
end
end
end
return deliverable_not_done
end
# ***
# GET MILESTONES
# ***
# Return a list of milestones sorted and available for spiders
# Milestones of the owning project that can carry a deviation spider:
# eligible, not a quality gate (name starting with 'QG'), and not virtual.
def get_project_milestones_with_spider
  self.milestone.project.sorted_milestones.select do |candidate|
    eligible = candidate.is_eligible_for_spider?
    quality_gate = (candidate.name[0..1] == 'QG')
    eligible && !quality_gate && candidate.is_virtual == false
  end
end
# Return the index of the milestone of the current spider from the array of sorted milestones
# Position of this spider's milestone within the sorted list of
# spider-eligible milestones, or -1 when not found. If the same milestone id
# somehow appears more than once, the LAST matching index wins (this mirrors
# the original scan-to-end behavior).
def get_spider_milestone_index
  sorted_milestones = get_project_milestones_with_spider()
  found_at = -1
  sorted_milestones.each_with_index do |candidate, position|
    found_at = position if candidate.id == self.milestone.id
  end
  return found_at
end
# ***
# CHARTS
# ***
# Builds a Chart struct for one meta-activity with one axis per deliverable:
# each point is the fraction of "yes" answers for that deliverable's
# questions, with a constant 1.0 reference series.
# NOTE(review): callers may pass either an id or an AR object as
# meta_activity_id (generate_deliverable_charts passes the object; legacy
# Rails quotes AR objects to their id in :conditions) — confirm.
def generate_deliverable_chart(meta_activity_id)
chart_questions = SvtDeviationSpiderValue.find(:all,
:joins => [
"JOIN svt_deviation_spider_deliverables ON svt_deviation_spider_deliverables.id = svt_deviation_spider_values.svt_deviation_spider_deliverable_id",
"JOIN svt_deviation_deliverables ON svt_deviation_deliverables.id = svt_deviation_spider_deliverables.svt_deviation_deliverable_id",
"JOIN svt_deviation_questions ON svt_deviation_questions.id = svt_deviation_spider_values.svt_deviation_question_id",
"JOIN svt_deviation_activities ON svt_deviation_activities.id = svt_deviation_questions.svt_deviation_activity_id"
],
:conditions => ["svt_deviation_spider_deliverables.svt_deviation_spider_id = ? and svt_deviation_activities.svt_deviation_meta_activity_id = ? and svt_deviation_deliverables.is_active = ?", self.id, meta_activity_id, true],
:order => "svt_deviation_deliverables.id")
chart = Chart.new
if chart_questions
meta_activity_name = SvtDeviationMetaActivity.find(:first, :conditions => ["id = ?", meta_activity_id]).name
chart.meta_activity_name = meta_activity_name
chart.titles = Array.new
chart.points = Array.new
chart.points_ref = Array.new
# Rows are ordered by deliverable id; accumulate yes/total per group and
# flush a data point each time the deliverable changes.
current_deliverable = nil
current_yes_count = 0.0
current_question_count = 0.0
chart_questions.each do |question|
# Deliverable
if (current_deliverable == nil) or (current_deliverable.id != question.svt_deviation_spider_deliverable.svt_deviation_deliverable.id)
if current_deliverable != nil
chart.titles << current_deliverable.name
chart.points << (current_yes_count / current_question_count)
chart.points_ref << 1.0
end
current_deliverable = question.svt_deviation_spider_deliverable.svt_deviation_deliverable
current_yes_count = 0
current_question_count = 0
end
# Question
if question.answer == true
current_yes_count = current_yes_count + 1.0
end
current_question_count = current_question_count + 1.0
end
# Flush the final group, then pad to at least three axes.
# NOTE(review): the padding presumably keeps the radar chart renderable —
# confirm against the google_chart gem's requirements.
if current_deliverable
chart.titles << current_deliverable.name
chart.points << (current_yes_count / current_question_count)
chart.points_ref << 1.0
if chart.titles.count <= 2
chart.titles << ""
chart.points << 0.0
chart.points_ref << 0.0
end
end
end
return chart
end
# Builds a Chart struct for one meta-activity with one axis per activity:
# each point is the fraction of "yes" answers for that activity's questions,
# with a constant 1.0 reference series.
# NOTE(review): callers may pass either an id or an AR object as
# meta_activity_id (generate_activity_charts passes the object; legacy Rails
# quotes AR objects to their id in :conditions) — confirm.
def generate_activity_chart(meta_activity_id)
chart_questions = SvtDeviationSpiderValue.find(:all,
:joins => ["JOIN svt_deviation_spider_deliverables ON svt_deviation_spider_deliverables.id = svt_deviation_spider_values.svt_deviation_spider_deliverable_id",
"JOIN svt_deviation_questions ON svt_deviation_questions.id = svt_deviation_spider_values.svt_deviation_question_id",
"JOIN svt_deviation_activities ON svt_deviation_activities.id = svt_deviation_questions.svt_deviation_activity_id"],
:conditions => ["svt_deviation_spider_deliverables.svt_deviation_spider_id = ? and svt_deviation_activities.svt_deviation_meta_activity_id = ? and svt_deviation_activities.is_active = ?", self.id, meta_activity_id, true],
:order => "svt_deviation_activities.id")
chart = Chart.new
if chart_questions
meta_activity_name = SvtDeviationMetaActivity.find(:first, :conditions => ["id = ?", meta_activity_id]).name
chart.titles = Array.new
chart.points = Array.new
chart.points_ref = Array.new
chart.meta_activity_name = meta_activity_name
# Rows are ordered by activity id; accumulate yes/total per group and
# flush a data point each time the activity changes.
current_activity = nil
current_yes_count = 0.0
current_question_count = 0.0
chart_questions.each do |question|
# Activities
if (current_activity == nil) or (current_activity.id != question.svt_deviation_question.svt_deviation_activity_id)
if current_activity != nil
chart.titles << current_activity.name
chart.points << (current_yes_count / current_question_count)
chart.points_ref << 1.0
end
current_activity = question.svt_deviation_question.svt_deviation_activity
current_yes_count = 0.0
current_question_count = 0.0
end
# Question
if question.answer == true
current_yes_count = current_yes_count + 1.0
end
current_question_count = current_question_count + 1.0
end
# Flush the final group, then pad to at least three axes.
# NOTE(review): the padding presumably keeps the radar chart renderable —
# confirm against the google_chart gem's requirements.
if current_activity
chart.titles << current_activity.name
chart.points << (current_yes_count / current_question_count)
chart.points_ref << 1.0
if chart.titles.count <= 2
chart.titles << ""
chart.points << 0.0
chart.points_ref << 0.0
end
end
end
return chart
end
# One deliverable-axis chart per active meta-activity.
# NOTE(review): the whole AR object is handed to generate_deliverable_chart,
# whose parameter is named meta_activity_id — behavior preserved as-is.
def generate_deliverable_charts
  active_meta_activities = SvtDeviationMetaActivity.find(:all, :conditions => ["is_active = 1"])
  active_meta_activities.map { |meta_activity| generate_deliverable_chart(meta_activity) }
end
# One activity-axis chart per active meta-activity.
# NOTE(review): the whole AR object is handed to generate_activity_chart,
# whose parameter is named meta_activity_id — behavior preserved as-is.
def generate_activity_charts
  active_meta_activities = SvtDeviationMetaActivity.find(:all, :conditions => ["is_active = 1"])
  active_meta_activities.map { |meta_activity| generate_activity_chart(meta_activity) }
end
# Builds a Google pie chart summarising template adherence for the project's
# latest spider reference: "Standard" (answer_1 == "Yes"), "Forecast deviation"
# (answer_1 == "No" and answer_2 == "No") and "Forecast customization"
# (everything else), as percentages of the de-duplicated settings (one
# highest-weight setting per deliverable name, see #update_setting_to_count_array).
# With no reference or no settings, the chart shows 100% standard.
# @return [GoogleChart::PieChart]
def generate_pie_chart
standard = customization = deviation = total_number = 0
setting_to_count_array = Array.new
# Latest reference version recorded for this project.
reference = SvtDeviationSpiderReference.find(:last, :conditions=>["project_id = ?", self.milestone.project_id], :order=>"version_number")
if reference
settings = SvtDeviationSpiderSetting.find(:all, :conditions=>["svt_deviation_spider_reference_id = ?", reference])
if settings.count > 0
# Keep only the highest-weight setting per deliverable name.
settings.each do |setting|
setting_to_count_array = update_setting_to_count_array(setting_to_count_array, setting)
end
# Classify each retained setting into one of the three slices.
setting_to_count_array.each do |setting_to_count|
if setting_to_count.setting.answer_1 == "Yes"
standard = standard + 1
elsif setting_to_count.setting.answer_1 == "No" and setting_to_count.setting.answer_2 == "No"
deviation = deviation + 1
else
customization = customization + 1
end
total_number = total_number + 1
end
# Guard against division by zero below.
if total_number == 0
total_number = 1
end
standard = standard.to_f / total_number.to_f * 100
customization = customization.to_f / total_number.to_f * 100
deviation = deviation.to_f / total_number.to_f * 100
else
standard = 100
end
else
standard = 100
end
lifecycle_name = Lifecycle.find(:first, :conditions=>["id = ?", milestone.project.lifecycle_id]).name
chart = GoogleChart::PieChart.new('500x220', "Result of "+ lifecycle_name +" adherence") do |pie_chart|
pie_chart.data "Forecast deviation " + deviation.round.to_s + "%", deviation, "0101DF"
pie_chart.data "Forecast customization " + customization.round.to_s + "%", customization, "5858FA"
pie_chart.data "Standard " + standard.round.to_s + "%", standard, "A9A9F5"
end
return chart
end
# Builds a Google pie chart of PQP adherence from consolidation records:
# "Standard" is computed by #get_devia_standard, "Deviation" is the remainder.
# @param consolidations [Enumerable] consolidation records (see #get_devia_standard)
# @return [GoogleChart::PieChart]
def generate_devia_pie_chart(consolidations)
standard = get_devia_standard(consolidations)
deviation = 100 - standard
# NOTE(review): lifecycle_name is looked up but never used in the chart
# title below — confirm whether the title should include it.
lifecycle_name = Lifecycle.find(:first, :conditions=>["id = ?", self.milestone.project.lifecycle_id]).name
chart = GoogleChart::PieChart.new('500x220', "Result of PQP adherence") do |pie_chart|
pie_chart.data "Standard " + standard.round.to_s + "%", standard, "2E2EFE"
pie_chart.data "Deviation " + deviation.round.to_s + "%", deviation, "DF0101"
end
return chart
end
# Computes the percentage of consolidated deliverables that adhere to the
# standard (PQP) for this spider's project. A deliverable counts as standard
# when its consolidation score is 2 or 3 AND the latest spider reference marks
# it as using the standard template (answer_1 == "Yes") or an equivalent one
# (answer_3 == "Another template is used"). Duplicate consolidations for the
# same deliverable name are counted once.
# Fix 1: the original chained assignment `a = b = c = Array.new` made the
# three duplicate-tracking lists aliases of ONE array, merging the buckets.
# Fix 2: returns 0.0 instead of NaN when every consolidation is a duplicate.
# @param consolidations [Enumerable] records exposing #score and #deliverable
# @return [Float] adherence percentage in 0..100
def get_devia_standard(consolidations)
  standard_number = number_of_duplicate = 0
  duplicate_conso = Array.new
  duplicate_conso_not_set = Array.new
  duplicate_conso_set_no = Array.new
  last_reference = SvtDeviationSpiderReference.find(:last, :conditions => ["project_id = ?", self.milestone.project_id], :order => "version_number asc")
  consolidations.each do |conso|
    duplication_to_delete = false
    as_been_added = false
    if conso.score == 3 or conso.score == 2
      SvtDeviationSpiderSetting.find(:all, :conditions => ["svt_deviation_spider_reference_id = ? and deliverable_name = ?", last_reference, conso.deliverable.name]).each do |sett|
        if sett.answer_1 == "Yes" or sett.answer_3 == "Another template is used"
          if !duplicate_conso.include?(conso.deliverable.name)
            standard_number = standard_number + 1
            duplicate_conso.push(conso.deliverable.name)
            as_been_added = true
          else
            duplication_to_delete = true
          end
        else
          if !duplicate_conso_set_no.include?(conso.deliverable.name)
            duplicate_conso_set_no.push(conso.deliverable.name)
          else
            duplication_to_delete = true
          end
        end
      end
    else
      if !duplicate_conso_not_set.include?(conso.deliverable.name)
        duplicate_conso_not_set.push(conso.deliverable.name)
      else
        duplication_to_delete = true
      end
    end
    # Each duplicated consolidation reduces the effective deliverable count.
    if duplication_to_delete and !as_been_added
      number_of_duplicate = number_of_duplicate + 1
    end
  end
  number_of_deliverables = consolidations.count - number_of_duplicate
  # Avoid 0.0/0.0 (NaN) when the list was empty or all duplicates.
  return 0.0 if number_of_deliverables <= 0
  return standard_number.to_f / number_of_deliverables.to_f * 100
end
# Merges +setting+ into +setting_to_count_array+, keeping only the
# highest-weighted setting per deliverable name (see #get_weight).
# An existing entry with lower weight is removed; ties keep the existing entry.
# Fix: the original deleted from the array while `each`-iterating the SAME
# array, which makes Ruby's iterator skip the element after each deletion.
# Also removed the unused local `weight_from_duplicate`.
# @param setting_to_count_array [Array<Setting_to_chart>] accumulator (mutated in place)
# @param setting [SvtDeviationSpiderSetting] candidate setting to merge
# @return [Array<Setting_to_chart>] the accumulator
def update_setting_to_count_array(setting_to_count_array, setting)
  setting_to_count = Setting_to_chart.new
  setting_to_count.setting = setting
  setting_to_count.weight = get_weight(setting)
  deliverable_name = setting.deliverable_name
  keep_new = true
  # Iterate over a snapshot so deletions cannot disturb the traversal.
  setting_to_count_array.dup.each do |sett|
    next unless sett.setting.deliverable_name == deliverable_name
    if setting_to_count.weight > sett.weight
      setting_to_count_array.delete(sett)
    else
      keep_new = false
    end
  end
  setting_to_count_array.push(setting_to_count) if keep_new
  return setting_to_count_array
end
# Maps a spider setting to its comparison weight:
# 4 = standard ("Yes"), 1 = explicit double "No", 3 = another template, 2 = other.
# Branch order matters: answer_2 is checked before answer_3, as before.
def get_weight(setting)
  if setting.answer_1 == "Yes"
    4
  elsif setting.answer_2 == "No"
    1
  elsif setting.answer_3 == "Another template is used"
    3
  else
    2
  end
end
# True when a consolidation-temp row still exists for this spider, i.e. the
# spider has pending, not-yet-consolidated data.
# Fix: the original assigned a local `result` but the method's last expression
# was the `if`, so it returned nil (not false) when no temp record existed.
# nil is still falsy, but callers now get a real boolean.
def is_not_consolidated?
  temp = SvtDeviationSpiderConsolidationTemp.find(:first, :conditions=>["svt_deviation_spider_id = ?", self.id])
  return !temp.nil?
end
# Percentage of yes-answers across all spider values of all deliverables of
# this spider (integer percentage, truncated — unchanged from the original).
# Fix: guard against ZeroDivisionError when the spider has no values yet.
# @return [Integer] maturity in 0..100 (0 when there are no values)
def get_deviation_maturity
  total = 0
  yes = 0
  SvtDeviationSpiderDeliverable.find(:all, :conditions=>["svt_deviation_spider_id = ?", self.id]).each do |deliverable|
    SvtDeviationSpiderValue.find(:all, :conditions=>["svt_deviation_spider_deliverable_id = ?", deliverable.id]).each do |value|
      total = total + 1
      yes = yes + 1 if value.answer == true
    end
  end
  return 0 if total == 0
  return yes * 100 / total
end
end
Fix a bug in the spider deliverable handling.
require 'json'
require 'google_chart'
# Deviation spider: a per-milestone snapshot of standard-adherence questions
# and answers for a project. Builds Google charts (pie/spider) from the answers.
class SvtDeviationSpider < ActiveRecord::Base
has_many :svt_deviation_spider_consolidations
has_many :svt_deviation_spider_deliverables
has_many :svt_deviation_spider_activity_values
has_many :svt_deviation_spider_deliverable_values
has_many :svt_deviation_spider_maturities
belongs_to :milestone
belongs_to :project
# Plain value objects used by the parameter/chart-building methods below.
Spider_parameters = Struct.new(:deliverables, :activities, :psu_imported)
Chart = Struct.new(:meta_activity_name, :titles, :points, :points_ref)
Setting_to_chart = Struct.new(:setting, :weight)
# ***
# SET
# ***
# Populates this spider: records the parameterised deliverables from the
# current reference (see #get_parameters) plus deliverables that were added
# by hand on spiders of previous milestones.
def init_spider_data
spider_parameters = self.get_parameters
questions = self.get_questions
if spider_parameters and spider_parameters.deliverables.count > 0
spider_parameters.deliverables.each do |deliverable|
add_deliverable(questions, deliverable, spider_parameters.activities, spider_parameters.psu_imported)
end
end
deliverables_added_by_hand = self.get_deliverables_added_by_hand_in_previous_milestones
if deliverables_added_by_hand and deliverables_added_by_hand.count > 0
deliverables_added_by_hand.each do |deliverable|
# Hand-added deliverables are flagged as such (is_added_by_hand = true).
add_deliverable(questions, deliverable, spider_parameters.activities, spider_parameters.psu_imported, true, false)
end
end
end
# Returns the deviation questions that apply to this spider's milestone name
# and the project's lifecycle (grouped by question id to collapse join
# duplicates).
def get_questions
questions = SvtDeviationQuestion.find(:all,
:joins => ["JOIN svt_deviation_question_milestone_names ON svt_deviation_question_milestone_names.svt_deviation_question_id = svt_deviation_questions.id",
"JOIN milestone_names ON milestone_names.id = svt_deviation_question_milestone_names.milestone_name_id",
"JOIN svt_deviation_question_lifecycles ON svt_deviation_question_lifecycles.svt_deviation_question_id = svt_deviation_questions.id"],
:conditions => ["svt_deviation_question_lifecycles.lifecycle_id = ? and milestone_names.title = ?", self.milestone.project.lifecycle_object.id, self.milestone.name],
:group=>"svt_deviation_questions.id")
return questions
end
# Records +deliverable+ on this spider and creates the spider values for every
# applicable (active) question of each applicable activity.
# @param questions [Array] questions returned by #get_questions
# @param deliverable [SvtDeviationDeliverable]
# @param activities [Array] activities from #get_parameters
# @param psu_imported [Boolean] whether parameters came from a PSU reference
# @param is_added_by_hand [Boolean] deliverable was added manually by a user
# @param init_answers [Boolean] kept for caller compatibility; not read here
def add_deliverable(questions, deliverable, activities, psu_imported, is_added_by_hand=false, init_answers=false)
  # Check if we didn't already record this deliverable for this spider.
  new_spider_deliverable = SvtDeviationSpiderDeliverable.find(:first, :conditions=>["svt_deviation_spider_id = ? and svt_deviation_deliverable_id = ?", self.id, deliverable.id])
  if !new_spider_deliverable
    new_spider_deliverable = SvtDeviationSpiderDeliverable.new
    new_spider_deliverable.svt_deviation_spider_id = self.id
    new_spider_deliverable.svt_deviation_deliverable_id = deliverable.id
    # Carry the "not done" flag forward from earlier milestones.
    if deliverable_not_done?(deliverable.id)
      new_spider_deliverable.not_done = true
    end
  end
  new_spider_deliverable.is_added_by_hand = is_added_by_hand
  new_spider_deliverable.save
  project_id = self.milestone.project_id
  last_reference = SvtDeviationSpiderReference.find(:last, :conditions => ["project_id = ?", project_id], :order => "version_number asc")
  if activities and activities.count > 0
    activities.each do |activity|
      to_add = false
      setting = SvtDeviationSpiderSetting.find(:all, :conditions=>["svt_deviation_spider_reference_id = ? and deliverable_name = ? and activity_name = ?", last_reference, deliverable.name, activity.name])
      # A deliverable/activity pair is retained when added by hand, marked
      # standard ("Yes"), using another template, or when no PSU was imported.
      if setting and setting.count == 1
        if (new_spider_deliverable.is_added_by_hand or setting[0].answer_1 == "Yes" or setting[0].answer_3 == "Another template is used")
          to_add = true
        end
      elsif setting and setting.count > 1
        setting.each do |sett|
          if (new_spider_deliverable.is_added_by_hand or sett.answer_1 == "Yes" or sett.answer_3 == "Another template is used")
            to_add = true
          end
        end
      elsif !psu_imported or new_spider_deliverable.is_added_by_hand
        to_add = true
      end
      if to_add
        if questions and questions.count > 0
          questions.each do |question|
            if question.is_active and question.svt_deviation_activity_id == activity.id and question.svt_deviation_deliverable_id == deliverable.id
              # Fix: look up existing values by the spider-deliverable id.
              # The original compared svt_deviation_spider_deliverable_id
              # against deliverable.id (the wrong table's id), so existing
              # rows could be missed or wrongly matched.
              question_already_recorded = SvtDeviationSpiderValue.find(:first, :conditions=>["svt_deviation_spider_deliverable_id = ? and svt_deviation_question_id = ?", new_spider_deliverable.id, question.id])
              if !question_already_recorded
                new_deviation_spider_values = SvtDeviationSpiderValue.new
                new_deviation_spider_values.svt_deviation_question_id = question.id
                new_deviation_spider_values.svt_deviation_spider_deliverable_id = new_spider_deliverable.id
                # "Not done" deliverables get all answers preset to false.
                if new_spider_deliverable.not_done
                  new_deviation_spider_values.answer = false
                else
                  new_deviation_spider_values.answer = nil
                end
                new_deviation_spider_values.answer_reference = question.answer_reference
                new_deviation_spider_values.save
              end
            end
          end
        end
        to_add = false
      end
    end
  end
end
# ***
# GET
# ***
# Determines which deliverables/activities this spider should cover.
# With a current PSU reference, only activities/deliverables named in its
# settings (deliverables additionally marked standard or "another template")
# are returned and psu_imported is set. Without one, all active
# deliverables/activities that have questions for this lifecycle + milestone
# are returned.
# NOTE(review): the activity branch uses find(:all) — an Array — while the
# deliverable branch uses find(:first); `activities` therefore collects
# arrays, not records. Confirm whether :first was intended here.
# @return [Spider_parameters]
def get_parameters
activities = Array.new
deliverables = Array.new
psu_imported = false
# Check PSU
deviation_spider_reference = self.project.get_current_svt_deviation_spider_reference
if deviation_spider_reference
deviation_spider_reference.svt_deviation_spider_settings.each do |setting|
activity_parameter = SvtDeviationActivity.find(:all,
:joins => ["JOIN svt_deviation_questions ON svt_deviation_questions.svt_deviation_activity_id = svt_deviation_activities.id",
"JOIN svt_deviation_question_milestone_names ON svt_deviation_question_milestone_names.svt_deviation_question_id = svt_deviation_questions.id",
"JOIN milestone_names ON milestone_names.id = svt_deviation_question_milestone_names.milestone_name_id",
"JOIN svt_deviation_question_lifecycles ON svt_deviation_question_lifecycles.svt_deviation_question_id = svt_deviation_questions.id"],
:conditions => ["svt_deviation_question_lifecycles.lifecycle_id = ? and milestone_names.title = ? and svt_deviation_activities.is_active = ? and svt_deviation_activities.name = ?", self.project.lifecycle_object.id, self.milestone.name, true, setting.activity_name])
deliverable_parameter = SvtDeviationDeliverable.find(:first,
:joins => ["JOIN svt_deviation_questions ON svt_deviation_questions.svt_deviation_deliverable_id = svt_deviation_deliverables.id",
"JOIN svt_deviation_question_milestone_names ON svt_deviation_question_milestone_names.svt_deviation_question_id = svt_deviation_questions.id",
"JOIN milestone_names ON milestone_names.id = svt_deviation_question_milestone_names.milestone_name_id",
"JOIN svt_deviation_question_lifecycles ON svt_deviation_question_lifecycles.svt_deviation_question_id = svt_deviation_questions.id"],
:conditions => ["svt_deviation_question_lifecycles.lifecycle_id = ? and milestone_names.title = ? and svt_deviation_deliverables.is_active = ? and svt_deviation_deliverables.name = ?", self.project.lifecycle_object.id, self.milestone.name, true, setting.deliverable_name])
if activity_parameter and deliverable_parameter
if !activities.include? activity_parameter
activities << activity_parameter
end
# Only standard / another-template deliverables are parameterised.
if setting.answer_1 == "Yes" or setting.answer_3 == "Another template is used"
if !deliverables.include? deliverable_parameter
deliverables << deliverable_parameter
end
end
psu_imported = true
end
end
else
# No PSU reference: take everything active with questions for this
# lifecycle and milestone.
deliverables = SvtDeviationDeliverable.find(:all,
:joins => ["JOIN svt_deviation_questions ON svt_deviation_questions.svt_deviation_deliverable_id = svt_deviation_deliverables.id",
"JOIN svt_deviation_question_milestone_names ON svt_deviation_question_milestone_names.svt_deviation_question_id = svt_deviation_questions.id",
"JOIN milestone_names ON milestone_names.id = svt_deviation_question_milestone_names.milestone_name_id",
"JOIN svt_deviation_question_lifecycles ON svt_deviation_question_lifecycles.svt_deviation_question_id = svt_deviation_questions.id"],
:conditions => ["svt_deviation_question_lifecycles.lifecycle_id = ? and milestone_names.title = ? and svt_deviation_deliverables.is_active = ?", self.project.lifecycle_object.id, self.milestone.name, true],
:group => "svt_deviation_questions.svt_deviation_deliverable_id")
activities = SvtDeviationActivity.find(:all,
:joins => ["JOIN svt_deviation_questions ON svt_deviation_questions.svt_deviation_activity_id = svt_deviation_activities.id",
"JOIN svt_deviation_question_milestone_names ON svt_deviation_question_milestone_names.svt_deviation_question_id = svt_deviation_questions.id",
"JOIN milestone_names ON milestone_names.id = svt_deviation_question_milestone_names.milestone_name_id",
"JOIN svt_deviation_question_lifecycles ON svt_deviation_question_lifecycles.svt_deviation_question_id = svt_deviation_questions.id"],
:conditions => ["svt_deviation_question_lifecycles.lifecycle_id = ? and milestone_names.title = ? and svt_deviation_activities.is_active = ?", self.project.lifecycle_object.id, self.milestone.name, true],
:group => "svt_deviation_questions.svt_deviation_activity_id")
end
return_parameters = Spider_parameters.new
return_parameters.activities = activities
return_parameters.deliverables = deliverables
return_parameters.psu_imported = psu_imported
return return_parameters
end
# ***
# GET DELIVERABLES FROM PREVIOUS SPIDER
# ***
# Collects deliverables that were added by hand on the last consolidated
# spider of this and each earlier milestone, excluding deliverables already
# attached to this spider.
def get_deliverables_added_by_hand_in_previous_milestones
  deliverables_found = Array.new
  already_present = self.svt_deviation_spider_deliverables.map { |sd| sd.svt_deviation_deliverable }
  project_milestones = get_project_milestones_with_spider()
  self_index = get_spider_milestone_index()
  # Walk every milestone up to (and including) this spider's milestone; when
  # self_index is -1 the range is empty and nothing is scanned.
  (0..self_index).each do |i|
    pm = project_milestones[i]
    last_spider = SvtDeviationSpider.find(:last, :joins=>["JOIN svt_deviation_spider_consolidations ON svt_deviation_spiders.id = svt_deviation_spider_consolidations.svt_deviation_spider_id"], :conditions =>["milestone_id = ?", pm.id] )
    next if last_spider == nil
    last_spider.svt_deviation_spider_deliverables.each do |spider_deliverable|
      if spider_deliverable.is_added_by_hand and !already_present.include? spider_deliverable.svt_deviation_deliverable
        deliverables_found << spider_deliverable.svt_deviation_deliverable
      end
    end
  end
  return deliverables_found
end
# Return a list of deliverables are not be completed on the previous milestone.
# This requirement is not needed anymore.
# Returns deliverables from the previous milestone's last consolidated spider
# that were left incomplete (a nil answer, or an answer differing from its
# reference) and are not already attached to this spider.
# NOTE(review): the comments above mark this requirement as obsolete —
# candidate for removal if no caller remains.
def get_deliverables_not_completed
deliverables_availables = Array.new
deviation_deliverables = Array.new
self.svt_deviation_spider_deliverables.each do |spider_deliverable|
deviation_deliverables << spider_deliverable.svt_deviation_deliverable
end
# Check last milestone
project_milestones = get_project_milestones_with_spider()
self_index = get_spider_milestone_index()
# If the self milestone was found and it's not the first
if self_index > 0
previous_milestone = project_milestones[self_index-1]
last_spider = SvtDeviationSpider.find(:last, :joins=>["JOIN svt_deviation_spider_consolidations ON svt_deviation_spiders.id = svt_deviation_spider_consolidations.svt_deviation_spider_id"], :conditions =>["milestone_id = ?", previous_milestone.id] )
# Check for each deliverable if one question is to false, if this is the case, we add the deliverable.
if last_spider != nil
last_spider.svt_deviation_spider_deliverables.each do |spider_deliverable|
deliverable_not_completed = false
# Incomplete when an answer is nil or differs from its reference.
spider_deliverable.svt_deviation_spider_values.each do |spider_value|
if spider_value.answer == nil or spider_value.answer != spider_value.answer_reference
deliverable_not_completed = true
end
end
# Add the deliverable as not completed if not completed AND not already present in the current spider.
if deliverable_not_completed and !deviation_deliverables.include? spider_deliverable.svt_deviation_deliverable
deliverables_availables << spider_deliverable.svt_deviation_deliverable
end
end
end
end
return deliverables_availables
end
# True when +deliverable_id+ was marked not_done on the last consolidated
# spider of any milestone strictly before this spider's milestone.
# Fix: dropped a dead local array (deviation_deliverables) that was populated
# from an extra association scan but never read.
# @param deliverable_id [Integer] SvtDeviationDeliverable id
# @return [Boolean]
def deliverable_not_done?(deliverable_id)
  project_milestones = get_project_milestones_with_spider()
  self_index = get_spider_milestone_index()
  deliverable_not_done = false
  if self_index > 0
    (0..(self_index - 1)).each do |i|
      milestone_to_analyze = project_milestones[i]
      spider_to_analyze = SvtDeviationSpider.find(:last, :joins=>["JOIN svt_deviation_spider_consolidations ON svt_deviation_spiders.id = svt_deviation_spider_consolidations.svt_deviation_spider_id"], :conditions =>["milestone_id = ?", milestone_to_analyze.id] )
      if spider_to_analyze != nil
        spider_deliverable_to_analyze = SvtDeviationSpiderDeliverable.find(:first, :conditions => ["svt_deviation_spider_id = ? and svt_deviation_deliverable_id = ?", spider_to_analyze.id, deliverable_id])
        if spider_deliverable_to_analyze != nil and spider_deliverable_to_analyze.not_done == true
          deliverable_not_done = true
        end
      end
    end
  end
  return deliverable_not_done
end
# ***
# GET MILESTONES
# ***
# Return a list of milestones sorted and available for spiders
# Milestones of this project that can carry a spider: eligible, not a quality
# gate ('QG…') and not virtual, in the project's sorted order.
def get_project_milestones_with_spider
  self.milestone.project.sorted_milestones.select do |m|
    m.is_eligible_for_spider? && m.name[0..1] != 'QG' && m.is_virtual == false
  end
end
# Return the index of the milestone of the current spider from the array of sorted milestones
# Index of this spider's milestone within get_project_milestones_with_spider,
# or -1 when it is absent. As before, the LAST matching index wins.
def get_spider_milestone_index
  idx = -1
  get_project_milestones_with_spider().each_with_index do |sorted_milestone, i|
    idx = i if sorted_milestone.id == self.milestone.id
  end
  return idx
end
# ***
# CHARTS
# ***
# Builds a Chart struct for one meta-activity: one data point per active
# deliverable, valued as the ratio of yes-answers to questions for that
# deliverable; the reference series is a flat 1.0 line. Relies on the query
# being ordered by deliverable id so rows of one deliverable are contiguous.
# Fix: reset counters with 0.0 (was 0) for consistency with the float
# arithmetic used everywhere else in this class.
# @param meta_activity_id meta-activity id (callers also pass an AR object,
#   which the :conditions quoting accepts)
# @return [Chart]
def generate_deliverable_chart(meta_activity_id)
  chart_questions = SvtDeviationSpiderValue.find(:all,
    :joins => [
      "JOIN svt_deviation_spider_deliverables ON svt_deviation_spider_deliverables.id = svt_deviation_spider_values.svt_deviation_spider_deliverable_id",
      "JOIN svt_deviation_deliverables ON svt_deviation_deliverables.id = svt_deviation_spider_deliverables.svt_deviation_deliverable_id",
      "JOIN svt_deviation_questions ON svt_deviation_questions.id = svt_deviation_spider_values.svt_deviation_question_id",
      "JOIN svt_deviation_activities ON svt_deviation_activities.id = svt_deviation_questions.svt_deviation_activity_id"
    ],
    :conditions => ["svt_deviation_spider_deliverables.svt_deviation_spider_id = ? and svt_deviation_activities.svt_deviation_meta_activity_id = ? and svt_deviation_deliverables.is_active = ?", self.id, meta_activity_id, true],
    :order => "svt_deviation_deliverables.id")
  chart = Chart.new
  if chart_questions
    chart.meta_activity_name = SvtDeviationMetaActivity.find(:first, :conditions => ["id = ?", meta_activity_id]).name
    chart.titles = Array.new
    chart.points = Array.new
    chart.points_ref = Array.new
    current_deliverable = nil
    current_yes_count = 0.0
    current_question_count = 0.0
    chart_questions.each do |question|
      # Flush the finished deliverable when the row's deliverable changes.
      if (current_deliverable == nil) or (current_deliverable.id != question.svt_deviation_spider_deliverable.svt_deviation_deliverable.id)
        if current_deliverable != nil
          chart.titles << current_deliverable.name
          chart.points << (current_yes_count / current_question_count)
          chart.points_ref << 1.0
        end
        current_deliverable = question.svt_deviation_spider_deliverable.svt_deviation_deliverable
        current_yes_count = 0.0
        current_question_count = 0.0
      end
      current_yes_count = current_yes_count + 1.0 if question.answer == true
      current_question_count = current_question_count + 1.0
    end
    # Flush the last deliverable; pad tiny charts so the spider rendering has
    # at least three points.
    if current_deliverable
      chart.titles << current_deliverable.name
      chart.points << (current_yes_count / current_question_count)
      chart.points_ref << 1.0
      if chart.titles.count <= 2
        chart.titles << ""
        chart.points << 0.0
        chart.points_ref << 0.0
      end
    end
  end
  return chart
end
# Builds a Chart struct for one meta-activity: one data point per active
# activity, valued as the ratio of yes-answers to questions for that activity;
# the reference series is a flat 1.0 line. Relies on the query being ordered
# by activity id so rows of one activity are contiguous.
# @param meta_activity_id meta-activity id (callers also pass an AR object)
# @return [Chart]
def generate_activity_chart(meta_activity_id)
chart_questions = SvtDeviationSpiderValue.find(:all,
:joins => ["JOIN svt_deviation_spider_deliverables ON svt_deviation_spider_deliverables.id = svt_deviation_spider_values.svt_deviation_spider_deliverable_id",
"JOIN svt_deviation_questions ON svt_deviation_questions.id = svt_deviation_spider_values.svt_deviation_question_id",
"JOIN svt_deviation_activities ON svt_deviation_activities.id = svt_deviation_questions.svt_deviation_activity_id"],
:conditions => ["svt_deviation_spider_deliverables.svt_deviation_spider_id = ? and svt_deviation_activities.svt_deviation_meta_activity_id = ? and svt_deviation_activities.is_active = ?", self.id, meta_activity_id, true],
:order => "svt_deviation_activities.id")
chart = Chart.new
if chart_questions
meta_activity_name = SvtDeviationMetaActivity.find(:first, :conditions => ["id = ?", meta_activity_id]).name
chart.titles = Array.new
chart.points = Array.new
chart.points_ref = Array.new
chart.meta_activity_name = meta_activity_name
current_activity = nil
current_yes_count = 0.0
current_question_count = 0.0
chart_questions.each do |question|
# Flush the finished activity when the row's activity changes.
if (current_activity == nil) or (current_activity.id != question.svt_deviation_question.svt_deviation_activity_id)
if current_activity != nil
chart.titles << current_activity.name
chart.points << (current_yes_count / current_question_count)
chart.points_ref << 1.0
end
current_activity = question.svt_deviation_question.svt_deviation_activity
current_yes_count = 0.0
current_question_count = 0.0
end
# Question
if question.answer == true
current_yes_count = current_yes_count + 1.0
end
current_question_count = current_question_count + 1.0
end
# Flush the last activity; pad tiny charts to at least three points.
if current_activity
chart.titles << current_activity.name
chart.points << (current_yes_count / current_question_count)
chart.points_ref << 1.0
if chart.titles.count <= 2
chart.titles << ""
chart.points << 0.0
chart.points_ref << 0.0
end
end
end
return chart
end
# Builds one deliverable chart (Chart struct) per active meta-activity.
def generate_deliverable_charts
  SvtDeviationMetaActivity.find(:all, :conditions => ["is_active = 1"]).map do |meta_activity|
    generate_deliverable_chart(meta_activity)
  end
end
# Builds one activity chart (Chart struct) per active meta-activity.
def generate_activity_charts
  SvtDeviationMetaActivity.find(:all, :conditions => ["is_active = 1"]).map do |meta_activity|
    generate_activity_chart(meta_activity)
  end
end
# Builds a Google pie chart summarising template adherence for the project's
# latest spider reference: "Standard" (answer_1 == "Yes"), "Forecast deviation"
# (answer_1 == "No" and answer_2 == "No") and "Forecast customization"
# (everything else), as percentages of the de-duplicated settings (one
# highest-weight setting per deliverable name, see #update_setting_to_count_array).
# With no reference or no settings, the chart shows 100% standard.
# @return [GoogleChart::PieChart]
def generate_pie_chart
standard = customization = deviation = total_number = 0
setting_to_count_array = Array.new
# Latest reference version recorded for this project.
reference = SvtDeviationSpiderReference.find(:last, :conditions=>["project_id = ?", self.milestone.project_id], :order=>"version_number")
if reference
settings = SvtDeviationSpiderSetting.find(:all, :conditions=>["svt_deviation_spider_reference_id = ?", reference])
if settings.count > 0
# Keep only the highest-weight setting per deliverable name.
settings.each do |setting|
setting_to_count_array = update_setting_to_count_array(setting_to_count_array, setting)
end
# Classify each retained setting into one of the three slices.
setting_to_count_array.each do |setting_to_count|
if setting_to_count.setting.answer_1 == "Yes"
standard = standard + 1
elsif setting_to_count.setting.answer_1 == "No" and setting_to_count.setting.answer_2 == "No"
deviation = deviation + 1
else
customization = customization + 1
end
total_number = total_number + 1
end
# Guard against division by zero below.
if total_number == 0
total_number = 1
end
standard = standard.to_f / total_number.to_f * 100
customization = customization.to_f / total_number.to_f * 100
deviation = deviation.to_f / total_number.to_f * 100
else
standard = 100
end
else
standard = 100
end
lifecycle_name = Lifecycle.find(:first, :conditions=>["id = ?", milestone.project.lifecycle_id]).name
chart = GoogleChart::PieChart.new('500x220', "Result of "+ lifecycle_name +" adherence") do |pie_chart|
pie_chart.data "Forecast deviation " + deviation.round.to_s + "%", deviation, "0101DF"
pie_chart.data "Forecast customization " + customization.round.to_s + "%", customization, "5858FA"
pie_chart.data "Standard " + standard.round.to_s + "%", standard, "A9A9F5"
end
return chart
end
# Builds a Google pie chart of PQP adherence from consolidation records:
# "Standard" is computed by #get_devia_standard, "Deviation" is the remainder.
# @param consolidations [Enumerable] consolidation records (see #get_devia_standard)
# @return [GoogleChart::PieChart]
def generate_devia_pie_chart(consolidations)
standard = get_devia_standard(consolidations)
deviation = 100 - standard
# NOTE(review): lifecycle_name is looked up but never used in the chart
# title below — confirm whether the title should include it.
lifecycle_name = Lifecycle.find(:first, :conditions=>["id = ?", self.milestone.project.lifecycle_id]).name
chart = GoogleChart::PieChart.new('500x220', "Result of PQP adherence") do |pie_chart|
pie_chart.data "Standard " + standard.round.to_s + "%", standard, "2E2EFE"
pie_chart.data "Deviation " + deviation.round.to_s + "%", deviation, "DF0101"
end
return chart
end
# Computes the percentage of consolidated deliverables that adhere to the
# standard (PQP) for this spider's project. A deliverable counts as standard
# when its consolidation score is 2 or 3 AND the latest spider reference marks
# it as using the standard template (answer_1 == "Yes") or an equivalent one
# (answer_3 == "Another template is used"). Duplicate consolidations for the
# same deliverable name are counted once.
# Fix 1: the original chained assignment `a = b = c = Array.new` made the
# three duplicate-tracking lists aliases of ONE array, merging the buckets.
# Fix 2: returns 0.0 instead of NaN when every consolidation is a duplicate.
# @param consolidations [Enumerable] records exposing #score and #deliverable
# @return [Float] adherence percentage in 0..100
def get_devia_standard(consolidations)
  standard_number = number_of_duplicate = 0
  duplicate_conso = Array.new
  duplicate_conso_not_set = Array.new
  duplicate_conso_set_no = Array.new
  last_reference = SvtDeviationSpiderReference.find(:last, :conditions => ["project_id = ?", self.milestone.project_id], :order => "version_number asc")
  consolidations.each do |conso|
    duplication_to_delete = false
    as_been_added = false
    if conso.score == 3 or conso.score == 2
      SvtDeviationSpiderSetting.find(:all, :conditions => ["svt_deviation_spider_reference_id = ? and deliverable_name = ?", last_reference, conso.deliverable.name]).each do |sett|
        if sett.answer_1 == "Yes" or sett.answer_3 == "Another template is used"
          if !duplicate_conso.include?(conso.deliverable.name)
            standard_number = standard_number + 1
            duplicate_conso.push(conso.deliverable.name)
            as_been_added = true
          else
            duplication_to_delete = true
          end
        else
          if !duplicate_conso_set_no.include?(conso.deliverable.name)
            duplicate_conso_set_no.push(conso.deliverable.name)
          else
            duplication_to_delete = true
          end
        end
      end
    else
      if !duplicate_conso_not_set.include?(conso.deliverable.name)
        duplicate_conso_not_set.push(conso.deliverable.name)
      else
        duplication_to_delete = true
      end
    end
    # Each duplicated consolidation reduces the effective deliverable count.
    if duplication_to_delete and !as_been_added
      number_of_duplicate = number_of_duplicate + 1
    end
  end
  number_of_deliverables = consolidations.count - number_of_duplicate
  # Avoid 0.0/0.0 (NaN) when the list was empty or all duplicates.
  return 0.0 if number_of_deliverables <= 0
  return standard_number.to_f / number_of_deliverables.to_f * 100
end
# Merges +setting+ into +setting_to_count_array+, keeping only the
# highest-weighted setting per deliverable name (see #get_weight).
# An existing entry with lower weight is removed; ties keep the existing entry.
# Fix: the original deleted from the array while `each`-iterating the SAME
# array, which makes Ruby's iterator skip the element after each deletion.
# Also removed the unused local `weight_from_duplicate`.
# @param setting_to_count_array [Array<Setting_to_chart>] accumulator (mutated in place)
# @param setting [SvtDeviationSpiderSetting] candidate setting to merge
# @return [Array<Setting_to_chart>] the accumulator
def update_setting_to_count_array(setting_to_count_array, setting)
  setting_to_count = Setting_to_chart.new
  setting_to_count.setting = setting
  setting_to_count.weight = get_weight(setting)
  deliverable_name = setting.deliverable_name
  keep_new = true
  # Iterate over a snapshot so deletions cannot disturb the traversal.
  setting_to_count_array.dup.each do |sett|
    next unless sett.setting.deliverable_name == deliverable_name
    if setting_to_count.weight > sett.weight
      setting_to_count_array.delete(sett)
    else
      keep_new = false
    end
  end
  setting_to_count_array.push(setting_to_count) if keep_new
  return setting_to_count_array
end
# Maps a spider setting to its comparison weight:
# 4 = standard ("Yes"), 1 = explicit double "No", 3 = another template, 2 = other.
# Branch order matters: answer_2 is checked before answer_3, as before.
def get_weight(setting)
  if setting.answer_1 == "Yes"
    4
  elsif setting.answer_2 == "No"
    1
  elsif setting.answer_3 == "Another template is used"
    3
  else
    2
  end
end
# True when a consolidation-temp row still exists for this spider, i.e. the
# spider has pending, not-yet-consolidated data.
# Fix: the original assigned a local `result` but the method's last expression
# was the `if`, so it returned nil (not false) when no temp record existed.
# nil is still falsy, but callers now get a real boolean.
def is_not_consolidated?
  temp = SvtDeviationSpiderConsolidationTemp.find(:first, :conditions=>["svt_deviation_spider_id = ?", self.id])
  return !temp.nil?
end
# Percentage of yes-answers across all spider values of all deliverables of
# this spider (integer percentage, truncated — unchanged from the original).
# Fix: guard against ZeroDivisionError when the spider has no values yet.
# @return [Integer] maturity in 0..100 (0 when there are no values)
def get_deviation_maturity
  total = 0
  yes = 0
  SvtDeviationSpiderDeliverable.find(:all, :conditions=>["svt_deviation_spider_id = ?", self.id]).each do |deliverable|
    SvtDeviationSpiderValue.find(:all, :conditions=>["svt_deviation_spider_deliverable_id = ?", deliverable.id]).each do |value|
      total = total + 1
      yes = yes + 1 if value.answer == true
    end
  end
  return 0 if total == 0
  return yes * 100 / total
end
end
|
# Capistrano 3 deployment configuration for rails_survey.
lock '3.2.1'
set :application, 'rails_survey'
set :scm, :git
set :repo_url, 'git@github.com:mnipper/rails_survey.git'
set :use_sudo, false
set :rails_env, 'production'
set :deploy_via, :copy
set :ssh_options, { :forward_agent => true, :port => 2222 }
set :pty, false
set :format, :pretty
set :keep_releases, 5
# Files and directories shared between releases.
set :linked_files, %w{config/database.yml config/secret_token.txt config/local_env.yml}
set :linked_dirs, %w(bin log tmp/pids tmp/cache tmp/sockets vendor/bundle)
set :linked_dirs, fetch(:linked_dirs) + %w{ files updates }
set :branch, 'master'
# Sidekiq process configuration.
set :sidekiq_pid, File.join(shared_path, 'tmp', 'pids', 'sidekiq.pid')
set :sidekiq_log, File.join(shared_path, 'log', 'sidekiq.log')
set :sidekiq_concurrency, 25
set :sidekiq_processes, 2
#set :sidekiq_service_name, {:application}_{:rails_env}
namespace :deploy do
  desc 'Restart Application'
  task :restart do
    desc "restart redis"
    on roles(:app) do
      execute "sudo /etc/init.d/redis-server restart"
    end
    desc "restart node"
    on roles(:app), in: :sequence, wait: 5 do
      execute "sudo restart realtime-app || sudo start realtime-app"
    end
    desc "restart phusion passenger"
    on roles(:app), in: :sequence, wait: 5 do
      execute :touch, current_path.join('tmp/restart.txt')
    end
  end
  task :npm_install do
    on roles(:app) do
      execute "cd #{release_path}/node && sudo rm -rf node_modules && npm install"
    end
  end
  after :finishing, 'deploy:cleanup'
  after 'deploy:publishing', 'deploy:restart'
  after 'deploy:updated', 'deploy:npm_install'
  #after 'sidekiq:start', 'load:defaults'
  #after 'load:defaults', 'sidekiq:monit:config'
  #after 'sidekiq:monit:config', 'sidekiq:monit:restart'
  #after 'sidekiq:monit:restart', 'sidekiq:monit:monitor'
end
namespace :sidekiq do
  desc "Restart sidekiq"
  # Fix: Capistrano 3 tasks take no :roles / :on_no_matching_servers task
  # options, and commands must run inside an `on roles(...)` block; the old
  # Capistrano 2 form (`task :restart, :roles => :app, ...` with a bare
  # `execute`) does not work under cap 3.
  task :restart do
    on roles(:app) do
      execute "sudo /usr/bin/monit restart sidekiq"
    end
  end
end
Override the sidekiq:restart task using Capistrano 3 syntax (run the command inside an `on roles(:app)` block).
# Capistrano 3 deployment configuration for rails_survey.
lock '3.2.1'
set :application, 'rails_survey'
set :scm, :git
set :repo_url, 'git@github.com:mnipper/rails_survey.git'
set :use_sudo, false
set :rails_env, 'production'
set :deploy_via, :copy
set :ssh_options, { :forward_agent => true, :port => 2222 }
set :pty, false
set :format, :pretty
set :keep_releases, 5
# Files and directories shared between releases.
set :linked_files, %w{config/database.yml config/secret_token.txt config/local_env.yml}
set :linked_dirs, %w(bin log tmp/pids tmp/cache tmp/sockets vendor/bundle)
set :linked_dirs, fetch(:linked_dirs) + %w{ files updates }
set :branch, 'master'
# Sidekiq process configuration.
set :sidekiq_pid, File.join(shared_path, 'tmp', 'pids', 'sidekiq.pid')
set :sidekiq_log, File.join(shared_path, 'log', 'sidekiq.log')
set :sidekiq_concurrency, 25
set :sidekiq_processes, 2
#set :sidekiq_service_name, {:application}_{:rails_env}
namespace :deploy do
desc 'Restart Application'
task :restart do
desc "restart redis"
on roles(:app) do
execute "sudo /etc/init.d/redis-server restart"
end
desc "restart node"
on roles(:app), in: :sequence, wait: 5 do
execute "sudo restart realtime-app || sudo start realtime-app"
end
desc "restart phusion passenger"
on roles(:app), in: :sequence, wait: 5 do
execute :touch, current_path.join('tmp/restart.txt')
end
end
task :npm_install do
on roles(:app) do
execute "cd #{release_path}/node && sudo rm -rf node_modules && npm install"
end
end
# Hook ordering: cleanup after finishing, app restart after publishing,
# npm install after the release is updated.
after :finishing, 'deploy:cleanup'
after 'deploy:publishing', 'deploy:restart'
after 'deploy:updated', 'deploy:npm_install'
#after 'sidekiq:start', 'load:defaults'
#after 'load:defaults', 'sidekiq:monit:config'
#after 'sidekiq:monit:config', 'sidekiq:monit:restart'
#after 'sidekiq:monit:restart', 'sidekiq:monit:monitor'
end
# Sidekiq is supervised by monit on the app servers.
namespace :sidekiq do
desc "Restart sidekiq"
task :restart do
on roles(:app) do
execute "sudo /usr/bin/monit restart sidekiq"
end
end
end
|
require 'rack/protection'
require 'digest'
require 'logger'
require 'uri'
module Rack
  module Protection
    # Common base class for protection middlewares: holds option handling, the
    # request-rejection flow (#call -> #accepts? -> #react) and shared helpers
    # (session access, logging, encryption, referrer parsing).
    class Base
      DEFAULT_OPTIONS = {
        :reaction => :default_reaction, :logging => true,
        :message => 'Forbidden', :encryptor => Digest::SHA1,
        :session_key => 'rack.session', :status => 403,
        :allow_empty_referrer => true
      }

      attr_reader :app, :options

      # Class-level DSL: subclasses extend the default option hash.
      def self.default_options(options)
        define_method(:default_options) { super().merge(options) }
      end

      # Class-level DSL: subclasses pick their reaction method (e.g. :deny).
      def self.default_reaction(reaction)
        alias_method(:default_reaction, reaction)
      end

      def default_options
        DEFAULT_OPTIONS
      end

      def initialize(app, options = {})
        @app, @options = app, default_options.merge(options)
      end

      # Safe (read-only) HTTP methods are never treated as attacks.
      def safe?(env)
        %w[GET HEAD OPTIONS TRACE].include? env['REQUEST_METHOD']
      end

      # Subclasses must implement; return false to reject the request.
      def accepts?(env)
        raise NotImplementedError, "#{self.class} implementation pending"
      end

      # Reject via the configured reaction when #accepts? fails; otherwise
      # (or when the reaction yields no response triple) pass downstream.
      def call(env)
        unless accepts? env
          warn env, "attack prevented by #{self.class}"
          result = react env
        end
        result or app.call(env)
      end

      # Only a well-formed [status, headers, body] triple short-circuits #call.
      def react(env)
        result = send(options[:reaction], env)
        result if Array === result and result.size == 3
      end

      def warn(env, message)
        return unless options[:logging]
        l = options[:logger] || env['rack.logger'] || ::Logger.new(env['rack.errors'])
        l.warn(message)
      end

      def deny(env)
        [options[:status], {'Content-Type' => 'text/plain'}, [options[:message]]]
      end

      def session?(env)
        env.include? options[:session_key]
      end

      def session(env)
        return env[options[:session_key]] if session? env
        fail "you need to set up a session middleware *before* #{self.class}"
      end

      def drop_session(env)
        session(env).clear if session? env
      end

      def referrer(env)
        ref = env['HTTP_REFERER'].to_s
        return if !options[:allow_empty_referrer] and ref.empty?
        URI.parse(ref).host || Request.new(env).host
      end

      # 32 random bytes as hex; falls back to Kernel#rand when SecureRandom is
      # unavailable or unsupported by the platform.
      # Fix: the rescue clause read `NotImpelentedError` (typo) — referencing
      # that undefined constant raised NameError instead of catching
      # SecureRandom's NotImplementedError.
      def random_string(secure = defined? SecureRandom)
        secure ? SecureRandom.hex(32) : "%032x" % rand(2**128-1)
      rescue NotImplementedError
        random_string false
      end

      def encrypt(value)
        options[:encryptor].hexdigest value.to_s
      end

      alias default_reaction deny
    end
  end
end
NotImpelentedError typo fix
require 'rack/protection'
require 'digest'
require 'logger'
require 'uri'
# Abstract base class shared by all rack-protection middlewares.
# Subclasses implement #accepts?(env); rejected requests trigger the
# configured reaction (default: #deny -> 403 "Forbidden").
module Rack
module Protection
class Base
DEFAULT_OPTIONS = {
:reaction => :default_reaction, :logging => true,
:message => 'Forbidden', :encryptor => Digest::SHA1,
:session_key => 'rack.session', :status => 403,
:allow_empty_referrer => true
}
attr_reader :app, :options
# Class macro: subclasses merge extra defaults over DEFAULT_OPTIONS.
def self.default_options(options)
define_method(:default_options) { super().merge(options) }
end
# Class macro: subclasses choose another method as the default reaction.
def self.default_reaction(reaction)
alias_method(:default_reaction, reaction)
end
def default_options
DEFAULT_OPTIONS
end
def initialize(app, options = {})
@app, @options = app, default_options.merge(options)
end
# Idempotent HTTP methods are treated as safe.
def safe?(env)
%w[GET HEAD OPTIONS TRACE].include? env['REQUEST_METHOD']
end
# Subclass responsibility: true when the request may pass through.
def accepts?(env)
raise NotImplementedError, "#{self.class} implementation pending"
end
def call(env)
unless accepts? env
warn env, "attack prevented by #{self.class}"
result = react env
end
result or app.call(env)
end
# Invokes the configured reaction; only a full Rack triple short-circuits
# the request, anything else falls through to the app.
def react(env)
result = send(options[:reaction], env)
result if Array === result and result.size == 3
end
def warn(env, message)
return unless options[:logging]
l = options[:logger] || env['rack.logger'] || ::Logger.new(env['rack.errors'])
l.warn(message)
end
# Reaction: respond with the configured status and message.
def deny(env)
[options[:status], {'Content-Type' => 'text/plain'}, [options[:message]]]
end
def session?(env)
env.include? options[:session_key]
end
def session(env)
return env[options[:session_key]] if session? env
fail "you need to set up a session middleware *before* #{self.class}"
end
# Reaction: clear the session rather than blocking the request.
def drop_session(env)
session(env).clear if session? env
end
def referrer(env)
ref = env['HTTP_REFERER'].to_s
return if !options[:allow_empty_referrer] and ref.empty?
URI.parse(ref).host || Request.new(env).host
end
# 64 hex chars; falls back to Kernel#rand where SecureRandom is unsupported.
def random_string(secure = defined? SecureRandom)
secure ? SecureRandom.hex(32) : "%032x" % rand(2**128-1)
rescue NotImplementedError
random_string false
end
def encrypt(value)
options[:encryptor].hexdigest value.to_s
end
alias default_reaction deny
end
end
end
|
# Homebrew formula for mruby 2.1.2.
class Mruby < Formula
desc "Lightweight implementation of the Ruby language"
homepage "https://mruby.org/"
url "https://github.com/mruby/mruby/archive/2.1.2.tar.gz"
sha256 "4dc0017e36d15e81dc85953afb2a643ba2571574748db0d8ede002cefbba053b"
license "MIT"
head "https://github.com/mruby/mruby.git"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "e46379c5d7600c8783732c5a4a93b9c3ce26370865e34f5b1f4c6459d2e8d94e"
sha256 cellar: :any_skip_relocation, big_sur: "e1d5fd18c921cf19742a2c484f472d04b41fe84297c6c193ffc932cbc6c375df"
sha256 cellar: :any_skip_relocation, catalina: "b6a638c04a991a249a737d0ad0d9f7fac31d35a7b2fd3c8507304e68f13bc983"
sha256 cellar: :any_skip_relocation, mojave: "1f31eadc8801f65d42e2cfec43cda336324daf86978529dfc76338e6b094b16c"
sha256 cellar: :any_skip_relocation, high_sierra: "5b5dca177d9fdd6a2b543c9aeb2117e0d112d1578fadbb709d8565d83b21d6a7"
end
# Bison regenerates mruby's parser during the build.
depends_on "bison" => :build
# The build is driven by Ruby rake scripts.
uses_from_macos "ruby"
def install
system "make"
# Host-target build artifacts land under build/host.
cd "build/host/" do
lib.install Dir["lib/*.a"]
prefix.install %w[bin mrbgems mrblib]
end
prefix.install "include"
end
test do
# Smoke test: the interpreter evaluates a trivial program.
system "#{bin}/mruby", "-e", "true"
end
end
mruby: build with full-core
# Homebrew formula for mruby 2.1.2, revision 1: rebuilt with the
# "full-core" gembox so all bundled mrbgems are compiled in.
class Mruby < Formula
desc "Lightweight implementation of the Ruby language"
homepage "https://mruby.org/"
url "https://github.com/mruby/mruby/archive/2.1.2.tar.gz"
sha256 "4dc0017e36d15e81dc85953afb2a643ba2571574748db0d8ede002cefbba053b"
license "MIT"
revision 1
head "https://github.com/mruby/mruby.git"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "e46379c5d7600c8783732c5a4a93b9c3ce26370865e34f5b1f4c6459d2e8d94e"
sha256 cellar: :any_skip_relocation, big_sur: "e1d5fd18c921cf19742a2c484f472d04b41fe84297c6c193ffc932cbc6c375df"
sha256 cellar: :any_skip_relocation, catalina: "b6a638c04a991a249a737d0ad0d9f7fac31d35a7b2fd3c8507304e68f13bc983"
sha256 cellar: :any_skip_relocation, mojave: "1f31eadc8801f65d42e2cfec43cda336324daf86978529dfc76338e6b094b16c"
sha256 cellar: :any_skip_relocation, high_sierra: "5b5dca177d9fdd6a2b543c9aeb2117e0d112d1578fadbb709d8565d83b21d6a7"
end
# Bison regenerates mruby's parser during the build.
depends_on "bison" => :build
uses_from_macos "ruby"
def install
# Switch the gembox from "default" to "full-core".
# NOTE(review): /default/ replaces every occurrence of "default" in
# build_config.rb — confirm only the gembox line matches.
inreplace "build_config.rb", /default/, "full-core"
system "make"
cd "build/host/" do
lib.install Dir["lib/*.a"]
prefix.install %w[bin mrbgems mrblib]
end
prefix.install "include"
end
test do
# Smoke test: the interpreter evaluates a trivial program.
system "#{bin}/mruby", "-e", "true"
end
end
|
module Util
# Manages PostgreSQL accounts for AACT public-database users.
# NOTE(review): usernames and passwords are interpolated directly into SQL
# strings throughout — they must be validated/sanitized upstream or these
# calls are injectable.
class UserDbManager < DbManager
# Class-level convenience wrapper around #change_password.
def self.change_password(user, pwd)
new.change_password(user, pwd)
end
# Creates the db account with login disabled until email confirmation.
# Returns true on success, false (with user.errors populated) on failure.
def create_user_account(user)
begin
return false if !can_create_user_account?(user)
pub_con.execute("create user \"#{user.username}\" password '#{user.password}';")
pub_con.execute("alter user #{user.username} nologin;") # can't login until they confirm their email
return true
rescue => e
user.errors.add(:base, e.message)
return false
end
end
def can_create_user_account?(user)
return false if user_account_exists?(user.username)
return false if !public_db_accessible?
return true
end
# Reserved names and existing pg users/groups count as taken.
def user_account_exists?(username)
return true if username == 'postgres'
return true if username == 'ctti'
pub_con.execute("SELECT usename FROM pg_catalog.pg_user where usename = '#{username}' UNION
SELECT groname FROM pg_catalog.pg_group where groname = '#{username}'").count > 0
end
# Drops the account after reassigning/dropping any objects it owns.
def remove_user(username)
begin
return false if !user_account_exists?(username)
revoke_db_privs(username)
pub_con.execute("reassign owned by #{username} to postgres;")
pub_con.execute("drop owned by #{username};")
pub_con.execute("drop user #{username};")
return true
rescue => e
raise e
end
end
# Sets a new password; errors are reported on the user object.
def change_password(user,pwd)
begin
pub_con.execute("alter user \"#{user.username}\" password '#{pwd}';")
rescue => e
user.errors.add(:base, e.message)
end
end
# Dumps user table/events plus account globals to dated SQL files and
# emails a backup report.
def backup_user_info
fm=Util::FileManager.new
file_prefix="#{fm.backup_directory}/#{Time.zone.now.strftime('%Y%m%d')}"
table_file_name="#{file_prefix}_aact_users_table.sql"
event_file_name="#{file_prefix}_aact_user_events.sql"
account_file_name="#{file_prefix}_aact_user_accounts.sql"
# Overwrite any dump already made today.
File.delete(table_file_name) if File.exist?(table_file_name)
File.delete(event_file_name) if File.exist?(event_file_name)
File.delete(account_file_name) if File.exist?(account_file_name)
log "dumping Users table..."
cmd="pg_dump --no-owner --host=localhost -U #{ENV['DB_SUPER_USERNAME']} --table=Users --data-only aact_admin > #{table_file_name}"
run_command_line(cmd)
log "dumping User events..."
cmd="pg_dump --no-owner --host=localhost -U #{ENV['DB_SUPER_USERNAME']} --table=User_Events --data-only aact_admin > #{event_file_name}"
run_command_line(cmd)
log "dumping User accounts..."
cmd="/opt/rh/rh-postgresql96/root/bin/pg_dumpall -U #{ENV['DB_SUPER_USERNAME']} -h #{public_host_name} --globals-only > #{account_file_name}"
run_command_line(cmd)
event=Admin::UserEvent.new({:event_type=>'backup', :file_names=>" #{table_file_name}, #{event_file_name}, #{account_file_name}" })
UserMailer.report_backup(event).deliver_now
end
# Grants read-only access to the ctgov schema and enables login.
def grant_db_privs(username)
pub_con.execute("alter role \"#{username}\" IN DATABASE aact set search_path = ctgov;")
pub_con.execute("grant connect on database aact to \"#{username}\";")
pub_con.execute("grant usage on schema ctgov TO \"#{username}\";")
pub_con.execute("grant select on all tables in schema ctgov to \"#{username}\";")
pub_con.execute("alter user \"#{username}\" login;")
end
# Disables login after kicking any live sessions for the user.
def revoke_db_privs(username)
terminate_sessions_for(username)
pub_con.execute("alter user #{username} nologin;")
end
def terminate_sessions_for(username)
con.select_all("select * from pg_stat_activity order by pid;").each { |session|
if session['usename']=="#{username}"
con.execute("select pg_terminate_backend(#{session['pid']})")
end
}
end
end
end
Remove deliver_now
module Util
# Manages PostgreSQL accounts for AACT public-database users.
# NOTE(review): usernames and passwords are interpolated directly into SQL
# strings throughout — they must be validated/sanitized upstream or these
# calls are injectable.
class UserDbManager < DbManager
# Class-level convenience wrapper around #change_password.
def self.change_password(user, pwd)
new.change_password(user, pwd)
end
# Creates the db account with login disabled until email confirmation.
# Returns true on success, false (with user.errors populated) on failure.
def create_user_account(user)
begin
return false if !can_create_user_account?(user)
pub_con.execute("create user \"#{user.username}\" password '#{user.password}';")
pub_con.execute("alter user #{user.username} nologin;") # can't login until they confirm their email
return true
rescue => e
user.errors.add(:base, e.message)
return false
end
end
def can_create_user_account?(user)
return false if user_account_exists?(user.username)
return false if !public_db_accessible?
return true
end
# Reserved names and existing pg users/groups count as taken.
def user_account_exists?(username)
return true if username == 'postgres'
return true if username == 'ctti'
pub_con.execute("SELECT usename FROM pg_catalog.pg_user where usename = '#{username}' UNION
SELECT groname FROM pg_catalog.pg_group where groname = '#{username}'").count > 0
end
# Drops the account after reassigning/dropping any objects it owns.
def remove_user(username)
begin
return false if !user_account_exists?(username)
revoke_db_privs(username)
pub_con.execute("reassign owned by #{username} to postgres;")
pub_con.execute("drop owned by #{username};")
pub_con.execute("drop user #{username};")
return true
rescue => e
raise e
end
end
# Sets a new password; errors are reported on the user object.
def change_password(user,pwd)
begin
pub_con.execute("alter user \"#{user.username}\" password '#{pwd}';")
rescue => e
user.errors.add(:base, e.message)
end
end
# Dumps user table/events plus account globals to dated SQL files and
# builds a backup report mail.
def backup_user_info
fm=Util::FileManager.new
file_prefix="#{fm.backup_directory}/#{Time.zone.now.strftime('%Y%m%d')}"
table_file_name="#{file_prefix}_aact_users_table.sql"
event_file_name="#{file_prefix}_aact_user_events.sql"
account_file_name="#{file_prefix}_aact_user_accounts.sql"
# Overwrite any dump already made today.
File.delete(table_file_name) if File.exist?(table_file_name)
File.delete(event_file_name) if File.exist?(event_file_name)
File.delete(account_file_name) if File.exist?(account_file_name)
log "dumping Users table..."
cmd="pg_dump --no-owner --host=localhost -U #{ENV['DB_SUPER_USERNAME']} --table=Users --data-only aact_admin > #{table_file_name}"
run_command_line(cmd)
log "dumping User events..."
cmd="pg_dump --no-owner --host=localhost -U #{ENV['DB_SUPER_USERNAME']} --table=User_Events --data-only aact_admin > #{event_file_name}"
run_command_line(cmd)
log "dumping User accounts..."
cmd="/opt/rh/rh-postgresql96/root/bin/pg_dumpall -U #{ENV['DB_SUPER_USERNAME']} -h #{public_host_name} --globals-only > #{account_file_name}"
run_command_line(cmd)
event=Admin::UserEvent.new({:event_type=>'backup', :file_names=>" #{table_file_name}, #{event_file_name}, #{account_file_name}" })
# NOTE(review): the mail object is built but not delivered here —
# presumably delivery happens elsewhere (or was intentionally disabled);
# confirm against the mailer's configuration.
UserMailer.report_backup(event)
end
# Grants read-only access to the ctgov schema and enables login.
def grant_db_privs(username)
pub_con.execute("alter role \"#{username}\" IN DATABASE aact set search_path = ctgov;")
pub_con.execute("grant connect on database aact to \"#{username}\";")
pub_con.execute("grant usage on schema ctgov TO \"#{username}\";")
pub_con.execute("grant select on all tables in schema ctgov to \"#{username}\";")
pub_con.execute("alter user \"#{username}\" login;")
end
# Disables login after kicking any live sessions for the user.
def revoke_db_privs(username)
terminate_sessions_for(username)
pub_con.execute("alter user #{username} nologin;")
end
def terminate_sessions_for(username)
con.select_all("select * from pg_stat_activity order by pid;").each { |session|
if session['usename']=="#{username}"
con.execute("select pg_terminate_backend(#{session['pid']})")
end
}
end
end
end
|
# Capistrano 2 deploy script for kassi (git + remote_cache + thinking_sphinx).
require 'thinking_sphinx/deploy/capistrano'
default_run_options[:pty] = true # Must be set for the password prompt from git to work
set :application, "kassi"
set :repository, "git://github.com/sizzlelab/kassi.git"
set :user, "kassi" # The server's user for deploys
ssh_options[:forward_agent] = true
set :scm, :git
set :deploy_via, :remote_cache
set :deploy_to, "/var/datat/kassi"
# DEPLOY_ENV selects target host/path/branch; BRANCH overrides the branch.
if ENV['DEPLOY_ENV'] == "beta"
set :server_name, "beta"
set :host, "beta.sizl.org"
set :branch, ENV['BRANCH'] || "production"
elsif ENV['DEPLOY_ENV'] == "icsi"
set :deploy_to, "/opt/kassi"
set :server_name, "icsi"
set :host, "sizl.icsi.berkeley.edu"
set :user, "amvirola"
set :branch, ENV['BRANCH'] || "production"
elsif ENV['DEPLOY_ENV'] == "delta"
set :server_name, "alpha"
set :host, "alpha.sizl.org"
set :branch, ENV['BRANCH'] || "production"
set :deploy_to, "/var/datat/deltakassi"
else
# Default target: alpha, master branch.
set :server_name, "alpha"
set :host, "alpha.sizl.org"
set :branch, ENV['BRANCH'] || "master"
end
set :path, "$PATH:/var/lib/gems/1.8/bin"
# Single-host deployment: app, web and db all live on the same machine.
role :app, host
role :web, host
role :db, host, :primary => true
set :rails_env, :production
set :use_sudo, false
# If you are using Passenger mod_rails uncomment this:
# if you're still using the script/reapear helper you will need
# these http://github.com/rails/irs_process_scripts
namespace :deploy do
# Passenger manages the app processes; start/stop are no-ops.
task :start do ; end
task :stop do ; end
task :restart, :roles => :app, :except => { :no_release => true } do
run "#{try_sudo} touch #{File.join(current_path,'tmp','restart.txt')}"
end
task :preparations do
#run "killall mongrel_rails" rescue nil
#run "killall searchd" rescue nil
end
# Links shared assets/config into the freshly checked-out release.
task :symlinks_to_shared_path do
run "rm -rf #{release_path}/public/images/listing_images"
run "rm -rf #{release_path}/tmp/performance"
run "ln -fs #{shared_path}/listing_images/ #{release_path}/public/images/listing_images"
run "ln -fs #{shared_path}/performance/ #{release_path}/tmp/performance"
run "ln -nfs #{shared_path}/system/database.yml #{release_path}/config/database.yml"
run "ln -nfs #{shared_path}/system/session_secret #{release_path}/config/session_secret"
run "ln -nfs #{shared_path}/system/config.yml #{release_path}/config/config.yml"
run "ln -nfs #{shared_path}/system/gmaps_api_key.yml #{release_path}/config/gmaps_api_key.yml"
run "ln -nfs #{shared_path}/db/sphinx #{release_path}/db/sphinx"
run "ln -nfs #{shared_path}/vendor_bundle #{release_path}/vendor/bundle"
if ENV['DEPLOY_ENV'] == "dbtest"
run "ln -nfs #{shared_path}/system/sphinx.yml #{release_path}/config/sphinx.yml"
end
end
desc "Run the bundle install on the server"
task :bundle do
run "cd #{release_path} && RAILS_ENV=#{rails_env} bundle install --deployment --without test"
end
task :finalize do
#whenever.write_crontab
end
end
# NOTE(review): "cold" here — the Capistrano 2 task is usually namespaced
# as "deploy:cold"; confirm this callback actually fires.
before "cold" do
preparations
end
after %w(deploy:migrations deploy:cold deploy:start ) do
deploy.finalize
end
after "deploy:update_code" do
deploy.symlinks_to_shared_path
deploy.bundle
thinking_sphinx.rebuild
whenever.update_crontab
end
# First-time setup: create the shared sphinx folder and bring the daemon up.
after "deploy:setup" do
thinking_sphinx.shared_sphinx_folder
thinking_sphinx.configure
thinking_sphinx.index
thinking_sphinx.start
end
require 'config/boot'
require 'hoptoad_notifier/capistrano'
add "dbtest" option back to deploy.rb
# Capistrano 2 deploy script for kassi (git + remote_cache + thinking_sphinx).
require 'thinking_sphinx/deploy/capistrano'
default_run_options[:pty] = true # Must be set for the password prompt from git to work
set :application, "kassi"
set :repository, "git://github.com/sizzlelab/kassi.git"
set :user, "kassi" # The server's user for deploys
ssh_options[:forward_agent] = true
set :scm, :git
set :deploy_via, :remote_cache
set :deploy_to, "/var/datat/kassi"
# DEPLOY_ENV selects target host/path/branch; BRANCH overrides the branch.
if ENV['DEPLOY_ENV'] == "beta"
set :server_name, "beta"
set :host, "beta.sizl.org"
set :branch, ENV['BRANCH'] || "production"
elsif ENV['DEPLOY_ENV'] == "icsi"
set :deploy_to, "/opt/kassi"
set :server_name, "icsi"
set :host, "sizl.icsi.berkeley.edu"
set :user, "amvirola"
set :branch, ENV['BRANCH'] || "production"
elsif ENV['DEPLOY_ENV'] == "delta"
set :server_name, "alpha"
set :host, "alpha.sizl.org"
set :branch, ENV['BRANCH'] || "production"
set :deploy_to, "/var/datat/deltakassi"
elsif ENV['DEPLOY_ENV'] == "dbtest"
# Db-testing instance: deployed alongside the default alpha instance
# but into its own path (and with its own sphinx.yml, linked below).
set :deploy_to, "/var/datat/kassi2dbtest"
set :server_name, "alpha"
set :host, "alpha.sizl.org"
set :branch, ENV['BRANCH'] || "master"
else
# Default target: alpha, master branch.
set :server_name, "alpha"
set :host, "alpha.sizl.org"
set :branch, ENV['BRANCH'] || "master"
end
set :path, "$PATH:/var/lib/gems/1.8/bin"
# Single-host deployment: app, web and db all live on the same machine.
role :app, host
role :web, host
role :db, host, :primary => true
set :rails_env, :production
set :use_sudo, false
# If you are using Passenger mod_rails uncomment this:
# if you're still using the script/reapear helper you will need
# these http://github.com/rails/irs_process_scripts
namespace :deploy do
# Passenger manages the app processes; start/stop are no-ops.
task :start do ; end
task :stop do ; end
task :restart, :roles => :app, :except => { :no_release => true } do
run "#{try_sudo} touch #{File.join(current_path,'tmp','restart.txt')}"
end
task :preparations do
#run "killall mongrel_rails" rescue nil
#run "killall searchd" rescue nil
end
# Links shared assets/config into the freshly checked-out release.
task :symlinks_to_shared_path do
run "rm -rf #{release_path}/public/images/listing_images"
run "rm -rf #{release_path}/tmp/performance"
run "ln -fs #{shared_path}/listing_images/ #{release_path}/public/images/listing_images"
run "ln -fs #{shared_path}/performance/ #{release_path}/tmp/performance"
run "ln -nfs #{shared_path}/system/database.yml #{release_path}/config/database.yml"
run "ln -nfs #{shared_path}/system/session_secret #{release_path}/config/session_secret"
run "ln -nfs #{shared_path}/system/config.yml #{release_path}/config/config.yml"
run "ln -nfs #{shared_path}/system/gmaps_api_key.yml #{release_path}/config/gmaps_api_key.yml"
run "ln -nfs #{shared_path}/db/sphinx #{release_path}/db/sphinx"
run "ln -nfs #{shared_path}/vendor_bundle #{release_path}/vendor/bundle"
if ENV['DEPLOY_ENV'] == "dbtest"
run "ln -nfs #{shared_path}/system/sphinx.yml #{release_path}/config/sphinx.yml"
end
end
desc "Run the bundle install on the server"
task :bundle do
run "cd #{release_path} && RAILS_ENV=#{rails_env} bundle install --deployment --without test"
end
task :finalize do
#whenever.write_crontab
end
end
# NOTE(review): "cold" here — the Capistrano 2 task is usually namespaced
# as "deploy:cold"; confirm this callback actually fires.
before "cold" do
preparations
end
after %w(deploy:migrations deploy:cold deploy:start ) do
deploy.finalize
end
after "deploy:update_code" do
deploy.symlinks_to_shared_path
deploy.bundle
thinking_sphinx.rebuild
whenever.update_crontab
end
# First-time setup: create the shared sphinx folder and bring the daemon up.
after "deploy:setup" do
thinking_sphinx.shared_sphinx_folder
thinking_sphinx.configure
thinking_sphinx.index
thinking_sphinx.start
end
require 'config/boot'
require 'hoptoad_notifier/capistrano'
|
#########################################################
# Julien Fouilhé : julien.fouilhe@mobile-devices.fr
# Mobile Devices 2014
#########################################################
module RagentApi
  # Caches per-account collection-definition maps fetched from the cloud API,
  # seeded with the bundled defaults for the 'default' account.
  module CollectionDefinitionMapping
    # Drops the whole cache so the next fetch_map call re-fetches.
    # BUG FIX: previously nilled @mapping_collection_definition_number
    # (singular "definition"), a variable that is never read anywhere, so
    # the cache was never actually invalidated.
    def self.invalid_map
      @mapping_collection_definitions_number = nil
    end

    # Lazily loads the bundled default definitions shipped next to this file.
    def self.fetch_default_map
      @default_track_field_info ||= begin
        path = File.expand_path("..", __FILE__)
        CC.logger.info("fetch_default_map fetched")
        JSON.parse(File.read("#{path}/default_collection_definitions_info.json"))
      end
    end

    # Returns the collection-definition map for +account+, fetching it from
    # the cloud API on first use. Raises if the account is unreachable.
    def self.fetch_map(account)
      # BUG FIX: the seed key was 'tests', which never matched the 'default'
      # account substituted by get_all on the sdk-vm environment, forcing a
      # pointless (and failing) HTTP fetch for the bundled defaults.
      @mapping_collection_definitions_number ||= { 'default' => self.fetch_default_map }
      if !(@mapping_collection_definitions_number.has_key?(account))
        CC.logger.info("Collection definitions fetch_map #{account}")
        ret = CC::RagentHttpApiV3.request_http_cloud_api(account, '/collection_definitions.json')
        if ret != nil
          CC.logger.info("Collection definitions fetch_map success for account #{account} = #{ret}")
          @mapping_collection_definitions_number[account] = ret
        else
          raise "Account '#{account}' not available."
        end
      end
      @mapping_collection_definitions_number[account]
    end

    # collections definitions look like :
    # {
    #   "name": "Trips example",
    #   "type": "tracks",
    #   "collect": [ "track", "message" ],
    #   "start_conditions": {
    #     "DIO_IGNITION": true
    #   },
    #   "stop_conditions": {
    #     "DIO_IGNITION": false
    #   },
    #   "assets": [ "FAKE_IMEI" ], # if assets is empty, it means it should match every asset
    # }
    # returns a collection definitions structs array
    def self.get_all(account)
      # The sdk-vm environment always uses the bundled defaults.
      if RAGENT.running_env_name == 'sdk-vm'
        account = 'default'
      end
      return self.fetch_map(account)
    end
  end
end
Change to default account
#########################################################
# Julien Fouilhé : julien.fouilhe@mobile-devices.fr
# Mobile Devices 2014
#########################################################
module RagentApi
  # Caches per-account collection-definition maps fetched from the cloud API,
  # seeded with the bundled defaults for the 'default' account.
  module CollectionDefinitionMapping
    # Drops the whole cache so the next fetch_map call re-fetches.
    # BUG FIX: previously nilled @mapping_collection_definition_number
    # (singular "definition"), a variable that is never read anywhere, so
    # the cache was never actually invalidated.
    def self.invalid_map
      @mapping_collection_definitions_number = nil
    end

    # Lazily loads the bundled default definitions shipped next to this file.
    def self.fetch_default_map
      @default_track_field_info ||= begin
        path = File.expand_path("..", __FILE__)
        CC.logger.info("fetch_default_map fetched")
        JSON.parse(File.read("#{path}/default_collection_definitions_info.json"))
      end
    end

    # Returns the collection-definition map for +account+, fetching it from
    # the cloud API on first use. Raises if the account is unreachable.
    def self.fetch_map(account)
      @mapping_collection_definitions_number ||= { 'default' => self.fetch_default_map }
      if !(@mapping_collection_definitions_number.has_key?(account))
        CC.logger.info("Collection definitions fetch_map #{account}")
        ret = CC::RagentHttpApiV3.request_http_cloud_api(account, '/collection_definitions.json')
        if ret != nil
          CC.logger.info("Collection definitions fetch_map success for account #{account} = #{ret}")
          @mapping_collection_definitions_number[account] = ret
        else
          raise "Account '#{account}' not available."
        end
      end
      @mapping_collection_definitions_number[account]
    end

    # collections definitions look like :
    # {
    #   "name": "Trips example",
    #   "type": "tracks",
    #   "collect": [ "track", "message" ],
    #   "start_conditions": {
    #     "DIO_IGNITION": true
    #   },
    #   "stop_conditions": {
    #     "DIO_IGNITION": false
    #   },
    #   "assets": [ "FAKE_IMEI" ], # if assets is empty, it means it should match every asset
    # }
    # returns a collection definitions structs array
    def self.get_all(account)
      # The sdk-vm environment always uses the bundled defaults.
      if RAGENT.running_env_name == 'sdk-vm'
        account = 'default'
      end
      return self.fetch_map(account)
    end
  end
end
|
# Homebrew formula for mruby 2.0.0 (older bottle-DSL syntax).
class Mruby < Formula
desc "Lightweight implementation of the Ruby language"
homepage "https://mruby.org/"
url "https://github.com/mruby/mruby/archive/2.0.0.tar.gz"
sha256 "fa495898d51130c69480a13e90df5dc18cb1a9d9a31836268a895989d902048f"
head "https://github.com/mruby/mruby.git"
bottle do
cellar :any_skip_relocation
sha256 "3a1dcc1710f107e993a38b3262c7235f71cf12a80ca7a5dd0cd3c579430b7a20" => :mojave
sha256 "235c0c374c2dce600e00ac2b95fa7575a541a725f0c55e06bd3f7577b5309ed8" => :high_sierra
sha256 "5d41f43e7524997f9bcba2ca181dc838e47543941fc44ec686460ef7c675754c" => :sierra
sha256 "27ac59355313fec745b4537112e49162decc0782f206a1256d36a49764ee58a5" => :x86_64_linux
end
# Bison regenerates mruby's parser during the build.
depends_on "bison" => :build
# macOS ships a usable system ruby; Linux needs the formula.
depends_on "ruby" unless OS.mac?
def install
system "make"
# Host-target build artifacts land under build/host.
cd "build/host/" do
lib.install Dir["lib/*.a"]
prefix.install %w[bin mrbgems mrblib]
end
prefix.install "include"
end
test do
# Smoke test: the interpreter evaluates a trivial program.
system "#{bin}/mruby", "-e", "true"
end
end
mruby: bump revision for readline
Closes Linuxbrew/homebrew-core#11223.
Signed-off-by: Michka Popoff <7b0496f66f66ee22a38826c310c38b415671b832@gmail.com>
# Homebrew formula for mruby 2.0.0; Linux-only revision bump
# (rebuild against readline).
class Mruby < Formula
desc "Lightweight implementation of the Ruby language"
homepage "https://mruby.org/"
url "https://github.com/mruby/mruby/archive/2.0.0.tar.gz"
sha256 "fa495898d51130c69480a13e90df5dc18cb1a9d9a31836268a895989d902048f"
head "https://github.com/mruby/mruby.git"
revision 1 unless OS.mac?
bottle do
cellar :any_skip_relocation
sha256 "3a1dcc1710f107e993a38b3262c7235f71cf12a80ca7a5dd0cd3c579430b7a20" => :mojave
sha256 "235c0c374c2dce600e00ac2b95fa7575a541a725f0c55e06bd3f7577b5309ed8" => :high_sierra
sha256 "5d41f43e7524997f9bcba2ca181dc838e47543941fc44ec686460ef7c675754c" => :sierra
# NOTE(review): the x86_64_linux bottle sha was dropped — presumably
# pending a rebuild after the revision bump; confirm.
end
# Bison regenerates mruby's parser during the build.
depends_on "bison" => :build
# macOS ships a usable system ruby; Linux needs the formula.
depends_on "ruby" unless OS.mac?
def install
system "make"
# Host-target build artifacts land under build/host.
cd "build/host/" do
lib.install Dir["lib/*.a"]
prefix.install %w[bin mrbgems mrblib]
end
prefix.install "include"
end
test do
# Smoke test: the interpreter evaluates a trivial program.
system "#{bin}/mruby", "-e", "true"
end
end
|
module Util
  # Creates/removes PostgreSQL accounts for application users in the public db.
  # NOTE(review): identifiers and passwords are string-interpolated into SQL;
  # callers must guarantee they are sanitized — parameterization is not changed
  # here to avoid altering quoting behavior.
  class UserDbManager < DbManager
    # Class-level convenience wrapper around #change_password.
    def self.change_password(user, pwd)
      new.change_password(user, pwd)
    end

    # Creates the db account with connect revoked until confirmation.
    # Returns true on success, false (with user.errors populated) on failure.
    def create_user_account(user)
      begin
        return false if !can_create_user_account?(user)
        #user.skip_password_validation=true # don't validate that user entered current password. already validated
        #pub_con = PublicBase.establish_connection(ENV["AACT_PUBLIC_DATABASE_URL"]).connection
        pub_con.execute("create user \"#{user.username}\" password '#{user.password}';")
        pub_con.execute("revoke connect on database aact from #{user.username};")
        #pub_con.disconnect!
        #@pub_con=nil
        return true
      rescue => e
        user.errors.add(:base, e.message)
        return false
      end
    end

    def can_create_user_account?(user)
      if user_account_exists?(user.username)
        user.errors.add(:Username, "Database account already exists for username '#{user.username}'")
        return false
      else
        return true
      end
    end

    def user_account_exists?(username)
      # Removed a dead local (`res=`); the comparison itself is the return value.
      pub_con.execute("SELECT * FROM pg_catalog.pg_user where usename = '#{username}'").count > 0
    end

    # Drops the account after revoking its privileges.
    def remove_user(username)
      begin
        return false if !user_account_exists?(username)
        revoke_db_privs(username)
        pub_con.execute("drop user #{username};")
        return true
      rescue => e
        raise e
      end
    end

    # Sets a new password; errors are reported on the user object.
    def change_password(user,pwd)
      # FIX: removed leftover debug `puts` lines that printed the new
      # password (and the manager's state) to stdout — a credential leak.
      begin
        pub_con.execute("alter user \"#{user.username}\" password '#{pwd}';")
      rescue => e
        user.errors.add(:base, e.message)
      end
    end

    # Grants read-only access to the ctgov schema.
    def grant_db_privs(username)
      pub_con.execute("grant connect on database aact to \"#{username}\";")
      pub_con.execute("grant usage on schema ctgov TO \"#{username}\";")
      pub_con.execute("grant select on all tables in schema ctgov to \"#{username}\";")
    end

    # Strips all privileges and owned objects on a fresh connection.
    def revoke_db_privs(username)
      pub_con = PublicBase.establish_connection(ENV["AACT_PUBLIC_DATABASE_URL"]).connection
      pub_con.execute("reassign owned by #{username} to postgres;")
      pub_con.execute("drop owned by #{username};")
      pub_con.execute("revoke all on schema ctgov from #{username};")
      pub_con.execute("revoke connect on database #{public_db_name} from #{username};")
      pub_con.disconnect!
      @pub_con=nil
    end

    def terminate_sessions_for(user)
      con.select_all("select * from pg_stat_activity order by pid;").each { |session|
        if session['usename']=="#{user.username}"
          con.execute("select pg_terminate_backend(#{session['pid']})")
        end
      }
    end
  end
end
remove puts
module Util
  # Creates/removes PostgreSQL accounts for application users in the public db.
  # NOTE(review): identifiers and passwords are string-interpolated into SQL;
  # callers must guarantee they are sanitized — parameterization is not changed
  # here to avoid altering quoting behavior.
  class UserDbManager < DbManager
    # Class-level convenience wrapper around #change_password.
    def self.change_password(user, pwd)
      new.change_password(user, pwd)
    end

    # Creates the db account with connect revoked until confirmation.
    # Returns true on success, false (with user.errors populated) on failure.
    def create_user_account(user)
      begin
        return false if !can_create_user_account?(user)
        #user.skip_password_validation=true # don't validate that user entered current password. already validated
        #pub_con = PublicBase.establish_connection(ENV["AACT_PUBLIC_DATABASE_URL"]).connection
        pub_con.execute("create user \"#{user.username}\" password '#{user.password}';")
        pub_con.execute("revoke connect on database aact from #{user.username};")
        #pub_con.disconnect!
        #@pub_con=nil
        return true
      rescue => e
        user.errors.add(:base, e.message)
        return false
      end
    end

    def can_create_user_account?(user)
      if user_account_exists?(user.username)
        user.errors.add(:Username, "Database account already exists for username '#{user.username}'")
        return false
      else
        return true
      end
    end

    def user_account_exists?(username)
      # Removed a dead local (`res=`); the comparison itself is the return value.
      pub_con.execute("SELECT * FROM pg_catalog.pg_user where usename = '#{username}'").count > 0
    end

    # Drops the account after revoking its privileges.
    def remove_user(username)
      begin
        return false if !user_account_exists?(username)
        revoke_db_privs(username)
        pub_con.execute("drop user #{username};")
        return true
      rescue => e
        raise e
      end
    end

    # Sets a new password; errors are reported on the user object.
    def change_password(user,pwd)
      begin
        pub_con.execute("alter user \"#{user.username}\" password '#{pwd}';")
      rescue => e
        user.errors.add(:base, e.message)
      end
    end

    # Grants read-only access to the ctgov schema.
    def grant_db_privs(username)
      pub_con.execute("grant connect on database aact to \"#{username}\";")
      pub_con.execute("grant usage on schema ctgov TO \"#{username}\";")
      pub_con.execute("grant select on all tables in schema ctgov to \"#{username}\";")
    end

    # Strips all privileges and owned objects on a fresh connection.
    def revoke_db_privs(username)
      pub_con = PublicBase.establish_connection(ENV["AACT_PUBLIC_DATABASE_URL"]).connection
      pub_con.execute("reassign owned by #{username} to postgres;")
      pub_con.execute("drop owned by #{username};")
      pub_con.execute("revoke all on schema ctgov from #{username};")
      pub_con.execute("revoke connect on database #{public_db_name} from #{username};")
      pub_con.disconnect!
      @pub_con=nil
    end

    def terminate_sessions_for(user)
      con.select_all("select * from pg_stat_activity order by pid;").each { |session|
        if session['usename']=="#{user.username}"
          con.execute("select pg_terminate_backend(#{session['pid']})")
        end
      }
    end
  end
end
|
# Capistrano 3 deploy configuration for locoroco on bitaculous.com.
lock '3.2.1'
set :domain, 'bitaculous.com'
set :application, 'locoroco'
# Directories persisted across releases.
set :linked_dirs, %w{log public/system public/sites tmp/cache tmp/pids tmp/sockets}
set :scm, :git
set :repo_url, 'https://github.com/bitaculous/locoroco.git'
# set :rvm_type, :system
# set :rvm_ruby_string, 'ruby-2.1.2@bitaculous-locoroco'
# set :rvm_path, '/usr/local/rvm'
set :bundle_flags, '--deployment --quiet'
set :bundle_without, 'development test deployment misc'
set :pty, true
# Set formatter (`:black_hole`, `:dot`, `:pretty` or `:simple_text`)
set :format, :pretty
# Set log level (`:debug`, `:error` or `:info`)
set :log_level, :info
after 'deploy:publishing', 'deploy:restart'
# Restart is delegated to the unicorn plugin's restart task.
namespace :deploy do
task :restart do
invoke 'unicorn:restart'
end
end
[➠] Changed capistrano log level to `:error`.
# Capistrano 3 deploy configuration for locoroco on bitaculous.com.
lock '3.2.1'
set :domain, 'bitaculous.com'
set :application, 'locoroco'
# Directories persisted across releases.
set :linked_dirs, %w{log public/system public/sites tmp/cache tmp/pids tmp/sockets}
set :scm, :git
set :repo_url, 'https://github.com/bitaculous/locoroco.git'
# set :rvm_type, :system
# set :rvm_ruby_string, 'ruby-2.1.2@bitaculous-locoroco'
# set :rvm_path, '/usr/local/rvm'
set :bundle_flags, '--deployment --quiet'
set :bundle_without, 'development test deployment misc'
set :pty, true
# Set formatter (`:black_hole`, `:dot`, `:pretty` or `:simple_text`)
set :format, :pretty
# Set log level (`:debug`, `:error` or `:info`)
set :log_level, :error
after 'deploy:publishing', 'deploy:restart'
# Restart is delegated to the unicorn plugin's restart task.
namespace :deploy do
task :restart do
invoke 'unicorn:restart'
end
end
# Rake tasks for generating app, framework and plugin RDoc documentation.
namespace :doc do
desc "Generate documentation for the application. Set custom template with TEMPLATE=/path/to/rdoc/template.rb or title with TITLE=\"Custom Title\""
Rake::RDocTask.new("app") { |rdoc|
rdoc.rdoc_dir = 'doc/app'
rdoc.template = ENV['template'] if ENV['template']
rdoc.title = ENV['title'] || "Rails Application Documentation"
rdoc.options << '--line-numbers' << '--inline-source'
rdoc.options << '--charset' << 'utf-8'
rdoc.rdoc_files.include('doc/README_FOR_APP')
rdoc.rdoc_files.include('app/**/*.rb')
rdoc.rdoc_files.include('lib/**/*.rb')
}
desc "Generate documentation for the Rails framework"
Rake::RDocTask.new("rails") { |rdoc|
rdoc.rdoc_dir = 'doc/api'
rdoc.template = "#{ENV['template']}.rb" if ENV['template']
rdoc.title = "Rails Framework Documentation"
rdoc.options << '--line-numbers' << '--inline-source'
rdoc.rdoc_files.include('README')
rdoc.rdoc_files.include('vendor/rails/railties/CHANGELOG')
rdoc.rdoc_files.include('vendor/rails/railties/MIT-LICENSE')
rdoc.rdoc_files.include('vendor/rails/railties/README')
rdoc.rdoc_files.include('vendor/rails/railties/lib/{*.rb,commands/*.rb,generators/*.rb}')
rdoc.rdoc_files.include('vendor/rails/activerecord/README')
rdoc.rdoc_files.include('vendor/rails/activerecord/CHANGELOG')
rdoc.rdoc_files.include('vendor/rails/activerecord/lib/active_record/**/*.rb')
rdoc.rdoc_files.exclude('vendor/rails/activerecord/lib/active_record/vendor/*')
rdoc.rdoc_files.include('vendor/rails/activeresource/README')
rdoc.rdoc_files.include('vendor/rails/activeresource/CHANGELOG')
rdoc.rdoc_files.include('vendor/rails/activeresource/lib/active_resource.rb')
rdoc.rdoc_files.include('vendor/rails/activeresource/lib/active_resource/*')
rdoc.rdoc_files.include('vendor/rails/actionpack/README')
rdoc.rdoc_files.include('vendor/rails/actionpack/CHANGELOG')
rdoc.rdoc_files.include('vendor/rails/actionpack/lib/action_controller/**/*.rb')
rdoc.rdoc_files.include('vendor/rails/actionpack/lib/action_view/**/*.rb')
rdoc.rdoc_files.include('vendor/rails/actionmailer/README')
rdoc.rdoc_files.include('vendor/rails/actionmailer/CHANGELOG')
rdoc.rdoc_files.include('vendor/rails/actionmailer/lib/action_mailer/base.rb')
rdoc.rdoc_files.include('vendor/rails/activesupport/README')
rdoc.rdoc_files.include('vendor/rails/activesupport/CHANGELOG')
rdoc.rdoc_files.include('vendor/rails/activesupport/lib/active_support/**/*.rb')
}
# One doc task per installed plugin (defined dynamically below).
plugins = FileList['vendor/plugins/**'].collect { |plugin| File.basename(plugin) }
desc "Generate documentation for all installed plugins"
task :plugins => plugins.collect { |plugin| "doc:plugins:#{plugin}" }
desc "Remove plugin documentation"
task :clobber_plugins do
rm_rf 'doc/plugins' rescue nil
end
desc "Generate Rails guides"
task :guides do
# NOTE(review): RAILS_ROOT / RAILTIES_PATH constants are deprecated in
# later Rails versions (use Rails.root) — confirm the targeted version.
require File.join(RAILTIES_PATH, "guides/rails_guides")
RailsGuides::Generator.new(File.join(RAILS_ROOT, "doc/guides")).generate
end
namespace :plugins do
# Define doc tasks for each plugin
plugins.each do |plugin|
desc "Generate documentation for the #{plugin} plugin"
task(plugin => :environment) do
plugin_base = "vendor/plugins/#{plugin}"
options = []
files = Rake::FileList.new
options << "-o doc/plugins/#{plugin}"
options << "--title '#{plugin.titlecase} Plugin Documentation'"
options << '--line-numbers' << '--inline-source'
options << '--charset' << 'utf-8'
options << '-T html'
files.include("#{plugin_base}/lib/**/*.rb")
if File.exist?("#{plugin_base}/README")
files.include("#{plugin_base}/README")
options << "--main '#{plugin_base}/README'"
end
files.include("#{plugin_base}/CHANGELOG") if File.exist?("#{plugin_base}/CHANGELOG")
options << files.to_s
sh %(rdoc #{options * ' '})
end
end
end
end
Fix RAILS_ROOT deprecation on guides generation. [#3710 status:resolved]
Signed-off-by: José Valim <0c2436ea76ed86e37cc05f66dea18d48ef390882@gmail.com>
# Documentation tasks: RDoc for the application itself, for the vendored
# Rails framework, for every installed plugin, and the Rails guides.
namespace :doc do
desc "Generate documentation for the application. Set custom template with TEMPLATE=/path/to/rdoc/template.rb or title with TITLE=\"Custom Title\""
# NOTE(review): the desc advertises TEMPLATE=/TITLE= but the code reads the
# lowercase ENV['template']/ENV['title'] keys -- confirm which casing callers
# actually use.
Rake::RDocTask.new("app") { |rdoc|
rdoc.rdoc_dir = 'doc/app'
rdoc.template = ENV['template'] if ENV['template']
rdoc.title = ENV['title'] || "Rails Application Documentation"
rdoc.options << '--line-numbers' << '--inline-source'
rdoc.options << '--charset' << 'utf-8'
rdoc.rdoc_files.include('doc/README_FOR_APP')
rdoc.rdoc_files.include('app/**/*.rb')
rdoc.rdoc_files.include('lib/**/*.rb')
}
desc "Generate documentation for the Rails framework"
# NOTE(review): unlike the "app" task above, this one appends ".rb" to
# ENV['template'] -- verify the inconsistency is intentional.
Rake::RDocTask.new("rails") { |rdoc|
rdoc.rdoc_dir = 'doc/api'
rdoc.template = "#{ENV['template']}.rb" if ENV['template']
rdoc.title = "Rails Framework Documentation"
rdoc.options << '--line-numbers' << '--inline-source'
# Document the vendored Rails gems (README/CHANGELOG plus library code),
# excluding third-party code vendored inside active_record.
rdoc.rdoc_files.include('README')
rdoc.rdoc_files.include('vendor/rails/railties/CHANGELOG')
rdoc.rdoc_files.include('vendor/rails/railties/MIT-LICENSE')
rdoc.rdoc_files.include('vendor/rails/railties/README')
rdoc.rdoc_files.include('vendor/rails/railties/lib/{*.rb,commands/*.rb,generators/*.rb}')
rdoc.rdoc_files.include('vendor/rails/activerecord/README')
rdoc.rdoc_files.include('vendor/rails/activerecord/CHANGELOG')
rdoc.rdoc_files.include('vendor/rails/activerecord/lib/active_record/**/*.rb')
rdoc.rdoc_files.exclude('vendor/rails/activerecord/lib/active_record/vendor/*')
rdoc.rdoc_files.include('vendor/rails/activeresource/README')
rdoc.rdoc_files.include('vendor/rails/activeresource/CHANGELOG')
rdoc.rdoc_files.include('vendor/rails/activeresource/lib/active_resource.rb')
rdoc.rdoc_files.include('vendor/rails/activeresource/lib/active_resource/*')
rdoc.rdoc_files.include('vendor/rails/actionpack/README')
rdoc.rdoc_files.include('vendor/rails/actionpack/CHANGELOG')
rdoc.rdoc_files.include('vendor/rails/actionpack/lib/action_controller/**/*.rb')
rdoc.rdoc_files.include('vendor/rails/actionpack/lib/action_view/**/*.rb')
rdoc.rdoc_files.include('vendor/rails/actionmailer/README')
rdoc.rdoc_files.include('vendor/rails/actionmailer/CHANGELOG')
rdoc.rdoc_files.include('vendor/rails/actionmailer/lib/action_mailer/base.rb')
rdoc.rdoc_files.include('vendor/rails/activesupport/README')
rdoc.rdoc_files.include('vendor/rails/activesupport/CHANGELOG')
rdoc.rdoc_files.include('vendor/rails/activesupport/lib/active_support/**/*.rb')
}
# One doc task per directory found under vendor/plugins (defined below in
# the :plugins namespace); doc:plugins runs them all.
plugins = FileList['vendor/plugins/**'].collect { |plugin| File.basename(plugin) }
desc "Generate documentation for all installed plugins"
task :plugins => plugins.collect { |plugin| "doc:plugins:#{plugin}" }
desc "Remove plugin documentation"
task :clobber_plugins do
rm_rf 'doc/plugins' rescue nil
end
desc "Generate Rails guides"
task :guides do
require File.join(RAILTIES_PATH, "guides/rails_guides")
RailsGuides::Generator.new(Rails.root.join("doc/guides")).generate
end
namespace :plugins do
# Define doc tasks for each plugin
plugins.each do |plugin|
desc "Generate documentation for the #{plugin} plugin"
task(plugin => :environment) do
plugin_base = "vendor/plugins/#{plugin}"
options = []
files = Rake::FileList.new
options << "-o doc/plugins/#{plugin}"
options << "--title '#{plugin.titlecase} Plugin Documentation'"
options << '--line-numbers' << '--inline-source'
options << '--charset' << 'utf-8'
options << '-T html'
files.include("#{plugin_base}/lib/**/*.rb")
if File.exist?("#{plugin_base}/README")
files.include("#{plugin_base}/README")
options << "--main '#{plugin_base}/README'"
end
files.include("#{plugin_base}/CHANGELOG") if File.exist?("#{plugin_base}/CHANGELOG")
options << files.to_s
# shell out to the rdoc binary with the accumulated options + file list
sh %(rdoc #{options * ' '})
end
end
end
end
|
# Homebrew formula for msmtp 1.8.20, a lightweight SMTP client commonly
# used as the sendmail backend for Mutt.
class Msmtp < Formula
desc "SMTP client that can be used as an SMTP plugin for Mutt"
homepage "https://marlam.de/msmtp/"
url "https://marlam.de/msmtp/releases/msmtp-1.8.20.tar.xz"
sha256 "d93ae2aafc0f48af7dc9d0b394df1bb800588b8b4e8d096d8b3cf225344eb111"
license "GPL-3.0-or-later"
livecheck do
url "https://marlam.de/msmtp/download/"
regex(/href=.*?msmtp[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
bottle do
sha256 arm64_monterey: "8508ac9dfbeac3274e8b90b458ce4d23e763d69dcda5c433f95a26c2850549d0"
sha256 arm64_big_sur:  "b5dcd7d18d087c04bde608df5229416315c15a7b48f2551a20ae1bf443b0936d"
sha256 monterey:       "a61abd779581e23ffee661d226448a5897e16d1ba1b7cbdaec926d7711127e9a"
sha256 big_sur:        "f354e83b318837c07c0dddf7f194f3b7b777017616bc7ebce5a79bb037163c8b"
sha256 catalina:       "34bc2d711bcf14a0f42d2fd9a5500b9fa3e662ea0387de45d3dd1907638e1e73"
sha256 x86_64_linux:   "758636ba630b46c2edc955438a6828ececd7b2ce79d3960cc9467d80aa7859f5"
end
depends_on "pkg-config" => :build
depends_on "gettext"
depends_on "gnutls"
depends_on "libidn2"
def install
# --with-macosx-keyring lets msmtp read passwords from the macOS keychain
system "./configure", *std_configure_args, "--disable-silent-rules", "--with-macosx-keyring"
system "make", "install"
# msmtpq (the queueing wrapper) ships as an optional helper script only
(pkgshare/"scripts").install "scripts/msmtpq"
end
test do
system bin/"msmtp", "--help"
end
end
msmtp 1.8.22
Closes #107550.
Signed-off-by: Sean Molenaar <2b250e3fea88cfef248b497ad5fc17f7dc435154@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for msmtp 1.8.22, a lightweight SMTP client commonly
# used as the sendmail backend for Mutt.
class Msmtp < Formula
desc "SMTP client that can be used as an SMTP plugin for Mutt"
homepage "https://marlam.de/msmtp/"
url "https://marlam.de/msmtp/releases/msmtp-1.8.22.tar.xz"
sha256 "1b04206286a5b82622335e4eb09e17074368b7288e53d134543cbbc6b79ea3e7"
license "GPL-3.0-or-later"
livecheck do
url "https://marlam.de/msmtp/download/"
regex(/href=.*?msmtp[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
# NOTE(review): these bottle sha256 values are byte-identical to the
# 1.8.20 formula's bottles -- they look stale after the version bump and
# should be regenerated by BrewTestBot; verify before shipping.
bottle do
sha256 arm64_monterey: "8508ac9dfbeac3274e8b90b458ce4d23e763d69dcda5c433f95a26c2850549d0"
sha256 arm64_big_sur:  "b5dcd7d18d087c04bde608df5229416315c15a7b48f2551a20ae1bf443b0936d"
sha256 monterey:       "a61abd779581e23ffee661d226448a5897e16d1ba1b7cbdaec926d7711127e9a"
sha256 big_sur:        "f354e83b318837c07c0dddf7f194f3b7b777017616bc7ebce5a79bb037163c8b"
sha256 catalina:       "34bc2d711bcf14a0f42d2fd9a5500b9fa3e662ea0387de45d3dd1907638e1e73"
sha256 x86_64_linux:   "758636ba630b46c2edc955438a6828ececd7b2ce79d3960cc9467d80aa7859f5"
end
depends_on "pkg-config" => :build
depends_on "gettext"
depends_on "gnutls"
depends_on "libidn2"
def install
# --with-macosx-keyring lets msmtp read passwords from the macOS keychain
system "./configure", *std_configure_args, "--disable-silent-rules", "--with-macosx-keyring"
system "make", "install"
# msmtpq (the queueing wrapper) ships as an optional helper script only
(pkgshare/"scripts").install "scripts/msmtpq"
end
test do
system bin/"msmtp", "--help"
end
end
|
# Mixin that converts wiki-style transcription markup ([[subject links]],
# pipe-delimited tables, ==headings==, {{tex:...:tex}} snippets) into the
# application's canonical XML, and validates/renames subject links.
# The including class must provide: source_text / source_translation
# accessors, errors (ActiveModel-style), collection, logger, clear_links,
# create_link, and (optionally) tex_figures.
module XmlSourceProcessor
  # Dirty flags: set by the writers below so process_source only
  # regenerates the XML for the side that actually changed.
  @text_dirty = false
  @translation_dirty = false
  #@fields = false

  # Marks the transcription dirty, then delegates to the attribute writer.
  def source_text=(text)
    @text_dirty = true
    super
  end

  # Marks the translation dirty, then delegates to the attribute writer.
  def source_translation=(translation)
    @translation_dirty = true
    super
  end

  def validate_source
    return if self.source_text.blank?
    validate_links(self.source_text)
  end

  def validate_source_translation
    return if self.source_translation.blank?
    validate_links(self.source_translation)
  end

  # Checks the text for problems or typos with the [[subject]] links and
  # records each issue on +errors+.
  def validate_links(text)
    # split on all begin-braces
    tags = text.split('[[')
    debug("validate_source: tags to process are #{tags.inspect}")
    # Drop only the leading chunk that precedes the first '[[' tag.
    # (The previous `tags - [tags[0]]` removed *every* element equal to
    # that chunk, silently discarding duplicate malformed tags.)
    tags.shift
    debug("validate_source: massaged tags to process are #{tags.inspect}")
    tags.each do |tag|
      debug(tag)
      if tag.include?(']]]')
        errors.add(:base, "Subject Linking Error: Tags should be created using 2 brackets, not 3")
        return
      end
      unless tag.include?(']]')
        tag = tag.strip
        errors.add(:base, "Subject Linking Error: Wrong number of closing braces after \"[[#{tag}\"")
      end
      # just pull the piece between the braces; '[[]]' yields no pieces,
      # so default to '' instead of nil (nil crashed the checks below)
      inner_tag = tag.split(']]')[0] || ''
      if inner_tag =~ /^\s*$/
        errors.add(:base, "Subject Linking Error: Blank tag in \"[[#{tag}\"")
      end
      # check for unclosed single bracket
      if inner_tag.include?('[') && !inner_tag.include?(']')
        errors.add(:base, "Subject Linking Error: Unclosed bracket within \"#{inner_tag}\"")
      end
      # check for blank title or display name with pipes
      if inner_tag.include?("|")
        tag_parts = inner_tag.split('|')
        debug("validate_source: inner tag parts are #{tag_parts.inspect}")
        if tag_parts[0] =~ /^\s*$/
          errors.add(:base, "Subject Linking Error: Blank subject in \"[[#{inner_tag}]]\"")
        end
        if tag_parts[1] =~ /^\s*$/
          errors.add(:base, "Subject Linking Error: Blank text in \"[[#{inner_tag}]]\"")
        end
      end
    end
    # return errors.size > 0
  end

  ##############################################
  # All code to convert transcriptions from source
  # format to canonical xml format belongs here.
  ##############################################

  # Regenerates xml_text / xml_translation for whichever source strings
  # were assigned since the last call.
  def process_source
    if @text_dirty
      self.xml_text = wiki_to_xml(self, Page::TEXT_TYPE::TRANSCRIPTION)
    end
    if @translation_dirty
      self.xml_translation = wiki_to_xml(self, Page::TEXT_TYPE::TRANSLATION)
    end
  end

  # Full wiki-markup -> XML pipeline for a page's transcription or
  # translation; returns the canonical XML string.
  def wiki_to_xml(page, text_type)
    subjects_disabled = page.collection.subjects_disabled
    source_text =
      case text_type
      when Page::TEXT_TYPE::TRANSCRIPTION
        page.source_text
      when Page::TEXT_TYPE::TRANSLATION
        page.source_translation
      else
        ""
      end
    xml_string = String.new(source_text)
    xml_string = process_latex_snippets(xml_string)
    xml_string = clean_bad_braces(xml_string)
    xml_string = process_square_braces(xml_string) unless subjects_disabled
    xml_string = process_linewise_markup(xml_string)
    xml_string = process_line_breaks(xml_string)
    xml_string = valid_xml_from_source(xml_string)
    xml_string = update_links_and_xml(xml_string, false, text_type)
    postprocess_sections
    xml_string
  end

  # Matches links mistakenly closed with }} instead of ]], e.g. "[[Smith}}".
  BAD_SHIFT_REGEX = /\[\[([[[:alpha:]][[:blank:]]|,\(\)\-[[:digit:]]]+)\}\}/
  def clean_bad_braces(text)
    text.gsub(BAD_SHIFT_REGEX, "[[\\1]]")
  end

  BRACE_REGEX = /\[\[.*?\]\]/m
  # Converts [[Title]] and [[Title|Display]] wiki links into <link> tags.
  def process_square_braces(text)
    text.scan(BRACE_REGEX).each do |wikilink_contents|
      # strip braces
      munged = wikilink_contents.sub('[[', '').sub(']]', '')
      # extract the title and display text
      if munged.include?('|')
        title, verbatim = munged.split('|')
      else
        title = verbatim = munged
      end
      title = canonicalize_title(title)
      replacement = "<link target_title=\"#{title}\">#{verbatim}</link>"
      text.sub!(wikilink_contents, replacement)
    end
    text
  end

  # Strips [[...]] markup, leaving only the display text.
  def remove_square_braces(text)
    text.scan(BRACE_REGEX).each do |results|
      changed = results
      # remove the title part of [[Title|Display]]
      changed = results.sub(/\[\[.*?\|/, '') if results.include?('|')
      changed = changed.sub('[[', '').sub(']]', '')
      text.sub!(results, changed)
    end
    text
  end

  LATEX_SNIPPET = /(\{\{tex:?(.*?):?tex\}\})/m
  # Replaces {{tex:...:tex}} snippets with <texFigure/> placeholders and
  # keeps the page's tex_figures collection in sync.  No-op for objects
  # that have no tex_figures association.
  def process_latex_snippets(text)
    return text unless self.respond_to?(:tex_figures)
    replacements = {}
    figures = self.tex_figures.to_a
    text.scan(LATEX_SNIPPET).each_with_index do |(with_tags, contents), i|
      # position attribute is 1-based
      replacements[with_tags] = "<texFigure position=\"#{i + 1}\"/>"
      figure = figures[i] || TexFigure.new
      figure.source = contents unless figure.source == contents
      figures[i] = figure
    end
    self.tex_figures = figures
    replacements.each_pair { |s, r| text.sub!(s, r) }
    text
  end

  HEADER = /\s\|\s/
  SEPARATOR = /---.*\|/
  ROW = HEADER
  # Converts pipe-delimited table markup into <table> XML, recording each
  # parsed table in @tables and section headings in @sections.
  def process_linewise_markup(text)
    @tables = []
    @sections = []
    new_lines = []
    current_table = nil
    text.lines.each do |line|
      # first deal with any sections
      line = process_any_sections(line)
      if !current_table
        if line.match(HEADER)
          # this line opens a table: parse it as the header row
          current_table = { :header => [], :rows => [], :section => @sections.last }
          cells = line.split(/\s*\|\s*/)
          cells.shift if line.match(/^\|/) # remove leading pipe
          current_table[:header] = cells.map { |cell_title| cell_title.sub(/^!\s*/, '') }
          heading = cells.map do |cell|
            if cell.match(/^!/)
              "<th class=\"bang\">#{cell.sub(/^!\s*/, '')}</th>"
            else
              "<th>#{cell}</th>"
            end
          end.join(" ")
          new_lines << "<table class=\"tabular\">\n<thead>\n<tr>#{heading}</tr></thead>"
        else
          # no current table, no table contents -- NO-OP
          new_lines << line
        end
      else
        # inside a table: this line is a separator, a row, or the end
        if line.match(SEPARATOR)
          # NO-OP
        elsif line.match(ROW)
          # remove leading and trailing delimiters
          clean_line = line.chomp.sub(/^\s*\|/, '').sub(/\|\s*$/, '')
          # -1 means "don't prune empty values at the end"
          cells = clean_line.split(/\s*\|\s*/, -1)
          current_table[:rows] << cells
          rowline = ""
          cells.each do |cell|
            rowline += "<td>#{cell}</td> "
          end
          new_lines << "<tbody>" if current_table[:rows].size == 1
          new_lines << "<tr>#{rowline}</tr>"
        else
          # finished the last row
          if current_table[:rows].size > 0 # only record tables with bodies
            @tables << current_table
            new_lines << "</tbody>"
          end
          new_lines << "</table>"
          current_table = nil
        end
      end
    end
    if current_table
      # unclosed table at end of text; record it once (the old code pushed
      # it onto @tables twice when it had rows)
      if current_table[:rows].size > 0 # only record tables with bodies
        @tables << current_table
        new_lines << "</tbody>"
      end
      new_lines << "</table>"
    end
    new_lines.join(" ")
  end

  # Wraps ==Heading== lines (depth 2..6) in <entryHeading> tags and
  # records a Section for each heading found.
  def process_any_sections(line)
    6.downto(2) do |depth|
      line.scan(/(={#{depth}}(.+)={#{depth}})/).each do |wiki_title|
        verbatim = XmlSourceProcessor.cell_to_plaintext(wiki_title.last)
        line = line.sub(wiki_title.first, "<entryHeading title=\"#{verbatim}\" depth=\"#{depth}\" >#{wiki_title.last}</entryHeading>")
        @sections << Section.new(:title => wiki_title.last, :depth => depth)
      end
    end
    line
  end

  # Resolves <link> elements inside recorded section titles to article ids.
  def postprocess_sections
    @sections.each do |section|
      doc = XmlSourceProcessor.cell_to_xml(section.title)
      doc.elements.each("//link") do |e|
        title = e.attributes['target_title']
        article = collection.articles.where(:title => title).first
        e.add_attribute('target_id', article.id.to_s) if article
      end
      section.title = XmlSourceProcessor.xml_to_cell(doc)
    end
  end

  # Normalizes a link title: strips markup and collapses whitespace.
  def canonicalize_title(title)
    # kill all tags
    title = title.gsub(/<.*?>/, '')
    # linebreaks -> spaces
    title = title.gsub(/\n/, ' ')
    # multiple spaces -> single spaces
    title = title.gsub(/\s+/, ' ')
    # change double quotes to proper xml
    # NOTE(review): as written this replaces '"' with '"' (a no-op); it
    # looks like an entity escape (&quot;) mangled in transit -- confirm
    # against the original source before changing.
    title = title.gsub(/\"/, '"')
    title
  end

  # Converts blank lines to paragraph breaks and newlines to <lb/> tags;
  # a trailing hyphen marks a word broken across lines (break="no").
  def process_line_breaks(text)
    text = "<p>#{text}</p>"
    text = text.gsub(/\s*\n\s*\n\s*/, "</p><p>")
    text = text.gsub(/-\r\n\s*/, '<lb break="no" />')
    text = text.gsub(/\r\n\s*/, "<lb/>")
    text = text.gsub(/-\n\s*/, '<lb break="no" />')
    text = text.gsub(/\n\s*/, "<lb/>")
    text = text.gsub(/-\r\s*/, '<lb break="no" />')
    text.gsub(/\r\s*/, "<lb/>")
  end

  # Wraps the processed source in an XML prolog and <page> root element.
  def valid_xml_from_source(source)
    source ||= ""
    # NOTE(review): these substitutions look like ampersand escaping
    # (& -> &amp;) mangled by entity decoding; verify upstream.
    safe = source.gsub(/\&/, '&')
    safe.gsub!(/\&amp;/, '&')
    <<EOF
<?xml version="1.0" encoding="UTF-8"?>
<page>
#{safe}
</page>
EOF
  end

  # Rewrites <link> elements: resolves (or creates) Article records,
  # records Link rows, and stamps target_id/link_id attributes.  In
  # preview mode nothing is persisted.
  def update_links_and_xml(xml_string, preview_mode=false, text_type)
    # first clear out the existing links
    clear_links(text_type)
    processed = ""
    doc = REXML::Document.new xml_string
    doc.elements.each("//link") do |element|
      # default the title to the text if it's not specified
      title = element.attributes['target_title'] || element.text
      display_text = ""
      element.children.each { |e| display_text += e.to_s }
      debug("link display_text = #{display_text}")
      # change the xml version of quotes back to double quotes for article title
      title = title.gsub('"', '"')
      # create new blank articles if they don't exist already
      if !(article = collection.articles.where(:title => title).first)
        article = Article.new
        article.title = title
        article.collection = collection
        article.save! unless preview_mode
      end
      link_id = create_link(article, display_text, text_type) unless preview_mode
      # rebuild the element with resolved attributes
      link_element = REXML::Element.new("link")
      element.children.each { |c| link_element.add(c) }
      link_element.add_attribute('target_title', title)
      debug("element=" + link_element.inspect)
      debug("article=" + article.inspect)
      link_element.add_attribute('target_id', article.id.to_s) unless preview_mode
      link_element.add_attribute('link_id', link_id.to_s) unless preview_mode
      element.replace_with(link_element)
    end
    doc.write(processed)
    processed
  end

  CELL_PREFIX = "<?xml version='1.0' encoding='UTF-8'?><cell>"
  CELL_SUFFIX = '</cell>'

  # Parses a table-cell string into a REXML document rooted at <cell>.
  def self.cell_to_xml(cell)
    REXML::Document.new(CELL_PREFIX + cell.gsub('&', '&') + CELL_SUFFIX)
  end

  # Inverse of cell_to_xml: serializes the document and strips the wrapper.
  def self.xml_to_cell(doc)
    text = ""
    doc.write(text)
    text.sub(CELL_PREFIX, '').sub(CELL_SUFFIX, '')
  end

  # Returns the plain text of a cell with all markup removed.
  # (Dropped a leftover `p e.text` that printed every text node to stdout.)
  def self.cell_to_plaintext(cell)
    doc = cell_to_xml(cell)
    doc.each_element('.//text()') { |e| e.text }.join
  end

  # Returns the newline-separated subject titles linked from a cell.
  def self.cell_to_subject(cell)
    doc = cell_to_xml(cell)
    subjects = ""
    doc.elements.each("//link") do |e|
      subjects << e.attributes['target_title']
      subjects << "\n"
    end
    subjects
  end

  # Returns the newline-separated category titles of every article linked
  # from a cell (links without a target_id are skipped).
  def self.cell_to_category(cell)
    doc = cell_to_xml(cell)
    categories = ""
    doc.elements.each("//link") do |e|
      id = e.attributes['target_id']
      next unless id
      article = Article.find(id)
      article.categories.each do |category|
        categories << category.title
        categories << "\n"
      end
    end
    categories
  end

  ##############################################
  # Code to rename links within the text.
  # This assumes that the name change has already
  # taken place within the article table in the DB
  ##############################################
  def rename_article_links(old_title, new_title)
    # NOTE(review): old_title is interpolated into a regex without
    # Regexp.escape, so titles containing metacharacters misbehave --
    # confirm titles are constrained before tightening this.
    title_regex = old_title.gsub(/\s+/, '\s+')
    self.source_text = rename_link_in_text(source_text, title_regex, new_title)
    # Articles don't have translations, but we still need to update pages.source_translation
    if has_attribute?(:source_translation) && !source_translation.nil?
      self.source_translation = rename_link_in_text(source_translation, title_regex, new_title)
    end
  end

  # Rewrites [[Old Title]] / [[Old Title|Display]] links to the new title.
  def rename_link_in_text(text, title_regex, new_title)
    # handle links of the format [[Old Title|Display Text]]
    text = text.gsub(/\[\[#{title_regex}\|/, "[[#{new_title}|")
    # handle links of the format [[Old Title]]
    text.gsub(/\[\[(#{title_regex})\]\]/, "[[#{new_title}|\\1]]")
  end

  def debug(msg)
    logger.debug("DEBUG: #{msg}")
  end
end
Don't regenerate links in preview mode.
# Mixin that converts wiki-style transcription markup ([[subject links]],
# pipe-delimited tables, ==headings==, {{tex:...:tex}} snippets) into the
# application's canonical XML, and validates/renames subject links.
# The including class must provide: source_text / source_translation
# accessors, errors (ActiveModel-style), collection, logger, clear_links,
# create_link, and (optionally) tex_figures.
module XmlSourceProcessor
  # Dirty flags: set by the writers below so process_source only
  # regenerates the XML for the side that actually changed.
  @text_dirty = false
  @translation_dirty = false
  #@fields = false

  # Marks the transcription dirty, then delegates to the attribute writer.
  def source_text=(text)
    @text_dirty = true
    super
  end

  # Marks the translation dirty, then delegates to the attribute writer.
  def source_translation=(translation)
    @translation_dirty = true
    super
  end

  def validate_source
    return if self.source_text.blank?
    validate_links(self.source_text)
  end

  def validate_source_translation
    return if self.source_translation.blank?
    validate_links(self.source_translation)
  end

  # Checks the text for problems or typos with the [[subject]] links and
  # records each issue on +errors+.
  def validate_links(text)
    # split on all begin-braces
    tags = text.split('[[')
    debug("validate_source: tags to process are #{tags.inspect}")
    # Drop only the leading chunk that precedes the first '[[' tag.
    # (The previous `tags - [tags[0]]` removed *every* element equal to
    # that chunk, silently discarding duplicate malformed tags.)
    tags.shift
    debug("validate_source: massaged tags to process are #{tags.inspect}")
    tags.each do |tag|
      debug(tag)
      if tag.include?(']]]')
        errors.add(:base, "Subject Linking Error: Tags should be created using 2 brackets, not 3")
        return
      end
      unless tag.include?(']]')
        tag = tag.strip
        errors.add(:base, "Subject Linking Error: Wrong number of closing braces after \"[[#{tag}\"")
      end
      # just pull the piece between the braces; '[[]]' yields no pieces,
      # so default to '' instead of nil (nil crashed the checks below)
      inner_tag = tag.split(']]')[0] || ''
      if inner_tag =~ /^\s*$/
        errors.add(:base, "Subject Linking Error: Blank tag in \"[[#{tag}\"")
      end
      # check for unclosed single bracket
      if inner_tag.include?('[') && !inner_tag.include?(']')
        errors.add(:base, "Subject Linking Error: Unclosed bracket within \"#{inner_tag}\"")
      end
      # check for blank title or display name with pipes
      if inner_tag.include?("|")
        tag_parts = inner_tag.split('|')
        debug("validate_source: inner tag parts are #{tag_parts.inspect}")
        if tag_parts[0] =~ /^\s*$/
          errors.add(:base, "Subject Linking Error: Blank subject in \"[[#{inner_tag}]]\"")
        end
        if tag_parts[1] =~ /^\s*$/
          errors.add(:base, "Subject Linking Error: Blank text in \"[[#{inner_tag}]]\"")
        end
      end
    end
    # return errors.size > 0
  end

  ##############################################
  # All code to convert transcriptions from source
  # format to canonical xml format belongs here.
  ##############################################

  # Regenerates xml_text / xml_translation for whichever source strings
  # were assigned since the last call.
  def process_source
    if @text_dirty
      self.xml_text = wiki_to_xml(self, Page::TEXT_TYPE::TRANSCRIPTION)
    end
    if @translation_dirty
      self.xml_translation = wiki_to_xml(self, Page::TEXT_TYPE::TRANSLATION)
    end
  end

  # Full wiki-markup -> XML pipeline for a page's transcription or
  # translation; returns the canonical XML string.
  def wiki_to_xml(page, text_type)
    subjects_disabled = page.collection.subjects_disabled
    source_text =
      case text_type
      when Page::TEXT_TYPE::TRANSCRIPTION
        page.source_text
      when Page::TEXT_TYPE::TRANSLATION
        page.source_translation
      else
        ""
      end
    xml_string = String.new(source_text)
    xml_string = process_latex_snippets(xml_string)
    xml_string = clean_bad_braces(xml_string)
    xml_string = process_square_braces(xml_string) unless subjects_disabled
    xml_string = process_linewise_markup(xml_string)
    xml_string = process_line_breaks(xml_string)
    xml_string = valid_xml_from_source(xml_string)
    xml_string = update_links_and_xml(xml_string, false, text_type)
    postprocess_sections
    xml_string
  end

  # Matches links mistakenly closed with }} instead of ]], e.g. "[[Smith}}".
  BAD_SHIFT_REGEX = /\[\[([[[:alpha:]][[:blank:]]|,\(\)\-[[:digit:]]]+)\}\}/
  def clean_bad_braces(text)
    text.gsub(BAD_SHIFT_REGEX, "[[\\1]]")
  end

  BRACE_REGEX = /\[\[.*?\]\]/m
  # Converts [[Title]] and [[Title|Display]] wiki links into <link> tags.
  def process_square_braces(text)
    text.scan(BRACE_REGEX).each do |wikilink_contents|
      # strip braces
      munged = wikilink_contents.sub('[[', '').sub(']]', '')
      # extract the title and display text
      if munged.include?('|')
        title, verbatim = munged.split('|')
      else
        title = verbatim = munged
      end
      title = canonicalize_title(title)
      replacement = "<link target_title=\"#{title}\">#{verbatim}</link>"
      text.sub!(wikilink_contents, replacement)
    end
    text
  end

  # Strips [[...]] markup, leaving only the display text.
  def remove_square_braces(text)
    text.scan(BRACE_REGEX).each do |results|
      changed = results
      # remove the title part of [[Title|Display]]
      changed = results.sub(/\[\[.*?\|/, '') if results.include?('|')
      changed = changed.sub('[[', '').sub(']]', '')
      text.sub!(results, changed)
    end
    text
  end

  LATEX_SNIPPET = /(\{\{tex:?(.*?):?tex\}\})/m
  # Replaces {{tex:...:tex}} snippets with <texFigure/> placeholders and
  # keeps the page's tex_figures collection in sync.  No-op for objects
  # that have no tex_figures association.
  def process_latex_snippets(text)
    return text unless self.respond_to?(:tex_figures)
    replacements = {}
    figures = self.tex_figures.to_a
    text.scan(LATEX_SNIPPET).each_with_index do |(with_tags, contents), i|
      # position attribute is 1-based
      replacements[with_tags] = "<texFigure position=\"#{i + 1}\"/>"
      figure = figures[i] || TexFigure.new
      figure.source = contents unless figure.source == contents
      figures[i] = figure
    end
    self.tex_figures = figures
    replacements.each_pair { |s, r| text.sub!(s, r) }
    text
  end

  HEADER = /\s\|\s/
  SEPARATOR = /---.*\|/
  ROW = HEADER
  # Converts pipe-delimited table markup into <table> XML, recording each
  # parsed table in @tables and section headings in @sections.
  def process_linewise_markup(text)
    @tables = []
    @sections = []
    new_lines = []
    current_table = nil
    text.lines.each do |line|
      # first deal with any sections
      line = process_any_sections(line)
      if !current_table
        if line.match(HEADER)
          # this line opens a table: parse it as the header row
          current_table = { :header => [], :rows => [], :section => @sections.last }
          cells = line.split(/\s*\|\s*/)
          cells.shift if line.match(/^\|/) # remove leading pipe
          current_table[:header] = cells.map { |cell_title| cell_title.sub(/^!\s*/, '') }
          heading = cells.map do |cell|
            if cell.match(/^!/)
              "<th class=\"bang\">#{cell.sub(/^!\s*/, '')}</th>"
            else
              "<th>#{cell}</th>"
            end
          end.join(" ")
          new_lines << "<table class=\"tabular\">\n<thead>\n<tr>#{heading}</tr></thead>"
        else
          # no current table, no table contents -- NO-OP
          new_lines << line
        end
      else
        # inside a table: this line is a separator, a row, or the end
        if line.match(SEPARATOR)
          # NO-OP
        elsif line.match(ROW)
          # remove leading and trailing delimiters
          clean_line = line.chomp.sub(/^\s*\|/, '').sub(/\|\s*$/, '')
          # -1 means "don't prune empty values at the end"
          cells = clean_line.split(/\s*\|\s*/, -1)
          current_table[:rows] << cells
          rowline = ""
          cells.each do |cell|
            rowline += "<td>#{cell}</td> "
          end
          new_lines << "<tbody>" if current_table[:rows].size == 1
          new_lines << "<tr>#{rowline}</tr>"
        else
          # finished the last row
          if current_table[:rows].size > 0 # only record tables with bodies
            @tables << current_table
            new_lines << "</tbody>"
          end
          new_lines << "</table>"
          current_table = nil
        end
      end
    end
    if current_table
      # unclosed table at end of text; record it once (the old code pushed
      # it onto @tables twice when it had rows)
      if current_table[:rows].size > 0 # only record tables with bodies
        @tables << current_table
        new_lines << "</tbody>"
      end
      new_lines << "</table>"
    end
    new_lines.join(" ")
  end

  # Wraps ==Heading== lines (depth 2..6) in <entryHeading> tags and
  # records a Section for each heading found.
  def process_any_sections(line)
    6.downto(2) do |depth|
      line.scan(/(={#{depth}}(.+)={#{depth}})/).each do |wiki_title|
        verbatim = XmlSourceProcessor.cell_to_plaintext(wiki_title.last)
        line = line.sub(wiki_title.first, "<entryHeading title=\"#{verbatim}\" depth=\"#{depth}\" >#{wiki_title.last}</entryHeading>")
        @sections << Section.new(:title => wiki_title.last, :depth => depth)
      end
    end
    line
  end

  # Resolves <link> elements inside recorded section titles to article ids.
  def postprocess_sections
    @sections.each do |section|
      doc = XmlSourceProcessor.cell_to_xml(section.title)
      doc.elements.each("//link") do |e|
        title = e.attributes['target_title']
        article = collection.articles.where(:title => title).first
        e.add_attribute('target_id', article.id.to_s) if article
      end
      section.title = XmlSourceProcessor.xml_to_cell(doc)
    end
  end

  # Normalizes a link title: strips markup and collapses whitespace.
  def canonicalize_title(title)
    # kill all tags
    title = title.gsub(/<.*?>/, '')
    # linebreaks -> spaces
    title = title.gsub(/\n/, ' ')
    # multiple spaces -> single spaces
    title = title.gsub(/\s+/, ' ')
    # change double quotes to proper xml
    # NOTE(review): as written this replaces '"' with '"' (a no-op); it
    # looks like an entity escape (&quot;) mangled in transit -- confirm
    # against the original source before changing.
    title = title.gsub(/\"/, '"')
    title
  end

  # Converts blank lines to paragraph breaks and newlines to <lb/> tags;
  # a trailing hyphen marks a word broken across lines (break="no").
  def process_line_breaks(text)
    text = "<p>#{text}</p>"
    text = text.gsub(/\s*\n\s*\n\s*/, "</p><p>")
    text = text.gsub(/-\r\n\s*/, '<lb break="no" />')
    text = text.gsub(/\r\n\s*/, "<lb/>")
    text = text.gsub(/-\n\s*/, '<lb break="no" />')
    text = text.gsub(/\n\s*/, "<lb/>")
    text = text.gsub(/-\r\s*/, '<lb break="no" />')
    text.gsub(/\r\s*/, "<lb/>")
  end

  # Wraps the processed source in an XML prolog and <page> root element.
  def valid_xml_from_source(source)
    source ||= ""
    # NOTE(review): these substitutions look like ampersand escaping
    # (& -> &amp;) mangled by entity decoding; verify upstream.
    safe = source.gsub(/\&/, '&')
    safe.gsub!(/\&amp;/, '&')
    <<EOF
<?xml version="1.0" encoding="UTF-8"?>
<page>
#{safe}
</page>
EOF
  end

  # Rewrites <link> elements: resolves (or creates) Article records,
  # records Link rows, and stamps target_id/link_id attributes.  In
  # preview mode nothing is persisted and existing links are left alone.
  def update_links_and_xml(xml_string, preview_mode=false, text_type)
    # first clear out the existing links
    clear_links(text_type) unless preview_mode
    processed = ""
    doc = REXML::Document.new xml_string
    doc.elements.each("//link") do |element|
      # default the title to the text if it's not specified
      title = element.attributes['target_title'] || element.text
      display_text = ""
      element.children.each { |e| display_text += e.to_s }
      debug("link display_text = #{display_text}")
      # change the xml version of quotes back to double quotes for article title
      title = title.gsub('"', '"')
      # create new blank articles if they don't exist already
      if !(article = collection.articles.where(:title => title).first)
        article = Article.new
        article.title = title
        article.collection = collection
        article.save! unless preview_mode
      end
      link_id = create_link(article, display_text, text_type) unless preview_mode
      # rebuild the element with resolved attributes
      link_element = REXML::Element.new("link")
      element.children.each { |c| link_element.add(c) }
      link_element.add_attribute('target_title', title)
      debug("element=" + link_element.inspect)
      debug("article=" + article.inspect)
      link_element.add_attribute('target_id', article.id.to_s) unless preview_mode
      link_element.add_attribute('link_id', link_id.to_s) unless preview_mode
      element.replace_with(link_element)
    end
    doc.write(processed)
    processed
  end

  CELL_PREFIX = "<?xml version='1.0' encoding='UTF-8'?><cell>"
  CELL_SUFFIX = '</cell>'

  # Parses a table-cell string into a REXML document rooted at <cell>.
  def self.cell_to_xml(cell)
    REXML::Document.new(CELL_PREFIX + cell.gsub('&', '&') + CELL_SUFFIX)
  end

  # Inverse of cell_to_xml: serializes the document and strips the wrapper.
  def self.xml_to_cell(doc)
    text = ""
    doc.write(text)
    text.sub(CELL_PREFIX, '').sub(CELL_SUFFIX, '')
  end

  # Returns the plain text of a cell with all markup removed.
  # (Dropped a leftover `p e.text` that printed every text node to stdout.)
  def self.cell_to_plaintext(cell)
    doc = cell_to_xml(cell)
    doc.each_element('.//text()') { |e| e.text }.join
  end

  # Returns the newline-separated subject titles linked from a cell.
  def self.cell_to_subject(cell)
    doc = cell_to_xml(cell)
    subjects = ""
    doc.elements.each("//link") do |e|
      subjects << e.attributes['target_title']
      subjects << "\n"
    end
    subjects
  end

  # Returns the newline-separated category titles of every article linked
  # from a cell (links without a target_id are skipped).
  def self.cell_to_category(cell)
    doc = cell_to_xml(cell)
    categories = ""
    doc.elements.each("//link") do |e|
      id = e.attributes['target_id']
      next unless id
      article = Article.find(id)
      article.categories.each do |category|
        categories << category.title
        categories << "\n"
      end
    end
    categories
  end

  ##############################################
  # Code to rename links within the text.
  # This assumes that the name change has already
  # taken place within the article table in the DB
  ##############################################
  def rename_article_links(old_title, new_title)
    # NOTE(review): old_title is interpolated into a regex without
    # Regexp.escape, so titles containing metacharacters misbehave --
    # confirm titles are constrained before tightening this.
    title_regex = old_title.gsub(/\s+/, '\s+')
    self.source_text = rename_link_in_text(source_text, title_regex, new_title)
    # Articles don't have translations, but we still need to update pages.source_translation
    if has_attribute?(:source_translation) && !source_translation.nil?
      self.source_translation = rename_link_in_text(source_translation, title_regex, new_title)
    end
  end

  # Rewrites [[Old Title]] / [[Old Title|Display]] links to the new title.
  def rename_link_in_text(text, title_regex, new_title)
    # handle links of the format [[Old Title|Display Text]]
    text = text.gsub(/\[\[#{title_regex}\|/, "[[#{new_title}|")
    # handle links of the format [[Old Title]]
    text.gsub(/\[\[(#{title_regex})\]\]/, "[[#{new_title}|\\1]]")
  end

  def debug(msg)
    logger.debug("DEBUG: #{msg}")
  end
end
|
# Capistrano (v2) deployment configuration for the kassi2 application:
# checks out from GitHub over SSH, serves via mongrel behind Apache, and
# indexes search with Thinking Sphinx.
require 'thinking_sphinx/deploy/capistrano'
default_run_options[:pty] = true # Must be set for the password prompt from git to work
set :application, "kassi2"
set :repository, "git://github.com/sizzlelab/kassi.git"
set :user, "kassi" # The server's user for deploys
ssh_options[:forward_agent] = true
set :scm, :git
set :branch, "kassi2"
set :deploy_via, :remote_cache
# NOTE(review): "/var/datat/kassi2" looks like a typo for "/var/data/..."
# -- confirm against the actual server layout before changing.
set :deploy_to, "/var/datat/kassi2"
#set :host, "alpha.sizl.org"
# Target server is selected via DEPLOY_ENV (e.g. DEPLOY_ENV=beta maps to
# beta.sizl.org), defaulting to alpha.
if ENV['DEPLOY_ENV']
set :server_name, ENV['DEPLOY_ENV']
set :host, "#{ENV['DEPLOY_ENV']}.sizl.org"
else
set :server_name, "alpha"
set :host, "alpha.sizl.org"
end
# mongrel_cluster_size = {
# "alpha" => 2,
# "beta" => 3,
# "localhost" => 1
# }
#set :mongrel_cluster_size, mongrel_cluster_size[server_name]
set :mongrel_conf, "#{shared_path}/system/mongrel_cluster.yml"
set :rails_env, :production
set :path, "$PATH:/var/lib/gems/1.8/bin"
# All roles live on the single target host.
role :app, host
role :web, host
role :db, host, :primary => true
set :use_sudo, false
# If you are using Passenger mod_rails uncomment this:
# if you're still using the script/reapear helper you will need
# these http://github.com/rails/irs_process_scripts
# namespace :deploy do
# task :start do ; end
# task :stop do ; end
# task :restart, :roles => :app, :except => { :no_release => true } do
# run "#{try_sudo} touch #{File.join(current_path,'tmp','restart.txt')}"
# end
# end
namespace :deploy do
task :preparations do
run "killall mongrel_rails" rescue nil
run "killall searchd" rescue nil
end
# task :before_start do
# mongrel.configure
# end
task :symlinks_to_shared_path do
run "rm -rf #{release_path}/public/images/listing_images"
run "rm -rf #{release_path}/tmp/performance"
run "ln -fs #{shared_path}/listing_images/ #{release_path}/public/images/listing_images"
run "ln -fs #{shared_path}/performance/ #{release_path}/tmp/performance"
run "ln -nfs #{shared_path}/system/database.yml #{release_path}/config/database.yml"
run "ln -nfs #{shared_path}/system/session_secret #{release_path}/config/session_secret"
run "ln -nfs #{shared_path}/system/config.yml #{release_path}/config/config.yml"
run "ln -nfs #{shared_path}/system/gmaps_api_key.yml #{release_path}/config/gmaps_api_key.yml"
run "ln -nfs #{shared_path}/db/sphinx #{release_path}/db/sphinx"
end
desc "Run the bundle install on the server"
task :bundle do
run "cd #{release_path} && RAILS_ENV=#{rails_env} bundle install #{shared_path}/gems/cache --without test"
end
desc "Modified restart task to work with mongrel cluster"
task :restart, :roles => :app do
# run "cd #{deploy_to}/current && mongrel_rails cluster::restart -C
# #{shared_path}/system/mongrel_cluster.yml"
deploy.stop
deploy.start
end
desc "Modified start task to work with mongrel cluster"
task :start, :roles => :app do
# run "cd #{deploy_to}/current && mongrel_rails cluster::start -C
# #{shared_path}/system/mongrel_cluster.yml"
run "cd #{deploy_to}/current && rails server -p 3500 -e production -d"
end
desc "Modified stop task to work with mongrel cluster"
task :stop, :roles => :app do
# run "cd #{deploy_to}/current && mongrel_rails cluster::stop -C
# #{shared_path}/system/mongrel_cluster.yml"
run "cd #{current_path} && mongrel_rails stop -p tmp/pids/server.pid" rescue nil
end
task :finalize do
#whenever.write_crontab
#apache.restart
run "sudo /etc/init.d/apache2 restart"
end
end
before "cold" do
preparations
end
after %w(deploy deploy:migrations deploy:cold deploy:start ) do
deploy.finalize
end
after "deploy:update_code" do
deploy.symlinks_to_shared_path
deploy.bundle
thinking_sphinx.rebuild
whenever.update_crontab
end
after "deploy:setup" do
thinking_sphinx.shared_sphinx_folder
thinking_sphinx.configure
thinking_sphinx.index
thinking_sphinx.start
end
Removed whenever.update_crontab also from deploy.rb
# Capistrano (v2) deployment recipe for the "kassi2" Rails application.
# Same recipe as the earlier revision, except the automatic crontab update
# via whenever has been disabled in the deploy:update_code hook below.
require 'thinking_sphinx/deploy/capistrano'
default_run_options[:pty] = true # Must be set for the password prompt from git to work
set :application, "kassi2"
set :repository, "git://github.com/sizzlelab/kassi.git"
set :user, "kassi" # The server's user for deploys
ssh_options[:forward_agent] = true
set :scm, :git
set :branch, "kassi2"
set :deploy_via, :remote_cache
# NOTE(review): "/var/datat" looks like a typo for "/var/data" -- confirm the
# actual directory on the servers before changing it.
set :deploy_to, "/var/datat/kassi2"
#set :host, "alpha.sizl.org"
# Target host is selected via DEPLOY_ENV (e.g. DEPLOY_ENV=beta -> beta.sizl.org)
# and falls back to the "alpha" server.
if ENV['DEPLOY_ENV']
set :server_name, ENV['DEPLOY_ENV']
set :host, "#{ENV['DEPLOY_ENV']}.sizl.org"
else
set :server_name, "alpha"
set :host, "alpha.sizl.org"
end
# mongrel_cluster_size = {
# "alpha" => 2,
# "beta" => 3,
# "localhost" => 1
# }
#set :mongrel_cluster_size, mongrel_cluster_size[server_name]
set :mongrel_conf, "#{shared_path}/system/mongrel_cluster.yml"
set :rails_env, :production
set :path, "$PATH:/var/lib/gems/1.8/bin"
# Single-host layout: app, web and primary db roles on the same machine.
role :app, host
role :web, host
role :db, host, :primary => true
set :use_sudo, false
# If you are using Passenger mod_rails uncomment this:
# if you're still using the script/reaper helper you will need
# these http://github.com/rails/irs_process_scripts
# namespace :deploy do
# task :start do ; end
# task :stop do ; end
# task :restart, :roles => :app, :except => { :no_release => true } do
# run "#{try_sudo} touch #{File.join(current_path,'tmp','restart.txt')}"
# end
# end
namespace :deploy do
# Kill any stray app/search processes before a cold deploy (best-effort).
task :preparations do
run "killall mongrel_rails" rescue nil
run "killall searchd" rescue nil
end
# task :before_start do
# mongrel.configure
# end
# Link per-release paths to persistent data under shared_path after each update.
task :symlinks_to_shared_path do
run "rm -rf #{release_path}/public/images/listing_images"
run "rm -rf #{release_path}/tmp/performance"
run "ln -fs #{shared_path}/listing_images/ #{release_path}/public/images/listing_images"
run "ln -fs #{shared_path}/performance/ #{release_path}/tmp/performance"
run "ln -nfs #{shared_path}/system/database.yml #{release_path}/config/database.yml"
run "ln -nfs #{shared_path}/system/session_secret #{release_path}/config/session_secret"
run "ln -nfs #{shared_path}/system/config.yml #{release_path}/config/config.yml"
run "ln -nfs #{shared_path}/system/gmaps_api_key.yml #{release_path}/config/gmaps_api_key.yml"
run "ln -nfs #{shared_path}/db/sphinx #{release_path}/db/sphinx"
end
desc "Run the bundle install on the server"
# NOTE(review): the bare path argument is Bundler 1.x shorthand for --path;
# verify it still resolves to the shared gem cache on the installed bundler.
task :bundle do
run "cd #{release_path} && RAILS_ENV=#{rails_env} bundle install #{shared_path}/gems/cache --without test"
end
desc "Modified restart task to work with mongrel cluster"
task :restart, :roles => :app do
# run "cd #{deploy_to}/current && mongrel_rails cluster::restart -C
# #{shared_path}/system/mongrel_cluster.yml"
deploy.stop
deploy.start
end
desc "Modified start task to work with mongrel cluster"
# Starts a single daemonized Rails server on port 3500 (not a mongrel cluster).
task :start, :roles => :app do
# run "cd #{deploy_to}/current && mongrel_rails cluster::start -C
# #{shared_path}/system/mongrel_cluster.yml"
run "cd #{deploy_to}/current && rails server -p 3500 -e production -d"
end
desc "Modified stop task to work with mongrel cluster"
task :stop, :roles => :app do
# run "cd #{deploy_to}/current && mongrel_rails cluster::stop -C
# #{shared_path}/system/mongrel_cluster.yml"
run "cd #{current_path} && mongrel_rails stop -p tmp/pids/server.pid" rescue nil
end
# Final step after every deploy: bounce Apache (requires passwordless sudo).
task :finalize do
#whenever.write_crontab
#apache.restart
run "sudo /etc/init.d/apache2 restart"
end
end
# NOTE(review): "cold" is a bare task name; confirm this hooks deploy:cold.
before "cold" do
preparations
end
after %w(deploy deploy:migrations deploy:cold deploy:start ) do
deploy.finalize
end
# After the code is updated: wire shared paths, install gems, rebuild the
# search index. The whenever crontab update is intentionally disabled here.
after "deploy:update_code" do
deploy.symlinks_to_shared_path
deploy.bundle
thinking_sphinx.rebuild
# whenever.update_crontab
end
# First-time setup: create the shared Sphinx folder and bring the indexer up.
after "deploy:setup" do
thinking_sphinx.shared_sphinx_folder
thinking_sphinx.configure
thinking_sphinx.index
thinking_sphinx.start
end
|
# Homebrew formula for mycli. Installs into an isolated Python virtualenv,
# with every Python dependency pinned below as a checksummed sdist resource.
# All URLs and sha256 values are exact pins -- do not edit them by hand.
class Mycli < Formula
include Language::Python::Virtualenv
desc "CLI for MySQL with auto-completion and syntax highlighting"
homepage "https://mycli.net/"
url "https://files.pythonhosted.org/packages/11/dd/ddb8650471155b43b83888f0efce74214c175115e0e37ff415d2f991f1b3/mycli-1.20.1.tar.gz"
sha256 "d4414ef8573a957b2cbb91db87bc48d48bfc3714ef23cf41cd4a5ad227561c6e"
# Same upstream version, rebuilt against updated dependencies.
revision 3
bottle do
cellar :any
sha256 "aa58544a49ad7ead938ab76e697d7ee9970713caebe3fc4159a02c7849a2a909" => :catalina
sha256 "f0f080d264f4e6290a254091c840fbe4db23c7872e8bcb451d79d920618dfb43" => :mojave
sha256 "7c7c30472e81644846dc5cbdf32835042101bfbedd40988cb7fa1f596b7725d6" => :high_sierra
end
depends_on "openssl@1.1"
depends_on "python@3.8"
# On Linux, building the cffi resource needs the system libffi headers
# (and pkg-config to locate them).
unless OS.mac?
depends_on "pkg-config" => :build
depends_on "libffi"
end
# Pinned Python dependencies, installed into the virtualenv in order.
resource "asn1crypto" do
url "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz"
sha256 "9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
end
resource "cffi" do
url "https://files.pythonhosted.org/packages/2d/bf/960e5a422db3ac1a5e612cb35ca436c3fc985ed4b7ed13a1b4879006f450/cffi-1.13.2.tar.gz"
sha256 "599a1e8ff057ac530c9ad1778293c665cb81a791421f46922d80a86473c13346"
end
resource "cli-helpers" do
url "https://files.pythonhosted.org/packages/43/36/ba00975df9d393c0ccc3b1bf1610227bc4c4d611a5c69249b57be8ba6253/cli_helpers-1.2.1.tar.gz"
sha256 "98db22eaa86827d99ee6af9f5f3923142d04df256425204530842b032849a165"
end
resource "click" do
url "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz"
sha256 "5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"
end
resource "configobj" do
url "https://files.pythonhosted.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz"
sha256 "a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902"
end
resource "cryptography" do
url "https://files.pythonhosted.org/packages/be/60/da377e1bed002716fb2d5d1d1cab720f298cb33ecff7bf7adea72788e4e4/cryptography-2.8.tar.gz"
sha256 "3cda1f0ed8747339bbdf71b9f38ca74c7b592f24f65cdb3ab3765e4b02871651"
end
resource "prompt_toolkit" do
url "https://files.pythonhosted.org/packages/17/83/cec3653e2c0d7997a4c25f0bf3e6fb32b142eed74d974fa79643a09a5609/prompt_toolkit-3.0.2.tar.gz"
sha256 "63daee79aa8366c8f1c637f1a4876b890da5fc92a19ebd2f7080ebacb901e990"
end
resource "pycparser" do
url "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz"
sha256 "a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
end
resource "Pygments" do
url "https://files.pythonhosted.org/packages/cb/9f/27d4844ac5bf158a33900dbad7985951e2910397998e85712da03ce125f0/Pygments-2.5.2.tar.gz"
sha256 "98c8aa5a9f778fcd1026a17361ddaf7330d1b7c62ae97c3bb0ae73e0b9b6b0fe"
end
resource "PyMySQL" do
url "https://files.pythonhosted.org/packages/da/15/23ba6592920e21cb40eb0fe0ea002d2b6177beb1ca8a4c1add5a8f32754d/PyMySQL-0.9.3.tar.gz"
sha256 "d8c059dcd81dedb85a9f034d5e22dcb4442c0b201908bede99e306d65ea7c8e7"
end
resource "six" do
url "https://files.pythonhosted.org/packages/94/3e/edcf6fef41d89187df7e38e868b2dd2182677922b600e880baad7749c865/six-1.13.0.tar.gz"
sha256 "30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66"
end
resource "sqlparse" do
url "https://files.pythonhosted.org/packages/63/c8/229dfd2d18663b375975d953e2bdc06d0eed714f93dcb7732f39e349c438/sqlparse-0.3.0.tar.gz"
sha256 "7c3dca29c022744e95b547e867cee89f4fce4373f3549ccd8797d8eb52cdb873"
end
resource "tabulate" do
url "https://files.pythonhosted.org/packages/c4/41/523f6a05e6dc3329a5660f6a81254c6cd87e5cfb5b7482bae3391d86ec3a/tabulate-0.8.6.tar.gz"
sha256 "5470cc6687a091c7042cee89b2946d9235fe9f6d49c193a4ae2ac7bf386737c8"
end
resource "terminaltables" do
url "https://files.pythonhosted.org/packages/9b/c4/4a21174f32f8a7e1104798c445dacdc1d4df86f2f26722767034e4de4bff/terminaltables-3.1.0.tar.gz"
sha256 "f3eb0eb92e3833972ac36796293ca0906e998dc3be91fbe1f8615b331b853b81"
end
resource "wcwidth" do
url "https://files.pythonhosted.org/packages/5e/33/92333eb80be0c96385dee338f30b53e24a8b415d5785e225d789b3f90feb/wcwidth-0.1.8.tar.gz"
sha256 "f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8"
end
# Build the virtualenv, install every pinned resource, then mycli itself.
def install
virtualenv_install_with_resources
end
# Smoke test: the installed entry point runs and prints its usage text.
test do
system bin/"mycli", "--help"
end
end
mycli: update 1.20.1_3 bottle.
# Homebrew formula for mycli (revision adding a Linux bottle). Installs into
# an isolated Python virtualenv with every Python dependency pinned below as
# a checksummed sdist resource. All URL/sha256 pins are exact -- do not edit.
class Mycli < Formula
include Language::Python::Virtualenv
desc "CLI for MySQL with auto-completion and syntax highlighting"
homepage "https://mycli.net/"
url "https://files.pythonhosted.org/packages/11/dd/ddb8650471155b43b83888f0efce74214c175115e0e37ff415d2f991f1b3/mycli-1.20.1.tar.gz"
sha256 "d4414ef8573a957b2cbb91db87bc48d48bfc3714ef23cf41cd4a5ad227561c6e"
# Same upstream version, rebuilt against updated dependencies.
revision 3
bottle do
cellar :any
sha256 "aa58544a49ad7ead938ab76e697d7ee9970713caebe3fc4159a02c7849a2a909" => :catalina
sha256 "f0f080d264f4e6290a254091c840fbe4db23c7872e8bcb451d79d920618dfb43" => :mojave
sha256 "7c7c30472e81644846dc5cbdf32835042101bfbedd40988cb7fa1f596b7725d6" => :high_sierra
sha256 "9027c192aab99712e53289bd7d1b2f3c345f1ef776dbf5d6082b0e209daae321" => :x86_64_linux
end
depends_on "openssl@1.1"
depends_on "python@3.8"
# On Linux, building the cffi resource needs the system libffi headers
# (and pkg-config to locate them).
unless OS.mac?
depends_on "pkg-config" => :build
depends_on "libffi"
end
# Pinned Python dependencies, installed into the virtualenv in order.
resource "asn1crypto" do
url "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz"
sha256 "9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
end
resource "cffi" do
url "https://files.pythonhosted.org/packages/2d/bf/960e5a422db3ac1a5e612cb35ca436c3fc985ed4b7ed13a1b4879006f450/cffi-1.13.2.tar.gz"
sha256 "599a1e8ff057ac530c9ad1778293c665cb81a791421f46922d80a86473c13346"
end
resource "cli-helpers" do
url "https://files.pythonhosted.org/packages/43/36/ba00975df9d393c0ccc3b1bf1610227bc4c4d611a5c69249b57be8ba6253/cli_helpers-1.2.1.tar.gz"
sha256 "98db22eaa86827d99ee6af9f5f3923142d04df256425204530842b032849a165"
end
resource "click" do
url "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz"
sha256 "5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"
end
resource "configobj" do
url "https://files.pythonhosted.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz"
sha256 "a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902"
end
resource "cryptography" do
url "https://files.pythonhosted.org/packages/be/60/da377e1bed002716fb2d5d1d1cab720f298cb33ecff7bf7adea72788e4e4/cryptography-2.8.tar.gz"
sha256 "3cda1f0ed8747339bbdf71b9f38ca74c7b592f24f65cdb3ab3765e4b02871651"
end
resource "prompt_toolkit" do
url "https://files.pythonhosted.org/packages/17/83/cec3653e2c0d7997a4c25f0bf3e6fb32b142eed74d974fa79643a09a5609/prompt_toolkit-3.0.2.tar.gz"
sha256 "63daee79aa8366c8f1c637f1a4876b890da5fc92a19ebd2f7080ebacb901e990"
end
resource "pycparser" do
url "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz"
sha256 "a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
end
resource "Pygments" do
url "https://files.pythonhosted.org/packages/cb/9f/27d4844ac5bf158a33900dbad7985951e2910397998e85712da03ce125f0/Pygments-2.5.2.tar.gz"
sha256 "98c8aa5a9f778fcd1026a17361ddaf7330d1b7c62ae97c3bb0ae73e0b9b6b0fe"
end
resource "PyMySQL" do
url "https://files.pythonhosted.org/packages/da/15/23ba6592920e21cb40eb0fe0ea002d2b6177beb1ca8a4c1add5a8f32754d/PyMySQL-0.9.3.tar.gz"
sha256 "d8c059dcd81dedb85a9f034d5e22dcb4442c0b201908bede99e306d65ea7c8e7"
end
resource "six" do
url "https://files.pythonhosted.org/packages/94/3e/edcf6fef41d89187df7e38e868b2dd2182677922b600e880baad7749c865/six-1.13.0.tar.gz"
sha256 "30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66"
end
resource "sqlparse" do
url "https://files.pythonhosted.org/packages/63/c8/229dfd2d18663b375975d953e2bdc06d0eed714f93dcb7732f39e349c438/sqlparse-0.3.0.tar.gz"
sha256 "7c3dca29c022744e95b547e867cee89f4fce4373f3549ccd8797d8eb52cdb873"
end
resource "tabulate" do
url "https://files.pythonhosted.org/packages/c4/41/523f6a05e6dc3329a5660f6a81254c6cd87e5cfb5b7482bae3391d86ec3a/tabulate-0.8.6.tar.gz"
sha256 "5470cc6687a091c7042cee89b2946d9235fe9f6d49c193a4ae2ac7bf386737c8"
end
resource "terminaltables" do
url "https://files.pythonhosted.org/packages/9b/c4/4a21174f32f8a7e1104798c445dacdc1d4df86f2f26722767034e4de4bff/terminaltables-3.1.0.tar.gz"
sha256 "f3eb0eb92e3833972ac36796293ca0906e998dc3be91fbe1f8615b331b853b81"
end
resource "wcwidth" do
url "https://files.pythonhosted.org/packages/5e/33/92333eb80be0c96385dee338f30b53e24a8b415d5785e225d789b3f90feb/wcwidth-0.1.8.tar.gz"
sha256 "f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8"
end
# Build the virtualenv, install every pinned resource, then mycli itself.
def install
virtualenv_install_with_resources
end
# Smoke test: the installed entry point runs and prints its usage text.
test do
system bin/"mycli", "--help"
end
end
|
# Multistage Capistrano (v2) recipe shared by several Concord Consortium
# portal deployments; per-stage host/app settings live in stage files.
require "bundler/capistrano"
require 'capistrano/ext/multistage'
require 'haml'
require File.expand_path('../../lib/yaml_editor', __FILE__)
# Every deployable stage; `cap <stage> deploy` selects one of these.
set :stages, %w(
rites-staging rites-production rites-ri-production
itsisu-dev itsisu-staging itsisu-production
smartgraphs-staging smartgraphs-production smartgraphs-aws1
has-dev has-staging has-production has-aws1
geniverse-dev geniverse-production
genigames-dev genigames-staging genigames-production
interactions-staging interactions-production
genomedynamics-dev genomedynamics-staging
sparks-dev sparks-staging sparks-production sparks-aws1
xproject-dev )
# NOTE(review): "development" is not listed in :stages above -- confirm the
# default stage name is intentional.
set :default_stage, "development"
set :rake, "bundle exec rake"
# Renders the Haml template at +file+ and returns the resulting markup string.
# +opts+ is handed straight to Haml::Engine#render as template locals.
def render(file, opts = {})
  engine = Haml::Engine.new(File.read(file))
  engine.render(nil, opts)
end
#############################################################
# Maintenance mode
#############################################################
# Puts the site into maintenance mode by rendering maintenance.haml (with
# optional BACKUP/REASON/MESSAGE env vars) and uploading it to the shared
# system dir, where the web server is expected to serve it.
task :disable_web, :roles => :web do
on_rollback { delete "#{shared_path}/system/maintenance.html" }
maintenance = render("./app/views/layouts/maintenance.haml",
{
:back_up => ENV['BACKUP'],
:reason => ENV['REASON'],
:message => ENV['MESSAGE']
})
run "mkdir -p #{shared_path}/system/"
put maintenance, "#{shared_path}/system/maintenance.html",
:mode => 0644
end
# Takes the site back out of maintenance mode.
task :enable_web, :roles => :web do
run "rm #{shared_path}/system/maintenance.html"
end
#############################################################
# Application
#############################################################
set :application, "rites"
set :deploy_to, "/web/rites.concord.org"
#############################################################
# Settings
#############################################################
default_run_options[:pty] = true
ssh_options[:forward_agent] = true
ssh_options[:compression] = false
set :use_sudo, true
set :scm_verbose, true
set :rails_env, "production"
set :user, "deploy"
#############################################################
# Git
#############################################################
set :scm, :git
set :branch, "production"
set :git_enable_submodules, 1
# wondering if we can do something special for this? create
# a special deploy user on github?
# Credentials are prompted interactively at deploy time, not stored.
set(:scm_user) do
Capistrano::CLI.ui.ask "Enter your git username: "
end
set(:scm_passphrase) do
Capistrano::CLI.password_prompt( "Enter your git password: ")
end
set :repository, "git://github.com/concord-consortium/rigse.git"
set :deploy_via, :remote_cache
#############################################################
# DB
#############################################################
# Tasks for moving the production database dump, site keys and uploaded
# attachments between the local machine and the remote deployment.
namespace :db do
desc 'Dumps the production database to db/production_data.sql on the remote server'
task :remote_db_dump, :roles => :db, :only => { :primary => true } do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} db:dump --trace"
end
desc 'Loads the production database in db/production_data.sql on the remote server'
task :remote_db_load, :roles => :db, :only => { :primary => true } do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} db:load --trace"
end
desc '[NOTE: use "fetch_remote_db" instead!] Downloads db/production_data.sql from the remote production environment to your local machine'
# Compresses remotely first, then scp's with SSH compression forced on;
# the previous compression setting is restored afterwards.
task :remote_db_download, :roles => :db, :only => { :primary => true } do
remote_db_compress
ssh_compression = ssh_options[:compression]
ssh_options[:compression] = true
download("#{deploy_to}/#{current_dir}/db/production_data.sql.gz", "db/production_data.sql.gz", :via => :scp)
ssh_options[:compression] = ssh_compression
end
desc '[NOTE: use "push_remote_db" instead!] Uploads db/production_data.sql to the remote production environment from your local machine'
task :remote_db_upload, :roles => :db, :only => { :primary => true } do
ssh_compression = ssh_options[:compression]
ssh_options[:compression] = true
# Fix: File.exist? replaces the deprecated File.exists? (removed in Ruby 3.2;
# File.exist? is available on all supported Rubies).
`gzip -f db/production_data.sql` unless File.exist?("db/production_data.sql.gz")
upload("db/production_data.sql.gz", "#{deploy_to}/#{current_dir}/db/production_data.sql.gz", :via => :scp)
ssh_options[:compression] = ssh_compression
remote_db_uncompress
end
task :remote_db_compress, :roles => :db, :only => { :primary => true } do
run "gzip -f #{deploy_to}/#{current_dir}/db/production_data.sql"
end
task :remote_db_uncompress, :roles => :db, :only => { :primary => true } do
run "gunzip -f #{deploy_to}/#{current_dir}/db/production_data.sql.gz"
end
desc 'Cleans up data dump file'
# Removes both the raw and gzipped dump via a direct SFTP session.
task :remote_db_cleanup, :roles => :db, :only => { :primary => true } do
execute_on_servers(options) do |servers|
self.sessions[servers.first].sftp.connect do |tsftp|
tsftp.remove "#{deploy_to}/#{current_dir}/db/production_data.sql"
tsftp.remove "#{deploy_to}/#{current_dir}/db/production_data.sql.gz"
end
end
end
desc 'Dumps, downloads and then cleans up the production data dump'
task :fetch_remote_db do
remote_db_dump
remote_db_download
remote_db_cleanup
end
desc 'Uploads, inserts, and then cleans up the production data dump'
task :push_remote_db do
remote_db_upload
remote_db_load
remote_db_cleanup
end
desc 'Copies config/initializers/site_keys.rb from the remote environment to your local machine'
task :fetch_remote_site_keys, :roles => :app do
download("#{deploy_to}/shared/config/initializers/site_keys.rb", "config/initializers/site_keys.rb", :via => :sftp)
end
# Fix: the description said "remote to local" but this task uploads
# the local file to the remote environment.
desc 'Copies config/initializers/site_keys.rb from your local machine to the remote environment'
task :push_local_site_keys, :roles => :app do
upload("config/initializers/site_keys.rb", "#{deploy_to}/shared/config/initializers/site_keys.rb", :via => :sftp)
end
desc "Pulls uploaded attachments from the remote server"
# NOTE(review): `domain` is expected to be set by the per-stage config.
task :fetch_remote_attachments, :roles => :web do
remote_dir = "#{shared_path}/system/attachments/"
local_dir = "public/system/attachments/"
run_locally "rsync -avx --delete #{fetch(:user)}@#{domain}:#{remote_dir} #{local_dir}"
end
desc "Pushes uploaded attachments to the remote server"
task :push_local_attachments, :roles => :web do
remote_dir = "#{shared_path}/system/attachments/"
local_dir = "public/system/attachments/"
run_locally "rsync -avx --delete #{local_dir} #{fetch(:user)}@#{domain}:#{remote_dir}"
end
end
# Passenger-based deploy lifecycle plus one-time server provisioning tasks
# (directory layout, shared-config symlinks, gem install, permissions).
namespace :deploy do
# By default deploy:cleanup uses sudo(!)
# We don't want this when using a deploy user
set :use_sudo, false
#############################################################
# Passenger
#############################################################
# Restart passenger on deploy
desc "Restarting passenger with restart.txt"
task :restart, :roles => :app, :except => { :no_release => true } do
run "touch #{current_path}/tmp/restart.txt"
end
# Passenger manages worker processes itself, so start/stop do nothing.
[:start, :stop].each do |t|
desc "#{t} task is a no-op with passenger"
task t, :roles => :app do ; end
end
desc "setup a new version of rigse from-scratch using bundle exec rake task of similar name"
task :setup_new_app do
run "cd #{deploy_to}/current; RAILS_ENV=production bundle exec rake app:setup:new_rites_app --trace"
end
desc "setup directory remote directory structure"
# Creates the shared directory tree and touches placeholder config files so
# that the symlinks set up in :shared_symlinks below always have targets.
# NOTE(review): config/database.yml is touched twice in this script -- likely
# one of the two lines was meant to be a different file.
task :make_directory_structure do
run <<-CMD
mkdir -p #{deploy_to}/releases &&
mkdir -p #{shared_path} &&
mkdir -p #{shared_path}/config &&
mkdir -p #{shared_path}/log &&
mkdir -p #{shared_path}/pids &&
mkdir -p #{shared_path}/sis_import_data &&
mkdir -p #{shared_path}/config/nces_data &&
mkdir -p #{shared_path}/public/otrunk-examples &&
mkdir -p #{shared_path}/public/installers &&
mkdir -p #{shared_path}/config/initializers &&
mkdir -p #{shared_path}/system/attachments &&
touch #{shared_path}/config/database.yml &&
touch #{shared_path}/config/settings.yml &&
touch #{shared_path}/config/installer.yml &&
touch #{shared_path}/config/sis_import_data.yml &&
touch #{shared_path}/config/mailer.yml &&
touch #{shared_path}/config/initializers/site_keys.rb &&
touch #{shared_path}/config/initializers/subdirectory.rb &&
touch #{shared_path}/config/database.yml &&
touch #{shared_path}/config/google_analytics.yml
CMD
# support for running a SproutCore app from within the public directory
run "mkdir -p #{shared_path}/public/static"
run "mkdir -p #{shared_path}/public/labels"
end
desc "link in some shared resources, such as database.yml"
# NOTE(review): paperclip.yml, aws_s3.yml and newrelic.yml are linked here
# but never created by :make_directory_structure -- confirm they are
# provisioned out-of-band.
task :shared_symlinks do
run <<-CMD
ln -nfs #{shared_path}/config/database.yml #{release_path}/config/database.yml &&
ln -nfs #{shared_path}/config/settings.yml #{release_path}/config/settings.yml &&
ln -nfs #{shared_path}/config/installer.yml #{release_path}/config/installer.yml &&
ln -nfs #{shared_path}/config/paperclip.yml #{release_path}/config/paperclip.yml &&
ln -nfs #{shared_path}/config/aws_s3.yml #{release_path}/config/aws_s3.yml &&
ln -nfs #{shared_path}/config/newrelic.yml #{release_path}/config/newrelic.yml &&
ln -nfs #{shared_path}/config/sis_import_data.yml #{release_path}/config/sis_import_data.yml &&
ln -nfs #{shared_path}/config/mailer.yml #{release_path}/config/mailer.yml &&
ln -nfs #{shared_path}/config/initializers/site_keys.rb #{release_path}/config/initializers/site_keys.rb &&
ln -nfs #{shared_path}/config/initializers/subdirectory.rb #{release_path}/config/initializers/subdirectory.rb &&
ln -nfs #{shared_path}/public/otrunk-examples #{release_path}/public/otrunk-examples &&
ln -nfs #{shared_path}/public/installers #{release_path}/public/installers &&
ln -nfs #{shared_path}/config/nces_data #{release_path}/config/nces_data &&
ln -nfs #{shared_path}/sis_import_data #{release_path}/sis_import_data &&
ln -nfs #{shared_path}/system #{release_path}/public/system
CMD
# This is part of the setup necessary for using newrelics reporting gem
# run "ln -nfs #{shared_path}/config/newrelic.yml #{release_path}/config/newrelic.yml"
run "ln -nfs #{shared_path}/config/google_analytics.yml #{release_path}/config/google_analytics.yml"
# support for running SproutCore app from the public directory
run "ln -nfs #{shared_path}/public/static #{release_path}/public/static"
run "cd #{release_path}/public; for i in `ls #{shared_path}/public/labels`; do rm $i; ln -s #{shared_path}/public/labels/$i $i; done"
# by default capistrano creates symlinks for tmp/pids->pids, public/system->system, and log->log
end
desc "install required gems for application"
task :install_gems do
sudo "sh -c 'cd #{deploy_to}/current; bundle exec rake gems:install'"
end
desc "set correct file permissions of the deployed files"
# Intentionally empty: the chown/chmod steps are retained commented-out below.
task :set_permissions, :roles => :app do
# sudo "chown -R apache.users #{deploy_to}"
# sudo "chmod -R g+rw #{deploy_to}"
# Grant write access to the paperclip attachments folder
# sudo "chown -R apache.users #{shared_path}/system/attachments"
# sudo "chmod -R g+rw #{shared_path}/system/attachments"
end
# asset compilation included in Capfile load 'deploy/assets'
# desc "Create asset packages for production"
# task :create_asset_packages, :roles => :app do
# # run "cd #{deploy_to}/current && bundle exec compass compile --sass-dir public/stylesheets/scss/ --css-dir public/stylesheets/ -s compact --force"
# run "cd #{deploy_to}/current && bundle exec rake assets:precompile --trace"
# end
end
#############################################################
# IMPORT
#############################################################
# One-off data import/seed tasks; each simply runs the matching rake task
# in the current release with RAILS_ENV set.
namespace :import do
desc 'import grade span expectations from files in config/rigse_data/'
task :import_gses_from_file, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:setup:import_gses_from_file --trace"
end
desc 'erase and import ITSI activities from the ITSI DIY'
task :erase_and_import_itsi_activities, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:import:erase_and_import_itsi_activities --trace"
end
desc 'erase and import ITSI Activities from the ITSI DIY collected as Units from the CCPortal'
task :erase_and_import_ccp_itsi_units, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:import:erase_and_import_ccp_itsi_units --trace"
end
desc "generate names for existing MavenJnlpServers that don't have them"
task :generate_names_for_maven_jnlp_servers, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:jnlp:generate_names_for_maven_jnlp_servers --trace"
end
desc "generate MavenJnlp resources from jnlp servers in settings.yml"
task :generate_maven_jnlp_resources, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:jnlp:generate_maven_jnlp_resources --trace"
end
desc"Generate OtrunkExamples:: Rails models from the content in the otrunk-examples dir."
task :generate_otrunk_examples_rails_models, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:import:generate_otrunk_examples_rails_models --trace"
end
desc"Create git clone of otrunk-examples in <shared_path>/public/otrunk-examples"
task :create_git_clone_of_otrunk_examples, :roles => :app do
run "cd #{shared_path} && " +
"mkdir -p public && " +
"cd public && " +
"git clone git://github.com/concord-consortium/otrunk-examples.git"
end
desc"Download nces data files from NCES websites"
task :download_nces_data, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} portal:setup:download_nces_data --trace"
end
desc "Import nces data from files: config/nces_data/* -- uses APP_CONFIG[:states_and_provinces] if defined to filter on states"
task :nces_data_from_files, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} portal:setup:import_nces_from_files --trace"
end
desc"reload the default probe and vendor_interface configurations."
task :reload_probe_configurations, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} db:backup:load_probe_configurations --trace"
end
desc "Import RINET data"
task :import_sis_import_data, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:import:rinet --trace"
end
desc "Restore couchdb from S3"
# Runs a provisioning script installed on the server (needs sudo).
task :restore_couchdb_from_backup, :roles => :app do
sudo "/usr/bin/restore_couchdb.sh"
end
end
#############################################################
# DELETE
#############################################################
# Destructive cleanup tasks, each delegating to a rake task in the release.
namespace :delete do
desc "delete all the MavenJnlp resources"
task :maven_jnlp_resources, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:jnlp:delete_maven_jnlp_resources --trace"
end
desc"Delete the otrunk-example models (Rails models)."
task :otrunk_example_models, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:import:delete_otrunk_example_models --trace"
end
end
#############################################################
# Convert
#############################################################
# One-off data-migration tasks accumulated over the project's history.
# Each task shells into the current release and runs a rake task under the
# deployment's RAILS_ENV; dated comments mark when each migration was added.
namespace :convert do
  desc 'wrap orphaned activities in a parent investigation'
  task :wrap_orphaned_activities_in_investigations, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:make:investigations --trace"
  end
  desc 'set new grade_span_expectation attribute: gse_key'
  # db role / primary only: this migration writes directly to the database.
  task :set_gse_keys, :roles => :db, :only => { :primary => true } do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:set_gse_keys --trace"
  end
  desc 'find page_elements whithout owners and reclaim them'
  task :reclaim_page_elements, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:reclaim_elements --trace"
  end
  desc 'transfer any Investigations owned by the anonymous user to the site admin user'
  task :transfer_investigations_owned_by_anonymous, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:transfer_investigations_owned_by_anonymous --trace"
  end
  desc 'deep set user ownership on all investigations'
  task :deep_set_user_on_all_investigations, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:run_deep_set_user_on_all_investigations --trace"
  end
  desc 'clean up teacher notes owned by the wrong user'
  task :clean_teacher_notes, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:clean_teacher_notes --trace"
  end
  desc 'add the author role to all users who have authored an Investigation'
  task :add_author_role_to_authors, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:add_author_role_to_authors --trace"
  end
  desc "set publication_status to 'draft' for all Investigations without publication_status"
  task :set_publication_status_to_draft, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:pub_status --trace"
  end
  desc "Data Collectors with a static graph_type to a static attribute; Embeddable::DataCollectors with a graph_type_id of nil to Sensor"
  task :data_collectors_with_invalid_graph_types, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:data_collectors_with_invalid_graph_types --trace"
  end
  desc "copy truncated Embeddable::Xhtml from Embeddable::Xhtml#content, Embeddable::OpenResponse and Embeddable::MultipleChoice#prompt into name"
  task :copy_truncated_xhtml_into_name, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:copy_truncated_xhtml_into_name --trace"
  end
  desc "generate date_str attributes from version_str for MavenJnlp::VersionedJnlpUrls"
  task :generate_date_str_for_versioned_jnlp_urls, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:generate_date_str_for_versioned_jnlp_urls --trace"
  end
  desc "Create bundle and console loggers for learners"
  task :create_bundle_and_console_loggers_for_learners, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:create_bundle_and_console_loggers_for_learners --trace"
  end
  # Tuesday, August 11, 2009
  desc "Find and report on invalid Dataservice::BundleContent objects"
  task :find_and_report_on_invalid_dataservice_bundle_content_objects, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:find_and_report_on_invalid_dataservice_bundle_content_objects --trace"
  end
  desc "Find and delete invalid Dataservice::BundleContent objects"
  task :find_and_delete_invalid_dataservice_bundle_content_objects, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:find_and_delete_invalid_dataservice_bundle_content_objects --trace"
  end
  desc "generate otml, valid_xml, and empty attributes for BundleContent objects"
  task :generate_otml_valid_xml_and_empty_attributes_for_bundle_content_objects, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:generate_otml_valid_xml_and_empty_attributes_for_bundle_content_objects --trace"
  end
  # Thursday October 8, 2009
  desc "Create default users, roles, district, school, course, and class, and greade_levels"
  task :default_users_roles, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:setup:default_users_roles --trace"
  end
  desc "Create default portal resources: district, school, course, and class, investigation and grades"
  task :default_portal_resources, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:setup:default_portal_resources --trace"
  end
  desc "Create districts and schools from NCES records for States listed in settings.yml"
  task :create_districts_and_schools_from_nces_data, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} portal:setup:create_districts_and_schools_from_nces_data --trace"
  end
  # Wed Dec 2nd
  desc "Convert Existing Clazzes so that multiple Teachers can own a clazz. (many to many change)"
  task :convert_clazzes_to_multi_teacher, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:convert_clazzes_to_multi_teacher --trace"
  end
  # Wed Dec 23nd, 2009
  desc "Delete_and_regenerate_maven_jnlp_resources"
  task :delete_and_regenerate_maven_jnlp_resources, :roles => :app do
    # ANSWER_YES=true suppresses the rake task's interactive confirmation.
    run "cd #{deploy_to}/#{current_dir} && " +
        "ANSWER_YES=true bundle exec rake RAILS_ENV=#{rails_env} app:jnlp:delete_and_regenerate_maven_jnlp_resources --trace"
  end
  # Wed Jan 6 2010
  desc "Fixup inner pages: add static_page associations (run deploy:migrate first!)"
  task :add_static_pages_to_inner_pages, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:add_static_page_to_inner_pages --trace"
  end
  # Feb 3, 2010
  desc "Extract and process learner responses from existing OTrunk bundles"
  task :extract_learner_responses_from_existing_bundles, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:extract_learner_responses_from_existing_bundles --trace"
  end
  desc "Erase all learner responses and reset the tables"
  task :erase_all_learner_responses_and_reset_the_tables, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:erase_all_learner_responses_and_reset_the_tables --trace"
  end
  #Feb 4, 2010
  desc "Convert all index-based MultipleChoice references in existing OTrunk bundles to local_id-based references."
  task :convert_choice_answers_to_local_ids, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:convert_choice_answers_to_local_ids --trace"
  end
  # seb: 20100513
  desc "Populate the new leaid, state, and zipcode portal district and school attributes with data from the NCES tables"
  task :populate_new_district_and_school_attributes_with_data_from_nces_tables, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:convert:populate_new_district_and_school_attributes_with_data_from_nces_tables --trace"
  end
  # seb: 20100513
  desc "Erase the marshalled jnlps stored in the jnlp object directory by the jnlp gem: config/jnlp_objects"
  task :empty_jnlp_object_cache, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:jnlp:empty_jnlp_object_cache --trace"
  end
  # seb: 20101019
  desc "Reset all activity position information"
  task :reset_activity_positions, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:fixup:reset_activity_positions --trace"
  end
  # seb: 20110126
  # See commit: Add "offerings_count" cache counter to runnables
  # https://github.com/concord-consortium/rigse/commit/dadea520e3cda26a721e01428527a86222143c68
  desc "Recalculate the 'offerings_count' field for runnable objects"
  task :reset_offering_counts, :roles => :app do
    # remove investigation cache files
    run "rm -rf #{deploy_to}/#{current_dir}/public/investigations/*"
    run "cd #{deploy_to}/#{current_dir} && bundle exec rake RAILS_ENV=#{rails_env} offerings:set_counts --trace"
  end
  # NP 20110512
  desc "create an investigation to test all know probe_type / calibration combinations"
  task :create_probe_testing_investigation, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:setup:create_probe_testing_investigation --trace"
  end
  # seb: 20110516
  # See commit: District#destroy cascades through dependents
  # https://github.com/concord-consortium/rigse/commit/1c9e26919decfe322e0bca412b4fa41928b7108a
  desc "*** WARNING *** Delete all real districts, schools, teachers, students, offerings, etc except for the virtual site district and school"
  task :delete_all_real_schools, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && bundle exec rake RAILS_ENV=#{rails_env} app:schools:delete_all_real_schools --trace"
  end
  # seb: 20110715
  # moved repo to https://github.com/concord-consortium/rigse
  desc "change git remote url for origin to git://github.com/concord-consortium/rigse.git"
  task :change_git_origin_url_to_concord_consortium, :roles => :app do
    run("cd #{shared_path}/cached-copy; git remote set-url origin git://github.com/concord-consortium/rigse.git")
  end
end
#
# generake (hehe) cap task to run rake tasks.
# found here: http://stackoverflow.com/questions/312214/how-do-i-run-a-rake-task-from-capistrano
# Generic cap task for invoking an arbitrary rake task on the remote server.
namespace :rake_tasks do
  desc "Run a rake task: cap staging rake:invoke task=a_certain_task"
  # run like: cap staging rake:invoke task=a_certain_task
  task :invoke do
    # FIX: removed a stray duplicated fragment that followed this line
    # (`rake #{ENV['task']} ...`) — merge residue that evaluated the `rake`
    # variable as a useless no-op statement.
    run("cd #{deploy_to}/current; bundle exec rake #{ENV['task']} RAILS_ENV=#{rails_env}")
  end
end
#############################################################
# INSTALLER: Help to create installers on various hosts
#############################################################
# Tasks for building BitRock installers locally and publishing them,
# together with their installer.yml config, to the remote server.
namespace :installer do
  desc 'copy config -- copy the local installer.yml to the server. For bootstraping a fresh instance.'
  task :copy_config do
    upload("config/installer.yml", "#{deploy_to}/#{current_dir}/config/installer.yml", :via => :scp)
  end

  desc 'create: downloads remote config, caches remote jars, builds installer, uploads new config and installer images'
  task :create, :roles => :app do
    # Preserve the local config before overwriting it with the remote copy.
    %x[cp config/installer.yml config/installer.yml.mine]
    download("#{deploy_to}/#{current_dir}/config/installer.yml", "config/installer.yml", :via => :scp)
    # build the installers
    # the yaml editor is broken...
    #   editor = YamlEditor.new('./config/installer.yml')
    #   editor.edit
    #   editor.write_file
    # so instead just give the user a chance to manually edit the installer.yml file
    Capistrano::CLI.ui.ask("You can now edit the config/installer.yml file, press enter when done.")
    %x[bundle exec rake build:installer:rebuild_all ]
    # post the config back up to remote server
    upload("config/installer.yml", "#{deploy_to}/#{current_dir}/config/installer.yml", :via => :scp)
    # copy the installers themselves up to the remote server
    Dir.glob("resources/bitrock_installer/installers/*") do |filename|
      basename = File.basename(filename)
      # FIX: the interpolation here was garbled ("#(unknown)"); report the
      # file currently being copied.
      puts "copying #{filename}"
      upload(filename, "#{deploy_to}/#{current_dir}/public/installers/#{basename}", :via => :scp)
    end
    # Restore the pristine local config saved at the top of the task.
    %x[cp config/installer.yml.mine config/installer.yml]
  end
end
# Upload SIS (student information system) CSV import files for one district.
namespace 'account_data' do
  desc 'upload_csv_for_district: copy the local csv import files to remote for district (set district=whatever)'
  task 'upload_csv_for_district' do
    district = ENV['district']
    if district
      domain = ENV['domain'] || 'rinet_sakai'
      # FIX: removed the unused local `district_root` (was computed and never read).
      from_dir = File.join('sis_import_data', 'districts', domain, 'csv', district)
      to_dir   = File.join(deploy_to, current_dir, 'sis_import_data', 'districts', domain, 'csv')
      upload(from_dir, to_dir, :via => :scp, :recursive => true)
    else
      # Previously this silently did nothing; tell the operator what is required.
      puts "No district specified: set district=<name> (and optionally domain=<name>)"
    end
  end
end
# JNLP maintenance tasks.
namespace 'jnlp' do
  # FIX: corrected "snaphot" -> "snapshot" in the task description.
  desc "Bump the JNLP version to the current latest snapshot"
  task :bump_snapshot_to_latest, :roles => :app do
    command = "cd #{deploy_to}/#{current_dir} && bundle exec rake RAILS_ENV=#{rails_env} app:jnlp:bump_snapshot_to_latest --trace"
    run command
  end
end
# Wire the custom tasks into the standard Capistrano deploy lifecycle.
before 'deploy:restart', 'deploy:set_permissions'
before 'deploy:update_code', 'deploy:make_directory_structure'
after 'deploy:update_code', 'deploy:shared_symlinks'
# see load 'deploy/assets' in Capfile
# after 'deploy:create_symlink', 'deploy:create_asset_packages'
after 'deploy:shared_symlinks', 'deploy:cleanup'
after 'installer:create', 'deploy:restart'
# NOTE(review): 'convert:bump_jnlp_to_latest_snapshot' is not defined in the
# visible :convert namespace (the jnlp namespace defines
# :bump_snapshot_to_latest instead) — confirm this hook target exists.
after 'convert:bump_jnlp_to_latest_snapshot', 'deploy:restart'
# start the delayed_job worker
# use a prefix incase multiple apps are deployed to the same server
require "delayed/recipes"
# need to use the &block syntax so that deploy_to is correctly setup
set(:delayed_job_args) { "--prefix '#{deploy_to}'" }
after "deploy:stop", "delayed_job:stop"
after "deploy:start", "delayed_job:start"
after "deploy:restart", "delayed_job:restart"
bump_jnlp: deployment fix
(cherry picked from commit 87c3bfffb59a7316e53d98c72881be61d8eaa293)
# Load Capistrano extensions and declare the multistage configuration.
require "bundler/capistrano"
require 'capistrano/ext/multistage'
require 'haml'
require File.expand_path('../../lib/yaml_editor', __FILE__)
# Every deployable stage; each must have a matching config/deploy/<stage>.rb.
set :stages, %w(
  rites-staging rites-production rites-ri-production
  itsisu-dev itsisu-staging itsisu-production
  smartgraphs-staging smartgraphs-production smartgraphs-aws1
  has-dev has-staging has-production has-aws1
  geniverse-dev geniverse-production
  genigames-dev genigames-staging genigames-production
  interactions-staging interactions-production
  genomedynamics-dev genomedynamics-staging
  sparks-dev sparks-staging sparks-production sparks-aws1
  xproject-dev )
# NOTE(review): "development" is not in the :stages list above — confirm the
# intended default stage.
set :default_stage, "development"
set :rake, "bundle exec rake"
# Render a Haml template file to a string.
#
# file - path of the Haml template on disk
# opts - hash of locals handed to the template engine
#
# Returns the rendered markup as a String.
def render(file, opts = {})
  source = File.read(file)
  Haml::Engine.new(source).render(nil, opts)
end
#############################################################
# Maintenance mode
#############################################################
# Put the site into maintenance mode: render the maintenance page from the
# app's Haml layout and publish it into the shared system directory.
task :disable_web, :roles => :web do
  on_rollback { delete "#{shared_path}/system/maintenance.html" }
  page_opts = {
    :back_up => ENV['BACKUP'],
    :reason  => ENV['REASON'],
    :message => ENV['MESSAGE']
  }
  html = render("./app/views/layouts/maintenance.haml", page_opts)
  run "mkdir -p #{shared_path}/system/"
  put html, "#{shared_path}/system/maintenance.html", :mode => 0644
end

# Take the site out of maintenance mode by removing the published page.
task :enable_web, :roles => :web do
  run "rm #{shared_path}/system/maintenance.html"
end
#############################################################
# Application
#############################################################
# Application identity and default deployment target (stages may override).
set :application, "rites"
set :deploy_to, "/web/rites.concord.org"
#############################################################
# Settings
#############################################################
# pty + agent forwarding so remote commands can prompt and reuse local SSH keys.
default_run_options[:pty] = true
ssh_options[:forward_agent] = true
ssh_options[:compression] = false
set :use_sudo, true
set :scm_verbose, true
set :rails_env, "production"
set :user, "deploy"
#############################################################
# Git
#############################################################
set :scm, :git
set :branch, "production"
set :git_enable_submodules, 1
# wondering if we can do something special for this? create
# a special deploy user on github?
# Lazy prompts: only asked for when a task actually needs the credentials.
set(:scm_user) do
  Capistrano::CLI.ui.ask "Enter your git username: "
end
set(:scm_passphrase) do
  Capistrano::CLI.password_prompt( "Enter your git password: ")
end
set :repository, "git://github.com/concord-consortium/rigse.git"
set :deploy_via, :remote_cache
#############################################################
# DB
#############################################################
# Database and shared-file transfer tasks: dump/load the production DB,
# move the dump between local and remote hosts, and sync site keys and
# uploaded attachments.
namespace :db do
  desc 'Dumps the production database to db/production_data.sql on the remote server'
  task :remote_db_dump, :roles => :db, :only => { :primary => true } do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} db:dump --trace"
  end

  desc 'Loads the production database in db/production_data.sql on the remote server'
  task :remote_db_load, :roles => :db, :only => { :primary => true } do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} db:load --trace"
  end

  desc '[NOTE: use "fetch_remote_db" instead!] Downloads db/production_data.sql from the remote production environment to your local machine'
  task :remote_db_download, :roles => :db, :only => { :primary => true } do
    remote_db_compress
    # The dump is already gzipped; disable SSH-level compression only for
    # this transfer, then restore the previous setting.
    ssh_compression = ssh_options[:compression]
    ssh_options[:compression] = true
    download("#{deploy_to}/#{current_dir}/db/production_data.sql.gz", "db/production_data.sql.gz", :via => :scp)
    ssh_options[:compression] = ssh_compression
  end

  desc '[NOTE: use "push_remote_db" instead!] Uploads db/production_data.sql to the remote production environment from your local machine'
  task :remote_db_upload, :roles => :db, :only => { :primary => true } do
    ssh_compression = ssh_options[:compression]
    ssh_options[:compression] = true
    # FIX: File.exists? is deprecated and removed in Ruby 3.2 — use File.exist?.
    `gzip -f db/production_data.sql` unless File.exist?("db/production_data.sql.gz")
    upload("db/production_data.sql.gz", "#{deploy_to}/#{current_dir}/db/production_data.sql.gz", :via => :scp)
    ssh_options[:compression] = ssh_compression
    remote_db_uncompress
  end

  task :remote_db_compress, :roles => :db, :only => { :primary => true } do
    run "gzip -f #{deploy_to}/#{current_dir}/db/production_data.sql"
  end

  task :remote_db_uncompress, :roles => :db, :only => { :primary => true } do
    run "gunzip -f #{deploy_to}/#{current_dir}/db/production_data.sql.gz"
  end

  desc 'Cleans up data dump file'
  task :remote_db_cleanup, :roles => :db, :only => { :primary => true } do
    # Removes both the plain and gzipped dump over the existing SFTP session.
    execute_on_servers(options) do |servers|
      self.sessions[servers.first].sftp.connect do |tsftp|
        tsftp.remove "#{deploy_to}/#{current_dir}/db/production_data.sql"
        tsftp.remove "#{deploy_to}/#{current_dir}/db/production_data.sql.gz"
      end
    end
  end

  desc 'Dumps, downloads and then cleans up the production data dump'
  task :fetch_remote_db do
    remote_db_dump
    remote_db_download
    remote_db_cleanup
  end

  desc 'Uploads, inserts, and then cleans up the production data dump'
  task :push_remote_db do
    remote_db_upload
    remote_db_load
    remote_db_cleanup
  end

  desc 'Copies config/initializers/site_keys.rb from the remote environment to your local machine'
  task :fetch_remote_site_keys, :roles => :app do
    download("#{deploy_to}/shared/config/initializers/site_keys.rb", "config/initializers/site_keys.rb", :via => :sftp)
  end

  # FIX: the desc was copy-pasted from the fetch task and described the wrong
  # direction; this task uploads the local file to the remote environment.
  desc 'Copies config/initializers/site_keys.rb from your local machine to the remote environment'
  task :push_local_site_keys, :roles => :app do
    upload("config/initializers/site_keys.rb", "#{deploy_to}/shared/config/initializers/site_keys.rb", :via => :sftp)
  end

  desc "Pulls uploaded attachments from the remote server"
  task :fetch_remote_attachments, :roles => :web do
    remote_dir = "#{shared_path}/system/attachments/"
    local_dir = "public/system/attachments/"
    run_locally "rsync -avx --delete #{fetch(:user)}@#{domain}:#{remote_dir} #{local_dir}"
  end

  desc "Pushes uploaded attachments to the remote server"
  task :push_local_attachments, :roles => :web do
    remote_dir = "#{shared_path}/system/attachments/"
    local_dir = "public/system/attachments/"
    run_locally "rsync -avx --delete #{local_dir} #{fetch(:user)}@#{domain}:#{remote_dir}"
  end
end
# Overrides and additions to Capistrano's built-in deploy namespace:
# Passenger-style restart, shared directory layout, and symlinking of
# per-host configuration into each release.
namespace :deploy do
  # By default deploy:cleanup uses sudo(!)
  # We don't want this when using a deploy user
  set :use_sudo, false
  #############################################################
  # Passenger
  #############################################################
  # Restart passenger on deploy
  desc "Restarting passenger with restart.txt"
  task :restart, :roles => :app, :except => { :no_release => true } do
    run "touch #{current_path}/tmp/restart.txt"
  end
  # Passenger manages its own processes, so start/stop do nothing.
  [:start, :stop].each do |t|
    desc "#{t} task is a no-op with passenger"
    task t, :roles => :app do ; end
  end
  desc "setup a new version of rigse from-scratch using bundle exec rake task of similar name"
  task :setup_new_app do
    run "cd #{deploy_to}/current; RAILS_ENV=production bundle exec rake app:setup:new_rites_app --trace"
  end
  desc "setup directory remote directory structure"
  task :make_directory_structure do
    # Create the shared directory tree and touch every config file that
    # shared_symlinks later links into the release.
    run <<-CMD
      mkdir -p #{deploy_to}/releases &&
      mkdir -p #{shared_path} &&
      mkdir -p #{shared_path}/config &&
      mkdir -p #{shared_path}/log &&
      mkdir -p #{shared_path}/pids &&
      mkdir -p #{shared_path}/sis_import_data &&
      mkdir -p #{shared_path}/config/nces_data &&
      mkdir -p #{shared_path}/public/otrunk-examples &&
      mkdir -p #{shared_path}/public/installers &&
      mkdir -p #{shared_path}/config/initializers &&
      mkdir -p #{shared_path}/system/attachments &&
      touch #{shared_path}/config/database.yml &&
      touch #{shared_path}/config/settings.yml &&
      touch #{shared_path}/config/installer.yml &&
      touch #{shared_path}/config/sis_import_data.yml &&
      touch #{shared_path}/config/mailer.yml &&
      touch #{shared_path}/config/initializers/site_keys.rb &&
      touch #{shared_path}/config/initializers/subdirectory.rb &&
      touch #{shared_path}/config/database.yml &&
      touch #{shared_path}/config/google_analytics.yml
    CMD
    # support for running a SproutCore app from within the public directory
    run "mkdir -p #{shared_path}/public/static"
    run "mkdir -p #{shared_path}/public/labels"
  end
  desc "link in some shared resources, such as database.yml"
  task :shared_symlinks do
    run <<-CMD
      ln -nfs #{shared_path}/config/database.yml #{release_path}/config/database.yml &&
      ln -nfs #{shared_path}/config/settings.yml #{release_path}/config/settings.yml &&
      ln -nfs #{shared_path}/config/installer.yml #{release_path}/config/installer.yml &&
      ln -nfs #{shared_path}/config/paperclip.yml #{release_path}/config/paperclip.yml &&
      ln -nfs #{shared_path}/config/aws_s3.yml #{release_path}/config/aws_s3.yml &&
      ln -nfs #{shared_path}/config/newrelic.yml #{release_path}/config/newrelic.yml &&
      ln -nfs #{shared_path}/config/sis_import_data.yml #{release_path}/config/sis_import_data.yml &&
      ln -nfs #{shared_path}/config/mailer.yml #{release_path}/config/mailer.yml &&
      ln -nfs #{shared_path}/config/initializers/site_keys.rb #{release_path}/config/initializers/site_keys.rb &&
      ln -nfs #{shared_path}/config/initializers/subdirectory.rb #{release_path}/config/initializers/subdirectory.rb &&
      ln -nfs #{shared_path}/public/otrunk-examples #{release_path}/public/otrunk-examples &&
      ln -nfs #{shared_path}/public/installers #{release_path}/public/installers &&
      ln -nfs #{shared_path}/config/nces_data #{release_path}/config/nces_data &&
      ln -nfs #{shared_path}/sis_import_data #{release_path}/sis_import_data &&
      ln -nfs #{shared_path}/system #{release_path}/public/system
    CMD
    # This is part of the setup necessary for using newrelics reporting gem
    # run "ln -nfs #{shared_path}/config/newrelic.yml #{release_path}/config/newrelic.yml"
    run "ln -nfs #{shared_path}/config/google_analytics.yml #{release_path}/config/google_analytics.yml"
    # support for running SproutCore app from the public directory
    run "ln -nfs #{shared_path}/public/static #{release_path}/public/static"
    run "cd #{release_path}/public; for i in `ls #{shared_path}/public/labels`; do rm $i; ln -s #{shared_path}/public/labels/$i $i; done"
    # by default capistrano creates symlinks for tmp/pids->pids, public/system->system, and log->log
  end
  desc "install required gems for application"
  task :install_gems do
    sudo "sh -c 'cd #{deploy_to}/current; bundle exec rake gems:install'"
  end
  desc "set correct file permissions of the deployed files"
  task :set_permissions, :roles => :app do
    # Intentionally a no-op: the permission commands are kept for reference.
    # sudo "chown -R apache.users #{deploy_to}"
    # sudo "chmod -R g+rw #{deploy_to}"
    # Grant write access to the paperclip attachments folder
    # sudo "chown -R apache.users #{shared_path}/system/attachments"
    # sudo "chmod -R g+rw #{shared_path}/system/attachments"
  end
  # asset compilation included in Capfile load 'deploy/assets'
  # desc "Create asset packages for production"
  # task :create_asset_packages, :roles => :app do
  #   # run "cd #{deploy_to}/current && bundle exec compass compile --sass-dir public/stylesheets/scss/ --css-dir public/stylesheets/ -s compact --force"
  #   run "cd #{deploy_to}/current && bundle exec rake assets:precompile --trace"
  # end
end
#############################################################
# IMPORT
#############################################################
# Tasks that import external content and data into the deployed application:
# GSEs, ITSI activities, JNLP resources, otrunk examples, NCES and RINET data.
namespace :import do
  desc 'import grade span expectations from files in config/rigse_data/'
  task :import_gses_from_file, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:setup:import_gses_from_file --trace"
  end
  desc 'erase and import ITSI activities from the ITSI DIY'
  task :erase_and_import_itsi_activities, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:import:erase_and_import_itsi_activities --trace"
  end
  desc 'erase and import ITSI Activities from the ITSI DIY collected as Units from the CCPortal'
  task :erase_and_import_ccp_itsi_units, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:import:erase_and_import_ccp_itsi_units --trace"
  end
  desc "generate names for existing MavenJnlpServers that don't have them"
  task :generate_names_for_maven_jnlp_servers, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:jnlp:generate_names_for_maven_jnlp_servers --trace"
  end
  desc "generate MavenJnlp resources from jnlp servers in settings.yml"
  task :generate_maven_jnlp_resources, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:jnlp:generate_maven_jnlp_resources --trace"
  end
  desc"Generate OtrunkExamples:: Rails models from the content in the otrunk-examples dir."
  task :generate_otrunk_examples_rails_models, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:import:generate_otrunk_examples_rails_models --trace"
  end
  desc"Create git clone of otrunk-examples in <shared_path>/public/otrunk-examples"
  task :create_git_clone_of_otrunk_examples, :roles => :app do
    # Clones into <shared_path>/public so releases can symlink one shared checkout.
    run "cd #{shared_path} && " +
        "mkdir -p public && " +
        "cd public && " +
        "git clone git://github.com/concord-consortium/otrunk-examples.git"
  end
  desc"Download nces data files from NCES websites"
  task :download_nces_data, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} portal:setup:download_nces_data --trace"
  end
  desc "Import nces data from files: config/nces_data/* -- uses APP_CONFIG[:states_and_provinces] if defined to filter on states"
  task :nces_data_from_files, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} portal:setup:import_nces_from_files --trace"
  end
  desc"reload the default probe and vendor_interface configurations."
  task :reload_probe_configurations, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} db:backup:load_probe_configurations --trace"
  end
  desc "Import RINET data"
  task :import_sis_import_data, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:import:rinet --trace"
  end
  desc "Restore couchdb from S3"
  task :restore_couchdb_from_backup, :roles => :app do
    # NOTE(review): runs a host-level script with sudo; assumes the script is
    # installed on the app server — confirm.
    sudo "/usr/bin/restore_couchdb.sh"
  end
end
#############################################################
# DELETE
#############################################################
# Tasks that remove generated resources from the deployed application.
namespace :delete do
  desc "delete all the MavenJnlp resources"
  task :maven_jnlp_resources, :roles => :app do
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:jnlp:delete_maven_jnlp_resources --trace"
  end
  desc"Delete the otrunk-example models (Rails models)."
  task :otrunk_example_models, :roles => :app do
    # Removes only the Rails-side models; the otrunk-examples checkout is untouched.
    run "cd #{deploy_to}/#{current_dir} && " +
        "bundle exec rake RAILS_ENV=#{rails_env} app:import:delete_otrunk_example_models --trace"
  end
end
#############################################################
# Convert
#############################################################
namespace :convert do
desc 'wrap orphaned activities in a parent investigation'
task :wrap_orphaned_activities_in_investigations, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:make:investigations --trace"
end
desc 'set new grade_span_expectation attribute: gse_key'
task :set_gse_keys, :roles => :db, :only => { :primary => true } do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:set_gse_keys --trace"
end
desc 'find page_elements whithout owners and reclaim them'
task :reclaim_page_elements, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:reclaim_elements --trace"
end
desc 'transfer any Investigations owned by the anonymous user to the site admin user'
task :transfer_investigations_owned_by_anonymous, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:transfer_investigations_owned_by_anonymous --trace"
end
desc 'deep set user ownership on all investigations'
task :deep_set_user_on_all_investigations, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:run_deep_set_user_on_all_investigations --trace"
end
desc 'clean up teacher notes owned by the wrong user'
task :clean_teacher_notes, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:clean_teacher_notes --trace"
end
desc 'add the author role to all users who have authored an Investigation'
task :add_author_role_to_authors, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:add_author_role_to_authors --trace"
end
desc "set publication_status to 'draft' for all Investigations without publication_status"
task :set_publication_status_to_draft, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:pub_status --trace"
end
desc "Data Collectors with a static graph_type to a static attribute; Embeddable::DataCollectors with a graph_type_id of nil to Sensor"
task :data_collectors_with_invalid_graph_types, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:data_collectors_with_invalid_graph_types --trace"
end
desc "copy truncated Embeddable::Xhtml from Embeddable::Xhtml#content, Embeddable::OpenResponse and Embeddable::MultipleChoice#prompt into name"
task :copy_truncated_xhtml_into_name, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:copy_truncated_xhtml_into_name --trace"
end
desc "generate date_str attributes from version_str for MavenJnlp::VersionedJnlpUrls"
task :generate_date_str_for_versioned_jnlp_urls, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:generate_date_str_for_versioned_jnlp_urls --trace"
end
desc "Create bundle and console loggers for learners"
task :create_bundle_and_console_loggers_for_learners, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:create_bundle_and_console_loggers_for_learners --trace"
end
# Tuesday, August 11, 2009
desc "Find and report on invalid Dataservice::BundleContent objects"
task :find_and_report_on_invalid_dataservice_bundle_content_objects, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:find_and_report_on_invalid_dataservice_bundle_content_objects --trace"
end
desc "Find and delete invalid Dataservice::BundleContent objects"
task :find_and_delete_invalid_dataservice_bundle_content_objects, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:find_and_delete_invalid_dataservice_bundle_content_objects --trace"
end
desc "generate otml, valid_xml, and empty attributes for BundleContent objects"
task :generate_otml_valid_xml_and_empty_attributes_for_bundle_content_objects, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:generate_otml_valid_xml_and_empty_attributes_for_bundle_content_objects --trace"
end
# Thursday October 8, 2009
desc "Create default users, roles, district, school, course, and class, and greade_levels"
task :default_users_roles, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:setup:default_users_roles --trace"
end
desc "Create default portal resources: district, school, course, and class, investigation and grades"
task :default_portal_resources, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:setup:default_portal_resources --trace"
end
desc "Create districts and schools from NCES records for States listed in settings.yml"
task :create_districts_and_schools_from_nces_data, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} portal:setup:create_districts_and_schools_from_nces_data --trace"
end
# Wed Dec 2nd
desc "Convert Existing Clazzes so that multiple Teachers can own a clazz. (many to many change)"
task :convert_clazzes_to_multi_teacher, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:convert_clazzes_to_multi_teacher --trace"
end
# Wed Dec 23nd, 2009
desc "Delete_and_regenerate_maven_jnlp_resources"
task :delete_and_regenerate_maven_jnlp_resources, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"ANSWER_YES=true bundle exec rake RAILS_ENV=#{rails_env} app:jnlp:delete_and_regenerate_maven_jnlp_resources --trace"
end
# Wed Jan 6 2010
desc "Fixup inner pages: add static_page associations (run deploy:migrate first!)"
task :add_static_pages_to_inner_pages, :roles => :app do
run "cd #{deploy_to}/#{current_dir} && " +
"bundle exec rake RAILS_ENV=#{rails_env} app:convert:add_static_page_to_inner_pages --trace"
end
# Feb 3, 2010
desc "Extract and process learner responses from existing OTrunk bundles"
task :extract_learner_responses_from_existing_bundles, :roles => :app do
  app_root = "#{deploy_to}/#{current_dir}"
  run "cd #{app_root} && bundle exec rake RAILS_ENV=#{rails_env} app:convert:extract_learner_responses_from_existing_bundles --trace"
end

desc "Erase all learner responses and reset the tables"
task :erase_all_learner_responses_and_reset_the_tables, :roles => :app do
  app_root = "#{deploy_to}/#{current_dir}"
  run "cd #{app_root} && bundle exec rake RAILS_ENV=#{rails_env} app:convert:erase_all_learner_responses_and_reset_the_tables --trace"
end

# Feb 4, 2010
desc "Convert all index-based MultipleChoice references in existing OTrunk bundles to local_id-based references."
task :convert_choice_answers_to_local_ids, :roles => :app do
  app_root = "#{deploy_to}/#{current_dir}"
  run "cd #{app_root} && bundle exec rake RAILS_ENV=#{rails_env} app:convert:convert_choice_answers_to_local_ids --trace"
end
# seb: 20100513
desc "Populate the new leaid, state, and zipcode portal district and school attributes with data from the NCES tables"
task :populate_new_district_and_school_attributes_with_data_from_nces_tables, :roles => :app do
  app_root = "#{deploy_to}/#{current_dir}"
  run "cd #{app_root} && bundle exec rake RAILS_ENV=#{rails_env} app:convert:populate_new_district_and_school_attributes_with_data_from_nces_tables --trace"
end

# seb: 20100513
desc "Erase the marshalled jnlps stored in the jnlp object directory by the jnlp gem: config/jnlp_objects"
task :empty_jnlp_object_cache, :roles => :app do
  app_root = "#{deploy_to}/#{current_dir}"
  run "cd #{app_root} && bundle exec rake RAILS_ENV=#{rails_env} app:jnlp:empty_jnlp_object_cache --trace"
end

# seb: 20101019
desc "Reset all activity position information"
task :reset_activity_positions, :roles => :app do
  app_root = "#{deploy_to}/#{current_dir}"
  run "cd #{app_root} && bundle exec rake RAILS_ENV=#{rails_env} app:fixup:reset_activity_positions --trace"
end
# seb: 20110126
# See commit: Add "offerings_count" cache counter to runnables
# https://github.com/concord-consortium/rigse/commit/dadea520e3cda26a721e01428527a86222143c68
desc "Recalculate the 'offerings_count' field for runnable objects"
task :reset_offering_counts, :roles => :app do
  app_root = "#{deploy_to}/#{current_dir}"
  # Blow away the cached investigation pages before recounting.
  run "rm -rf #{app_root}/public/investigations/*"
  run "cd #{app_root} && bundle exec rake RAILS_ENV=#{rails_env} offerings:set_counts --trace"
end

# NP 20110512
desc "create an investigation to test all know probe_type / calibration combinations"
task :create_probe_testing_investigation, :roles => :app do
  app_root = "#{deploy_to}/#{current_dir}"
  run "cd #{app_root} && bundle exec rake RAILS_ENV=#{rails_env} app:setup:create_probe_testing_investigation --trace"
end
# seb: 20110516 — District#destroy cascades through dependents
# https://github.com/concord-consortium/rigse/commit/1c9e26919decfe322e0bca412b4fa41928b7108a
desc "*** WARNING *** Delete all real districts, schools, teachers, students, offerings, etc except for the virtual site district and school"
task :delete_all_real_schools, :roles => :app do
  app_root = "#{deploy_to}/#{current_dir}"
  run "cd #{app_root} && bundle exec rake RAILS_ENV=#{rails_env} app:schools:delete_all_real_schools --trace"
end

# seb: 20110715 — repo moved to https://github.com/concord-consortium/rigse
desc "change git remote url for origin to git://github.com/concord-consortium/rigse.git"
task :change_git_origin_url_to_concord_consortium, :roles => :app do
  run "cd #{shared_path}/cached-copy; git remote set-url origin git://github.com/concord-consortium/rigse.git"
end
end
#
# generake (hehe) cap task to run rake tasks.
# found here: http://stackoverflow.com/questions/312214/how-do-i-run-a-rake-task-from-capistrano
namespace :rake_tasks do
  desc "Run a rake task: cap staging rake:invoke task=a_certain_task"
  # Generic shim: forwards the task named in ENV['task'] to rake on the server.
  # Run like: cap staging rake:invoke task=a_certain_task
  task :invoke do
    # Fixed: removed a stray duplicated fragment of this command string that
    # had been pasted on its own line after the run() call (a syntax error).
    run("cd #{deploy_to}/current; bundle exec rake #{ENV['task']} RAILS_ENV=#{rails_env}")
  end
end
#############################################################
# INSTALLER: Help to create installers on various hosts
#############################################################
namespace :installer do
  desc 'copy config -- copy the local installer.yml to the server. For bootstraping a fresh instance.'
  task :copy_config do
    upload("config/installer.yml", "#{deploy_to}/#{current_dir}/config/installer.yml", :via => :scp)
  end

  desc 'create: downloads remote config, caches remote jars, builds installer, uploads new config and installer images'
  task :create, :roles => :app do
    # Keep a backup of the local config, then pull down the server's copy.
    %x[cp config/installer.yml config/installer.yml.mine]
    download("#{deploy_to}/#{current_dir}/config/installer.yml", "config/installer.yml", :via => :scp)
    # The YamlEditor helper is broken:
    #   editor = YamlEditor.new('./config/installer.yml'); editor.edit; editor.write_file
    # so instead just give the user a chance to manually edit installer.yml.
    Capistrano::CLI.ui.ask("You can now edit the config/installer.yml file, press enter when done.")
    %x[bundle exec rake build:installer:rebuild_all ]
    # Post the (possibly edited) config back up to the remote server.
    upload("config/installer.yml", "#{deploy_to}/#{current_dir}/config/installer.yml", :via => :scp)
    # Copy the generated installers themselves up to the remote server.
    Dir.glob("resources/bitrock_installer/installers/*") do |filename|
      basename = File.basename(filename)
      # Fixed: interpolation was garbled ("#(unknown)"); report the file copied.
      puts "copying #{basename}"
      upload(filename, "#{deploy_to}/#{current_dir}/public/installers/#{basename}", :via => :scp)
    end
    # Restore the original local config from the backup.
    %x[cp config/installer.yml.mine config/installer.yml]
  end
end
namespace 'account_data' do
  desc 'upload_csv_for_district: copy the local csv import files to remote for district (set district=whatever)'
  task 'upload_csv_for_district' do
    district = ENV['district']
    if district
      domain = ENV['domain'] || 'rinet_sakai'
      # Removed an unused local (`district_root`) that duplicated from_dir
      # without the district component and was never referenced.
      from_dir = File.join('sis_import_data', 'districts', domain, 'csv', district)
      to_dir = File.join(deploy_to, current_dir, 'sis_import_data', 'districts', domain, 'csv')
      upload(from_dir, to_dir, :via => :scp, :recursive => true)
    end
  end
end
namespace 'jnlp' do
  desc "Bump the JNLP version to the current latest snaphot"
  task :bump_snapshot_to_latest, :roles => :app do
    # Same command as before, built as a single string instead of concatenation.
    cmd = "cd #{deploy_to}/#{current_dir} && RAILS_ENV=#{rails_env} bundle exec rake app:jnlp:bump_snapshot_to_latest --trace"
    run cmd
  end
end
# Deploy lifecycle wiring: permissions, directory layout and shared symlinks.
before 'deploy:restart', 'deploy:set_permissions'
before 'deploy:update_code', 'deploy:make_directory_structure'
after 'deploy:update_code', 'deploy:shared_symlinks'
# see load 'deploy/assets' in Capfile
# after 'deploy:create_symlink', 'deploy:create_asset_packages'
after 'deploy:shared_symlinks', 'deploy:cleanup'
after 'installer:create', 'deploy:restart'
after 'convert:bump_jnlp_to_latest_snapshot', 'deploy:restart'
# Start the delayed_job worker.
# Use a prefix in case multiple apps are deployed to the same server.
require "delayed/recipes"
# Need to use the &block syntax so that deploy_to is correctly set up
# (the value is resolved lazily, after deploy_to has been defined).
set(:delayed_job_args) { "--prefix '#{deploy_to}'" }
after "deploy:stop", "delayed_job:stop"
after "deploy:start", "delayed_job:start"
after "deploy:restart", "delayed_job:restart"
|
# Homebrew formula for mycli 1.20.1 (revision 1), a MySQL command-line client.
# The package and every `resource` below (its pinned Python dependencies) are
# installed into an isolated virtualenv by virtualenv_install_with_resources.
class Mycli < Formula
  include Language::Python::Virtualenv

  desc "CLI for MySQL with auto-completion and syntax highlighting"
  homepage "https://mycli.net/"
  url "https://files.pythonhosted.org/packages/11/dd/ddb8650471155b43b83888f0efce74214c175115e0e37ff415d2f991f1b3/mycli-1.20.1.tar.gz"
  sha256 "d4414ef8573a957b2cbb91db87bc48d48bfc3714ef23cf41cd4a5ad227561c6e"
  revision 1

  bottle do
    cellar :any
    sha256 "1e00109d6e68aff3552e170ebcb1b4cf1bc5283cb0810fce6555bb8365c797b9" => :mojave
    sha256 "68c76e2b2a7cbb81fd8ec2a610ed744fa6cd3563e799e60a84004d8a7c9da144" => :high_sierra
    sha256 "20f2f2af26ba71afafa6e1c8616a3961b5c62f33ea5f6368a0eb45fbca569ba3" => :sierra
  end

  depends_on "openssl@1.1"
  depends_on "python"

  # --- vendored Python dependencies, pinned by sha256 ---
  resource "asn1crypto" do
    url "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz"
    sha256 "9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
  end

  resource "cffi" do
    url "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz"
    sha256 "041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774"
  end

  resource "cli-helpers" do
    url "https://files.pythonhosted.org/packages/43/36/ba00975df9d393c0ccc3b1bf1610227bc4c4d611a5c69249b57be8ba6253/cli_helpers-1.2.1.tar.gz"
    sha256 "98db22eaa86827d99ee6af9f5f3923142d04df256425204530842b032849a165"
  end

  resource "click" do
    url "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz"
    sha256 "5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"
  end

  resource "configobj" do
    url "https://files.pythonhosted.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz"
    sha256 "a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902"
  end

  resource "cryptography" do
    url "https://files.pythonhosted.org/packages/c2/95/f43d02315f4ec074219c6e3124a87eba1d2d12196c2767fadfdc07a83884/cryptography-2.7.tar.gz"
    sha256 "e6347742ac8f35ded4a46ff835c60e68c22a536a8ae5c4422966d06946b6d4c6"
  end

  resource "idna" do
    url "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz"
    sha256 "c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"
  end

  resource "prompt_toolkit" do
    url "https://files.pythonhosted.org/packages/94/a0/57dc47115621d9b3fcc589848cdbcbb6c4c130186e8fc4c4704766a7a699/prompt_toolkit-2.0.9.tar.gz"
    sha256 "2519ad1d8038fd5fc8e770362237ad0364d16a7650fb5724af6997ed5515e3c1"
  end

  resource "pycparser" do
    url "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz"
    sha256 "a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
  end

  resource "Pygments" do
    url "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz"
    sha256 "881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297"
  end

  resource "PyMySQL" do
    url "https://files.pythonhosted.org/packages/da/15/23ba6592920e21cb40eb0fe0ea002d2b6177beb1ca8a4c1add5a8f32754d/PyMySQL-0.9.3.tar.gz"
    sha256 "d8c059dcd81dedb85a9f034d5e22dcb4442c0b201908bede99e306d65ea7c8e7"
  end

  resource "six" do
    url "https://files.pythonhosted.org/packages/dd/bf/4138e7bfb757de47d1f4b6994648ec67a51efe58fa907c1e11e350cddfca/six-1.12.0.tar.gz"
    sha256 "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
  end

  resource "sqlparse" do
    url "https://files.pythonhosted.org/packages/63/c8/229dfd2d18663b375975d953e2bdc06d0eed714f93dcb7732f39e349c438/sqlparse-0.3.0.tar.gz"
    sha256 "7c3dca29c022744e95b547e867cee89f4fce4373f3549ccd8797d8eb52cdb873"
  end

  resource "tabulate" do
    url "https://files.pythonhosted.org/packages/c2/fd/202954b3f0eb896c53b7b6f07390851b1fd2ca84aa95880d7ae4f434c4ac/tabulate-0.8.3.tar.gz"
    sha256 "8af07a39377cee1103a5c8b3330a421c2d99b9141e9cc5ddd2e3263fea416943"
  end

  resource "terminaltables" do
    url "https://files.pythonhosted.org/packages/9b/c4/4a21174f32f8a7e1104798c445dacdc1d4df86f2f26722767034e4de4bff/terminaltables-3.1.0.tar.gz"
    sha256 "f3eb0eb92e3833972ac36796293ca0906e998dc3be91fbe1f8615b331b853b81"
  end

  resource "wcwidth" do
    url "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz"
    sha256 "3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e"
  end

  # Build a virtualenv in libexec and install mycli plus all resources above.
  def install
    virtualenv_install_with_resources
  end

  # Smoke test: the installed binary runs and prints its help text.
  test do
    system bin/"mycli", "--help"
  end
end
mycli: update 1.20.1_1 bottle.
# Homebrew formula for mycli 1.20.1 (revision 1) with updated bottle hashes.
# Identical to the previous revision apart from the rebuilt bottles; all
# Python dependencies are vendored into a virtualenv via the resources below.
class Mycli < Formula
  include Language::Python::Virtualenv

  desc "CLI for MySQL with auto-completion and syntax highlighting"
  homepage "https://mycli.net/"
  url "https://files.pythonhosted.org/packages/11/dd/ddb8650471155b43b83888f0efce74214c175115e0e37ff415d2f991f1b3/mycli-1.20.1.tar.gz"
  sha256 "d4414ef8573a957b2cbb91db87bc48d48bfc3714ef23cf41cd4a5ad227561c6e"
  revision 1

  bottle do
    cellar :any
    sha256 "ec2089af8fd2b62b1abaf73ee07358f20091f0672b16cea5a9a57db17e6640be" => :mojave
    sha256 "1af3274bd5ca31c33f37cdfba59d8a203ef3b45336a132a3f8b71d87a4834988" => :high_sierra
    sha256 "f3cb3c2bc3b9c58977055ab28ba2cfbf9012d76b3a73733583b3ab4f7293d336" => :sierra
  end

  depends_on "openssl@1.1"
  depends_on "python"

  # --- vendored Python dependencies, pinned by sha256 ---
  resource "asn1crypto" do
    url "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz"
    sha256 "9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
  end

  resource "cffi" do
    url "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz"
    sha256 "041c81822e9f84b1d9c401182e174996f0bae9991f33725d059b771744290774"
  end

  resource "cli-helpers" do
    url "https://files.pythonhosted.org/packages/43/36/ba00975df9d393c0ccc3b1bf1610227bc4c4d611a5c69249b57be8ba6253/cli_helpers-1.2.1.tar.gz"
    sha256 "98db22eaa86827d99ee6af9f5f3923142d04df256425204530842b032849a165"
  end

  resource "click" do
    url "https://files.pythonhosted.org/packages/f8/5c/f60e9d8a1e77005f664b76ff8aeaee5bc05d0a91798afd7f53fc998dbc47/Click-7.0.tar.gz"
    sha256 "5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"
  end

  resource "configobj" do
    url "https://files.pythonhosted.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz"
    sha256 "a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902"
  end

  resource "cryptography" do
    url "https://files.pythonhosted.org/packages/c2/95/f43d02315f4ec074219c6e3124a87eba1d2d12196c2767fadfdc07a83884/cryptography-2.7.tar.gz"
    sha256 "e6347742ac8f35ded4a46ff835c60e68c22a536a8ae5c4422966d06946b6d4c6"
  end

  resource "idna" do
    url "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz"
    sha256 "c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"
  end

  resource "prompt_toolkit" do
    url "https://files.pythonhosted.org/packages/94/a0/57dc47115621d9b3fcc589848cdbcbb6c4c130186e8fc4c4704766a7a699/prompt_toolkit-2.0.9.tar.gz"
    sha256 "2519ad1d8038fd5fc8e770362237ad0364d16a7650fb5724af6997ed5515e3c1"
  end

  resource "pycparser" do
    url "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz"
    sha256 "a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
  end

  resource "Pygments" do
    url "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz"
    sha256 "881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297"
  end

  resource "PyMySQL" do
    url "https://files.pythonhosted.org/packages/da/15/23ba6592920e21cb40eb0fe0ea002d2b6177beb1ca8a4c1add5a8f32754d/PyMySQL-0.9.3.tar.gz"
    sha256 "d8c059dcd81dedb85a9f034d5e22dcb4442c0b201908bede99e306d65ea7c8e7"
  end

  resource "six" do
    url "https://files.pythonhosted.org/packages/dd/bf/4138e7bfb757de47d1f4b6994648ec67a51efe58fa907c1e11e350cddfca/six-1.12.0.tar.gz"
    sha256 "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
  end

  resource "sqlparse" do
    url "https://files.pythonhosted.org/packages/63/c8/229dfd2d18663b375975d953e2bdc06d0eed714f93dcb7732f39e349c438/sqlparse-0.3.0.tar.gz"
    sha256 "7c3dca29c022744e95b547e867cee89f4fce4373f3549ccd8797d8eb52cdb873"
  end

  resource "tabulate" do
    url "https://files.pythonhosted.org/packages/c2/fd/202954b3f0eb896c53b7b6f07390851b1fd2ca84aa95880d7ae4f434c4ac/tabulate-0.8.3.tar.gz"
    sha256 "8af07a39377cee1103a5c8b3330a421c2d99b9141e9cc5ddd2e3263fea416943"
  end

  resource "terminaltables" do
    url "https://files.pythonhosted.org/packages/9b/c4/4a21174f32f8a7e1104798c445dacdc1d4df86f2f26722767034e4de4bff/terminaltables-3.1.0.tar.gz"
    sha256 "f3eb0eb92e3833972ac36796293ca0906e998dc3be91fbe1f8615b331b853b81"
  end

  resource "wcwidth" do
    url "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz"
    sha256 "3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e"
  end

  # Build a virtualenv in libexec and install mycli plus all resources above.
  def install
    virtualenv_install_with_resources
  end

  # Smoke test: the installed binary runs and prints its help text.
  test do
    system bin/"mycli", "--help"
  end
end
|
# Capfile adapted from:
# http://kris.me.uk/2011/10/28/rails-rvm-passenger-capistrano-git-apache.html
# RVM bootstrap: put rvm's capistrano integration on the load path.
$:.unshift(File.expand_path('./lib', ENV['rvm_path']))
require 'rvm/capistrano'
set :rvm_ruby_string, '1.9.2-p290'
# bundler bootstrap
require "bundler/capistrano"
# main details
set :application, "wishlist"
server "mhyee.com", :app, :web, :db, :primary => true
# server details
default_run_options[:pty] = true
# NOTE(review): the ssh key file appears to be named after the deploy
# account ("app@mhyee.com") — confirm this path exists on deployers' machines.
ssh_options[:keys] = File.join(ENV["HOME"], ".ssh", "app@mhyee.com")
set :deploy_to, "/var/www/wishlist.mhyee.com"
set :deploy_via, :remote_cache
set :user, "app"
set :use_sudo, false
# repo details
set :scm, :git
set :repository, "git@git.mhyee.com:wishlist.git"
set :scm_username, "app"
set :branch, "master"
namespace :deploy do
  # Passenger-style app: start/stop are no-ops, restart touches tmp/restart.txt.
  task :start do ; end
  task :stop do ; end

  desc "Restart application"
  task :restart, :roles => :app, :except => { :no_release => true } do
    run "#{try_sudo} touch #{File.join(current_path,'tmp','restart.txt')}"
  end

  desc "Uploads and symlinks sensitive configuration files"
  task :secrets do
    # Fixed typo: 'databse.yml' -> 'database.yml' (otherwise the symlink
    # points at a file Rails never reads and the upload is useless).
    ['database.yml', 'secrets.yml'].each do |file|
      upload "#{Rails.root}/config/#{file}", "#{shared_path}/config/#{file}"
      run "ln -sf #{shared_path}/config/#{file} #{current_release}/config/#{file}"
    end
  end
end
after "deploy:symlink", "deploy:secrets"
Fix typo
# Capfile adapted from:
# http://kris.me.uk/2011/10/28/rails-rvm-passenger-capistrano-git-apache.html
# RVM bootstrap: put rvm's capistrano integration on the load path.
$:.unshift(File.expand_path('./lib', ENV['rvm_path']))
require 'rvm/capistrano'
set :rvm_ruby_string, '1.9.2-p290'
# bundler bootstrap
require "bundler/capistrano"
# main details
set :application, "wishlist"
server "mhyee.com", :app, :web, :db, :primary => true
# server details
default_run_options[:pty] = true
# NOTE(review): the ssh key file appears to be named after the deploy
# account ("app@mhyee.com") — confirm this path exists on deployers' machines.
ssh_options[:keys] = File.join(ENV["HOME"], ".ssh", "app@mhyee.com")
set :deploy_to, "/var/www/wishlist.mhyee.com"
set :deploy_via, :remote_cache
set :user, "app"
set :use_sudo, false
# repo details
set :scm, :git
set :repository, "git@git.mhyee.com:wishlist.git"
set :scm_username, "app"
set :branch, "master"
namespace :deploy do
  # Passenger manages the app process; only touching tmp/restart.txt matters.
  task(:start) { }
  task(:stop) { }

  desc "Restart application"
  task :restart, :roles => :app, :except => { :no_release => true } do
    restart_file = File.join(current_path, 'tmp', 'restart.txt')
    run "#{try_sudo} touch #{restart_file}"
  end

  desc "Uploads and symlinks sensitive configuration files"
  task :secrets do
    %w[database.yml secrets.yml].each do |config_file|
      upload "#{Rails.root}/config/#{config_file}", "#{shared_path}/config/#{config_file}"
      run "ln -sf #{shared_path}/config/#{config_file} #{current_release}/config/#{config_file}"
    end
  end
end
after "deploy:symlink", "deploy:secrets"
# Homebrew formula for mycli 1.16.0, a MySQL command-line client.
# Installed with its pinned Python dependencies (the resources below)
# into an isolated virtualenv.
class Mycli < Formula
  include Language::Python::Virtualenv

  desc "CLI for MySQL with auto-completion and syntax highlighting"
  homepage "https://mycli.net/"
  url "https://files.pythonhosted.org/packages/de/19/74c21a13074d13d955e2ad16943b0f12b0bf78845dc130f2d46f451b65cc/mycli-1.16.0.tar.gz"
  sha256 "29c65537b6616619b45956550fa76662caa6631eeee96e5cafd407909d8e9649"

  bottle do
    cellar :any
    sha256 "0e595019db782a382f315a1c449334391b17630194193b45c61bfdd4e20efeb2" => :high_sierra
    sha256 "a1d590e59cd55c5539032bbb6e3682de3ac687c924418a57875fdac7cc7042c1" => :sierra
    sha256 "efb1798af243e65cc470d754439b5d82fde609081d5e77b40e0a29ea0d36b5d1" => :el_capitan
  end

  # Only older macOS releases need a brewed Python 2.
  depends_on "python@2" if MacOS.version <= :snow_leopard
  depends_on "openssl"

  # --- vendored Python dependencies, pinned by sha256 ---
  resource "asn1crypto" do
    url "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz"
    sha256 "9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
  end

  resource "backports.csv" do
    url "https://files.pythonhosted.org/packages/6a/0b/2071ad285e87dd26f5c02147ba13abf7ec777ff20416a60eb15ea204ca76/backports.csv-1.0.5.tar.gz"
    sha256 "8c421385cbc6042ba90c68c871c5afc13672acaf91e1508546d6cda6725ebfc6"
  end

  resource "cffi" do
    url "https://files.pythonhosted.org/packages/10/f7/3b302ff34045f25065091d40e074479d6893882faef135c96f181a57ed06/cffi-1.11.4.tar.gz"
    sha256 "df9083a992b17a28cd4251a3f5c879e0198bb26c9e808c4647e0a18739f1d11d"
  end

  resource "cli-helpers" do
    url "https://files.pythonhosted.org/packages/88/25/1fd3b2181b4e1fec360a1176a91ab715297b55db045e91af798c31c58aa3/cli_helpers-1.0.1.tar.gz"
    sha256 "55353117960700dfbe000a71cda0bad1ac865e3a9999f1fa81047fa9e1322d42"
  end

  resource "click" do
    url "https://files.pythonhosted.org/packages/95/d9/c3336b6b5711c3ab9d1d3a80f1a3e2afeb9d8c02a7166462f6cc96570897/click-6.7.tar.gz"
    sha256 "f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b"
  end

  resource "configobj" do
    url "https://files.pythonhosted.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz"
    sha256 "a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902"
  end

  resource "cryptography" do
    url "https://files.pythonhosted.org/packages/78/c5/7188f15a92413096c93053d5304718e1f6ba88b818357d05d19250ebff85/cryptography-2.1.4.tar.gz"
    sha256 "e4d967371c5b6b2e67855066471d844c5d52d210c36c28d49a8507b96e2c5291"
  end

  resource "enum34" do
    url "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz"
    sha256 "8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
  end

  resource "idna" do
    url "https://files.pythonhosted.org/packages/f4/bd/0467d62790828c23c47fc1dfa1b1f052b24efdf5290f071c7a91d0d82fd3/idna-2.6.tar.gz"
    sha256 "2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f"
  end

  resource "ipaddress" do
    url "https://files.pythonhosted.org/packages/f0/ba/860a4a3e283456d6b7e2ab39ce5cf11a3490ee1a363652ac50abf9f0f5df/ipaddress-1.0.19.tar.gz"
    sha256 "200d8686011d470b5e4de207d803445deee427455cd0cb7c982b68cf82524f81"
  end

  resource "prompt_toolkit" do
    url "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz"
    sha256 "858588f1983ca497f1cf4ffde01d978a3ea02b01c8a26a8bbc5cd2e66d816917"
  end

  resource "pycparser" do
    url "https://files.pythonhosted.org/packages/8c/2d/aad7f16146f4197a11f8e91fb81df177adcc2073d36a17b1491fd09df6ed/pycparser-2.18.tar.gz"
    sha256 "99a8ca03e29851d96616ad0404b4aad7d9ee16f25c9f9708a11faf2810f7b226"
  end

  resource "Pygments" do
    url "https://files.pythonhosted.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz"
    sha256 "dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc"
  end

  resource "PyMySQL" do
    url "https://files.pythonhosted.org/packages/a8/b4/3544c8e6ed9b1c6a00e5b302e3d5a646e43a8a0ac5216f5ae8706688706e/PyMySQL-0.8.0.tar.gz"
    sha256 "32da4a66397077d42908e449688f2ec71c2b18892a6cd04f03ab2aa828a70f40"
  end

  resource "six" do
    url "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz"
    sha256 "70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9"
  end

  resource "sqlparse" do
    url "https://files.pythonhosted.org/packages/79/3c/2ad76ba49f9e3d88d2b58e135b7821d93741856d1fe49970171f73529303/sqlparse-0.2.4.tar.gz"
    sha256 "ce028444cfab83be538752a2ffdb56bc417b7784ff35bb9a3062413717807dec"
  end

  resource "tabulate" do
    url "https://files.pythonhosted.org/packages/12/c2/11d6845db5edf1295bc08b2f488cf5937806586afe42936c3f34c097ebdc/tabulate-0.8.2.tar.gz"
    sha256 "e4ca13f26d0a6be2a2915428dc21e732f1e44dad7f76d7030b2ef1ec251cf7f2"
  end

  resource "terminaltables" do
    url "https://files.pythonhosted.org/packages/9b/c4/4a21174f32f8a7e1104798c445dacdc1d4df86f2f26722767034e4de4bff/terminaltables-3.1.0.tar.gz"
    sha256 "f3eb0eb92e3833972ac36796293ca0906e998dc3be91fbe1f8615b331b853b81"
  end

  resource "wcwidth" do
    url "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz"
    sha256 "3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e"
  end

  # Build a virtualenv in libexec and install mycli plus all resources above.
  def install
    virtualenv_install_with_resources
  end

  # Smoke test: the installed binary runs and prints its help text.
  test do
    system bin/"mycli", "--help"
  end
end
mycli: depend on python@2
# Homebrew formula for mycli 1.16.0; this revision makes python@2 an
# unconditional dependency (previously only required on <= Snow Leopard).
# All pinned Python dependencies are vendored into a virtualenv.
class Mycli < Formula
  include Language::Python::Virtualenv

  desc "CLI for MySQL with auto-completion and syntax highlighting"
  homepage "https://mycli.net/"
  url "https://files.pythonhosted.org/packages/de/19/74c21a13074d13d955e2ad16943b0f12b0bf78845dc130f2d46f451b65cc/mycli-1.16.0.tar.gz"
  sha256 "29c65537b6616619b45956550fa76662caa6631eeee96e5cafd407909d8e9649"

  bottle do
    cellar :any
    sha256 "0e595019db782a382f315a1c449334391b17630194193b45c61bfdd4e20efeb2" => :high_sierra
    sha256 "a1d590e59cd55c5539032bbb6e3682de3ac687c924418a57875fdac7cc7042c1" => :sierra
    sha256 "efb1798af243e65cc470d754439b5d82fde609081d5e77b40e0a29ea0d36b5d1" => :el_capitan
  end

  depends_on "python@2"
  depends_on "openssl"

  # --- vendored Python dependencies, pinned by sha256 ---
  resource "asn1crypto" do
    url "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz"
    sha256 "9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
  end

  resource "backports.csv" do
    url "https://files.pythonhosted.org/packages/6a/0b/2071ad285e87dd26f5c02147ba13abf7ec777ff20416a60eb15ea204ca76/backports.csv-1.0.5.tar.gz"
    sha256 "8c421385cbc6042ba90c68c871c5afc13672acaf91e1508546d6cda6725ebfc6"
  end

  resource "cffi" do
    url "https://files.pythonhosted.org/packages/10/f7/3b302ff34045f25065091d40e074479d6893882faef135c96f181a57ed06/cffi-1.11.4.tar.gz"
    sha256 "df9083a992b17a28cd4251a3f5c879e0198bb26c9e808c4647e0a18739f1d11d"
  end

  resource "cli-helpers" do
    url "https://files.pythonhosted.org/packages/88/25/1fd3b2181b4e1fec360a1176a91ab715297b55db045e91af798c31c58aa3/cli_helpers-1.0.1.tar.gz"
    sha256 "55353117960700dfbe000a71cda0bad1ac865e3a9999f1fa81047fa9e1322d42"
  end

  resource "click" do
    url "https://files.pythonhosted.org/packages/95/d9/c3336b6b5711c3ab9d1d3a80f1a3e2afeb9d8c02a7166462f6cc96570897/click-6.7.tar.gz"
    sha256 "f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b"
  end

  resource "configobj" do
    url "https://files.pythonhosted.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz"
    sha256 "a2f5650770e1c87fb335af19a9b7eb73fc05ccf22144eb68db7d00cd2bcb0902"
  end

  resource "cryptography" do
    url "https://files.pythonhosted.org/packages/78/c5/7188f15a92413096c93053d5304718e1f6ba88b818357d05d19250ebff85/cryptography-2.1.4.tar.gz"
    sha256 "e4d967371c5b6b2e67855066471d844c5d52d210c36c28d49a8507b96e2c5291"
  end

  resource "enum34" do
    url "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz"
    sha256 "8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
  end

  resource "idna" do
    url "https://files.pythonhosted.org/packages/f4/bd/0467d62790828c23c47fc1dfa1b1f052b24efdf5290f071c7a91d0d82fd3/idna-2.6.tar.gz"
    sha256 "2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f"
  end

  resource "ipaddress" do
    url "https://files.pythonhosted.org/packages/f0/ba/860a4a3e283456d6b7e2ab39ce5cf11a3490ee1a363652ac50abf9f0f5df/ipaddress-1.0.19.tar.gz"
    sha256 "200d8686011d470b5e4de207d803445deee427455cd0cb7c982b68cf82524f81"
  end

  resource "prompt_toolkit" do
    url "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz"
    sha256 "858588f1983ca497f1cf4ffde01d978a3ea02b01c8a26a8bbc5cd2e66d816917"
  end

  resource "pycparser" do
    url "https://files.pythonhosted.org/packages/8c/2d/aad7f16146f4197a11f8e91fb81df177adcc2073d36a17b1491fd09df6ed/pycparser-2.18.tar.gz"
    sha256 "99a8ca03e29851d96616ad0404b4aad7d9ee16f25c9f9708a11faf2810f7b226"
  end

  resource "Pygments" do
    url "https://files.pythonhosted.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz"
    sha256 "dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc"
  end

  resource "PyMySQL" do
    url "https://files.pythonhosted.org/packages/a8/b4/3544c8e6ed9b1c6a00e5b302e3d5a646e43a8a0ac5216f5ae8706688706e/PyMySQL-0.8.0.tar.gz"
    sha256 "32da4a66397077d42908e449688f2ec71c2b18892a6cd04f03ab2aa828a70f40"
  end

  resource "six" do
    url "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz"
    sha256 "70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9"
  end

  resource "sqlparse" do
    url "https://files.pythonhosted.org/packages/79/3c/2ad76ba49f9e3d88d2b58e135b7821d93741856d1fe49970171f73529303/sqlparse-0.2.4.tar.gz"
    sha256 "ce028444cfab83be538752a2ffdb56bc417b7784ff35bb9a3062413717807dec"
  end

  resource "tabulate" do
    url "https://files.pythonhosted.org/packages/12/c2/11d6845db5edf1295bc08b2f488cf5937806586afe42936c3f34c097ebdc/tabulate-0.8.2.tar.gz"
    sha256 "e4ca13f26d0a6be2a2915428dc21e732f1e44dad7f76d7030b2ef1ec251cf7f2"
  end

  resource "terminaltables" do
    url "https://files.pythonhosted.org/packages/9b/c4/4a21174f32f8a7e1104798c445dacdc1d4df86f2f26722767034e4de4bff/terminaltables-3.1.0.tar.gz"
    sha256 "f3eb0eb92e3833972ac36796293ca0906e998dc3be91fbe1f8615b331b853b81"
  end

  resource "wcwidth" do
    url "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz"
    sha256 "3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e"
  end

  # Build a virtualenv in libexec and install mycli plus all resources above.
  def install
    virtualenv_install_with_resources
  end

  # Smoke test: the installed binary runs and prints its help text.
  test do
    system bin/"mycli", "--help"
  end
end
|
# Capistrano settings for deploying the yogo application from GitHub.
set :application, "yogo"
set :use_sudo, false
set :scm, :git
set :repository, "git://github.com/yogo/yogo.git"
set :branch, "master"
set :deploy_via, :remote_cache
set :copy_exclude, [".git"]
# set :user, "yogo"
#
# role :web, "yogo.cns.montana.edu" # Your HTTP server, Apache/etc
# role :app, "yogo.cns.montana.edu" # This may be the same as your `Web` server
# Guard flag so the interactive :user_settings task only prompts once.
set :ran_user_settings, false
# Interactively ask for the target server and deploy user, then derive
# :user and :deploy_to. Guarded by :ran_user_settings so the many tasks
# hooked to this only trigger the prompts once per cap invocation.
task :user_settings do
  if !ran_user_settings
    server_prompt = "What server are you deploying to?"
    # NOTE(review): the value is a Proc, so the prompt presumably fires when
    # :temp_server is first referenced below — confirm Capistrano caches the
    # result rather than re-prompting on each reference.
    set :temp_server, Proc.new { Capistrano::CLI.ui.ask(server_prompt)}
    role :web, "#{temp_server}"
    role :app, "#{temp_server}"
    user_prompt = "What user are you deploying to the server under? (defaults to 'yogo')"
    set :temp_user, Proc.new { Capistrano::CLI.ui.ask(user_prompt)}
    if temp_user.empty?
      # Blank answer: fall back to the default 'yogo' account.
      set :user, "yogo"
      set :deploy_to, "/home/yogo/rails/yogo/"
    else
      set :user, "#{temp_user}"
      set :deploy_to, "/home/#{temp_user}/rails/yogo/"
    end
    set :ran_user_settings, true
  end
end
# Ensure the interactive settings prompt runs before any task that needs
# server/user configuration. (Block param renamed so it does not shadow
# the Capistrano `task` method.)
%w[
  bundle:install deploy deploy:check deploy:cleanup deploy:cold deploy:migrate
  deploy:migrations deploy:pending deploy:pending:diff deploy:rollback
  deploy:rollback:code deploy:setup deploy:symlink deploy:update
  deploy:update_code deploy:upload deploy:web:disable deploy:web:enable
  invoke persvr:setup persvr:start persvr:stop persvr:drop
  persvr:version shell
].each do |task_name|
  before task_name, :user_settings
end
namespace :deploy do
  # Passenger manages the app process: start/stop are no-ops and a restart
  # is signalled by touching tmp/restart.txt.
  task(:start) { }
  task(:stop) { }
  task :restart, :roles => :app, :except => { :no_release => true } do
    restart_file = File.join(current_path, 'tmp', 'restart.txt')
    run "#{try_sudo} touch #{restart_file}"
  end
end
namespace :db do
  # Create the shared database directories once per server.
  task :setup do
    %w[persvr persevere].each do |dir|
      run "mkdir -p #{deploy_to}#{shared_dir}/database/#{dir}"
    end
  end
  # Point each release at the shared database directories.
  task :symlink do
    run "ln -nfs #{deploy_to}#{shared_dir}/database/persvr #{release_path}/db/persvr"
    run "ln -nfs #{deploy_to}#{shared_dir}/database/persevere #{release_path}/vendor/persevere"
  end
end
after "deploy:setup", "db:setup"
after "deploy:update_code", "db:symlink"
namespace :assets do
  # Create the shared asset directories once per server.
  task :setup do
    %w[files images].each do |dir|
      run "mkdir -p #{deploy_to}#{shared_dir}/assets/#{dir}"
    end
  end
  # Point each release's public asset paths at the shared directories.
  task :symlink do
    %w[files images].each do |dir|
      run "ln -nfs #{deploy_to}#{shared_dir}/assets/#{dir} #{release_path}/public/#{dir}"
    end
  end
end
after "deploy:setup", "assets:setup"
after "deploy:update_code", "assets:symlink"

# Swap in the server-specific settings file for this release.
task :setup_for_server do
  cmd = "rm #{release_path}/config/settings.yml && cp #{release_path}/config/server_settings.yml #{release_path}/config/settings.yml"
  run(cmd)
end
after "deploy:update_code", "setup_for_server"
namespace :bundle do
  desc "Run bundle install on the server"
  task :install do
    run "bash -c 'cd #{current_path} && bundle install'"
  end
end
after 'setup_for_server', 'bundle:install'
namespace :persvr do
  # Persevere management tasks. Each one shells into the current release so
  # the app's Rakefile and bundle are in effect; PERSEVERE_HOME points at the
  # shared, release-independent database directory so data survives deploys.
  desc "Setup Persevere on the server"
  task :setup do
    run("bash -c 'cd #{current_path} && rake persvr:setup'")
  end
  desc "Start Persevere on the server"
  task :start do
    puts '************************* This takes me a long time sometimes *************************'
    puts '************************************* Be patient **************************************'
    run("bash -c 'cd #{current_path} && rake persvr:start PERSEVERE_HOME=#{deploy_to}#{shared_dir}/database/persevere RAILS_ENV=production'")
  end
  desc "Stop Persevere on the server"
  task :stop do
    puts '************************* This takes me a long time sometimes *************************'
    puts '************************************* Be patient **************************************'
    # BUG FIX: this task previously ran `rake persvr:start` (copy-paste error),
    # so `cap persvr:stop` (re)started the server instead of stopping it.
    run("bash -c 'cd #{current_path} && rake persvr:stop PERSEVERE_HOME=#{deploy_to}#{shared_dir}/database/persevere RAILS_ENV=production'")
  end
  desc "Drop the Persevere database on the server"
  task :drop do
    run("bash -c 'cd #{current_path} && rake persvr:drop PERSEVERE_HOME=#{deploy_to}#{shared_dir}/database/persevere RAILS_ENV=production'")
  end
  desc "Report the Persevere version installed on the server"
  task :version do
    run("bash -c 'cd #{current_path} && rake persvr:version PERSEVERE_HOME=#{deploy_to}#{shared_dir}/database/persevere RAILS_ENV=production'")
  end
end
updated the yogo deploy file for crux
# Capistrano settings for the crux application.
set :application,  "crux"
set :use_sudo,     false
# Source control: public GitHub repo, deployed via a cached remote clone.
set :scm,          :git
set :repository,   "git://github.com/pol/crux.git"
set :branch,       "master"
set :deploy_via,   :remote_cache
set :copy_exclude, [".git"]
# set :user, "yogo"
#
# role :web, "yogo.cns.montana.edu" # Your HTTP server, Apache/etc
# role :app, "yogo.cns.montana.edu" # This may be the same as your `Web` server
# Guard flag so :user_settings only applies its configuration once.
set :ran_user_settings, false
# Applies per-user/per-server deployment settings exactly once per cap run
# (guarded by :ran_user_settings, which it flips to true when done).
task :user_settings do
  unless ran_user_settings
    set :user, "crux"
    set :deploy_to, "/home/crux/"
    server "crux.msu.montana.edu", :app, :web, :db, :primary => true
    # server_prompt = "What server are you deploying to?"
    # set :temp_server, Proc.new { Capistrano::CLI.ui.ask(server_prompt)}
    # role :web, "#{temp_server}"
    # role :app, "#{temp_server}"
    # user_prompt = "What user are you deploying to the server under? (defaults to 'yogo')"
    # set :temp_user, Proc.new { Capistrano::CLI.ui.ask(user_prompt)}
    # if temp_user.empty?
    #   set :user, "yogo"
    #   set :deploy_to, "/home/yogo/rails/yogo/"
    # else
    #   set :user, "#{temp_user}"
    #   set :deploy_to, "/home/#{temp_user}/rails/yogo/"
    # end
    set :ran_user_settings, true
  end
end
# Every user-facing task must see the per-user settings first, so hook
# :user_settings in front of each of them.
%w[
  bundle:install deploy deploy:check deploy:cleanup deploy:cold deploy:migrate
  deploy:migrations deploy:pending deploy:pending:diff deploy:rollback deploy:rollback:code
  deploy:setup deploy:symlink deploy:update deploy:update_code deploy:upload deploy:web:disable
  deploy:web:enable invoke persvr:setup persvr:start persvr:stop persvr:drop
  persvr:version shell
].each { |hooked_task| before(hooked_task, :user_settings) }
# before deploy, :user_settings
# Passenger-style deployment: start/stop are deliberate no-ops, and restart
# just touches tmp/restart.txt in the current release.
namespace :deploy do
  task(:start) {}
  task(:stop) {}
  task :restart, :roles => :app, :except => { :no_release => true } do
    restart_marker = File.join(current_path, 'tmp', 'restart.txt')
    run "#{try_sudo} touch #{restart_marker}"
  end
end
# The blazeds directory lives under the shared path so it survives releases;
# each new release gets a symlink pointing back at it.
namespace :db do
  task :setup do
    shared_blazeds = "#{deploy_to}#{shared_dir}/blazeds"
    run "mkdir -p #{shared_blazeds}"
  end
  task :symlink do
    shared_blazeds = "#{deploy_to}#{shared_dir}/blazeds"
    run "ln -nfs #{shared_blazeds} #{release_path}/blazeds"
  end
end
after "deploy:setup", "db:setup"
after "deploy:update_code", "db:symlink"
# Uploaded assets are kept in the shared path and symlinked into public/
# on every code update.
namespace :assets do
  task :setup do
    %w[files images].each do |asset_dir|
      run "mkdir -p #{deploy_to}#{shared_dir}/assets/#{asset_dir}"
    end
  end
  task :symlink do
    %w[files images].each do |asset_dir|
      run "ln -nfs #{deploy_to}#{shared_dir}/assets/#{asset_dir} #{release_path}/public/#{asset_dir}"
    end
  end
end
after "deploy:setup", "assets:setup"
after "deploy:update_code", "assets:symlink"
# Replace the checked-in settings.yml with the server-specific variant
# after each code update, then install gems into the new release.
task :setup_for_server do
  settings_file        = "#{release_path}/config/settings.yml"
  server_settings_file = "#{release_path}/config/server_settings.yml"
  run("rm #{settings_file} && cp #{server_settings_file} #{settings_file}")
end
after "deploy:update_code", "setup_for_server"
namespace :bundle do
  desc "Run bundle install on the server"
  task :install do
    run("bash -c 'cd #{current_path} && bundle install'")
  end
end
after 'setup_for_server', 'bundle:install'
# Tomcat hosts both blazeds and persevere; start/stop delegate to the app's
# rake tasks in the current release under the production environment.
namespace :tomcat do
  # Shared "this is slow" warning, printed before either operation.
  slow_warning = proc do
    puts '************************* This takes me a long time sometimes *************************'
    puts '************************************* Be patient **************************************'
  end

  desc "Start the Tomcat Instance on the server (blazeds and persevere)"
  task :start do
    slow_warning.call
    run("bash -c 'cd #{current_path} && rake blazeds:start RAILS_ENV=production'")
  end

  desc "Stop the Tomcat Instance on the server (blazeds and persevere)"
  task :stop do
    slow_warning.call
    run("bash -c 'cd #{current_path} && rake blazeds:stop RAILS_ENV=production'")
  end
end
# namespace :persvr do
# desc "Setup Persevere on the server"
# task :setup do
# run("bash -c 'cd #{current_path} && rake persvr:setup'")
# end
#
# desc "Start Persevere on the server"
# task :start do
# puts '************************* This takes me a long time sometimes *************************'
# puts '************************************* Be patient **************************************'
# run("bash -c 'cd #{current_path} && rake persvr:start PERSEVERE_HOME=#{deploy_to}#{shared_dir}/database/persevere RAILS_ENV=production'")
# end
#
# desc "Stop Persevere on the server"
# task :stop do
# puts '************************* This takes me a long time sometimes *************************'
# puts '************************************* Be patient **************************************'
#    run("bash -c 'cd #{current_path} && rake persvr:stop PERSEVERE_HOME=#{deploy_to}#{shared_dir}/database/persevere RAILS_ENV=production'")
# end
#
# task :drop do
# run("bash -c 'cd #{current_path} && rake persvr:drop PERSEVERE_HOME=#{deploy_to}#{shared_dir}/database/persevere RAILS_ENV=production'")
# end
#
# task :version do
# run("bash -c 'cd #{current_path} && rake persvr:version PERSEVERE_HOME=#{deploy_to}#{shared_dir}/database/persevere RAILS_ENV=production'")
# end
# end
|
myman: import from homebrew/games.
# Homebrew formula for MyMan, a curses-based Pac-Man-style game.
class Myman < Formula
  desc "Text-mode videogame inspired by Namco's Pac-Man"
  homepage "http://myman.sourceforge.net/"
  url "https://downloads.sourceforge.net/project/myman/myman-cvs/myman-cvs-2009-10-30/myman-wip-2009-10-30.tar.gz"
  sha256 "bf69607eabe4c373862c81bf56756f2a96eecb8eaa8c911bb2abda78b40c6d73"
  head ":pserver:anonymous:@myman.cvs.sourceforge.net:/cvsroot/myman", :using => :cvs

  bottle do
    rebuild 1
    sha256 "452b64835fbf52eec3d5b83532153caf2f0fd7c039b35b589031bbcc9db7f0ad" => :sierra
    sha256 "fb2e03ca7d79febb09bbb7f192fc2c6c3fa9cd401dbcd4dfae9b01746aa6faa6" => :el_capitan
    sha256 "60e5b8dca2b167ff37369ec25c208d653e716415e603f2a53db32186c05958cf" => :yosemite
  end

  # GNU variants of the build tools are required at build time;
  # the install step points the build at them via environment overrides.
  depends_on "coreutils" => :build
  depends_on "gnu-sed" => :build
  depends_on "homebrew/dupes/groff" => :build

  def install
    # Redirect the build's tool lookups to the g-prefixed GNU binaries
    # provided by the build-time dependencies above.
    { "RMDIR" => "grmdir", "SED" => "gsed", "INSTALL" => "ginstall" }.each do |tool, gnu_tool|
      ENV[tool] = gnu_tool
    end

    configure_args = %W[
      --disable-debug
      --disable-dependency-tracking
      --disable-silent-rules
      --prefix=#{prefix}
    ]
    system "./configure", *configure_args
    system "make", "install"
  end

  test do
    # Smoke-test the installed binary with the -k flag
    # (non-interactive invocation — presumably prints key bindings; TODO confirm).
    system "#{bin}/myman", "-k"
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.